therubyracer 0.9.0beta2 → 0.9.0beta3


Potentially problematic release.

This version of therubyracer might be problematic.
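
Because the version string contains letters, RubyGems treats it as a prerelease, so an unconstrained `gem 'therubyracer'` requirement will not resolve to it. A minimal Gemfile sketch (assuming Bundler; the version string is taken from the title above) that opts into this beta explicitly:

    # Gemfile — hypothetical sketch: pin the prerelease so resolution does not
    # silently fall back to the last stable release.
    source 'https://rubygems.org'
    gem 'therubyracer', '0.9.0beta3'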

Files changed (920)
  1. data/.gitmodules +3 -0
  2. data/ext/v8/upstream/Makefile +1 -2
  3. data/ext/v8/upstream/v8/.gitignore +33 -0
  4. data/ext/v8/upstream/v8/AUTHORS +42 -0
  5. data/ext/v8/upstream/v8/ChangeLog +2663 -0
  6. data/ext/v8/upstream/{3.1.8 → v8}/LICENSE +0 -0
  7. data/ext/v8/upstream/{3.1.8 → v8}/LICENSE.strongtalk +0 -0
  8. data/ext/v8/upstream/{3.1.8 → v8}/LICENSE.v8 +0 -0
  9. data/ext/v8/upstream/{3.1.8 → v8}/LICENSE.valgrind +0 -0
  10. data/ext/v8/upstream/v8/SConstruct +1473 -0
  11. data/ext/v8/upstream/{3.1.8 → v8}/build/README.txt +0 -0
  12. data/ext/v8/upstream/{3.1.8 → v8}/build/all.gyp +0 -0
  13. data/ext/v8/upstream/{3.1.8 → v8}/build/armu.gypi +0 -0
  14. data/ext/v8/upstream/{3.1.8 → v8}/build/common.gypi +0 -0
  15. data/ext/v8/upstream/{3.1.8 → v8}/build/gyp_v8 +0 -0
  16. data/ext/v8/upstream/v8/include/v8-debug.h +394 -0
  17. data/ext/v8/upstream/v8/include/v8-preparser.h +116 -0
  18. data/ext/v8/upstream/v8/include/v8-profiler.h +505 -0
  19. data/ext/v8/upstream/v8/include/v8-testing.h +104 -0
  20. data/ext/v8/upstream/v8/include/v8.h +4000 -0
  21. data/ext/v8/upstream/{3.1.8 → v8}/include/v8stdint.h +0 -0
  22. data/ext/v8/upstream/v8/preparser/SConscript +38 -0
  23. data/ext/v8/upstream/v8/preparser/preparser-process.cc +169 -0
  24. data/ext/v8/upstream/v8/src/SConscript +380 -0
  25. data/ext/v8/upstream/v8/src/accessors.cc +766 -0
  26. data/ext/v8/upstream/{3.1.8 → v8}/src/accessors.h +0 -0
  27. data/ext/v8/upstream/v8/src/allocation-inl.h +49 -0
  28. data/ext/v8/upstream/v8/src/allocation.cc +122 -0
  29. data/ext/v8/upstream/v8/src/allocation.h +143 -0
  30. data/ext/v8/upstream/v8/src/api.cc +5678 -0
  31. data/ext/v8/upstream/v8/src/api.h +572 -0
  32. data/ext/v8/upstream/{3.1.8 → v8}/src/apinatives.js +0 -0
  33. data/ext/v8/upstream/v8/src/apiutils.h +73 -0
  34. data/ext/v8/upstream/v8/src/arguments.h +116 -0
  35. data/ext/v8/upstream/v8/src/arm/assembler-arm-inl.h +353 -0
  36. data/ext/v8/upstream/v8/src/arm/assembler-arm.cc +2877 -0
  37. data/ext/v8/upstream/v8/src/arm/assembler-arm.h +1382 -0
  38. data/ext/v8/upstream/v8/src/arm/builtins-arm.cc +1634 -0
  39. data/ext/v8/upstream/v8/src/arm/code-stubs-arm.cc +6917 -0
  40. data/ext/v8/upstream/v8/src/arm/code-stubs-arm.h +623 -0
  41. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/codegen-arm-inl.h +0 -0
  42. data/ext/v8/upstream/v8/src/arm/codegen-arm.cc +7437 -0
  43. data/ext/v8/upstream/v8/src/arm/codegen-arm.h +595 -0
  44. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/constants-arm.cc +0 -0
  45. data/ext/v8/upstream/v8/src/arm/constants-arm.h +778 -0
  46. data/ext/v8/upstream/v8/src/arm/cpu-arm.cc +149 -0
  47. data/ext/v8/upstream/v8/src/arm/debug-arm.cc +317 -0
  48. data/ext/v8/upstream/v8/src/arm/deoptimizer-arm.cc +737 -0
  49. data/ext/v8/upstream/v8/src/arm/disasm-arm.cc +1503 -0
  50. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/frames-arm.cc +0 -0
  51. data/ext/v8/upstream/v8/src/arm/frames-arm.h +168 -0
  52. data/ext/v8/upstream/v8/src/arm/full-codegen-arm.cc +4374 -0
  53. data/ext/v8/upstream/v8/src/arm/ic-arm.cc +1793 -0
  54. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/jump-target-arm.cc +0 -0
  55. data/ext/v8/upstream/v8/src/arm/lithium-arm.cc +2120 -0
  56. data/ext/v8/upstream/v8/src/arm/lithium-arm.h +2179 -0
  57. data/ext/v8/upstream/v8/src/arm/lithium-codegen-arm.cc +4132 -0
  58. data/ext/v8/upstream/v8/src/arm/lithium-codegen-arm.h +329 -0
  59. data/ext/v8/upstream/v8/src/arm/lithium-gap-resolver-arm.cc +305 -0
  60. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/lithium-gap-resolver-arm.h +0 -0
  61. data/ext/v8/upstream/v8/src/arm/macro-assembler-arm.cc +2939 -0
  62. data/ext/v8/upstream/v8/src/arm/macro-assembler-arm.h +1071 -0
  63. data/ext/v8/upstream/v8/src/arm/regexp-macro-assembler-arm.cc +1287 -0
  64. data/ext/v8/upstream/v8/src/arm/regexp-macro-assembler-arm.h +253 -0
  65. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/register-allocator-arm-inl.h +0 -0
  66. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/register-allocator-arm.cc +0 -0
  67. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/register-allocator-arm.h +0 -0
  68. data/ext/v8/upstream/v8/src/arm/simulator-arm.cc +3288 -0
  69. data/ext/v8/upstream/v8/src/arm/simulator-arm.h +413 -0
  70. data/ext/v8/upstream/v8/src/arm/stub-cache-arm.cc +4034 -0
  71. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/virtual-frame-arm-inl.h +0 -0
  72. data/ext/v8/upstream/v8/src/arm/virtual-frame-arm.cc +843 -0
  73. data/ext/v8/upstream/v8/src/arm/virtual-frame-arm.h +523 -0
  74. data/ext/v8/upstream/v8/src/array.js +1249 -0
  75. data/ext/v8/upstream/v8/src/assembler.cc +1067 -0
  76. data/ext/v8/upstream/v8/src/assembler.h +823 -0
  77. data/ext/v8/upstream/v8/src/ast-inl.h +112 -0
  78. data/ext/v8/upstream/v8/src/ast.cc +1078 -0
  79. data/ext/v8/upstream/v8/src/ast.h +2234 -0
  80. data/ext/v8/upstream/v8/src/atomicops.h +167 -0
  81. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_arm_gcc.h +0 -0
  82. data/ext/v8/upstream/v8/src/atomicops_internals_mips_gcc.h +169 -0
  83. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_x86_gcc.cc +0 -0
  84. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_x86_gcc.h +0 -0
  85. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_x86_macosx.h +0 -0
  86. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_x86_msvc.h +0 -0
  87. data/ext/v8/upstream/{3.1.8 → v8}/src/bignum-dtoa.cc +0 -0
  88. data/ext/v8/upstream/{3.1.8 → v8}/src/bignum-dtoa.h +0 -0
  89. data/ext/v8/upstream/{3.1.8 → v8}/src/bignum.cc +0 -0
  90. data/ext/v8/upstream/{3.1.8 → v8}/src/bignum.h +0 -0
  91. data/ext/v8/upstream/v8/src/bootstrapper.cc +2138 -0
  92. data/ext/v8/upstream/v8/src/bootstrapper.h +185 -0
  93. data/ext/v8/upstream/v8/src/builtins.cc +1708 -0
  94. data/ext/v8/upstream/v8/src/builtins.h +368 -0
  95. data/ext/v8/upstream/{3.1.8 → v8}/src/bytecodes-irregexp.h +0 -0
  96. data/ext/v8/upstream/{3.1.8 → v8}/src/cached-powers.cc +0 -0
  97. data/ext/v8/upstream/{3.1.8 → v8}/src/cached-powers.h +0 -0
  98. data/ext/v8/upstream/{3.1.8 → v8}/src/char-predicates-inl.h +0 -0
  99. data/ext/v8/upstream/{3.1.8 → v8}/src/char-predicates.h +0 -0
  100. data/ext/v8/upstream/v8/src/checks.cc +110 -0
  101. data/ext/v8/upstream/v8/src/checks.h +296 -0
  102. data/ext/v8/upstream/{3.1.8 → v8}/src/circular-queue-inl.h +0 -0
  103. data/ext/v8/upstream/{3.1.8 → v8}/src/circular-queue.cc +0 -0
  104. data/ext/v8/upstream/{3.1.8 → v8}/src/circular-queue.h +0 -0
  105. data/ext/v8/upstream/v8/src/code-stubs.cc +240 -0
  106. data/ext/v8/upstream/v8/src/code-stubs.h +971 -0
  107. data/ext/v8/upstream/{3.1.8 → v8}/src/code.h +0 -0
  108. data/ext/v8/upstream/v8/src/codegen-inl.h +68 -0
  109. data/ext/v8/upstream/v8/src/codegen.cc +505 -0
  110. data/ext/v8/upstream/v8/src/codegen.h +245 -0
  111. data/ext/v8/upstream/v8/src/compilation-cache.cc +540 -0
  112. data/ext/v8/upstream/v8/src/compilation-cache.h +287 -0
  113. data/ext/v8/upstream/v8/src/compiler.cc +792 -0
  114. data/ext/v8/upstream/v8/src/compiler.h +307 -0
  115. data/ext/v8/upstream/v8/src/contexts.cc +327 -0
  116. data/ext/v8/upstream/v8/src/contexts.h +382 -0
  117. data/ext/v8/upstream/{3.1.8 → v8}/src/conversions-inl.h +0 -0
  118. data/ext/v8/upstream/v8/src/conversions.cc +1125 -0
  119. data/ext/v8/upstream/{3.1.8 → v8}/src/conversions.h +0 -0
  120. data/ext/v8/upstream/v8/src/counters.cc +93 -0
  121. data/ext/v8/upstream/v8/src/counters.h +254 -0
  122. data/ext/v8/upstream/v8/src/cpu-profiler-inl.h +101 -0
  123. data/ext/v8/upstream/v8/src/cpu-profiler.cc +606 -0
  124. data/ext/v8/upstream/v8/src/cpu-profiler.h +305 -0
  125. data/ext/v8/upstream/v8/src/cpu.h +67 -0
  126. data/ext/v8/upstream/v8/src/d8-debug.cc +367 -0
  127. data/ext/v8/upstream/v8/src/d8-debug.h +158 -0
  128. data/ext/v8/upstream/v8/src/d8-posix.cc +695 -0
  129. data/ext/v8/upstream/{3.1.8 → v8}/src/d8-readline.cc +0 -0
  130. data/ext/v8/upstream/{3.1.8 → v8}/src/d8-windows.cc +0 -0
  131. data/ext/v8/upstream/v8/src/d8.cc +796 -0
  132. data/ext/v8/upstream/v8/src/d8.gyp +88 -0
  133. data/ext/v8/upstream/{3.1.8 → v8}/src/d8.h +0 -0
  134. data/ext/v8/upstream/{3.1.8 → v8}/src/d8.js +0 -0
  135. data/ext/v8/upstream/{3.1.8 → v8}/src/data-flow.cc +0 -0
  136. data/ext/v8/upstream/v8/src/data-flow.h +379 -0
  137. data/ext/v8/upstream/{3.1.8 → v8}/src/date.js +0 -0
  138. data/ext/v8/upstream/{3.1.8 → v8}/src/dateparser-inl.h +0 -0
  139. data/ext/v8/upstream/{3.1.8 → v8}/src/dateparser.cc +0 -0
  140. data/ext/v8/upstream/v8/src/dateparser.h +265 -0
  141. data/ext/v8/upstream/v8/src/debug-agent.cc +447 -0
  142. data/ext/v8/upstream/v8/src/debug-agent.h +129 -0
  143. data/ext/v8/upstream/{3.1.8 → v8}/src/debug-debugger.js +0 -0
  144. data/ext/v8/upstream/v8/src/debug.cc +3188 -0
  145. data/ext/v8/upstream/v8/src/debug.h +1055 -0
  146. data/ext/v8/upstream/v8/src/deoptimizer.cc +1296 -0
  147. data/ext/v8/upstream/v8/src/deoptimizer.h +629 -0
  148. data/ext/v8/upstream/v8/src/disasm.h +80 -0
  149. data/ext/v8/upstream/v8/src/disassembler.cc +339 -0
  150. data/ext/v8/upstream/{3.1.8 → v8}/src/disassembler.h +0 -0
  151. data/ext/v8/upstream/{3.1.8 → v8}/src/diy-fp.cc +0 -0
  152. data/ext/v8/upstream/{3.1.8 → v8}/src/diy-fp.h +0 -0
  153. data/ext/v8/upstream/{3.1.8 → v8}/src/double.h +0 -0
  154. data/ext/v8/upstream/{3.1.8 → v8}/src/dtoa.cc +0 -0
  155. data/ext/v8/upstream/{3.1.8 → v8}/src/dtoa.h +0 -0
  156. data/ext/v8/upstream/v8/src/execution.cc +791 -0
  157. data/ext/v8/upstream/v8/src/execution.h +291 -0
  158. data/ext/v8/upstream/v8/src/extensions/experimental/break-iterator.cc +250 -0
  159. data/ext/v8/upstream/v8/src/extensions/experimental/break-iterator.h +89 -0
  160. data/ext/v8/upstream/v8/src/extensions/experimental/experimental.gyp +55 -0
  161. data/ext/v8/upstream/v8/src/extensions/experimental/i18n-extension.cc +284 -0
  162. data/ext/v8/upstream/{3.1.8 → v8}/src/extensions/experimental/i18n-extension.h +0 -0
  163. data/ext/v8/upstream/v8/src/extensions/externalize-string-extension.cc +141 -0
  164. data/ext/v8/upstream/{3.1.8 → v8}/src/extensions/externalize-string-extension.h +0 -0
  165. data/ext/v8/upstream/v8/src/extensions/gc-extension.cc +58 -0
  166. data/ext/v8/upstream/{3.1.8 → v8}/src/extensions/gc-extension.h +0 -0
  167. data/ext/v8/upstream/v8/src/factory.cc +1194 -0
  168. data/ext/v8/upstream/v8/src/factory.h +436 -0
  169. data/ext/v8/upstream/{3.1.8 → v8}/src/fast-dtoa.cc +0 -0
  170. data/ext/v8/upstream/{3.1.8 → v8}/src/fast-dtoa.h +0 -0
  171. data/ext/v8/upstream/{3.1.8 → v8}/src/fixed-dtoa.cc +0 -0
  172. data/ext/v8/upstream/{3.1.8 → v8}/src/fixed-dtoa.h +0 -0
  173. data/ext/v8/upstream/v8/src/flag-definitions.h +556 -0
  174. data/ext/v8/upstream/{3.1.8 → v8}/src/flags.cc +0 -0
  175. data/ext/v8/upstream/{3.1.8 → v8}/src/flags.h +0 -0
  176. data/ext/v8/upstream/v8/src/frame-element.cc +37 -0
  177. data/ext/v8/upstream/v8/src/frame-element.h +269 -0
  178. data/ext/v8/upstream/v8/src/frames-inl.h +236 -0
  179. data/ext/v8/upstream/v8/src/frames.cc +1273 -0
  180. data/ext/v8/upstream/v8/src/frames.h +854 -0
  181. data/ext/v8/upstream/v8/src/full-codegen.cc +1385 -0
  182. data/ext/v8/upstream/v8/src/full-codegen.h +753 -0
  183. data/ext/v8/upstream/v8/src/func-name-inferrer.cc +91 -0
  184. data/ext/v8/upstream/v8/src/func-name-inferrer.h +111 -0
  185. data/ext/v8/upstream/v8/src/gdb-jit.cc +1548 -0
  186. data/ext/v8/upstream/{3.1.8 → v8}/src/gdb-jit.h +0 -0
  187. data/ext/v8/upstream/v8/src/global-handles.cc +596 -0
  188. data/ext/v8/upstream/v8/src/global-handles.h +239 -0
  189. data/ext/v8/upstream/v8/src/globals.h +325 -0
  190. data/ext/v8/upstream/v8/src/handles-inl.h +177 -0
  191. data/ext/v8/upstream/v8/src/handles.cc +965 -0
  192. data/ext/v8/upstream/v8/src/handles.h +372 -0
  193. data/ext/v8/upstream/{3.1.8 → v8}/src/hashmap.cc +0 -0
  194. data/ext/v8/upstream/v8/src/hashmap.h +121 -0
  195. data/ext/v8/upstream/v8/src/heap-inl.h +703 -0
  196. data/ext/v8/upstream/v8/src/heap-profiler.cc +1173 -0
  197. data/ext/v8/upstream/v8/src/heap-profiler.h +396 -0
  198. data/ext/v8/upstream/v8/src/heap.cc +5856 -0
  199. data/ext/v8/upstream/v8/src/heap.h +2264 -0
  200. data/ext/v8/upstream/v8/src/hydrogen-instructions.cc +1639 -0
  201. data/ext/v8/upstream/v8/src/hydrogen-instructions.h +3657 -0
  202. data/ext/v8/upstream/v8/src/hydrogen.cc +6011 -0
  203. data/ext/v8/upstream/v8/src/hydrogen.h +1137 -0
  204. data/ext/v8/upstream/v8/src/ia32/assembler-ia32-inl.h +430 -0
  205. data/ext/v8/upstream/v8/src/ia32/assembler-ia32.cc +2846 -0
  206. data/ext/v8/upstream/v8/src/ia32/assembler-ia32.h +1159 -0
  207. data/ext/v8/upstream/v8/src/ia32/builtins-ia32.cc +1596 -0
  208. data/ext/v8/upstream/v8/src/ia32/code-stubs-ia32.cc +6549 -0
  209. data/ext/v8/upstream/v8/src/ia32/code-stubs-ia32.h +495 -0
  210. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/codegen-ia32-inl.h +0 -0
  211. data/ext/v8/upstream/v8/src/ia32/codegen-ia32.cc +10385 -0
  212. data/ext/v8/upstream/v8/src/ia32/codegen-ia32.h +801 -0
  213. data/ext/v8/upstream/v8/src/ia32/cpu-ia32.cc +88 -0
  214. data/ext/v8/upstream/v8/src/ia32/debug-ia32.cc +312 -0
  215. data/ext/v8/upstream/v8/src/ia32/deoptimizer-ia32.cc +774 -0
  216. data/ext/v8/upstream/v8/src/ia32/disasm-ia32.cc +1620 -0
  217. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/frames-ia32.cc +0 -0
  218. data/ext/v8/upstream/v8/src/ia32/frames-ia32.h +140 -0
  219. data/ext/v8/upstream/v8/src/ia32/full-codegen-ia32.cc +4357 -0
  220. data/ext/v8/upstream/v8/src/ia32/ic-ia32.cc +1779 -0
  221. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/jump-target-ia32.cc +0 -0
  222. data/ext/v8/upstream/v8/src/ia32/lithium-codegen-ia32.cc +4158 -0
  223. data/ext/v8/upstream/v8/src/ia32/lithium-codegen-ia32.h +318 -0
  224. data/ext/v8/upstream/v8/src/ia32/lithium-gap-resolver-ia32.cc +466 -0
  225. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/lithium-gap-resolver-ia32.h +0 -0
  226. data/ext/v8/upstream/v8/src/ia32/lithium-ia32.cc +2181 -0
  227. data/ext/v8/upstream/v8/src/ia32/lithium-ia32.h +2235 -0
  228. data/ext/v8/upstream/v8/src/ia32/macro-assembler-ia32.cc +2056 -0
  229. data/ext/v8/upstream/v8/src/ia32/macro-assembler-ia32.h +807 -0
  230. data/ext/v8/upstream/v8/src/ia32/regexp-macro-assembler-ia32.cc +1264 -0
  231. data/ext/v8/upstream/v8/src/ia32/regexp-macro-assembler-ia32.h +216 -0
  232. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/register-allocator-ia32-inl.h +0 -0
  233. data/ext/v8/upstream/v8/src/ia32/register-allocator-ia32.cc +157 -0
  234. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/register-allocator-ia32.h +0 -0
  235. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/simulator-ia32.cc +0 -0
  236. data/ext/v8/upstream/v8/src/ia32/simulator-ia32.h +72 -0
  237. data/ext/v8/upstream/v8/src/ia32/stub-cache-ia32.cc +3711 -0
  238. data/ext/v8/upstream/v8/src/ia32/virtual-frame-ia32.cc +1366 -0
  239. data/ext/v8/upstream/v8/src/ia32/virtual-frame-ia32.h +650 -0
  240. data/ext/v8/upstream/v8/src/ic-inl.h +130 -0
  241. data/ext/v8/upstream/v8/src/ic.cc +2389 -0
  242. data/ext/v8/upstream/v8/src/ic.h +675 -0
  243. data/ext/v8/upstream/{3.1.8 → v8}/src/inspector.cc +0 -0
  244. data/ext/v8/upstream/{3.1.8 → v8}/src/inspector.h +0 -0
  245. data/ext/v8/upstream/v8/src/interpreter-irregexp.cc +659 -0
  246. data/ext/v8/upstream/v8/src/interpreter-irregexp.h +49 -0
  247. data/ext/v8/upstream/v8/src/isolate.cc +883 -0
  248. data/ext/v8/upstream/v8/src/isolate.h +1306 -0
  249. data/ext/v8/upstream/v8/src/json.js +342 -0
  250. data/ext/v8/upstream/v8/src/jsregexp.cc +5371 -0
  251. data/ext/v8/upstream/v8/src/jsregexp.h +1483 -0
  252. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target-heavy-inl.h +0 -0
  253. data/ext/v8/upstream/v8/src/jump-target-heavy.cc +427 -0
  254. data/ext/v8/upstream/v8/src/jump-target-heavy.h +238 -0
  255. data/ext/v8/upstream/v8/src/jump-target-inl.h +48 -0
  256. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target-light-inl.h +0 -0
  257. data/ext/v8/upstream/v8/src/jump-target-light.cc +111 -0
  258. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target-light.h +0 -0
  259. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target.cc +0 -0
  260. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target.h +0 -0
  261. data/ext/v8/upstream/{3.1.8 → v8}/src/list-inl.h +0 -0
  262. data/ext/v8/upstream/{3.1.8 → v8}/src/list.h +0 -0
  263. data/ext/v8/upstream/v8/src/lithium-allocator-inl.h +142 -0
  264. data/ext/v8/upstream/v8/src/lithium-allocator.cc +2105 -0
  265. data/ext/v8/upstream/v8/src/lithium-allocator.h +630 -0
  266. data/ext/v8/upstream/v8/src/lithium.cc +169 -0
  267. data/ext/v8/upstream/{3.1.8 → v8}/src/lithium.h +0 -0
  268. data/ext/v8/upstream/{3.1.8 → v8}/src/liveedit-debugger.js +0 -0
  269. data/ext/v8/upstream/v8/src/liveedit.cc +1693 -0
  270. data/ext/v8/upstream/v8/src/liveedit.h +179 -0
  271. data/ext/v8/upstream/{3.1.8 → v8}/src/liveobjectlist-inl.h +0 -0
  272. data/ext/v8/upstream/v8/src/liveobjectlist.cc +2589 -0
  273. data/ext/v8/upstream/v8/src/liveobjectlist.h +322 -0
  274. data/ext/v8/upstream/{3.1.8 → v8}/src/log-inl.h +0 -0
  275. data/ext/v8/upstream/v8/src/log-utils.cc +423 -0
  276. data/ext/v8/upstream/v8/src/log-utils.h +229 -0
  277. data/ext/v8/upstream/v8/src/log.cc +1666 -0
  278. data/ext/v8/upstream/v8/src/log.h +446 -0
  279. data/ext/v8/upstream/{3.1.8 → v8}/src/macro-assembler.h +0 -0
  280. data/ext/v8/upstream/{3.1.8 → v8}/src/macros.py +0 -0
  281. data/ext/v8/upstream/v8/src/mark-compact.cc +3092 -0
  282. data/ext/v8/upstream/v8/src/mark-compact.h +506 -0
  283. data/ext/v8/upstream/{3.1.8 → v8}/src/math.js +0 -0
  284. data/ext/v8/upstream/v8/src/messages.cc +166 -0
  285. data/ext/v8/upstream/{3.1.8 → v8}/src/messages.h +0 -0
  286. data/ext/v8/upstream/v8/src/messages.js +1090 -0
  287. data/ext/v8/upstream/v8/src/mips/assembler-mips-inl.h +335 -0
  288. data/ext/v8/upstream/v8/src/mips/assembler-mips.cc +2093 -0
  289. data/ext/v8/upstream/v8/src/mips/assembler-mips.h +1066 -0
  290. data/ext/v8/upstream/v8/src/mips/builtins-mips.cc +148 -0
  291. data/ext/v8/upstream/v8/src/mips/code-stubs-mips.cc +752 -0
  292. data/ext/v8/upstream/v8/src/mips/code-stubs-mips.h +511 -0
  293. data/ext/v8/upstream/v8/src/mips/codegen-mips-inl.h +64 -0
  294. data/ext/v8/upstream/v8/src/mips/codegen-mips.cc +1213 -0
  295. data/ext/v8/upstream/v8/src/mips/codegen-mips.h +633 -0
  296. data/ext/v8/upstream/v8/src/mips/constants-mips.cc +352 -0
  297. data/ext/v8/upstream/v8/src/mips/constants-mips.h +723 -0
  298. data/ext/v8/upstream/v8/src/mips/cpu-mips.cc +90 -0
  299. data/ext/v8/upstream/v8/src/mips/debug-mips.cc +155 -0
  300. data/ext/v8/upstream/v8/src/mips/deoptimizer-mips.cc +91 -0
  301. data/ext/v8/upstream/v8/src/mips/disasm-mips.cc +1023 -0
  302. data/ext/v8/upstream/v8/src/mips/frames-mips.cc +48 -0
  303. data/ext/v8/upstream/v8/src/mips/frames-mips.h +179 -0
  304. data/ext/v8/upstream/v8/src/mips/full-codegen-mips.cc +727 -0
  305. data/ext/v8/upstream/v8/src/mips/ic-mips.cc +244 -0
  306. data/ext/v8/upstream/v8/src/mips/jump-target-mips.cc +80 -0
  307. data/ext/v8/upstream/v8/src/mips/lithium-codegen-mips.h +65 -0
  308. data/ext/v8/upstream/v8/src/mips/lithium-mips.h +304 -0
  309. data/ext/v8/upstream/v8/src/mips/macro-assembler-mips.cc +3327 -0
  310. data/ext/v8/upstream/v8/src/mips/macro-assembler-mips.h +1058 -0
  311. data/ext/v8/upstream/v8/src/mips/regexp-macro-assembler-mips.cc +478 -0
  312. data/ext/v8/upstream/v8/src/mips/regexp-macro-assembler-mips.h +250 -0
  313. data/ext/v8/upstream/v8/src/mips/register-allocator-mips-inl.h +134 -0
  314. data/ext/v8/upstream/{3.1.8 → v8}/src/mips/register-allocator-mips.cc +0 -0
  315. data/ext/v8/upstream/v8/src/mips/register-allocator-mips.h +47 -0
  316. data/ext/v8/upstream/v8/src/mips/simulator-mips.cc +2438 -0
  317. data/ext/v8/upstream/v8/src/mips/simulator-mips.h +394 -0
  318. data/ext/v8/upstream/v8/src/mips/stub-cache-mips.cc +601 -0
  319. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips-inl.h +58 -0
  320. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips.cc +307 -0
  321. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips.h +530 -0
  322. data/ext/v8/upstream/v8/src/mirror-debugger.js +2381 -0
  323. data/ext/v8/upstream/v8/src/mksnapshot.cc +256 -0
  324. data/ext/v8/upstream/{3.1.8 → v8}/src/natives.h +0 -0
  325. data/ext/v8/upstream/v8/src/objects-debug.cc +722 -0
  326. data/ext/v8/upstream/v8/src/objects-inl.h +4166 -0
  327. data/ext/v8/upstream/v8/src/objects-printer.cc +801 -0
  328. data/ext/v8/upstream/v8/src/objects-visiting.cc +142 -0
  329. data/ext/v8/upstream/v8/src/objects-visiting.h +422 -0
  330. data/ext/v8/upstream/v8/src/objects.cc +10296 -0
  331. data/ext/v8/upstream/v8/src/objects.h +6662 -0
  332. data/ext/v8/upstream/v8/src/parser.cc +5168 -0
  333. data/ext/v8/upstream/v8/src/parser.h +823 -0
  334. data/ext/v8/upstream/v8/src/platform-cygwin.cc +811 -0
  335. data/ext/v8/upstream/v8/src/platform-freebsd.cc +854 -0
  336. data/ext/v8/upstream/v8/src/platform-linux.cc +1120 -0
  337. data/ext/v8/upstream/v8/src/platform-macos.cc +865 -0
  338. data/ext/v8/upstream/v8/src/platform-nullos.cc +504 -0
  339. data/ext/v8/upstream/v8/src/platform-openbsd.cc +672 -0
  340. data/ext/v8/upstream/v8/src/platform-posix.cc +424 -0
  341. data/ext/v8/upstream/v8/src/platform-solaris.cc +796 -0
  342. data/ext/v8/upstream/v8/src/platform-tls-mac.h +62 -0
  343. data/ext/v8/upstream/v8/src/platform-tls-win32.h +62 -0
  344. data/ext/v8/upstream/v8/src/platform-tls.h +50 -0
  345. data/ext/v8/upstream/v8/src/platform-win32.cc +2072 -0
  346. data/ext/v8/upstream/v8/src/platform.h +693 -0
  347. data/ext/v8/upstream/v8/src/preparse-data.cc +185 -0
  348. data/ext/v8/upstream/{3.1.8 → v8}/src/preparse-data.h +0 -0
  349. data/ext/v8/upstream/v8/src/preparser-api.cc +219 -0
  350. data/ext/v8/upstream/v8/src/preparser.cc +1205 -0
  351. data/ext/v8/upstream/{3.1.8 → v8}/src/preparser.h +0 -0
  352. data/ext/v8/upstream/v8/src/prettyprinter.cc +1530 -0
  353. data/ext/v8/upstream/v8/src/prettyprinter.h +223 -0
  354. data/ext/v8/upstream/{3.1.8 → v8}/src/profile-generator-inl.h +0 -0
  355. data/ext/v8/upstream/v8/src/profile-generator.cc +3095 -0
  356. data/ext/v8/upstream/v8/src/profile-generator.h +1125 -0
  357. data/ext/v8/upstream/v8/src/property.cc +102 -0
  358. data/ext/v8/upstream/v8/src/property.h +348 -0
  359. data/ext/v8/upstream/{3.1.8 → v8}/src/regexp-macro-assembler-irregexp-inl.h +0 -0
  360. data/ext/v8/upstream/v8/src/regexp-macro-assembler-irregexp.cc +470 -0
  361. data/ext/v8/upstream/{3.1.8 → v8}/src/regexp-macro-assembler-irregexp.h +0 -0
  362. data/ext/v8/upstream/{3.1.8 → v8}/src/regexp-macro-assembler-tracer.cc +0 -0
  363. data/ext/v8/upstream/{3.1.8 → v8}/src/regexp-macro-assembler-tracer.h +0 -0
  364. data/ext/v8/upstream/v8/src/regexp-macro-assembler.cc +266 -0
  365. data/ext/v8/upstream/v8/src/regexp-macro-assembler.h +236 -0
  366. data/ext/v8/upstream/v8/src/regexp-stack.cc +111 -0
  367. data/ext/v8/upstream/v8/src/regexp-stack.h +147 -0
  368. data/ext/v8/upstream/v8/src/regexp.js +483 -0
  369. data/ext/v8/upstream/v8/src/register-allocator-inl.h +141 -0
  370. data/ext/v8/upstream/v8/src/register-allocator.cc +98 -0
  371. data/ext/v8/upstream/v8/src/register-allocator.h +310 -0
  372. data/ext/v8/upstream/v8/src/rewriter.cc +1024 -0
  373. data/ext/v8/upstream/{3.1.8 → v8}/src/rewriter.h +0 -0
  374. data/ext/v8/upstream/v8/src/runtime-profiler.cc +478 -0
  375. data/ext/v8/upstream/v8/src/runtime-profiler.h +192 -0
  376. data/ext/v8/upstream/v8/src/runtime.cc +11949 -0
  377. data/ext/v8/upstream/v8/src/runtime.h +643 -0
  378. data/ext/v8/upstream/{3.1.8 → v8}/src/runtime.js +0 -0
  379. data/ext/v8/upstream/v8/src/safepoint-table.cc +256 -0
  380. data/ext/v8/upstream/v8/src/safepoint-table.h +269 -0
  381. data/ext/v8/upstream/v8/src/scanner-base.cc +964 -0
  382. data/ext/v8/upstream/v8/src/scanner-base.h +664 -0
  383. data/ext/v8/upstream/v8/src/scanner.cc +584 -0
  384. data/ext/v8/upstream/v8/src/scanner.h +196 -0
  385. data/ext/v8/upstream/v8/src/scopeinfo.cc +631 -0
  386. data/ext/v8/upstream/v8/src/scopeinfo.h +249 -0
  387. data/ext/v8/upstream/v8/src/scopes.cc +1093 -0
  388. data/ext/v8/upstream/v8/src/scopes.h +508 -0
  389. data/ext/v8/upstream/v8/src/serialize.cc +1574 -0
  390. data/ext/v8/upstream/v8/src/serialize.h +589 -0
  391. data/ext/v8/upstream/{3.1.8 → v8}/src/shell.h +0 -0
  392. data/ext/v8/upstream/{3.1.8 → v8}/src/simulator.h +0 -0
  393. data/ext/v8/upstream/v8/src/small-pointer-list.h +163 -0
  394. data/ext/v8/upstream/{3.1.8 → v8}/src/smart-pointer.h +0 -0
  395. data/ext/v8/upstream/v8/src/snapshot-common.cc +82 -0
  396. data/ext/v8/upstream/{3.1.8 → v8}/src/snapshot-empty.cc +0 -0
  397. data/ext/v8/upstream/v8/src/snapshot.h +73 -0
  398. data/ext/v8/upstream/v8/src/spaces-inl.h +529 -0
  399. data/ext/v8/upstream/v8/src/spaces.cc +3147 -0
  400. data/ext/v8/upstream/v8/src/spaces.h +2368 -0
  401. data/ext/v8/upstream/{3.1.8 → v8}/src/splay-tree-inl.h +0 -0
  402. data/ext/v8/upstream/{3.1.8 → v8}/src/splay-tree.h +0 -0
  403. data/ext/v8/upstream/v8/src/string-search.cc +41 -0
  404. data/ext/v8/upstream/v8/src/string-search.h +568 -0
  405. data/ext/v8/upstream/v8/src/string-stream.cc +592 -0
  406. data/ext/v8/upstream/{3.1.8 → v8}/src/string-stream.h +0 -0
  407. data/ext/v8/upstream/v8/src/string.js +915 -0
  408. data/ext/v8/upstream/{3.1.8 → v8}/src/strtod.cc +0 -0
  409. data/ext/v8/upstream/{3.1.8 → v8}/src/strtod.h +0 -0
  410. data/ext/v8/upstream/v8/src/stub-cache.cc +1940 -0
  411. data/ext/v8/upstream/v8/src/stub-cache.h +866 -0
  412. data/ext/v8/upstream/{3.1.8 → v8}/src/third_party/valgrind/valgrind.h +0 -0
  413. data/ext/v8/upstream/v8/src/token.cc +63 -0
  414. data/ext/v8/upstream/v8/src/token.h +288 -0
  415. data/ext/v8/upstream/v8/src/top.cc +983 -0
  416. data/ext/v8/upstream/v8/src/type-info.cc +472 -0
  417. data/ext/v8/upstream/v8/src/type-info.h +290 -0
  418. data/ext/v8/upstream/{3.1.8 → v8}/src/unbound-queue-inl.h +0 -0
  419. data/ext/v8/upstream/{3.1.8 → v8}/src/unbound-queue.h +0 -0
  420. data/ext/v8/upstream/{3.1.8 → v8}/src/unicode-inl.h +0 -0
  421. data/ext/v8/upstream/v8/src/unicode.cc +1624 -0
  422. data/ext/v8/upstream/v8/src/unicode.h +280 -0
  423. data/ext/v8/upstream/{3.1.8 → v8}/src/uri.js +0 -0
  424. data/ext/v8/upstream/{3.1.8 → v8}/src/utils.cc +0 -0
  425. data/ext/v8/upstream/v8/src/utils.h +796 -0
  426. data/ext/v8/upstream/v8/src/v8-counters.cc +62 -0
  427. data/ext/v8/upstream/v8/src/v8-counters.h +311 -0
  428. data/ext/v8/upstream/v8/src/v8.cc +215 -0
  429. data/ext/v8/upstream/v8/src/v8.h +130 -0
  430. data/ext/v8/upstream/{3.1.8 → v8}/src/v8checks.h +0 -0
  431. data/ext/v8/upstream/{3.1.8 → v8}/src/v8dll-main.cc +0 -0
  432. data/ext/v8/upstream/v8/src/v8globals.h +486 -0
  433. data/ext/v8/upstream/{3.1.8/src/memory.h → v8/src/v8memory.h} +0 -0
  434. data/ext/v8/upstream/v8/src/v8natives.js +1293 -0
  435. data/ext/v8/upstream/{3.1.8 → v8}/src/v8preparserdll-main.cc +0 -0
  436. data/ext/v8/upstream/v8/src/v8threads.cc +453 -0
  437. data/ext/v8/upstream/v8/src/v8threads.h +164 -0
  438. data/ext/v8/upstream/v8/src/v8utils.h +317 -0
  439. data/ext/v8/upstream/{3.1.8 → v8}/src/variables.cc +0 -0
  440. data/ext/v8/upstream/v8/src/variables.h +212 -0
  441. data/ext/v8/upstream/v8/src/version.cc +116 -0
  442. data/ext/v8/upstream/v8/src/version.h +68 -0
  443. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame-heavy-inl.h +0 -0
  444. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame-heavy.cc +0 -0
  445. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame-inl.h +0 -0
  446. data/ext/v8/upstream/v8/src/virtual-frame-light-inl.h +171 -0
  447. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame-light.cc +0 -0
  448. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame.cc +0 -0
  449. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame.h +0 -0
  450. data/ext/v8/upstream/v8/src/vm-state-inl.h +138 -0
  451. data/ext/v8/upstream/v8/src/vm-state.h +70 -0
  452. data/ext/v8/upstream/v8/src/win32-headers.h +96 -0
  453. data/ext/v8/upstream/v8/src/x64/assembler-x64-inl.h +456 -0
  454. data/ext/v8/upstream/v8/src/x64/assembler-x64.cc +2954 -0
  455. data/ext/v8/upstream/v8/src/x64/assembler-x64.h +1630 -0
  456. data/ext/v8/upstream/v8/src/x64/builtins-x64.cc +1493 -0
  457. data/ext/v8/upstream/v8/src/x64/code-stubs-x64.cc +5132 -0
  458. data/ext/v8/upstream/v8/src/x64/code-stubs-x64.h +477 -0
  459. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/codegen-x64-inl.h +0 -0
  460. data/ext/v8/upstream/v8/src/x64/codegen-x64.cc +8843 -0
  461. data/ext/v8/upstream/v8/src/x64/codegen-x64.h +753 -0
  462. data/ext/v8/upstream/v8/src/x64/cpu-x64.cc +88 -0
  463. data/ext/v8/upstream/v8/src/x64/debug-x64.cc +318 -0
  464. data/ext/v8/upstream/v8/src/x64/deoptimizer-x64.cc +815 -0
  465. data/ext/v8/upstream/v8/src/x64/disasm-x64.cc +1752 -0
  466. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/frames-x64.cc +0 -0
  467. data/ext/v8/upstream/v8/src/x64/frames-x64.h +130 -0
  468. data/ext/v8/upstream/v8/src/x64/full-codegen-x64.cc +4339 -0
  469. data/ext/v8/upstream/v8/src/x64/ic-x64.cc +1752 -0
  470. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/jump-target-x64.cc +0 -0
  471. data/ext/v8/upstream/v8/src/x64/lithium-codegen-x64.cc +3970 -0
  472. data/ext/v8/upstream/v8/src/x64/lithium-codegen-x64.h +318 -0
  473. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/lithium-gap-resolver-x64.cc +0 -0
  474. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/lithium-gap-resolver-x64.h +0 -0
  475. data/ext/v8/upstream/v8/src/x64/lithium-x64.cc +2115 -0
  476. data/ext/v8/upstream/v8/src/x64/lithium-x64.h +2161 -0
  477. data/ext/v8/upstream/v8/src/x64/macro-assembler-x64.cc +2911 -0
  478. data/ext/v8/upstream/v8/src/x64/macro-assembler-x64.h +1984 -0
  479. data/ext/v8/upstream/v8/src/x64/regexp-macro-assembler-x64.cc +1398 -0
  480. data/ext/v8/upstream/v8/src/x64/regexp-macro-assembler-x64.h +282 -0
  481. data/ext/v8/upstream/v8/src/x64/register-allocator-x64-inl.h +87 -0
  482. data/ext/v8/upstream/v8/src/x64/register-allocator-x64.cc +95 -0
  483. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/register-allocator-x64.h +0 -0
  484. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/simulator-x64.cc +0 -0
  485. data/ext/v8/upstream/v8/src/x64/simulator-x64.h +71 -0
  486. data/ext/v8/upstream/v8/src/x64/stub-cache-x64.cc +3460 -0
  487. data/ext/v8/upstream/v8/src/x64/virtual-frame-x64.cc +1296 -0
  488. data/ext/v8/upstream/v8/src/x64/virtual-frame-x64.h +597 -0
  489. data/ext/v8/upstream/v8/src/zone-inl.h +129 -0
  490. data/ext/v8/upstream/v8/src/zone.cc +196 -0
  491. data/ext/v8/upstream/v8/src/zone.h +236 -0
  492. data/ext/v8/upstream/{3.1.8 → v8}/tools/codemap.js +0 -0
  493. data/ext/v8/upstream/{3.1.8 → v8}/tools/consarray.js +0 -0
  494. data/ext/v8/upstream/{3.1.8 → v8}/tools/csvparser.js +0 -0
  495. data/ext/v8/upstream/{3.1.8 → v8}/tools/disasm.py +0 -0
  496. data/ext/v8/upstream/v8/tools/freebsd-tick-processor +10 -0
  497. data/ext/v8/upstream/{3.1.8 → v8}/tools/gc-nvp-trace-processor.py +0 -0
  498. data/ext/v8/upstream/{3.1.8 → v8}/tools/generate-ten-powers.scm +0 -0
  499. data/ext/v8/upstream/{3.1.8 → v8}/tools/grokdump.py +0 -0
  500. data/ext/v8/upstream/v8/tools/gyp/v8.gyp +844 -0
  501. data/ext/v8/upstream/{3.1.8 → v8}/tools/js2c.py +0 -0
  502. data/ext/v8/upstream/{3.1.8 → v8}/tools/jsmin.py +0 -0
  503. data/ext/v8/upstream/v8/tools/linux-tick-processor +35 -0
  504. data/ext/v8/upstream/{3.1.8 → v8}/tools/ll_prof.py +0 -0
  505. data/ext/v8/upstream/{3.1.8 → v8}/tools/logreader.js +0 -0
  506. data/ext/v8/upstream/{3.1.8 → v8}/tools/mac-nm +0 -0
  507. data/ext/v8/upstream/{3.1.8 → v8}/tools/mac-tick-processor +0 -0
  508. data/ext/v8/upstream/{3.1.8 → v8}/tools/oom_dump/README +0 -0
  509. data/ext/v8/upstream/{3.1.8 → v8}/tools/oom_dump/SConstruct +0 -0
  510. data/ext/v8/upstream/{3.1.8 → v8}/tools/oom_dump/oom_dump.cc +0 -0
  511. data/ext/v8/upstream/{3.1.8 → v8}/tools/presubmit.py +0 -0
  512. data/ext/v8/upstream/{3.1.8 → v8}/tools/process-heap-prof.py +0 -0
  513. data/ext/v8/upstream/{3.1.8 → v8}/tools/profile.js +0 -0
  514. data/ext/v8/upstream/{3.1.8 → v8}/tools/profile_view.js +0 -0
  515. data/ext/v8/upstream/{3.1.8 → v8}/tools/run-valgrind.py +0 -0
  516. data/ext/v8/upstream/{3.1.8 → v8}/tools/splaytree.js +0 -0
  517. data/ext/v8/upstream/{3.1.8 → v8}/tools/stats-viewer.py +0 -0
  518. data/ext/v8/upstream/v8/tools/test.py +1490 -0
  519. data/ext/v8/upstream/{3.1.8 → v8}/tools/tickprocessor-driver.js +0 -0
  520. data/ext/v8/upstream/v8/tools/tickprocessor.js +877 -0
  521. data/ext/v8/upstream/{3.1.8 → v8}/tools/utils.py +0 -0
  522. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/README.txt +0 -0
  523. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/arm.vsprops +0 -0
  524. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/common.vsprops +0 -0
  525. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/d8.vcproj +0 -0
  526. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/d8_arm.vcproj +0 -0
  527. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/d8_x64.vcproj +0 -0
  528. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/d8js2c.cmd +0 -0
  529. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/debug.vsprops +0 -0
  530. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/ia32.vsprops +0 -0
  531. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/js2c.cmd +0 -0
  532. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/release.vsprops +0 -0
  533. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8.sln +0 -0
  534. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8.vcproj +0 -0
  535. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_arm.sln +0 -0
  536. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_arm.vcproj +0 -0
  537. data/ext/v8/upstream/v8/tools/visual_studio/v8_base.vcproj +1308 -0
  538. data/ext/v8/upstream/v8/tools/visual_studio/v8_base_arm.vcproj +1238 -0
  539. data/ext/v8/upstream/v8/tools/visual_studio/v8_base_x64.vcproj +1300 -0
  540. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_cctest.vcproj +0 -0
  541. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_cctest_arm.vcproj +0 -0
  542. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_cctest_x64.vcproj +0 -0
  543. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_mksnapshot.vcproj +0 -0
  544. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_mksnapshot_x64.vcproj +0 -0
  545. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_process_sample.vcproj +0 -0
  546. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_process_sample_arm.vcproj +0 -0
  547. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_process_sample_x64.vcproj +0 -0
  548. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_shell_sample.vcproj +0 -0
  549. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_shell_sample_arm.vcproj +0 -0
  550. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_shell_sample_x64.vcproj +0 -0
  551. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_snapshot.vcproj +0 -0
  552. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_snapshot_cc.vcproj +0 -0
  553. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_snapshot_cc_x64.vcproj +0 -0
  554. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_snapshot_x64.vcproj +0 -0
  555. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_x64.sln +0 -0
  556. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_x64.vcproj +0 -0
  557. data/ext/v8/upstream/v8/tools/visual_studio/x64.vsprops +18 -0
  558. data/ext/v8/upstream/{3.1.8 → v8}/tools/windows-tick-processor.bat +0 -0
  559. data/ext/v8/v8_callbacks.cpp +52 -92
  560. data/ext/v8/v8_date.cpp +2 -3
  561. data/ext/v8/v8_object.cpp +4 -0
  562. data/ext/v8/v8_template.cpp +2 -2
  563. data/ext/v8/v8_try_catch.cpp +8 -38
  564. data/lib/v8/version.rb +1 -1
  565. data/spec/ext/ext_spec_helper.rb +2 -20
  566. data/spec/ext/object_spec.rb +0 -12
  567. data/spec/ext/try_catch_spec.rb +29 -1
  568. data/spec/spec_helper.rb +1 -0
  569. data/spec/v8/portal/proxies_spec.rb +1 -84
  570. data/specmem/handle_memspec.rb +41 -0
  571. data/specmem/object_memspec.rb +16 -0
  572. data/specmem/proxies_memspec.rb +86 -0
  573. data/specmem/spec_helper.rb +24 -0
  574. data/therubyracer.gemspec +7 -2
  575. metadata +564 -541
  576. data/ext/v8/upstream/3.1.8/.gitignore +0 -31
  577. data/ext/v8/upstream/3.1.8/AUTHORS +0 -40
  578. data/ext/v8/upstream/3.1.8/ChangeLog +0 -2566
  579. data/ext/v8/upstream/3.1.8/SConstruct +0 -1192
  580. data/ext/v8/upstream/3.1.8/include/v8-debug.h +0 -384
  581. data/ext/v8/upstream/3.1.8/include/v8-preparser.h +0 -116
  582. data/ext/v8/upstream/3.1.8/include/v8-profiler.h +0 -426
  583. data/ext/v8/upstream/3.1.8/include/v8-testing.h +0 -99
  584. data/ext/v8/upstream/3.1.8/include/v8.h +0 -3846
  585. data/ext/v8/upstream/3.1.8/preparser/preparser-process.cc +0 -206
  586. data/ext/v8/upstream/3.1.8/src/SConscript +0 -356
  587. data/ext/v8/upstream/3.1.8/src/accessors.cc +0 -907
  588. data/ext/v8/upstream/3.1.8/src/allocation.cc +0 -204
  589. data/ext/v8/upstream/3.1.8/src/allocation.h +0 -176
  590. data/ext/v8/upstream/3.1.8/src/api.cc +0 -5191
  591. data/ext/v8/upstream/3.1.8/src/api.h +0 -508
  592. data/ext/v8/upstream/3.1.8/src/apiutils.h +0 -80
  593. data/ext/v8/upstream/3.1.8/src/arguments.h +0 -105
  594. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm-inl.h +0 -352
  595. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.cc +0 -2756
  596. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.h +0 -1294
  597. data/ext/v8/upstream/3.1.8/src/arm/builtins-arm.cc +0 -1628
  598. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.cc +0 -6783
  599. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.h +0 -657
  600. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.cc +0 -7403
  601. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.h +0 -595
  602. data/ext/v8/upstream/3.1.8/src/arm/constants-arm.h +0 -769
  603. data/ext/v8/upstream/3.1.8/src/arm/cpu-arm.cc +0 -147
  604. data/ext/v8/upstream/3.1.8/src/arm/debug-arm.cc +0 -315
  605. data/ext/v8/upstream/3.1.8/src/arm/deoptimizer-arm.cc +0 -700
  606. data/ext/v8/upstream/3.1.8/src/arm/disasm-arm.cc +0 -1439
  607. data/ext/v8/upstream/3.1.8/src/arm/frames-arm.h +0 -168
  608. data/ext/v8/upstream/3.1.8/src/arm/full-codegen-arm.cc +0 -4230
  609. data/ext/v8/upstream/3.1.8/src/arm/ic-arm.cc +0 -1799
  610. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.cc +0 -2041
  611. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.h +0 -2046
  612. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.cc +0 -3822
  613. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.h +0 -312
  614. data/ext/v8/upstream/3.1.8/src/arm/lithium-gap-resolver-arm.cc +0 -303
  615. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.cc +0 -2701
  616. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.h +0 -1015
  617. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.cc +0 -1280
  618. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.h +0 -252
  619. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.cc +0 -3165
  620. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.h +0 -402
  621. data/ext/v8/upstream/3.1.8/src/arm/stub-cache-arm.cc +0 -4077
  622. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.cc +0 -843
  623. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.h +0 -520
  624. data/ext/v8/upstream/3.1.8/src/array.js +0 -1231
  625. data/ext/v8/upstream/3.1.8/src/assembler.cc +0 -973
  626. data/ext/v8/upstream/3.1.8/src/assembler.h +0 -787
  627. data/ext/v8/upstream/3.1.8/src/ast-inl.h +0 -107
  628. data/ext/v8/upstream/3.1.8/src/ast.cc +0 -1067
  629. data/ext/v8/upstream/3.1.8/src/ast.h +0 -2177
  630. data/ext/v8/upstream/3.1.8/src/atomicops.h +0 -165
  631. data/ext/v8/upstream/3.1.8/src/bootstrapper.cc +0 -1888
  632. data/ext/v8/upstream/3.1.8/src/bootstrapper.h +0 -118
  633. data/ext/v8/upstream/3.1.8/src/builtins.cc +0 -1586
  634. data/ext/v8/upstream/3.1.8/src/builtins.h +0 -339
  635. data/ext/v8/upstream/3.1.8/src/checks.cc +0 -110
  636. data/ext/v8/upstream/3.1.8/src/checks.h +0 -292
  637. data/ext/v8/upstream/3.1.8/src/code-stubs.cc +0 -230
  638. data/ext/v8/upstream/3.1.8/src/code-stubs.h +0 -950
  639. data/ext/v8/upstream/3.1.8/src/codegen-inl.h +0 -64
  640. data/ext/v8/upstream/3.1.8/src/codegen.cc +0 -495
  641. data/ext/v8/upstream/3.1.8/src/codegen.h +0 -245
  642. data/ext/v8/upstream/3.1.8/src/compilation-cache.cc +0 -654
  643. data/ext/v8/upstream/3.1.8/src/compilation-cache.h +0 -112
  644. data/ext/v8/upstream/3.1.8/src/compiler.cc +0 -806
  645. data/ext/v8/upstream/3.1.8/src/compiler.h +0 -290
  646. data/ext/v8/upstream/3.1.8/src/contexts.cc +0 -320
  647. data/ext/v8/upstream/3.1.8/src/contexts.h +0 -376
  648. data/ext/v8/upstream/3.1.8/src/conversions.cc +0 -1069
  649. data/ext/v8/upstream/3.1.8/src/counters.cc +0 -78
  650. data/ext/v8/upstream/3.1.8/src/counters.h +0 -242
  651. data/ext/v8/upstream/3.1.8/src/cpu-profiler-inl.h +0 -100
  652. data/ext/v8/upstream/3.1.8/src/cpu-profiler.cc +0 -554
  653. data/ext/v8/upstream/3.1.8/src/cpu-profiler.h +0 -291
  654. data/ext/v8/upstream/3.1.8/src/cpu.h +0 -65
  655. data/ext/v8/upstream/3.1.8/src/d8-debug.cc +0 -367
  656. data/ext/v8/upstream/3.1.8/src/d8-debug.h +0 -157
  657. data/ext/v8/upstream/3.1.8/src/d8-posix.cc +0 -693
  658. data/ext/v8/upstream/3.1.8/src/d8.cc +0 -792
  659. data/ext/v8/upstream/3.1.8/src/d8.gyp +0 -85
  660. data/ext/v8/upstream/3.1.8/src/data-flow.h +0 -379
  661. data/ext/v8/upstream/3.1.8/src/dateparser.h +0 -263
  662. data/ext/v8/upstream/3.1.8/src/debug-agent.cc +0 -446
  663. data/ext/v8/upstream/3.1.8/src/debug-agent.h +0 -131
  664. data/ext/v8/upstream/3.1.8/src/debug.cc +0 -3085
  665. data/ext/v8/upstream/3.1.8/src/debug.h +0 -1025
  666. data/ext/v8/upstream/3.1.8/src/deoptimizer.cc +0 -1185
  667. data/ext/v8/upstream/3.1.8/src/deoptimizer.h +0 -529
  668. data/ext/v8/upstream/3.1.8/src/disasm.h +0 -77
  669. data/ext/v8/upstream/3.1.8/src/disassembler.cc +0 -338
  670. data/ext/v8/upstream/3.1.8/src/execution.cc +0 -735
  671. data/ext/v8/upstream/3.1.8/src/execution.h +0 -322
  672. data/ext/v8/upstream/3.1.8/src/extensions/experimental/experimental.gyp +0 -53
  673. data/ext/v8/upstream/3.1.8/src/extensions/experimental/i18n-extension.cc +0 -264
  674. data/ext/v8/upstream/3.1.8/src/extensions/externalize-string-extension.cc +0 -141
  675. data/ext/v8/upstream/3.1.8/src/extensions/gc-extension.cc +0 -58
  676. data/ext/v8/upstream/3.1.8/src/factory.cc +0 -1087
  677. data/ext/v8/upstream/3.1.8/src/factory.h +0 -432
  678. data/ext/v8/upstream/3.1.8/src/flag-definitions.h +0 -552
  679. data/ext/v8/upstream/3.1.8/src/frame-element.cc +0 -42
  680. data/ext/v8/upstream/3.1.8/src/frame-element.h +0 -277
  681. data/ext/v8/upstream/3.1.8/src/frames-inl.h +0 -210
  682. data/ext/v8/upstream/3.1.8/src/frames.cc +0 -1232
  683. data/ext/v8/upstream/3.1.8/src/frames.h +0 -826
  684. data/ext/v8/upstream/3.1.8/src/full-codegen.cc +0 -1382
  685. data/ext/v8/upstream/3.1.8/src/full-codegen.h +0 -751
  686. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.cc +0 -90
  687. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.h +0 -111
  688. data/ext/v8/upstream/3.1.8/src/gdb-jit.cc +0 -1547
  689. data/ext/v8/upstream/3.1.8/src/global-handles.cc +0 -534
  690. data/ext/v8/upstream/3.1.8/src/global-handles.h +0 -181
  691. data/ext/v8/upstream/3.1.8/src/globals.h +0 -325
  692. data/ext/v8/upstream/3.1.8/src/handles-inl.h +0 -80
  693. data/ext/v8/upstream/3.1.8/src/handles.cc +0 -910
  694. data/ext/v8/upstream/3.1.8/src/handles.h +0 -424
  695. data/ext/v8/upstream/3.1.8/src/hashmap.h +0 -121
  696. data/ext/v8/upstream/3.1.8/src/heap-inl.h +0 -587
  697. data/ext/v8/upstream/3.1.8/src/heap-profiler.cc +0 -1128
  698. data/ext/v8/upstream/3.1.8/src/heap-profiler.h +0 -381
  699. data/ext/v8/upstream/3.1.8/src/heap.cc +0 -5610
  700. data/ext/v8/upstream/3.1.8/src/heap.h +0 -2218
  701. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.cc +0 -1490
  702. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.h +0 -3493
  703. data/ext/v8/upstream/3.1.8/src/hydrogen.cc +0 -6056
  704. data/ext/v8/upstream/3.1.8/src/hydrogen.h +0 -1091
  705. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32-inl.h +0 -429
  706. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.cc +0 -2800
  707. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.h +0 -1093
  708. data/ext/v8/upstream/3.1.8/src/ia32/builtins-ia32.cc +0 -1590
  709. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.cc +0 -6624
  710. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.h +0 -536
  711. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.cc +0 -10354
  712. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.h +0 -798
  713. data/ext/v8/upstream/3.1.8/src/ia32/cpu-ia32.cc +0 -87
  714. data/ext/v8/upstream/3.1.8/src/ia32/debug-ia32.cc +0 -309
  715. data/ext/v8/upstream/3.1.8/src/ia32/deoptimizer-ia32.cc +0 -664
  716. data/ext/v8/upstream/3.1.8/src/ia32/disasm-ia32.cc +0 -1597
  717. data/ext/v8/upstream/3.1.8/src/ia32/frames-ia32.h +0 -140
  718. data/ext/v8/upstream/3.1.8/src/ia32/full-codegen-ia32.cc +0 -4278
  719. data/ext/v8/upstream/3.1.8/src/ia32/ic-ia32.cc +0 -1786
  720. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.cc +0 -3880
  721. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.h +0 -309
  722. data/ext/v8/upstream/3.1.8/src/ia32/lithium-gap-resolver-ia32.cc +0 -460
  723. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.cc +0 -2095
  724. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.h +0 -2127
  725. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.cc +0 -2031
  726. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.h +0 -798
  727. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.cc +0 -1253
  728. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.h +0 -215
  729. data/ext/v8/upstream/3.1.8/src/ia32/register-allocator-ia32.cc +0 -157
  730. data/ext/v8/upstream/3.1.8/src/ia32/simulator-ia32.h +0 -72
  731. data/ext/v8/upstream/3.1.8/src/ia32/stub-cache-ia32.cc +0 -3732
  732. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.cc +0 -1360
  733. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.h +0 -646
  734. data/ext/v8/upstream/3.1.8/src/ic-inl.h +0 -129
  735. data/ext/v8/upstream/3.1.8/src/ic.cc +0 -2333
  736. data/ext/v8/upstream/3.1.8/src/ic.h +0 -639
  737. data/ext/v8/upstream/3.1.8/src/interpreter-irregexp.cc +0 -655
  738. data/ext/v8/upstream/3.1.8/src/interpreter-irregexp.h +0 -48
  739. data/ext/v8/upstream/3.1.8/src/json.js +0 -342
  740. data/ext/v8/upstream/3.1.8/src/jsregexp.cc +0 -5340
  741. data/ext/v8/upstream/3.1.8/src/jsregexp.h +0 -1484
  742. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.cc +0 -430
  743. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.h +0 -244
  744. data/ext/v8/upstream/3.1.8/src/jump-target-inl.h +0 -48
  745. data/ext/v8/upstream/3.1.8/src/jump-target-light.cc +0 -111
  746. data/ext/v8/upstream/3.1.8/src/lithium-allocator-inl.h +0 -140
  747. data/ext/v8/upstream/3.1.8/src/lithium-allocator.cc +0 -2093
  748. data/ext/v8/upstream/3.1.8/src/lithium-allocator.h +0 -644
  749. data/ext/v8/upstream/3.1.8/src/lithium.cc +0 -168
  750. data/ext/v8/upstream/3.1.8/src/liveedit.cc +0 -1650
  751. data/ext/v8/upstream/3.1.8/src/liveedit.h +0 -174
  752. data/ext/v8/upstream/3.1.8/src/liveobjectlist.cc +0 -2527
  753. data/ext/v8/upstream/3.1.8/src/liveobjectlist.h +0 -322
  754. data/ext/v8/upstream/3.1.8/src/log-utils.cc +0 -336
  755. data/ext/v8/upstream/3.1.8/src/log-utils.h +0 -232
  756. data/ext/v8/upstream/3.1.8/src/log.cc +0 -1608
  757. data/ext/v8/upstream/3.1.8/src/log.h +0 -379
  758. data/ext/v8/upstream/3.1.8/src/mark-compact.cc +0 -2957
  759. data/ext/v8/upstream/3.1.8/src/mark-compact.h +0 -433
  760. data/ext/v8/upstream/3.1.8/src/messages.cc +0 -164
  761. data/ext/v8/upstream/3.1.8/src/messages.js +0 -1071
  762. data/ext/v8/upstream/3.1.8/src/mips/assembler-mips-inl.h +0 -215
  763. data/ext/v8/upstream/3.1.8/src/mips/assembler-mips.cc +0 -1219
  764. data/ext/v8/upstream/3.1.8/src/mips/assembler-mips.h +0 -667
  765. data/ext/v8/upstream/3.1.8/src/mips/builtins-mips.cc +0 -205
  766. data/ext/v8/upstream/3.1.8/src/mips/codegen-mips-inl.h +0 -70
  767. data/ext/v8/upstream/3.1.8/src/mips/codegen-mips.cc +0 -1437
  768. data/ext/v8/upstream/3.1.8/src/mips/codegen-mips.h +0 -431
  769. data/ext/v8/upstream/3.1.8/src/mips/constants-mips.cc +0 -328
  770. data/ext/v8/upstream/3.1.8/src/mips/constants-mips.h +0 -525
  771. data/ext/v8/upstream/3.1.8/src/mips/cpu-mips.cc +0 -73
  772. data/ext/v8/upstream/3.1.8/src/mips/debug-mips.cc +0 -127
  773. data/ext/v8/upstream/3.1.8/src/mips/disasm-mips.cc +0 -787
  774. data/ext/v8/upstream/3.1.8/src/mips/fast-codegen-mips.cc +0 -77
  775. data/ext/v8/upstream/3.1.8/src/mips/frames-mips.cc +0 -96
  776. data/ext/v8/upstream/3.1.8/src/mips/frames-mips.h +0 -164
  777. data/ext/v8/upstream/3.1.8/src/mips/full-codegen-mips.cc +0 -277
  778. data/ext/v8/upstream/3.1.8/src/mips/ic-mips.cc +0 -208
  779. data/ext/v8/upstream/3.1.8/src/mips/jump-target-mips.cc +0 -175
  780. data/ext/v8/upstream/3.1.8/src/mips/macro-assembler-mips.cc +0 -1326
  781. data/ext/v8/upstream/3.1.8/src/mips/macro-assembler-mips.h +0 -461
  782. data/ext/v8/upstream/3.1.8/src/mips/register-allocator-mips-inl.h +0 -137
  783. data/ext/v8/upstream/3.1.8/src/mips/register-allocator-mips.h +0 -46
  784. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.cc +0 -1650
  785. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.h +0 -311
  786. data/ext/v8/upstream/3.1.8/src/mips/stub-cache-mips.cc +0 -418
  787. data/ext/v8/upstream/3.1.8/src/mips/virtual-frame-mips.cc +0 -319
  788. data/ext/v8/upstream/3.1.8/src/mips/virtual-frame-mips.h +0 -548
  789. data/ext/v8/upstream/3.1.8/src/mirror-debugger.js +0 -2380
  790. data/ext/v8/upstream/3.1.8/src/mksnapshot.cc +0 -256
  791. data/ext/v8/upstream/3.1.8/src/objects-debug.cc +0 -722
  792. data/ext/v8/upstream/3.1.8/src/objects-inl.h +0 -3946
  793. data/ext/v8/upstream/3.1.8/src/objects-printer.cc +0 -801
  794. data/ext/v8/upstream/3.1.8/src/objects-visiting.cc +0 -142
  795. data/ext/v8/upstream/3.1.8/src/objects-visiting.h +0 -401
  796. data/ext/v8/upstream/3.1.8/src/objects.cc +0 -10044
  797. data/ext/v8/upstream/3.1.8/src/objects.h +0 -6571
  798. data/ext/v8/upstream/3.1.8/src/parser.cc +0 -5165
  799. data/ext/v8/upstream/3.1.8/src/parser.h +0 -802
  800. data/ext/v8/upstream/3.1.8/src/platform-cygwin.cc +0 -745
  801. data/ext/v8/upstream/3.1.8/src/platform-freebsd.cc +0 -702
  802. data/ext/v8/upstream/3.1.8/src/platform-linux.cc +0 -981
  803. data/ext/v8/upstream/3.1.8/src/platform-macos.cc +0 -732
  804. data/ext/v8/upstream/3.1.8/src/platform-nullos.cc +0 -498
  805. data/ext/v8/upstream/3.1.8/src/platform-openbsd.cc +0 -657
  806. data/ext/v8/upstream/3.1.8/src/platform-posix.cc +0 -399
  807. data/ext/v8/upstream/3.1.8/src/platform-solaris.cc +0 -714
  808. data/ext/v8/upstream/3.1.8/src/platform-win32.cc +0 -1974
  809. data/ext/v8/upstream/3.1.8/src/platform.h +0 -636
  810. data/ext/v8/upstream/3.1.8/src/preparse-data.cc +0 -183
  811. data/ext/v8/upstream/3.1.8/src/preparser-api.cc +0 -213
  812. data/ext/v8/upstream/3.1.8/src/preparser.cc +0 -1205
  813. data/ext/v8/upstream/3.1.8/src/prettyprinter.cc +0 -1539
  814. data/ext/v8/upstream/3.1.8/src/prettyprinter.h +0 -223
  815. data/ext/v8/upstream/3.1.8/src/profile-generator.cc +0 -2899
  816. data/ext/v8/upstream/3.1.8/src/profile-generator.h +0 -1151
  817. data/ext/v8/upstream/3.1.8/src/property.cc +0 -96
  818. data/ext/v8/upstream/3.1.8/src/property.h +0 -337
  819. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-irregexp.cc +0 -470
  820. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.cc +0 -257
  821. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.h +0 -231
  822. data/ext/v8/upstream/3.1.8/src/regexp-stack.cc +0 -103
  823. data/ext/v8/upstream/3.1.8/src/regexp-stack.h +0 -123
  824. data/ext/v8/upstream/3.1.8/src/regexp.js +0 -483
  825. data/ext/v8/upstream/3.1.8/src/register-allocator-inl.h +0 -141
  826. data/ext/v8/upstream/3.1.8/src/register-allocator.cc +0 -104
  827. data/ext/v8/upstream/3.1.8/src/register-allocator.h +0 -320
  828. data/ext/v8/upstream/3.1.8/src/rewriter.cc +0 -1023
  829. data/ext/v8/upstream/3.1.8/src/runtime-profiler.cc +0 -443
  830. data/ext/v8/upstream/3.1.8/src/runtime-profiler.h +0 -77
  831. data/ext/v8/upstream/3.1.8/src/runtime.cc +0 -11592
  832. data/ext/v8/upstream/3.1.8/src/runtime.h +0 -582
  833. data/ext/v8/upstream/3.1.8/src/safepoint-table.cc +0 -253
  834. data/ext/v8/upstream/3.1.8/src/safepoint-table.h +0 -263
  835. data/ext/v8/upstream/3.1.8/src/scanner-base.cc +0 -971
  836. data/ext/v8/upstream/3.1.8/src/scanner-base.h +0 -653
  837. data/ext/v8/upstream/3.1.8/src/scanner.cc +0 -586
  838. data/ext/v8/upstream/3.1.8/src/scanner.h +0 -194
  839. data/ext/v8/upstream/3.1.8/src/scopeinfo.cc +0 -636
  840. data/ext/v8/upstream/3.1.8/src/scopeinfo.h +0 -238
  841. data/ext/v8/upstream/3.1.8/src/scopes.cc +0 -1063
  842. data/ext/v8/upstream/3.1.8/src/scopes.h +0 -494
  843. data/ext/v8/upstream/3.1.8/src/serialize.cc +0 -1535
  844. data/ext/v8/upstream/3.1.8/src/serialize.h +0 -584
  845. data/ext/v8/upstream/3.1.8/src/snapshot-common.cc +0 -82
  846. data/ext/v8/upstream/3.1.8/src/snapshot.h +0 -71
  847. data/ext/v8/upstream/3.1.8/src/spaces-inl.h +0 -524
  848. data/ext/v8/upstream/3.1.8/src/spaces.cc +0 -3254
  849. data/ext/v8/upstream/3.1.8/src/spaces.h +0 -2362
  850. data/ext/v8/upstream/3.1.8/src/string-search.cc +0 -40
  851. data/ext/v8/upstream/3.1.8/src/string-search.h +0 -567
  852. data/ext/v8/upstream/3.1.8/src/string-stream.cc +0 -584
  853. data/ext/v8/upstream/3.1.8/src/string.js +0 -915
  854. data/ext/v8/upstream/3.1.8/src/stub-cache.cc +0 -1878
  855. data/ext/v8/upstream/3.1.8/src/stub-cache.h +0 -849
  856. data/ext/v8/upstream/3.1.8/src/token.cc +0 -63
  857. data/ext/v8/upstream/3.1.8/src/token.h +0 -288
  858. data/ext/v8/upstream/3.1.8/src/top.cc +0 -1152
  859. data/ext/v8/upstream/3.1.8/src/top.h +0 -608
  860. data/ext/v8/upstream/3.1.8/src/type-info.cc +0 -406
  861. data/ext/v8/upstream/3.1.8/src/type-info.h +0 -283
  862. data/ext/v8/upstream/3.1.8/src/unicode.cc +0 -1624
  863. data/ext/v8/upstream/3.1.8/src/unicode.h +0 -280
  864. data/ext/v8/upstream/3.1.8/src/utils.h +0 -793
  865. data/ext/v8/upstream/3.1.8/src/v8-counters.cc +0 -55
  866. data/ext/v8/upstream/3.1.8/src/v8-counters.h +0 -290
  867. data/ext/v8/upstream/3.1.8/src/v8.cc +0 -270
  868. data/ext/v8/upstream/3.1.8/src/v8.h +0 -127
  869. data/ext/v8/upstream/3.1.8/src/v8globals.h +0 -480
  870. data/ext/v8/upstream/3.1.8/src/v8natives.js +0 -1252
  871. data/ext/v8/upstream/3.1.8/src/v8threads.cc +0 -440
  872. data/ext/v8/upstream/3.1.8/src/v8threads.h +0 -157
  873. data/ext/v8/upstream/3.1.8/src/v8utils.h +0 -354
  874. data/ext/v8/upstream/3.1.8/src/variables.h +0 -212
  875. data/ext/v8/upstream/3.1.8/src/version.cc +0 -95
  876. data/ext/v8/upstream/3.1.8/src/version.h +0 -64
  877. data/ext/v8/upstream/3.1.8/src/virtual-frame-light-inl.h +0 -170
  878. data/ext/v8/upstream/3.1.8/src/vm-state-inl.h +0 -134
  879. data/ext/v8/upstream/3.1.8/src/vm-state.h +0 -68
  880. data/ext/v8/upstream/3.1.8/src/win32-headers.h +0 -95
  881. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64-inl.h +0 -455
  882. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.cc +0 -3162
  883. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.h +0 -1584
  884. data/ext/v8/upstream/3.1.8/src/x64/builtins-x64.cc +0 -1492
  885. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.cc +0 -5150
  886. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.h +0 -519
  887. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.cc +0 -8835
  888. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.h +0 -750
  889. data/ext/v8/upstream/3.1.8/src/x64/cpu-x64.cc +0 -86
  890. data/ext/v8/upstream/3.1.8/src/x64/debug-x64.cc +0 -316
  891. data/ext/v8/upstream/3.1.8/src/x64/deoptimizer-x64.cc +0 -781
  892. data/ext/v8/upstream/3.1.8/src/x64/disasm-x64.cc +0 -1737
  893. data/ext/v8/upstream/3.1.8/src/x64/frames-x64.h +0 -130
  894. data/ext/v8/upstream/3.1.8/src/x64/full-codegen-x64.cc +0 -3984
  895. data/ext/v8/upstream/3.1.8/src/x64/ic-x64.cc +0 -1761
  896. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.cc +0 -3639
  897. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.h +0 -305
  898. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.cc +0 -2044
  899. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.h +0 -2052
  900. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.cc +0 -2660
  901. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.h +0 -1852
  902. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.cc +0 -1382
  903. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.h +0 -278
  904. data/ext/v8/upstream/3.1.8/src/x64/register-allocator-x64-inl.h +0 -87
  905. data/ext/v8/upstream/3.1.8/src/x64/register-allocator-x64.cc +0 -91
  906. data/ext/v8/upstream/3.1.8/src/x64/simulator-x64.h +0 -71
  907. data/ext/v8/upstream/3.1.8/src/x64/stub-cache-x64.cc +0 -3509
  908. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.cc +0 -1292
  909. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.h +0 -593
  910. data/ext/v8/upstream/3.1.8/src/zone-inl.h +0 -83
  911. data/ext/v8/upstream/3.1.8/src/zone.cc +0 -195
  912. data/ext/v8/upstream/3.1.8/src/zone.h +0 -233
  913. data/ext/v8/upstream/3.1.8/tools/gyp/v8.gyp +0 -869
  914. data/ext/v8/upstream/3.1.8/tools/linux-tick-processor +0 -33
  915. data/ext/v8/upstream/3.1.8/tools/tickprocessor.js +0 -863
  916. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base.vcproj +0 -1296
  917. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_arm.vcproj +0 -1234
  918. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_x64.vcproj +0 -1296
  919. data/ext/v8/upstream/3.1.8/tools/visual_studio/x64.vsprops +0 -17
  920. data/spec/ext/mem_spec.rb +0 -42
@@ -0,0 +1,71 @@ src/x64/simulator-x64.h (new file)
1
+ // Copyright 2011 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+ #ifndef V8_X64_SIMULATOR_X64_H_
29
+ #define V8_X64_SIMULATOR_X64_H_
30
+
31
+ #include "allocation.h"
32
+
33
+ namespace v8 {
34
+ namespace internal {
35
+
36
+ // Since there is no simulator for the x64 architecture, the only thing we can
37
+ // do is to call the entry directly.
38
+ // TODO(X64): Don't pass p0, since it isn't used?
39
+ #define CALL_GENERATED_CODE(entry, p0, p1, p2, p3, p4) \
40
+ (entry(p0, p1, p2, p3, p4))
41
+
42
+ typedef int (*regexp_matcher)(String*, int, const byte*,
43
+ const byte*, int*, Address, int, Isolate*);
44
+
45
+ // Call the generated regexp code directly. The code at the entry address should
46
+ // expect eight int/pointer sized arguments and return an int.
47
+ #define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7) \
48
+ (FUNCTION_CAST<regexp_matcher>(entry)(p0, p1, p2, p3, p4, p5, p6, p7))
49
+
50
+ #define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
51
+ (reinterpret_cast<TryCatch*>(try_catch_address))
52
+
53
+ // The stack limit beyond which we will throw stack overflow errors in
54
+ // generated code. Because generated code on x64 uses the C stack, we
55
+ // just use the C stack limit.
56
+ class SimulatorStack : public v8::internal::AllStatic {
57
+ public:
58
+ static inline uintptr_t JsLimitFromCLimit(uintptr_t c_limit) {
59
+ return c_limit;
60
+ }
61
+
62
+ static inline uintptr_t RegisterCTryCatch(uintptr_t try_catch_address) {
63
+ return try_catch_address;
64
+ }
65
+
66
+ static inline void UnregisterCTryCatch() { }
67
+ };
68
+
69
+ } } // namespace v8::internal
70
+
71
+ #endif // V8_X64_SIMULATOR_X64_H_
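Illustrative note (not part of the upstream diff): the header above defines CALL_GENERATED_CODE as a plain direct call because there is no instruction simulator for x64. The sketch below is a minimal, hedged illustration of what that expansion amounts to; generated_entry and the argument values are hypothetical stand-ins for JIT-emitted code and do not exist in V8.

    // Minimal sketch, assuming a hypothetical generated_entry in place of real
    // JIT-emitted code. On x64 the macro expands to a direct function call.
    #include <cstdio>

    typedef int (*GeneratedCode)(int, int, int, int, int);

    // Stand-in for a generated entry point (hypothetical).
    static int generated_entry(int p0, int p1, int p2, int p3, int p4) {
      return p0 + p1 + p2 + p3 + p4;
    }

    // Same shape as the macro in the header: no simulator indirection.
    #define CALL_GENERATED_CODE(entry, p0, p1, p2, p3, p4) \
      (entry(p0, p1, p2, p3, p4))

    int main() {
      GeneratedCode entry = generated_entry;
      std::printf("%d\n", CALL_GENERATED_CODE(entry, 1, 2, 3, 4, 5));  // prints 15
      return 0;
    }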
@@ -0,0 +1,3460 @@ src/x64/stub-cache-x64.cc (new file)
1
+ // Copyright 2011 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+ #include "v8.h"
29
+
30
+ #if defined(V8_TARGET_ARCH_X64)
31
+
32
+ #include "ic-inl.h"
33
+ #include "codegen-inl.h"
34
+ #include "stub-cache.h"
35
+
36
+ namespace v8 {
37
+ namespace internal {
38
+
39
+ #define __ ACCESS_MASM(masm)
40
+
41
+
42
+ static void ProbeTable(Isolate* isolate,
43
+ MacroAssembler* masm,
44
+ Code::Flags flags,
45
+ StubCache::Table table,
46
+ Register name,
47
+ Register offset) {
48
+ ASSERT_EQ(8, kPointerSize);
49
+ ASSERT_EQ(16, sizeof(StubCache::Entry));
50
+ // The offset register holds the entry offset times four (due to masking
51
+ // and shifting optimizations).
52
+ ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
53
+ Label miss;
54
+
55
+ __ LoadAddress(kScratchRegister, key_offset);
56
+ // Check that the key in the entry matches the name.
57
+ // Multiply entry offset by 16 to get the entry address. Since the
58
+ // offset register already holds the entry offset times four, multiply
59
+ // by a further four.
60
+ __ cmpl(name, Operand(kScratchRegister, offset, times_4, 0));
61
+ __ j(not_equal, &miss);
62
+ // Get the code entry from the cache.
63
+ // Use key_offset + kPointerSize, rather than loading value_offset.
64
+ __ movq(kScratchRegister,
65
+ Operand(kScratchRegister, offset, times_4, kPointerSize));
66
+ // Check that the flags match what we're looking for.
67
+ __ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset));
68
+ __ and_(offset, Immediate(~Code::kFlagsNotUsedInLookup));
69
+ __ cmpl(offset, Immediate(flags));
70
+ __ j(not_equal, &miss);
71
+
72
+ // Jump to the first instruction in the code stub.
73
+ __ addq(kScratchRegister, Immediate(Code::kHeaderSize - kHeapObjectTag));
74
+ __ jmp(kScratchRegister);
75
+
76
+ __ bind(&miss);
77
+ }
78
+
79
+
80
+ // Helper function used to check that the dictionary doesn't contain
81
+ // the property. This function may return false negatives, so miss_label
82
+ // must always call a backup property check that is complete.
83
+ // This function is safe to call if the receiver has fast properties.
84
+ // Name must be a symbol and receiver must be a heap object.
85
+ static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
86
+ Label* miss_label,
87
+ Register receiver,
88
+ String* name,
89
+ Register r0,
90
+ Register r1) {
91
+ ASSERT(name->IsSymbol());
92
+ Counters* counters = masm->isolate()->counters();
93
+ __ IncrementCounter(counters->negative_lookups(), 1);
94
+ __ IncrementCounter(counters->negative_lookups_miss(), 1);
95
+
96
+ Label done;
97
+ __ movq(r0, FieldOperand(receiver, HeapObject::kMapOffset));
98
+
99
+ const int kInterceptorOrAccessCheckNeededMask =
100
+ (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
101
+
102
+ // Bail out if the receiver has a named interceptor or requires access checks.
103
+ __ testb(FieldOperand(r0, Map::kBitFieldOffset),
104
+ Immediate(kInterceptorOrAccessCheckNeededMask));
105
+ __ j(not_zero, miss_label);
106
+
107
+ // Check that receiver is a JSObject.
108
+ __ CmpInstanceType(r0, FIRST_JS_OBJECT_TYPE);
109
+ __ j(below, miss_label);
110
+
111
+ // Load properties array.
112
+ Register properties = r0;
113
+ __ movq(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));
114
+
115
+ // Check that the properties array is a dictionary.
116
+ __ CompareRoot(FieldOperand(properties, HeapObject::kMapOffset),
117
+ Heap::kHashTableMapRootIndex);
118
+ __ j(not_equal, miss_label);
119
+
120
+ // Compute the capacity mask.
121
+ const int kCapacityOffset =
122
+ StringDictionary::kHeaderSize +
123
+ StringDictionary::kCapacityIndex * kPointerSize;
124
+
125
+ // Generate an unrolled loop that performs a few probes before
126
+ // giving up.
127
+ static const int kProbes = 4;
128
+ const int kElementsStartOffset =
129
+ StringDictionary::kHeaderSize +
130
+ StringDictionary::kElementsStartIndex * kPointerSize;
131
+
132
+ // If the names of slots 1 to kProbes - 1 for the hash value are
133
+ // not equal to the name and the kProbes-th slot is not used (its name is the
134
+ // undefined value), it guarantees the hash table doesn't contain the
135
+ // property. It's true even if some slots represent deleted properties
136
+ // (their names are the null value).
137
+ for (int i = 0; i < kProbes; i++) {
138
+ // r0 points to properties hash.
139
+ // Compute the masked index: (hash + i + i * i) & mask.
140
+ Register index = r1;
141
+ // Capacity is smi 2^n.
142
+ __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset));
143
+ __ decl(index);
144
+ __ and_(index,
145
+ Immediate(name->Hash() + StringDictionary::GetProbeOffset(i)));
146
+
147
+ // Scale the index by multiplying by the entry size.
148
+ ASSERT(StringDictionary::kEntrySize == 3);
149
+ __ lea(index, Operand(index, index, times_2, 0)); // index *= 3.
150
+
151
+ Register entity_name = r1;
152
+ // Having undefined at this place means the name is not contained.
153
+ ASSERT_EQ(kSmiTagSize, 1);
154
+ __ movq(entity_name, Operand(properties, index, times_pointer_size,
155
+ kElementsStartOffset - kHeapObjectTag));
156
+ __ Cmp(entity_name, masm->isolate()->factory()->undefined_value());
157
+ // __ jmp(miss_label);
158
+ if (i != kProbes - 1) {
159
+ __ j(equal, &done);
160
+
161
+ // Stop if found the property.
162
+ __ Cmp(entity_name, Handle<String>(name));
163
+ __ j(equal, miss_label);
164
+
165
+ // Check if the entry name is not a symbol.
166
+ __ movq(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
167
+ __ testb(FieldOperand(entity_name, Map::kInstanceTypeOffset),
168
+ Immediate(kIsSymbolMask));
169
+ __ j(zero, miss_label);
170
+ } else {
171
+ // Give up probing if still not found the undefined value.
172
+ __ j(not_equal, miss_label);
173
+ }
174
+ }
175
+
176
+ __ bind(&done);
177
+ __ DecrementCounter(counters->negative_lookups_miss(), 1);
178
+ }
179
+
180
+
181
+ void StubCache::GenerateProbe(MacroAssembler* masm,
182
+ Code::Flags flags,
183
+ Register receiver,
184
+ Register name,
185
+ Register scratch,
186
+ Register extra,
187
+ Register extra2) {
188
+ Isolate* isolate = masm->isolate();
189
+ Label miss;
190
+ USE(extra); // The register extra is not used on the X64 platform.
191
+ USE(extra2); // The register extra2 is not used on the X64 platform.
192
+ // Make sure that code is valid. The shifting code relies on the
193
+ // entry size being 16.
194
+ ASSERT(sizeof(Entry) == 16);
195
+
196
+ // Make sure the flags do not name a specific type.
197
+ ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
198
+
199
+ // Make sure that there are no register conflicts.
200
+ ASSERT(!scratch.is(receiver));
201
+ ASSERT(!scratch.is(name));
202
+
203
+ // Check scratch register is valid, extra and extra2 are unused.
204
+ ASSERT(!scratch.is(no_reg));
205
+ ASSERT(extra2.is(no_reg));
206
+
207
+ // Check that the receiver isn't a smi.
208
+ __ JumpIfSmi(receiver, &miss);
209
+
210
+ // Get the map of the receiver and compute the hash.
211
+ __ movl(scratch, FieldOperand(name, String::kHashFieldOffset));
212
+ // Use only the low 32 bits of the map pointer.
213
+ __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
214
+ __ xor_(scratch, Immediate(flags));
215
+ __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));
216
+
217
+ // Probe the primary table.
218
+ ProbeTable(isolate, masm, flags, kPrimary, name, scratch);
219
+
220
+ // Primary miss: Compute hash for secondary probe.
221
+ __ movl(scratch, FieldOperand(name, String::kHashFieldOffset));
222
+ __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
223
+ __ xor_(scratch, Immediate(flags));
224
+ __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));
225
+ __ subl(scratch, name);
226
+ __ addl(scratch, Immediate(flags));
227
+ __ and_(scratch, Immediate((kSecondaryTableSize - 1) << kHeapObjectTagSize));
228
+
229
+ // Probe the secondary table.
230
+ ProbeTable(isolate, masm, flags, kSecondary, name, scratch);
231
+
232
+ // Cache miss: Fall-through and let caller handle the miss by
233
+ // entering the runtime system.
234
+ __ bind(&miss);
235
+ }
236
+
237
+
238
+ void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
239
+ int index,
240
+ Register prototype) {
241
+ // Load the global or builtins object from the current context.
242
+ __ movq(prototype,
243
+ Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
244
+ // Load the global context from the global or builtins object.
245
+ __ movq(prototype,
246
+ FieldOperand(prototype, GlobalObject::kGlobalContextOffset));
247
+ // Load the function from the global context.
248
+ __ movq(prototype, Operand(prototype, Context::SlotOffset(index)));
249
+ // Load the initial map. The global functions all have initial maps.
250
+ __ movq(prototype,
251
+ FieldOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
252
+ // Load the prototype from the initial map.
253
+ __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
254
+ }
255
+
256
+
257
+ void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
258
+ MacroAssembler* masm, int index, Register prototype, Label* miss) {
259
+ Isolate* isolate = masm->isolate();
260
+ // Check we're still in the same context.
261
+ __ Move(prototype, isolate->global());
262
+ __ cmpq(Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)),
263
+ prototype);
264
+ __ j(not_equal, miss);
265
+ // Get the global function with the given index.
266
+ JSFunction* function =
267
+ JSFunction::cast(isolate->global_context()->get(index));
268
+ // Load its initial map. The global functions all have initial maps.
269
+ __ Move(prototype, Handle<Map>(function->initial_map()));
270
+ // Load the prototype from the initial map.
271
+ __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
272
+ }
273
+
274
+
275
+ void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
276
+ Register receiver,
277
+ Register scratch,
278
+ Label* miss_label) {
279
+ // Check that the receiver isn't a smi.
280
+ __ JumpIfSmi(receiver, miss_label);
281
+
282
+ // Check that the object is a JS array.
283
+ __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
284
+ __ j(not_equal, miss_label);
285
+
286
+ // Load length directly from the JS array.
287
+ __ movq(rax, FieldOperand(receiver, JSArray::kLengthOffset));
288
+ __ ret(0);
289
+ }
290
+
291
+
292
+ // Generate code to check if an object is a string. If the object is
293
+ // a string, the map's instance type is left in the scratch register.
294
+ static void GenerateStringCheck(MacroAssembler* masm,
295
+ Register receiver,
296
+ Register scratch,
297
+ Label* smi,
298
+ Label* non_string_object) {
299
+ // Check that the object isn't a smi.
300
+ __ JumpIfSmi(receiver, smi);
301
+
302
+ // Check that the object is a string.
303
+ __ movq(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
304
+ __ movzxbq(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
305
+ ASSERT(kNotStringTag != 0);
306
+ __ testl(scratch, Immediate(kNotStringTag));
307
+ __ j(not_zero, non_string_object);
308
+ }
309
+
310
+
311
+ void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
312
+ Register receiver,
313
+ Register scratch1,
314
+ Register scratch2,
315
+ Label* miss,
316
+ bool support_wrappers) {
317
+ Label check_wrapper;
318
+
319
+ // Check if the object is a string leaving the instance type in the
320
+ // scratch register.
321
+ GenerateStringCheck(masm, receiver, scratch1, miss,
322
+ support_wrappers ? &check_wrapper : miss);
323
+
324
+ // Load length directly from the string.
325
+ __ movq(rax, FieldOperand(receiver, String::kLengthOffset));
326
+ __ ret(0);
327
+
328
+ if (support_wrappers) {
329
+ // Check if the object is a JSValue wrapper.
330
+ __ bind(&check_wrapper);
331
+ __ cmpl(scratch1, Immediate(JS_VALUE_TYPE));
332
+ __ j(not_equal, miss);
333
+
334
+ // Check if the wrapped value is a string and load the length
335
+ // directly if it is.
336
+ __ movq(scratch2, FieldOperand(receiver, JSValue::kValueOffset));
337
+ GenerateStringCheck(masm, scratch2, scratch1, miss, miss);
338
+ __ movq(rax, FieldOperand(scratch2, String::kLengthOffset));
339
+ __ ret(0);
340
+ }
341
+ }
342
+
343
+
344
+ void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
345
+ Register receiver,
346
+ Register result,
347
+ Register scratch,
348
+ Label* miss_label) {
349
+ __ TryGetFunctionPrototype(receiver, result, miss_label);
350
+ if (!result.is(rax)) __ movq(rax, result);
351
+ __ ret(0);
352
+ }
353
+
354
+
355
+ // Load a fast property out of a holder object (src). In-object properties
356
+ // are loaded directly; otherwise the property is loaded from the properties
357
+ // fixed array.
358
+ void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
359
+ Register dst, Register src,
360
+ JSObject* holder, int index) {
361
+ // Adjust for the number of properties stored in the holder.
362
+ index -= holder->map()->inobject_properties();
363
+ if (index < 0) {
364
+ // Get the property straight out of the holder.
365
+ int offset = holder->map()->instance_size() + (index * kPointerSize);
366
+ __ movq(dst, FieldOperand(src, offset));
367
+ } else {
368
+ // Calculate the offset into the properties array.
369
+ int offset = index * kPointerSize + FixedArray::kHeaderSize;
370
+ __ movq(dst, FieldOperand(src, JSObject::kPropertiesOffset));
371
+ __ movq(dst, FieldOperand(dst, offset));
372
+ }
373
+ }
374
+
375
+
376
+ static void PushInterceptorArguments(MacroAssembler* masm,
377
+ Register receiver,
378
+ Register holder,
379
+ Register name,
380
+ JSObject* holder_obj) {
381
+ __ push(name);
382
+ InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
383
+ ASSERT(!masm->isolate()->heap()->InNewSpace(interceptor));
384
+ __ Move(kScratchRegister, Handle<Object>(interceptor));
385
+ __ push(kScratchRegister);
386
+ __ push(receiver);
387
+ __ push(holder);
388
+ __ push(FieldOperand(kScratchRegister, InterceptorInfo::kDataOffset));
389
+ }
390
+
391
+
392
+ static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
393
+ Register receiver,
394
+ Register holder,
395
+ Register name,
396
+ JSObject* holder_obj) {
397
+ PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
398
+
399
+ ExternalReference ref =
400
+ ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
401
+ masm->isolate());
402
+ __ movq(rax, Immediate(5));
403
+ __ LoadAddress(rbx, ref);
404
+
405
+ CEntryStub stub(1);
406
+ __ CallStub(&stub);
407
+ }
408
+
409
+
410
+ // Number of pointers to be reserved on stack for fast API call.
411
+ static const int kFastApiCallArguments = 3;
412
+
413
+
414
+ // Reserves space for the extra arguments to the API function in the
415
+ // caller's frame.
416
+ //
417
+ // These arguments are set by CheckPrototypes and GenerateFastApiCall.
418
+ static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
419
+ // ----------- S t a t e -------------
420
+ // -- rsp[0] : return address
421
+ // -- rsp[8] : last argument in the internal frame of the caller
422
+ // -----------------------------------
423
+ __ movq(scratch, Operand(rsp, 0));
424
+ __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
425
+ __ movq(Operand(rsp, 0), scratch);
426
+ __ Move(scratch, Smi::FromInt(0));
427
+ for (int i = 1; i <= kFastApiCallArguments; i++) {
428
+ __ movq(Operand(rsp, i * kPointerSize), scratch);
429
+ }
430
+ }
431
+
432
+
433
+ // Undoes the effects of ReserveSpaceForFastApiCall.
434
+ static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
435
+ // ----------- S t a t e -------------
436
+ // -- rsp[0] : return address.
437
+ // -- rsp[8] : last fast api call extra argument.
438
+ // -- ...
439
+ // -- rsp[kFastApiCallArguments * 8] : first fast api call extra argument.
440
+ // -- rsp[kFastApiCallArguments * 8 + 8] : last argument in the internal
441
+ // frame.
442
+ // -----------------------------------
443
+ __ movq(scratch, Operand(rsp, 0));
444
+ __ movq(Operand(rsp, kFastApiCallArguments * kPointerSize), scratch);
445
+ __ addq(rsp, Immediate(kPointerSize * kFastApiCallArguments));
446
+ }
447
+
448
+
449
+ // Generates call to API function.
450
+ static MaybeObject* GenerateFastApiCall(MacroAssembler* masm,
451
+ const CallOptimization& optimization,
452
+ int argc) {
453
+ // ----------- S t a t e -------------
454
+ // -- rsp[0] : return address
455
+ // -- rsp[8] : object passing the type check
456
+ // (last fast api call extra argument,
457
+ // set by CheckPrototypes)
458
+ // -- rsp[16] : api function
459
+ // (first fast api call extra argument)
460
+ // -- rsp[24] : api call data
461
+ // -- rsp[32] : last argument
462
+ // -- ...
463
+ // -- rsp[(argc + 3) * 8] : first argument
464
+ // -- rsp[(argc + 4) * 8] : receiver
465
+ // -----------------------------------
466
+ // Get the function and setup the context.
467
+ JSFunction* function = optimization.constant_function();
468
+ __ Move(rdi, Handle<JSFunction>(function));
469
+ __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
470
+
471
+ // Pass the additional arguments.
472
+ __ movq(Operand(rsp, 2 * kPointerSize), rdi);
473
+ Object* call_data = optimization.api_call_info()->data();
474
+ Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info());
475
+ if (masm->isolate()->heap()->InNewSpace(call_data)) {
476
+ __ Move(rcx, api_call_info_handle);
477
+ __ movq(rbx, FieldOperand(rcx, CallHandlerInfo::kDataOffset));
478
+ __ movq(Operand(rsp, 3 * kPointerSize), rbx);
479
+ } else {
480
+ __ Move(Operand(rsp, 3 * kPointerSize), Handle<Object>(call_data));
481
+ }
482
+
483
+ // Prepare arguments.
484
+ __ lea(rbx, Operand(rsp, 3 * kPointerSize));
485
+
486
+ Object* callback = optimization.api_call_info()->callback();
487
+ Address api_function_address = v8::ToCData<Address>(callback);
488
+ ApiFunction fun(api_function_address);
489
+
490
+ #ifdef _WIN64
491
+ // Win64 uses first register--rcx--for returned value.
492
+ Register arguments_arg = rdx;
493
+ #else
494
+ Register arguments_arg = rdi;
495
+ #endif
496
+
497
+ // Allocate the v8::Arguments structure in the arguments' space since
498
+ // it's not controlled by GC.
499
+ const int kApiStackSpace = 4;
500
+
501
+ __ PrepareCallApiFunction(kApiStackSpace);
502
+
503
+ __ movq(StackSpaceOperand(0), rbx); // v8::Arguments::implicit_args_.
504
+ __ addq(rbx, Immediate(argc * kPointerSize));
505
+ __ movq(StackSpaceOperand(1), rbx); // v8::Arguments::values_.
506
+ __ Set(StackSpaceOperand(2), argc); // v8::Arguments::length_.
507
+ // v8::Arguments::is_construct_call_.
508
+ __ Set(StackSpaceOperand(3), 0);
509
+
510
+ // v8::InvocationCallback's argument.
511
+ __ lea(arguments_arg, StackSpaceOperand(0));
512
+ // Emitting a stub call may try to allocate (if the code is not
513
+ // already generated). Do not allow the assembler to perform a
514
+ // garbage collection but instead return the allocation failure
515
+ // object.
516
+ return masm->TryCallApiFunctionAndReturn(&fun,
517
+ argc + kFastApiCallArguments + 1);
518
+ }
519
+
520
+
521
+ class CallInterceptorCompiler BASE_EMBEDDED {
522
+ public:
523
+ CallInterceptorCompiler(StubCompiler* stub_compiler,
524
+ const ParameterCount& arguments,
525
+ Register name)
526
+ : stub_compiler_(stub_compiler),
527
+ arguments_(arguments),
528
+ name_(name) {}
529
+
530
+ MaybeObject* Compile(MacroAssembler* masm,
531
+ JSObject* object,
532
+ JSObject* holder,
533
+ String* name,
534
+ LookupResult* lookup,
535
+ Register receiver,
536
+ Register scratch1,
537
+ Register scratch2,
538
+ Register scratch3,
539
+ Label* miss) {
540
+ ASSERT(holder->HasNamedInterceptor());
541
+ ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());
542
+
543
+ // Check that the receiver isn't a smi.
544
+ __ JumpIfSmi(receiver, miss);
545
+
546
+ CallOptimization optimization(lookup);
547
+
548
+ if (optimization.is_constant_call()) {
549
+ return CompileCacheable(masm,
550
+ object,
551
+ receiver,
552
+ scratch1,
553
+ scratch2,
554
+ scratch3,
555
+ holder,
556
+ lookup,
557
+ name,
558
+ optimization,
559
+ miss);
560
+ } else {
561
+ CompileRegular(masm,
562
+ object,
563
+ receiver,
564
+ scratch1,
565
+ scratch2,
566
+ scratch3,
567
+ name,
568
+ holder,
569
+ miss);
570
+ return masm->isolate()->heap()->undefined_value(); // Success.
571
+ }
572
+ }
573
+
574
+ private:
575
+ MaybeObject* CompileCacheable(MacroAssembler* masm,
576
+ JSObject* object,
577
+ Register receiver,
578
+ Register scratch1,
579
+ Register scratch2,
580
+ Register scratch3,
581
+ JSObject* interceptor_holder,
582
+ LookupResult* lookup,
583
+ String* name,
584
+ const CallOptimization& optimization,
585
+ Label* miss_label) {
586
+ ASSERT(optimization.is_constant_call());
587
+ ASSERT(!lookup->holder()->IsGlobalObject());
588
+
589
+ int depth1 = kInvalidProtoDepth;
590
+ int depth2 = kInvalidProtoDepth;
591
+ bool can_do_fast_api_call = false;
592
+ if (optimization.is_simple_api_call() &&
593
+ !lookup->holder()->IsGlobalObject()) {
594
+ depth1 =
595
+ optimization.GetPrototypeDepthOfExpectedType(object,
596
+ interceptor_holder);
597
+ if (depth1 == kInvalidProtoDepth) {
598
+ depth2 =
599
+ optimization.GetPrototypeDepthOfExpectedType(interceptor_holder,
600
+ lookup->holder());
601
+ }
602
+ can_do_fast_api_call = (depth1 != kInvalidProtoDepth) ||
603
+ (depth2 != kInvalidProtoDepth);
604
+ }
605
+
606
+ Counters* counters = masm->isolate()->counters();
607
+ __ IncrementCounter(counters->call_const_interceptor(), 1);
608
+
609
+ if (can_do_fast_api_call) {
610
+ __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1);
611
+ ReserveSpaceForFastApiCall(masm, scratch1);
612
+ }
613
+
614
+ // Check that the maps from receiver to interceptor's holder
615
+ // haven't changed and thus we can invoke interceptor.
616
+ Label miss_cleanup;
617
+ Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
618
+ Register holder =
619
+ stub_compiler_->CheckPrototypes(object, receiver,
620
+ interceptor_holder, scratch1,
621
+ scratch2, scratch3, name, depth1, miss);
622
+
623
+ // Invoke an interceptor and if it provides a value,
624
+ // branch to |regular_invoke|.
625
+ Label regular_invoke;
626
+ LoadWithInterceptor(masm, receiver, holder, interceptor_holder,
627
+ &regular_invoke);
628
+
629
+ // Interceptor returned nothing for this property. Try to use cached
630
+ // constant function.
631
+
632
+ // Check that the maps from interceptor's holder to constant function's
633
+ // holder haven't changed and thus we can use cached constant function.
634
+ if (interceptor_holder != lookup->holder()) {
635
+ stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
636
+ lookup->holder(), scratch1,
637
+ scratch2, scratch3, name, depth2, miss);
638
+ } else {
639
+ // CheckPrototypes has a side effect of fetching a 'holder'
640
+ // for API (object which is instanceof for the signature). It's
641
+ // safe to omit it here, as if present, it should be fetched
642
+ // by the previous CheckPrototypes.
643
+ ASSERT(depth2 == kInvalidProtoDepth);
644
+ }
645
+
646
+ // Invoke function.
647
+ if (can_do_fast_api_call) {
648
+ MaybeObject* result = GenerateFastApiCall(masm,
649
+ optimization,
650
+ arguments_.immediate());
651
+ if (result->IsFailure()) return result;
652
+ } else {
653
+ __ InvokeFunction(optimization.constant_function(), arguments_,
654
+ JUMP_FUNCTION);
655
+ }
656
+
657
+ // Deferred code for fast API call case---clean preallocated space.
658
+ if (can_do_fast_api_call) {
659
+ __ bind(&miss_cleanup);
660
+ FreeSpaceForFastApiCall(masm, scratch1);
661
+ __ jmp(miss_label);
662
+ }
663
+
664
+ // Invoke a regular function.
665
+ __ bind(&regular_invoke);
666
+ if (can_do_fast_api_call) {
667
+ FreeSpaceForFastApiCall(masm, scratch1);
668
+ }
669
+
670
+ return masm->isolate()->heap()->undefined_value(); // Success.
671
+ }
672
+
673
+ void CompileRegular(MacroAssembler* masm,
674
+ JSObject* object,
675
+ Register receiver,
676
+ Register scratch1,
677
+ Register scratch2,
678
+ Register scratch3,
679
+ String* name,
680
+ JSObject* interceptor_holder,
681
+ Label* miss_label) {
682
+ Register holder =
683
+ stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
684
+ scratch1, scratch2, scratch3, name,
685
+ miss_label);
686
+
687
+ __ EnterInternalFrame();
688
+ // Save the name_ register across the call.
689
+ __ push(name_);
690
+
691
+ PushInterceptorArguments(masm,
692
+ receiver,
693
+ holder,
694
+ name_,
695
+ interceptor_holder);
696
+
697
+ __ CallExternalReference(
698
+ ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
699
+ masm->isolate()),
700
+ 5);
701
+
702
+ // Restore the name_ register.
703
+ __ pop(name_);
704
+ __ LeaveInternalFrame();
705
+ }
706
+
707
+ void LoadWithInterceptor(MacroAssembler* masm,
708
+ Register receiver,
709
+ Register holder,
710
+ JSObject* holder_obj,
711
+ Label* interceptor_succeeded) {
712
+ __ EnterInternalFrame();
713
+ __ push(holder); // Save the holder.
714
+ __ push(name_); // Save the name.
715
+
716
+ CompileCallLoadPropertyWithInterceptor(masm,
717
+ receiver,
718
+ holder,
719
+ name_,
720
+ holder_obj);
721
+
722
+ __ pop(name_); // Restore the name.
723
+ __ pop(receiver); // Restore the holder.
724
+ __ LeaveInternalFrame();
725
+
726
+ __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
727
+ __ j(not_equal, interceptor_succeeded);
728
+ }
729
+
730
+ StubCompiler* stub_compiler_;
731
+ const ParameterCount& arguments_;
732
+ Register name_;
733
+ };
734
+
735
+
736
+ void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
737
+ ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
738
+ Code* code = NULL;
739
+ if (kind == Code::LOAD_IC) {
740
+ code = masm->isolate()->builtins()->builtin(Builtins::kLoadIC_Miss);
741
+ } else {
742
+ code = masm->isolate()->builtins()->builtin(Builtins::kKeyedLoadIC_Miss);
743
+ }
744
+
745
+ Handle<Code> ic(code);
746
+ __ Jump(ic, RelocInfo::CODE_TARGET);
747
+ }
748
+
749
+
750
+ // Both name_reg and receiver_reg are preserved on jumps to miss_label,
751
+ // but may be destroyed if the store is successful.
752
+ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
753
+ JSObject* object,
754
+ int index,
755
+ Map* transition,
756
+ Register receiver_reg,
757
+ Register name_reg,
758
+ Register scratch,
759
+ Label* miss_label) {
760
+ // Check that the object isn't a smi.
761
+ __ JumpIfSmi(receiver_reg, miss_label);
762
+
763
+ // Check that the map of the object hasn't changed.
764
+ __ Cmp(FieldOperand(receiver_reg, HeapObject::kMapOffset),
765
+ Handle<Map>(object->map()));
766
+ __ j(not_equal, miss_label);
767
+
768
+ // Perform global security token check if needed.
769
+ if (object->IsJSGlobalProxy()) {
770
+ __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
771
+ }
772
+
773
+ // Stub never generated for non-global objects that require access
774
+ // checks.
775
+ ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
776
+
777
+ // Perform map transition for the receiver if necessary.
778
+ if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
779
+ // The properties must be extended before we can store the value.
780
+ // We jump to a runtime call that extends the properties array.
781
+ __ pop(scratch); // Return address.
782
+ __ push(receiver_reg);
783
+ __ Push(Handle<Map>(transition));
784
+ __ push(rax);
785
+ __ push(scratch);
786
+ __ TailCallExternalReference(
787
+ ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
788
+ masm->isolate()),
789
+ 3,
790
+ 1);
791
+ return;
792
+ }
793
+
794
+ if (transition != NULL) {
795
+ // Update the map of the object; no write barrier updating is
796
+ // needed because the map is never in new space.
797
+ __ Move(FieldOperand(receiver_reg, HeapObject::kMapOffset),
798
+ Handle<Map>(transition));
799
+ }
800
+
801
+ // Adjust for the number of properties stored in the object. Even in the
802
+ // face of a transition we can use the old map here because the size of the
803
+ // object and the number of in-object properties is not going to change.
804
+ index -= object->map()->inobject_properties();
805
+
806
+ if (index < 0) {
807
+ // Set the property straight into the object.
808
+ int offset = object->map()->instance_size() + (index * kPointerSize);
809
+ __ movq(FieldOperand(receiver_reg, offset), rax);
810
+
811
+ // Update the write barrier for the array address.
812
+ // Pass the value being stored in the now unused name_reg.
813
+ __ movq(name_reg, rax);
814
+ __ RecordWrite(receiver_reg, offset, name_reg, scratch);
815
+ } else {
816
+ // Write to the properties array.
817
+ int offset = index * kPointerSize + FixedArray::kHeaderSize;
818
+ // Get the properties array (optimistically).
819
+ __ movq(scratch, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
820
+ __ movq(FieldOperand(scratch, offset), rax);
821
+
822
+ // Update the write barrier for the array address.
823
+ // Pass the value being stored in the now unused name_reg.
824
+ __ movq(name_reg, rax);
825
+ __ RecordWrite(scratch, offset, name_reg, receiver_reg);
826
+ }
827
+
828
+ // Return the value (register rax).
829
+ __ ret(0);
830
+ }
831
+
832
+
833
+ // Generate code to check that a global property cell is empty. Create
834
+ // the property cell at compilation time if no cell exists for the
835
+ // property.
836
+ MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell(
837
+ MacroAssembler* masm,
838
+ GlobalObject* global,
839
+ String* name,
840
+ Register scratch,
841
+ Label* miss) {
842
+ Object* probe;
843
+ { MaybeObject* maybe_probe = global->EnsurePropertyCell(name);
844
+ if (!maybe_probe->ToObject(&probe)) return maybe_probe;
845
+ }
846
+ JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe);
847
+ ASSERT(cell->value()->IsTheHole());
848
+ __ Move(scratch, Handle<Object>(cell));
849
+ __ Cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset),
850
+ masm->isolate()->factory()->the_hole_value());
851
+ __ j(not_equal, miss);
852
+ return cell;
853
+ }
854
+
855
+
856
+ #undef __
857
+ #define __ ACCESS_MASM((masm()))
858
+
859
+
860
+ Register StubCompiler::CheckPrototypes(JSObject* object,
861
+ Register object_reg,
862
+ JSObject* holder,
863
+ Register holder_reg,
864
+ Register scratch1,
865
+ Register scratch2,
866
+ String* name,
867
+ int save_at_depth,
868
+ Label* miss) {
869
+ // Make sure there's no overlap between holder and object registers.
870
+ ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
871
+ ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
872
+ && !scratch2.is(scratch1));
873
+
874
+ // Keep track of the current object in register reg. On the first
875
+ // iteration, reg is an alias for object_reg; on later iterations,
876
+ // it is an alias for holder_reg.
877
+ Register reg = object_reg;
878
+ int depth = 0;
879
+
880
+ if (save_at_depth == depth) {
881
+ __ movq(Operand(rsp, kPointerSize), object_reg);
882
+ }
883
+
884
+ // Check the maps in the prototype chain.
885
+ // Traverse the prototype chain from the object and do map checks.
886
+ JSObject* current = object;
887
+ while (current != holder) {
888
+ depth++;
889
+
890
+ // Only global objects and objects that do not require access
891
+ // checks are allowed in stubs.
892
+ ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
893
+
894
+ JSObject* prototype = JSObject::cast(current->GetPrototype());
895
+ if (!current->HasFastProperties() &&
896
+ !current->IsJSGlobalObject() &&
897
+ !current->IsJSGlobalProxy()) {
898
+ if (!name->IsSymbol()) {
899
+ MaybeObject* lookup_result = heap()->LookupSymbol(name);
900
+ if (lookup_result->IsFailure()) {
901
+ set_failure(Failure::cast(lookup_result));
902
+ return reg;
903
+ } else {
904
+ name = String::cast(lookup_result->ToObjectUnchecked());
905
+ }
906
+ }
907
+ ASSERT(current->property_dictionary()->FindEntry(name) ==
908
+ StringDictionary::kNotFound);
909
+
910
+ GenerateDictionaryNegativeLookup(masm(),
911
+ miss,
912
+ reg,
913
+ name,
914
+ scratch1,
915
+ scratch2);
916
+ __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
917
+ reg = holder_reg; // from now on the object is in holder_reg
918
+ __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
919
+ } else if (heap()->InNewSpace(prototype)) {
920
+ // Get the map of the current object.
921
+ __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
922
+ __ Cmp(scratch1, Handle<Map>(current->map()));
923
+ // Branch on the result of the map check.
924
+ __ j(not_equal, miss);
925
+ // Check access rights to the global object. This has to happen
926
+ // after the map check so that we know that the object is
927
+ // actually a global object.
928
+ if (current->IsJSGlobalProxy()) {
929
+ __ CheckAccessGlobalProxy(reg, scratch1, miss);
930
+
931
+ // Restore scratch register to be the map of the object.
932
+ // We load the prototype from the map in the scratch register.
933
+ __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
934
+ }
935
+ // The prototype is in new space; we cannot store a reference
936
+ // to it in the code. Load it from the map.
937
+ reg = holder_reg; // from now on the object is in holder_reg
938
+ __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
939
+
940
+ } else {
941
+ // Check the map of the current object.
942
+ __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
943
+ Handle<Map>(current->map()));
944
+ // Branch on the result of the map check.
945
+ __ j(not_equal, miss);
946
+ // Check access rights to the global object. This has to happen
947
+ // after the map check so that we know that the object is
948
+ // actually a global object.
949
+ if (current->IsJSGlobalProxy()) {
950
+ __ CheckAccessGlobalProxy(reg, scratch1, miss);
951
+ }
952
+ // The prototype is in old space; load it directly.
953
+ reg = holder_reg; // from now on the object is in holder_reg
954
+ __ Move(reg, Handle<JSObject>(prototype));
955
+ }
956
+
957
+ if (save_at_depth == depth) {
958
+ __ movq(Operand(rsp, kPointerSize), reg);
959
+ }
960
+
961
+ // Go to the next object in the prototype chain.
962
+ current = prototype;
963
+ }
964
+
965
+ // Check the holder map.
966
+ __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), Handle<Map>(holder->map()));
967
+ __ j(not_equal, miss);
968
+
969
+ // Log the check depth.
970
+ LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
971
+
972
+ // Perform security check for access to the global object and return
973
+ // the holder register.
974
+ ASSERT(current == holder);
975
+ ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
976
+ if (current->IsJSGlobalProxy()) {
977
+ __ CheckAccessGlobalProxy(reg, scratch1, miss);
978
+ }
979
+
980
+ // If we've skipped any global objects, it's not enough to verify
981
+ // that their maps haven't changed. We also need to check that the
982
+ // property cell for the property is still empty.
983
+ current = object;
984
+ while (current != holder) {
985
+ if (current->IsGlobalObject()) {
986
+ MaybeObject* cell = GenerateCheckPropertyCell(masm(),
987
+ GlobalObject::cast(current),
988
+ name,
989
+ scratch1,
990
+ miss);
991
+ if (cell->IsFailure()) {
992
+ set_failure(Failure::cast(cell));
993
+ return reg;
994
+ }
995
+ }
996
+ current = JSObject::cast(current->GetPrototype());
997
+ }
998
+
999
+ // Return the register containing the holder.
1000
+ return reg;
1001
+ }
1002
+
1003
+
1004
+ void StubCompiler::GenerateLoadField(JSObject* object,
1005
+ JSObject* holder,
1006
+ Register receiver,
1007
+ Register scratch1,
1008
+ Register scratch2,
1009
+ Register scratch3,
1010
+ int index,
1011
+ String* name,
1012
+ Label* miss) {
1013
+ // Check that the receiver isn't a smi.
1014
+ __ JumpIfSmi(receiver, miss);
1015
+
1016
+ // Check the prototype chain.
1017
+ Register reg =
1018
+ CheckPrototypes(object, receiver, holder,
1019
+ scratch1, scratch2, scratch3, name, miss);
1020
+
1021
+ // Get the value from the properties.
1022
+ GenerateFastPropertyLoad(masm(), rax, reg, holder, index);
1023
+ __ ret(0);
1024
+ }
1025
+
1026
+
1027
+ MaybeObject* StubCompiler::GenerateLoadCallback(JSObject* object,
1028
+ JSObject* holder,
1029
+ Register receiver,
1030
+ Register name_reg,
1031
+ Register scratch1,
1032
+ Register scratch2,
1033
+ Register scratch3,
1034
+ AccessorInfo* callback,
1035
+ String* name,
1036
+ Label* miss) {
1037
+ // Check that the receiver isn't a smi.
1038
+ __ JumpIfSmi(receiver, miss);
1039
+
1040
+ // Check that the maps haven't changed.
1041
+ Register reg =
1042
+ CheckPrototypes(object, receiver, holder, scratch1,
1043
+ scratch2, scratch3, name, miss);
1044
+
1045
+ Handle<AccessorInfo> callback_handle(callback);
1046
+
1047
+ // Insert additional parameters into the stack frame above return address.
1048
+ ASSERT(!scratch2.is(reg));
1049
+ __ pop(scratch2); // Get return address to place it below.
1050
+
1051
+ __ push(receiver); // receiver
1052
+ __ push(reg); // holder
1053
+ if (heap()->InNewSpace(callback_handle->data())) {
1054
+ __ Move(scratch1, callback_handle);
1055
+ __ push(FieldOperand(scratch1, AccessorInfo::kDataOffset)); // data
1056
+ } else {
1057
+ __ Push(Handle<Object>(callback_handle->data()));
1058
+ }
1059
+ __ push(name_reg); // name
1060
+ // Save a pointer to where we pushed the arguments pointer.
1061
+ // This will be passed as the const AccessorInfo& to the C++ callback.
1062
+
1063
+ #ifdef _WIN64
1064
+ // Win64 uses first register--rcx--for returned value.
1065
+ Register accessor_info_arg = r8;
1066
+ Register name_arg = rdx;
1067
+ #else
1068
+ Register accessor_info_arg = rsi;
1069
+ Register name_arg = rdi;
1070
+ #endif
1071
+
1072
+ ASSERT(!name_arg.is(scratch2));
1073
+ __ movq(name_arg, rsp);
1074
+ __ push(scratch2); // Restore return address.
1075
+
1076
+ // Do call through the api.
1077
+ Address getter_address = v8::ToCData<Address>(callback->getter());
1078
+ ApiFunction fun(getter_address);
1079
+
1080
+ // 3-element array for v8::Arguments::values_ and a handle for the name.
1081
+ const int kStackSpace = 4;
1082
+
1083
+ // Allocate v8::AccessorInfo in non-GCed stack space.
1084
+ const int kArgStackSpace = 1;
1085
+
1086
+ __ PrepareCallApiFunction(kArgStackSpace);
1087
+ __ lea(rax, Operand(name_arg, 3 * kPointerSize));
1088
+
1089
+ // v8::AccessorInfo::args_.
1090
+ __ movq(StackSpaceOperand(0), rax);
1091
+
1092
+ // The context register (rsi) has been saved in PrepareCallApiFunction and
1093
+ // could be used to pass arguments.
1094
+ __ lea(accessor_info_arg, StackSpaceOperand(0));
1095
+
1096
+ // Emitting a stub call may try to allocate (if the code is not
1097
+ // already generated). Do not allow the assembler to perform a
1098
+ // garbage collection but instead return the allocation failure
1099
+ // object.
1100
+ return masm()->TryCallApiFunctionAndReturn(&fun, kStackSpace);
1101
+ }
1102
+
1103
+
1104
+ void StubCompiler::GenerateLoadConstant(JSObject* object,
1105
+ JSObject* holder,
1106
+ Register receiver,
1107
+ Register scratch1,
1108
+ Register scratch2,
1109
+ Register scratch3,
1110
+ Object* value,
1111
+ String* name,
1112
+ Label* miss) {
1113
+ // Check that the receiver isn't a smi.
1114
+ __ JumpIfSmi(receiver, miss);
1115
+
1116
+ // Check that the maps haven't changed.
1117
+ Register reg =
1118
+ CheckPrototypes(object, receiver, holder,
1119
+ scratch1, scratch2, scratch3, name, miss);
1120
+
1121
+ // Return the constant value.
1122
+ __ Move(rax, Handle<Object>(value));
1123
+ __ ret(0);
1124
+ }
1125
+
1126
+
1127
+ void StubCompiler::GenerateLoadInterceptor(JSObject* object,
1128
+ JSObject* interceptor_holder,
1129
+ LookupResult* lookup,
1130
+ Register receiver,
1131
+ Register name_reg,
1132
+ Register scratch1,
1133
+ Register scratch2,
1134
+ Register scratch3,
1135
+ String* name,
1136
+ Label* miss) {
1137
+ ASSERT(interceptor_holder->HasNamedInterceptor());
1138
+ ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
1139
+
1140
+ // Check that the receiver isn't a smi.
1141
+ __ JumpIfSmi(receiver, miss);
1142
+
1143
+ // So far the most popular follow-ups for interceptor loads are FIELD
1144
+ // and CALLBACKS, so inline only them; other cases may be added
1145
+ // later.
1146
+ bool compile_followup_inline = false;
1147
+ if (lookup->IsProperty() && lookup->IsCacheable()) {
1148
+ if (lookup->type() == FIELD) {
1149
+ compile_followup_inline = true;
1150
+ } else if (lookup->type() == CALLBACKS &&
1151
+ lookup->GetCallbackObject()->IsAccessorInfo() &&
1152
+ AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL) {
1153
+ compile_followup_inline = true;
1154
+ }
1155
+ }
1156
+
1157
+ if (compile_followup_inline) {
1158
+ // Compile the interceptor call, followed by inline code to load the
1159
+ // property from further up the prototype chain if the call fails.
1160
+ // Check that the maps haven't changed.
1161
+ Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
1162
+ scratch1, scratch2, scratch3,
1163
+ name, miss);
1164
+ ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));
1165
+
1166
+ // Save necessary data before invoking an interceptor.
1167
+ // Requires a frame to make GC aware of pushed pointers.
1168
+ __ EnterInternalFrame();
1169
+
1170
+ if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
1171
+ // CALLBACKS case needs a receiver to be passed into C++ callback.
1172
+ __ push(receiver);
1173
+ }
1174
+ __ push(holder_reg);
1175
+ __ push(name_reg);
1176
+
1177
+ // Invoke an interceptor. Note: map checks from receiver to
1178
+ // interceptor's holder have been compiled before (see a caller
1179
+ // of this method).
1180
+ CompileCallLoadPropertyWithInterceptor(masm(),
1181
+ receiver,
1182
+ holder_reg,
1183
+ name_reg,
1184
+ interceptor_holder);
1185
+
1186
+ // Check if interceptor provided a value for property. If it's
1187
+ // the case, return immediately.
1188
+ Label interceptor_failed;
1189
+ __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
1190
+ __ j(equal, &interceptor_failed);
1191
+ __ LeaveInternalFrame();
1192
+ __ ret(0);
1193
+
1194
+ __ bind(&interceptor_failed);
1195
+ __ pop(name_reg);
1196
+ __ pop(holder_reg);
1197
+ if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
1198
+ __ pop(receiver);
1199
+ }
1200
+
1201
+ __ LeaveInternalFrame();
1202
+
1203
+ // Check that the maps from interceptor's holder to lookup's holder
1204
+ // haven't changed. And load lookup's holder into |holder| register.
1205
+ if (interceptor_holder != lookup->holder()) {
1206
+ holder_reg = CheckPrototypes(interceptor_holder,
1207
+ holder_reg,
1208
+ lookup->holder(),
1209
+ scratch1,
1210
+ scratch2,
1211
+ scratch3,
1212
+ name,
1213
+ miss);
1214
+ }
1215
+
1216
+ if (lookup->type() == FIELD) {
1217
+ // We found FIELD property in prototype chain of interceptor's holder.
1218
+ // Retrieve a field from field's holder.
1219
+ GenerateFastPropertyLoad(masm(), rax, holder_reg,
1220
+ lookup->holder(), lookup->GetFieldIndex());
1221
+ __ ret(0);
1222
+ } else {
1223
+ // We found CALLBACKS property in prototype chain of interceptor's
1224
+ // holder.
1225
+ ASSERT(lookup->type() == CALLBACKS);
1226
+ ASSERT(lookup->GetCallbackObject()->IsAccessorInfo());
1227
+ AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
1228
+ ASSERT(callback != NULL);
1229
+ ASSERT(callback->getter() != NULL);
1230
+
1231
+ // Tail call to runtime.
1232
+ // Important invariant in CALLBACKS case: the code above must be
1233
+ // structured to never clobber |receiver| register.
1234
+ __ pop(scratch2); // return address
1235
+ __ push(receiver);
1236
+ __ push(holder_reg);
1237
+ __ Move(holder_reg, Handle<AccessorInfo>(callback));
1238
+ __ push(FieldOperand(holder_reg, AccessorInfo::kDataOffset));
1239
+ __ push(holder_reg);
1240
+ __ push(name_reg);
1241
+ __ push(scratch2); // restore return address
1242
+
1243
+ ExternalReference ref =
1244
+ ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
1245
+ isolate());
1246
+ __ TailCallExternalReference(ref, 5, 1);
1247
+ }
1248
+ } else { // !compile_followup_inline
1249
+ // Call the runtime system to load the interceptor.
1250
+ // Check that the maps haven't changed.
1251
+ Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
1252
+ scratch1, scratch2, scratch3,
1253
+ name, miss);
1254
+ __ pop(scratch2); // save old return address
1255
+ PushInterceptorArguments(masm(), receiver, holder_reg,
1256
+ name_reg, interceptor_holder);
1257
+ __ push(scratch2); // restore old return address
1258
+
1259
+ ExternalReference ref = ExternalReference(
1260
+ IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), isolate());
1261
+ __ TailCallExternalReference(ref, 5, 1);
1262
+ }
1263
+ }
1264
+
1265
+
1266
+ void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
1267
+ if (kind_ == Code::KEYED_CALL_IC) {
1268
+ __ Cmp(rcx, Handle<String>(name));
1269
+ __ j(not_equal, miss);
1270
+ }
1271
+ }
1272
+
1273
+
1274
+ void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object,
1275
+ JSObject* holder,
1276
+ String* name,
1277
+ Label* miss) {
1278
+ ASSERT(holder->IsGlobalObject());
1279
+
1280
+ // Get the number of arguments.
1281
+ const int argc = arguments().immediate();
1282
+
1283
+ // Get the receiver from the stack.
1284
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1285
+
1286
+ // If the object is the holder then we know that it's a global
1287
+ // object which can only happen for contextual calls. In this case,
1288
+ // the receiver cannot be a smi.
1289
+ if (object != holder) {
1290
+ __ JumpIfSmi(rdx, miss);
1291
+ }
1292
+
1293
+ // Check that the maps haven't changed.
1294
+ CheckPrototypes(object, rdx, holder, rbx, rax, rdi, name, miss);
1295
+ }
1296
+
1297
+
1298
+ void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
1299
+ JSFunction* function,
1300
+ Label* miss) {
1301
+ // Get the value from the cell.
1302
+ __ Move(rdi, Handle<JSGlobalPropertyCell>(cell));
1303
+ __ movq(rdi, FieldOperand(rdi, JSGlobalPropertyCell::kValueOffset));
1304
+
1305
+ // Check that the cell contains the same function.
1306
+ if (heap()->InNewSpace(function)) {
1307
+ // We can't embed a pointer to a function in new space so we have
1308
+ // to verify that the shared function info is unchanged. This has
1309
+ // the nice side effect that multiple closures based on the same
1310
+ // function can all use this call IC. Before we load through the
1311
+ // function, we have to verify that it still is a function.
1312
+ __ JumpIfSmi(rdi, miss);
1313
+ __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rax);
1314
+ __ j(not_equal, miss);
1315
+
1316
+ // Check the shared function info. Make sure it hasn't changed.
1317
+ __ Move(rax, Handle<SharedFunctionInfo>(function->shared()));
1318
+ __ cmpq(FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset), rax);
1319
+ __ j(not_equal, miss);
1320
+ } else {
1321
+ __ Cmp(rdi, Handle<JSFunction>(function));
1322
+ __ j(not_equal, miss);
1323
+ }
1324
+ }
1325
+
1326
+
1327
+ MaybeObject* CallStubCompiler::GenerateMissBranch() {
1328
+ MaybeObject* maybe_obj = isolate()->stub_cache()->ComputeCallMiss(
1329
+ arguments().immediate(), kind_);
1330
+ Object* obj;
1331
+ if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1332
+ __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
1333
+ return obj;
1334
+ }
1335
+
1336
+
1337
+ MaybeObject* CallStubCompiler::CompileCallField(JSObject* object,
1338
+ JSObject* holder,
1339
+ int index,
1340
+ String* name) {
1341
+ // ----------- S t a t e -------------
1342
+ // rcx : function name
1343
+ // rsp[0] : return address
1344
+ // rsp[8] : argument argc
1345
+ // rsp[16] : argument argc - 1
1346
+ // ...
1347
+ // rsp[argc * 8] : argument 1
1348
+ // rsp[(argc + 1) * 8] : argument 0 = receiver
1349
+ // -----------------------------------
1350
+ Label miss;
1351
+
1352
+ GenerateNameCheck(name, &miss);
1353
+
1354
+ // Get the receiver from the stack.
1355
+ const int argc = arguments().immediate();
1356
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1357
+
1358
+ // Check that the receiver isn't a smi.
1359
+ __ JumpIfSmi(rdx, &miss);
1360
+
1361
+ // Do the right check and compute the holder register.
1362
+ Register reg = CheckPrototypes(object, rdx, holder, rbx, rax, rdi,
1363
+ name, &miss);
1364
+
1365
+ GenerateFastPropertyLoad(masm(), rdi, reg, holder, index);
1366
+
1367
+ // Check that the function really is a function.
1368
+ __ JumpIfSmi(rdi, &miss);
1369
+ __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rbx);
1370
+ __ j(not_equal, &miss);
1371
+
1372
+ // Patch the receiver on the stack with the global proxy if
1373
+ // necessary.
1374
+ if (object->IsGlobalObject()) {
1375
+ __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
1376
+ __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
1377
+ }
1378
+
1379
+ // Invoke the function.
1380
+ __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION);
1381
+
1382
+ // Handle call cache miss.
1383
+ __ bind(&miss);
1384
+ MaybeObject* maybe_result = GenerateMissBranch();
1385
+ if (maybe_result->IsFailure()) return maybe_result;
1386
+
1387
+ // Return the generated code.
1388
+ return GetCode(FIELD, name);
1389
+ }
1390
+
1391
+
1392
+ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
1393
+ JSObject* holder,
1394
+ JSGlobalPropertyCell* cell,
1395
+ JSFunction* function,
1396
+ String* name) {
1397
+ // ----------- S t a t e -------------
1398
+ // -- rcx : name
1399
+ // -- rsp[0] : return address
1400
+ // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1401
+ // -- ...
1402
+ // -- rsp[(argc + 1) * 8] : receiver
1403
+ // -----------------------------------
1404
+
1405
+ // If object is not an array, bail out to regular call.
1406
+ if (!object->IsJSArray() || cell != NULL) return heap()->undefined_value();
1407
+
1408
+ Label miss;
1409
+
1410
+ GenerateNameCheck(name, &miss);
1411
+
1412
+ // Get the receiver from the stack.
1413
+ const int argc = arguments().immediate();
1414
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1415
+
1416
+ // Check that the receiver isn't a smi.
1417
+ __ JumpIfSmi(rdx, &miss);
1418
+
1419
+ CheckPrototypes(JSObject::cast(object),
1420
+ rdx,
1421
+ holder,
1422
+ rbx,
1423
+ rax,
1424
+ rdi,
1425
+ name,
1426
+ &miss);
1427
+
1428
+ if (argc == 0) {
1429
+ // No-op; just return the length.
1430
+ __ movq(rax, FieldOperand(rdx, JSArray::kLengthOffset));
1431
+ __ ret((argc + 1) * kPointerSize);
1432
+ } else {
1433
+ Label call_builtin;
1434
+
1435
+ // Get the elements array of the object.
1436
+ __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset));
1437
+
1438
+ // Check that the elements are in fast mode and writable.
1439
+ __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
1440
+ factory()->fixed_array_map());
1441
+ __ j(not_equal, &call_builtin);
1442
+
1443
+ if (argc == 1) { // Otherwise fall through to call builtin.
1444
+ Label exit, with_write_barrier, attempt_to_grow_elements;
1445
+
1446
+ // Get the array's length into rax and calculate new length.
1447
+ __ SmiToInteger32(rax, FieldOperand(rdx, JSArray::kLengthOffset));
1448
+ STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue);
1449
+ __ addl(rax, Immediate(argc));
1450
+
1451
+ // Get the elements array's length into rcx.
1452
+ __ SmiToInteger32(rcx, FieldOperand(rbx, FixedArray::kLengthOffset));
1453
+
1454
+ // Check if we could survive without allocation.
1455
+ __ cmpl(rax, rcx);
1456
+ __ j(greater, &attempt_to_grow_elements);
1457
+
1458
+ // Save new length.
1459
+ __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);
1460
+
1461
+ // Push the element.
1462
+ __ movq(rcx, Operand(rsp, argc * kPointerSize));
1463
+ __ lea(rdx, FieldOperand(rbx,
1464
+ rax, times_pointer_size,
1465
+ FixedArray::kHeaderSize - argc * kPointerSize));
1466
+ __ movq(Operand(rdx, 0), rcx);
1467
+
1468
+ // Check if value is a smi.
1469
+ __ Integer32ToSmi(rax, rax); // Return new length as smi.
1470
+
1471
+ __ JumpIfNotSmi(rcx, &with_write_barrier);
1472
+
1473
+ __ bind(&exit);
1474
+ __ ret((argc + 1) * kPointerSize);
1475
+
1476
+ __ bind(&with_write_barrier);
1477
+
1478
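+ // The write barrier can be skipped when the elements array is in new space.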
+ __ InNewSpace(rbx, rcx, equal, &exit);
1479
+
1480
+ __ RecordWriteHelper(rbx, rdx, rcx);
1481
+
1482
+ __ ret((argc + 1) * kPointerSize);
1483
+
1484
+ __ bind(&attempt_to_grow_elements);
1485
+ if (!FLAG_inline_new) {
1486
+ __ jmp(&call_builtin);
1487
+ }
1488
+
1489
+ ExternalReference new_space_allocation_top =
1490
+ ExternalReference::new_space_allocation_top_address(isolate());
1491
+ ExternalReference new_space_allocation_limit =
1492
+ ExternalReference::new_space_allocation_limit_address(isolate());
1493
+
1494
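+ // Number of extra element slots to claim when growing the elements array in place.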
+ const int kAllocationDelta = 4;
1495
+ // Load top.
1496
+ __ Load(rcx, new_space_allocation_top);
1497
+
1498
+ // Check if it's the end of elements.
1499
+ __ lea(rdx, FieldOperand(rbx,
1500
+ rax, times_pointer_size,
1501
+ FixedArray::kHeaderSize - argc * kPointerSize));
1502
+ __ cmpq(rdx, rcx);
1503
+ __ j(not_equal, &call_builtin);
1504
+ __ addq(rcx, Immediate(kAllocationDelta * kPointerSize));
1505
+ Operand limit_operand =
1506
+ masm()->ExternalOperand(new_space_allocation_limit);
1507
+ __ cmpq(rcx, limit_operand);
1508
+ __ j(above, &call_builtin);
1509
+
1510
+ // We fit and could grow elements.
1511
+ __ Store(new_space_allocation_top, rcx);
1512
+ __ movq(rcx, Operand(rsp, argc * kPointerSize));
1513
+
1514
+ // Push the argument...
1515
+ __ movq(Operand(rdx, 0), rcx);
1516
+ // ... and fill the rest with holes.
1517
+ __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
1518
+ for (int i = 1; i < kAllocationDelta; i++) {
1519
+ __ movq(Operand(rdx, i * kPointerSize), kScratchRegister);
1520
+ }
1521
+
1522
+ // Restore receiver to rdx as finish sequence assumes it's here.
1523
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1524
+
1525
+ // Increment the elements array's and the array's sizes.
1526
+ __ SmiAddConstant(FieldOperand(rbx, FixedArray::kLengthOffset),
1527
+ Smi::FromInt(kAllocationDelta));
1528
+
1529
+ // Make new length a smi before returning it.
1530
+ __ Integer32ToSmi(rax, rax);
1531
+ __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rax);
1532
+
1533
+ // Elements are in new space, so write barrier is not required.
1534
+ __ ret((argc + 1) * kPointerSize);
1535
+ }
1536
+
1537
+ __ bind(&call_builtin);
1538
+ __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,
1539
+ isolate()),
1540
+ argc + 1,
1541
+ 1);
1542
+ }
1543
+
1544
+ __ bind(&miss);
1545
+ MaybeObject* maybe_result = GenerateMissBranch();
1546
+ if (maybe_result->IsFailure()) return maybe_result;
1547
+
1548
+ // Return the generated code.
1549
+ return GetCode(function);
1550
+ }
1551
+
1552
+
1553
+ MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object,
1554
+ JSObject* holder,
1555
+ JSGlobalPropertyCell* cell,
1556
+ JSFunction* function,
1557
+ String* name) {
1558
+ // ----------- S t a t e -------------
1559
+ // -- rcx : name
1560
+ // -- rsp[0] : return address
1561
+ // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1562
+ // -- ...
1563
+ // -- rsp[(argc + 1) * 8] : receiver
1564
+ // -----------------------------------
1565
+
1566
+ // If object is not an array, bail out to regular call.
1567
+ if (!object->IsJSArray() || cell != NULL) return heap()->undefined_value();
1568
+
1569
+ Label miss, return_undefined, call_builtin;
1570
+
1571
+ GenerateNameCheck(name, &miss);
1572
+
1573
+ // Get the receiver from the stack.
1574
+ const int argc = arguments().immediate();
1575
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1576
+
1577
+ // Check that the receiver isn't a smi.
1578
+ __ JumpIfSmi(rdx, &miss);
1579
+
1580
+ CheckPrototypes(JSObject::cast(object), rdx,
1581
+ holder, rbx,
1582
+ rax, rdi, name, &miss);
1583
+
1584
+ // Get the elements array of the object.
1585
+ __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset));
1586
+
1587
+ // Check that the elements are in fast mode and writable.
1588
+ __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
1589
+ Heap::kFixedArrayMapRootIndex);
1590
+ __ j(not_equal, &call_builtin);
1591
+
1592
+ // Get the array's length into rcx and calculate new length.
1593
+ __ SmiToInteger32(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
1594
+ __ subl(rcx, Immediate(1));
1595
+ __ j(negative, &return_undefined);
1596
+
1597
+ // Get the last element.
1598
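+ // r9 holds the hole value; it is compared against the last element and later used to clear the slot.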
+ __ LoadRoot(r9, Heap::kTheHoleValueRootIndex);
1599
+ __ movq(rax, FieldOperand(rbx,
1600
+ rcx, times_pointer_size,
1601
+ FixedArray::kHeaderSize));
1602
+ // Check if element is already the hole.
1603
+ __ cmpq(rax, r9);
1604
+ // If so, call slow-case to also check prototypes for value.
1605
+ __ j(equal, &call_builtin);
1606
+
1607
+ // Set the array's length.
1608
+ __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rcx);
1609
+
1610
+ // Fill with the hole and return original value.
1611
+ __ movq(FieldOperand(rbx,
1612
+ rcx, times_pointer_size,
1613
+ FixedArray::kHeaderSize),
1614
+ r9);
1615
+ __ ret((argc + 1) * kPointerSize);
1616
+
1617
+ __ bind(&return_undefined);
1618
+ __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1619
+ __ ret((argc + 1) * kPointerSize);
1620
+
1621
+ __ bind(&call_builtin);
1622
+ __ TailCallExternalReference(
1623
+ ExternalReference(Builtins::c_ArrayPop, isolate()),
1624
+ argc + 1,
1625
+ 1);
1626
+
1627
+ __ bind(&miss);
1628
+ MaybeObject* maybe_result = GenerateMissBranch();
1629
+ if (maybe_result->IsFailure()) return maybe_result;
1630
+
1631
+ // Return the generated code.
1632
+ return GetCode(function);
1633
+ }
1634
+
1635
+
1636
+ MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
1637
+ Object* object,
1638
+ JSObject* holder,
1639
+ JSGlobalPropertyCell* cell,
1640
+ JSFunction* function,
1641
+ String* name) {
1642
+ // ----------- S t a t e -------------
1643
+ // -- rcx : function name
1644
+ // -- rsp[0] : return address
1645
+ // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1646
+ // -- ...
1647
+ // -- rsp[(argc + 1) * 8] : receiver
1648
+ // -----------------------------------
1649
+
1650
+ // If object is not a string, bail out to regular call.
1651
+ if (!object->IsString() || cell != NULL) return heap()->undefined_value();
1652
+
1653
+ const int argc = arguments().immediate();
1654
+
1655
+ Label miss;
1656
+ Label name_miss;
1657
+ Label index_out_of_range;
1658
+ Label* index_out_of_range_label = &index_out_of_range;
1659
+
1660
+ if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) {
1661
+ index_out_of_range_label = &miss;
1662
+ }
1663
+
1664
+ GenerateNameCheck(name, &name_miss);
1665
+
1666
+ // Check that the maps starting from the prototype haven't changed.
1667
+ GenerateDirectLoadGlobalFunctionPrototype(masm(),
1668
+ Context::STRING_FUNCTION_INDEX,
1669
+ rax,
1670
+ &miss);
1671
+ ASSERT(object != holder);
1672
+ CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
1673
+ rbx, rdx, rdi, name, &miss);
1674
+
1675
+ Register receiver = rbx;
1676
+ Register index = rdi;
1677
+ Register scratch = rdx;
1678
+ Register result = rax;
1679
+ __ movq(receiver, Operand(rsp, (argc + 1) * kPointerSize));
1680
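+ // Load the index argument, or default to undefined when no argument is given.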
+ if (argc > 0) {
1681
+ __ movq(index, Operand(rsp, (argc - 0) * kPointerSize));
1682
+ } else {
1683
+ __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1684
+ }
1685
+
1686
+ StringCharCodeAtGenerator char_code_at_generator(receiver,
1687
+ index,
1688
+ scratch,
1689
+ result,
1690
+ &miss, // When not a string.
1691
+ &miss, // When not a number.
1692
+ index_out_of_range_label,
1693
+ STRING_INDEX_IS_NUMBER);
1694
+ char_code_at_generator.GenerateFast(masm());
1695
+ __ ret((argc + 1) * kPointerSize);
1696
+
1697
+ StubRuntimeCallHelper call_helper;
1698
+ char_code_at_generator.GenerateSlow(masm(), call_helper);
1699
+
1700
+ if (index_out_of_range.is_linked()) {
1701
+ __ bind(&index_out_of_range);
1702
+ __ LoadRoot(rax, Heap::kNanValueRootIndex);
1703
+ __ ret((argc + 1) * kPointerSize);
1704
+ }
1705
+
1706
+ __ bind(&miss);
1707
+ // Restore function name in rcx.
1708
+ __ Move(rcx, Handle<String>(name));
1709
+ __ bind(&name_miss);
1710
+ MaybeObject* maybe_result = GenerateMissBranch();
1711
+ if (maybe_result->IsFailure()) return maybe_result;
1712
+
1713
+ // Return the generated code.
1714
+ return GetCode(function);
1715
+ }
1716
+
1717
+
1718
+ MaybeObject* CallStubCompiler::CompileStringCharAtCall(
1719
+ Object* object,
1720
+ JSObject* holder,
1721
+ JSGlobalPropertyCell* cell,
1722
+ JSFunction* function,
1723
+ String* name) {
1724
+ // ----------- S t a t e -------------
1725
+ // -- rcx : function name
1726
+ // -- rsp[0] : return address
1727
+ // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1728
+ // -- ...
1729
+ // -- rsp[(argc + 1) * 8] : receiver
1730
+ // -----------------------------------
1731
+
1732
+ // If object is not a string, bail out to regular call.
1733
+ if (!object->IsString() || cell != NULL) return heap()->undefined_value();
1734
+
1735
+ const int argc = arguments().immediate();
1736
+
1737
+ Label miss;
1738
+ Label name_miss;
1739
+ Label index_out_of_range;
1740
+ Label* index_out_of_range_label = &index_out_of_range;
1741
+
1742
+ if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) {
1743
+ index_out_of_range_label = &miss;
1744
+ }
1745
+
1746
+ GenerateNameCheck(name, &name_miss);
1747
+
1748
+ // Check that the maps starting from the prototype haven't changed.
1749
+ GenerateDirectLoadGlobalFunctionPrototype(masm(),
1750
+ Context::STRING_FUNCTION_INDEX,
1751
+ rax,
1752
+ &miss);
1753
+ ASSERT(object != holder);
1754
+ CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
1755
+ rbx, rdx, rdi, name, &miss);
1756
+
1757
+ Register receiver = rax;
1758
+ Register index = rdi;
1759
+ Register scratch1 = rbx;
1760
+ Register scratch2 = rdx;
1761
+ Register result = rax;
1762
+ __ movq(receiver, Operand(rsp, (argc + 1) * kPointerSize));
1763
+ if (argc > 0) {
1764
+ __ movq(index, Operand(rsp, (argc - 0) * kPointerSize));
1765
+ } else {
1766
+ __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1767
+ }
1768
+
1769
+ StringCharAtGenerator char_at_generator(receiver,
1770
+ index,
1771
+ scratch1,
1772
+ scratch2,
1773
+ result,
1774
+ &miss, // When not a string.
1775
+ &miss, // When not a number.
1776
+ index_out_of_range_label,
1777
+ STRING_INDEX_IS_NUMBER);
1778
+ char_at_generator.GenerateFast(masm());
1779
+ __ ret((argc + 1) * kPointerSize);
1780
+
1781
+ StubRuntimeCallHelper call_helper;
1782
+ char_at_generator.GenerateSlow(masm(), call_helper);
1783
+
1784
+ if (index_out_of_range.is_linked()) {
1785
+ __ bind(&index_out_of_range);
1786
+ __ LoadRoot(rax, Heap::kEmptyStringRootIndex);
1787
+ __ ret((argc + 1) * kPointerSize);
1788
+ }
1789
+
1790
+ __ bind(&miss);
1791
+ // Restore function name in rcx.
1792
+ __ Move(rcx, Handle<String>(name));
1793
+ __ bind(&name_miss);
1794
+ MaybeObject* maybe_result = GenerateMissBranch();
1795
+ if (maybe_result->IsFailure()) return maybe_result;
1796
+
1797
+ // Return the generated code.
1798
+ return GetCode(function);
1799
+ }
1800
+
1801
+
1802
+ MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall(
1803
+ Object* object,
1804
+ JSObject* holder,
1805
+ JSGlobalPropertyCell* cell,
1806
+ JSFunction* function,
1807
+ String* name) {
1808
+ // ----------- S t a t e -------------
1809
+ // -- rcx : function name
1810
+ // -- rsp[0] : return address
1811
+ // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1812
+ // -- ...
1813
+ // -- rsp[(argc + 1) * 8] : receiver
1814
+ // -----------------------------------
1815
+
1816
+ const int argc = arguments().immediate();
1817
+
1818
+ // If the object is not a JSObject or we got an unexpected number of
1819
+ // arguments, bail out to the regular call.
1820
+ if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
1821
+
1822
+ Label miss;
1823
+ GenerateNameCheck(name, &miss);
1824
+
1825
+ if (cell == NULL) {
1826
+ __ movq(rdx, Operand(rsp, 2 * kPointerSize));
1827
+
1828
+ __ JumpIfSmi(rdx, &miss);
1829
+
1830
+ CheckPrototypes(JSObject::cast(object), rdx, holder, rbx, rax, rdi, name,
1831
+ &miss);
1832
+ } else {
1833
+ ASSERT(cell->value() == function);
1834
+ GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
1835
+ GenerateLoadFunctionFromCell(cell, function, &miss);
1836
+ }
1837
+
1838
+ // Load the char code argument.
1839
+ Register code = rbx;
1840
+ __ movq(code, Operand(rsp, 1 * kPointerSize));
1841
+
1842
+ // Check the code is a smi.
1843
+ Label slow;
1844
+ __ JumpIfNotSmi(code, &slow);
1845
+
1846
+ // Convert the smi code to uint16.
1847
+ __ SmiAndConstant(code, code, Smi::FromInt(0xffff));
1848
+
1849
+ StringCharFromCodeGenerator char_from_code_generator(code, rax);
1850
+ char_from_code_generator.GenerateFast(masm());
1851
+ __ ret(2 * kPointerSize);
1852
+
1853
+ StubRuntimeCallHelper call_helper;
1854
+ char_from_code_generator.GenerateSlow(masm(), call_helper);
1855
+
1856
+ // Tail call the full function. We do not have to patch the receiver
1857
+ // because the function makes no use of it.
1858
+ __ bind(&slow);
1859
+ __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
1860
+
1861
+ __ bind(&miss);
1862
+ // rcx: function name.
1863
+ MaybeObject* maybe_result = GenerateMissBranch();
1864
+ if (maybe_result->IsFailure()) return maybe_result;
1865
+
1866
+ // Return the generated code.
1867
+ return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
1868
+ }
1869
+
1870
+
1871
+ MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object,
1872
+ JSObject* holder,
1873
+ JSGlobalPropertyCell* cell,
1874
+ JSFunction* function,
1875
+ String* name) {
1876
+ // TODO(872): implement this.
1877
+ return heap()->undefined_value();
1878
+ }
1879
+
1880
+
1881
+ MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
1882
+ JSObject* holder,
1883
+ JSGlobalPropertyCell* cell,
1884
+ JSFunction* function,
1885
+ String* name) {
1886
+ // ----------- S t a t e -------------
1887
+ // -- rcx : function name
1888
+ // -- rsp[0] : return address
1889
+ // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1890
+ // -- ...
1891
+ // -- rsp[(argc + 1) * 8] : receiver
1892
+ // -----------------------------------
1893
+
1894
+ const int argc = arguments().immediate();
1895
+
1896
+ // If the object is not a JSObject or we got an unexpected number of
1897
+ // arguments, bail out to the regular call.
1898
+ if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
1899
+
1900
+ Label miss;
1901
+ GenerateNameCheck(name, &miss);
1902
+
1903
+ if (cell == NULL) {
1904
+ __ movq(rdx, Operand(rsp, 2 * kPointerSize));
1905
+
1906
+ __ JumpIfSmi(rdx, &miss);
1907
+
1908
+ CheckPrototypes(JSObject::cast(object), rdx, holder, rbx, rax, rdi, name,
1909
+ &miss);
1910
+ } else {
1911
+ ASSERT(cell->value() == function);
1912
+ GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
1913
+ GenerateLoadFunctionFromCell(cell, function, &miss);
1914
+ }
1915
+
1916
+ // Load the (only) argument into rax.
1917
+ __ movq(rax, Operand(rsp, 1 * kPointerSize));
1918
+
1919
+ // Check if the argument is a smi.
1920
+ Label not_smi;
1921
+ STATIC_ASSERT(kSmiTag == 0);
1922
+ __ JumpIfNotSmi(rax, &not_smi);
1923
+ __ SmiToInteger32(rax, rax);
1924
+
1925
+ // Set rbx to 1...1 (== -1) if the argument is negative, or to 0...0
1926
+ // otherwise.
1927
+ __ movl(rbx, rax);
1928
+ __ sarl(rbx, Immediate(kBitsPerInt - 1));
1929
+
1930
+ // Do bitwise not or do nothing depending on rbx.
1931
+ __ xorl(rax, rbx);
1932
+
1933
+ // Add 1 or do nothing depending on rbx.
1934
+ __ subl(rax, rbx);
1935
+
1936
+ // If the result is still negative, go to the slow case.
1937
+ // This only happens for the most negative smi.
1938
+ Label slow;
1939
+ __ j(negative, &slow);
1940
+
1941
+ // Smi case done.
1942
+ __ Integer32ToSmi(rax, rax);
1943
+ __ ret(2 * kPointerSize);
1944
+
1945
+ // Check if the argument is a heap number and load its value.
1946
+ __ bind(&not_smi);
1947
+ __ CheckMap(rax, factory()->heap_number_map(), &slow, true);
1948
+ __ movq(rbx, FieldOperand(rax, HeapNumber::kValueOffset));
1949
+
1950
+ // Check the sign of the argument. If the argument is positive,
1951
+ // just return it.
1952
+ Label negative_sign;
1953
+ const int sign_mask_shift =
1954
+ (HeapNumber::kExponentOffset - HeapNumber::kValueOffset) * kBitsPerByte;
1955
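+ // Build a mask with only the sign bit of the double value set.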
+ __ movq(rdi, static_cast<int64_t>(HeapNumber::kSignMask) << sign_mask_shift,
1956
+ RelocInfo::NONE);
1957
+ __ testq(rbx, rdi);
1958
+ __ j(not_zero, &negative_sign);
1959
+ __ ret(2 * kPointerSize);
1960
+
1961
+ // If the argument is negative, clear the sign, and return a new
1962
+ // number. We still have the sign mask in rdi.
1963
+ __ bind(&negative_sign);
1964
+ __ xor_(rbx, rdi);
1965
+ __ AllocateHeapNumber(rax, rdx, &slow);
1966
+ __ movq(FieldOperand(rax, HeapNumber::kValueOffset), rbx);
1967
+ __ ret(2 * kPointerSize);
1968
+
1969
+ // Tail call the full function. We do not have to patch the receiver
1970
+ // because the function makes no use of it.
1971
+ __ bind(&slow);
1972
+ __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
1973
+
1974
+ __ bind(&miss);
1975
+ // rcx: function name.
1976
+ MaybeObject* maybe_result = GenerateMissBranch();
1977
+ if (maybe_result->IsFailure()) return maybe_result;
1978
+
1979
+ // Return the generated code.
1980
+ return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
1981
+ }
1982
+
1983
+
1984
+ MaybeObject* CallStubCompiler::CompileFastApiCall(
1985
+ const CallOptimization& optimization,
1986
+ Object* object,
1987
+ JSObject* holder,
1988
+ JSGlobalPropertyCell* cell,
1989
+ JSFunction* function,
1990
+ String* name) {
1991
+ ASSERT(optimization.is_simple_api_call());
1992
+ // Bail out if object is a global object as we don't want to
1993
+ // repatch it to global receiver.
1994
+ if (object->IsGlobalObject()) return heap()->undefined_value();
1995
+ if (cell != NULL) return heap()->undefined_value();
1996
+ int depth = optimization.GetPrototypeDepthOfExpectedType(
1997
+ JSObject::cast(object), holder);
1998
+ if (depth == kInvalidProtoDepth) return heap()->undefined_value();
1999
+
2000
+ Label miss, miss_before_stack_reserved;
2001
+
2002
+ GenerateNameCheck(name, &miss_before_stack_reserved);
2003
+
2004
+ // Get the receiver from the stack.
2005
+ const int argc = arguments().immediate();
2006
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
2007
+
2008
+ // Check that the receiver isn't a smi.
2009
+ __ JumpIfSmi(rdx, &miss_before_stack_reserved);
2010
+
2011
+ Counters* counters = isolate()->counters();
2012
+ __ IncrementCounter(counters->call_const(), 1);
2013
+ __ IncrementCounter(counters->call_const_fast_api(), 1);
2014
+
2015
+ // Allocate space for v8::Arguments implicit values. Must be initialized
2016
+ // before calling any runtime function.
2017
+ __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
2018
+
2019
+ // Check that the maps haven't changed and find a Holder as a side effect.
2020
+ CheckPrototypes(JSObject::cast(object), rdx, holder,
2021
+ rbx, rax, rdi, name, depth, &miss);
2022
+
2023
+ // Move the return address on top of the stack.
2024
+ __ movq(rax, Operand(rsp, 3 * kPointerSize));
2025
+ __ movq(Operand(rsp, 0 * kPointerSize), rax);
2026
+
2027
+ MaybeObject* result = GenerateFastApiCall(masm(), optimization, argc);
2028
+ if (result->IsFailure()) return result;
2029
+
2030
+ __ bind(&miss);
2031
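+ // Release the stack space reserved for the v8::Arguments implicit values.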
+ __ addq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
2032
+
2033
+ __ bind(&miss_before_stack_reserved);
2034
+ MaybeObject* maybe_result = GenerateMissBranch();
2035
+ if (maybe_result->IsFailure()) return maybe_result;
2036
+
2037
+ // Return the generated code.
2038
+ return GetCode(function);
2039
+ }
2040
+
2041
+
2042
+ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
2043
+ JSObject* holder,
2044
+ JSFunction* function,
2045
+ String* name,
2046
+ CheckType check) {
2047
+ // ----------- S t a t e -------------
2048
+ // rcx : function name
2049
+ // rsp[0] : return address
2050
+ // rsp[8] : argument argc
2051
+ // rsp[16] : argument argc - 1
2052
+ // ...
2053
+ // rsp[argc * 8] : argument 1
2054
+ // rsp[(argc + 1) * 8] : argument 0 = receiver
2055
+ // -----------------------------------
2056
+
2057
+ if (HasCustomCallGenerator(function)) {
2058
+ MaybeObject* maybe_result = CompileCustomCall(
2059
+ object, holder, NULL, function, name);
2060
+ Object* result;
2061
+ if (!maybe_result->ToObject(&result)) return maybe_result;
2062
+ // undefined means bail out to regular compiler.
2063
+ if (!result->IsUndefined()) return result;
2064
+ }
2065
+
2066
+ Label miss;
2067
+
2068
+ GenerateNameCheck(name, &miss);
2069
+
2070
+ // Get the receiver from the stack.
2071
+ const int argc = arguments().immediate();
2072
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
2073
+
2074
+ // Check that the receiver isn't a smi.
2075
+ if (check != NUMBER_CHECK) {
2076
+ __ JumpIfSmi(rdx, &miss);
2077
+ }
2078
+
2079
+ // Make sure that it's okay not to patch the on stack receiver
2080
+ // unless we're doing a receiver map check.
2081
+ ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
2082
+
2083
+ Counters* counters = isolate()->counters();
2084
+ SharedFunctionInfo* function_info = function->shared();
2085
+ switch (check) {
2086
+ case RECEIVER_MAP_CHECK:
2087
+ __ IncrementCounter(counters->call_const(), 1);
2088
+
2089
+ // Check that the maps haven't changed.
2090
+ CheckPrototypes(JSObject::cast(object), rdx, holder,
2091
+ rbx, rax, rdi, name, &miss);
2092
+
2093
+ // Patch the receiver on the stack with the global proxy if
2094
+ // necessary.
2095
+ if (object->IsGlobalObject()) {
2096
+ __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
2097
+ __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
2098
+ }
2099
+ break;
2100
+
2101
+ case STRING_CHECK:
2102
+ if (!function->IsBuiltin() && !function_info->strict_mode()) {
2103
+ // Calling non-strict non-builtins with a value as the receiver
2104
+ // requires boxing.
2105
+ __ jmp(&miss);
2106
+ } else {
2107
+ // Check that the object is a string or a symbol.
2108
+ __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, rax);
2109
+ __ j(above_equal, &miss);
2110
+ // Check that the maps starting from the prototype haven't changed.
2111
+ GenerateDirectLoadGlobalFunctionPrototype(
2112
+ masm(), Context::STRING_FUNCTION_INDEX, rax, &miss);
2113
+ CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
2114
+ rbx, rdx, rdi, name, &miss);
2115
+ }
2116
+ break;
2117
+
2118
+ case NUMBER_CHECK: {
2119
+ if (!function->IsBuiltin() && !function_info->strict_mode()) {
2120
+ // Calling non-strict non-builtins with a value as the receiver
2121
+ // requires boxing.
2122
+ __ jmp(&miss);
2123
+ } else {
2124
+ Label fast;
2125
+ // Check that the object is a smi or a heap number.
2126
+ __ JumpIfSmi(rdx, &fast);
2127
+ __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rax);
2128
+ __ j(not_equal, &miss);
2129
+ __ bind(&fast);
2130
+ // Check that the maps starting from the prototype haven't changed.
2131
+ GenerateDirectLoadGlobalFunctionPrototype(
2132
+ masm(), Context::NUMBER_FUNCTION_INDEX, rax, &miss);
2133
+ CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
2134
+ rbx, rdx, rdi, name, &miss);
2135
+ }
2136
+ break;
2137
+ }
2138
+
2139
+ case BOOLEAN_CHECK: {
2140
+ if (!function->IsBuiltin() && !function_info->strict_mode()) {
2141
+ // Calling non-strict non-builtins with a value as the receiver
2142
+ // requires boxing.
2143
+ __ jmp(&miss);
2144
+ } else {
2145
+ Label fast;
2146
+ // Check that the object is a boolean.
2147
+ __ CompareRoot(rdx, Heap::kTrueValueRootIndex);
2148
+ __ j(equal, &fast);
2149
+ __ CompareRoot(rdx, Heap::kFalseValueRootIndex);
2150
+ __ j(not_equal, &miss);
2151
+ __ bind(&fast);
2152
+ // Check that the maps starting from the prototype haven't changed.
2153
+ GenerateDirectLoadGlobalFunctionPrototype(
2154
+ masm(), Context::BOOLEAN_FUNCTION_INDEX, rax, &miss);
2155
+ CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
2156
+ rbx, rdx, rdi, name, &miss);
2157
+ }
2158
+ break;
2159
+ }
2160
+
2161
+ default:
2162
+ UNREACHABLE();
2163
+ }
2164
+
2165
+ __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
2166
+
2167
+ // Handle call cache miss.
2168
+ __ bind(&miss);
2169
+ MaybeObject* maybe_result = GenerateMissBranch();
2170
+ if (maybe_result->IsFailure()) return maybe_result;
2171
+
2172
+ // Return the generated code.
2173
+ return GetCode(function);
2174
+ }
2175
+
2176
+
2177
+ MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
2178
+ JSObject* holder,
2179
+ String* name) {
2180
+ // ----------- S t a t e -------------
2181
+ // rcx : function name
2182
+ // rsp[0] : return address
2183
+ // rsp[8] : argument argc
2184
+ // rsp[16] : argument argc - 1
2185
+ // ...
2186
+ // rsp[argc * 8] : argument 1
2187
+ // rsp[(argc + 1) * 8] : argument 0 = receiver
2188
+ // -----------------------------------
2189
+ Label miss;
2190
+
2191
+ GenerateNameCheck(name, &miss);
2192
+
2193
+ // Get the number of arguments.
2194
+ const int argc = arguments().immediate();
2195
+
2196
+ LookupResult lookup;
2197
+ LookupPostInterceptor(holder, name, &lookup);
2198
+
2199
+ // Get the receiver from the stack.
2200
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
2201
+
2202
+ CallInterceptorCompiler compiler(this, arguments(), rcx);
2203
+ MaybeObject* result = compiler.Compile(masm(),
2204
+ object,
2205
+ holder,
2206
+ name,
2207
+ &lookup,
2208
+ rdx,
2209
+ rbx,
2210
+ rdi,
2211
+ rax,
2212
+ &miss);
2213
+ if (result->IsFailure()) return result;
2214
+
2215
+ // Restore receiver.
2216
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
2217
+
2218
+ // Check that the function really is a function.
2219
+ __ JumpIfSmi(rax, &miss);
2220
+ __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
2221
+ __ j(not_equal, &miss);
2222
+
2223
+ // Patch the receiver on the stack with the global proxy if
2224
+ // necessary.
2225
+ if (object->IsGlobalObject()) {
2226
+ __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
2227
+ __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
2228
+ }
2229
+
2230
+ // Invoke the function.
2231
+ __ movq(rdi, rax);
2232
+ __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION);
2233
+
2234
+ // Handle load cache miss.
2235
+ __ bind(&miss);
2236
+ MaybeObject* maybe_result = GenerateMissBranch();
2237
+ if (maybe_result->IsFailure()) return maybe_result;
2238
+
2239
+ // Return the generated code.
2240
+ return GetCode(INTERCEPTOR, name);
2241
+ }
2242
+
2243
+
2244
+ MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
2245
+ GlobalObject* holder,
2246
+ JSGlobalPropertyCell* cell,
2247
+ JSFunction* function,
2248
+ String* name) {
2249
+ // ----------- S t a t e -------------
2250
+ // rcx : function name
2251
+ // rsp[0] : return address
2252
+ // rsp[8] : argument argc
2253
+ // rsp[16] : argument argc - 1
2254
+ // ...
2255
+ // rsp[argc * 8] : argument 1
2256
+ // rsp[(argc + 1) * 8] : argument 0 = receiver
2257
+ // -----------------------------------
2258
+
2259
+ if (HasCustomCallGenerator(function)) {
2260
+ MaybeObject* maybe_result = CompileCustomCall(
2261
+ object, holder, cell, function, name);
2262
+ Object* result;
2263
+ if (!maybe_result->ToObject(&result)) return maybe_result;
2264
+ // undefined means bail out to regular compiler.
2265
+ if (!result->IsUndefined()) return result;
2266
+ }
2267
+
2268
+ Label miss;
2269
+
2270
+ GenerateNameCheck(name, &miss);
2271
+
2272
+ // Get the number of arguments.
2273
+ const int argc = arguments().immediate();
2274
+
2275
+ GenerateGlobalReceiverCheck(object, holder, name, &miss);
2276
+
2277
+ GenerateLoadFunctionFromCell(cell, function, &miss);
2278
+
2279
+ // Patch the receiver on the stack with the global proxy.
2280
+ if (object->IsGlobalObject()) {
2281
+ __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
2282
+ __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
2283
+ }
2284
+
2285
+ // Set up the context (the function is already in rdi).
2286
+ __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
2287
+
2288
+ // Jump to the cached code (tail call).
2289
+ Counters* counters = isolate()->counters();
2290
+ __ IncrementCounter(counters->call_global_inline(), 1);
2291
+ ASSERT(function->is_compiled());
2292
+ ParameterCount expected(function->shared()->formal_parameter_count());
2293
+ if (V8::UseCrankshaft()) {
2294
+ // TODO(kasperl): For now, we always call indirectly through the
2295
+ // code field in the function to allow recompilation to take effect
2296
+ // without changing any of the call sites.
2297
+ __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
2298
+ __ InvokeCode(rdx, expected, arguments(), JUMP_FUNCTION);
2299
+ } else {
2300
+ Handle<Code> code(function->code());
2301
+ __ InvokeCode(code, expected, arguments(),
2302
+ RelocInfo::CODE_TARGET, JUMP_FUNCTION);
2303
+ }
2304
+ // Handle call cache miss.
2305
+ __ bind(&miss);
2306
+ __ IncrementCounter(counters->call_global_inline_miss(), 1);
2307
+ MaybeObject* maybe_result = GenerateMissBranch();
2308
+ if (maybe_result->IsFailure()) return maybe_result;
2309
+
2310
+ // Return the generated code.
2311
+ return GetCode(NORMAL, name);
2312
+ }
2313
+
2314
+
2315
+ MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
2316
+ int index,
2317
+ Map* transition,
2318
+ String* name) {
2319
+ // ----------- S t a t e -------------
2320
+ // -- rax : value
2321
+ // -- rcx : name
2322
+ // -- rdx : receiver
2323
+ // -- rsp[0] : return address
2324
+ // -----------------------------------
2325
+ Label miss;
2326
+
2327
+ // Generate store field code. Preserves receiver and name on jump to miss.
2328
+ GenerateStoreField(masm(),
2329
+ object,
2330
+ index,
2331
+ transition,
2332
+ rdx, rcx, rbx,
2333
+ &miss);
2334
+
2335
+ // Handle store cache miss.
2336
+ __ bind(&miss);
2337
+ Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
2338
+ __ Jump(ic, RelocInfo::CODE_TARGET);
2339
+
2340
+ // Return the generated code.
2341
+ return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
2342
+ }
2343
+
2344
+
2345
+ MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
2346
+ AccessorInfo* callback,
2347
+ String* name) {
2348
+ // ----------- S t a t e -------------
2349
+ // -- rax : value
2350
+ // -- rcx : name
2351
+ // -- rdx : receiver
2352
+ // -- rsp[0] : return address
2353
+ // -----------------------------------
2354
+ Label miss;
2355
+
2356
+ // Check that the object isn't a smi.
2357
+ __ JumpIfSmi(rdx, &miss);
2358
+
2359
+ // Check that the map of the object hasn't changed.
2360
+ __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2361
+ Handle<Map>(object->map()));
2362
+ __ j(not_equal, &miss);
2363
+
2364
+ // Perform global security token check if needed.
2365
+ if (object->IsJSGlobalProxy()) {
2366
+ __ CheckAccessGlobalProxy(rdx, rbx, &miss);
2367
+ }
2368
+
2369
+ // Stub never generated for non-global objects that require access
2370
+ // checks.
2371
+ ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2372
+
2373
+ __ pop(rbx); // remove the return address
2374
+ __ push(rdx); // receiver
2375
+ __ Push(Handle<AccessorInfo>(callback)); // callback info
2376
+ __ push(rcx); // name
2377
+ __ push(rax); // value
2378
+ __ push(rbx); // restore return address
2379
+
2380
+ // Do tail-call to the runtime system.
2381
+ ExternalReference store_callback_property =
2382
+ ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
2383
+ __ TailCallExternalReference(store_callback_property, 4, 1);
2384
+
2385
+ // Handle store cache miss.
2386
+ __ bind(&miss);
2387
+ Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
2388
+ __ Jump(ic, RelocInfo::CODE_TARGET);
2389
+
2390
+ // Return the generated code.
2391
+ return GetCode(CALLBACKS, name);
2392
+ }
2393
+
2394
+
2395
+ MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
2396
+ String* name) {
2397
+ // ----------- S t a t e -------------
2398
+ // -- rax : value
2399
+ // -- rcx : name
2400
+ // -- rdx : receiver
2401
+ // -- rsp[0] : return address
2402
+ // -----------------------------------
2403
+ Label miss;
2404
+
2405
+ // Check that the object isn't a smi.
2406
+ __ JumpIfSmi(rdx, &miss);
2407
+
2408
+ // Check that the map of the object hasn't changed.
2409
+ __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2410
+ Handle<Map>(receiver->map()));
2411
+ __ j(not_equal, &miss);
2412
+
2413
+ // Perform global security token check if needed.
2414
+ if (receiver->IsJSGlobalProxy()) {
2415
+ __ CheckAccessGlobalProxy(rdx, rbx, &miss);
2416
+ }
2417
+
2418
+ // Stub never generated for non-global objects that require access
2419
+ // checks.
2420
+ ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());
2421
+
2422
+ __ pop(rbx); // remove the return address
2423
+ __ push(rdx); // receiver
2424
+ __ push(rcx); // name
2425
+ __ push(rax); // value
2426
+ __ Push(Smi::FromInt(strict_mode_));
2427
+ __ push(rbx); // restore return address
2428
+
2429
+ // Do tail-call to the runtime system.
2430
+ ExternalReference store_ic_property =
2431
+ ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
2432
+ __ TailCallExternalReference(store_ic_property, 4, 1);
2433
+
2434
+ // Handle store cache miss.
2435
+ __ bind(&miss);
2436
+ Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
2437
+ __ Jump(ic, RelocInfo::CODE_TARGET);
2438
+
2439
+ // Return the generated code.
2440
+ return GetCode(INTERCEPTOR, name);
2441
+ }
2442
+
2443
+
2444
+ MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
2445
+ JSGlobalPropertyCell* cell,
2446
+ String* name) {
2447
+ // ----------- S t a t e -------------
2448
+ // -- rax : value
2449
+ // -- rcx : name
2450
+ // -- rdx : receiver
2451
+ // -- rsp[0] : return address
2452
+ // -----------------------------------
2453
+ Label miss;
2454
+
2455
+ // Check that the map of the global has not changed.
2456
+ __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2457
+ Handle<Map>(object->map()));
2458
+ __ j(not_equal, &miss);
2459
+
2460
+ // Check that the value in the cell is not the hole. If it is, this
2461
+ // cell could have been deleted and reintroducing the global needs
2462
+ // to update the property details in the property dictionary of the
2463
+ // global object. We bail out to the runtime system to do that.
2464
+ __ Move(rbx, Handle<JSGlobalPropertyCell>(cell));
2465
+ __ CompareRoot(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset),
2466
+ Heap::kTheHoleValueRootIndex);
2467
+ __ j(equal, &miss);
2468
+
2469
+ // Store the value in the cell.
2470
+ __ movq(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset), rax);
2471
+
2472
+ // Return the value (register rax).
2473
+ Counters* counters = isolate()->counters();
2474
+ __ IncrementCounter(counters->named_store_global_inline(), 1);
2475
+ __ ret(0);
2476
+
2477
+ // Handle store cache miss.
2478
+ __ bind(&miss);
2479
+ __ IncrementCounter(counters->named_store_global_inline_miss(), 1);
2480
+ Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
2481
+ __ Jump(ic, RelocInfo::CODE_TARGET);
2482
+
2483
+ // Return the generated code.
2484
+ return GetCode(NORMAL, name);
2485
+ }
2486
+
2487
+
2488
+ MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
2489
+ int index,
2490
+ Map* transition,
2491
+ String* name) {
2492
+ // ----------- S t a t e -------------
2493
+ // -- rax : value
2494
+ // -- rcx : key
2495
+ // -- rdx : receiver
2496
+ // -- rsp[0] : return address
2497
+ // -----------------------------------
2498
+ Label miss;
2499
+
2500
+ Counters* counters = isolate()->counters();
2501
+ __ IncrementCounter(counters->keyed_store_field(), 1);
2502
+
2503
+ // Check that the name has not changed.
2504
+ __ Cmp(rcx, Handle<String>(name));
2505
+ __ j(not_equal, &miss);
2506
+
2507
+ // Generate store field code. Preserves receiver and name on jump to miss.
2508
+ GenerateStoreField(masm(),
2509
+ object,
2510
+ index,
2511
+ transition,
2512
+ rdx, rcx, rbx,
2513
+ &miss);
2514
+
2515
+ // Handle store cache miss.
2516
+ __ bind(&miss);
2517
+ __ DecrementCounter(counters->keyed_store_field(), 1);
2518
+ Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
2519
+ __ Jump(ic, RelocInfo::CODE_TARGET);
2520
+
2521
+ // Return the generated code.
2522
+ return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
2523
+ }
2524
+
2525
+
2526
+ MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
2527
+ JSObject* receiver) {
2528
+ // ----------- S t a t e -------------
2529
+ // -- rax : value
2530
+ // -- rcx : key
2531
+ // -- rdx : receiver
2532
+ // -- rsp[0] : return address
2533
+ // -----------------------------------
2534
+ Label miss;
2535
+
2536
+ // Check that the receiver isn't a smi.
2537
+ __ JumpIfSmi(rdx, &miss);
2538
+
2539
+ // Check that the map matches.
2540
+ __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2541
+ Handle<Map>(receiver->map()));
2542
+ __ j(not_equal, &miss);
2543
+
2544
+ // Check that the key is a smi.
2545
+ __ JumpIfNotSmi(rcx, &miss);
2546
+
2547
+ // Get the elements array and make sure it is a fast elements array, not a copy-on-write ('cow') array.
2548
+ __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
2549
+ __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset),
2550
+ factory()->fixed_array_map());
2551
+ __ j(not_equal, &miss);
2552
+
2553
+ // Check that the key is within bounds.
2554
+ if (receiver->IsJSArray()) {
2555
+ __ SmiCompare(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
2556
+ __ j(above_equal, &miss);
2557
+ } else {
2558
+ __ SmiCompare(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
2559
+ __ j(above_equal, &miss);
2560
+ }
2561
+
2562
+ // Do the store and update the write barrier. Make sure to preserve
2563
+ // the value in register rax.
2564
+ __ movq(rdx, rax);
2565
+ __ SmiToInteger32(rcx, rcx);
2566
+ __ movq(FieldOperand(rdi, rcx, times_pointer_size, FixedArray::kHeaderSize),
2567
+ rax);
2568
+ __ RecordWrite(rdi, 0, rdx, rcx);
2569
+
2570
+ // Done.
2571
+ __ ret(0);
2572
+
2573
+ // Handle store cache miss.
2574
+ __ bind(&miss);
2575
+ Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
2576
+ __ jmp(ic, RelocInfo::CODE_TARGET);
2577
+
2578
+ // Return the generated code.
2579
+ return GetCode(NORMAL, NULL);
2580
+ }
2581
+
2582
+
2583
+ MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
2584
+ JSObject* object,
2585
+ JSObject* last) {
2586
+ // ----------- S t a t e -------------
2587
+ // -- rax : receiver
2588
+ // -- rcx : name
2589
+ // -- rsp[0] : return address
2590
+ // -----------------------------------
2591
+ Label miss;
2592
+
2593
+ // Check that the receiver is not a smi.
2594
+ __ JumpIfSmi(rax, &miss);
2595
+
2596
+ // Check the maps of the full prototype chain. Also check that
2597
+ // global property cells up to (but not including) the last object
2598
+ // in the prototype chain are empty.
2599
+ CheckPrototypes(object, rax, last, rbx, rdx, rdi, name, &miss);
2600
+
2601
+ // If the last object in the prototype chain is a global object,
2602
+ // check that the global property cell is empty.
2603
+ if (last->IsGlobalObject()) {
2604
+ MaybeObject* cell = GenerateCheckPropertyCell(masm(),
2605
+ GlobalObject::cast(last),
2606
+ name,
2607
+ rdx,
2608
+ &miss);
2609
+ if (cell->IsFailure()) {
2610
+ miss.Unuse();
2611
+ return cell;
2612
+ }
2613
+ }
2614
+
2615
+ // Return undefined if maps of the full prototype chain are still the
2616
+ // same and no global property with this name contains a value.
2617
+ __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
2618
+ __ ret(0);
2619
+
2620
+ __ bind(&miss);
2621
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
2622
+
2623
+ // Return the generated code.
2624
+ return GetCode(NONEXISTENT, heap()->empty_string());
2625
+ }
2626
+
2627
+
2628
+ MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object,
2629
+ JSObject* holder,
2630
+ int index,
2631
+ String* name) {
2632
+ // ----------- S t a t e -------------
2633
+ // -- rax : receiver
2634
+ // -- rcx : name
2635
+ // -- rsp[0] : return address
2636
+ // -----------------------------------
2637
+ Label miss;
2638
+
2639
+ GenerateLoadField(object, holder, rax, rbx, rdx, rdi, index, name, &miss);
2640
+ __ bind(&miss);
2641
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
2642
+
2643
+ // Return the generated code.
2644
+ return GetCode(FIELD, name);
2645
+ }
2646
+
2647
+
2648
+ MaybeObject* LoadStubCompiler::CompileLoadCallback(String* name,
2649
+ JSObject* object,
2650
+ JSObject* holder,
2651
+ AccessorInfo* callback) {
2652
+ // ----------- S t a t e -------------
2653
+ // -- rax : receiver
2654
+ // -- rcx : name
2655
+ // -- rsp[0] : return address
2656
+ // -----------------------------------
2657
+ Label miss;
2658
+
2659
+ MaybeObject* result = GenerateLoadCallback(object, holder, rax, rcx, rdx, rbx,
2660
+ rdi, callback, name, &miss);
2661
+ if (result->IsFailure()) {
2662
+ miss.Unuse();
2663
+ return result;
2664
+ }
2665
+
2666
+ __ bind(&miss);
2667
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
2668
+
2669
+ // Return the generated code.
2670
+ return GetCode(CALLBACKS, name);
2671
+ }
2672
+
2673
+
2674
+ MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object,
2675
+ JSObject* holder,
2676
+ Object* value,
2677
+ String* name) {
2678
+ // ----------- S t a t e -------------
2679
+ // -- rax : receiver
2680
+ // -- rcx : name
2681
+ // -- rsp[0] : return address
2682
+ // -----------------------------------
2683
+ Label miss;
2684
+
2685
+ GenerateLoadConstant(object, holder, rax, rbx, rdx, rdi, value, name, &miss);
2686
+ __ bind(&miss);
2687
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
2688
+
2689
+ // Return the generated code.
2690
+ return GetCode(CONSTANT_FUNCTION, name);
2691
+ }
2692
+
2693
+
2694
+ MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
2695
+ JSObject* holder,
2696
+ String* name) {
2697
+ // ----------- S t a t e -------------
2698
+ // -- rax : receiver
2699
+ // -- rcx : name
2700
+ // -- rsp[0] : return address
2701
+ // -----------------------------------
2702
+ Label miss;
2703
+
2704
+ LookupResult lookup;
2705
+ LookupPostInterceptor(holder, name, &lookup);
2706
+
2707
+ // TODO(368): Compile in the whole chain: all the interceptors in
2708
+ // prototypes and ultimate answer.
2709
+ GenerateLoadInterceptor(receiver,
2710
+ holder,
2711
+ &lookup,
2712
+ rax,
2713
+ rcx,
2714
+ rdx,
2715
+ rbx,
2716
+ rdi,
2717
+ name,
2718
+ &miss);
2719
+
2720
+ __ bind(&miss);
2721
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
2722
+
2723
+ // Return the generated code.
2724
+ return GetCode(INTERCEPTOR, name);
2725
+ }
2726
+
2727
+
2728
+ MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
2729
+ GlobalObject* holder,
2730
+ JSGlobalPropertyCell* cell,
2731
+ String* name,
2732
+ bool is_dont_delete) {
2733
+ // ----------- S t a t e -------------
2734
+ // -- rax : receiver
2735
+ // -- rcx : name
2736
+ // -- rsp[0] : return address
2737
+ // -----------------------------------
2738
+ Label miss;
2739
+
2740
+ // If the object is the holder then we know that it's a global
2741
+ // object which can only happen for contextual loads. In this case,
2742
+ // the receiver cannot be a smi.
2743
+ if (object != holder) {
2744
+ __ JumpIfSmi(rax, &miss);
2745
+ }
2746
+
2747
+ // Check that the maps haven't changed.
2748
+ CheckPrototypes(object, rax, holder, rbx, rdx, rdi, name, &miss);
2749
+
2750
+ // Get the value from the cell.
2751
+ __ Move(rbx, Handle<JSGlobalPropertyCell>(cell));
2752
+ __ movq(rbx, FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset));
2753
+
2754
+ // Check for deleted property if property can actually be deleted.
2755
+ if (!is_dont_delete) {
2756
+ __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
2757
+ __ j(equal, &miss);
2758
+ } else if (FLAG_debug_code) {
2759
+ __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
2760
+ __ Check(not_equal, "DontDelete cells can't contain the hole");
2761
+ }
2762
+
2763
+ Counters* counters = isolate()->counters();
2764
+ __ IncrementCounter(counters->named_load_global_stub(), 1);
2765
+ __ movq(rax, rbx);
2766
+ __ ret(0);
2767
+
2768
+ __ bind(&miss);
2769
+ __ IncrementCounter(counters->named_load_global_stub_miss(), 1);
2770
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
2771
+
2772
+ // Return the generated code.
2773
+ return GetCode(NORMAL, name);
2774
+ }
2775
+
2776
+
2777
+ MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
2778
+ JSObject* receiver,
2779
+ JSObject* holder,
2780
+ int index) {
2781
+ // ----------- S t a t e -------------
2782
+ // -- rax : key
2783
+ // -- rdx : receiver
2784
+ // -- rsp[0] : return address
2785
+ // -----------------------------------
2786
+ Label miss;
2787
+
2788
+ Counters* counters = isolate()->counters();
2789
+ __ IncrementCounter(counters->keyed_load_field(), 1);
2790
+
2791
+ // Check that the name has not changed.
2792
+ __ Cmp(rax, Handle<String>(name));
2793
+ __ j(not_equal, &miss);
2794
+
2795
+ GenerateLoadField(receiver, holder, rdx, rbx, rcx, rdi, index, name, &miss);
2796
+
2797
+ __ bind(&miss);
2798
+ __ DecrementCounter(counters->keyed_load_field(), 1);
2799
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2800
+
2801
+ // Return the generated code.
2802
+ return GetCode(FIELD, name);
2803
+ }
2804
+
2805
+
2806
+ MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback(
2807
+ String* name,
2808
+ JSObject* receiver,
2809
+ JSObject* holder,
2810
+ AccessorInfo* callback) {
2811
+ // ----------- S t a t e -------------
2812
+ // -- rax : key
2813
+ // -- rdx : receiver
2814
+ // -- rsp[0] : return address
2815
+ // -----------------------------------
2816
+ Label miss;
2817
+
2818
+ Counters* counters = isolate()->counters();
2819
+ __ IncrementCounter(counters->keyed_load_callback(), 1);
2820
+
2821
+ // Check that the name has not changed.
2822
+ __ Cmp(rax, Handle<String>(name));
2823
+ __ j(not_equal, &miss);
2824
+
2825
+ MaybeObject* result = GenerateLoadCallback(receiver, holder, rdx, rax, rbx,
2826
+ rcx, rdi, callback, name, &miss);
2827
+ if (result->IsFailure()) {
2828
+ miss.Unuse();
2829
+ return result;
2830
+ }
2831
+
2832
+ __ bind(&miss);
2833
+
2834
+ __ DecrementCounter(counters->keyed_load_callback(), 1);
2835
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2836
+
2837
+ // Return the generated code.
2838
+ return GetCode(CALLBACKS, name);
2839
+ }
2840
+
2841
+
2842
+ MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
2843
+ JSObject* receiver,
2844
+ JSObject* holder,
2845
+ Object* value) {
2846
+ // ----------- S t a t e -------------
2847
+ // -- rax : key
2848
+ // -- rdx : receiver
2849
+ // -- rsp[0] : return address
2850
+ // -----------------------------------
2851
+ Label miss;
2852
+
2853
+ Counters* counters = isolate()->counters();
2854
+ __ IncrementCounter(counters->keyed_load_constant_function(), 1);
2855
+
2856
+ // Check that the name has not changed.
2857
+ __ Cmp(rax, Handle<String>(name));
2858
+ __ j(not_equal, &miss);
2859
+
2860
+ GenerateLoadConstant(receiver, holder, rdx, rbx, rcx, rdi,
2861
+ value, name, &miss);
2862
+ __ bind(&miss);
2863
+ __ DecrementCounter(counters->keyed_load_constant_function(), 1);
2864
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2865
+
2866
+ // Return the generated code.
2867
+ return GetCode(CONSTANT_FUNCTION, name);
2868
+ }
2869
+
2870
+
2871
+ MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
2872
+ JSObject* holder,
2873
+ String* name) {
2874
+ // ----------- S t a t e -------------
2875
+ // -- rax : key
2876
+ // -- rdx : receiver
2877
+ // -- rsp[0] : return address
2878
+ // -----------------------------------
2879
+ Label miss;
2880
+
2881
+ Counters* counters = isolate()->counters();
2882
+ __ IncrementCounter(counters->keyed_load_interceptor(), 1);
2883
+
2884
+ // Check that the name has not changed.
2885
+ __ Cmp(rax, Handle<String>(name));
2886
+ __ j(not_equal, &miss);
2887
+
2888
+ LookupResult lookup;
2889
+ LookupPostInterceptor(holder, name, &lookup);
2890
+ GenerateLoadInterceptor(receiver,
2891
+ holder,
2892
+ &lookup,
2893
+ rdx,
2894
+ rax,
2895
+ rcx,
2896
+ rbx,
2897
+ rdi,
2898
+ name,
2899
+ &miss);
2900
+ __ bind(&miss);
2901
+ __ DecrementCounter(counters->keyed_load_interceptor(), 1);
2902
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2903
+
2904
+ // Return the generated code.
2905
+ return GetCode(INTERCEPTOR, name);
2906
+ }
2907
+
2908
+
2909
+ MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
2910
+ // ----------- S t a t e -------------
2911
+ // -- rax : key
2912
+ // -- rdx : receiver
2913
+ // -- rsp[0] : return address
2914
+ // -----------------------------------
2915
+ Label miss;
2916
+
2917
+ Counters* counters = isolate()->counters();
2918
+ __ IncrementCounter(counters->keyed_load_array_length(), 1);
2919
+
2920
+ // Check that the name has not changed.
2921
+ __ Cmp(rax, Handle<String>(name));
2922
+ __ j(not_equal, &miss);
2923
+
2924
+ GenerateLoadArrayLength(masm(), rdx, rcx, &miss);
2925
+ __ bind(&miss);
2926
+ __ DecrementCounter(counters->keyed_load_array_length(), 1);
2927
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2928
+
2929
+ // Return the generated code.
2930
+ return GetCode(CALLBACKS, name);
2931
+ }
2932
+
2933
+
2934
+ MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
2935
+ // ----------- S t a t e -------------
2936
+ // -- rax : key
2937
+ // -- rdx : receiver
2938
+ // -- rsp[0] : return address
2939
+ // -----------------------------------
2940
+ Label miss;
2941
+
2942
+ Counters* counters = isolate()->counters();
2943
+ __ IncrementCounter(counters->keyed_load_string_length(), 1);
2944
+
2945
+ // Check that the name has not changed.
2946
+ __ Cmp(rax, Handle<String>(name));
2947
+ __ j(not_equal, &miss);
2948
+
2949
+ GenerateLoadStringLength(masm(), rdx, rcx, rbx, &miss, true);
2950
+ __ bind(&miss);
2951
+ __ DecrementCounter(counters->keyed_load_string_length(), 1);
2952
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2953
+
2954
+ // Return the generated code.
2955
+ return GetCode(CALLBACKS, name);
2956
+ }
2957
+
2958
+
2959
+ MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
2960
+ // ----------- S t a t e -------------
2961
+ // -- rax : key
2962
+ // -- rdx : receiver
2963
+ // -- rsp[0] : return address
2964
+ // -----------------------------------
2965
+ Label miss;
2966
+
2967
+ Counters* counters = isolate()->counters();
2968
+ __ IncrementCounter(counters->keyed_load_function_prototype(), 1);
2969
+
2970
+ // Check that the name has not changed.
2971
+ __ Cmp(rax, Handle<String>(name));
2972
+ __ j(not_equal, &miss);
2973
+
2974
+ GenerateLoadFunctionPrototype(masm(), rdx, rcx, rbx, &miss);
2975
+ __ bind(&miss);
2976
+ __ DecrementCounter(counters->keyed_load_function_prototype(), 1);
2977
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2978
+
2979
+ // Return the generated code.
2980
+ return GetCode(CALLBACKS, name);
2981
+ }
2982
+
2983
+
2984
+ MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) {
2985
+ // ----------- S t a t e -------------
2986
+ // -- rax : key
2987
+ // -- rdx : receiver
2988
+ // -- rsp[0] : return address
2989
+ // -----------------------------------
2990
+ Label miss;
2991
+
2992
+ // Check that the receiver isn't a smi.
2993
+ __ JumpIfSmi(rdx, &miss);
2994
+
2995
+ // Check that the map matches.
2996
+ __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2997
+ Handle<Map>(receiver->map()));
2998
+ __ j(not_equal, &miss);
2999
+
3000
+ // Check that the key is a smi.
3001
+ __ JumpIfNotSmi(rax, &miss);
3002
+
3003
+ // Get the elements array.
3004
+ __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
3005
+ __ AssertFastElements(rcx);
3006
+
3007
+ // Check that the key is within bounds.
3008
+ __ SmiCompare(rax, FieldOperand(rcx, FixedArray::kLengthOffset));
3009
+ __ j(above_equal, &miss);
3010
+
3011
+ // Load the result and make sure it's not the hole.
3012
+ SmiIndex index = masm()->SmiToIndex(rbx, rax, kPointerSizeLog2);
3013
+ __ movq(rbx, FieldOperand(rcx,
3014
+ index.reg,
3015
+ index.scale,
3016
+ FixedArray::kHeaderSize));
3017
+ __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
3018
+ __ j(equal, &miss);
3019
+ __ movq(rax, rbx);
3020
+ __ ret(0);
3021
+
3022
+ __ bind(&miss);
3023
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3024
+
3025
+ // Return the generated code.
3026
+ return GetCode(NORMAL, NULL);
3027
+ }
3028
+
3029
+
3030
+ // Specialized stub for constructing objects from functions which have only
3031
+ // simple assignments of the form this.x = ...; in their body.
3032
+ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
3033
+ // ----------- S t a t e -------------
3034
+ // -- rax : argc
3035
+ // -- rdi : constructor
3036
+ // -- rsp[0] : return address
3037
+ // -- rsp[8] : last argument
3038
+ // -----------------------------------
3039
+ Label generic_stub_call;
3040
+
3041
+ // Use r8 for holding undefined which is used in several places below.
3042
+ __ Move(r8, factory()->undefined_value());
3043
+
3044
+ #ifdef ENABLE_DEBUGGER_SUPPORT
3045
+ // Check to see whether there are any break points in the function code. If
3046
+ // there are, jump to the generic constructor stub which calls the actual
3047
+ // code for the function thereby hitting the break points.
3048
+ __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
3049
+ __ movq(rbx, FieldOperand(rbx, SharedFunctionInfo::kDebugInfoOffset));
3050
+ __ cmpq(rbx, r8);
3051
+ __ j(not_equal, &generic_stub_call);
3052
+ #endif
3053
+
3054
+ // Load the initial map and verify that it is in fact a map.
3055
+ __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
3056
+ // Will both indicate a NULL and a Smi.
3057
+ ASSERT(kSmiTag == 0);
3058
+ __ JumpIfSmi(rbx, &generic_stub_call);
3059
+ __ CmpObjectType(rbx, MAP_TYPE, rcx);
3060
+ __ j(not_equal, &generic_stub_call);
3061
+
3062
+ #ifdef DEBUG
3063
+ // Cannot construct functions this way.
3064
+ // rdi: constructor
3065
+ // rbx: initial map
3066
+ __ CmpInstanceType(rbx, JS_FUNCTION_TYPE);
3067
+ __ Assert(not_equal, "Function constructed by construct stub.");
3068
+ #endif
3069
+
3070
+ // Now allocate the JSObject in new space.
3071
+ // rdi: constructor
3072
+ // rbx: initial map
3073
+ __ movzxbq(rcx, FieldOperand(rbx, Map::kInstanceSizeOffset));
3074
+ __ shl(rcx, Immediate(kPointerSizeLog2));
3075
+ __ AllocateInNewSpace(rcx,
3076
+ rdx,
3077
+ rcx,
3078
+ no_reg,
3079
+ &generic_stub_call,
3080
+ NO_ALLOCATION_FLAGS);
3081
+
3082
+ // Allocated the JSObject, now initialize the fields and add the heap tag.
3083
+ // rbx: initial map
3084
+ // rdx: JSObject (untagged)
3085
+ __ movq(Operand(rdx, JSObject::kMapOffset), rbx);
3086
+ __ Move(rbx, factory()->empty_fixed_array());
3087
+ __ movq(Operand(rdx, JSObject::kPropertiesOffset), rbx);
3088
+ __ movq(Operand(rdx, JSObject::kElementsOffset), rbx);
3089
+
3090
+ // rax: argc
3091
+ // rdx: JSObject (untagged)
3092
+ // Load the address of the first in-object property into r9.
3093
+ __ lea(r9, Operand(rdx, JSObject::kHeaderSize));
3094
+ // Calculate the location of the first argument. The stack contains only the
3095
+ // return address on top of the argc arguments.
3096
+ __ lea(rcx, Operand(rsp, rax, times_pointer_size, 0));
3097
+
3098
+ // rax: argc
3099
+ // rcx: first argument
3100
+ // rdx: JSObject (untagged)
3101
+ // r8: undefined
3102
+ // r9: first in-object property of the JSObject
3103
+ // Fill the initialized properties with a constant value or a passed argument
3104
+ // depending on the this.x = ...; assignment in the function.
3105
+ SharedFunctionInfo* shared = function->shared();
3106
+ for (int i = 0; i < shared->this_property_assignments_count(); i++) {
3107
+ if (shared->IsThisPropertyAssignmentArgument(i)) {
3108
+ // Check if the argument assigned to the property is actually passed.
3109
+ // If argument is not passed the property is set to undefined,
3110
+ // otherwise find it on the stack.
3111
+ int arg_number = shared->GetThisPropertyAssignmentArgument(i);
3112
+ __ movq(rbx, r8);
3113
+ __ cmpq(rax, Immediate(arg_number));
3114
+ __ cmovq(above, rbx, Operand(rcx, arg_number * -kPointerSize));
3115
+ // Store value in the property.
3116
+ __ movq(Operand(r9, i * kPointerSize), rbx);
3117
+ } else {
3118
+ // Set the property to the constant value.
3119
+ Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
3120
+ __ Move(Operand(r9, i * kPointerSize), constant);
3121
+ }
3122
+ }
3123
+
3124
+ // Fill the unused in-object property fields with undefined.
3125
+ ASSERT(function->has_initial_map());
3126
+ for (int i = shared->this_property_assignments_count();
3127
+ i < function->initial_map()->inobject_properties();
3128
+ i++) {
3129
+ __ movq(Operand(r9, i * kPointerSize), r8);
3130
+ }
3131
+
3132
+ // rax: argc
3133
+ // rdx: JSObject (untagged)
3134
+ // Move argc to rbx and the JSObject to return to rax and tag it.
3135
+ __ movq(rbx, rax);
3136
+ __ movq(rax, rdx);
3137
+ __ or_(rax, Immediate(kHeapObjectTag));
3138
+
3139
+ // rax: JSObject
3140
+ // rbx: argc
3141
+ // Remove caller arguments and receiver from the stack and return.
3142
+ __ pop(rcx);
3143
+ __ lea(rsp, Operand(rsp, rbx, times_pointer_size, 1 * kPointerSize));
3144
+ __ push(rcx);
3145
+ Counters* counters = isolate()->counters();
3146
+ __ IncrementCounter(counters->constructed_objects(), 1);
3147
+ __ IncrementCounter(counters->constructed_objects_stub(), 1);
3148
+ __ ret(0);
3149
+
3150
+ // Jump to the generic stub in case the specialized code cannot handle the
3151
+ // construction.
3152
+ __ bind(&generic_stub_call);
3153
+ Code* code =
3154
+ isolate()->builtins()->builtin(Builtins::kJSConstructStubGeneric);
3155
+ Handle<Code> generic_construct_stub(code);
3156
+ __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
3157
+
3158
+ // Return the generated code.
3159
+ return GetCode();
3160
+ }
3161
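As a hedged illustration (not part of the diff): the specialized construct stub above applies to constructors whose bodies consist only of simple this.x = ...; assignments from arguments or constants, so the stub can fill the in-object properties directly; anything else goes through the generic construct stub. A sketch of the two cases in JavaScript:

    // Candidate for the specialized stub: only simple this.<name> = <arg or constant>.
    function Point(x, y) {
      this.x = x;      // from an argument (filled with undefined if not passed)
      this.y = y;
      this.z = 0;      // from a constant
    }
    // Not a candidate: the body does more than simple this.x = ... assignments,
    // so construction falls back to the generic construct stub.
    function Labelled(x) {
      this.x = x;
      this.label = "p" + x;
    }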
+
+
+ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
+ JSObject* receiver, ExternalArrayType array_type, Code::Flags flags) {
+ // ----------- S t a t e -------------
+ // -- rax : key
+ // -- rdx : receiver
+ // -- rsp[0] : return address
+ // -----------------------------------
+ Label slow;
+
+ // Check that the object isn't a smi.
+ __ JumpIfSmi(rdx, &slow);
+
+ // Check that the key is a smi.
+ __ JumpIfNotSmi(rax, &slow);
+
+ // Check that the map matches.
+ __ CheckMap(rdx, Handle<Map>(receiver->map()), &slow, false);
+ __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
+
+ // Check that the index is in range.
+ __ SmiToInteger32(rcx, rax);
+ __ cmpl(rcx, FieldOperand(rbx, ExternalArray::kLengthOffset));
+ // Unsigned comparison catches both negative and too-large values.
+ __ j(above_equal, &slow);
+
+ // rax: index (as a smi)
+ // rdx: receiver (JSObject)
+ // rcx: untagged index
+ // rbx: elements array
+ __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset));
+ // rbx: base pointer of external storage
+ switch (array_type) {
+ case kExternalByteArray:
+ __ movsxbq(rcx, Operand(rbx, rcx, times_1, 0));
+ break;
+ case kExternalPixelArray:
+ case kExternalUnsignedByteArray:
+ __ movzxbq(rcx, Operand(rbx, rcx, times_1, 0));
+ break;
+ case kExternalShortArray:
+ __ movsxwq(rcx, Operand(rbx, rcx, times_2, 0));
+ break;
+ case kExternalUnsignedShortArray:
+ __ movzxwq(rcx, Operand(rbx, rcx, times_2, 0));
+ break;
+ case kExternalIntArray:
+ __ movsxlq(rcx, Operand(rbx, rcx, times_4, 0));
+ break;
+ case kExternalUnsignedIntArray:
+ __ movl(rcx, Operand(rbx, rcx, times_4, 0));
+ break;
+ case kExternalFloatArray:
+ __ cvtss2sd(xmm0, Operand(rbx, rcx, times_4, 0));
+ break;
+ default:
+ UNREACHABLE();
+ break;
+ }
+
+ // rax: index
+ // rdx: receiver
+ // For integer array types:
+ // rcx: value
+ // For floating-point array type:
+ // xmm0: value as double.
+
+ ASSERT(kSmiValueSize == 32);
+ if (array_type == kExternalUnsignedIntArray) {
+ // For the UnsignedInt array type, we need to see whether
+ // the value can be represented in a Smi. If not, we need to convert
+ // it to a HeapNumber.
+ NearLabel box_int;
+
+ __ JumpIfUIntNotValidSmiValue(rcx, &box_int);
+
+ __ Integer32ToSmi(rax, rcx);
+ __ ret(0);
+
+ __ bind(&box_int);
+
+ // Allocate a HeapNumber for the int and perform int-to-double
+ // conversion.
+ // The value is zero-extended since we loaded the value from memory
+ // with movl.
+ __ cvtqsi2sd(xmm0, rcx);
+
+ __ AllocateHeapNumber(rcx, rbx, &slow);
+ // Set the value.
+ __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0);
+ __ movq(rax, rcx);
+ __ ret(0);
+ } else if (array_type == kExternalFloatArray) {
+ // For the floating-point array type, we need to always allocate a
+ // HeapNumber.
+ __ AllocateHeapNumber(rcx, rbx, &slow);
+ // Set the value.
+ __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0);
+ __ movq(rax, rcx);
+ __ ret(0);
+ } else {
+ __ Integer32ToSmi(rax, rcx);
+ __ ret(0);
+ }
+
+ // Slow case: Jump to runtime.
+ __ bind(&slow);
+ Counters* counters = isolate()->counters();
+ __ IncrementCounter(counters->keyed_load_external_array_slow(), 1);
+
+ // ----------- S t a t e -------------
+ // -- rax : key
+ // -- rdx : receiver
+ // -- rsp[0] : return address
+ // -----------------------------------
+
+ __ pop(rbx);
+ __ push(rdx); // receiver
+ __ push(rax); // name
+ __ push(rbx); // return address
+
+ // Perform tail call to the entry.
+ __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
+
+ // Return the generated code.
+ return GetCode(flags);
+ }
+
+
+ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
+ JSObject* receiver, ExternalArrayType array_type, Code::Flags flags) {
+ // ----------- S t a t e -------------
+ // -- rax : value
+ // -- rcx : key
+ // -- rdx : receiver
+ // -- rsp[0] : return address
+ // -----------------------------------
+ Label slow;
+
+ // Check that the object isn't a smi.
+ __ JumpIfSmi(rdx, &slow);
+
+ // Check that the map matches.
+ __ CheckMap(rdx, Handle<Map>(receiver->map()), &slow, false);
+ __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
+
+ // Check that the key is a smi.
+ __ JumpIfNotSmi(rcx, &slow);
+
+ // Check that the index is in range.
+ __ SmiToInteger32(rdi, rcx); // Untag the index.
+ __ cmpl(rdi, FieldOperand(rbx, ExternalArray::kLengthOffset));
+ // Unsigned comparison catches both negative and too-large values.
+ __ j(above_equal, &slow);
+
+ // Handle both smis and HeapNumbers in the fast path. Go to the
+ // runtime for all other kinds of values.
+ // rax: value
+ // rcx: key (a smi)
+ // rdx: receiver (a JSObject)
+ // rbx: elements array
+ // rdi: untagged key
+ NearLabel check_heap_number;
+ if (array_type == kExternalPixelArray) {
+ // Float to pixel conversion is only implemented in the runtime for now.
+ __ JumpIfNotSmi(rax, &slow);
+ } else {
+ __ JumpIfNotSmi(rax, &check_heap_number);
+ }
+ // No more branches to slow case on this path. Key and receiver not needed.
+ __ SmiToInteger32(rdx, rax);
+ __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset));
+ // rbx: base pointer of external storage
+ switch (array_type) {
+ case kExternalPixelArray:
+ { // Clamp the value to [0..255].
+ NearLabel done;
+ __ testl(rdx, Immediate(0xFFFFFF00));
+ __ j(zero, &done);
+ __ setcc(negative, rdx); // 1 if negative, 0 if positive.
+ __ decb(rdx); // 0 if negative, 255 if positive.
+ __ bind(&done);
+ }
+ __ movb(Operand(rbx, rdi, times_1, 0), rdx);
+ break;
+ case kExternalByteArray:
+ case kExternalUnsignedByteArray:
+ __ movb(Operand(rbx, rdi, times_1, 0), rdx);
+ break;
+ case kExternalShortArray:
+ case kExternalUnsignedShortArray:
+ __ movw(Operand(rbx, rdi, times_2, 0), rdx);
+ break;
+ case kExternalIntArray:
+ case kExternalUnsignedIntArray:
+ __ movl(Operand(rbx, rdi, times_4, 0), rdx);
+ break;
+ case kExternalFloatArray:
+ // Need to perform int-to-float conversion.
+ __ cvtlsi2ss(xmm0, rdx);
+ __ movss(Operand(rbx, rdi, times_4, 0), xmm0);
+ break;
+ default:
+ UNREACHABLE();
+ break;
+ }
+ __ ret(0);
+
+ // TODO(danno): handle heap number -> pixel array conversion
+ if (array_type != kExternalPixelArray) {
+ __ bind(&check_heap_number);
+ // rax: value
+ // rcx: key (a smi)
+ // rdx: receiver (a JSObject)
+ // rbx: elements array
+ // rdi: untagged key
+ __ CmpObjectType(rax, HEAP_NUMBER_TYPE, kScratchRegister);
+ __ j(not_equal, &slow);
+ // No more branches to slow case on this path.
+
+ // The WebGL specification leaves the behavior of storing NaN and
+ // +/-Infinity into integer arrays basically undefined. For more
+ // reproducible behavior, convert these to zero.
+ __ movsd(xmm0, FieldOperand(rax, HeapNumber::kValueOffset));
+ __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset));
+ // rdi: untagged index
+ // rbx: base pointer of external storage
+ // top of FPU stack: value
+ if (array_type == kExternalFloatArray) {
+ __ cvtsd2ss(xmm0, xmm0);
+ __ movss(Operand(rbx, rdi, times_4, 0), xmm0);
+ __ ret(0);
+ } else {
+ // Perform float-to-int conversion with truncation (round-to-zero)
+ // behavior.
+
+ // Convert to int32 and store the low byte/word.
+ // If the value is NaN or +/-infinity, the result is 0x80000000,
+ // which is automatically zero when taken mod 2^n, n < 32.
+ // rdx: value (converted to an untagged integer)
+ // rdi: untagged index
+ // rbx: base pointer of external storage
+ switch (array_type) {
+ case kExternalByteArray:
+ case kExternalUnsignedByteArray:
+ __ cvttsd2si(rdx, xmm0);
+ __ movb(Operand(rbx, rdi, times_1, 0), rdx);
+ break;
+ case kExternalShortArray:
+ case kExternalUnsignedShortArray:
+ __ cvttsd2si(rdx, xmm0);
+ __ movw(Operand(rbx, rdi, times_2, 0), rdx);
+ break;
+ case kExternalIntArray:
+ case kExternalUnsignedIntArray: {
+ // Convert to int64, so that NaN and infinities become
+ // 0x8000000000000000, which is zero mod 2^32.
+ __ cvttsd2siq(rdx, xmm0);
+ __ movl(Operand(rbx, rdi, times_4, 0), rdx);
+ break;
+ }
+ default:
+ UNREACHABLE();
+ break;
+ }
+ __ ret(0);
+ }
+ }
+
+ // Slow case: call runtime.
+ __ bind(&slow);
+
+ // ----------- S t a t e -------------
+ // -- rax : value
+ // -- rcx : key
+ // -- rdx : receiver
+ // -- rsp[0] : return address
+ // -----------------------------------
+
+ __ pop(rbx);
+ __ push(rdx); // receiver
+ __ push(rcx); // key
+ __ push(rax); // value
+ __ Push(Smi::FromInt(NONE)); // PropertyAttributes
+ __ Push(Smi::FromInt(
+ Code::ExtractExtraICStateFromFlags(flags) & kStrictMode));
+ __ push(rbx); // return address
+
+ // Do tail-call to runtime routine.
+ __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
+
+ return GetCode(flags);
+ }
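Likewise for stores, a hedged sketch (same assumption about embedder-provided typed arrays; the clamped pixel-style type name is illustrative) of the clamping and NaN handling implemented above:

    // Pixel (clamped byte) stores clamp to [0..255]; other integer stores truncate,
    // and NaN / +/-Infinity are converted to zero for reproducible behavior.
    var pixels = new Uint8ClampedArray(1);   // assumed to be backed by kExternalPixelArray
    pixels[0] = 300;                         // outside the byte range: stored as 255
    pixels[0] = -5;                          // negative: stored as 0
    var ints = new Int32Array(1);
    ints[0] = NaN;                           // non-finite double: stored as 0
    ints[0] = 2.7;                           // truncated toward zero: stored as 2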
+
+ #undef __
+
+ } } // namespace v8::internal
+
+ #endif // V8_TARGET_ARCH_X64