therubyracer 0.8.1.pre2 → 0.8.1

Sign up to get free protection for your applications and access to all of the features.

Potentially problematic release.


This version of therubyracer might be problematic. See the release details for more information.

Files changed (877) — hide / show
  1. data/Changelog.md +2 -1
  2. data/README.md +6 -3
  3. data/ext/v8/upstream/3.1.8/.gitignore +31 -0
  4. data/ext/v8/upstream/3.1.8/AUTHORS +40 -0
  5. data/ext/v8/upstream/3.1.8/ChangeLog +2566 -0
  6. data/ext/v8/upstream/3.1.8/LICENSE +52 -0
  7. data/ext/v8/upstream/3.1.8/LICENSE.strongtalk +29 -0
  8. data/ext/v8/upstream/3.1.8/LICENSE.v8 +26 -0
  9. data/ext/v8/upstream/3.1.8/LICENSE.valgrind +45 -0
  10. data/ext/v8/upstream/3.1.8/SConstruct +1192 -0
  11. data/ext/v8/upstream/3.1.8/build/README.txt +25 -0
  12. data/ext/v8/upstream/3.1.8/build/all.gyp +18 -0
  13. data/ext/v8/upstream/3.1.8/build/armu.gypi +32 -0
  14. data/ext/v8/upstream/3.1.8/build/common.gypi +82 -0
  15. data/ext/v8/upstream/3.1.8/build/gyp_v8 +145 -0
  16. data/ext/v8/upstream/3.1.8/include/v8-debug.h +384 -0
  17. data/ext/v8/upstream/3.1.8/include/v8-preparser.h +116 -0
  18. data/ext/v8/upstream/3.1.8/include/v8-profiler.h +426 -0
  19. data/ext/v8/upstream/3.1.8/include/v8-testing.h +99 -0
  20. data/ext/v8/upstream/3.1.8/include/v8.h +3846 -0
  21. data/ext/v8/upstream/3.1.8/include/v8stdint.h +53 -0
  22. data/ext/v8/upstream/3.1.8/preparser/preparser-process.cc +206 -0
  23. data/ext/v8/upstream/3.1.8/src/SConscript +356 -0
  24. data/ext/v8/upstream/3.1.8/src/accessors.cc +907 -0
  25. data/ext/v8/upstream/3.1.8/src/accessors.h +121 -0
  26. data/ext/v8/upstream/3.1.8/src/allocation.cc +204 -0
  27. data/ext/v8/upstream/3.1.8/src/allocation.h +176 -0
  28. data/ext/v8/upstream/3.1.8/src/api.cc +5191 -0
  29. data/ext/v8/upstream/3.1.8/src/api.h +508 -0
  30. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/apinatives.js +0 -0
  31. data/ext/v8/upstream/3.1.8/src/apiutils.h +80 -0
  32. data/ext/v8/upstream/3.1.8/src/arguments.h +105 -0
  33. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm-inl.h +352 -0
  34. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.cc +2756 -0
  35. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.h +1294 -0
  36. data/ext/v8/upstream/3.1.8/src/arm/builtins-arm.cc +1628 -0
  37. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.cc +6783 -0
  38. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.h +657 -0
  39. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm-inl.h +48 -0
  40. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.cc +7403 -0
  41. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.h +595 -0
  42. data/ext/v8/upstream/3.1.8/src/arm/constants-arm.cc +152 -0
  43. data/ext/v8/upstream/3.1.8/src/arm/constants-arm.h +769 -0
  44. data/ext/v8/upstream/3.1.8/src/arm/cpu-arm.cc +147 -0
  45. data/ext/v8/upstream/3.1.8/src/arm/debug-arm.cc +315 -0
  46. data/ext/v8/upstream/3.1.8/src/arm/deoptimizer-arm.cc +700 -0
  47. data/ext/v8/upstream/3.1.8/src/arm/disasm-arm.cc +1439 -0
  48. data/ext/v8/upstream/3.1.8/src/arm/frames-arm.cc +45 -0
  49. data/ext/v8/upstream/3.1.8/src/arm/frames-arm.h +168 -0
  50. data/ext/v8/upstream/3.1.8/src/arm/full-codegen-arm.cc +4230 -0
  51. data/ext/v8/upstream/3.1.8/src/arm/ic-arm.cc +1799 -0
  52. data/ext/v8/upstream/3.1.8/src/arm/jump-target-arm.cc +174 -0
  53. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.cc +2041 -0
  54. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.h +2046 -0
  55. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.cc +3822 -0
  56. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.h +312 -0
  57. data/ext/v8/upstream/3.1.8/src/arm/lithium-gap-resolver-arm.cc +303 -0
  58. data/ext/v8/upstream/3.1.8/src/arm/lithium-gap-resolver-arm.h +84 -0
  59. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.cc +2701 -0
  60. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.h +1015 -0
  61. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.cc +1280 -0
  62. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.h +252 -0
  63. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/register-allocator-arm-inl.h +0 -0
  64. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/register-allocator-arm.cc +0 -0
  65. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/register-allocator-arm.h +0 -0
  66. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.cc +3165 -0
  67. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.h +402 -0
  68. data/ext/v8/upstream/3.1.8/src/arm/stub-cache-arm.cc +4077 -0
  69. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/virtual-frame-arm-inl.h +0 -0
  70. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.cc +843 -0
  71. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.h +520 -0
  72. data/ext/v8/upstream/3.1.8/src/array.js +1231 -0
  73. data/ext/v8/upstream/3.1.8/src/assembler.cc +973 -0
  74. data/ext/v8/upstream/3.1.8/src/assembler.h +787 -0
  75. data/ext/v8/upstream/3.1.8/src/ast-inl.h +107 -0
  76. data/ext/v8/upstream/3.1.8/src/ast.cc +1067 -0
  77. data/ext/v8/upstream/3.1.8/src/ast.h +2177 -0
  78. data/ext/v8/upstream/3.1.8/src/atomicops.h +165 -0
  79. data/ext/v8/upstream/3.1.8/src/atomicops_internals_arm_gcc.h +145 -0
  80. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_gcc.cc +126 -0
  81. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_gcc.h +287 -0
  82. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_macosx.h +301 -0
  83. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_msvc.h +203 -0
  84. data/ext/v8/upstream/3.1.8/src/bignum-dtoa.cc +655 -0
  85. data/ext/v8/upstream/3.1.8/src/bignum-dtoa.h +81 -0
  86. data/ext/v8/upstream/3.1.8/src/bignum.cc +768 -0
  87. data/ext/v8/upstream/3.1.8/src/bignum.h +140 -0
  88. data/ext/v8/upstream/3.1.8/src/bootstrapper.cc +1888 -0
  89. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/bootstrapper.h +0 -0
  90. data/ext/v8/upstream/3.1.8/src/builtins.cc +1586 -0
  91. data/ext/v8/upstream/3.1.8/src/builtins.h +339 -0
  92. data/ext/v8/upstream/3.1.8/src/bytecodes-irregexp.h +105 -0
  93. data/ext/v8/upstream/3.1.8/src/cached-powers.cc +177 -0
  94. data/ext/v8/upstream/3.1.8/src/cached-powers.h +65 -0
  95. data/ext/v8/upstream/3.1.8/src/char-predicates-inl.h +94 -0
  96. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/char-predicates.h +0 -0
  97. data/ext/v8/upstream/3.1.8/src/checks.cc +110 -0
  98. data/ext/v8/upstream/3.1.8/src/checks.h +292 -0
  99. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/circular-queue-inl.h +0 -0
  100. data/ext/v8/upstream/3.1.8/src/circular-queue.cc +122 -0
  101. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/circular-queue.h +0 -0
  102. data/ext/v8/upstream/3.1.8/src/code-stubs.cc +230 -0
  103. data/ext/v8/upstream/3.1.8/src/code-stubs.h +950 -0
  104. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/code.h +0 -0
  105. data/ext/v8/upstream/3.1.8/src/codegen-inl.h +64 -0
  106. data/ext/v8/upstream/3.1.8/src/codegen.cc +495 -0
  107. data/ext/v8/upstream/3.1.8/src/codegen.h +245 -0
  108. data/ext/v8/upstream/3.1.8/src/compilation-cache.cc +654 -0
  109. data/ext/v8/upstream/3.1.8/src/compilation-cache.h +112 -0
  110. data/ext/v8/upstream/3.1.8/src/compiler.cc +806 -0
  111. data/ext/v8/upstream/3.1.8/src/compiler.h +290 -0
  112. data/ext/v8/upstream/3.1.8/src/contexts.cc +320 -0
  113. data/ext/v8/upstream/3.1.8/src/contexts.h +376 -0
  114. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/conversions-inl.h +0 -0
  115. data/ext/v8/upstream/3.1.8/src/conversions.cc +1069 -0
  116. data/ext/v8/upstream/3.1.8/src/conversions.h +122 -0
  117. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/counters.cc +0 -0
  118. data/ext/v8/upstream/3.1.8/src/counters.h +242 -0
  119. data/ext/v8/upstream/3.1.8/src/cpu-profiler-inl.h +100 -0
  120. data/ext/v8/upstream/3.1.8/src/cpu-profiler.cc +554 -0
  121. data/ext/v8/upstream/3.1.8/src/cpu-profiler.h +291 -0
  122. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/cpu.h +0 -0
  123. data/ext/v8/upstream/3.1.8/src/d8-debug.cc +367 -0
  124. data/ext/v8/upstream/3.1.8/src/d8-debug.h +157 -0
  125. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/d8-posix.cc +0 -0
  126. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/d8-readline.cc +0 -0
  127. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/d8-windows.cc +0 -0
  128. data/ext/v8/upstream/3.1.8/src/d8.cc +792 -0
  129. data/ext/v8/upstream/3.1.8/src/d8.gyp +85 -0
  130. data/ext/v8/upstream/3.1.8/src/d8.h +231 -0
  131. data/ext/v8/upstream/3.1.8/src/d8.js +2798 -0
  132. data/ext/v8/upstream/3.1.8/src/data-flow.cc +545 -0
  133. data/ext/v8/upstream/3.1.8/src/data-flow.h +379 -0
  134. data/ext/v8/upstream/3.1.8/src/date.js +1103 -0
  135. data/ext/v8/upstream/3.1.8/src/dateparser-inl.h +125 -0
  136. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/dateparser.cc +0 -0
  137. data/ext/v8/upstream/3.1.8/src/dateparser.h +263 -0
  138. data/ext/v8/upstream/3.1.8/src/debug-agent.cc +446 -0
  139. data/ext/v8/upstream/3.1.8/src/debug-agent.h +131 -0
  140. data/ext/v8/upstream/3.1.8/src/debug-debugger.js +2569 -0
  141. data/ext/v8/upstream/3.1.8/src/debug.cc +3085 -0
  142. data/ext/v8/upstream/3.1.8/src/debug.h +1025 -0
  143. data/ext/v8/upstream/3.1.8/src/deoptimizer.cc +1185 -0
  144. data/ext/v8/upstream/3.1.8/src/deoptimizer.h +529 -0
  145. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/disasm.h +0 -0
  146. data/ext/v8/upstream/3.1.8/src/disassembler.cc +338 -0
  147. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/disassembler.h +0 -0
  148. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/diy-fp.cc +0 -0
  149. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/diy-fp.h +0 -0
  150. data/ext/v8/upstream/3.1.8/src/double.h +238 -0
  151. data/ext/v8/upstream/3.1.8/src/dtoa.cc +103 -0
  152. data/ext/v8/upstream/3.1.8/src/dtoa.h +85 -0
  153. data/ext/v8/upstream/3.1.8/src/execution.cc +735 -0
  154. data/ext/v8/upstream/3.1.8/src/execution.h +322 -0
  155. data/ext/v8/upstream/3.1.8/src/extensions/experimental/experimental.gyp +53 -0
  156. data/ext/v8/upstream/3.1.8/src/extensions/experimental/i18n-extension.cc +264 -0
  157. data/ext/v8/upstream/3.1.8/src/extensions/experimental/i18n-extension.h +64 -0
  158. data/ext/v8/upstream/3.1.8/src/extensions/externalize-string-extension.cc +141 -0
  159. data/ext/v8/upstream/3.1.8/src/extensions/externalize-string-extension.h +50 -0
  160. data/ext/v8/upstream/3.1.8/src/extensions/gc-extension.cc +58 -0
  161. data/ext/v8/upstream/3.1.8/src/extensions/gc-extension.h +49 -0
  162. data/ext/v8/upstream/3.1.8/src/factory.cc +1087 -0
  163. data/ext/v8/upstream/3.1.8/src/factory.h +432 -0
  164. data/ext/v8/upstream/3.1.8/src/fast-dtoa.cc +736 -0
  165. data/ext/v8/upstream/3.1.8/src/fast-dtoa.h +83 -0
  166. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/fixed-dtoa.cc +0 -0
  167. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/fixed-dtoa.h +0 -0
  168. data/ext/v8/upstream/3.1.8/src/flag-definitions.h +552 -0
  169. data/ext/v8/upstream/3.1.8/src/flags.cc +551 -0
  170. data/ext/v8/upstream/3.1.8/src/flags.h +79 -0
  171. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/frame-element.cc +0 -0
  172. data/ext/v8/upstream/3.1.8/src/frame-element.h +277 -0
  173. data/ext/v8/upstream/3.1.8/src/frames-inl.h +210 -0
  174. data/ext/v8/upstream/3.1.8/src/frames.cc +1232 -0
  175. data/ext/v8/upstream/3.1.8/src/frames.h +826 -0
  176. data/ext/v8/upstream/3.1.8/src/full-codegen.cc +1382 -0
  177. data/ext/v8/upstream/3.1.8/src/full-codegen.h +751 -0
  178. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.cc +90 -0
  179. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.h +111 -0
  180. data/ext/v8/upstream/3.1.8/src/gdb-jit.cc +1547 -0
  181. data/ext/v8/upstream/3.1.8/src/gdb-jit.h +138 -0
  182. data/ext/v8/upstream/3.1.8/src/global-handles.cc +534 -0
  183. data/ext/v8/upstream/3.1.8/src/global-handles.h +181 -0
  184. data/ext/v8/upstream/3.1.8/src/globals.h +325 -0
  185. data/ext/v8/upstream/3.1.8/src/handles-inl.h +80 -0
  186. data/ext/v8/upstream/3.1.8/src/handles.cc +910 -0
  187. data/ext/v8/upstream/3.1.8/src/handles.h +424 -0
  188. data/ext/v8/upstream/3.1.8/src/hashmap.cc +230 -0
  189. data/ext/v8/upstream/3.1.8/src/hashmap.h +121 -0
  190. data/ext/v8/upstream/3.1.8/src/heap-inl.h +587 -0
  191. data/ext/v8/upstream/3.1.8/src/heap-profiler.cc +1128 -0
  192. data/ext/v8/upstream/3.1.8/src/heap-profiler.h +381 -0
  193. data/ext/v8/upstream/3.1.8/src/heap.cc +5610 -0
  194. data/ext/v8/upstream/3.1.8/src/heap.h +2218 -0
  195. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.cc +1490 -0
  196. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.h +3493 -0
  197. data/ext/v8/upstream/3.1.8/src/hydrogen.cc +6056 -0
  198. data/ext/v8/upstream/3.1.8/src/hydrogen.h +1091 -0
  199. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32-inl.h +429 -0
  200. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.cc +2800 -0
  201. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.h +1093 -0
  202. data/ext/v8/upstream/3.1.8/src/ia32/builtins-ia32.cc +1590 -0
  203. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.cc +6624 -0
  204. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.h +536 -0
  205. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/codegen-ia32-inl.h +0 -0
  206. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.cc +10354 -0
  207. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.h +798 -0
  208. data/ext/v8/upstream/3.1.8/src/ia32/cpu-ia32.cc +87 -0
  209. data/ext/v8/upstream/3.1.8/src/ia32/debug-ia32.cc +309 -0
  210. data/ext/v8/upstream/3.1.8/src/ia32/deoptimizer-ia32.cc +664 -0
  211. data/ext/v8/upstream/3.1.8/src/ia32/disasm-ia32.cc +1597 -0
  212. data/ext/v8/upstream/3.1.8/src/ia32/frames-ia32.cc +45 -0
  213. data/ext/v8/upstream/3.1.8/src/ia32/frames-ia32.h +140 -0
  214. data/ext/v8/upstream/3.1.8/src/ia32/full-codegen-ia32.cc +4278 -0
  215. data/ext/v8/upstream/3.1.8/src/ia32/ic-ia32.cc +1786 -0
  216. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/jump-target-ia32.cc +0 -0
  217. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.cc +3880 -0
  218. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.h +309 -0
  219. data/ext/v8/upstream/3.1.8/src/ia32/lithium-gap-resolver-ia32.cc +460 -0
  220. data/ext/v8/upstream/3.1.8/src/ia32/lithium-gap-resolver-ia32.h +110 -0
  221. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.cc +2095 -0
  222. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.h +2127 -0
  223. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.cc +2031 -0
  224. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.h +798 -0
  225. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.cc +1253 -0
  226. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.h +215 -0
  227. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/register-allocator-ia32-inl.h +0 -0
  228. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/register-allocator-ia32.cc +0 -0
  229. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/register-allocator-ia32.h +0 -0
  230. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/simulator-ia32.cc +0 -0
  231. data/ext/v8/upstream/3.1.8/src/ia32/simulator-ia32.h +72 -0
  232. data/ext/v8/upstream/3.1.8/src/ia32/stub-cache-ia32.cc +3732 -0
  233. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.cc +1360 -0
  234. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.h +646 -0
  235. data/ext/v8/upstream/3.1.8/src/ic-inl.h +129 -0
  236. data/ext/v8/upstream/3.1.8/src/ic.cc +2333 -0
  237. data/ext/v8/upstream/3.1.8/src/ic.h +639 -0
  238. data/ext/v8/upstream/3.1.8/src/inspector.cc +63 -0
  239. data/ext/v8/upstream/3.1.8/src/inspector.h +62 -0
  240. data/ext/v8/upstream/3.1.8/src/interpreter-irregexp.cc +655 -0
  241. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/interpreter-irregexp.h +0 -0
  242. data/ext/v8/upstream/3.1.8/src/json.js +342 -0
  243. data/ext/v8/upstream/3.1.8/src/jsregexp.cc +5340 -0
  244. data/ext/v8/upstream/3.1.8/src/jsregexp.h +1484 -0
  245. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target-heavy-inl.h +0 -0
  246. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.cc +430 -0
  247. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.h +244 -0
  248. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target-inl.h +0 -0
  249. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target-light-inl.h +0 -0
  250. data/ext/v8/upstream/3.1.8/src/jump-target-light.cc +111 -0
  251. data/ext/v8/upstream/3.1.8/src/jump-target-light.h +193 -0
  252. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target.cc +0 -0
  253. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target.h +0 -0
  254. data/ext/v8/upstream/3.1.8/src/list-inl.h +206 -0
  255. data/ext/v8/upstream/3.1.8/src/list.h +164 -0
  256. data/ext/v8/upstream/3.1.8/src/lithium-allocator-inl.h +140 -0
  257. data/ext/v8/upstream/3.1.8/src/lithium-allocator.cc +2093 -0
  258. data/ext/v8/upstream/3.1.8/src/lithium-allocator.h +644 -0
  259. data/ext/v8/upstream/3.1.8/src/lithium.cc +168 -0
  260. data/ext/v8/upstream/3.1.8/src/lithium.h +592 -0
  261. data/ext/v8/upstream/3.1.8/src/liveedit-debugger.js +1082 -0
  262. data/ext/v8/upstream/3.1.8/src/liveedit.cc +1650 -0
  263. data/ext/v8/upstream/3.1.8/src/liveedit.h +174 -0
  264. data/ext/v8/upstream/3.1.8/src/liveobjectlist-inl.h +126 -0
  265. data/ext/v8/upstream/3.1.8/src/liveobjectlist.cc +2527 -0
  266. data/ext/v8/upstream/3.1.8/src/liveobjectlist.h +322 -0
  267. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/log-inl.h +0 -0
  268. data/ext/v8/upstream/3.1.8/src/log-utils.cc +336 -0
  269. data/ext/v8/upstream/3.1.8/src/log-utils.h +232 -0
  270. data/ext/v8/upstream/3.1.8/src/log.cc +1608 -0
  271. data/ext/v8/upstream/3.1.8/src/log.h +379 -0
  272. data/ext/v8/upstream/3.1.8/src/macro-assembler.h +120 -0
  273. data/ext/v8/upstream/3.1.8/src/macros.py +178 -0
  274. data/ext/v8/upstream/3.1.8/src/mark-compact.cc +2957 -0
  275. data/ext/v8/upstream/3.1.8/src/mark-compact.h +433 -0
  276. data/ext/v8/upstream/3.1.8/src/math.js +264 -0
  277. data/ext/v8/upstream/3.1.8/src/memory.h +82 -0
  278. data/ext/v8/upstream/3.1.8/src/messages.cc +164 -0
  279. data/ext/v8/upstream/3.1.8/src/messages.h +114 -0
  280. data/ext/v8/upstream/3.1.8/src/messages.js +1071 -0
  281. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/assembler-mips-inl.h +0 -0
  282. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/assembler-mips.cc +0 -0
  283. data/ext/v8/upstream/3.1.8/src/mips/assembler-mips.h +667 -0
  284. data/ext/v8/upstream/3.1.8/src/mips/builtins-mips.cc +205 -0
  285. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/codegen-mips-inl.h +0 -0
  286. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/codegen-mips.cc +0 -0
  287. data/ext/v8/upstream/3.1.8/src/mips/codegen-mips.h +431 -0
  288. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/constants-mips.cc +0 -0
  289. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/constants-mips.h +0 -0
  290. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/cpu-mips.cc +0 -0
  291. data/ext/v8/upstream/3.1.8/src/mips/debug-mips.cc +127 -0
  292. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/disasm-mips.cc +0 -0
  293. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/fast-codegen-mips.cc +0 -0
  294. data/ext/v8/upstream/3.1.8/src/mips/frames-mips.cc +96 -0
  295. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/frames-mips.h +0 -0
  296. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/full-codegen-mips.cc +0 -0
  297. data/ext/v8/upstream/3.1.8/src/mips/ic-mips.cc +208 -0
  298. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/jump-target-mips.cc +0 -0
  299. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/macro-assembler-mips.cc +0 -0
  300. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/macro-assembler-mips.h +0 -0
  301. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/register-allocator-mips-inl.h +0 -0
  302. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/register-allocator-mips.cc +0 -0
  303. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/register-allocator-mips.h +0 -0
  304. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.cc +1650 -0
  305. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.h +311 -0
  306. data/ext/v8/upstream/3.1.8/src/mips/stub-cache-mips.cc +418 -0
  307. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/virtual-frame-mips.cc +0 -0
  308. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/virtual-frame-mips.h +0 -0
  309. data/ext/v8/upstream/3.1.8/src/mirror-debugger.js +2380 -0
  310. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mksnapshot.cc +0 -0
  311. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/natives.h +0 -0
  312. data/ext/v8/upstream/3.1.8/src/objects-debug.cc +722 -0
  313. data/ext/v8/upstream/3.1.8/src/objects-inl.h +3946 -0
  314. data/ext/v8/upstream/3.1.8/src/objects-printer.cc +801 -0
  315. data/ext/v8/upstream/3.1.8/src/objects-visiting.cc +142 -0
  316. data/ext/v8/upstream/3.1.8/src/objects-visiting.h +401 -0
  317. data/ext/v8/upstream/3.1.8/src/objects.cc +10044 -0
  318. data/ext/v8/upstream/3.1.8/src/objects.h +6571 -0
  319. data/ext/v8/upstream/3.1.8/src/parser.cc +5165 -0
  320. data/ext/v8/upstream/3.1.8/src/parser.h +802 -0
  321. data/ext/v8/upstream/3.1.8/src/platform-cygwin.cc +745 -0
  322. data/ext/v8/upstream/3.1.8/src/platform-freebsd.cc +702 -0
  323. data/ext/v8/upstream/3.1.8/src/platform-linux.cc +981 -0
  324. data/ext/v8/upstream/3.1.8/src/platform-macos.cc +732 -0
  325. data/ext/v8/upstream/3.1.8/src/platform-nullos.cc +498 -0
  326. data/ext/v8/upstream/3.1.8/src/platform-openbsd.cc +657 -0
  327. data/ext/v8/upstream/3.1.8/src/platform-posix.cc +399 -0
  328. data/ext/v8/upstream/3.1.8/src/platform-solaris.cc +714 -0
  329. data/ext/v8/upstream/3.1.8/src/platform-win32.cc +1974 -0
  330. data/ext/v8/upstream/3.1.8/src/platform.h +636 -0
  331. data/ext/v8/upstream/3.1.8/src/preparse-data.cc +183 -0
  332. data/ext/v8/upstream/3.1.8/src/preparse-data.h +249 -0
  333. data/ext/v8/upstream/3.1.8/src/preparser-api.cc +213 -0
  334. data/ext/v8/upstream/3.1.8/src/preparser.cc +1205 -0
  335. data/ext/v8/upstream/3.1.8/src/preparser.h +278 -0
  336. data/ext/v8/upstream/3.1.8/src/prettyprinter.cc +1539 -0
  337. data/ext/v8/upstream/3.1.8/src/prettyprinter.h +223 -0
  338. data/ext/v8/upstream/3.1.8/src/profile-generator-inl.h +128 -0
  339. data/ext/v8/upstream/3.1.8/src/profile-generator.cc +2899 -0
  340. data/ext/v8/upstream/3.1.8/src/profile-generator.h +1151 -0
  341. data/ext/v8/upstream/3.1.8/src/property.cc +96 -0
  342. data/ext/v8/upstream/3.1.8/src/property.h +337 -0
  343. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/regexp-macro-assembler-irregexp-inl.h +0 -0
  344. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-irregexp.cc +470 -0
  345. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-irregexp.h +142 -0
  346. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-tracer.cc +373 -0
  347. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-tracer.h +104 -0
  348. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.cc +257 -0
  349. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.h +231 -0
  350. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/regexp-stack.cc +0 -0
  351. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/regexp-stack.h +0 -0
  352. data/ext/v8/upstream/3.1.8/src/regexp.js +483 -0
  353. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/register-allocator-inl.h +0 -0
  354. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/register-allocator.cc +0 -0
  355. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/register-allocator.h +0 -0
  356. data/ext/v8/upstream/3.1.8/src/rewriter.cc +1023 -0
  357. data/ext/v8/upstream/3.1.8/src/rewriter.h +59 -0
  358. data/ext/v8/upstream/3.1.8/src/runtime-profiler.cc +443 -0
  359. data/ext/v8/upstream/3.1.8/src/runtime-profiler.h +77 -0
  360. data/ext/v8/upstream/3.1.8/src/runtime.cc +11592 -0
  361. data/ext/v8/upstream/3.1.8/src/runtime.h +582 -0
  362. data/ext/v8/upstream/3.1.8/src/runtime.js +643 -0
  363. data/ext/v8/upstream/3.1.8/src/safepoint-table.cc +253 -0
  364. data/ext/v8/upstream/3.1.8/src/safepoint-table.h +263 -0
  365. data/ext/v8/upstream/3.1.8/src/scanner-base.cc +971 -0
  366. data/ext/v8/upstream/3.1.8/src/scanner-base.h +653 -0
  367. data/ext/v8/upstream/3.1.8/src/scanner.cc +586 -0
  368. data/ext/v8/upstream/3.1.8/src/scanner.h +194 -0
  369. data/ext/v8/upstream/3.1.8/src/scopeinfo.cc +636 -0
  370. data/ext/v8/upstream/3.1.8/src/scopeinfo.h +238 -0
  371. data/ext/v8/upstream/3.1.8/src/scopes.cc +1063 -0
  372. data/ext/v8/upstream/3.1.8/src/scopes.h +494 -0
  373. data/ext/v8/upstream/3.1.8/src/serialize.cc +1535 -0
  374. data/ext/v8/upstream/3.1.8/src/serialize.h +584 -0
  375. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/shell.h +0 -0
  376. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/simulator.h +0 -0
  377. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/smart-pointer.h +0 -0
  378. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/snapshot-common.cc +0 -0
  379. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/snapshot-empty.cc +0 -0
  380. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/snapshot.h +0 -0
  381. data/ext/v8/upstream/3.1.8/src/spaces-inl.h +524 -0
  382. data/ext/v8/upstream/3.1.8/src/spaces.cc +3254 -0
  383. data/ext/v8/upstream/3.1.8/src/spaces.h +2362 -0
  384. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/splay-tree-inl.h +0 -0
  385. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/splay-tree.h +0 -0
  386. data/ext/v8/upstream/3.1.8/src/string-search.cc +40 -0
  387. data/ext/v8/upstream/3.1.8/src/string-search.h +567 -0
  388. data/ext/v8/upstream/3.1.8/src/string-stream.cc +584 -0
  389. data/ext/v8/upstream/3.1.8/src/string-stream.h +191 -0
  390. data/ext/v8/upstream/3.1.8/src/string.js +915 -0
  391. data/ext/v8/upstream/3.1.8/src/strtod.cc +440 -0
  392. data/ext/v8/upstream/3.1.8/src/strtod.h +40 -0
  393. data/ext/v8/upstream/3.1.8/src/stub-cache.cc +1878 -0
  394. data/ext/v8/upstream/3.1.8/src/stub-cache.h +849 -0
  395. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/third_party/valgrind/valgrind.h +0 -0
  396. data/ext/v8/upstream/3.1.8/src/token.cc +63 -0
  397. data/ext/v8/upstream/3.1.8/src/token.h +288 -0
  398. data/ext/v8/upstream/3.1.8/src/top.cc +1152 -0
  399. data/ext/v8/upstream/3.1.8/src/top.h +608 -0
  400. data/ext/v8/upstream/3.1.8/src/type-info.cc +406 -0
  401. data/ext/v8/upstream/3.1.8/src/type-info.h +283 -0
  402. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/unbound-queue-inl.h +0 -0
  403. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/unbound-queue.h +0 -0
  404. data/ext/v8/upstream/3.1.8/src/unicode-inl.h +238 -0
  405. data/ext/v8/upstream/3.1.8/src/unicode.cc +1624 -0
  406. data/ext/v8/upstream/3.1.8/src/unicode.h +280 -0
  407. data/ext/v8/upstream/3.1.8/src/uri.js +402 -0
  408. data/ext/v8/upstream/3.1.8/src/utils.cc +371 -0
  409. data/ext/v8/upstream/3.1.8/src/utils.h +793 -0
  410. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/v8-counters.cc +0 -0
  411. data/ext/v8/upstream/3.1.8/src/v8-counters.h +290 -0
  412. data/ext/v8/upstream/3.1.8/src/v8.cc +270 -0
  413. data/ext/v8/upstream/3.1.8/src/v8.h +127 -0
  414. data/ext/v8/upstream/3.1.8/src/v8checks.h +64 -0
  415. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/v8dll-main.cc +0 -0
  416. data/ext/v8/upstream/3.1.8/src/v8globals.h +480 -0
  417. data/ext/v8/upstream/3.1.8/src/v8natives.js +1252 -0
  418. data/ext/v8/upstream/3.1.8/src/v8preparserdll-main.cc +39 -0
  419. data/ext/v8/upstream/3.1.8/src/v8threads.cc +440 -0
  420. data/ext/v8/upstream/3.1.8/src/v8threads.h +157 -0
  421. data/ext/v8/upstream/3.1.8/src/v8utils.h +354 -0
  422. data/ext/v8/upstream/3.1.8/src/variables.cc +132 -0
  423. data/ext/v8/upstream/3.1.8/src/variables.h +212 -0
  424. data/ext/v8/upstream/3.1.8/src/version.cc +95 -0
  425. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/version.h +0 -0
  426. data/ext/v8/upstream/3.1.8/src/virtual-frame-heavy-inl.h +190 -0
  427. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-heavy.cc +0 -0
  428. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-inl.h +0 -0
  429. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-light-inl.h +0 -0
  430. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-light.cc +0 -0
  431. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame.cc +0 -0
  432. data/ext/v8/upstream/3.1.8/src/virtual-frame.h +59 -0
  433. data/ext/v8/upstream/3.1.8/src/vm-state-inl.h +134 -0
  434. data/ext/v8/upstream/3.1.8/src/vm-state.h +68 -0
  435. data/ext/v8/upstream/3.1.8/src/win32-headers.h +95 -0
  436. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64-inl.h +455 -0
  437. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.cc +3162 -0
  438. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.h +1584 -0
  439. data/ext/v8/upstream/3.1.8/src/x64/builtins-x64.cc +1492 -0
  440. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.cc +5150 -0
  441. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.h +519 -0
  442. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64-inl.h +46 -0
  443. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.cc +8835 -0
  444. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.h +750 -0
  445. data/ext/v8/upstream/3.1.8/src/x64/cpu-x64.cc +86 -0
  446. data/ext/v8/upstream/3.1.8/src/x64/debug-x64.cc +316 -0
  447. data/ext/v8/upstream/3.1.8/src/x64/deoptimizer-x64.cc +781 -0
  448. data/ext/v8/upstream/3.1.8/src/x64/disasm-x64.cc +1737 -0
  449. data/ext/v8/upstream/3.1.8/src/x64/frames-x64.cc +45 -0
  450. data/ext/v8/upstream/3.1.8/src/x64/frames-x64.h +130 -0
  451. data/ext/v8/upstream/3.1.8/src/x64/full-codegen-x64.cc +3984 -0
  452. data/ext/v8/upstream/3.1.8/src/x64/ic-x64.cc +1761 -0
  453. data/ext/v8/upstream/3.1.8/src/x64/jump-target-x64.cc +437 -0
  454. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.cc +3639 -0
  455. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.h +305 -0
  456. data/ext/v8/upstream/3.1.8/src/x64/lithium-gap-resolver-x64.cc +320 -0
  457. data/ext/v8/upstream/3.1.8/src/x64/lithium-gap-resolver-x64.h +74 -0
  458. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.cc +2044 -0
  459. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.h +2052 -0
  460. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.cc +2660 -0
  461. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.h +1852 -0
  462. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.cc +1382 -0
  463. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.h +278 -0
  464. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/register-allocator-x64-inl.h +0 -0
  465. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/register-allocator-x64.cc +0 -0
  466. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/register-allocator-x64.h +0 -0
  467. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/simulator-x64.cc +0 -0
  468. data/ext/v8/upstream/3.1.8/src/x64/simulator-x64.h +71 -0
  469. data/ext/v8/upstream/3.1.8/src/x64/stub-cache-x64.cc +3509 -0
  470. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.cc +1292 -0
  471. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.h +593 -0
  472. data/ext/v8/upstream/3.1.8/src/zone-inl.h +83 -0
  473. data/ext/v8/upstream/3.1.8/src/zone.cc +195 -0
  474. data/ext/v8/upstream/3.1.8/src/zone.h +233 -0
  475. data/ext/v8/upstream/3.1.8/tools/codemap.js +265 -0
  476. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/consarray.js +0 -0
  477. data/ext/v8/upstream/3.1.8/tools/csvparser.js +78 -0
  478. data/ext/v8/upstream/3.1.8/tools/disasm.py +92 -0
  479. data/ext/v8/upstream/3.1.8/tools/gc-nvp-trace-processor.py +328 -0
  480. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/generate-ten-powers.scm +0 -0
  481. data/ext/v8/upstream/3.1.8/tools/grokdump.py +840 -0
  482. data/ext/v8/upstream/3.1.8/tools/gyp/v8.gyp +869 -0
  483. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/js2c.py +0 -0
  484. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/jsmin.py +0 -0
  485. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/linux-tick-processor +0 -0
  486. data/ext/v8/upstream/3.1.8/tools/ll_prof.py +919 -0
  487. data/ext/v8/upstream/3.1.8/tools/logreader.js +185 -0
  488. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/mac-nm +0 -0
  489. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/mac-tick-processor +0 -0
  490. data/ext/v8/upstream/3.1.8/tools/oom_dump/README +31 -0
  491. data/ext/v8/upstream/3.1.8/tools/oom_dump/SConstruct +42 -0
  492. data/ext/v8/upstream/3.1.8/tools/oom_dump/oom_dump.cc +288 -0
  493. data/ext/v8/upstream/3.1.8/tools/presubmit.py +305 -0
  494. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/process-heap-prof.py +0 -0
  495. data/ext/v8/upstream/3.1.8/tools/profile.js +751 -0
  496. data/ext/v8/upstream/3.1.8/tools/profile_view.js +219 -0
  497. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/run-valgrind.py +0 -0
  498. data/ext/v8/upstream/3.1.8/tools/splaytree.js +316 -0
  499. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/stats-viewer.py +0 -0
  500. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/tickprocessor-driver.js +0 -0
  501. data/ext/v8/upstream/3.1.8/tools/tickprocessor.js +863 -0
  502. data/ext/v8/upstream/3.1.8/tools/utils.py +96 -0
  503. data/ext/v8/upstream/3.1.8/tools/visual_studio/README.txt +70 -0
  504. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/arm.vsprops +0 -0
  505. data/ext/v8/upstream/3.1.8/tools/visual_studio/common.vsprops +34 -0
  506. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8.vcproj +0 -0
  507. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8_arm.vcproj +0 -0
  508. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8_x64.vcproj +0 -0
  509. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8js2c.cmd +0 -0
  510. data/ext/v8/upstream/3.1.8/tools/visual_studio/debug.vsprops +17 -0
  511. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/ia32.vsprops +0 -0
  512. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/js2c.cmd +0 -0
  513. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/release.vsprops +0 -0
  514. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8.sln +0 -0
  515. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8.vcproj +0 -0
  516. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_arm.sln +0 -0
  517. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_arm.vcproj +0 -0
  518. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base.vcproj +1296 -0
  519. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_arm.vcproj +1234 -0
  520. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_x64.vcproj +1296 -0
  521. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_cctest.vcproj +0 -0
  522. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_cctest_arm.vcproj +0 -0
  523. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_cctest_x64.vcproj +0 -0
  524. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_mksnapshot.vcproj +0 -0
  525. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_mksnapshot_x64.vcproj +0 -0
  526. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_process_sample.vcproj +0 -0
  527. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_process_sample_arm.vcproj +0 -0
  528. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_process_sample_x64.vcproj +0 -0
  529. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_shell_sample.vcproj +147 -0
  530. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_shell_sample_arm.vcproj +147 -0
  531. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_shell_sample_x64.vcproj +163 -0
  532. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot.vcproj +0 -0
  533. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot_cc.vcproj +0 -0
  534. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot_cc_x64.vcproj +0 -0
  535. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot_x64.vcproj +0 -0
  536. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_x64.sln +0 -0
  537. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_x64.vcproj +0 -0
  538. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/x64.vsprops +0 -0
  539. data/ext/v8/upstream/3.1.8/tools/windows-tick-processor.bat +30 -0
  540. data/ext/v8/upstream/Makefile +2 -1
  541. data/ext/v8/v8_template.cpp +2 -2
  542. data/lib/v8/version.rb +1 -1
  543. data/spec/redjs/jsapi_spec.rb +2 -2
  544. metadata +552 -490
  545. data/ext/v8/upstream/2.3.3/.gitignore +0 -26
  546. data/ext/v8/upstream/2.3.3/AUTHORS +0 -31
  547. data/ext/v8/upstream/2.3.3/ChangeLog +0 -1916
  548. data/ext/v8/upstream/2.3.3/LICENSE +0 -55
  549. data/ext/v8/upstream/2.3.3/SConstruct +0 -1154
  550. data/ext/v8/upstream/2.3.3/include/v8-debug.h +0 -381
  551. data/ext/v8/upstream/2.3.3/include/v8-profiler.h +0 -353
  552. data/ext/v8/upstream/2.3.3/include/v8.h +0 -3616
  553. data/ext/v8/upstream/2.3.3/src/SConscript +0 -330
  554. data/ext/v8/upstream/2.3.3/src/accessors.cc +0 -661
  555. data/ext/v8/upstream/2.3.3/src/accessors.h +0 -114
  556. data/ext/v8/upstream/2.3.3/src/allocation.cc +0 -198
  557. data/ext/v8/upstream/2.3.3/src/allocation.h +0 -169
  558. data/ext/v8/upstream/2.3.3/src/api.cc +0 -4795
  559. data/ext/v8/upstream/2.3.3/src/api.h +0 -485
  560. data/ext/v8/upstream/2.3.3/src/apiutils.h +0 -69
  561. data/ext/v8/upstream/2.3.3/src/arguments.h +0 -96
  562. data/ext/v8/upstream/2.3.3/src/arm/assembler-arm-inl.h +0 -305
  563. data/ext/v8/upstream/2.3.3/src/arm/assembler-arm.cc +0 -2580
  564. data/ext/v8/upstream/2.3.3/src/arm/assembler-arm.h +0 -1275
  565. data/ext/v8/upstream/2.3.3/src/arm/builtins-arm.cc +0 -1320
  566. data/ext/v8/upstream/2.3.3/src/arm/codegen-arm-inl.h +0 -48
  567. data/ext/v8/upstream/2.3.3/src/arm/codegen-arm.cc +0 -11398
  568. data/ext/v8/upstream/2.3.3/src/arm/codegen-arm.h +0 -1102
  569. data/ext/v8/upstream/2.3.3/src/arm/constants-arm.cc +0 -154
  570. data/ext/v8/upstream/2.3.3/src/arm/constants-arm.h +0 -388
  571. data/ext/v8/upstream/2.3.3/src/arm/cpu-arm.cc +0 -142
  572. data/ext/v8/upstream/2.3.3/src/arm/debug-arm.cc +0 -309
  573. data/ext/v8/upstream/2.3.3/src/arm/disasm-arm.cc +0 -1459
  574. data/ext/v8/upstream/2.3.3/src/arm/fast-codegen-arm.cc +0 -241
  575. data/ext/v8/upstream/2.3.3/src/arm/frames-arm.cc +0 -123
  576. data/ext/v8/upstream/2.3.3/src/arm/frames-arm.h +0 -162
  577. data/ext/v8/upstream/2.3.3/src/arm/full-codegen-arm.cc +0 -3178
  578. data/ext/v8/upstream/2.3.3/src/arm/ic-arm.cc +0 -2258
  579. data/ext/v8/upstream/2.3.3/src/arm/jump-target-arm.cc +0 -164
  580. data/ext/v8/upstream/2.3.3/src/arm/macro-assembler-arm.cc +0 -1892
  581. data/ext/v8/upstream/2.3.3/src/arm/macro-assembler-arm.h +0 -727
  582. data/ext/v8/upstream/2.3.3/src/arm/regexp-macro-assembler-arm.cc +0 -1261
  583. data/ext/v8/upstream/2.3.3/src/arm/regexp-macro-assembler-arm.h +0 -266
  584. data/ext/v8/upstream/2.3.3/src/arm/simulator-arm.cc +0 -2822
  585. data/ext/v8/upstream/2.3.3/src/arm/simulator-arm.h +0 -361
  586. data/ext/v8/upstream/2.3.3/src/arm/stub-cache-arm.cc +0 -2387
  587. data/ext/v8/upstream/2.3.3/src/arm/virtual-frame-arm.cc +0 -834
  588. data/ext/v8/upstream/2.3.3/src/arm/virtual-frame-arm.h +0 -519
  589. data/ext/v8/upstream/2.3.3/src/array.js +0 -1127
  590. data/ext/v8/upstream/2.3.3/src/assembler.cc +0 -801
  591. data/ext/v8/upstream/2.3.3/src/assembler.h +0 -573
  592. data/ext/v8/upstream/2.3.3/src/ast-inl.h +0 -81
  593. data/ext/v8/upstream/2.3.3/src/ast.cc +0 -1152
  594. data/ext/v8/upstream/2.3.3/src/ast.h +0 -2106
  595. data/ext/v8/upstream/2.3.3/src/bootstrapper.cc +0 -1819
  596. data/ext/v8/upstream/2.3.3/src/builtins.cc +0 -1529
  597. data/ext/v8/upstream/2.3.3/src/builtins.h +0 -263
  598. data/ext/v8/upstream/2.3.3/src/bytecodes-irregexp.h +0 -104
  599. data/ext/v8/upstream/2.3.3/src/cached-powers.h +0 -119
  600. data/ext/v8/upstream/2.3.3/src/char-predicates-inl.h +0 -86
  601. data/ext/v8/upstream/2.3.3/src/checks.cc +0 -100
  602. data/ext/v8/upstream/2.3.3/src/checks.h +0 -310
  603. data/ext/v8/upstream/2.3.3/src/circular-queue.cc +0 -121
  604. data/ext/v8/upstream/2.3.3/src/code-stubs.cc +0 -177
  605. data/ext/v8/upstream/2.3.3/src/code-stubs.h +0 -177
  606. data/ext/v8/upstream/2.3.3/src/codegen-inl.h +0 -60
  607. data/ext/v8/upstream/2.3.3/src/codegen.cc +0 -516
  608. data/ext/v8/upstream/2.3.3/src/codegen.h +0 -897
  609. data/ext/v8/upstream/2.3.3/src/compilation-cache.cc +0 -562
  610. data/ext/v8/upstream/2.3.3/src/compilation-cache.h +0 -102
  611. data/ext/v8/upstream/2.3.3/src/compiler.cc +0 -654
  612. data/ext/v8/upstream/2.3.3/src/compiler.h +0 -299
  613. data/ext/v8/upstream/2.3.3/src/contexts.cc +0 -256
  614. data/ext/v8/upstream/2.3.3/src/contexts.h +0 -342
  615. data/ext/v8/upstream/2.3.3/src/conversions.cc +0 -1119
  616. data/ext/v8/upstream/2.3.3/src/conversions.h +0 -123
  617. data/ext/v8/upstream/2.3.3/src/counters.h +0 -239
  618. data/ext/v8/upstream/2.3.3/src/cpu-profiler-inl.h +0 -100
  619. data/ext/v8/upstream/2.3.3/src/cpu-profiler.cc +0 -538
  620. data/ext/v8/upstream/2.3.3/src/cpu-profiler.h +0 -285
  621. data/ext/v8/upstream/2.3.3/src/d8-debug.cc +0 -356
  622. data/ext/v8/upstream/2.3.3/src/d8-debug.h +0 -155
  623. data/ext/v8/upstream/2.3.3/src/d8.cc +0 -783
  624. data/ext/v8/upstream/2.3.3/src/d8.h +0 -227
  625. data/ext/v8/upstream/2.3.3/src/d8.js +0 -1683
  626. data/ext/v8/upstream/2.3.3/src/data-flow.cc +0 -758
  627. data/ext/v8/upstream/2.3.3/src/data-flow.h +0 -278
  628. data/ext/v8/upstream/2.3.3/src/date.js +0 -1059
  629. data/ext/v8/upstream/2.3.3/src/dateparser-inl.h +0 -123
  630. data/ext/v8/upstream/2.3.3/src/dateparser.h +0 -244
  631. data/ext/v8/upstream/2.3.3/src/debug-agent.cc +0 -427
  632. data/ext/v8/upstream/2.3.3/src/debug-agent.h +0 -129
  633. data/ext/v8/upstream/2.3.3/src/debug-debugger.js +0 -2227
  634. data/ext/v8/upstream/2.3.3/src/debug.cc +0 -3005
  635. data/ext/v8/upstream/2.3.3/src/debug.h +0 -993
  636. data/ext/v8/upstream/2.3.3/src/disassembler.cc +0 -312
  637. data/ext/v8/upstream/2.3.3/src/double.h +0 -169
  638. data/ext/v8/upstream/2.3.3/src/dtoa-config.c +0 -92
  639. data/ext/v8/upstream/2.3.3/src/dtoa.cc +0 -77
  640. data/ext/v8/upstream/2.3.3/src/dtoa.h +0 -81
  641. data/ext/v8/upstream/2.3.3/src/execution.cc +0 -809
  642. data/ext/v8/upstream/2.3.3/src/execution.h +0 -336
  643. data/ext/v8/upstream/2.3.3/src/factory.cc +0 -1003
  644. data/ext/v8/upstream/2.3.3/src/factory.h +0 -410
  645. data/ext/v8/upstream/2.3.3/src/fast-codegen.cc +0 -746
  646. data/ext/v8/upstream/2.3.3/src/fast-codegen.h +0 -161
  647. data/ext/v8/upstream/2.3.3/src/fast-dtoa.cc +0 -505
  648. data/ext/v8/upstream/2.3.3/src/fast-dtoa.h +0 -58
  649. data/ext/v8/upstream/2.3.3/src/flag-definitions.h +0 -455
  650. data/ext/v8/upstream/2.3.3/src/flags.cc +0 -551
  651. data/ext/v8/upstream/2.3.3/src/flags.h +0 -81
  652. data/ext/v8/upstream/2.3.3/src/flow-graph.cc +0 -763
  653. data/ext/v8/upstream/2.3.3/src/flow-graph.h +0 -180
  654. data/ext/v8/upstream/2.3.3/src/frame-element.h +0 -273
  655. data/ext/v8/upstream/2.3.3/src/frames-inl.h +0 -217
  656. data/ext/v8/upstream/2.3.3/src/frames.cc +0 -826
  657. data/ext/v8/upstream/2.3.3/src/frames.h +0 -682
  658. data/ext/v8/upstream/2.3.3/src/full-codegen.cc +0 -1443
  659. data/ext/v8/upstream/2.3.3/src/full-codegen.h +0 -548
  660. data/ext/v8/upstream/2.3.3/src/func-name-inferrer.cc +0 -76
  661. data/ext/v8/upstream/2.3.3/src/func-name-inferrer.h +0 -135
  662. data/ext/v8/upstream/2.3.3/src/global-handles.cc +0 -520
  663. data/ext/v8/upstream/2.3.3/src/global-handles.h +0 -180
  664. data/ext/v8/upstream/2.3.3/src/globals.h +0 -669
  665. data/ext/v8/upstream/2.3.3/src/handles-inl.h +0 -76
  666. data/ext/v8/upstream/2.3.3/src/handles.cc +0 -825
  667. data/ext/v8/upstream/2.3.3/src/handles.h +0 -393
  668. data/ext/v8/upstream/2.3.3/src/hashmap.cc +0 -226
  669. data/ext/v8/upstream/2.3.3/src/hashmap.h +0 -120
  670. data/ext/v8/upstream/2.3.3/src/heap-inl.h +0 -493
  671. data/ext/v8/upstream/2.3.3/src/heap-profiler.cc +0 -779
  672. data/ext/v8/upstream/2.3.3/src/heap-profiler.h +0 -323
  673. data/ext/v8/upstream/2.3.3/src/heap.cc +0 -4994
  674. data/ext/v8/upstream/2.3.3/src/heap.h +0 -1984
  675. data/ext/v8/upstream/2.3.3/src/ia32/assembler-ia32-inl.h +0 -360
  676. data/ext/v8/upstream/2.3.3/src/ia32/assembler-ia32.cc +0 -2600
  677. data/ext/v8/upstream/2.3.3/src/ia32/assembler-ia32.h +0 -969
  678. data/ext/v8/upstream/2.3.3/src/ia32/builtins-ia32.cc +0 -1261
  679. data/ext/v8/upstream/2.3.3/src/ia32/codegen-ia32.cc +0 -13968
  680. data/ext/v8/upstream/2.3.3/src/ia32/codegen-ia32.h +0 -1097
  681. data/ext/v8/upstream/2.3.3/src/ia32/cpu-ia32.cc +0 -83
  682. data/ext/v8/upstream/2.3.3/src/ia32/debug-ia32.cc +0 -309
  683. data/ext/v8/upstream/2.3.3/src/ia32/disasm-ia32.cc +0 -1471
  684. data/ext/v8/upstream/2.3.3/src/ia32/fast-codegen-ia32.cc +0 -954
  685. data/ext/v8/upstream/2.3.3/src/ia32/fast-codegen-ia32.h +0 -155
  686. data/ext/v8/upstream/2.3.3/src/ia32/frames-ia32.cc +0 -115
  687. data/ext/v8/upstream/2.3.3/src/ia32/frames-ia32.h +0 -135
  688. data/ext/v8/upstream/2.3.3/src/ia32/full-codegen-ia32.cc +0 -3281
  689. data/ext/v8/upstream/2.3.3/src/ia32/ic-ia32.cc +0 -1966
  690. data/ext/v8/upstream/2.3.3/src/ia32/macro-assembler-ia32.cc +0 -1610
  691. data/ext/v8/upstream/2.3.3/src/ia32/macro-assembler-ia32.h +0 -610
  692. data/ext/v8/upstream/2.3.3/src/ia32/regexp-macro-assembler-ia32.cc +0 -1247
  693. data/ext/v8/upstream/2.3.3/src/ia32/regexp-macro-assembler-ia32.h +0 -214
  694. data/ext/v8/upstream/2.3.3/src/ia32/simulator-ia32.h +0 -62
  695. data/ext/v8/upstream/2.3.3/src/ia32/stub-cache-ia32.cc +0 -2750
  696. data/ext/v8/upstream/2.3.3/src/ia32/virtual-frame-ia32.cc +0 -1334
  697. data/ext/v8/upstream/2.3.3/src/ia32/virtual-frame-ia32.h +0 -627
  698. data/ext/v8/upstream/2.3.3/src/ic-inl.h +0 -120
  699. data/ext/v8/upstream/2.3.3/src/ic.cc +0 -1827
  700. data/ext/v8/upstream/2.3.3/src/ic.h +0 -515
  701. data/ext/v8/upstream/2.3.3/src/interpreter-irregexp.cc +0 -646
  702. data/ext/v8/upstream/2.3.3/src/json.js +0 -268
  703. data/ext/v8/upstream/2.3.3/src/jsregexp.cc +0 -5283
  704. data/ext/v8/upstream/2.3.3/src/jsregexp.h +0 -1463
  705. data/ext/v8/upstream/2.3.3/src/jump-target-heavy.cc +0 -429
  706. data/ext/v8/upstream/2.3.3/src/jump-target-heavy.h +0 -244
  707. data/ext/v8/upstream/2.3.3/src/jump-target-light.cc +0 -110
  708. data/ext/v8/upstream/2.3.3/src/jump-target-light.h +0 -192
  709. data/ext/v8/upstream/2.3.3/src/list-inl.h +0 -166
  710. data/ext/v8/upstream/2.3.3/src/list.h +0 -159
  711. data/ext/v8/upstream/2.3.3/src/liveedit-debugger.js +0 -1057
  712. data/ext/v8/upstream/2.3.3/src/liveedit.cc +0 -1480
  713. data/ext/v8/upstream/2.3.3/src/liveedit.h +0 -170
  714. data/ext/v8/upstream/2.3.3/src/log-utils.cc +0 -497
  715. data/ext/v8/upstream/2.3.3/src/log-utils.h +0 -289
  716. data/ext/v8/upstream/2.3.3/src/log.cc +0 -1561
  717. data/ext/v8/upstream/2.3.3/src/log.h +0 -384
  718. data/ext/v8/upstream/2.3.3/src/macro-assembler.h +0 -86
  719. data/ext/v8/upstream/2.3.3/src/macros.py +0 -177
  720. data/ext/v8/upstream/2.3.3/src/mark-compact.cc +0 -2330
  721. data/ext/v8/upstream/2.3.3/src/mark-compact.h +0 -451
  722. data/ext/v8/upstream/2.3.3/src/math.js +0 -264
  723. data/ext/v8/upstream/2.3.3/src/memory.h +0 -74
  724. data/ext/v8/upstream/2.3.3/src/messages.cc +0 -183
  725. data/ext/v8/upstream/2.3.3/src/messages.h +0 -113
  726. data/ext/v8/upstream/2.3.3/src/messages.js +0 -982
  727. data/ext/v8/upstream/2.3.3/src/mips/assembler-mips.h +0 -668
  728. data/ext/v8/upstream/2.3.3/src/mips/builtins-mips.cc +0 -205
  729. data/ext/v8/upstream/2.3.3/src/mips/codegen-mips.h +0 -434
  730. data/ext/v8/upstream/2.3.3/src/mips/debug-mips.cc +0 -131
  731. data/ext/v8/upstream/2.3.3/src/mips/frames-mips.cc +0 -102
  732. data/ext/v8/upstream/2.3.3/src/mips/ic-mips.cc +0 -220
  733. data/ext/v8/upstream/2.3.3/src/mips/simulator-mips.cc +0 -1651
  734. data/ext/v8/upstream/2.3.3/src/mips/simulator-mips.h +0 -311
  735. data/ext/v8/upstream/2.3.3/src/mips/stub-cache-mips.cc +0 -403
  736. data/ext/v8/upstream/2.3.3/src/mirror-debugger.js +0 -2380
  737. data/ext/v8/upstream/2.3.3/src/objects-debug.cc +0 -1366
  738. data/ext/v8/upstream/2.3.3/src/objects-inl.h +0 -3333
  739. data/ext/v8/upstream/2.3.3/src/objects.cc +0 -8820
  740. data/ext/v8/upstream/2.3.3/src/objects.h +0 -5373
  741. data/ext/v8/upstream/2.3.3/src/oprofile-agent.cc +0 -108
  742. data/ext/v8/upstream/2.3.3/src/oprofile-agent.h +0 -77
  743. data/ext/v8/upstream/2.3.3/src/parser.cc +0 -5207
  744. data/ext/v8/upstream/2.3.3/src/parser.h +0 -197
  745. data/ext/v8/upstream/2.3.3/src/platform-freebsd.cc +0 -667
  746. data/ext/v8/upstream/2.3.3/src/platform-linux.cc +0 -862
  747. data/ext/v8/upstream/2.3.3/src/platform-macos.cc +0 -665
  748. data/ext/v8/upstream/2.3.3/src/platform-nullos.cc +0 -454
  749. data/ext/v8/upstream/2.3.3/src/platform-openbsd.cc +0 -622
  750. data/ext/v8/upstream/2.3.3/src/platform-posix.cc +0 -362
  751. data/ext/v8/upstream/2.3.3/src/platform-solaris.cc +0 -653
  752. data/ext/v8/upstream/2.3.3/src/platform-win32.cc +0 -1911
  753. data/ext/v8/upstream/2.3.3/src/platform.h +0 -577
  754. data/ext/v8/upstream/2.3.3/src/powers-ten.h +0 -2461
  755. data/ext/v8/upstream/2.3.3/src/prettyprinter.cc +0 -1531
  756. data/ext/v8/upstream/2.3.3/src/prettyprinter.h +0 -221
  757. data/ext/v8/upstream/2.3.3/src/profile-generator-inl.h +0 -148
  758. data/ext/v8/upstream/2.3.3/src/profile-generator.cc +0 -1830
  759. data/ext/v8/upstream/2.3.3/src/profile-generator.h +0 -853
  760. data/ext/v8/upstream/2.3.3/src/property.cc +0 -96
  761. data/ext/v8/upstream/2.3.3/src/property.h +0 -315
  762. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-irregexp.cc +0 -464
  763. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-irregexp.h +0 -141
  764. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-tracer.cc +0 -356
  765. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-tracer.h +0 -103
  766. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler.cc +0 -261
  767. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler.h +0 -228
  768. data/ext/v8/upstream/2.3.3/src/regexp.js +0 -549
  769. data/ext/v8/upstream/2.3.3/src/rewriter.cc +0 -1038
  770. data/ext/v8/upstream/2.3.3/src/rewriter.h +0 -54
  771. data/ext/v8/upstream/2.3.3/src/runtime.cc +0 -10599
  772. data/ext/v8/upstream/2.3.3/src/runtime.h +0 -459
  773. data/ext/v8/upstream/2.3.3/src/runtime.js +0 -629
  774. data/ext/v8/upstream/2.3.3/src/scanner.cc +0 -1346
  775. data/ext/v8/upstream/2.3.3/src/scanner.h +0 -503
  776. data/ext/v8/upstream/2.3.3/src/scopeinfo.cc +0 -637
  777. data/ext/v8/upstream/2.3.3/src/scopeinfo.h +0 -233
  778. data/ext/v8/upstream/2.3.3/src/scopes.cc +0 -962
  779. data/ext/v8/upstream/2.3.3/src/scopes.h +0 -400
  780. data/ext/v8/upstream/2.3.3/src/serialize.cc +0 -1461
  781. data/ext/v8/upstream/2.3.3/src/serialize.h +0 -581
  782. data/ext/v8/upstream/2.3.3/src/spaces-inl.h +0 -483
  783. data/ext/v8/upstream/2.3.3/src/spaces.cc +0 -2901
  784. data/ext/v8/upstream/2.3.3/src/spaces.h +0 -2197
  785. data/ext/v8/upstream/2.3.3/src/string-stream.cc +0 -584
  786. data/ext/v8/upstream/2.3.3/src/string-stream.h +0 -189
  787. data/ext/v8/upstream/2.3.3/src/string.js +0 -1006
  788. data/ext/v8/upstream/2.3.3/src/stub-cache.cc +0 -1379
  789. data/ext/v8/upstream/2.3.3/src/stub-cache.h +0 -756
  790. data/ext/v8/upstream/2.3.3/src/third_party/dtoa/COPYING +0 -15
  791. data/ext/v8/upstream/2.3.3/src/third_party/dtoa/dtoa.c +0 -3334
  792. data/ext/v8/upstream/2.3.3/src/token.cc +0 -56
  793. data/ext/v8/upstream/2.3.3/src/token.h +0 -270
  794. data/ext/v8/upstream/2.3.3/src/top.cc +0 -1067
  795. data/ext/v8/upstream/2.3.3/src/top.h +0 -463
  796. data/ext/v8/upstream/2.3.3/src/type-info.cc +0 -53
  797. data/ext/v8/upstream/2.3.3/src/type-info.h +0 -244
  798. data/ext/v8/upstream/2.3.3/src/unicode-inl.h +0 -238
  799. data/ext/v8/upstream/2.3.3/src/unicode.cc +0 -749
  800. data/ext/v8/upstream/2.3.3/src/unicode.h +0 -279
  801. data/ext/v8/upstream/2.3.3/src/uri.js +0 -415
  802. data/ext/v8/upstream/2.3.3/src/utils.cc +0 -285
  803. data/ext/v8/upstream/2.3.3/src/utils.h +0 -745
  804. data/ext/v8/upstream/2.3.3/src/v8-counters.h +0 -250
  805. data/ext/v8/upstream/2.3.3/src/v8.cc +0 -228
  806. data/ext/v8/upstream/2.3.3/src/v8.h +0 -121
  807. data/ext/v8/upstream/2.3.3/src/v8natives.js +0 -1188
  808. data/ext/v8/upstream/2.3.3/src/v8threads.cc +0 -461
  809. data/ext/v8/upstream/2.3.3/src/v8threads.h +0 -159
  810. data/ext/v8/upstream/2.3.3/src/variables.cc +0 -119
  811. data/ext/v8/upstream/2.3.3/src/variables.h +0 -205
  812. data/ext/v8/upstream/2.3.3/src/version.cc +0 -88
  813. data/ext/v8/upstream/2.3.3/src/virtual-frame-heavy-inl.h +0 -192
  814. data/ext/v8/upstream/2.3.3/src/virtual-frame.h +0 -46
  815. data/ext/v8/upstream/2.3.3/src/vm-state-inl.h +0 -137
  816. data/ext/v8/upstream/2.3.3/src/vm-state.cc +0 -39
  817. data/ext/v8/upstream/2.3.3/src/vm-state.h +0 -77
  818. data/ext/v8/upstream/2.3.3/src/x64/assembler-x64-inl.h +0 -400
  819. data/ext/v8/upstream/2.3.3/src/x64/assembler-x64.cc +0 -2963
  820. data/ext/v8/upstream/2.3.3/src/x64/assembler-x64.h +0 -1438
  821. data/ext/v8/upstream/2.3.3/src/x64/builtins-x64.cc +0 -1296
  822. data/ext/v8/upstream/2.3.3/src/x64/codegen-x64-inl.h +0 -46
  823. data/ext/v8/upstream/2.3.3/src/x64/codegen-x64.cc +0 -12491
  824. data/ext/v8/upstream/2.3.3/src/x64/codegen-x64.h +0 -1090
  825. data/ext/v8/upstream/2.3.3/src/x64/cpu-x64.cc +0 -83
  826. data/ext/v8/upstream/2.3.3/src/x64/debug-x64.cc +0 -267
  827. data/ext/v8/upstream/2.3.3/src/x64/disasm-x64.cc +0 -1696
  828. data/ext/v8/upstream/2.3.3/src/x64/fast-codegen-x64.cc +0 -250
  829. data/ext/v8/upstream/2.3.3/src/x64/frames-x64.cc +0 -113
  830. data/ext/v8/upstream/2.3.3/src/x64/frames-x64.h +0 -125
  831. data/ext/v8/upstream/2.3.3/src/x64/full-codegen-x64.cc +0 -3270
  832. data/ext/v8/upstream/2.3.3/src/x64/ic-x64.cc +0 -1907
  833. data/ext/v8/upstream/2.3.3/src/x64/jump-target-x64.cc +0 -437
  834. data/ext/v8/upstream/2.3.3/src/x64/macro-assembler-x64.cc +0 -2793
  835. data/ext/v8/upstream/2.3.3/src/x64/macro-assembler-x64.h +0 -916
  836. data/ext/v8/upstream/2.3.3/src/x64/regexp-macro-assembler-x64.cc +0 -1374
  837. data/ext/v8/upstream/2.3.3/src/x64/regexp-macro-assembler-x64.h +0 -277
  838. data/ext/v8/upstream/2.3.3/src/x64/simulator-x64.h +0 -63
  839. data/ext/v8/upstream/2.3.3/src/x64/stub-cache-x64.cc +0 -2560
  840. data/ext/v8/upstream/2.3.3/src/x64/virtual-frame-x64.cc +0 -1264
  841. data/ext/v8/upstream/2.3.3/src/x64/virtual-frame-x64.h +0 -590
  842. data/ext/v8/upstream/2.3.3/src/zone-inl.h +0 -82
  843. data/ext/v8/upstream/2.3.3/src/zone.cc +0 -194
  844. data/ext/v8/upstream/2.3.3/src/zone.h +0 -221
  845. data/ext/v8/upstream/2.3.3/tools/codemap.js +0 -270
  846. data/ext/v8/upstream/2.3.3/tools/csvparser.js +0 -83
  847. data/ext/v8/upstream/2.3.3/tools/gc-nvp-trace-processor.py +0 -317
  848. data/ext/v8/upstream/2.3.3/tools/gyp/v8.gyp +0 -749
  849. data/ext/v8/upstream/2.3.3/tools/linux-tick-processor.py +0 -78
  850. data/ext/v8/upstream/2.3.3/tools/logreader.js +0 -338
  851. data/ext/v8/upstream/2.3.3/tools/oprofile/annotate +0 -7
  852. data/ext/v8/upstream/2.3.3/tools/oprofile/common +0 -19
  853. data/ext/v8/upstream/2.3.3/tools/oprofile/dump +0 -7
  854. data/ext/v8/upstream/2.3.3/tools/oprofile/report +0 -7
  855. data/ext/v8/upstream/2.3.3/tools/oprofile/reset +0 -7
  856. data/ext/v8/upstream/2.3.3/tools/oprofile/run +0 -14
  857. data/ext/v8/upstream/2.3.3/tools/oprofile/shutdown +0 -7
  858. data/ext/v8/upstream/2.3.3/tools/oprofile/start +0 -7
  859. data/ext/v8/upstream/2.3.3/tools/presubmit.py +0 -299
  860. data/ext/v8/upstream/2.3.3/tools/profile.js +0 -691
  861. data/ext/v8/upstream/2.3.3/tools/profile_view.js +0 -224
  862. data/ext/v8/upstream/2.3.3/tools/splaytree.js +0 -322
  863. data/ext/v8/upstream/2.3.3/tools/splaytree.py +0 -226
  864. data/ext/v8/upstream/2.3.3/tools/tickprocessor.js +0 -862
  865. data/ext/v8/upstream/2.3.3/tools/tickprocessor.py +0 -571
  866. data/ext/v8/upstream/2.3.3/tools/utils.py +0 -88
  867. data/ext/v8/upstream/2.3.3/tools/visual_studio/README.txt +0 -71
  868. data/ext/v8/upstream/2.3.3/tools/visual_studio/common.vsprops +0 -34
  869. data/ext/v8/upstream/2.3.3/tools/visual_studio/debug.vsprops +0 -17
  870. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_base.vcproj +0 -1143
  871. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_base_arm.vcproj +0 -1115
  872. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_base_x64.vcproj +0 -1096
  873. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_shell_sample.vcproj +0 -145
  874. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_shell_sample_arm.vcproj +0 -145
  875. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_shell_sample_x64.vcproj +0 -161
  876. data/ext/v8/upstream/2.3.3/tools/windows-tick-processor.bat +0 -29
  877. data/ext/v8/upstream/2.3.3/tools/windows-tick-processor.py +0 -137
@@ -0,0 +1,278 @@
1
+ // Copyright 2010 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+ #ifndef V8_X64_REGEXP_MACRO_ASSEMBLER_X64_H_
29
+ #define V8_X64_REGEXP_MACRO_ASSEMBLER_X64_H_
30
+
31
+ namespace v8 {
32
+ namespace internal {
33
+
34
+ #ifndef V8_INTERPRETED_REGEXP
35
+
36
+ class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler {
37
+ public:
38
+ RegExpMacroAssemblerX64(Mode mode, int registers_to_save);
39
+ virtual ~RegExpMacroAssemblerX64();
40
+ virtual int stack_limit_slack();
41
+ virtual void AdvanceCurrentPosition(int by);
42
+ virtual void AdvanceRegister(int reg, int by);
43
+ virtual void Backtrack();
44
+ virtual void Bind(Label* label);
45
+ virtual void CheckAtStart(Label* on_at_start);
46
+ virtual void CheckCharacter(uint32_t c, Label* on_equal);
47
+ virtual void CheckCharacterAfterAnd(uint32_t c,
48
+ uint32_t mask,
49
+ Label* on_equal);
50
+ virtual void CheckCharacterGT(uc16 limit, Label* on_greater);
51
+ virtual void CheckCharacterLT(uc16 limit, Label* on_less);
52
+ virtual void CheckCharacters(Vector<const uc16> str,
53
+ int cp_offset,
54
+ Label* on_failure,
55
+ bool check_end_of_string);
56
+ // A "greedy loop" is a loop that is both greedy and with a simple
57
+ // body. It has a particularly simple implementation.
58
+ virtual void CheckGreedyLoop(Label* on_tos_equals_current_position);
59
+ virtual void CheckNotAtStart(Label* on_not_at_start);
60
+ virtual void CheckNotBackReference(int start_reg, Label* on_no_match);
61
+ virtual void CheckNotBackReferenceIgnoreCase(int start_reg,
62
+ Label* on_no_match);
63
+ virtual void CheckNotRegistersEqual(int reg1, int reg2, Label* on_not_equal);
64
+ virtual void CheckNotCharacter(uint32_t c, Label* on_not_equal);
65
+ virtual void CheckNotCharacterAfterAnd(uint32_t c,
66
+ uint32_t mask,
67
+ Label* on_not_equal);
68
+ virtual void CheckNotCharacterAfterMinusAnd(uc16 c,
69
+ uc16 minus,
70
+ uc16 mask,
71
+ Label* on_not_equal);
72
+ // Checks whether the given offset from the current position is before
73
+ // the end of the string.
74
+ virtual void CheckPosition(int cp_offset, Label* on_outside_input);
75
+ virtual bool CheckSpecialCharacterClass(uc16 type,
76
+ Label* on_no_match);
77
+ virtual void Fail();
78
+ virtual Handle<Object> GetCode(Handle<String> source);
79
+ virtual void GoTo(Label* label);
80
+ virtual void IfRegisterGE(int reg, int comparand, Label* if_ge);
81
+ virtual void IfRegisterLT(int reg, int comparand, Label* if_lt);
82
+ virtual void IfRegisterEqPos(int reg, Label* if_eq);
83
+ virtual IrregexpImplementation Implementation();
84
+ virtual void LoadCurrentCharacter(int cp_offset,
85
+ Label* on_end_of_input,
86
+ bool check_bounds = true,
87
+ int characters = 1);
88
+ virtual void PopCurrentPosition();
89
+ virtual void PopRegister(int register_index);
90
+ virtual void PushBacktrack(Label* label);
91
+ virtual void PushCurrentPosition();
92
+ virtual void PushRegister(int register_index,
93
+ StackCheckFlag check_stack_limit);
94
+ virtual void ReadCurrentPositionFromRegister(int reg);
95
+ virtual void ReadStackPointerFromRegister(int reg);
96
+ virtual void SetCurrentPositionFromEnd(int by);
97
+ virtual void SetRegister(int register_index, int to);
98
+ virtual void Succeed();
99
+ virtual void WriteCurrentPositionToRegister(int reg, int cp_offset);
100
+ virtual void ClearRegisters(int reg_from, int reg_to);
101
+ virtual void WriteStackPointerToRegister(int reg);
102
+
103
+ static Result Match(Handle<Code> regexp,
104
+ Handle<String> subject,
105
+ int* offsets_vector,
106
+ int offsets_vector_length,
107
+ int previous_index);
108
+
109
+ static Result Execute(Code* code,
110
+ String* input,
111
+ int start_offset,
112
+ const byte* input_start,
113
+ const byte* input_end,
114
+ int* output,
115
+ bool at_start);
116
+
117
+ // Called from RegExp if the stack-guard is triggered.
118
+ // If the code object is relocated, the return address is fixed before
119
+ // returning.
120
+ static int CheckStackGuardState(Address* return_address,
121
+ Code* re_code,
122
+ Address re_frame);
123
+
124
+ private:
125
+ // Offsets from rbp of function parameters and stored registers.
126
+ static const int kFramePointer = 0;
127
+ // Above the frame pointer - function parameters and return address.
128
+ static const int kReturn_eip = kFramePointer + kPointerSize;
129
+ static const int kFrameAlign = kReturn_eip + kPointerSize;
130
+
131
+ #ifdef _WIN64
132
+ // Parameters (first four passed as registers, but with room on stack).
133
+ // In Microsoft 64-bit Calling Convention, there is room on the callers
134
+ // stack (before the return address) to spill parameter registers. We
135
+ // use this space to store the register passed parameters.
136
+ static const int kInputString = kFrameAlign;
137
+ // StartIndex is passed as 32 bit int.
138
+ static const int kStartIndex = kInputString + kPointerSize;
139
+ static const int kInputStart = kStartIndex + kPointerSize;
140
+ static const int kInputEnd = kInputStart + kPointerSize;
141
+ static const int kRegisterOutput = kInputEnd + kPointerSize;
142
+ static const int kStackHighEnd = kRegisterOutput + kPointerSize;
143
+ // DirectCall is passed as 32 bit int (values 0 or 1).
144
+ static const int kDirectCall = kStackHighEnd + kPointerSize;
145
+ #else
146
+ // In AMD64 ABI Calling Convention, the first six integer parameters
147
+ // are passed as registers, and caller must allocate space on the stack
148
+ // if it wants them stored. We push the parameters after the frame pointer.
149
+ static const int kInputString = kFramePointer - kPointerSize;
150
+ static const int kStartIndex = kInputString - kPointerSize;
151
+ static const int kInputStart = kStartIndex - kPointerSize;
152
+ static const int kInputEnd = kInputStart - kPointerSize;
153
+ static const int kRegisterOutput = kInputEnd - kPointerSize;
154
+ static const int kStackHighEnd = kRegisterOutput - kPointerSize;
155
+ static const int kDirectCall = kFrameAlign;
156
+ #endif
157
+
158
+ #ifdef _WIN64
159
+ // Microsoft calling convention has three callee-saved registers
160
+ // (that we are using). We push these after the frame pointer.
161
+ static const int kBackup_rsi = kFramePointer - kPointerSize;
162
+ static const int kBackup_rdi = kBackup_rsi - kPointerSize;
163
+ static const int kBackup_rbx = kBackup_rdi - kPointerSize;
164
+ static const int kLastCalleeSaveRegister = kBackup_rbx;
165
+ #else
166
+ // AMD64 Calling Convention has only one callee-save register that
167
+ // we use. We push this after the frame pointer (and after the
168
+ // parameters).
169
+ static const int kBackup_rbx = kStackHighEnd - kPointerSize;
170
+ static const int kLastCalleeSaveRegister = kBackup_rbx;
171
+ #endif
172
+
173
+ // When adding local variables remember to push space for them in
174
+ // the frame in GetCode.
175
+ static const int kInputStartMinusOne =
176
+ kLastCalleeSaveRegister - kPointerSize;
177
+
178
+ // First register address. Following registers are below it on the stack.
179
+ static const int kRegisterZero = kInputStartMinusOne - kPointerSize;
180
+
181
+ // Initial size of code buffer.
182
+ static const size_t kRegExpCodeSize = 1024;
183
+
184
+ // Load a number of characters at the given offset from the
185
+ // current position, into the current-character register.
186
+ void LoadCurrentCharacterUnchecked(int cp_offset, int character_count);
187
+
188
+ // Check whether preemption has been requested.
189
+ void CheckPreemption();
190
+
191
+ // Check whether we are exceeding the stack limit on the backtrack stack.
192
+ void CheckStackLimit();
193
+
194
+ // Generate a call to CheckStackGuardState.
195
+ void CallCheckStackGuardState();
196
+
197
+ // The rbp-relative location of a regexp register.
198
+ Operand register_location(int register_index);
199
+
200
+ // The register containing the current character after LoadCurrentCharacter.
201
+ inline Register current_character() { return rdx; }
202
+
203
+ // The register containing the backtrack stack top. Provides a meaningful
204
+ // name to the register.
205
+ inline Register backtrack_stackpointer() { return rcx; }
206
+
207
+ // The registers containing a self pointer to this code's Code object.
208
+ inline Register code_object_pointer() { return r8; }
209
+
210
+ // Byte size of chars in the string to match (decided by the Mode argument)
211
+ inline int char_size() { return static_cast<int>(mode_); }
212
+
213
+ // Equivalent to a conditional branch to the label, unless the label
214
+ // is NULL, in which case it is a conditional Backtrack.
215
+ void BranchOrBacktrack(Condition condition, Label* to);
216
+
217
+ void MarkPositionForCodeRelativeFixup() {
218
+ code_relative_fixup_positions_.Add(masm_->pc_offset());
219
+ }
220
+
221
+ void FixupCodeRelativePositions();
222
+
223
+ // Call and return internally in the generated code in a way that
224
+ // is GC-safe (i.e., doesn't leave absolute code addresses on the stack)
225
+ inline void SafeCall(Label* to);
226
+ inline void SafeCallTarget(Label* label);
227
+ inline void SafeReturn();
228
+
229
+ // Pushes the value of a register on the backtrack stack. Decrements the
230
+ // stack pointer (rcx) by a word size and stores the register's value there.
231
+ inline void Push(Register source);
232
+
233
+ // Pushes a value on the backtrack stack. Decrements the stack pointer (rcx)
234
+ // by a word size and stores the value there.
235
+ inline void Push(Immediate value);
236
+
237
+ // Pushes the Code object relative offset of a label on the backtrack stack
238
+ // (i.e., a backtrack target). Decrements the stack pointer (rcx)
239
+ // by a word size and stores the value there.
240
+ inline void Push(Label* label);
241
+
242
+ // Pops a value from the backtrack stack. Reads the word at the stack pointer
243
+ // (rcx) and increments it by a word size.
244
+ inline void Pop(Register target);
245
+
246
+ // Drops the top value from the backtrack stack without reading it.
247
+ // Increments the stack pointer (rcx) by a word size.
248
+ inline void Drop();
249
+
250
+ MacroAssembler* masm_;
251
+
252
+ ZoneList<int> code_relative_fixup_positions_;
253
+
254
+ // Which mode to generate code for (ASCII or UC16).
255
+ Mode mode_;
256
+
257
+ // One greater than maximal register index actually used.
258
+ int num_registers_;
259
+
260
+ // Number of registers to output at the end (the saved registers
261
+ // are always 0..num_saved_registers_-1)
262
+ int num_saved_registers_;
263
+
264
+ // Labels used internally.
265
+ Label entry_label_;
266
+ Label start_label_;
267
+ Label success_label_;
268
+ Label backtrack_label_;
269
+ Label exit_label_;
270
+ Label check_preempt_label_;
271
+ Label stack_overflow_label_;
272
+ };
273
+
274
+ #endif // V8_INTERPRETED_REGEXP
275
+
276
+ }} // namespace v8::internal
277
+
278
+ #endif // V8_X64_REGEXP_MACRO_ASSEMBLER_X64_H_
@@ -0,0 +1,71 @@
1
+ // Copyright 2011 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+ #ifndef V8_X64_SIMULATOR_X64_H_
29
+ #define V8_X64_SIMULATOR_X64_H_
30
+
31
+ #include "allocation.h"
32
+
33
+ namespace v8 {
34
+ namespace internal {
35
+
36
+ // Since there is no simulator for the x64 architecture the only thing we can
37
+ // do is to call the entry directly.
38
+ // TODO(X64): Don't pass p0, since it isn't used?
39
+ #define CALL_GENERATED_CODE(entry, p0, p1, p2, p3, p4) \
40
+ (entry(p0, p1, p2, p3, p4))
41
+
42
+ typedef int (*regexp_matcher)(String*, int, const byte*,
43
+ const byte*, int*, Address, int);
44
+
45
+ // Call the generated regexp code directly. The code at the entry address should
46
+ // expect seven int/pointer sized arguments and return an int.
47
+ #define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6) \
48
+ (FUNCTION_CAST<regexp_matcher>(entry)(p0, p1, p2, p3, p4, p5, p6))
49
+
50
+ #define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
51
+ (reinterpret_cast<TryCatch*>(try_catch_address))
52
+
53
+ // The stack limit beyond which we will throw stack overflow errors in
54
+ // generated code. Because generated code on x64 uses the C stack, we
55
+ // just use the C stack limit.
56
+ class SimulatorStack : public v8::internal::AllStatic {
57
+ public:
58
+ static inline uintptr_t JsLimitFromCLimit(uintptr_t c_limit) {
59
+ return c_limit;
60
+ }
61
+
62
+ static inline uintptr_t RegisterCTryCatch(uintptr_t try_catch_address) {
63
+ return try_catch_address;
64
+ }
65
+
66
+ static inline void UnregisterCTryCatch() { }
67
+ };
68
+
69
+ } } // namespace v8::internal
70
+
71
+ #endif // V8_X64_SIMULATOR_X64_H_
@@ -0,0 +1,3509 @@
1
+ // Copyright 2011 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+ #include "v8.h"
29
+
30
+ #if defined(V8_TARGET_ARCH_X64)
31
+
32
+ #include "ic-inl.h"
33
+ #include "codegen-inl.h"
34
+ #include "stub-cache.h"
35
+
36
+ namespace v8 {
37
+ namespace internal {
38
+
39
+ #define __ ACCESS_MASM(masm)
40
+
41
+
42
+ static void ProbeTable(MacroAssembler* masm,
43
+ Code::Flags flags,
44
+ StubCache::Table table,
45
+ Register name,
46
+ Register offset) {
47
+ ASSERT_EQ(8, kPointerSize);
48
+ ASSERT_EQ(16, sizeof(StubCache::Entry));
49
+ // The offset register holds the entry offset times four (due to masking
50
+ // and shifting optimizations).
51
+ ExternalReference key_offset(SCTableReference::keyReference(table));
52
+ Label miss;
53
+
54
+ __ movq(kScratchRegister, key_offset);
55
+ // Check that the key in the entry matches the name.
56
+ // Multiply entry offset by 16 to get the entry address. Since the
57
+ // offset register already holds the entry offset times four, multiply
58
+ // by a further four.
59
+ __ cmpl(name, Operand(kScratchRegister, offset, times_4, 0));
60
+ __ j(not_equal, &miss);
61
+ // Get the code entry from the cache.
62
+ // Use key_offset + kPointerSize, rather than loading value_offset.
63
+ __ movq(kScratchRegister,
64
+ Operand(kScratchRegister, offset, times_4, kPointerSize));
65
+ // Check that the flags match what we're looking for.
66
+ __ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset));
67
+ __ and_(offset, Immediate(~Code::kFlagsNotUsedInLookup));
68
+ __ cmpl(offset, Immediate(flags));
69
+ __ j(not_equal, &miss);
70
+
71
+ // Jump to the first instruction in the code stub.
72
+ __ addq(kScratchRegister, Immediate(Code::kHeaderSize - kHeapObjectTag));
73
+ __ jmp(kScratchRegister);
74
+
75
+ __ bind(&miss);
76
+ }
77
+
78
+
79
+ // Helper function used to check that the dictionary doesn't contain
80
+ // the property. This function may return false negatives, so miss_label
81
+ // must always call a backup property check that is complete.
82
+ // This function is safe to call if the receiver has fast properties.
83
+ // Name must be a symbol and receiver must be a heap object.
84
+ static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
85
+ Label* miss_label,
86
+ Register receiver,
87
+ String* name,
88
+ Register r0,
89
+ Register r1) {
90
+ ASSERT(name->IsSymbol());
91
+ __ IncrementCounter(&Counters::negative_lookups, 1);
92
+ __ IncrementCounter(&Counters::negative_lookups_miss, 1);
93
+
94
+ Label done;
95
+ __ movq(r0, FieldOperand(receiver, HeapObject::kMapOffset));
96
+
97
+ const int kInterceptorOrAccessCheckNeededMask =
98
+ (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
99
+
100
+ // Bail out if the receiver has a named interceptor or requires access checks.
101
+ __ testb(FieldOperand(r0, Map::kBitFieldOffset),
102
+ Immediate(kInterceptorOrAccessCheckNeededMask));
103
+ __ j(not_zero, miss_label);
104
+
105
+ // Check that receiver is a JSObject.
106
+ __ CmpInstanceType(r0, FIRST_JS_OBJECT_TYPE);
107
+ __ j(below, miss_label);
108
+
109
+ // Load properties array.
110
+ Register properties = r0;
111
+ __ movq(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));
112
+
113
+ // Check that the properties array is a dictionary.
114
+ __ CompareRoot(FieldOperand(properties, HeapObject::kMapOffset),
115
+ Heap::kHashTableMapRootIndex);
116
+ __ j(not_equal, miss_label);
117
+
118
+ // Compute the capacity mask.
119
+ const int kCapacityOffset =
120
+ StringDictionary::kHeaderSize +
121
+ StringDictionary::kCapacityIndex * kPointerSize;
122
+
123
+ // Generate an unrolled loop that performs a few probes before
124
+ // giving up.
125
+ static const int kProbes = 4;
126
+ const int kElementsStartOffset =
127
+ StringDictionary::kHeaderSize +
128
+ StringDictionary::kElementsStartIndex * kPointerSize;
129
+
130
+ // If names of slots in range from 1 to kProbes - 1 for the hash value are
131
+ // not equal to the name and kProbes-th slot is not used (its name is the
132
+ // undefined value), it guarantees the hash table doesn't contain the
133
+ // property. It's true even if some slots represent deleted properties
134
+ // (their names are the null value).
135
+ for (int i = 0; i < kProbes; i++) {
136
+ // r0 points to properties hash.
137
+ // Compute the masked index: (hash + i + i * i) & mask.
138
+ Register index = r1;
139
+ // Capacity is smi 2^n.
140
+ __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset));
141
+ __ decl(index);
142
+ __ and_(index,
143
+ Immediate(name->Hash() + StringDictionary::GetProbeOffset(i)));
144
+
145
+ // Scale the index by multiplying by the entry size.
146
+ ASSERT(StringDictionary::kEntrySize == 3);
147
+ __ lea(index, Operand(index, index, times_2, 0)); // index *= 3.
148
+
149
+ Register entity_name = r1;
150
+ // Having undefined at this place means the name is not contained.
151
+ ASSERT_EQ(kSmiTagSize, 1);
152
+ __ movq(entity_name, Operand(properties, index, times_pointer_size,
153
+ kElementsStartOffset - kHeapObjectTag));
154
+ __ Cmp(entity_name, Factory::undefined_value());
155
+ // __ jmp(miss_label);
156
+ if (i != kProbes - 1) {
157
+ __ j(equal, &done);
158
+
159
+ // Stop if found the property.
160
+ __ Cmp(entity_name, Handle<String>(name));
161
+ __ j(equal, miss_label);
162
+
163
+ // Check if the entry name is not a symbol.
164
+ __ movq(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
165
+ __ testb(FieldOperand(entity_name, Map::kInstanceTypeOffset),
166
+ Immediate(kIsSymbolMask));
167
+ __ j(zero, miss_label);
168
+ } else {
169
+ // Give up probing if still not found the undefined value.
170
+ __ j(not_equal, miss_label);
171
+ }
172
+ }
173
+
174
+ __ bind(&done);
175
+ __ DecrementCounter(&Counters::negative_lookups_miss, 1);
176
+ }
177
+
178
+
179
+ void StubCache::GenerateProbe(MacroAssembler* masm,
180
+ Code::Flags flags,
181
+ Register receiver,
182
+ Register name,
183
+ Register scratch,
184
+ Register extra,
185
+ Register extra2) {
186
+ Label miss;
187
+ USE(extra); // The register extra is not used on the X64 platform.
188
+ USE(extra2); // The register extra2 is not used on the X64 platform.
189
+ // Make sure that code is valid. The shifting code relies on the
190
+ // entry size being 16.
191
+ ASSERT(sizeof(Entry) == 16);
192
+
193
+ // Make sure the flags do not name a specific type.
194
+ ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
195
+
196
+ // Make sure that there are no register conflicts.
197
+ ASSERT(!scratch.is(receiver));
198
+ ASSERT(!scratch.is(name));
199
+
200
+ // Check scratch register is valid, extra and extra2 are unused.
201
+ ASSERT(!scratch.is(no_reg));
202
+ ASSERT(extra2.is(no_reg));
203
+
204
+ // Check that the receiver isn't a smi.
205
+ __ JumpIfSmi(receiver, &miss);
206
+
207
+ // Get the map of the receiver and compute the hash.
208
+ __ movl(scratch, FieldOperand(name, String::kHashFieldOffset));
209
+ // Use only the low 32 bits of the map pointer.
210
+ __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
211
+ __ xor_(scratch, Immediate(flags));
212
+ __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));
213
+
214
+ // Probe the primary table.
215
+ ProbeTable(masm, flags, kPrimary, name, scratch);
216
+
217
+ // Primary miss: Compute hash for secondary probe.
218
+ __ movl(scratch, FieldOperand(name, String::kHashFieldOffset));
219
+ __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
220
+ __ xor_(scratch, Immediate(flags));
221
+ __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));
222
+ __ subl(scratch, name);
223
+ __ addl(scratch, Immediate(flags));
224
+ __ and_(scratch, Immediate((kSecondaryTableSize - 1) << kHeapObjectTagSize));
225
+
226
+ // Probe the secondary table.
227
+ ProbeTable(masm, flags, kSecondary, name, scratch);
228
+
229
+ // Cache miss: Fall-through and let caller handle the miss by
230
+ // entering the runtime system.
231
+ __ bind(&miss);
232
+ }
233
+
234
+
235
+ void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
236
+ int index,
237
+ Register prototype) {
238
+ // Load the global or builtins object from the current context.
239
+ __ movq(prototype,
240
+ Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
241
+ // Load the global context from the global or builtins object.
242
+ __ movq(prototype,
243
+ FieldOperand(prototype, GlobalObject::kGlobalContextOffset));
244
+ // Load the function from the global context.
245
+ __ movq(prototype, Operand(prototype, Context::SlotOffset(index)));
246
+ // Load the initial map. The global functions all have initial maps.
247
+ __ movq(prototype,
248
+ FieldOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
249
+ // Load the prototype from the initial map.
250
+ __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
251
+ }
252
+
253
+
254
+ void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
255
+ MacroAssembler* masm, int index, Register prototype, Label* miss) {
256
+ // Check we're still in the same context.
257
+ __ Move(prototype, Top::global());
258
+ __ cmpq(Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)),
259
+ prototype);
260
+ __ j(not_equal, miss);
261
+ // Get the global function with the given index.
262
+ JSFunction* function = JSFunction::cast(Top::global_context()->get(index));
263
+ // Load its initial map. The global functions all have initial maps.
264
+ __ Move(prototype, Handle<Map>(function->initial_map()));
265
+ // Load the prototype from the initial map.
266
+ __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
267
+ }
268
+
269
+
270
+ void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
271
+ Register receiver,
272
+ Register scratch,
273
+ Label* miss_label) {
274
+ // Check that the receiver isn't a smi.
275
+ __ JumpIfSmi(receiver, miss_label);
276
+
277
+ // Check that the object is a JS array.
278
+ __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
279
+ __ j(not_equal, miss_label);
280
+
281
+ // Load length directly from the JS array.
282
+ __ movq(rax, FieldOperand(receiver, JSArray::kLengthOffset));
283
+ __ ret(0);
284
+ }
285
+
286
+
287
+ // Generate code to check if an object is a string. If the object is
288
+ // a string, the map's instance type is left in the scratch register.
289
+ static void GenerateStringCheck(MacroAssembler* masm,
290
+ Register receiver,
291
+ Register scratch,
292
+ Label* smi,
293
+ Label* non_string_object) {
294
+ // Check that the object isn't a smi.
295
+ __ JumpIfSmi(receiver, smi);
296
+
297
+ // Check that the object is a string.
298
+ __ movq(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
299
+ __ movzxbq(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
300
+ ASSERT(kNotStringTag != 0);
301
+ __ testl(scratch, Immediate(kNotStringTag));
302
+ __ j(not_zero, non_string_object);
303
+ }
304
+
305
+
306
+ void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
307
+ Register receiver,
308
+ Register scratch1,
309
+ Register scratch2,
310
+ Label* miss,
311
+ bool support_wrappers) {
312
+ Label check_wrapper;
313
+
314
+ // Check if the object is a string leaving the instance type in the
315
+ // scratch register.
316
+ GenerateStringCheck(masm, receiver, scratch1, miss,
317
+ support_wrappers ? &check_wrapper : miss);
318
+
319
+ // Load length directly from the string.
320
+ __ movq(rax, FieldOperand(receiver, String::kLengthOffset));
321
+ __ ret(0);
322
+
323
+ if (support_wrappers) {
324
+ // Check if the object is a JSValue wrapper.
325
+ __ bind(&check_wrapper);
326
+ __ cmpl(scratch1, Immediate(JS_VALUE_TYPE));
327
+ __ j(not_equal, miss);
328
+
329
+ // Check if the wrapped value is a string and load the length
330
+ // directly if it is.
331
+ __ movq(scratch2, FieldOperand(receiver, JSValue::kValueOffset));
332
+ GenerateStringCheck(masm, scratch2, scratch1, miss, miss);
333
+ __ movq(rax, FieldOperand(scratch2, String::kLengthOffset));
334
+ __ ret(0);
335
+ }
336
+ }
337
+
338
+
339
+ void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
340
+ Register receiver,
341
+ Register result,
342
+ Register scratch,
343
+ Label* miss_label) {
344
+ __ TryGetFunctionPrototype(receiver, result, miss_label);
345
+ if (!result.is(rax)) __ movq(rax, result);
346
+ __ ret(0);
347
+ }
348
+
349
+
350
+ // Load a fast property out of a holder object (src). In-object properties
351
+ // are loaded directly otherwise the property is loaded from the properties
352
+ // fixed array.
353
+ void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
354
+ Register dst, Register src,
355
+ JSObject* holder, int index) {
356
+ // Adjust for the number of properties stored in the holder.
357
+ index -= holder->map()->inobject_properties();
358
+ if (index < 0) {
359
+ // Get the property straight out of the holder.
360
+ int offset = holder->map()->instance_size() + (index * kPointerSize);
361
+ __ movq(dst, FieldOperand(src, offset));
362
+ } else {
363
+ // Calculate the offset into the properties array.
364
+ int offset = index * kPointerSize + FixedArray::kHeaderSize;
365
+ __ movq(dst, FieldOperand(src, JSObject::kPropertiesOffset));
366
+ __ movq(dst, FieldOperand(dst, offset));
367
+ }
368
+ }
369
+
370
+
371
+ static void PushInterceptorArguments(MacroAssembler* masm,
372
+ Register receiver,
373
+ Register holder,
374
+ Register name,
375
+ JSObject* holder_obj) {
376
+ __ push(name);
377
+ InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
378
+ ASSERT(!Heap::InNewSpace(interceptor));
379
+ __ Move(kScratchRegister, Handle<Object>(interceptor));
380
+ __ push(kScratchRegister);
381
+ __ push(receiver);
382
+ __ push(holder);
383
+ __ push(FieldOperand(kScratchRegister, InterceptorInfo::kDataOffset));
384
+ }
385
+
386
+
387
+ static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
388
+ Register receiver,
389
+ Register holder,
390
+ Register name,
391
+ JSObject* holder_obj) {
392
+ PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
393
+
394
+ ExternalReference ref =
395
+ ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly));
396
+ __ movq(rax, Immediate(5));
397
+ __ movq(rbx, ref);
398
+
399
+ CEntryStub stub(1);
400
+ __ CallStub(&stub);
401
+ }
402
+
403
+
404
+ // Number of pointers to be reserved on stack for fast API call.
405
+ static const int kFastApiCallArguments = 3;
406
+
407
+
408
+ // Reserves space for the extra arguments to API function in the
409
+ // caller's frame.
410
+ //
411
+ // These arguments are set by CheckPrototypes and GenerateFastApiCall.
412
+ static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
413
+ // ----------- S t a t e -------------
414
+ // -- rsp[0] : return address
415
+ // -- rsp[8] : last argument in the internal frame of the caller
416
+ // -----------------------------------
417
+ __ movq(scratch, Operand(rsp, 0));
418
+ __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
419
+ __ movq(Operand(rsp, 0), scratch);
420
+ __ Move(scratch, Smi::FromInt(0));
421
+ for (int i = 1; i <= kFastApiCallArguments; i++) {
422
+ __ movq(Operand(rsp, i * kPointerSize), scratch);
423
+ }
424
+ }
425
+
426
+
427
+ // Undoes the effects of ReserveSpaceForFastApiCall.
428
+ static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
429
+ // ----------- S t a t e -------------
430
+ // -- rsp[0] : return address.
431
+ // -- rsp[8] : last fast api call extra argument.
432
+ // -- ...
433
+ // -- rsp[kFastApiCallArguments * 8] : first fast api call extra argument.
434
+ // -- rsp[kFastApiCallArguments * 8 + 8] : last argument in the internal
435
+ // frame.
436
+ // -----------------------------------
437
+ __ movq(scratch, Operand(rsp, 0));
438
+ __ movq(Operand(rsp, kFastApiCallArguments * kPointerSize), scratch);
439
+ __ addq(rsp, Immediate(kPointerSize * kFastApiCallArguments));
440
+ }
441
+
442
+
443
+ // Generates call to API function.
444
+ static MaybeObject* GenerateFastApiCall(MacroAssembler* masm,
445
+ const CallOptimization& optimization,
446
+ int argc) {
447
+ // ----------- S t a t e -------------
448
+ // -- rsp[0] : return address
449
+ // -- rsp[8] : object passing the type check
450
+ // (last fast api call extra argument,
451
+ // set by CheckPrototypes)
452
+ // -- rsp[16] : api function
453
+ // (first fast api call extra argument)
454
+ // -- rsp[24] : api call data
455
+ // -- rsp[32] : last argument
456
+ // -- ...
457
+ // -- rsp[(argc + 3) * 8] : first argument
458
+ // -- rsp[(argc + 4) * 8] : receiver
459
+ // -----------------------------------
460
+ // Get the function and setup the context.
461
+ JSFunction* function = optimization.constant_function();
462
+ __ Move(rdi, Handle<JSFunction>(function));
463
+ __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
464
+
465
+ // Pass the additional arguments.
466
+ __ movq(Operand(rsp, 2 * kPointerSize), rdi);
467
+ Object* call_data = optimization.api_call_info()->data();
468
+ Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info());
469
+ if (Heap::InNewSpace(call_data)) {
470
+ __ Move(rcx, api_call_info_handle);
471
+ __ movq(rbx, FieldOperand(rcx, CallHandlerInfo::kDataOffset));
472
+ __ movq(Operand(rsp, 3 * kPointerSize), rbx);
473
+ } else {
474
+ __ Move(Operand(rsp, 3 * kPointerSize), Handle<Object>(call_data));
475
+ }
476
+
477
+ // Prepare arguments.
478
+ __ lea(rbx, Operand(rsp, 3 * kPointerSize));
479
+
480
+ Object* callback = optimization.api_call_info()->callback();
481
+ Address api_function_address = v8::ToCData<Address>(callback);
482
+ ApiFunction fun(api_function_address);
483
+
484
+ #ifdef _WIN64
485
+ // Win64 uses first register--rcx--for returned value.
486
+ Register arguments_arg = rdx;
487
+ #else
488
+ Register arguments_arg = rdi;
489
+ #endif
490
+
491
+ // Allocate the v8::Arguments structure in the arguments' space since
492
+ // it's not controlled by GC.
493
+ const int kApiStackSpace = 4;
494
+
495
+ __ PrepareCallApiFunction(kApiStackSpace);
496
+
497
+ __ movq(StackSpaceOperand(0), rbx); // v8::Arguments::implicit_args_.
498
+ __ addq(rbx, Immediate(argc * kPointerSize));
499
+ __ movq(StackSpaceOperand(1), rbx); // v8::Arguments::values_.
500
+ __ Set(StackSpaceOperand(2), argc); // v8::Arguments::length_.
501
+ // v8::Arguments::is_construct_call_.
502
+ __ Set(StackSpaceOperand(3), 0);
503
+
504
+ // v8::InvocationCallback's argument.
505
+ __ lea(arguments_arg, StackSpaceOperand(0));
506
+ // Emitting a stub call may try to allocate (if the code is not
507
+ // already generated). Do not allow the assembler to perform a
508
+ // garbage collection but instead return the allocation failure
509
+ // object.
510
+ return masm->TryCallApiFunctionAndReturn(&fun,
511
+ argc + kFastApiCallArguments + 1);
512
+ }
513
+
514
+
515
+ class CallInterceptorCompiler BASE_EMBEDDED {
516
+ public:
517
+ CallInterceptorCompiler(StubCompiler* stub_compiler,
518
+ const ParameterCount& arguments,
519
+ Register name)
520
+ : stub_compiler_(stub_compiler),
521
+ arguments_(arguments),
522
+ name_(name) {}
523
+
524
+ MaybeObject* Compile(MacroAssembler* masm,
525
+ JSObject* object,
526
+ JSObject* holder,
527
+ String* name,
528
+ LookupResult* lookup,
529
+ Register receiver,
530
+ Register scratch1,
531
+ Register scratch2,
532
+ Register scratch3,
533
+ Label* miss) {
534
+ ASSERT(holder->HasNamedInterceptor());
535
+ ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());
536
+
537
+ // Check that the receiver isn't a smi.
538
+ __ JumpIfSmi(receiver, miss);
539
+
540
+ CallOptimization optimization(lookup);
541
+
542
+ if (optimization.is_constant_call()) {
543
+ return CompileCacheable(masm,
544
+ object,
545
+ receiver,
546
+ scratch1,
547
+ scratch2,
548
+ scratch3,
549
+ holder,
550
+ lookup,
551
+ name,
552
+ optimization,
553
+ miss);
554
+ } else {
555
+ CompileRegular(masm,
556
+ object,
557
+ receiver,
558
+ scratch1,
559
+ scratch2,
560
+ scratch3,
561
+ name,
562
+ holder,
563
+ miss);
564
+ return Heap::undefined_value(); // Success.
565
+ }
566
+ }
567
+
568
+ private:
569
  // Emits the "cacheable" path for a call through a named interceptor whose
  // lookup resolves to a constant JSFunction.  If the interceptor yields no
  // value, the cached constant function is invoked directly (optionally via
  // the fast API-call path).  Returns a Failure on code-object allocation
  // failure, otherwise Heap::undefined_value() to signal success.
  //
  // masm                - assembler to emit into.
  // object              - the receiver's JSObject as seen at compile time.
  // receiver            - register holding the receiver at runtime.
  // scratch1..scratch3  - caller-provided scratch registers.
  // interceptor_holder  - object in the prototype chain holding the interceptor.
  // lookup              - post-interceptor lookup result (constant function).
  // miss_label          - jump target when any guard fails.
  MaybeObject* CompileCacheable(MacroAssembler* masm,
                                JSObject* object,
                                Register receiver,
                                Register scratch1,
                                Register scratch2,
                                Register scratch3,
                                JSObject* interceptor_holder,
                                LookupResult* lookup,
                                String* name,
                                const CallOptimization& optimization,
                                Label* miss_label) {
    ASSERT(optimization.is_constant_call());
    ASSERT(!lookup->holder()->IsGlobalObject());

    // depth1/depth2 are prototype-chain depths used by the fast API call to
    // locate the expected receiver type; kInvalidProtoDepth means "not found
    // on that segment of the chain".
    int depth1 = kInvalidProtoDepth;
    int depth2 = kInvalidProtoDepth;
    bool can_do_fast_api_call = false;
    if (optimization.is_simple_api_call() &&
        !lookup->holder()->IsGlobalObject()) {
      depth1 =
          optimization.GetPrototypeDepthOfExpectedType(object,
                                                       interceptor_holder);
      if (depth1 == kInvalidProtoDepth) {
        depth2 =
            optimization.GetPrototypeDepthOfExpectedType(interceptor_holder,
                                                         lookup->holder());
      }
      can_do_fast_api_call = (depth1 != kInvalidProtoDepth) ||
                             (depth2 != kInvalidProtoDepth);
    }

    __ IncrementCounter(&Counters::call_const_interceptor, 1);

    if (can_do_fast_api_call) {
      __ IncrementCounter(&Counters::call_const_interceptor_fast_api, 1);
      // Reserve stack slots now so the fast API call can fill them in later;
      // must be undone on every exit path (see FreeSpaceForFastApiCall below).
      ReserveSpaceForFastApiCall(masm, scratch1);
    }

    // Check that the maps from receiver to interceptor's holder
    // haven't changed and thus we can invoke interceptor.
    // When stack space was reserved, misses must first free it, so route
    // them through the local miss_cleanup trampoline.
    Label miss_cleanup;
    Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver,
                                        interceptor_holder, scratch1,
                                        scratch2, scratch3, name, depth1, miss);

    // Invoke an interceptor and if it provides a value,
    // branch to |regular_invoke|.
    Label regular_invoke;
    LoadWithInterceptor(masm, receiver, holder, interceptor_holder,
                        &regular_invoke);

    // Interceptor returned nothing for this property. Try to use cached
    // constant function.

    // Check that the maps from interceptor's holder to constant function's
    // holder haven't changed and thus we can use cached constant function.
    if (interceptor_holder != lookup->holder()) {
      stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
                                      lookup->holder(), scratch1,
                                      scratch2, scratch3, name, depth2, miss);
    } else {
      // CheckPrototypes has a side effect of fetching a 'holder'
      // for API (object which is instanceof for the signature). It's
      // safe to omit it here, as if present, it should be fetched
      // by the previous CheckPrototypes.
      ASSERT(depth2 == kInvalidProtoDepth);
    }

    // Invoke function.
    if (can_do_fast_api_call) {
      MaybeObject* result = GenerateFastApiCall(masm,
                                                optimization,
                                                arguments_.immediate());
      if (result->IsFailure()) return result;
    } else {
      __ InvokeFunction(optimization.constant_function(), arguments_,
                        JUMP_FUNCTION);
    }

    // Deferred code for fast API call case---clean preallocated space.
    if (can_do_fast_api_call) {
      __ bind(&miss_cleanup);
      FreeSpaceForFastApiCall(masm, scratch1);
      __ jmp(miss_label);
    }

    // Invoke a regular function.
    // Reached when the interceptor produced a value; the reserved fast-API
    // stack space (if any) is no longer needed.
    __ bind(&regular_invoke);
    if (can_do_fast_api_call) {
      FreeSpaceForFastApiCall(masm, scratch1);
    }

    return Heap::undefined_value();  // Success.
  }
665
+
666
  // Emits the non-cacheable interceptor call path: validates the prototype
  // chain, then calls the LoadPropertyWithInterceptorForCall IC utility in
  // the runtime inside an internal frame.  The name_ register is preserved
  // across the runtime call; execution falls through on return.
  void CompileRegular(MacroAssembler* masm,
                      JSObject* object,
                      Register receiver,
                      Register scratch1,
                      Register scratch2,
                      Register scratch3,
                      String* name,
                      JSObject* interceptor_holder,
                      Label* miss_label) {
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                        scratch1, scratch2, scratch3, name,
                                        miss_label);

    // An internal frame makes the pushed pointers visible to the GC.
    __ EnterInternalFrame();
    // Save the name_ register across the call.
    __ push(name_);

    PushInterceptorArguments(masm,
                             receiver,
                             holder,
                             name_,
                             interceptor_holder);

    // 5 arguments were pushed by PushInterceptorArguments above.
    __ CallExternalReference(
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall)),
        5);

    // Restore the name_ register.
    __ pop(name_);
    __ LeaveInternalFrame();
  }
698
+
699
  // Calls the named interceptor's getter for the property in name_.  If the
  // interceptor produced a value (i.e. the result in rax is NOT the
  // no-interceptor-result sentinel), jumps to |interceptor_succeeded|;
  // otherwise falls through.  holder/name_ are saved across the call inside
  // an internal frame so the GC can see them.
  void LoadWithInterceptor(MacroAssembler* masm,
                           Register receiver,
                           Register holder,
                           JSObject* holder_obj,
                           Label* interceptor_succeeded) {
    __ EnterInternalFrame();
    __ push(holder);  // Save the holder.
    __ push(name_);  // Save the name.

    CompileCallLoadPropertyWithInterceptor(masm,
                                           receiver,
                                           holder,
                                           name_,
                                           holder_obj);

    __ pop(name_);  // Restore the name.
    // NOTE(review): this pops the saved holder value into the |receiver|
    // register, not into |holder| — the original comment said "Restore the
    // holder".  Looks intentional (callers only need the value preserved
    // somewhere GC-safe), but verify against the call sites.
    __ pop(receiver);  // Restore the holder.
    __ LeaveInternalFrame();

    // The interceptor returns its result in rax; the sentinel root value
    // means "no result produced".
    __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
    __ j(not_equal, interceptor_succeeded);
  }
721
+
722
+ StubCompiler* stub_compiler_;
723
+ const ParameterCount& arguments_;
724
+ Register name_;
725
+ };
726
+
727
+
728
+ void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
729
+ ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
730
+ Code* code = NULL;
731
+ if (kind == Code::LOAD_IC) {
732
+ code = Builtins::builtin(Builtins::LoadIC_Miss);
733
+ } else {
734
+ code = Builtins::builtin(Builtins::KeyedLoadIC_Miss);
735
+ }
736
+
737
+ Handle<Code> ic(code);
738
+ __ Jump(ic, RelocInfo::CODE_TARGET);
739
+ }
740
+
741
+
742
// Both name_reg and receiver_reg are preserved on jumps to miss_label,
// but may be destroyed if store is successful.
//
// Stores the value in rax into a named field of |object|, either in-object
// or in the out-of-line properties array, after verifying the receiver is a
// heap object with the expected map.  If |transition| is non-NULL the
// receiver's map is updated; when the transition would also require growing
// the properties backing store, the stub tail-calls into the runtime
// instead.  On success, returns the stored value (rax) to the caller.
void StubCompiler::GenerateStoreField(MacroAssembler* masm,
                                      JSObject* object,
                                      int index,
                                      Map* transition,
                                      Register receiver_reg,
                                      Register name_reg,
                                      Register scratch,
                                      Label* miss_label) {
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver_reg, miss_label);

  // Check that the map of the object hasn't changed.
  __ Cmp(FieldOperand(receiver_reg, HeapObject::kMapOffset),
         Handle<Map>(object->map()));
  __ j(not_equal, miss_label);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ pop(scratch);  // Return address.
    __ push(receiver_reg);
    __ Push(Handle<Map>(transition));
    __ push(rax);
    __ push(scratch);
    // 3 arguments (receiver, map, value); 1 value returned.
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage)), 3, 1);
    return;
  }

  if (transition != NULL) {
    // Update the map of the object; no write barrier updating is
    // needed because the map is never in new space.
    __ Move(FieldOperand(receiver_reg, HeapObject::kMapOffset),
            Handle<Map>(transition));
  }

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  // A negative result below means the field lives inside the object itself.
  index -= object->map()->inobject_properties();

  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ movq(FieldOperand(receiver_reg, offset), rax);

    // Update the write barrier for the array address.
    // Pass the value being stored in the now unused name_reg.
    __ movq(name_reg, rax);
    __ RecordWrite(receiver_reg, offset, name_reg, scratch);
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array (optimistically).
    __ movq(scratch, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ movq(FieldOperand(scratch, offset), rax);

    // Update the write barrier for the array address.
    // Pass the value being stored in the now unused name_reg.
    __ movq(name_reg, rax);
    __ RecordWrite(scratch, offset, name_reg, receiver_reg);
  }

  // Return the value (register rax).
  __ ret(0);
}
820
+
821
+
822
// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
//
// Emits a runtime check that the cell still holds the hole (i.e. the global
// property has not been defined since this stub was compiled); jumps to
// |miss| otherwise.  Returns the cell object, or a Failure if creating the
// cell required an allocation that failed.
MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell(
    MacroAssembler* masm,
    GlobalObject* global,
    String* name,
    Register scratch,
    Label* miss) {
  Object* probe;
  { MaybeObject* maybe_probe = global->EnsurePropertyCell(name);
    if (!maybe_probe->ToObject(&probe)) return maybe_probe;
  }
  JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe);
  // At compile time the cell must be empty, otherwise this stub would not
  // have been generated for a negative lookup.
  ASSERT(cell->value()->IsTheHole());
  __ Move(scratch, Handle<Object>(cell));
  __ Cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset),
         Factory::the_hole_value());
  __ j(not_equal, miss);
  return cell;
}
843
+
844
+
845
+ #undef __
846
+ #define __ ACCESS_MASM((masm()))
847
+
848
+
849
// Walks the prototype chain from |object| to |holder| at compile time,
// emitting map checks (or negative dictionary lookups for slow-mode
// objects) so the generated code bails to |miss| if any link of the chain
// has changed at runtime.  Optionally stores the object reached at
// |save_at_depth| into the stack slot at rsp[kPointerSize] (used by the
// fast API call machinery).  Returns the register that holds the holder
// when the emitted checks pass; on allocation failure, records the failure
// via set_failure() and returns early.
Register StubCompiler::CheckPrototypes(JSObject* object,
                                       Register object_reg,
                                       JSObject* holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       String* name,
                                       int save_at_depth,
                                       Label* miss) {
  // Make sure there's no overlap between holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.  On the first
  // iteration, reg is an alias for object_reg, on later iterations,
  // it is an alias for holder_reg.
  Register reg = object_reg;
  int depth = 0;

  if (save_at_depth == depth) {
    __ movq(Operand(rsp, kPointerSize), object_reg);
  }

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  JSObject* current = object;
  while (current != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());

    JSObject* prototype = JSObject::cast(current->GetPrototype());
    if (!current->HasFastProperties() &&
        !current->IsJSGlobalObject() &&
        !current->IsJSGlobalProxy()) {
      // Slow-mode (dictionary) object: a map check is not enough because
      // dictionary properties can be added without a map change, so emit a
      // negative lookup on the property name instead.  The name must be
      // interned (a symbol) for the dictionary probing to be valid.
      if (!name->IsSymbol()) {
        MaybeObject* lookup_result = Heap::LookupSymbol(name);
        if (lookup_result->IsFailure()) {
          set_failure(Failure::cast(lookup_result));
          return reg;
        } else {
          name = String::cast(lookup_result->ToObjectUnchecked());
        }
      }
      ASSERT(current->property_dictionary()->FindEntry(name) ==
             StringDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(),
                                       miss,
                                       reg,
                                       name,
                                       scratch1,
                                       scratch2);
      __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // from now the object is in holder_reg
      __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
    } else if (Heap::InNewSpace(prototype)) {
      // Get the map of the current object.
      __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      __ Cmp(scratch1, Handle<Map>(current->map()));
      // Branch on the result of the map check.
      __ j(not_equal, miss);
      // Check access rights to the global object.  This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch1, miss);

        // Restore scratch register to be the map of the object.
        // We load the prototype from the map in the scratch register.
        __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // The prototype is in new space; we cannot store a reference
      // to it in the code.  Load it from the map.
      reg = holder_reg;  // from now the object is in holder_reg
      __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));

    } else {
      // Check the map of the current object.
      __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
             Handle<Map>(current->map()));
      // Branch on the result of the map check.
      __ j(not_equal, miss);
      // Check access rights to the global object.  This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch1, miss);
      }
      // The prototype is in old space; load it directly.
      // Old-space objects don't move, so embedding the pointer is safe.
      reg = holder_reg;  // from now the object is in holder_reg
      __ Move(reg, Handle<JSObject>(prototype));
    }

    if (save_at_depth == depth) {
      __ movq(Operand(rsp, kPointerSize), reg);
    }

    // Go to the next object in the prototype chain.
    current = prototype;
  }

  // Check the holder map.
  __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), Handle<Map>(holder->map()));
  __ j(not_equal, miss);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth + 1));

  // Perform security check for access to the global object and return
  // the holder register.
  ASSERT(current == holder);
  ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
  if (current->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  }

  // If we've skipped any global objects, it's not enough to verify
  // that their maps haven't changed.  We also need to check that the
  // property cell for the property is still empty.
  current = object;
  while (current != holder) {
    if (current->IsGlobalObject()) {
      MaybeObject* cell = GenerateCheckPropertyCell(masm(),
                                                    GlobalObject::cast(current),
                                                    name,
                                                    scratch1,
                                                    miss);
      if (cell->IsFailure()) {
        set_failure(Failure::cast(cell));
        return reg;
      }
    }
    current = JSObject::cast(current->GetPrototype());
  }

  // Return the register containing the holder.
  return reg;
}
991
+
992
+
993
// Emits a monomorphic field load: verifies the receiver is a heap object,
// checks the prototype chain from |object| to |holder|, then loads the
// in-object / properties-array field at |index| into rax and returns.
void StubCompiler::GenerateLoadField(JSObject* object,
                                     JSObject* holder,
                                     Register receiver,
                                     Register scratch1,
                                     Register scratch2,
                                     Register scratch3,
                                     int index,
                                     String* name,
                                     Label* miss) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check the prototype chain.
  Register reg =
      CheckPrototypes(object, receiver, holder,
                      scratch1, scratch2, scratch3, name, miss);

  // Get the value from the properties.
  GenerateFastPropertyLoad(masm(), rax, reg, holder, index);
  __ ret(0);
}
1014
+
1015
+
1016
// Emits a load through an AccessorInfo callback: validates the prototype
// chain, builds a v8::AccessorInfo argument block on the stack, and calls
// the C++ getter via the API-call trampoline.  Returns the generated call
// code object, or a Failure if code allocation fails during emission.
MaybeObject* StubCompiler::GenerateLoadCallback(JSObject* object,
                                                JSObject* holder,
                                                Register receiver,
                                                Register name_reg,
                                                Register scratch1,
                                                Register scratch2,
                                                Register scratch3,
                                                AccessorInfo* callback,
                                                String* name,
                                                Label* miss) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder, scratch1,
                      scratch2, scratch3, name, miss);

  Handle<AccessorInfo> callback_handle(callback);

  // Insert additional parameters into the stack frame above return address.
  ASSERT(!scratch2.is(reg));
  __ pop(scratch2);  // Get return address to place it below.

  __ push(receiver);  // receiver
  __ push(reg);  // holder
  if (Heap::InNewSpace(callback_handle->data())) {
    // New-space objects may move, so the data pointer cannot be embedded in
    // the code; load it through the callback handle at runtime instead.
    __ Move(scratch1, callback_handle);
    __ push(FieldOperand(scratch1, AccessorInfo::kDataOffset));  // data
  } else {
    __ Push(Handle<Object>(callback_handle->data()));
  }
  __ push(name_reg);  // name
  // Save a pointer to where we pushed the arguments pointer.
  // This will be passed as the const AccessorInfo& to the C++ callback.

#ifdef _WIN64
  // Win64 uses first register--rcx--for returned value.
  Register accessor_info_arg = r8;
  Register name_arg = rdx;
#else
  Register accessor_info_arg = rsi;
  Register name_arg = rdi;
#endif

  ASSERT(!name_arg.is(scratch2));
  __ movq(name_arg, rsp);
  __ push(scratch2);  // Restore return address.

  // Do call through the api.
  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);

  // 3-element array for v8::Arguments::values_ and a handle for the name.
  const int kStackSpace = 4;

  // Allocate v8::AccessorInfo in non-GCed stack space.
  const int kArgStackSpace = 1;

  __ PrepareCallApiFunction(kArgStackSpace);
  __ lea(rax, Operand(name_arg, 3 * kPointerSize));

  // v8::AccessorInfo::args_.
  __ movq(StackSpaceOperand(0), rax);

  // The context register (rsi) has been saved in PrepareCallApiFunction and
  // could be used to pass arguments.
  __ lea(accessor_info_arg, StackSpaceOperand(0));

  // Emitting a stub call may try to allocate (if the code is not
  // already generated).  Do not allow the assembler to perform a
  // garbage collection but instead return the allocation failure
  // object.
  return masm()->TryCallApiFunctionAndReturn(&fun, kStackSpace);
}
1091
+
1092
+
1093
// Emits a load of a compile-time-constant property value: verifies the
// receiver and its prototype chain, then materializes |value| in rax and
// returns.
void StubCompiler::GenerateLoadConstant(JSObject* object,
                                        JSObject* holder,
                                        Register receiver,
                                        Register scratch1,
                                        Register scratch2,
                                        Register scratch3,
                                        Object* value,
                                        String* name,
                                        Label* miss) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder,
                      scratch1, scratch2, scratch3, name, miss);

  // Return the constant value.
  __ Move(rax, Handle<Object>(value));
  __ ret(0);
}
1114
+
1115
+
1116
// Emits a property load through a named interceptor.  If the follow-up
// lookup (what the load falls back to when the interceptor yields nothing)
// is a cacheable FIELD or CALLBACKS property, the interceptor call and the
// fallback are inlined; otherwise the whole load is delegated to the
// runtime via a tail call.
void StubCompiler::GenerateLoadInterceptor(JSObject* object,
                                           JSObject* interceptor_holder,
                                           LookupResult* lookup,
                                           Register receiver,
                                           Register name_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           String* name,
                                           Label* miss) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // So far the most popular follow ups for interceptor loads are FIELD
  // and CALLBACKS, so inline only them, other cases may be added
  // later.
  bool compile_followup_inline = false;
  if (lookup->IsProperty() && lookup->IsCacheable()) {
    if (lookup->type() == FIELD) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
        lookup->GetCallbackObject()->IsAccessorInfo() &&
        AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL) {
      compile_followup_inline = true;
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // Check that the maps haven't changed.
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
                                          name, miss);
    ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));

    // Save necessary data before invoking an interceptor.
    // Requires a frame to make GC aware of pushed pointers.
    __ EnterInternalFrame();

    if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
      // CALLBACKS case needs a receiver to be passed into C++ callback.
      __ push(receiver);
    }
    __ push(holder_reg);
    __ push(name_reg);

    // Invoke an interceptor.  Note: map checks from receiver to
    // interceptor's holder has been compiled before (see a caller
    // of this method.)
    CompileCallLoadPropertyWithInterceptor(masm(),
                                           receiver,
                                           holder_reg,
                                           name_reg,
                                           interceptor_holder);

    // Check if interceptor provided a value for property.  If it's
    // the case, return immediately.
    Label interceptor_failed;
    __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
    __ j(equal, &interceptor_failed);
    __ LeaveInternalFrame();
    __ ret(0);

    // Interceptor produced nothing: restore the saved registers and fall
    // back to the inlined FIELD/CALLBACKS load below.
    __ bind(&interceptor_failed);
    __ pop(name_reg);
    __ pop(holder_reg);
    if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
      __ pop(receiver);
    }

    __ LeaveInternalFrame();

    // Check that the maps from interceptor's holder to lookup's holder
    // haven't changed.  And load lookup's holder into |holder| register.
    if (interceptor_holder != lookup->holder()) {
      holder_reg = CheckPrototypes(interceptor_holder,
                                   holder_reg,
                                   lookup->holder(),
                                   scratch1,
                                   scratch2,
                                   scratch3,
                                   name,
                                   miss);
    }

    if (lookup->type() == FIELD) {
      // We found FIELD property in prototype chain of interceptor's holder.
      // Retrieve a field from field's holder.
      GenerateFastPropertyLoad(masm(), rax, holder_reg,
                               lookup->holder(), lookup->GetFieldIndex());
      __ ret(0);
    } else {
      // We found CALLBACKS property in prototype chain of interceptor's
      // holder.
      ASSERT(lookup->type() == CALLBACKS);
      ASSERT(lookup->GetCallbackObject()->IsAccessorInfo());
      AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
      ASSERT(callback != NULL);
      ASSERT(callback->getter() != NULL);

      // Tail call to runtime.
      // Important invariant in CALLBACKS case: the code above must be
      // structured to never clobber |receiver| register.
      __ pop(scratch2);  // return address
      __ push(receiver);
      __ push(holder_reg);
      __ Move(holder_reg, Handle<AccessorInfo>(callback));
      __ push(FieldOperand(holder_reg, AccessorInfo::kDataOffset));
      __ push(holder_reg);
      __ push(name_reg);
      __ push(scratch2);  // restore return address

      // 5 arguments pushed above (receiver, holder, data, callback, name).
      ExternalReference ref =
          ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
      __ TailCallExternalReference(ref, 5, 1);
    }
  } else {  // !compile_followup_inline
    // Call the runtime system to load the interceptor.
    // Check that the maps haven't changed.
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
                                          name, miss);
    __ pop(scratch2);  // save old return address
    PushInterceptorArguments(masm(), receiver, holder_reg,
                             name_reg, interceptor_holder);
    __ push(scratch2);  // restore old return address

    ExternalReference ref = ExternalReference(
        IC_Utility(IC::kLoadPropertyWithInterceptorForLoad));
    __ TailCallExternalReference(ref, 5, 1);
  }
}
1252
+
1253
+
1254
+ void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
1255
+ if (kind_ == Code::KEYED_CALL_IC) {
1256
+ __ Cmp(rcx, Handle<String>(name));
1257
+ __ j(not_equal, miss);
1258
+ }
1259
+ }
1260
+
1261
+
1262
// Loads the call receiver from the stack into rdx and emits the prototype
// chain checks from |object| down to the global |holder|.  Jumps to |miss|
// if any map has changed (or the receiver is a smi in the non-contextual
// case).
void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object,
                                                   JSObject* holder,
                                                   String* name,
                                                   Label* miss) {
  ASSERT(holder->IsGlobalObject());

  // Get the number of arguments.
  const int argc = arguments().immediate();

  // Get the receiver from the stack.
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  // If the object is the holder then we know that it's a global
  // object which can only happen for contextual calls.  In this case,
  // the receiver cannot be a smi.
  if (object != holder) {
    __ JumpIfSmi(rdx, miss);
  }

  // Check that the maps haven't changed.
  CheckPrototypes(object, rdx, holder, rbx, rax, rdi, name, miss);
}
1284
+
1285
+
1286
// Loads the value of a global property cell into rdi and verifies it is
// still the expected |function|; jumps to |miss| otherwise.  For functions
// in new space the check goes through the SharedFunctionInfo instead of the
// (movable) function pointer itself.
void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
                                                    JSFunction* function,
                                                    Label* miss) {
  // Get the value from the cell.
  __ Move(rdi, Handle<JSGlobalPropertyCell>(cell));
  __ movq(rdi, FieldOperand(rdi, JSGlobalPropertyCell::kValueOffset));

  // Check that the cell contains the same function.
  if (Heap::InNewSpace(function)) {
    // We can't embed a pointer to a function in new space so we have
    // to verify that the shared function info is unchanged.  This has
    // the nice side effect that multiple closures based on the same
    // function can all use this call IC.  Before we load through the
    // function, we have to verify that it still is a function.
    __ JumpIfSmi(rdi, miss);
    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rax);
    __ j(not_equal, miss);

    // Check the shared function info.  Make sure it hasn't changed.
    __ Move(rax, Handle<SharedFunctionInfo>(function->shared()));
    __ cmpq(FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset), rax);
    __ j(not_equal, miss);
  } else {
    __ Cmp(rdi, Handle<JSFunction>(function));
    __ j(not_equal, miss);
  }
}
1313
+
1314
+
1315
// Emits a tail jump to the call-miss stub for the current argument count
// and IC kind.  Returns the miss stub's code object, or forwards the
// Failure if (lazily) compiling that stub failed.
MaybeObject* CallStubCompiler::GenerateMissBranch() {
  MaybeObject* maybe_obj = StubCache::ComputeCallMiss(arguments().immediate(),
                                                      kind_);
  Object* obj;
  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
  return obj;
}
1323
+
1324
+
1325
// Compiles a call IC stub for a function stored in an object field:
// validates the receiver and prototype chain, loads the field, verifies it
// is a JSFunction, and tail-calls it.  Returns the generated code object or
// a Failure on allocation failure.
MaybeObject* CallStubCompiler::CompileCallField(JSObject* object,
                                                JSObject* holder,
                                                int index,
                                                String* name) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rsp[0]              : return address
  // rsp[8]              : argument argc
  // rsp[16]             : argument argc - 1
  // ...
  // rsp[argc * 8]       : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------
  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(rdx, &miss);

  // Do the right check and compute the holder register.
  Register reg = CheckPrototypes(object, rdx, holder, rbx, rax, rdi,
                                 name, &miss);

  GenerateFastPropertyLoad(masm(), rdi, reg, holder, index);

  // Check that the function really is a function.
  __ JumpIfSmi(rdi, &miss);
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rbx);
  __ j(not_equal, &miss);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
    __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
  }

  // Invoke the function.
  __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION);

  // Handle call cache miss.
  __ bind(&miss);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(FIELD, name);
}
1380
+
1381
+
1382
// Compiles a specialized stub for Array.prototype.push.  Handles the
// zero-argument case (return length) and the single-argument case inline
// (including in-place growth of the elements array when it abuts the
// new-space allocation top); everything else falls back to the C builtin.
// Bails out to a regular call stub if the receiver is not a known JSArray.
MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
                                                    JSObject* holder,
                                                    JSGlobalPropertyCell* cell,
                                                    JSFunction* function,
                                                    String* name) {
  // ----------- S t a t e -------------
  //  -- rcx                 : name
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- ...
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // If object is not an array, bail out to regular call.
  if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value();

  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(rdx, &miss);

  CheckPrototypes(JSObject::cast(object),
                  rdx,
                  holder,
                  rbx,
                  rax,
                  rdi,
                  name,
                  &miss);

  if (argc == 0) {
    // Noop, return the length.
    __ movq(rax, FieldOperand(rdx, JSArray::kLengthOffset));
    __ ret((argc + 1) * kPointerSize);
  } else {
    Label call_builtin;

    // Get the elements array of the object.
    __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset));

    // Check that the elements are in fast mode and writable.
    __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
           Factory::fixed_array_map());
    __ j(not_equal, &call_builtin);

    if (argc == 1) {  // Otherwise fall through to call builtin.
      Label exit, with_write_barrier, attempt_to_grow_elements;

      // Get the array's length into rax and calculate new length.
      __ SmiToInteger32(rax, FieldOperand(rdx, JSArray::kLengthOffset));
      STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue);
      __ addl(rax, Immediate(argc));

      // Get the element's length into rcx.
      __ SmiToInteger32(rcx, FieldOperand(rbx, FixedArray::kLengthOffset));

      // Check if we could survive without allocation.
      __ cmpl(rax, rcx);
      __ j(greater, &attempt_to_grow_elements);

      // Save new length.
      __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);

      // Push the element.
      __ movq(rcx, Operand(rsp, argc * kPointerSize));
      __ lea(rdx, FieldOperand(rbx,
                               rax, times_pointer_size,
                               FixedArray::kHeaderSize - argc * kPointerSize));
      __ movq(Operand(rdx, 0), rcx);

      // Check if value is a smi.
      // Smis never need a write barrier, so skip it in that case.
      __ Integer32ToSmi(rax, rax);  // Return new length as smi.

      __ JumpIfNotSmi(rcx, &with_write_barrier);

      __ bind(&exit);
      __ ret((argc + 1) * kPointerSize);

      __ bind(&with_write_barrier);

      // No barrier needed if the elements array itself is in new space.
      __ InNewSpace(rbx, rcx, equal, &exit);

      __ RecordWriteHelper(rbx, rdx, rcx);

      __ ret((argc + 1) * kPointerSize);

      __ bind(&attempt_to_grow_elements);
      if (!FLAG_inline_new) {
        __ jmp(&call_builtin);
      }

      ExternalReference new_space_allocation_top =
          ExternalReference::new_space_allocation_top_address();
      ExternalReference new_space_allocation_limit =
          ExternalReference::new_space_allocation_limit_address();

      const int kAllocationDelta = 4;
      // Load top.
      __ movq(rcx, new_space_allocation_top);
      __ movq(rcx, Operand(rcx, 0));

      // Check if it's the end of elements.
      // Growing in place is only possible when the elements array ends
      // exactly at the current new-space allocation top.
      __ lea(rdx, FieldOperand(rbx,
                               rax, times_pointer_size,
                               FixedArray::kHeaderSize - argc * kPointerSize));
      __ cmpq(rdx, rcx);
      __ j(not_equal, &call_builtin);
      __ addq(rcx, Immediate(kAllocationDelta * kPointerSize));
      __ movq(kScratchRegister, new_space_allocation_limit);
      __ cmpq(rcx, Operand(kScratchRegister, 0));
      __ j(above, &call_builtin);

      // We fit and could grow elements.
      __ movq(kScratchRegister, new_space_allocation_top);
      __ movq(Operand(kScratchRegister, 0), rcx);
      __ movq(rcx, Operand(rsp, argc * kPointerSize));

      // Push the argument...
      __ movq(Operand(rdx, 0), rcx);
      // ... and fill the rest with holes.
      __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
      for (int i = 1; i < kAllocationDelta; i++) {
        __ movq(Operand(rdx, i * kPointerSize), kScratchRegister);
      }

      // Restore receiver to rdx as finish sequence assumes it's here.
      __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

      // Increment element's and array's sizes.
      __ SmiAddConstant(FieldOperand(rbx, FixedArray::kLengthOffset),
                        Smi::FromInt(kAllocationDelta));

      // Make new length a smi before returning it.
      __ Integer32ToSmi(rax, rax);
      __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rax);

      // Elements are in new space, so write barrier is not required.
      __ ret((argc + 1) * kPointerSize);
    }

    __ bind(&call_builtin);
    __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush),
                                 argc + 1,
                                 1);
  }

  __ bind(&miss);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(function);
}
1543
+
1544
+
1545
+ MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object,
1546
+ JSObject* holder,
1547
+ JSGlobalPropertyCell* cell,
1548
+ JSFunction* function,
1549
+ String* name) {
1550
+ // ----------- S t a t e -------------
1551
+ // -- rcx : name
1552
+ // -- rsp[0] : return address
1553
+ // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1554
+ // -- ...
1555
+ // -- rsp[(argc + 1) * 8] : receiver
1556
+ // -----------------------------------
1557
+
1558
+ // If object is not an array, bail out to regular call.
1559
+ if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value();
1560
+
1561
+ Label miss, return_undefined, call_builtin;
1562
+
1563
+ GenerateNameCheck(name, &miss);
1564
+
1565
+ // Get the receiver from the stack.
1566
+ const int argc = arguments().immediate();
1567
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1568
+
1569
+ // Check that the receiver isn't a smi.
1570
+ __ JumpIfSmi(rdx, &miss);
1571
+
1572
+ CheckPrototypes(JSObject::cast(object), rdx,
1573
+ holder, rbx,
1574
+ rax, rdi, name, &miss);
1575
+
1576
+ // Get the elements array of the object.
1577
+ __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset));
1578
+
1579
+ // Check that the elements are in fast mode and writable.
1580
+ __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
1581
+ Heap::kFixedArrayMapRootIndex);
1582
+ __ j(not_equal, &call_builtin);
1583
+
1584
+ // Get the array's length into rcx and calculate new length.
1585
+ __ SmiToInteger32(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
1586
+ __ subl(rcx, Immediate(1));
1587
+ __ j(negative, &return_undefined);
1588
+
1589
+ // Get the last element.
1590
+ __ LoadRoot(r9, Heap::kTheHoleValueRootIndex);
1591
+ __ movq(rax, FieldOperand(rbx,
1592
+ rcx, times_pointer_size,
1593
+ FixedArray::kHeaderSize));
1594
+ // Check if element is already the hole.
1595
+ __ cmpq(rax, r9);
1596
+ // If so, call slow-case to also check prototypes for value.
1597
+ __ j(equal, &call_builtin);
1598
+
1599
+ // Set the array's length.
1600
+ __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rcx);
1601
+
1602
+ // Fill with the hole and return original value.
1603
+ __ movq(FieldOperand(rbx,
1604
+ rcx, times_pointer_size,
1605
+ FixedArray::kHeaderSize),
1606
+ r9);
1607
+ __ ret((argc + 1) * kPointerSize);
1608
+
1609
+ __ bind(&return_undefined);
1610
+ __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1611
+ __ ret((argc + 1) * kPointerSize);
1612
+
1613
+ __ bind(&call_builtin);
1614
+ __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop),
1615
+ argc + 1,
1616
+ 1);
1617
+
1618
+ __ bind(&miss);
1619
+ Object* obj;
1620
+ { MaybeObject* maybe_obj = GenerateMissBranch();
1621
+ if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1622
+ }
1623
+
1624
+ // Return the generated code.
1625
+ return GetCode(function);
1626
+ }
1627
+
1628
+
1629
+ MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
1630
+ Object* object,
1631
+ JSObject* holder,
1632
+ JSGlobalPropertyCell* cell,
1633
+ JSFunction* function,
1634
+ String* name) {
1635
+ // ----------- S t a t e -------------
1636
+ // -- rcx : function name
1637
+ // -- rsp[0] : return address
1638
+ // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1639
+ // -- ...
1640
+ // -- rsp[(argc + 1) * 8] : receiver
1641
+ // -----------------------------------
1642
+
1643
+ // If object is not a string, bail out to regular call.
1644
+ if (!object->IsString() || cell != NULL) return Heap::undefined_value();
1645
+
1646
+ const int argc = arguments().immediate();
1647
+
1648
+ Label miss;
1649
+ Label name_miss;
1650
+ Label index_out_of_range;
1651
+ Label* index_out_of_range_label = &index_out_of_range;
1652
+
1653
+ if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) {
1654
+ index_out_of_range_label = &miss;
1655
+ }
1656
+
1657
+ GenerateNameCheck(name, &name_miss);
1658
+
1659
+ // Check that the maps starting from the prototype haven't changed.
1660
+ GenerateDirectLoadGlobalFunctionPrototype(masm(),
1661
+ Context::STRING_FUNCTION_INDEX,
1662
+ rax,
1663
+ &miss);
1664
+ ASSERT(object != holder);
1665
+ CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
1666
+ rbx, rdx, rdi, name, &miss);
1667
+
1668
+ Register receiver = rbx;
1669
+ Register index = rdi;
1670
+ Register scratch = rdx;
1671
+ Register result = rax;
1672
+ __ movq(receiver, Operand(rsp, (argc + 1) * kPointerSize));
1673
+ if (argc > 0) {
1674
+ __ movq(index, Operand(rsp, (argc - 0) * kPointerSize));
1675
+ } else {
1676
+ __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1677
+ }
1678
+
1679
+ StringCharCodeAtGenerator char_code_at_generator(receiver,
1680
+ index,
1681
+ scratch,
1682
+ result,
1683
+ &miss, // When not a string.
1684
+ &miss, // When not a number.
1685
+ index_out_of_range_label,
1686
+ STRING_INDEX_IS_NUMBER);
1687
+ char_code_at_generator.GenerateFast(masm());
1688
+ __ ret((argc + 1) * kPointerSize);
1689
+
1690
+ StubRuntimeCallHelper call_helper;
1691
+ char_code_at_generator.GenerateSlow(masm(), call_helper);
1692
+
1693
+ if (index_out_of_range.is_linked()) {
1694
+ __ bind(&index_out_of_range);
1695
+ __ LoadRoot(rax, Heap::kNanValueRootIndex);
1696
+ __ ret((argc + 1) * kPointerSize);
1697
+ }
1698
+
1699
+ __ bind(&miss);
1700
+ // Restore function name in rcx.
1701
+ __ Move(rcx, Handle<String>(name));
1702
+ __ bind(&name_miss);
1703
+ Object* obj;
1704
+ { MaybeObject* maybe_obj = GenerateMissBranch();
1705
+ if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1706
+ }
1707
+
1708
+ // Return the generated code.
1709
+ return GetCode(function);
1710
+ }
1711
+
1712
+
1713
+ MaybeObject* CallStubCompiler::CompileStringCharAtCall(
1714
+ Object* object,
1715
+ JSObject* holder,
1716
+ JSGlobalPropertyCell* cell,
1717
+ JSFunction* function,
1718
+ String* name) {
1719
+ // ----------- S t a t e -------------
1720
+ // -- rcx : function name
1721
+ // -- rsp[0] : return address
1722
+ // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1723
+ // -- ...
1724
+ // -- rsp[(argc + 1) * 8] : receiver
1725
+ // -----------------------------------
1726
+
1727
+ // If object is not a string, bail out to regular call.
1728
+ if (!object->IsString() || cell != NULL) return Heap::undefined_value();
1729
+
1730
+ const int argc = arguments().immediate();
1731
+
1732
+ Label miss;
1733
+ Label name_miss;
1734
+ Label index_out_of_range;
1735
+ Label* index_out_of_range_label = &index_out_of_range;
1736
+
1737
+ if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) {
1738
+ index_out_of_range_label = &miss;
1739
+ }
1740
+
1741
+ GenerateNameCheck(name, &name_miss);
1742
+
1743
+ // Check that the maps starting from the prototype haven't changed.
1744
+ GenerateDirectLoadGlobalFunctionPrototype(masm(),
1745
+ Context::STRING_FUNCTION_INDEX,
1746
+ rax,
1747
+ &miss);
1748
+ ASSERT(object != holder);
1749
+ CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
1750
+ rbx, rdx, rdi, name, &miss);
1751
+
1752
+ Register receiver = rax;
1753
+ Register index = rdi;
1754
+ Register scratch1 = rbx;
1755
+ Register scratch2 = rdx;
1756
+ Register result = rax;
1757
+ __ movq(receiver, Operand(rsp, (argc + 1) * kPointerSize));
1758
+ if (argc > 0) {
1759
+ __ movq(index, Operand(rsp, (argc - 0) * kPointerSize));
1760
+ } else {
1761
+ __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1762
+ }
1763
+
1764
+ StringCharAtGenerator char_at_generator(receiver,
1765
+ index,
1766
+ scratch1,
1767
+ scratch2,
1768
+ result,
1769
+ &miss, // When not a string.
1770
+ &miss, // When not a number.
1771
+ index_out_of_range_label,
1772
+ STRING_INDEX_IS_NUMBER);
1773
+ char_at_generator.GenerateFast(masm());
1774
+ __ ret((argc + 1) * kPointerSize);
1775
+
1776
+ StubRuntimeCallHelper call_helper;
1777
+ char_at_generator.GenerateSlow(masm(), call_helper);
1778
+
1779
+ if (index_out_of_range.is_linked()) {
1780
+ __ bind(&index_out_of_range);
1781
+ __ LoadRoot(rax, Heap::kEmptyStringRootIndex);
1782
+ __ ret((argc + 1) * kPointerSize);
1783
+ }
1784
+
1785
+ __ bind(&miss);
1786
+ // Restore function name in rcx.
1787
+ __ Move(rcx, Handle<String>(name));
1788
+ __ bind(&name_miss);
1789
+ Object* obj;
1790
+ { MaybeObject* maybe_obj = GenerateMissBranch();
1791
+ if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1792
+ }
1793
+
1794
+ // Return the generated code.
1795
+ return GetCode(function);
1796
+ }
1797
+
1798
+
1799
+ MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall(
1800
+ Object* object,
1801
+ JSObject* holder,
1802
+ JSGlobalPropertyCell* cell,
1803
+ JSFunction* function,
1804
+ String* name) {
1805
+ // ----------- S t a t e -------------
1806
+ // -- rcx : function name
1807
+ // -- rsp[0] : return address
1808
+ // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1809
+ // -- ...
1810
+ // -- rsp[(argc + 1) * 8] : receiver
1811
+ // -----------------------------------
1812
+
1813
+ const int argc = arguments().immediate();
1814
+
1815
+ // If the object is not a JSObject or we got an unexpected number of
1816
+ // arguments, bail out to the regular call.
1817
+ if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();
1818
+
1819
+ Label miss;
1820
+ GenerateNameCheck(name, &miss);
1821
+
1822
+ if (cell == NULL) {
1823
+ __ movq(rdx, Operand(rsp, 2 * kPointerSize));
1824
+
1825
+ __ JumpIfSmi(rdx, &miss);
1826
+
1827
+ CheckPrototypes(JSObject::cast(object), rdx, holder, rbx, rax, rdi, name,
1828
+ &miss);
1829
+ } else {
1830
+ ASSERT(cell->value() == function);
1831
+ GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
1832
+ GenerateLoadFunctionFromCell(cell, function, &miss);
1833
+ }
1834
+
1835
+ // Load the char code argument.
1836
+ Register code = rbx;
1837
+ __ movq(code, Operand(rsp, 1 * kPointerSize));
1838
+
1839
+ // Check the code is a smi.
1840
+ Label slow;
1841
+ __ JumpIfNotSmi(code, &slow);
1842
+
1843
+ // Convert the smi code to uint16.
1844
+ __ SmiAndConstant(code, code, Smi::FromInt(0xffff));
1845
+
1846
+ StringCharFromCodeGenerator char_from_code_generator(code, rax);
1847
+ char_from_code_generator.GenerateFast(masm());
1848
+ __ ret(2 * kPointerSize);
1849
+
1850
+ StubRuntimeCallHelper call_helper;
1851
+ char_from_code_generator.GenerateSlow(masm(), call_helper);
1852
+
1853
+ // Tail call the full function. We do not have to patch the receiver
1854
+ // because the function makes no use of it.
1855
+ __ bind(&slow);
1856
+ __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
1857
+
1858
+ __ bind(&miss);
1859
+ // rcx: function name.
1860
+ Object* obj;
1861
+ { MaybeObject* maybe_obj = GenerateMissBranch();
1862
+ if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1863
+ }
1864
+
1865
+ // Return the generated code.
1866
+ return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
1867
+ }
1868
+
1869
+
1870
+ MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object,
1871
+ JSObject* holder,
1872
+ JSGlobalPropertyCell* cell,
1873
+ JSFunction* function,
1874
+ String* name) {
1875
+ // TODO(872): implement this.
1876
+ return Heap::undefined_value();
1877
+ }
1878
+
1879
+
1880
+ MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
1881
+ JSObject* holder,
1882
+ JSGlobalPropertyCell* cell,
1883
+ JSFunction* function,
1884
+ String* name) {
1885
+ // ----------- S t a t e -------------
1886
+ // -- rcx : function name
1887
+ // -- rsp[0] : return address
1888
+ // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1889
+ // -- ...
1890
+ // -- rsp[(argc + 1) * 8] : receiver
1891
+ // -----------------------------------
1892
+
1893
+ const int argc = arguments().immediate();
1894
+
1895
+ // If the object is not a JSObject or we got an unexpected number of
1896
+ // arguments, bail out to the regular call.
1897
+ if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();
1898
+
1899
+ Label miss;
1900
+ GenerateNameCheck(name, &miss);
1901
+
1902
+ if (cell == NULL) {
1903
+ __ movq(rdx, Operand(rsp, 2 * kPointerSize));
1904
+
1905
+ __ JumpIfSmi(rdx, &miss);
1906
+
1907
+ CheckPrototypes(JSObject::cast(object), rdx, holder, rbx, rax, rdi, name,
1908
+ &miss);
1909
+ } else {
1910
+ ASSERT(cell->value() == function);
1911
+ GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
1912
+ GenerateLoadFunctionFromCell(cell, function, &miss);
1913
+ }
1914
+
1915
+ // Load the (only) argument into rax.
1916
+ __ movq(rax, Operand(rsp, 1 * kPointerSize));
1917
+
1918
+ // Check if the argument is a smi.
1919
+ Label not_smi;
1920
+ STATIC_ASSERT(kSmiTag == 0);
1921
+ __ JumpIfNotSmi(rax, &not_smi);
1922
+ __ SmiToInteger32(rax, rax);
1923
+
1924
+ // Set ebx to 1...1 (== -1) if the argument is negative, or to 0...0
1925
+ // otherwise.
1926
+ __ movl(rbx, rax);
1927
+ __ sarl(rbx, Immediate(kBitsPerInt - 1));
1928
+
1929
+ // Do bitwise not or do nothing depending on ebx.
1930
+ __ xorl(rax, rbx);
1931
+
1932
+ // Add 1 or do nothing depending on ebx.
1933
+ __ subl(rax, rbx);
1934
+
1935
+ // If the result is still negative, go to the slow case.
1936
+ // This only happens for the most negative smi.
1937
+ Label slow;
1938
+ __ j(negative, &slow);
1939
+
1940
+ // Smi case done.
1941
+ __ Integer32ToSmi(rax, rax);
1942
+ __ ret(2 * kPointerSize);
1943
+
1944
+ // Check if the argument is a heap number and load its value.
1945
+ __ bind(&not_smi);
1946
+ __ CheckMap(rax, Factory::heap_number_map(), &slow, true);
1947
+ __ movq(rbx, FieldOperand(rax, HeapNumber::kValueOffset));
1948
+
1949
+ // Check the sign of the argument. If the argument is positive,
1950
+ // just return it.
1951
+ Label negative_sign;
1952
+ const int sign_mask_shift =
1953
+ (HeapNumber::kExponentOffset - HeapNumber::kValueOffset) * kBitsPerByte;
1954
+ __ movq(rdi, static_cast<int64_t>(HeapNumber::kSignMask) << sign_mask_shift,
1955
+ RelocInfo::NONE);
1956
+ __ testq(rbx, rdi);
1957
+ __ j(not_zero, &negative_sign);
1958
+ __ ret(2 * kPointerSize);
1959
+
1960
+ // If the argument is negative, clear the sign, and return a new
1961
+ // number. We still have the sign mask in rdi.
1962
+ __ bind(&negative_sign);
1963
+ __ xor_(rbx, rdi);
1964
+ __ AllocateHeapNumber(rax, rdx, &slow);
1965
+ __ movq(FieldOperand(rax, HeapNumber::kValueOffset), rbx);
1966
+ __ ret(2 * kPointerSize);
1967
+
1968
+ // Tail call the full function. We do not have to patch the receiver
1969
+ // because the function makes no use of it.
1970
+ __ bind(&slow);
1971
+ __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
1972
+
1973
+ __ bind(&miss);
1974
+ // rcx: function name.
1975
+ Object* obj;
1976
+ { MaybeObject* maybe_obj = GenerateMissBranch();
1977
+ if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1978
+ }
1979
+
1980
+ // Return the generated code.
1981
+ return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
1982
+ }
1983
+
1984
+
1985
+ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
1986
+ JSObject* holder,
1987
+ JSFunction* function,
1988
+ String* name,
1989
+ CheckType check) {
1990
+ // ----------- S t a t e -------------
1991
+ // rcx : function name
1992
+ // rsp[0] : return address
1993
+ // rsp[8] : argument argc
1994
+ // rsp[16] : argument argc - 1
1995
+ // ...
1996
+ // rsp[argc * 8] : argument 1
1997
+ // rsp[(argc + 1) * 8] : argument 0 = receiver
1998
+ // -----------------------------------
1999
+
2000
+ SharedFunctionInfo* function_info = function->shared();
2001
+ if (function_info->HasBuiltinFunctionId()) {
2002
+ BuiltinFunctionId id = function_info->builtin_function_id();
2003
+ MaybeObject* maybe_result = CompileCustomCall(
2004
+ id, object, holder, NULL, function, name);
2005
+ Object* result;
2006
+ if (!maybe_result->ToObject(&result)) return maybe_result;
2007
+ // undefined means bail out to regular compiler.
2008
+ if (!result->IsUndefined()) return result;
2009
+ }
2010
+
2011
+ Label miss_in_smi_check;
2012
+
2013
+ GenerateNameCheck(name, &miss_in_smi_check);
2014
+
2015
+ // Get the receiver from the stack.
2016
+ const int argc = arguments().immediate();
2017
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
2018
+
2019
+ // Check that the receiver isn't a smi.
2020
+ if (check != NUMBER_CHECK) {
2021
+ __ JumpIfSmi(rdx, &miss_in_smi_check);
2022
+ }
2023
+
2024
+ // Make sure that it's okay not to patch the on stack receiver
2025
+ // unless we're doing a receiver map check.
2026
+ ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
2027
+
2028
+ CallOptimization optimization(function);
2029
+ int depth = kInvalidProtoDepth;
2030
+ Label miss;
2031
+
2032
+ switch (check) {
2033
+ case RECEIVER_MAP_CHECK:
2034
+ __ IncrementCounter(&Counters::call_const, 1);
2035
+
2036
+ if (optimization.is_simple_api_call() && !object->IsGlobalObject()) {
2037
+ depth = optimization.GetPrototypeDepthOfExpectedType(
2038
+ JSObject::cast(object), holder);
2039
+ }
2040
+
2041
+ if (depth != kInvalidProtoDepth) {
2042
+ __ IncrementCounter(&Counters::call_const_fast_api, 1);
2043
+
2044
+ // Allocate space for v8::Arguments implicit values. Must be initialized
2045
+ // before to call any runtime function.
2046
+ __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
2047
+ }
2048
+
2049
+ // Check that the maps haven't changed.
2050
+ CheckPrototypes(JSObject::cast(object), rdx, holder,
2051
+ rbx, rax, rdi, name, depth, &miss);
2052
+
2053
+ // Patch the receiver on the stack with the global proxy if
2054
+ // necessary.
2055
+ if (object->IsGlobalObject()) {
2056
+ ASSERT(depth == kInvalidProtoDepth);
2057
+ __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
2058
+ __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
2059
+ }
2060
+ break;
2061
+
2062
+ case STRING_CHECK:
2063
+ if (!function->IsBuiltin() && !function_info->strict_mode()) {
2064
+ // Calling non-strict non-builtins with a value as the receiver
2065
+ // requires boxing.
2066
+ __ jmp(&miss);
2067
+ } else {
2068
+ // Check that the object is a two-byte string or a symbol.
2069
+ __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, rax);
2070
+ __ j(above_equal, &miss);
2071
+ // Check that the maps starting from the prototype haven't changed.
2072
+ GenerateDirectLoadGlobalFunctionPrototype(
2073
+ masm(), Context::STRING_FUNCTION_INDEX, rax, &miss);
2074
+ CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
2075
+ rbx, rdx, rdi, name, &miss);
2076
+ }
2077
+ break;
2078
+
2079
+ case NUMBER_CHECK: {
2080
+ if (!function->IsBuiltin() && !function_info->strict_mode()) {
2081
+ // Calling non-strict non-builtins with a value as the receiver
2082
+ // requires boxing.
2083
+ __ jmp(&miss);
2084
+ } else {
2085
+ Label fast;
2086
+ // Check that the object is a smi or a heap number.
2087
+ __ JumpIfSmi(rdx, &fast);
2088
+ __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rax);
2089
+ __ j(not_equal, &miss);
2090
+ __ bind(&fast);
2091
+ // Check that the maps starting from the prototype haven't changed.
2092
+ GenerateDirectLoadGlobalFunctionPrototype(
2093
+ masm(), Context::NUMBER_FUNCTION_INDEX, rax, &miss);
2094
+ CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
2095
+ rbx, rdx, rdi, name, &miss);
2096
+ }
2097
+ break;
2098
+ }
2099
+
2100
+ case BOOLEAN_CHECK: {
2101
+ if (!function->IsBuiltin() && !function_info->strict_mode()) {
2102
+ // Calling non-strict non-builtins with a value as the receiver
2103
+ // requires boxing.
2104
+ __ jmp(&miss);
2105
+ } else {
2106
+ Label fast;
2107
+ // Check that the object is a boolean.
2108
+ __ CompareRoot(rdx, Heap::kTrueValueRootIndex);
2109
+ __ j(equal, &fast);
2110
+ __ CompareRoot(rdx, Heap::kFalseValueRootIndex);
2111
+ __ j(not_equal, &miss);
2112
+ __ bind(&fast);
2113
+ // Check that the maps starting from the prototype haven't changed.
2114
+ GenerateDirectLoadGlobalFunctionPrototype(
2115
+ masm(), Context::BOOLEAN_FUNCTION_INDEX, rax, &miss);
2116
+ CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
2117
+ rbx, rdx, rdi, name, &miss);
2118
+ }
2119
+ break;
2120
+ }
2121
+
2122
+ default:
2123
+ UNREACHABLE();
2124
+ }
2125
+
2126
+ if (depth != kInvalidProtoDepth) {
2127
+ // Move the return address on top of the stack.
2128
+ __ movq(rax, Operand(rsp, 3 * kPointerSize));
2129
+ __ movq(Operand(rsp, 0 * kPointerSize), rax);
2130
+
2131
+ // rsp[2 * kPointerSize] is uninitialized, rsp[3 * kPointerSize] contains
2132
+ // duplicate of return address and will be overwritten.
2133
+ MaybeObject* result = GenerateFastApiCall(masm(), optimization, argc);
2134
+ if (result->IsFailure()) return result;
2135
+ } else {
2136
+ __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
2137
+ }
2138
+
2139
+ // Handle call cache miss.
2140
+ __ bind(&miss);
2141
+ if (depth != kInvalidProtoDepth) {
2142
+ __ addq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
2143
+ }
2144
+
2145
+ // Handle call cache miss.
2146
+ __ bind(&miss_in_smi_check);
2147
+ Object* obj;
2148
+ { MaybeObject* maybe_obj = GenerateMissBranch();
2149
+ if (!maybe_obj->ToObject(&obj)) return maybe_obj;
2150
+ }
2151
+
2152
+ // Return the generated code.
2153
+ return GetCode(function);
2154
+ }
2155
+
2156
+
2157
+ MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
2158
+ JSObject* holder,
2159
+ String* name) {
2160
+ // ----------- S t a t e -------------
2161
+ // rcx : function name
2162
+ // rsp[0] : return address
2163
+ // rsp[8] : argument argc
2164
+ // rsp[16] : argument argc - 1
2165
+ // ...
2166
+ // rsp[argc * 8] : argument 1
2167
+ // rsp[(argc + 1) * 8] : argument 0 = receiver
2168
+ // -----------------------------------
2169
+ Label miss;
2170
+
2171
+ GenerateNameCheck(name, &miss);
2172
+
2173
+ // Get the number of arguments.
2174
+ const int argc = arguments().immediate();
2175
+
2176
+ LookupResult lookup;
2177
+ LookupPostInterceptor(holder, name, &lookup);
2178
+
2179
+ // Get the receiver from the stack.
2180
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
2181
+
2182
+ CallInterceptorCompiler compiler(this, arguments(), rcx);
2183
+ MaybeObject* result = compiler.Compile(masm(),
2184
+ object,
2185
+ holder,
2186
+ name,
2187
+ &lookup,
2188
+ rdx,
2189
+ rbx,
2190
+ rdi,
2191
+ rax,
2192
+ &miss);
2193
+ if (result->IsFailure()) return result;
2194
+
2195
+ // Restore receiver.
2196
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
2197
+
2198
+ // Check that the function really is a function.
2199
+ __ JumpIfSmi(rax, &miss);
2200
+ __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
2201
+ __ j(not_equal, &miss);
2202
+
2203
+ // Patch the receiver on the stack with the global proxy if
2204
+ // necessary.
2205
+ if (object->IsGlobalObject()) {
2206
+ __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
2207
+ __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
2208
+ }
2209
+
2210
+ // Invoke the function.
2211
+ __ movq(rdi, rax);
2212
+ __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION);
2213
+
2214
+ // Handle load cache miss.
2215
+ __ bind(&miss);
2216
+ Object* obj;
2217
+ { MaybeObject* maybe_obj = GenerateMissBranch();
2218
+ if (!maybe_obj->ToObject(&obj)) return maybe_obj;
2219
+ }
2220
+
2221
+ // Return the generated code.
2222
+ return GetCode(INTERCEPTOR, name);
2223
+ }
2224
+
2225
+
2226
+ MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
2227
+ GlobalObject* holder,
2228
+ JSGlobalPropertyCell* cell,
2229
+ JSFunction* function,
2230
+ String* name) {
2231
+ // ----------- S t a t e -------------
2232
+ // rcx : function name
2233
+ // rsp[0] : return address
2234
+ // rsp[8] : argument argc
2235
+ // rsp[16] : argument argc - 1
2236
+ // ...
2237
+ // rsp[argc * 8] : argument 1
2238
+ // rsp[(argc + 1) * 8] : argument 0 = receiver
2239
+ // -----------------------------------
2240
+
2241
+ SharedFunctionInfo* function_info = function->shared();
2242
+ if (function_info->HasBuiltinFunctionId()) {
2243
+ BuiltinFunctionId id = function_info->builtin_function_id();
2244
+ MaybeObject* maybe_result = CompileCustomCall(
2245
+ id, object, holder, cell, function, name);
2246
+ Object* result;
2247
+ if (!maybe_result->ToObject(&result)) return maybe_result;
2248
+ // undefined means bail out to regular compiler.
2249
+ if (!result->IsUndefined()) return result;
2250
+ }
2251
+
2252
+ Label miss;
2253
+
2254
+ GenerateNameCheck(name, &miss);
2255
+
2256
+ // Get the number of arguments.
2257
+ const int argc = arguments().immediate();
2258
+
2259
+ GenerateGlobalReceiverCheck(object, holder, name, &miss);
2260
+
2261
+ GenerateLoadFunctionFromCell(cell, function, &miss);
2262
+
2263
+ // Patch the receiver on the stack with the global proxy.
2264
+ if (object->IsGlobalObject()) {
2265
+ __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
2266
+ __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
2267
+ }
2268
+
2269
+ // Setup the context (function already in rdi).
2270
+ __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
2271
+
2272
+ // Jump to the cached code (tail call).
2273
+ __ IncrementCounter(&Counters::call_global_inline, 1);
2274
+ ASSERT(function->is_compiled());
2275
+ ParameterCount expected(function->shared()->formal_parameter_count());
2276
+ if (V8::UseCrankshaft()) {
2277
+ // TODO(kasperl): For now, we always call indirectly through the
2278
+ // code field in the function to allow recompilation to take effect
2279
+ // without changing any of the call sites.
2280
+ __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
2281
+ __ InvokeCode(rdx, expected, arguments(), JUMP_FUNCTION);
2282
+ } else {
2283
+ Handle<Code> code(function->code());
2284
+ __ InvokeCode(code, expected, arguments(),
2285
+ RelocInfo::CODE_TARGET, JUMP_FUNCTION);
2286
+ }
2287
+ // Handle call cache miss.
2288
+ __ bind(&miss);
2289
+ __ IncrementCounter(&Counters::call_global_inline_miss, 1);
2290
+ Object* obj;
2291
+ { MaybeObject* maybe_obj = GenerateMissBranch();
2292
+ if (!maybe_obj->ToObject(&obj)) return maybe_obj;
2293
+ }
2294
+
2295
+ // Return the generated code.
2296
+ return GetCode(NORMAL, name);
2297
+ }
2298
+
2299
+
2300
+ MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
2301
+ int index,
2302
+ Map* transition,
2303
+ String* name) {
2304
+ // ----------- S t a t e -------------
2305
+ // -- rax : value
2306
+ // -- rcx : name
2307
+ // -- rdx : receiver
2308
+ // -- rsp[0] : return address
2309
+ // -----------------------------------
2310
+ Label miss;
2311
+
2312
+ // Generate store field code. Preserves receiver and name on jump to miss.
2313
+ GenerateStoreField(masm(),
2314
+ object,
2315
+ index,
2316
+ transition,
2317
+ rdx, rcx, rbx,
2318
+ &miss);
2319
+
2320
+ // Handle store cache miss.
2321
+ __ bind(&miss);
2322
+ Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
2323
+ __ Jump(ic, RelocInfo::CODE_TARGET);
2324
+
2325
+ // Return the generated code.
2326
+ return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
2327
+ }
2328
+
2329
+
2330
+ MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
2331
+ AccessorInfo* callback,
2332
+ String* name) {
2333
+ // ----------- S t a t e -------------
2334
+ // -- rax : value
2335
+ // -- rcx : name
2336
+ // -- rdx : receiver
2337
+ // -- rsp[0] : return address
2338
+ // -----------------------------------
2339
+ Label miss;
2340
+
2341
+ // Check that the object isn't a smi.
2342
+ __ JumpIfSmi(rdx, &miss);
2343
+
2344
+ // Check that the map of the object hasn't changed.
2345
+ __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2346
+ Handle<Map>(object->map()));
2347
+ __ j(not_equal, &miss);
2348
+
2349
+ // Perform global security token check if needed.
2350
+ if (object->IsJSGlobalProxy()) {
2351
+ __ CheckAccessGlobalProxy(rdx, rbx, &miss);
2352
+ }
2353
+
2354
+ // Stub never generated for non-global objects that require access
2355
+ // checks.
2356
+ ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2357
+
2358
+ __ pop(rbx); // remove the return address
2359
+ __ push(rdx); // receiver
2360
+ __ Push(Handle<AccessorInfo>(callback)); // callback info
2361
+ __ push(rcx); // name
2362
+ __ push(rax); // value
2363
+ __ push(rbx); // restore return address
2364
+
2365
+ // Do tail-call to the runtime system.
2366
+ ExternalReference store_callback_property =
2367
+ ExternalReference(IC_Utility(IC::kStoreCallbackProperty));
2368
+ __ TailCallExternalReference(store_callback_property, 4, 1);
2369
+
2370
+ // Handle store cache miss.
2371
+ __ bind(&miss);
2372
+ Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
2373
+ __ Jump(ic, RelocInfo::CODE_TARGET);
2374
+
2375
+ // Return the generated code.
2376
+ return GetCode(CALLBACKS, name);
2377
+ }
2378
+
2379
+
2380
+ MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
2381
+ String* name) {
2382
+ // ----------- S t a t e -------------
2383
+ // -- rax : value
2384
+ // -- rcx : name
2385
+ // -- rdx : receiver
2386
+ // -- rsp[0] : return address
2387
+ // -----------------------------------
2388
+ Label miss;
2389
+
2390
+ // Check that the object isn't a smi.
2391
+ __ JumpIfSmi(rdx, &miss);
2392
+
2393
+ // Check that the map of the object hasn't changed.
2394
+ __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2395
+ Handle<Map>(receiver->map()));
2396
+ __ j(not_equal, &miss);
2397
+
2398
+ // Perform global security token check if needed.
2399
+ if (receiver->IsJSGlobalProxy()) {
2400
+ __ CheckAccessGlobalProxy(rdx, rbx, &miss);
2401
+ }
2402
+
2403
+ // Stub never generated for non-global objects that require access
2404
+ // checks.
2405
+ ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());
2406
+
2407
+ __ pop(rbx); // remove the return address
2408
+ __ push(rdx); // receiver
2409
+ __ push(rcx); // name
2410
+ __ push(rax); // value
2411
+ __ Push(Smi::FromInt(strict_mode_));
2412
+ __ push(rbx); // restore return address
2413
+
2414
+ // Do tail-call to the runtime system.
2415
+ ExternalReference store_ic_property =
2416
+ ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
2417
+ __ TailCallExternalReference(store_ic_property, 4, 1);
2418
+
2419
+ // Handle store cache miss.
2420
+ __ bind(&miss);
2421
+ Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
2422
+ __ Jump(ic, RelocInfo::CODE_TARGET);
2423
+
2424
+ // Return the generated code.
2425
+ return GetCode(INTERCEPTOR, name);
2426
+ }
2427
+
2428
+
2429
// Compiles a named-store stub for a global property whose value lives in a
// JSGlobalPropertyCell.  The stub bails out to the generic StoreIC_Miss
// builtin when the global's map has changed or when the cell contains the
// hole value (i.e. the property was deleted).
MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
                                                   JSGlobalPropertyCell* cell,
                                                   String* name) {
  // ----------- S t a t e -------------
  //  -- rax : value
  //  -- rcx : name
  //  -- rdx : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the map of the global has not changed.
  __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
         Handle<Map>(object->map()));
  __ j(not_equal, &miss);

  // Check that the value in the cell is not the hole. If it is, this
  // cell could have been deleted and reintroducing the global needs
  // to update the property details in the property dictionary of the
  // global object. We bail out to the runtime system to do that.
  __ Move(rbx, Handle<JSGlobalPropertyCell>(cell));
  __ CompareRoot(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset),
                 Heap::kTheHoleValueRootIndex);
  __ j(equal, &miss);

  // Store the value in the cell.
  __ movq(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset), rax);

  // Return the value (register rax).
  __ IncrementCounter(&Counters::named_store_global_inline, 1);
  __ ret(0);

  // Handle store cache miss.
  __ bind(&miss);
  __ IncrementCounter(&Counters::named_store_global_inline_miss, 1);
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, name);
}
2470
+
2471
+
2472
// Compiles a keyed-store stub for a named field of |object|.  Only stores
// whose key equals |name| are handled inline; any other key, or a failed
// field store, misses to the KeyedStoreIC_Miss builtin.  |transition|, when
// non-NULL, is the map the object transitions to when the store adds the
// property (MAP_TRANSITION case).
MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
                                                       int index,
                                                       Map* transition,
                                                       String* name) {
  // ----------- S t a t e -------------
  //  -- rax : value
  //  -- rcx : key
  //  -- rdx : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  __ IncrementCounter(&Counters::keyed_store_field, 1);

  // Check that the name has not changed.
  __ Cmp(rcx, Handle<String>(name));
  __ j(not_equal, &miss);

  // Generate store field code. Preserves receiver and name on jump to miss.
  GenerateStoreField(masm(),
                     object,
                     index,
                     transition,
                     rdx, rcx, rbx,
                     &miss);

  // Handle store cache miss.  Undo the counter bump from the fast path.
  __ bind(&miss);
  __ DecrementCounter(&Counters::keyed_store_field, 1);
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
}
2507
+
2508
+
2509
// Compiles a keyed-store stub specialized to |receiver|'s map for fast
// (non-COW) element stores.  Handles only smi keys that are in bounds;
// everything else misses to KeyedStoreIC_Miss.  Performs the write barrier
// after the element store.
MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
    JSObject* receiver) {
  // ----------- S t a t e -------------
  //  -- rax : value
  //  -- rcx : key
  //  -- rdx : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(rdx, &miss);

  // Check that the map matches.
  __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
         Handle<Map>(receiver->map()));
  __ j(not_equal, &miss);

  // Check that the key is a smi.
  __ JumpIfNotSmi(rcx, &miss);

  // Get the elements array and make sure it is a fast element array, not 'cow'.
  __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
  __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset),
         Factory::fixed_array_map());
  __ j(not_equal, &miss);

  // Check that the key is within bounds.  For JSArrays the bound is the
  // array length; otherwise it is the backing store's length.
  if (receiver->IsJSArray()) {
    __ SmiCompare(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
    __ j(above_equal, &miss);
  } else {
    __ SmiCompare(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
    __ j(above_equal, &miss);
  }

  // Do the store and update the write barrier. Make sure to preserve
  // the value in register rax.  (rdx and rcx are clobbered as scratch
  // registers for RecordWrite; receiver/key are no longer needed.)
  __ movq(rdx, rax);
  __ SmiToInteger32(rcx, rcx);
  __ movq(FieldOperand(rdi, rcx, times_pointer_size, FixedArray::kHeaderSize),
          rax);
  __ RecordWrite(rdi, 0, rdx, rcx);

  // Done.
  __ ret(0);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.  NULL name: the stub is keyed, not named.
  return GetCode(NORMAL, NULL);
}
2564
+
2565
+
2566
// Compiles a keyed-store stub specialized to a receiver whose elements are
// a pixel array.  All failure paths (wrong map, bad key/value) miss to
// KeyedStoreIC_Miss.
MaybeObject* KeyedStoreStubCompiler::CompileStorePixelArray(
    JSObject* receiver) {
  // ----------- S t a t e -------------
  //  -- rax : value
  //  -- rcx : key
  //  -- rdx : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the map matches.
  __ CheckMap(rdx, Handle<Map>(receiver->map()), &miss, false);

  // Do the store.  See GenerateFastPixelArrayStore for the meaning of the
  // boolean/label arguments — presumably (check_receiver, check_key) flags
  // followed by the failure labels; TODO confirm against the helper's
  // declaration.
  GenerateFastPixelArrayStore(masm(),
                              rdx,
                              rcx,
                              rax,
                              rdi,
                              rbx,
                              true,
                              false,
                              &miss,
                              &miss,
                              NULL,
                              &miss);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.  NULL name: the stub is keyed, not named.
  return GetCode(NORMAL, NULL);
}
2601
+
2602
+
2603
// Compiles a load stub for a property known NOT to exist anywhere on the
// prototype chain from |object| up to |last|.  If all maps (and, for a
// global |last|, the property cell) are unchanged, the stub returns
// undefined; otherwise it misses to the generic LOAD_IC miss handler.
MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
                                                      JSObject* object,
                                                      JSObject* last) {
  // ----------- S t a t e -------------
  //  -- rax : receiver
  //  -- rcx : name
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that receiver is not a smi.
  __ JumpIfSmi(rax, &miss);

  // Check the maps of the full prototype chain. Also check that
  // global property cells up to (but not including) the last object
  // in the prototype chain are empty.
  CheckPrototypes(object, rax, last, rbx, rdx, rdi, name, &miss);

  // If the last object in the prototype chain is a global object,
  // check that the global property cell is empty.
  if (last->IsGlobalObject()) {
    MaybeObject* cell = GenerateCheckPropertyCell(masm(),
                                                  GlobalObject::cast(last),
                                                  name,
                                                  rdx,
                                                  &miss);
    if (cell->IsFailure()) {
      // Code generation failed; release the unbound label before bailing.
      miss.Unuse();
      return cell;
    }
  }

  // Return undefined if maps of the full prototype chain are still the
  // same and no global property with this name contains a value.
  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
  __ ret(0);

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.  Keyed on the empty string: nonexistent
  // stubs are shared rather than per-name.
  return GetCode(NONEXISTENT, Heap::empty_string());
}
2646
+
2647
+
2648
+ MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object,
2649
+ JSObject* holder,
2650
+ int index,
2651
+ String* name) {
2652
+ // ----------- S t a t e -------------
2653
+ // -- rax : receiver
2654
+ // -- rcx : name
2655
+ // -- rsp[0] : return address
2656
+ // -----------------------------------
2657
+ Label miss;
2658
+
2659
+ GenerateLoadField(object, holder, rax, rbx, rdx, rdi, index, name, &miss);
2660
+ __ bind(&miss);
2661
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
2662
+
2663
+ // Return the generated code.
2664
+ return GetCode(FIELD, name);
2665
+ }
2666
+
2667
+
2668
+ MaybeObject* LoadStubCompiler::CompileLoadCallback(String* name,
2669
+ JSObject* object,
2670
+ JSObject* holder,
2671
+ AccessorInfo* callback) {
2672
+ // ----------- S t a t e -------------
2673
+ // -- rax : receiver
2674
+ // -- rcx : name
2675
+ // -- rsp[0] : return address
2676
+ // -----------------------------------
2677
+ Label miss;
2678
+
2679
+ MaybeObject* result = GenerateLoadCallback(object, holder, rax, rcx, rdx, rbx,
2680
+ rdi, callback, name, &miss);
2681
+ if (result->IsFailure()) {
2682
+ miss.Unuse();
2683
+ return result;
2684
+ }
2685
+
2686
+ __ bind(&miss);
2687
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
2688
+
2689
+ // Return the generated code.
2690
+ return GetCode(CALLBACKS, name);
2691
+ }
2692
+
2693
+
2694
+ MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object,
2695
+ JSObject* holder,
2696
+ Object* value,
2697
+ String* name) {
2698
+ // ----------- S t a t e -------------
2699
+ // -- rax : receiver
2700
+ // -- rcx : name
2701
+ // -- rsp[0] : return address
2702
+ // -----------------------------------
2703
+ Label miss;
2704
+
2705
+ GenerateLoadConstant(object, holder, rax, rbx, rdx, rdi, value, name, &miss);
2706
+ __ bind(&miss);
2707
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
2708
+
2709
+ // Return the generated code.
2710
+ return GetCode(CONSTANT_FUNCTION, name);
2711
+ }
2712
+
2713
+
2714
+ MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
2715
+ JSObject* holder,
2716
+ String* name) {
2717
+ // ----------- S t a t e -------------
2718
+ // -- rax : receiver
2719
+ // -- rcx : name
2720
+ // -- rsp[0] : return address
2721
+ // -----------------------------------
2722
+ Label miss;
2723
+
2724
+ LookupResult lookup;
2725
+ LookupPostInterceptor(holder, name, &lookup);
2726
+
2727
+ // TODO(368): Compile in the whole chain: all the interceptors in
2728
+ // prototypes and ultimate answer.
2729
+ GenerateLoadInterceptor(receiver,
2730
+ holder,
2731
+ &lookup,
2732
+ rax,
2733
+ rcx,
2734
+ rdx,
2735
+ rbx,
2736
+ rdi,
2737
+ name,
2738
+ &miss);
2739
+
2740
+ __ bind(&miss);
2741
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
2742
+
2743
+ // Return the generated code.
2744
+ return GetCode(INTERCEPTOR, name);
2745
+ }
2746
+
2747
+
2748
// Compiles a load stub for a global property stored in |cell|.  For
// contextual loads (object == holder) the receiver is known to be the
// global object, so the smi check is skipped.  If the property is
// deletable, a hole value in the cell triggers a miss.
MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
                                                 GlobalObject* holder,
                                                 JSGlobalPropertyCell* cell,
                                                 String* name,
                                                 bool is_dont_delete) {
  // ----------- S t a t e -------------
  //  -- rax : receiver
  //  -- rcx : name
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // If the object is the holder then we know that it's a global
  // object which can only happen for contextual loads. In this case,
  // the receiver cannot be a smi.
  if (object != holder) {
    __ JumpIfSmi(rax, &miss);
  }

  // Check that the maps haven't changed.
  CheckPrototypes(object, rax, holder, rbx, rdx, rdi, name, &miss);

  // Get the value from the cell.
  __ Move(rbx, Handle<JSGlobalPropertyCell>(cell));
  __ movq(rbx, FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (!is_dont_delete) {
    __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
    __ j(equal, &miss);
  } else if (FLAG_debug_code) {
    // DONT_DELETE properties can never hold the hole; assert in debug builds.
    __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
    __ Check(not_equal, "DontDelete cells can't contain the hole");
  }

  __ IncrementCounter(&Counters::named_load_global_stub, 1);
  __ movq(rax, rbx);
  __ ret(0);

  __ bind(&miss);
  __ IncrementCounter(&Counters::named_load_global_stub_miss, 1);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(NORMAL, name);
}
2794
+
2795
+
2796
+ MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
2797
+ JSObject* receiver,
2798
+ JSObject* holder,
2799
+ int index) {
2800
+ // ----------- S t a t e -------------
2801
+ // -- rax : key
2802
+ // -- rdx : receiver
2803
+ // -- rsp[0] : return address
2804
+ // -----------------------------------
2805
+ Label miss;
2806
+
2807
+ __ IncrementCounter(&Counters::keyed_load_field, 1);
2808
+
2809
+ // Check that the name has not changed.
2810
+ __ Cmp(rax, Handle<String>(name));
2811
+ __ j(not_equal, &miss);
2812
+
2813
+ GenerateLoadField(receiver, holder, rdx, rbx, rcx, rdi, index, name, &miss);
2814
+
2815
+ __ bind(&miss);
2816
+ __ DecrementCounter(&Counters::keyed_load_field, 1);
2817
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2818
+
2819
+ // Return the generated code.
2820
+ return GetCode(FIELD, name);
2821
+ }
2822
+
2823
+
2824
+ MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback(
2825
+ String* name,
2826
+ JSObject* receiver,
2827
+ JSObject* holder,
2828
+ AccessorInfo* callback) {
2829
+ // ----------- S t a t e -------------
2830
+ // -- rax : key
2831
+ // -- rdx : receiver
2832
+ // -- rsp[0] : return address
2833
+ // -----------------------------------
2834
+ Label miss;
2835
+
2836
+ __ IncrementCounter(&Counters::keyed_load_callback, 1);
2837
+
2838
+ // Check that the name has not changed.
2839
+ __ Cmp(rax, Handle<String>(name));
2840
+ __ j(not_equal, &miss);
2841
+
2842
+ MaybeObject* result = GenerateLoadCallback(receiver, holder, rdx, rax, rbx,
2843
+ rcx, rdi, callback, name, &miss);
2844
+ if (result->IsFailure()) {
2845
+ miss.Unuse();
2846
+ return result;
2847
+ }
2848
+
2849
+ __ bind(&miss);
2850
+
2851
+ __ DecrementCounter(&Counters::keyed_load_callback, 1);
2852
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2853
+
2854
+ // Return the generated code.
2855
+ return GetCode(CALLBACKS, name);
2856
+ }
2857
+
2858
+
2859
+ MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
2860
+ JSObject* receiver,
2861
+ JSObject* holder,
2862
+ Object* value) {
2863
+ // ----------- S t a t e -------------
2864
+ // -- rax : key
2865
+ // -- rdx : receiver
2866
+ // -- rsp[0] : return address
2867
+ // -----------------------------------
2868
+ Label miss;
2869
+
2870
+ __ IncrementCounter(&Counters::keyed_load_constant_function, 1);
2871
+
2872
+ // Check that the name has not changed.
2873
+ __ Cmp(rax, Handle<String>(name));
2874
+ __ j(not_equal, &miss);
2875
+
2876
+ GenerateLoadConstant(receiver, holder, rdx, rbx, rcx, rdi,
2877
+ value, name, &miss);
2878
+ __ bind(&miss);
2879
+ __ DecrementCounter(&Counters::keyed_load_constant_function, 1);
2880
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2881
+
2882
+ // Return the generated code.
2883
+ return GetCode(CONSTANT_FUNCTION, name);
2884
+ }
2885
+
2886
+
2887
+ MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
2888
+ JSObject* holder,
2889
+ String* name) {
2890
+ // ----------- S t a t e -------------
2891
+ // -- rax : key
2892
+ // -- rdx : receiver
2893
+ // -- rsp[0] : return address
2894
+ // -----------------------------------
2895
+ Label miss;
2896
+
2897
+ __ IncrementCounter(&Counters::keyed_load_interceptor, 1);
2898
+
2899
+ // Check that the name has not changed.
2900
+ __ Cmp(rax, Handle<String>(name));
2901
+ __ j(not_equal, &miss);
2902
+
2903
+ LookupResult lookup;
2904
+ LookupPostInterceptor(holder, name, &lookup);
2905
+ GenerateLoadInterceptor(receiver,
2906
+ holder,
2907
+ &lookup,
2908
+ rdx,
2909
+ rax,
2910
+ rcx,
2911
+ rbx,
2912
+ rdi,
2913
+ name,
2914
+ &miss);
2915
+ __ bind(&miss);
2916
+ __ DecrementCounter(&Counters::keyed_load_interceptor, 1);
2917
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2918
+
2919
+ // Return the generated code.
2920
+ return GetCode(INTERCEPTOR, name);
2921
+ }
2922
+
2923
+
2924
+ MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
2925
+ // ----------- S t a t e -------------
2926
+ // -- rax : key
2927
+ // -- rdx : receiver
2928
+ // -- rsp[0] : return address
2929
+ // -----------------------------------
2930
+ Label miss;
2931
+
2932
+ __ IncrementCounter(&Counters::keyed_load_array_length, 1);
2933
+
2934
+ // Check that the name has not changed.
2935
+ __ Cmp(rax, Handle<String>(name));
2936
+ __ j(not_equal, &miss);
2937
+
2938
+ GenerateLoadArrayLength(masm(), rdx, rcx, &miss);
2939
+ __ bind(&miss);
2940
+ __ DecrementCounter(&Counters::keyed_load_array_length, 1);
2941
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2942
+
2943
+ // Return the generated code.
2944
+ return GetCode(CALLBACKS, name);
2945
+ }
2946
+
2947
+
2948
+ MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
2949
+ // ----------- S t a t e -------------
2950
+ // -- rax : key
2951
+ // -- rdx : receiver
2952
+ // -- rsp[0] : return address
2953
+ // -----------------------------------
2954
+ Label miss;
2955
+
2956
+ __ IncrementCounter(&Counters::keyed_load_string_length, 1);
2957
+
2958
+ // Check that the name has not changed.
2959
+ __ Cmp(rax, Handle<String>(name));
2960
+ __ j(not_equal, &miss);
2961
+
2962
+ GenerateLoadStringLength(masm(), rdx, rcx, rbx, &miss, true);
2963
+ __ bind(&miss);
2964
+ __ DecrementCounter(&Counters::keyed_load_string_length, 1);
2965
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2966
+
2967
+ // Return the generated code.
2968
+ return GetCode(CALLBACKS, name);
2969
+ }
2970
+
2971
+
2972
+ MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
2973
+ // ----------- S t a t e -------------
2974
+ // -- rax : key
2975
+ // -- rdx : receiver
2976
+ // -- rsp[0] : return address
2977
+ // -----------------------------------
2978
+ Label miss;
2979
+
2980
+ __ IncrementCounter(&Counters::keyed_load_function_prototype, 1);
2981
+
2982
+ // Check that the name has not changed.
2983
+ __ Cmp(rax, Handle<String>(name));
2984
+ __ j(not_equal, &miss);
2985
+
2986
+ GenerateLoadFunctionPrototype(masm(), rdx, rcx, rbx, &miss);
2987
+ __ bind(&miss);
2988
+ __ DecrementCounter(&Counters::keyed_load_function_prototype, 1);
2989
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2990
+
2991
+ // Return the generated code.
2992
+ return GetCode(CALLBACKS, name);
2993
+ }
2994
+
2995
+
2996
// Compiles a keyed-load stub specialized to |receiver|'s map for fast
// element loads.  Handles only smi keys that are in bounds and element
// slots that are not the hole; everything else misses to the generic
// KEYED_LOAD_IC miss handler.
MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) {
  // ----------- S t a t e -------------
  //  -- rax : key
  //  -- rdx : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(rdx, &miss);

  // Check that the map matches.
  __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
         Handle<Map>(receiver->map()));
  __ j(not_equal, &miss);

  // Check that the key is a smi.
  __ JumpIfNotSmi(rax, &miss);

  // Get the elements array.
  __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
  __ AssertFastElements(rcx);

  // Check that the key is within bounds.
  __ SmiCompare(rax, FieldOperand(rcx, FixedArray::kLengthOffset));
  __ j(above_equal, &miss);

  // Load the result and make sure it's not the hole.
  SmiIndex index = masm()->SmiToIndex(rbx, rax, kPointerSizeLog2);
  __ movq(rbx, FieldOperand(rcx,
                            index.reg,
                            index.scale,
                            FixedArray::kHeaderSize));
  __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
  __ j(equal, &miss);
  __ movq(rax, rbx);
  __ ret(0);

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.  NULL name: the stub is keyed, not named.
  return GetCode(NORMAL, NULL);
}
3040
+
3041
+
3042
// Compiles a keyed-load stub specialized to a receiver whose elements are
// a pixel array.  All failure paths miss to the generic KEYED_LOAD_IC
// miss handler.
MaybeObject* KeyedLoadStubCompiler::CompileLoadPixelArray(JSObject* receiver) {
  // ----------- S t a t e -------------
  //  -- rax : key
  //  -- rdx : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the map matches.
  __ CheckMap(rdx, Handle<Map>(receiver->map()), &miss, false);

  // Do the load.  rax serves as both the key input and the result
  // register; see GenerateFastPixelArrayLoad for the scratch-register
  // contract — TODO confirm against the helper's declaration.
  GenerateFastPixelArrayLoad(masm(),
                             rdx,
                             rax,
                             rbx,
                             rcx,
                             rax,
                             &miss,
                             &miss,
                             &miss);

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.  NULL name: the stub is keyed, not named.
  return GetCode(NORMAL, NULL);
}
3069
+
3070
+
3071
// Specialized stub for constructing objects from functions which only have
// simple assignments of the form this.x = ...; in their body.  Allocates
// the object inline, fills in-object properties from the constructor's
// arguments/constants, and returns the tagged object.  Anything it cannot
// handle (breakpoints set, missing initial map, allocation failure) jumps
// to the generic construct stub.
MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
  // ----------- S t a t e -------------
  //  -- rax : argc
  //  -- rdi : constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_stub_call;

  // Use r8 for holding undefined which is used in several places below.
  __ Move(r8, Factory::undefined_value());

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Check to see whether there are any break points in the function code. If
  // there are jump to the generic constructor stub which calls the actual
  // code for the function thereby hitting the break points.
  __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movq(rbx, FieldOperand(rbx, SharedFunctionInfo::kDebugInfoOffset));
  __ cmpq(rbx, r8);
  __ j(not_equal, &generic_stub_call);
#endif

  // Load the initial map and verify that it is in fact a map.
  __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
  // Will both indicate a NULL and a Smi.
  ASSERT(kSmiTag == 0);
  __ JumpIfSmi(rbx, &generic_stub_call);
  __ CmpObjectType(rbx, MAP_TYPE, rcx);
  __ j(not_equal, &generic_stub_call);

#ifdef DEBUG
  // Cannot construct functions this way.
  // rdi: constructor
  // rbx: initial map
  __ CmpInstanceType(rbx, JS_FUNCTION_TYPE);
  __ Assert(not_equal, "Function constructed by construct stub.");
#endif

  // Now allocate the JSObject in new space.
  // rdi: constructor
  // rbx: initial map
  __ movzxbq(rcx, FieldOperand(rbx, Map::kInstanceSizeOffset));
  __ shl(rcx, Immediate(kPointerSizeLog2));
  __ AllocateInNewSpace(rcx,
                        rdx,
                        rcx,
                        no_reg,
                        &generic_stub_call,
                        NO_ALLOCATION_FLAGS);

  // Allocated the JSObject, now initialize the fields and add the heap tag.
  // rbx: initial map
  // rdx: JSObject (untagged)
  __ movq(Operand(rdx, JSObject::kMapOffset), rbx);
  __ Move(rbx, Factory::empty_fixed_array());
  __ movq(Operand(rdx, JSObject::kPropertiesOffset), rbx);
  __ movq(Operand(rdx, JSObject::kElementsOffset), rbx);

  // rax: argc
  // rdx: JSObject (untagged)
  // Load the address of the first in-object property into r9.
  __ lea(r9, Operand(rdx, JSObject::kHeaderSize));
  // Calculate the location of the first argument. The stack contains only the
  // return address on top of the argc arguments.
  __ lea(rcx, Operand(rsp, rax, times_pointer_size, 0));

  // rax: argc
  // rcx: first argument
  // rdx: JSObject (untagged)
  // r8: undefined
  // r9: first in-object property of the JSObject
  // Fill the initialized properties with a constant value or a passed argument
  // depending on the this.x = ...; assignment in the function.
  SharedFunctionInfo* shared = function->shared();
  for (int i = 0; i < shared->this_property_assignments_count(); i++) {
    if (shared->IsThisPropertyAssignmentArgument(i)) {
      // Check if the argument assigned to the property is actually passed.
      // If argument is not passed the property is set to undefined,
      // otherwise find it on the stack.
      int arg_number = shared->GetThisPropertyAssignmentArgument(i);
      __ movq(rbx, r8);
      __ cmpq(rax, Immediate(arg_number));
      __ cmovq(above, rbx, Operand(rcx, arg_number * -kPointerSize));
      // Store value in the property.
      __ movq(Operand(r9, i * kPointerSize), rbx);
    } else {
      // Set the property to the constant value.
      Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
      __ Move(Operand(r9, i * kPointerSize), constant);
    }
  }

  // Fill the unused in-object property fields with undefined.
  ASSERT(function->has_initial_map());
  for (int i = shared->this_property_assignments_count();
       i < function->initial_map()->inobject_properties();
       i++) {
    __ movq(Operand(r9, i * kPointerSize), r8);
  }

  // rax: argc
  // rdx: JSObject (untagged)
  // Move argc to rbx and the JSObject to return to rax and tag it.
  __ movq(rbx, rax);
  __ movq(rax, rdx);
  __ or_(rax, Immediate(kHeapObjectTag));

  // rax: JSObject
  // rbx: argc
  // Remove caller arguments and receiver from the stack and return.
  __ pop(rcx);
  __ lea(rsp, Operand(rsp, rbx, times_pointer_size, 1 * kPointerSize));
  __ push(rcx);
  __ IncrementCounter(&Counters::constructed_objects, 1);
  __ IncrementCounter(&Counters::constructed_objects_stub, 1);
  __ ret(0);

  // Jump to the generic stub in case the specialized code cannot handle the
  // construction.
  __ bind(&generic_stub_call);
  Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
  Handle<Code> generic_construct_stub(code);
  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode();
}
3200
+
3201
+
3202
// Compiles a keyed-load stub for external (typed) arrays of the given
// |array_type|.  Loads the element with the width/signedness appropriate
// for the array type and returns it as a smi or, when the value cannot be
// represented as a smi (large uint32) or is a float, as a freshly
// allocated HeapNumber.  Anything unexpected tail-calls the
// Runtime::kKeyedGetProperty fallback.
MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
    ExternalArrayType array_type, Code::Flags flags) {
  // ----------- S t a t e -------------
  //  -- rax : key
  //  -- rdx : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow;

  // Check that the object isn't a smi.
  __ JumpIfSmi(rdx, &slow);

  // Check that the key is a smi.
  __ JumpIfNotSmi(rax, &slow);

  // Check that the object is a JS object.
  __ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx);
  __ j(not_equal, &slow);
  // Check that the receiver does not require access checks. We need
  // to check this explicitly since this generic stub does not perform
  // map checks. The map is already in rcx (loaded by CmpObjectType).
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsAccessCheckNeeded));
  __ j(not_zero, &slow);

  // Check that the elements array is the appropriate type of
  // ExternalArray.
  // rax: index (as a smi)
  // rdx: JSObject
  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
  __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                 Heap::RootIndexForExternalArrayType(array_type));
  __ j(not_equal, &slow);

  // Check that the index is in range.
  __ SmiToInteger32(rcx, rax);
  __ cmpl(rcx, FieldOperand(rbx, ExternalArray::kLengthOffset));
  // Unsigned comparison catches both negative and too-large values.
  __ j(above_equal, &slow);

  // rax: index (as a smi)
  // rdx: receiver (JSObject)
  // rcx: untagged index
  // rbx: elements array
  __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset));
  // rbx: base pointer of external storage
  // Load the element with the width and sign-extension matching the
  // external array's element type.
  switch (array_type) {
    case kExternalByteArray:
      __ movsxbq(rcx, Operand(rbx, rcx, times_1, 0));
      break;
    case kExternalUnsignedByteArray:
      __ movzxbq(rcx, Operand(rbx, rcx, times_1, 0));
      break;
    case kExternalShortArray:
      __ movsxwq(rcx, Operand(rbx, rcx, times_2, 0));
      break;
    case kExternalUnsignedShortArray:
      __ movzxwq(rcx, Operand(rbx, rcx, times_2, 0));
      break;
    case kExternalIntArray:
      __ movsxlq(rcx, Operand(rbx, rcx, times_4, 0));
      break;
    case kExternalUnsignedIntArray:
      __ movl(rcx, Operand(rbx, rcx, times_4, 0));
      break;
    case kExternalFloatArray:
      __ cvtss2sd(xmm0, Operand(rbx, rcx, times_4, 0));
      break;
    default:
      UNREACHABLE();
      break;
  }

  // rax: index
  // rdx: receiver
  // For integer array types:
  // rcx: value
  // For floating-point array type:
  // xmm0: value as double.

  ASSERT(kSmiValueSize == 32);
  if (array_type == kExternalUnsignedIntArray) {
    // For the UnsignedInt array type, we need to see whether
    // the value can be represented in a Smi. If not, we need to convert
    // it to a HeapNumber.
    NearLabel box_int;

    __ JumpIfUIntNotValidSmiValue(rcx, &box_int);

    __ Integer32ToSmi(rax, rcx);
    __ ret(0);

    __ bind(&box_int);

    // Allocate a HeapNumber for the int and perform int-to-double
    // conversion.
    // The value is zero-extended since we loaded the value from memory
    // with movl.
    __ cvtqsi2sd(xmm0, rcx);

    __ AllocateHeapNumber(rcx, rbx, &slow);
    // Set the value.
    __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0);
    __ movq(rax, rcx);
    __ ret(0);
  } else if (array_type == kExternalFloatArray) {
    // For the floating-point array type, we need to always allocate a
    // HeapNumber.
    __ AllocateHeapNumber(rcx, rbx, &slow);
    // Set the value.
    __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0);
    __ movq(rax, rcx);
    __ ret(0);
  } else {
    // All remaining element types fit in a smi (kSmiValueSize == 32).
    __ Integer32ToSmi(rax, rcx);
    __ ret(0);
  }

  // Slow case: Jump to runtime.
  __ bind(&slow);
  __ IncrementCounter(&Counters::keyed_load_external_array_slow, 1);

  // ----------- S t a t e -------------
  //  -- rax : key
  //  -- rdx : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  // Rebuild the runtime call frame: receiver and key below the return
  // address.
  __ pop(rbx);
  __ push(rdx);  // receiver
  __ push(rax);  // name
  __ push(rbx);  // return address

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);

  // Return the generated code.
  return GetCode(flags);
}
3341
+
3342
+
3343
// Compiles a keyed-store IC stub for a JSObject backed by an external
// (typed) array of |array_type|.  Emits x64 machine code via the
// MacroAssembler (the `__` macro) that stores rax (the value) into the
// element at index rcx (a smi) of receiver rdx, converting the value to
// the array's element representation.  Falls through to the slow path —
// a tail call to Runtime::kSetProperty — for any receiver/key/value the
// fast path cannot handle.  Returns the generated Code object (or a
// failure) via GetCode(flags).
MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
    ExternalArrayType array_type, Code::Flags flags) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow;

  // Check that the object isn't a smi.
  __ JumpIfSmi(rdx, &slow);
  // Get the map from the receiver.
  __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks.  We need
  // to do this because this generic stub does not perform map checks.
  __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsAccessCheckNeeded));
  __ j(not_zero, &slow);
  // Check that the key is a smi.
  __ JumpIfNotSmi(rcx, &slow);

  // Check that the object is a JS object.  rbx still holds the
  // receiver's map from the load above.
  __ CmpInstanceType(rbx, JS_OBJECT_TYPE);
  __ j(not_equal, &slow);

  // Check that the elements array is the appropriate type of
  // ExternalArray, by comparing its map against the root-list map for
  // |array_type|.
  // rax: value
  // rcx: key (a smi)
  // rdx: receiver (a JSObject)
  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
  __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                 Heap::RootIndexForExternalArrayType(array_type));
  __ j(not_equal, &slow);

  // Check that the index is in range.
  __ SmiToInteger32(rdi, rcx);  // Untag the index.
  __ cmpl(rdi, FieldOperand(rbx, ExternalArray::kLengthOffset));
  // Unsigned comparison catches both negative and too-large values.
  __ j(above_equal, &slow);

  // Handle both smis and HeapNumbers in the fast path. Go to the
  // runtime for all other kinds of values.
  // rax: value
  // rcx: key (a smi)
  // rdx: receiver (a JSObject)
  // rbx: elements array
  // rdi: untagged key
  NearLabel check_heap_number;
  __ JumpIfNotSmi(rax, &check_heap_number);
  // No more branches to slow case on this path.  Key and receiver not needed,
  // so rdx can be clobbered with the untagged value.
  __ SmiToInteger32(rdx, rax);
  __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset));
  // rbx: base pointer of external storage
  // Store the untagged integer, truncated to the array's element width.
  switch (array_type) {
    case kExternalByteArray:
    case kExternalUnsignedByteArray:
      __ movb(Operand(rbx, rdi, times_1, 0), rdx);
      break;
    case kExternalShortArray:
    case kExternalUnsignedShortArray:
      __ movw(Operand(rbx, rdi, times_2, 0), rdx);
      break;
    case kExternalIntArray:
    case kExternalUnsignedIntArray:
      __ movl(Operand(rbx, rdi, times_4, 0), rdx);
      break;
    case kExternalFloatArray:
      // Need to perform int-to-float conversion.
      __ cvtlsi2ss(xmm0, rdx);
      __ movss(Operand(rbx, rdi, times_4, 0), xmm0);
      break;
    default:
      UNREACHABLE();
      break;
  }
  __ ret(0);

  __ bind(&check_heap_number);
  // rax: value
  // rcx: key (a smi)
  // rdx: receiver (a JSObject)
  // rbx: elements array
  // rdi: untagged key
  __ CmpObjectType(rax, HEAP_NUMBER_TYPE, kScratchRegister);
  __ j(not_equal, &slow);
  // No more branches to slow case on this path.

  // The WebGL specification leaves the behavior of storing NaN and
  // +/-Infinity into integer arrays basically undefined. For more
  // reproducible behavior, convert these to zero.
  __ movsd(xmm0, FieldOperand(rax, HeapNumber::kValueOffset));
  __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset));
  // rdi: untagged index
  // rbx: base pointer of external storage
  // xmm0: value (a double)
  if (array_type == kExternalFloatArray) {
    // Narrow double -> single and store.
    __ cvtsd2ss(xmm0, xmm0);
    __ movss(Operand(rbx, rdi, times_4, 0), xmm0);
    __ ret(0);
  } else {
    // Perform float-to-int conversion with truncation (round-to-zero)
    // behavior.

    // Convert to int32 and store the low byte/word.
    // If the value is NaN or +/-infinity, the result is 0x80000000,
    // which is automatically zero when taken mod 2^n, n < 32.
    // rdx: value (converted to an untagged integer)
    // rdi: untagged index
    // rbx: base pointer of external storage
    switch (array_type) {
      case kExternalByteArray:
      case kExternalUnsignedByteArray:
        __ cvttsd2si(rdx, xmm0);
        __ movb(Operand(rbx, rdi, times_1, 0), rdx);
        break;
      case kExternalShortArray:
      case kExternalUnsignedShortArray:
        __ cvttsd2si(rdx, xmm0);
        __ movw(Operand(rbx, rdi, times_2, 0), rdx);
        break;
      case kExternalIntArray:
      case kExternalUnsignedIntArray: {
        // Convert to int64, so that NaN and infinities become
        // 0x8000000000000000, which is zero mod 2^32.
        __ cvttsd2siq(rdx, xmm0);
        __ movl(Operand(rbx, rdi, times_4, 0), rdx);
        break;
      }
      default:
        UNREACHABLE();
        break;
    }
    __ ret(0);
  }

  // Slow case: call runtime.
  __ bind(&slow);

  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  // Rebuild the stack frame expected by Runtime::kSetProperty:
  // receiver, key, value, attributes, strict-mode flag, return address.
  __ pop(rbx);
  __ push(rdx);  // receiver
  __ push(rcx);  // key
  __ push(rax);  // value
  __ Push(Smi::FromInt(NONE));  // PropertyAttributes
  __ Push(Smi::FromInt(
      Code::ExtractExtraICStateFromFlags(flags) & kStrictMode));
  __ push(rbx);  // return address

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);

  // Return the generated code.
  return GetCode(flags);
}
3504
+
3505
+ #undef __
3506
+
3507
+ } } // namespace v8::internal
3508
+
3509
+ #endif // V8_TARGET_ARCH_X64