therubyracer 0.9.0beta2 → 0.9.0beta3


Potentially problematic release: this version of therubyracer has been flagged as potentially problematic.
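If you want to try this release, note that RubyGems treats any version string containing a letter (such as 0.9.0beta3) as a prerelease, so it will not be picked up by a plain version requirement. A minimal sketch:

    # Install directly, requesting the exact prerelease version:
    #   gem install therubyracer -v 0.9.0beta3
    #
    # Or pin it in a Gemfile; Bundler only resolves prereleases when the
    # requirement names one explicitly:
    gem 'therubyracer', '0.9.0beta3'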

Files changed (920)
  1. data/.gitmodules +3 -0
  2. data/ext/v8/upstream/Makefile +1 -2
  3. data/ext/v8/upstream/v8/.gitignore +33 -0
  4. data/ext/v8/upstream/v8/AUTHORS +42 -0
  5. data/ext/v8/upstream/v8/ChangeLog +2663 -0
  6. data/ext/v8/upstream/{3.1.8 → v8}/LICENSE +0 -0
  7. data/ext/v8/upstream/{3.1.8 → v8}/LICENSE.strongtalk +0 -0
  8. data/ext/v8/upstream/{3.1.8 → v8}/LICENSE.v8 +0 -0
  9. data/ext/v8/upstream/{3.1.8 → v8}/LICENSE.valgrind +0 -0
  10. data/ext/v8/upstream/v8/SConstruct +1473 -0
  11. data/ext/v8/upstream/{3.1.8 → v8}/build/README.txt +0 -0
  12. data/ext/v8/upstream/{3.1.8 → v8}/build/all.gyp +0 -0
  13. data/ext/v8/upstream/{3.1.8 → v8}/build/armu.gypi +0 -0
  14. data/ext/v8/upstream/{3.1.8 → v8}/build/common.gypi +0 -0
  15. data/ext/v8/upstream/{3.1.8 → v8}/build/gyp_v8 +0 -0
  16. data/ext/v8/upstream/v8/include/v8-debug.h +394 -0
  17. data/ext/v8/upstream/v8/include/v8-preparser.h +116 -0
  18. data/ext/v8/upstream/v8/include/v8-profiler.h +505 -0
  19. data/ext/v8/upstream/v8/include/v8-testing.h +104 -0
  20. data/ext/v8/upstream/v8/include/v8.h +4000 -0
  21. data/ext/v8/upstream/{3.1.8 → v8}/include/v8stdint.h +0 -0
  22. data/ext/v8/upstream/v8/preparser/SConscript +38 -0
  23. data/ext/v8/upstream/v8/preparser/preparser-process.cc +169 -0
  24. data/ext/v8/upstream/v8/src/SConscript +380 -0
  25. data/ext/v8/upstream/v8/src/accessors.cc +766 -0
  26. data/ext/v8/upstream/{3.1.8 → v8}/src/accessors.h +0 -0
  27. data/ext/v8/upstream/v8/src/allocation-inl.h +49 -0
  28. data/ext/v8/upstream/v8/src/allocation.cc +122 -0
  29. data/ext/v8/upstream/v8/src/allocation.h +143 -0
  30. data/ext/v8/upstream/v8/src/api.cc +5678 -0
  31. data/ext/v8/upstream/v8/src/api.h +572 -0
  32. data/ext/v8/upstream/{3.1.8 → v8}/src/apinatives.js +0 -0
  33. data/ext/v8/upstream/v8/src/apiutils.h +73 -0
  34. data/ext/v8/upstream/v8/src/arguments.h +116 -0
  35. data/ext/v8/upstream/v8/src/arm/assembler-arm-inl.h +353 -0
  36. data/ext/v8/upstream/v8/src/arm/assembler-arm.cc +2877 -0
  37. data/ext/v8/upstream/v8/src/arm/assembler-arm.h +1382 -0
  38. data/ext/v8/upstream/v8/src/arm/builtins-arm.cc +1634 -0
  39. data/ext/v8/upstream/v8/src/arm/code-stubs-arm.cc +6917 -0
  40. data/ext/v8/upstream/v8/src/arm/code-stubs-arm.h +623 -0
  41. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/codegen-arm-inl.h +0 -0
  42. data/ext/v8/upstream/v8/src/arm/codegen-arm.cc +7437 -0
  43. data/ext/v8/upstream/v8/src/arm/codegen-arm.h +595 -0
  44. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/constants-arm.cc +0 -0
  45. data/ext/v8/upstream/v8/src/arm/constants-arm.h +778 -0
  46. data/ext/v8/upstream/v8/src/arm/cpu-arm.cc +149 -0
  47. data/ext/v8/upstream/v8/src/arm/debug-arm.cc +317 -0
  48. data/ext/v8/upstream/v8/src/arm/deoptimizer-arm.cc +737 -0
  49. data/ext/v8/upstream/v8/src/arm/disasm-arm.cc +1503 -0
  50. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/frames-arm.cc +0 -0
  51. data/ext/v8/upstream/v8/src/arm/frames-arm.h +168 -0
  52. data/ext/v8/upstream/v8/src/arm/full-codegen-arm.cc +4374 -0
  53. data/ext/v8/upstream/v8/src/arm/ic-arm.cc +1793 -0
  54. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/jump-target-arm.cc +0 -0
  55. data/ext/v8/upstream/v8/src/arm/lithium-arm.cc +2120 -0
  56. data/ext/v8/upstream/v8/src/arm/lithium-arm.h +2179 -0
  57. data/ext/v8/upstream/v8/src/arm/lithium-codegen-arm.cc +4132 -0
  58. data/ext/v8/upstream/v8/src/arm/lithium-codegen-arm.h +329 -0
  59. data/ext/v8/upstream/v8/src/arm/lithium-gap-resolver-arm.cc +305 -0
  60. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/lithium-gap-resolver-arm.h +0 -0
  61. data/ext/v8/upstream/v8/src/arm/macro-assembler-arm.cc +2939 -0
  62. data/ext/v8/upstream/v8/src/arm/macro-assembler-arm.h +1071 -0
  63. data/ext/v8/upstream/v8/src/arm/regexp-macro-assembler-arm.cc +1287 -0
  64. data/ext/v8/upstream/v8/src/arm/regexp-macro-assembler-arm.h +253 -0
  65. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/register-allocator-arm-inl.h +0 -0
  66. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/register-allocator-arm.cc +0 -0
  67. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/register-allocator-arm.h +0 -0
  68. data/ext/v8/upstream/v8/src/arm/simulator-arm.cc +3288 -0
  69. data/ext/v8/upstream/v8/src/arm/simulator-arm.h +413 -0
  70. data/ext/v8/upstream/v8/src/arm/stub-cache-arm.cc +4034 -0
  71. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/virtual-frame-arm-inl.h +0 -0
  72. data/ext/v8/upstream/v8/src/arm/virtual-frame-arm.cc +843 -0
  73. data/ext/v8/upstream/v8/src/arm/virtual-frame-arm.h +523 -0
  74. data/ext/v8/upstream/v8/src/array.js +1249 -0
  75. data/ext/v8/upstream/v8/src/assembler.cc +1067 -0
  76. data/ext/v8/upstream/v8/src/assembler.h +823 -0
  77. data/ext/v8/upstream/v8/src/ast-inl.h +112 -0
  78. data/ext/v8/upstream/v8/src/ast.cc +1078 -0
  79. data/ext/v8/upstream/v8/src/ast.h +2234 -0
  80. data/ext/v8/upstream/v8/src/atomicops.h +167 -0
  81. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_arm_gcc.h +0 -0
  82. data/ext/v8/upstream/v8/src/atomicops_internals_mips_gcc.h +169 -0
  83. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_x86_gcc.cc +0 -0
  84. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_x86_gcc.h +0 -0
  85. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_x86_macosx.h +0 -0
  86. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_x86_msvc.h +0 -0
  87. data/ext/v8/upstream/{3.1.8 → v8}/src/bignum-dtoa.cc +0 -0
  88. data/ext/v8/upstream/{3.1.8 → v8}/src/bignum-dtoa.h +0 -0
  89. data/ext/v8/upstream/{3.1.8 → v8}/src/bignum.cc +0 -0
  90. data/ext/v8/upstream/{3.1.8 → v8}/src/bignum.h +0 -0
  91. data/ext/v8/upstream/v8/src/bootstrapper.cc +2138 -0
  92. data/ext/v8/upstream/v8/src/bootstrapper.h +185 -0
  93. data/ext/v8/upstream/v8/src/builtins.cc +1708 -0
  94. data/ext/v8/upstream/v8/src/builtins.h +368 -0
  95. data/ext/v8/upstream/{3.1.8 → v8}/src/bytecodes-irregexp.h +0 -0
  96. data/ext/v8/upstream/{3.1.8 → v8}/src/cached-powers.cc +0 -0
  97. data/ext/v8/upstream/{3.1.8 → v8}/src/cached-powers.h +0 -0
  98. data/ext/v8/upstream/{3.1.8 → v8}/src/char-predicates-inl.h +0 -0
  99. data/ext/v8/upstream/{3.1.8 → v8}/src/char-predicates.h +0 -0
  100. data/ext/v8/upstream/v8/src/checks.cc +110 -0
  101. data/ext/v8/upstream/v8/src/checks.h +296 -0
  102. data/ext/v8/upstream/{3.1.8 → v8}/src/circular-queue-inl.h +0 -0
  103. data/ext/v8/upstream/{3.1.8 → v8}/src/circular-queue.cc +0 -0
  104. data/ext/v8/upstream/{3.1.8 → v8}/src/circular-queue.h +0 -0
  105. data/ext/v8/upstream/v8/src/code-stubs.cc +240 -0
  106. data/ext/v8/upstream/v8/src/code-stubs.h +971 -0
  107. data/ext/v8/upstream/{3.1.8 → v8}/src/code.h +0 -0
  108. data/ext/v8/upstream/v8/src/codegen-inl.h +68 -0
  109. data/ext/v8/upstream/v8/src/codegen.cc +505 -0
  110. data/ext/v8/upstream/v8/src/codegen.h +245 -0
  111. data/ext/v8/upstream/v8/src/compilation-cache.cc +540 -0
  112. data/ext/v8/upstream/v8/src/compilation-cache.h +287 -0
  113. data/ext/v8/upstream/v8/src/compiler.cc +792 -0
  114. data/ext/v8/upstream/v8/src/compiler.h +307 -0
  115. data/ext/v8/upstream/v8/src/contexts.cc +327 -0
  116. data/ext/v8/upstream/v8/src/contexts.h +382 -0
  117. data/ext/v8/upstream/{3.1.8 → v8}/src/conversions-inl.h +0 -0
  118. data/ext/v8/upstream/v8/src/conversions.cc +1125 -0
  119. data/ext/v8/upstream/{3.1.8 → v8}/src/conversions.h +0 -0
  120. data/ext/v8/upstream/v8/src/counters.cc +93 -0
  121. data/ext/v8/upstream/v8/src/counters.h +254 -0
  122. data/ext/v8/upstream/v8/src/cpu-profiler-inl.h +101 -0
  123. data/ext/v8/upstream/v8/src/cpu-profiler.cc +606 -0
  124. data/ext/v8/upstream/v8/src/cpu-profiler.h +305 -0
  125. data/ext/v8/upstream/v8/src/cpu.h +67 -0
  126. data/ext/v8/upstream/v8/src/d8-debug.cc +367 -0
  127. data/ext/v8/upstream/v8/src/d8-debug.h +158 -0
  128. data/ext/v8/upstream/v8/src/d8-posix.cc +695 -0
  129. data/ext/v8/upstream/{3.1.8 → v8}/src/d8-readline.cc +0 -0
  130. data/ext/v8/upstream/{3.1.8 → v8}/src/d8-windows.cc +0 -0
  131. data/ext/v8/upstream/v8/src/d8.cc +796 -0
  132. data/ext/v8/upstream/v8/src/d8.gyp +88 -0
  133. data/ext/v8/upstream/{3.1.8 → v8}/src/d8.h +0 -0
  134. data/ext/v8/upstream/{3.1.8 → v8}/src/d8.js +0 -0
  135. data/ext/v8/upstream/{3.1.8 → v8}/src/data-flow.cc +0 -0
  136. data/ext/v8/upstream/v8/src/data-flow.h +379 -0
  137. data/ext/v8/upstream/{3.1.8 → v8}/src/date.js +0 -0
  138. data/ext/v8/upstream/{3.1.8 → v8}/src/dateparser-inl.h +0 -0
  139. data/ext/v8/upstream/{3.1.8 → v8}/src/dateparser.cc +0 -0
  140. data/ext/v8/upstream/v8/src/dateparser.h +265 -0
  141. data/ext/v8/upstream/v8/src/debug-agent.cc +447 -0
  142. data/ext/v8/upstream/v8/src/debug-agent.h +129 -0
  143. data/ext/v8/upstream/{3.1.8 → v8}/src/debug-debugger.js +0 -0
  144. data/ext/v8/upstream/v8/src/debug.cc +3188 -0
  145. data/ext/v8/upstream/v8/src/debug.h +1055 -0
  146. data/ext/v8/upstream/v8/src/deoptimizer.cc +1296 -0
  147. data/ext/v8/upstream/v8/src/deoptimizer.h +629 -0
  148. data/ext/v8/upstream/v8/src/disasm.h +80 -0
  149. data/ext/v8/upstream/v8/src/disassembler.cc +339 -0
  150. data/ext/v8/upstream/{3.1.8 → v8}/src/disassembler.h +0 -0
  151. data/ext/v8/upstream/{3.1.8 → v8}/src/diy-fp.cc +0 -0
  152. data/ext/v8/upstream/{3.1.8 → v8}/src/diy-fp.h +0 -0
  153. data/ext/v8/upstream/{3.1.8 → v8}/src/double.h +0 -0
  154. data/ext/v8/upstream/{3.1.8 → v8}/src/dtoa.cc +0 -0
  155. data/ext/v8/upstream/{3.1.8 → v8}/src/dtoa.h +0 -0
  156. data/ext/v8/upstream/v8/src/execution.cc +791 -0
  157. data/ext/v8/upstream/v8/src/execution.h +291 -0
  158. data/ext/v8/upstream/v8/src/extensions/experimental/break-iterator.cc +250 -0
  159. data/ext/v8/upstream/v8/src/extensions/experimental/break-iterator.h +89 -0
  160. data/ext/v8/upstream/v8/src/extensions/experimental/experimental.gyp +55 -0
  161. data/ext/v8/upstream/v8/src/extensions/experimental/i18n-extension.cc +284 -0
  162. data/ext/v8/upstream/{3.1.8 → v8}/src/extensions/experimental/i18n-extension.h +0 -0
  163. data/ext/v8/upstream/v8/src/extensions/externalize-string-extension.cc +141 -0
  164. data/ext/v8/upstream/{3.1.8 → v8}/src/extensions/externalize-string-extension.h +0 -0
  165. data/ext/v8/upstream/v8/src/extensions/gc-extension.cc +58 -0
  166. data/ext/v8/upstream/{3.1.8 → v8}/src/extensions/gc-extension.h +0 -0
  167. data/ext/v8/upstream/v8/src/factory.cc +1194 -0
  168. data/ext/v8/upstream/v8/src/factory.h +436 -0
  169. data/ext/v8/upstream/{3.1.8 → v8}/src/fast-dtoa.cc +0 -0
  170. data/ext/v8/upstream/{3.1.8 → v8}/src/fast-dtoa.h +0 -0
  171. data/ext/v8/upstream/{3.1.8 → v8}/src/fixed-dtoa.cc +0 -0
  172. data/ext/v8/upstream/{3.1.8 → v8}/src/fixed-dtoa.h +0 -0
  173. data/ext/v8/upstream/v8/src/flag-definitions.h +556 -0
  174. data/ext/v8/upstream/{3.1.8 → v8}/src/flags.cc +0 -0
  175. data/ext/v8/upstream/{3.1.8 → v8}/src/flags.h +0 -0
  176. data/ext/v8/upstream/v8/src/frame-element.cc +37 -0
  177. data/ext/v8/upstream/v8/src/frame-element.h +269 -0
  178. data/ext/v8/upstream/v8/src/frames-inl.h +236 -0
  179. data/ext/v8/upstream/v8/src/frames.cc +1273 -0
  180. data/ext/v8/upstream/v8/src/frames.h +854 -0
  181. data/ext/v8/upstream/v8/src/full-codegen.cc +1385 -0
  182. data/ext/v8/upstream/v8/src/full-codegen.h +753 -0
  183. data/ext/v8/upstream/v8/src/func-name-inferrer.cc +91 -0
  184. data/ext/v8/upstream/v8/src/func-name-inferrer.h +111 -0
  185. data/ext/v8/upstream/v8/src/gdb-jit.cc +1548 -0
  186. data/ext/v8/upstream/{3.1.8 → v8}/src/gdb-jit.h +0 -0
  187. data/ext/v8/upstream/v8/src/global-handles.cc +596 -0
  188. data/ext/v8/upstream/v8/src/global-handles.h +239 -0
  189. data/ext/v8/upstream/v8/src/globals.h +325 -0
  190. data/ext/v8/upstream/v8/src/handles-inl.h +177 -0
  191. data/ext/v8/upstream/v8/src/handles.cc +965 -0
  192. data/ext/v8/upstream/v8/src/handles.h +372 -0
  193. data/ext/v8/upstream/{3.1.8 → v8}/src/hashmap.cc +0 -0
  194. data/ext/v8/upstream/v8/src/hashmap.h +121 -0
  195. data/ext/v8/upstream/v8/src/heap-inl.h +703 -0
  196. data/ext/v8/upstream/v8/src/heap-profiler.cc +1173 -0
  197. data/ext/v8/upstream/v8/src/heap-profiler.h +396 -0
  198. data/ext/v8/upstream/v8/src/heap.cc +5856 -0
  199. data/ext/v8/upstream/v8/src/heap.h +2264 -0
  200. data/ext/v8/upstream/v8/src/hydrogen-instructions.cc +1639 -0
  201. data/ext/v8/upstream/v8/src/hydrogen-instructions.h +3657 -0
  202. data/ext/v8/upstream/v8/src/hydrogen.cc +6011 -0
  203. data/ext/v8/upstream/v8/src/hydrogen.h +1137 -0
  204. data/ext/v8/upstream/v8/src/ia32/assembler-ia32-inl.h +430 -0
  205. data/ext/v8/upstream/v8/src/ia32/assembler-ia32.cc +2846 -0
  206. data/ext/v8/upstream/v8/src/ia32/assembler-ia32.h +1159 -0
  207. data/ext/v8/upstream/v8/src/ia32/builtins-ia32.cc +1596 -0
  208. data/ext/v8/upstream/v8/src/ia32/code-stubs-ia32.cc +6549 -0
  209. data/ext/v8/upstream/v8/src/ia32/code-stubs-ia32.h +495 -0
  210. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/codegen-ia32-inl.h +0 -0
  211. data/ext/v8/upstream/v8/src/ia32/codegen-ia32.cc +10385 -0
  212. data/ext/v8/upstream/v8/src/ia32/codegen-ia32.h +801 -0
  213. data/ext/v8/upstream/v8/src/ia32/cpu-ia32.cc +88 -0
  214. data/ext/v8/upstream/v8/src/ia32/debug-ia32.cc +312 -0
  215. data/ext/v8/upstream/v8/src/ia32/deoptimizer-ia32.cc +774 -0
  216. data/ext/v8/upstream/v8/src/ia32/disasm-ia32.cc +1620 -0
  217. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/frames-ia32.cc +0 -0
  218. data/ext/v8/upstream/v8/src/ia32/frames-ia32.h +140 -0
  219. data/ext/v8/upstream/v8/src/ia32/full-codegen-ia32.cc +4357 -0
  220. data/ext/v8/upstream/v8/src/ia32/ic-ia32.cc +1779 -0
  221. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/jump-target-ia32.cc +0 -0
  222. data/ext/v8/upstream/v8/src/ia32/lithium-codegen-ia32.cc +4158 -0
  223. data/ext/v8/upstream/v8/src/ia32/lithium-codegen-ia32.h +318 -0
  224. data/ext/v8/upstream/v8/src/ia32/lithium-gap-resolver-ia32.cc +466 -0
  225. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/lithium-gap-resolver-ia32.h +0 -0
  226. data/ext/v8/upstream/v8/src/ia32/lithium-ia32.cc +2181 -0
  227. data/ext/v8/upstream/v8/src/ia32/lithium-ia32.h +2235 -0
  228. data/ext/v8/upstream/v8/src/ia32/macro-assembler-ia32.cc +2056 -0
  229. data/ext/v8/upstream/v8/src/ia32/macro-assembler-ia32.h +807 -0
  230. data/ext/v8/upstream/v8/src/ia32/regexp-macro-assembler-ia32.cc +1264 -0
  231. data/ext/v8/upstream/v8/src/ia32/regexp-macro-assembler-ia32.h +216 -0
  232. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/register-allocator-ia32-inl.h +0 -0
  233. data/ext/v8/upstream/v8/src/ia32/register-allocator-ia32.cc +157 -0
  234. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/register-allocator-ia32.h +0 -0
  235. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/simulator-ia32.cc +0 -0
  236. data/ext/v8/upstream/v8/src/ia32/simulator-ia32.h +72 -0
  237. data/ext/v8/upstream/v8/src/ia32/stub-cache-ia32.cc +3711 -0
  238. data/ext/v8/upstream/v8/src/ia32/virtual-frame-ia32.cc +1366 -0
  239. data/ext/v8/upstream/v8/src/ia32/virtual-frame-ia32.h +650 -0
  240. data/ext/v8/upstream/v8/src/ic-inl.h +130 -0
  241. data/ext/v8/upstream/v8/src/ic.cc +2389 -0
  242. data/ext/v8/upstream/v8/src/ic.h +675 -0
  243. data/ext/v8/upstream/{3.1.8 → v8}/src/inspector.cc +0 -0
  244. data/ext/v8/upstream/{3.1.8 → v8}/src/inspector.h +0 -0
  245. data/ext/v8/upstream/v8/src/interpreter-irregexp.cc +659 -0
  246. data/ext/v8/upstream/v8/src/interpreter-irregexp.h +49 -0
  247. data/ext/v8/upstream/v8/src/isolate.cc +883 -0
  248. data/ext/v8/upstream/v8/src/isolate.h +1306 -0
  249. data/ext/v8/upstream/v8/src/json.js +342 -0
  250. data/ext/v8/upstream/v8/src/jsregexp.cc +5371 -0
  251. data/ext/v8/upstream/v8/src/jsregexp.h +1483 -0
  252. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target-heavy-inl.h +0 -0
  253. data/ext/v8/upstream/v8/src/jump-target-heavy.cc +427 -0
  254. data/ext/v8/upstream/v8/src/jump-target-heavy.h +238 -0
  255. data/ext/v8/upstream/v8/src/jump-target-inl.h +48 -0
  256. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target-light-inl.h +0 -0
  257. data/ext/v8/upstream/v8/src/jump-target-light.cc +111 -0
  258. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target-light.h +0 -0
  259. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target.cc +0 -0
  260. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target.h +0 -0
  261. data/ext/v8/upstream/{3.1.8 → v8}/src/list-inl.h +0 -0
  262. data/ext/v8/upstream/{3.1.8 → v8}/src/list.h +0 -0
  263. data/ext/v8/upstream/v8/src/lithium-allocator-inl.h +142 -0
  264. data/ext/v8/upstream/v8/src/lithium-allocator.cc +2105 -0
  265. data/ext/v8/upstream/v8/src/lithium-allocator.h +630 -0
  266. data/ext/v8/upstream/v8/src/lithium.cc +169 -0
  267. data/ext/v8/upstream/{3.1.8 → v8}/src/lithium.h +0 -0
  268. data/ext/v8/upstream/{3.1.8 → v8}/src/liveedit-debugger.js +0 -0
  269. data/ext/v8/upstream/v8/src/liveedit.cc +1693 -0
  270. data/ext/v8/upstream/v8/src/liveedit.h +179 -0
  271. data/ext/v8/upstream/{3.1.8 → v8}/src/liveobjectlist-inl.h +0 -0
  272. data/ext/v8/upstream/v8/src/liveobjectlist.cc +2589 -0
  273. data/ext/v8/upstream/v8/src/liveobjectlist.h +322 -0
  274. data/ext/v8/upstream/{3.1.8 → v8}/src/log-inl.h +0 -0
  275. data/ext/v8/upstream/v8/src/log-utils.cc +423 -0
  276. data/ext/v8/upstream/v8/src/log-utils.h +229 -0
  277. data/ext/v8/upstream/v8/src/log.cc +1666 -0
  278. data/ext/v8/upstream/v8/src/log.h +446 -0
  279. data/ext/v8/upstream/{3.1.8 → v8}/src/macro-assembler.h +0 -0
  280. data/ext/v8/upstream/{3.1.8 → v8}/src/macros.py +0 -0
  281. data/ext/v8/upstream/v8/src/mark-compact.cc +3092 -0
  282. data/ext/v8/upstream/v8/src/mark-compact.h +506 -0
  283. data/ext/v8/upstream/{3.1.8 → v8}/src/math.js +0 -0
  284. data/ext/v8/upstream/v8/src/messages.cc +166 -0
  285. data/ext/v8/upstream/{3.1.8 → v8}/src/messages.h +0 -0
  286. data/ext/v8/upstream/v8/src/messages.js +1090 -0
  287. data/ext/v8/upstream/v8/src/mips/assembler-mips-inl.h +335 -0
  288. data/ext/v8/upstream/v8/src/mips/assembler-mips.cc +2093 -0
  289. data/ext/v8/upstream/v8/src/mips/assembler-mips.h +1066 -0
  290. data/ext/v8/upstream/v8/src/mips/builtins-mips.cc +148 -0
  291. data/ext/v8/upstream/v8/src/mips/code-stubs-mips.cc +752 -0
  292. data/ext/v8/upstream/v8/src/mips/code-stubs-mips.h +511 -0
  293. data/ext/v8/upstream/v8/src/mips/codegen-mips-inl.h +64 -0
  294. data/ext/v8/upstream/v8/src/mips/codegen-mips.cc +1213 -0
  295. data/ext/v8/upstream/v8/src/mips/codegen-mips.h +633 -0
  296. data/ext/v8/upstream/v8/src/mips/constants-mips.cc +352 -0
  297. data/ext/v8/upstream/v8/src/mips/constants-mips.h +723 -0
  298. data/ext/v8/upstream/v8/src/mips/cpu-mips.cc +90 -0
  299. data/ext/v8/upstream/v8/src/mips/debug-mips.cc +155 -0
  300. data/ext/v8/upstream/v8/src/mips/deoptimizer-mips.cc +91 -0
  301. data/ext/v8/upstream/v8/src/mips/disasm-mips.cc +1023 -0
  302. data/ext/v8/upstream/v8/src/mips/frames-mips.cc +48 -0
  303. data/ext/v8/upstream/v8/src/mips/frames-mips.h +179 -0
  304. data/ext/v8/upstream/v8/src/mips/full-codegen-mips.cc +727 -0
  305. data/ext/v8/upstream/v8/src/mips/ic-mips.cc +244 -0
  306. data/ext/v8/upstream/v8/src/mips/jump-target-mips.cc +80 -0
  307. data/ext/v8/upstream/v8/src/mips/lithium-codegen-mips.h +65 -0
  308. data/ext/v8/upstream/v8/src/mips/lithium-mips.h +304 -0
  309. data/ext/v8/upstream/v8/src/mips/macro-assembler-mips.cc +3327 -0
  310. data/ext/v8/upstream/v8/src/mips/macro-assembler-mips.h +1058 -0
  311. data/ext/v8/upstream/v8/src/mips/regexp-macro-assembler-mips.cc +478 -0
  312. data/ext/v8/upstream/v8/src/mips/regexp-macro-assembler-mips.h +250 -0
  313. data/ext/v8/upstream/v8/src/mips/register-allocator-mips-inl.h +134 -0
  314. data/ext/v8/upstream/{3.1.8 → v8}/src/mips/register-allocator-mips.cc +0 -0
  315. data/ext/v8/upstream/v8/src/mips/register-allocator-mips.h +47 -0
  316. data/ext/v8/upstream/v8/src/mips/simulator-mips.cc +2438 -0
  317. data/ext/v8/upstream/v8/src/mips/simulator-mips.h +394 -0
  318. data/ext/v8/upstream/v8/src/mips/stub-cache-mips.cc +601 -0
  319. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips-inl.h +58 -0
  320. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips.cc +307 -0
  321. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips.h +530 -0
  322. data/ext/v8/upstream/v8/src/mirror-debugger.js +2381 -0
  323. data/ext/v8/upstream/v8/src/mksnapshot.cc +256 -0
  324. data/ext/v8/upstream/{3.1.8 → v8}/src/natives.h +0 -0
  325. data/ext/v8/upstream/v8/src/objects-debug.cc +722 -0
  326. data/ext/v8/upstream/v8/src/objects-inl.h +4166 -0
  327. data/ext/v8/upstream/v8/src/objects-printer.cc +801 -0
  328. data/ext/v8/upstream/v8/src/objects-visiting.cc +142 -0
  329. data/ext/v8/upstream/v8/src/objects-visiting.h +422 -0
  330. data/ext/v8/upstream/v8/src/objects.cc +10296 -0
  331. data/ext/v8/upstream/v8/src/objects.h +6662 -0
  332. data/ext/v8/upstream/v8/src/parser.cc +5168 -0
  333. data/ext/v8/upstream/v8/src/parser.h +823 -0
  334. data/ext/v8/upstream/v8/src/platform-cygwin.cc +811 -0
  335. data/ext/v8/upstream/v8/src/platform-freebsd.cc +854 -0
  336. data/ext/v8/upstream/v8/src/platform-linux.cc +1120 -0
  337. data/ext/v8/upstream/v8/src/platform-macos.cc +865 -0
  338. data/ext/v8/upstream/v8/src/platform-nullos.cc +504 -0
  339. data/ext/v8/upstream/v8/src/platform-openbsd.cc +672 -0
  340. data/ext/v8/upstream/v8/src/platform-posix.cc +424 -0
  341. data/ext/v8/upstream/v8/src/platform-solaris.cc +796 -0
  342. data/ext/v8/upstream/v8/src/platform-tls-mac.h +62 -0
  343. data/ext/v8/upstream/v8/src/platform-tls-win32.h +62 -0
  344. data/ext/v8/upstream/v8/src/platform-tls.h +50 -0
  345. data/ext/v8/upstream/v8/src/platform-win32.cc +2072 -0
  346. data/ext/v8/upstream/v8/src/platform.h +693 -0
  347. data/ext/v8/upstream/v8/src/preparse-data.cc +185 -0
  348. data/ext/v8/upstream/{3.1.8 → v8}/src/preparse-data.h +0 -0
  349. data/ext/v8/upstream/v8/src/preparser-api.cc +219 -0
  350. data/ext/v8/upstream/v8/src/preparser.cc +1205 -0
  351. data/ext/v8/upstream/{3.1.8 → v8}/src/preparser.h +0 -0
  352. data/ext/v8/upstream/v8/src/prettyprinter.cc +1530 -0
  353. data/ext/v8/upstream/v8/src/prettyprinter.h +223 -0
  354. data/ext/v8/upstream/{3.1.8 → v8}/src/profile-generator-inl.h +0 -0
  355. data/ext/v8/upstream/v8/src/profile-generator.cc +3095 -0
  356. data/ext/v8/upstream/v8/src/profile-generator.h +1125 -0
  357. data/ext/v8/upstream/v8/src/property.cc +102 -0
  358. data/ext/v8/upstream/v8/src/property.h +348 -0
  359. data/ext/v8/upstream/{3.1.8 → v8}/src/regexp-macro-assembler-irregexp-inl.h +0 -0
  360. data/ext/v8/upstream/v8/src/regexp-macro-assembler-irregexp.cc +470 -0
  361. data/ext/v8/upstream/{3.1.8 → v8}/src/regexp-macro-assembler-irregexp.h +0 -0
  362. data/ext/v8/upstream/{3.1.8 → v8}/src/regexp-macro-assembler-tracer.cc +0 -0
  363. data/ext/v8/upstream/{3.1.8 → v8}/src/regexp-macro-assembler-tracer.h +0 -0
  364. data/ext/v8/upstream/v8/src/regexp-macro-assembler.cc +266 -0
  365. data/ext/v8/upstream/v8/src/regexp-macro-assembler.h +236 -0
  366. data/ext/v8/upstream/v8/src/regexp-stack.cc +111 -0
  367. data/ext/v8/upstream/v8/src/regexp-stack.h +147 -0
  368. data/ext/v8/upstream/v8/src/regexp.js +483 -0
  369. data/ext/v8/upstream/v8/src/register-allocator-inl.h +141 -0
  370. data/ext/v8/upstream/v8/src/register-allocator.cc +98 -0
  371. data/ext/v8/upstream/v8/src/register-allocator.h +310 -0
  372. data/ext/v8/upstream/v8/src/rewriter.cc +1024 -0
  373. data/ext/v8/upstream/{3.1.8 → v8}/src/rewriter.h +0 -0
  374. data/ext/v8/upstream/v8/src/runtime-profiler.cc +478 -0
  375. data/ext/v8/upstream/v8/src/runtime-profiler.h +192 -0
  376. data/ext/v8/upstream/v8/src/runtime.cc +11949 -0
  377. data/ext/v8/upstream/v8/src/runtime.h +643 -0
  378. data/ext/v8/upstream/{3.1.8 → v8}/src/runtime.js +0 -0
  379. data/ext/v8/upstream/v8/src/safepoint-table.cc +256 -0
  380. data/ext/v8/upstream/v8/src/safepoint-table.h +269 -0
  381. data/ext/v8/upstream/v8/src/scanner-base.cc +964 -0
  382. data/ext/v8/upstream/v8/src/scanner-base.h +664 -0
  383. data/ext/v8/upstream/v8/src/scanner.cc +584 -0
  384. data/ext/v8/upstream/v8/src/scanner.h +196 -0
  385. data/ext/v8/upstream/v8/src/scopeinfo.cc +631 -0
  386. data/ext/v8/upstream/v8/src/scopeinfo.h +249 -0
  387. data/ext/v8/upstream/v8/src/scopes.cc +1093 -0
  388. data/ext/v8/upstream/v8/src/scopes.h +508 -0
  389. data/ext/v8/upstream/v8/src/serialize.cc +1574 -0
  390. data/ext/v8/upstream/v8/src/serialize.h +589 -0
  391. data/ext/v8/upstream/{3.1.8 → v8}/src/shell.h +0 -0
  392. data/ext/v8/upstream/{3.1.8 → v8}/src/simulator.h +0 -0
  393. data/ext/v8/upstream/v8/src/small-pointer-list.h +163 -0
  394. data/ext/v8/upstream/{3.1.8 → v8}/src/smart-pointer.h +0 -0
  395. data/ext/v8/upstream/v8/src/snapshot-common.cc +82 -0
  396. data/ext/v8/upstream/{3.1.8 → v8}/src/snapshot-empty.cc +0 -0
  397. data/ext/v8/upstream/v8/src/snapshot.h +73 -0
  398. data/ext/v8/upstream/v8/src/spaces-inl.h +529 -0
  399. data/ext/v8/upstream/v8/src/spaces.cc +3147 -0
  400. data/ext/v8/upstream/v8/src/spaces.h +2368 -0
  401. data/ext/v8/upstream/{3.1.8 → v8}/src/splay-tree-inl.h +0 -0
  402. data/ext/v8/upstream/{3.1.8 → v8}/src/splay-tree.h +0 -0
  403. data/ext/v8/upstream/v8/src/string-search.cc +41 -0
  404. data/ext/v8/upstream/v8/src/string-search.h +568 -0
  405. data/ext/v8/upstream/v8/src/string-stream.cc +592 -0
  406. data/ext/v8/upstream/{3.1.8 → v8}/src/string-stream.h +0 -0
  407. data/ext/v8/upstream/v8/src/string.js +915 -0
  408. data/ext/v8/upstream/{3.1.8 → v8}/src/strtod.cc +0 -0
  409. data/ext/v8/upstream/{3.1.8 → v8}/src/strtod.h +0 -0
  410. data/ext/v8/upstream/v8/src/stub-cache.cc +1940 -0
  411. data/ext/v8/upstream/v8/src/stub-cache.h +866 -0
  412. data/ext/v8/upstream/{3.1.8 → v8}/src/third_party/valgrind/valgrind.h +0 -0
  413. data/ext/v8/upstream/v8/src/token.cc +63 -0
  414. data/ext/v8/upstream/v8/src/token.h +288 -0
  415. data/ext/v8/upstream/v8/src/top.cc +983 -0
  416. data/ext/v8/upstream/v8/src/type-info.cc +472 -0
  417. data/ext/v8/upstream/v8/src/type-info.h +290 -0
  418. data/ext/v8/upstream/{3.1.8 → v8}/src/unbound-queue-inl.h +0 -0
  419. data/ext/v8/upstream/{3.1.8 → v8}/src/unbound-queue.h +0 -0
  420. data/ext/v8/upstream/{3.1.8 → v8}/src/unicode-inl.h +0 -0
  421. data/ext/v8/upstream/v8/src/unicode.cc +1624 -0
  422. data/ext/v8/upstream/v8/src/unicode.h +280 -0
  423. data/ext/v8/upstream/{3.1.8 → v8}/src/uri.js +0 -0
  424. data/ext/v8/upstream/{3.1.8 → v8}/src/utils.cc +0 -0
  425. data/ext/v8/upstream/v8/src/utils.h +796 -0
  426. data/ext/v8/upstream/v8/src/v8-counters.cc +62 -0
  427. data/ext/v8/upstream/v8/src/v8-counters.h +311 -0
  428. data/ext/v8/upstream/v8/src/v8.cc +215 -0
  429. data/ext/v8/upstream/v8/src/v8.h +130 -0
  430. data/ext/v8/upstream/{3.1.8 → v8}/src/v8checks.h +0 -0
  431. data/ext/v8/upstream/{3.1.8 → v8}/src/v8dll-main.cc +0 -0
  432. data/ext/v8/upstream/v8/src/v8globals.h +486 -0
  433. data/ext/v8/upstream/{3.1.8/src/memory.h → v8/src/v8memory.h} +0 -0
  434. data/ext/v8/upstream/v8/src/v8natives.js +1293 -0
  435. data/ext/v8/upstream/{3.1.8 → v8}/src/v8preparserdll-main.cc +0 -0
  436. data/ext/v8/upstream/v8/src/v8threads.cc +453 -0
  437. data/ext/v8/upstream/v8/src/v8threads.h +164 -0
  438. data/ext/v8/upstream/v8/src/v8utils.h +317 -0
  439. data/ext/v8/upstream/{3.1.8 → v8}/src/variables.cc +0 -0
  440. data/ext/v8/upstream/v8/src/variables.h +212 -0
  441. data/ext/v8/upstream/v8/src/version.cc +116 -0
  442. data/ext/v8/upstream/v8/src/version.h +68 -0
  443. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame-heavy-inl.h +0 -0
  444. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame-heavy.cc +0 -0
  445. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame-inl.h +0 -0
  446. data/ext/v8/upstream/v8/src/virtual-frame-light-inl.h +171 -0
  447. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame-light.cc +0 -0
  448. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame.cc +0 -0
  449. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame.h +0 -0
  450. data/ext/v8/upstream/v8/src/vm-state-inl.h +138 -0
  451. data/ext/v8/upstream/v8/src/vm-state.h +70 -0
  452. data/ext/v8/upstream/v8/src/win32-headers.h +96 -0
  453. data/ext/v8/upstream/v8/src/x64/assembler-x64-inl.h +456 -0
  454. data/ext/v8/upstream/v8/src/x64/assembler-x64.cc +2954 -0
  455. data/ext/v8/upstream/v8/src/x64/assembler-x64.h +1630 -0
  456. data/ext/v8/upstream/v8/src/x64/builtins-x64.cc +1493 -0
  457. data/ext/v8/upstream/v8/src/x64/code-stubs-x64.cc +5132 -0
  458. data/ext/v8/upstream/v8/src/x64/code-stubs-x64.h +477 -0
  459. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/codegen-x64-inl.h +0 -0
  460. data/ext/v8/upstream/v8/src/x64/codegen-x64.cc +8843 -0
  461. data/ext/v8/upstream/v8/src/x64/codegen-x64.h +753 -0
  462. data/ext/v8/upstream/v8/src/x64/cpu-x64.cc +88 -0
  463. data/ext/v8/upstream/v8/src/x64/debug-x64.cc +318 -0
  464. data/ext/v8/upstream/v8/src/x64/deoptimizer-x64.cc +815 -0
  465. data/ext/v8/upstream/v8/src/x64/disasm-x64.cc +1752 -0
  466. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/frames-x64.cc +0 -0
  467. data/ext/v8/upstream/v8/src/x64/frames-x64.h +130 -0
  468. data/ext/v8/upstream/v8/src/x64/full-codegen-x64.cc +4339 -0
  469. data/ext/v8/upstream/v8/src/x64/ic-x64.cc +1752 -0
  470. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/jump-target-x64.cc +0 -0
  471. data/ext/v8/upstream/v8/src/x64/lithium-codegen-x64.cc +3970 -0
  472. data/ext/v8/upstream/v8/src/x64/lithium-codegen-x64.h +318 -0
  473. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/lithium-gap-resolver-x64.cc +0 -0
  474. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/lithium-gap-resolver-x64.h +0 -0
  475. data/ext/v8/upstream/v8/src/x64/lithium-x64.cc +2115 -0
  476. data/ext/v8/upstream/v8/src/x64/lithium-x64.h +2161 -0
  477. data/ext/v8/upstream/v8/src/x64/macro-assembler-x64.cc +2911 -0
  478. data/ext/v8/upstream/v8/src/x64/macro-assembler-x64.h +1984 -0
  479. data/ext/v8/upstream/v8/src/x64/regexp-macro-assembler-x64.cc +1398 -0
  480. data/ext/v8/upstream/v8/src/x64/regexp-macro-assembler-x64.h +282 -0
  481. data/ext/v8/upstream/v8/src/x64/register-allocator-x64-inl.h +87 -0
  482. data/ext/v8/upstream/v8/src/x64/register-allocator-x64.cc +95 -0
  483. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/register-allocator-x64.h +0 -0
  484. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/simulator-x64.cc +0 -0
  485. data/ext/v8/upstream/v8/src/x64/simulator-x64.h +71 -0
  486. data/ext/v8/upstream/v8/src/x64/stub-cache-x64.cc +3460 -0
  487. data/ext/v8/upstream/v8/src/x64/virtual-frame-x64.cc +1296 -0
  488. data/ext/v8/upstream/v8/src/x64/virtual-frame-x64.h +597 -0
  489. data/ext/v8/upstream/v8/src/zone-inl.h +129 -0
  490. data/ext/v8/upstream/v8/src/zone.cc +196 -0
  491. data/ext/v8/upstream/v8/src/zone.h +236 -0
  492. data/ext/v8/upstream/{3.1.8 → v8}/tools/codemap.js +0 -0
  493. data/ext/v8/upstream/{3.1.8 → v8}/tools/consarray.js +0 -0
  494. data/ext/v8/upstream/{3.1.8 → v8}/tools/csvparser.js +0 -0
  495. data/ext/v8/upstream/{3.1.8 → v8}/tools/disasm.py +0 -0
  496. data/ext/v8/upstream/v8/tools/freebsd-tick-processor +10 -0
  497. data/ext/v8/upstream/{3.1.8 → v8}/tools/gc-nvp-trace-processor.py +0 -0
  498. data/ext/v8/upstream/{3.1.8 → v8}/tools/generate-ten-powers.scm +0 -0
  499. data/ext/v8/upstream/{3.1.8 → v8}/tools/grokdump.py +0 -0
  500. data/ext/v8/upstream/v8/tools/gyp/v8.gyp +844 -0
  501. data/ext/v8/upstream/{3.1.8 → v8}/tools/js2c.py +0 -0
  502. data/ext/v8/upstream/{3.1.8 → v8}/tools/jsmin.py +0 -0
  503. data/ext/v8/upstream/v8/tools/linux-tick-processor +35 -0
  504. data/ext/v8/upstream/{3.1.8 → v8}/tools/ll_prof.py +0 -0
  505. data/ext/v8/upstream/{3.1.8 → v8}/tools/logreader.js +0 -0
  506. data/ext/v8/upstream/{3.1.8 → v8}/tools/mac-nm +0 -0
  507. data/ext/v8/upstream/{3.1.8 → v8}/tools/mac-tick-processor +0 -0
  508. data/ext/v8/upstream/{3.1.8 → v8}/tools/oom_dump/README +0 -0
  509. data/ext/v8/upstream/{3.1.8 → v8}/tools/oom_dump/SConstruct +0 -0
  510. data/ext/v8/upstream/{3.1.8 → v8}/tools/oom_dump/oom_dump.cc +0 -0
  511. data/ext/v8/upstream/{3.1.8 → v8}/tools/presubmit.py +0 -0
  512. data/ext/v8/upstream/{3.1.8 → v8}/tools/process-heap-prof.py +0 -0
  513. data/ext/v8/upstream/{3.1.8 → v8}/tools/profile.js +0 -0
  514. data/ext/v8/upstream/{3.1.8 → v8}/tools/profile_view.js +0 -0
  515. data/ext/v8/upstream/{3.1.8 → v8}/tools/run-valgrind.py +0 -0
  516. data/ext/v8/upstream/{3.1.8 → v8}/tools/splaytree.js +0 -0
  517. data/ext/v8/upstream/{3.1.8 → v8}/tools/stats-viewer.py +0 -0
  518. data/ext/v8/upstream/v8/tools/test.py +1490 -0
  519. data/ext/v8/upstream/{3.1.8 → v8}/tools/tickprocessor-driver.js +0 -0
  520. data/ext/v8/upstream/v8/tools/tickprocessor.js +877 -0
  521. data/ext/v8/upstream/{3.1.8 → v8}/tools/utils.py +0 -0
  522. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/README.txt +0 -0
  523. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/arm.vsprops +0 -0
  524. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/common.vsprops +0 -0
  525. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/d8.vcproj +0 -0
  526. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/d8_arm.vcproj +0 -0
  527. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/d8_x64.vcproj +0 -0
  528. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/d8js2c.cmd +0 -0
  529. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/debug.vsprops +0 -0
  530. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/ia32.vsprops +0 -0
  531. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/js2c.cmd +0 -0
  532. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/release.vsprops +0 -0
  533. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8.sln +0 -0
  534. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8.vcproj +0 -0
  535. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_arm.sln +0 -0
  536. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_arm.vcproj +0 -0
  537. data/ext/v8/upstream/v8/tools/visual_studio/v8_base.vcproj +1308 -0
  538. data/ext/v8/upstream/v8/tools/visual_studio/v8_base_arm.vcproj +1238 -0
  539. data/ext/v8/upstream/v8/tools/visual_studio/v8_base_x64.vcproj +1300 -0
  540. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_cctest.vcproj +0 -0
  541. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_cctest_arm.vcproj +0 -0
  542. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_cctest_x64.vcproj +0 -0
  543. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_mksnapshot.vcproj +0 -0
  544. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_mksnapshot_x64.vcproj +0 -0
  545. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_process_sample.vcproj +0 -0
  546. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_process_sample_arm.vcproj +0 -0
  547. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_process_sample_x64.vcproj +0 -0
  548. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_shell_sample.vcproj +0 -0
  549. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_shell_sample_arm.vcproj +0 -0
  550. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_shell_sample_x64.vcproj +0 -0
  551. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_snapshot.vcproj +0 -0
  552. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_snapshot_cc.vcproj +0 -0
  553. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_snapshot_cc_x64.vcproj +0 -0
  554. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_snapshot_x64.vcproj +0 -0
  555. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_x64.sln +0 -0
  556. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_x64.vcproj +0 -0
  557. data/ext/v8/upstream/v8/tools/visual_studio/x64.vsprops +18 -0
  558. data/ext/v8/upstream/{3.1.8 → v8}/tools/windows-tick-processor.bat +0 -0
  559. data/ext/v8/v8_callbacks.cpp +52 -92
  560. data/ext/v8/v8_date.cpp +2 -3
  561. data/ext/v8/v8_object.cpp +4 -0
  562. data/ext/v8/v8_template.cpp +2 -2
  563. data/ext/v8/v8_try_catch.cpp +8 -38
  564. data/lib/v8/version.rb +1 -1
  565. data/spec/ext/ext_spec_helper.rb +2 -20
  566. data/spec/ext/object_spec.rb +0 -12
  567. data/spec/ext/try_catch_spec.rb +29 -1
  568. data/spec/spec_helper.rb +1 -0
  569. data/spec/v8/portal/proxies_spec.rb +1 -84
  570. data/specmem/handle_memspec.rb +41 -0
  571. data/specmem/object_memspec.rb +16 -0
  572. data/specmem/proxies_memspec.rb +86 -0
  573. data/specmem/spec_helper.rb +24 -0
  574. data/therubyracer.gemspec +7 -2
  575. metadata +564 -541
  576. data/ext/v8/upstream/3.1.8/.gitignore +0 -31
  577. data/ext/v8/upstream/3.1.8/AUTHORS +0 -40
  578. data/ext/v8/upstream/3.1.8/ChangeLog +0 -2566
  579. data/ext/v8/upstream/3.1.8/SConstruct +0 -1192
  580. data/ext/v8/upstream/3.1.8/include/v8-debug.h +0 -384
  581. data/ext/v8/upstream/3.1.8/include/v8-preparser.h +0 -116
  582. data/ext/v8/upstream/3.1.8/include/v8-profiler.h +0 -426
  583. data/ext/v8/upstream/3.1.8/include/v8-testing.h +0 -99
  584. data/ext/v8/upstream/3.1.8/include/v8.h +0 -3846
  585. data/ext/v8/upstream/3.1.8/preparser/preparser-process.cc +0 -206
  586. data/ext/v8/upstream/3.1.8/src/SConscript +0 -356
  587. data/ext/v8/upstream/3.1.8/src/accessors.cc +0 -907
  588. data/ext/v8/upstream/3.1.8/src/allocation.cc +0 -204
  589. data/ext/v8/upstream/3.1.8/src/allocation.h +0 -176
  590. data/ext/v8/upstream/3.1.8/src/api.cc +0 -5191
  591. data/ext/v8/upstream/3.1.8/src/api.h +0 -508
  592. data/ext/v8/upstream/3.1.8/src/apiutils.h +0 -80
  593. data/ext/v8/upstream/3.1.8/src/arguments.h +0 -105
  594. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm-inl.h +0 -352
  595. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.cc +0 -2756
  596. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.h +0 -1294
  597. data/ext/v8/upstream/3.1.8/src/arm/builtins-arm.cc +0 -1628
  598. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.cc +0 -6783
  599. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.h +0 -657
  600. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.cc +0 -7403
  601. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.h +0 -595
  602. data/ext/v8/upstream/3.1.8/src/arm/constants-arm.h +0 -769
  603. data/ext/v8/upstream/3.1.8/src/arm/cpu-arm.cc +0 -147
  604. data/ext/v8/upstream/3.1.8/src/arm/debug-arm.cc +0 -315
  605. data/ext/v8/upstream/3.1.8/src/arm/deoptimizer-arm.cc +0 -700
  606. data/ext/v8/upstream/3.1.8/src/arm/disasm-arm.cc +0 -1439
  607. data/ext/v8/upstream/3.1.8/src/arm/frames-arm.h +0 -168
  608. data/ext/v8/upstream/3.1.8/src/arm/full-codegen-arm.cc +0 -4230
  609. data/ext/v8/upstream/3.1.8/src/arm/ic-arm.cc +0 -1799
  610. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.cc +0 -2041
  611. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.h +0 -2046
  612. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.cc +0 -3822
  613. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.h +0 -312
  614. data/ext/v8/upstream/3.1.8/src/arm/lithium-gap-resolver-arm.cc +0 -303
  615. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.cc +0 -2701
  616. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.h +0 -1015
  617. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.cc +0 -1280
  618. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.h +0 -252
  619. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.cc +0 -3165
  620. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.h +0 -402
  621. data/ext/v8/upstream/3.1.8/src/arm/stub-cache-arm.cc +0 -4077
  622. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.cc +0 -843
  623. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.h +0 -520
  624. data/ext/v8/upstream/3.1.8/src/array.js +0 -1231
  625. data/ext/v8/upstream/3.1.8/src/assembler.cc +0 -973
  626. data/ext/v8/upstream/3.1.8/src/assembler.h +0 -787
  627. data/ext/v8/upstream/3.1.8/src/ast-inl.h +0 -107
  628. data/ext/v8/upstream/3.1.8/src/ast.cc +0 -1067
  629. data/ext/v8/upstream/3.1.8/src/ast.h +0 -2177
  630. data/ext/v8/upstream/3.1.8/src/atomicops.h +0 -165
  631. data/ext/v8/upstream/3.1.8/src/bootstrapper.cc +0 -1888
  632. data/ext/v8/upstream/3.1.8/src/bootstrapper.h +0 -118
  633. data/ext/v8/upstream/3.1.8/src/builtins.cc +0 -1586
  634. data/ext/v8/upstream/3.1.8/src/builtins.h +0 -339
  635. data/ext/v8/upstream/3.1.8/src/checks.cc +0 -110
  636. data/ext/v8/upstream/3.1.8/src/checks.h +0 -292
  637. data/ext/v8/upstream/3.1.8/src/code-stubs.cc +0 -230
  638. data/ext/v8/upstream/3.1.8/src/code-stubs.h +0 -950
  639. data/ext/v8/upstream/3.1.8/src/codegen-inl.h +0 -64
  640. data/ext/v8/upstream/3.1.8/src/codegen.cc +0 -495
  641. data/ext/v8/upstream/3.1.8/src/codegen.h +0 -245
  642. data/ext/v8/upstream/3.1.8/src/compilation-cache.cc +0 -654
  643. data/ext/v8/upstream/3.1.8/src/compilation-cache.h +0 -112
  644. data/ext/v8/upstream/3.1.8/src/compiler.cc +0 -806
  645. data/ext/v8/upstream/3.1.8/src/compiler.h +0 -290
  646. data/ext/v8/upstream/3.1.8/src/contexts.cc +0 -320
  647. data/ext/v8/upstream/3.1.8/src/contexts.h +0 -376
  648. data/ext/v8/upstream/3.1.8/src/conversions.cc +0 -1069
  649. data/ext/v8/upstream/3.1.8/src/counters.cc +0 -78
  650. data/ext/v8/upstream/3.1.8/src/counters.h +0 -242
  651. data/ext/v8/upstream/3.1.8/src/cpu-profiler-inl.h +0 -100
  652. data/ext/v8/upstream/3.1.8/src/cpu-profiler.cc +0 -554
  653. data/ext/v8/upstream/3.1.8/src/cpu-profiler.h +0 -291
  654. data/ext/v8/upstream/3.1.8/src/cpu.h +0 -65
  655. data/ext/v8/upstream/3.1.8/src/d8-debug.cc +0 -367
  656. data/ext/v8/upstream/3.1.8/src/d8-debug.h +0 -157
  657. data/ext/v8/upstream/3.1.8/src/d8-posix.cc +0 -693
  658. data/ext/v8/upstream/3.1.8/src/d8.cc +0 -792
  659. data/ext/v8/upstream/3.1.8/src/d8.gyp +0 -85
  660. data/ext/v8/upstream/3.1.8/src/data-flow.h +0 -379
  661. data/ext/v8/upstream/3.1.8/src/dateparser.h +0 -263
  662. data/ext/v8/upstream/3.1.8/src/debug-agent.cc +0 -446
  663. data/ext/v8/upstream/3.1.8/src/debug-agent.h +0 -131
  664. data/ext/v8/upstream/3.1.8/src/debug.cc +0 -3085
  665. data/ext/v8/upstream/3.1.8/src/debug.h +0 -1025
  666. data/ext/v8/upstream/3.1.8/src/deoptimizer.cc +0 -1185
  667. data/ext/v8/upstream/3.1.8/src/deoptimizer.h +0 -529
  668. data/ext/v8/upstream/3.1.8/src/disasm.h +0 -77
  669. data/ext/v8/upstream/3.1.8/src/disassembler.cc +0 -338
  670. data/ext/v8/upstream/3.1.8/src/execution.cc +0 -735
  671. data/ext/v8/upstream/3.1.8/src/execution.h +0 -322
  672. data/ext/v8/upstream/3.1.8/src/extensions/experimental/experimental.gyp +0 -53
  673. data/ext/v8/upstream/3.1.8/src/extensions/experimental/i18n-extension.cc +0 -264
  674. data/ext/v8/upstream/3.1.8/src/extensions/externalize-string-extension.cc +0 -141
  675. data/ext/v8/upstream/3.1.8/src/extensions/gc-extension.cc +0 -58
  676. data/ext/v8/upstream/3.1.8/src/factory.cc +0 -1087
  677. data/ext/v8/upstream/3.1.8/src/factory.h +0 -432
  678. data/ext/v8/upstream/3.1.8/src/flag-definitions.h +0 -552
  679. data/ext/v8/upstream/3.1.8/src/frame-element.cc +0 -42
  680. data/ext/v8/upstream/3.1.8/src/frame-element.h +0 -277
  681. data/ext/v8/upstream/3.1.8/src/frames-inl.h +0 -210
  682. data/ext/v8/upstream/3.1.8/src/frames.cc +0 -1232
  683. data/ext/v8/upstream/3.1.8/src/frames.h +0 -826
  684. data/ext/v8/upstream/3.1.8/src/full-codegen.cc +0 -1382
  685. data/ext/v8/upstream/3.1.8/src/full-codegen.h +0 -751
  686. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.cc +0 -90
  687. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.h +0 -111
  688. data/ext/v8/upstream/3.1.8/src/gdb-jit.cc +0 -1547
  689. data/ext/v8/upstream/3.1.8/src/global-handles.cc +0 -534
  690. data/ext/v8/upstream/3.1.8/src/global-handles.h +0 -181
  691. data/ext/v8/upstream/3.1.8/src/globals.h +0 -325
  692. data/ext/v8/upstream/3.1.8/src/handles-inl.h +0 -80
  693. data/ext/v8/upstream/3.1.8/src/handles.cc +0 -910
  694. data/ext/v8/upstream/3.1.8/src/handles.h +0 -424
  695. data/ext/v8/upstream/3.1.8/src/hashmap.h +0 -121
  696. data/ext/v8/upstream/3.1.8/src/heap-inl.h +0 -587
  697. data/ext/v8/upstream/3.1.8/src/heap-profiler.cc +0 -1128
  698. data/ext/v8/upstream/3.1.8/src/heap-profiler.h +0 -381
  699. data/ext/v8/upstream/3.1.8/src/heap.cc +0 -5610
  700. data/ext/v8/upstream/3.1.8/src/heap.h +0 -2218
  701. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.cc +0 -1490
  702. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.h +0 -3493
  703. data/ext/v8/upstream/3.1.8/src/hydrogen.cc +0 -6056
  704. data/ext/v8/upstream/3.1.8/src/hydrogen.h +0 -1091
  705. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32-inl.h +0 -429
  706. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.cc +0 -2800
  707. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.h +0 -1093
  708. data/ext/v8/upstream/3.1.8/src/ia32/builtins-ia32.cc +0 -1590
  709. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.cc +0 -6624
  710. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.h +0 -536
  711. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.cc +0 -10354
  712. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.h +0 -798
  713. data/ext/v8/upstream/3.1.8/src/ia32/cpu-ia32.cc +0 -87
  714. data/ext/v8/upstream/3.1.8/src/ia32/debug-ia32.cc +0 -309
  715. data/ext/v8/upstream/3.1.8/src/ia32/deoptimizer-ia32.cc +0 -664
  716. data/ext/v8/upstream/3.1.8/src/ia32/disasm-ia32.cc +0 -1597
  717. data/ext/v8/upstream/3.1.8/src/ia32/frames-ia32.h +0 -140
  718. data/ext/v8/upstream/3.1.8/src/ia32/full-codegen-ia32.cc +0 -4278
  719. data/ext/v8/upstream/3.1.8/src/ia32/ic-ia32.cc +0 -1786
  720. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.cc +0 -3880
  721. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.h +0 -309
  722. data/ext/v8/upstream/3.1.8/src/ia32/lithium-gap-resolver-ia32.cc +0 -460
  723. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.cc +0 -2095
  724. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.h +0 -2127
  725. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.cc +0 -2031
  726. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.h +0 -798
  727. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.cc +0 -1253
  728. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.h +0 -215
  729. data/ext/v8/upstream/3.1.8/src/ia32/register-allocator-ia32.cc +0 -157
  730. data/ext/v8/upstream/3.1.8/src/ia32/simulator-ia32.h +0 -72
  731. data/ext/v8/upstream/3.1.8/src/ia32/stub-cache-ia32.cc +0 -3732
  732. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.cc +0 -1360
  733. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.h +0 -646
  734. data/ext/v8/upstream/3.1.8/src/ic-inl.h +0 -129
  735. data/ext/v8/upstream/3.1.8/src/ic.cc +0 -2333
  736. data/ext/v8/upstream/3.1.8/src/ic.h +0 -639
  737. data/ext/v8/upstream/3.1.8/src/interpreter-irregexp.cc +0 -655
  738. data/ext/v8/upstream/3.1.8/src/interpreter-irregexp.h +0 -48
  739. data/ext/v8/upstream/3.1.8/src/json.js +0 -342
  740. data/ext/v8/upstream/3.1.8/src/jsregexp.cc +0 -5340
  741. data/ext/v8/upstream/3.1.8/src/jsregexp.h +0 -1484
  742. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.cc +0 -430
  743. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.h +0 -244
  744. data/ext/v8/upstream/3.1.8/src/jump-target-inl.h +0 -48
  745. data/ext/v8/upstream/3.1.8/src/jump-target-light.cc +0 -111
  746. data/ext/v8/upstream/3.1.8/src/lithium-allocator-inl.h +0 -140
  747. data/ext/v8/upstream/3.1.8/src/lithium-allocator.cc +0 -2093
  748. data/ext/v8/upstream/3.1.8/src/lithium-allocator.h +0 -644
  749. data/ext/v8/upstream/3.1.8/src/lithium.cc +0 -168
  750. data/ext/v8/upstream/3.1.8/src/liveedit.cc +0 -1650
  751. data/ext/v8/upstream/3.1.8/src/liveedit.h +0 -174
  752. data/ext/v8/upstream/3.1.8/src/liveobjectlist.cc +0 -2527
  753. data/ext/v8/upstream/3.1.8/src/liveobjectlist.h +0 -322
  754. data/ext/v8/upstream/3.1.8/src/log-utils.cc +0 -336
  755. data/ext/v8/upstream/3.1.8/src/log-utils.h +0 -232
  756. data/ext/v8/upstream/3.1.8/src/log.cc +0 -1608
  757. data/ext/v8/upstream/3.1.8/src/log.h +0 -379
  758. data/ext/v8/upstream/3.1.8/src/mark-compact.cc +0 -2957
  759. data/ext/v8/upstream/3.1.8/src/mark-compact.h +0 -433
  760. data/ext/v8/upstream/3.1.8/src/messages.cc +0 -164
  761. data/ext/v8/upstream/3.1.8/src/messages.js +0 -1071
  762. data/ext/v8/upstream/3.1.8/src/mips/assembler-mips-inl.h +0 -215
  763. data/ext/v8/upstream/3.1.8/src/mips/assembler-mips.cc +0 -1219
  764. data/ext/v8/upstream/3.1.8/src/mips/assembler-mips.h +0 -667
  765. data/ext/v8/upstream/3.1.8/src/mips/builtins-mips.cc +0 -205
  766. data/ext/v8/upstream/3.1.8/src/mips/codegen-mips-inl.h +0 -70
  767. data/ext/v8/upstream/3.1.8/src/mips/codegen-mips.cc +0 -1437
  768. data/ext/v8/upstream/3.1.8/src/mips/codegen-mips.h +0 -431
  769. data/ext/v8/upstream/3.1.8/src/mips/constants-mips.cc +0 -328
  770. data/ext/v8/upstream/3.1.8/src/mips/constants-mips.h +0 -525
  771. data/ext/v8/upstream/3.1.8/src/mips/cpu-mips.cc +0 -73
  772. data/ext/v8/upstream/3.1.8/src/mips/debug-mips.cc +0 -127
  773. data/ext/v8/upstream/3.1.8/src/mips/disasm-mips.cc +0 -787
  774. data/ext/v8/upstream/3.1.8/src/mips/fast-codegen-mips.cc +0 -77
  775. data/ext/v8/upstream/3.1.8/src/mips/frames-mips.cc +0 -96
  776. data/ext/v8/upstream/3.1.8/src/mips/frames-mips.h +0 -164
  777. data/ext/v8/upstream/3.1.8/src/mips/full-codegen-mips.cc +0 -277
  778. data/ext/v8/upstream/3.1.8/src/mips/ic-mips.cc +0 -208
  779. data/ext/v8/upstream/3.1.8/src/mips/jump-target-mips.cc +0 -175
  780. data/ext/v8/upstream/3.1.8/src/mips/macro-assembler-mips.cc +0 -1326
  781. data/ext/v8/upstream/3.1.8/src/mips/macro-assembler-mips.h +0 -461
  782. data/ext/v8/upstream/3.1.8/src/mips/register-allocator-mips-inl.h +0 -137
  783. data/ext/v8/upstream/3.1.8/src/mips/register-allocator-mips.h +0 -46
  784. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.cc +0 -1650
  785. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.h +0 -311
  786. data/ext/v8/upstream/3.1.8/src/mips/stub-cache-mips.cc +0 -418
  787. data/ext/v8/upstream/3.1.8/src/mips/virtual-frame-mips.cc +0 -319
  788. data/ext/v8/upstream/3.1.8/src/mips/virtual-frame-mips.h +0 -548
  789. data/ext/v8/upstream/3.1.8/src/mirror-debugger.js +0 -2380
  790. data/ext/v8/upstream/3.1.8/src/mksnapshot.cc +0 -256
  791. data/ext/v8/upstream/3.1.8/src/objects-debug.cc +0 -722
  792. data/ext/v8/upstream/3.1.8/src/objects-inl.h +0 -3946
  793. data/ext/v8/upstream/3.1.8/src/objects-printer.cc +0 -801
  794. data/ext/v8/upstream/3.1.8/src/objects-visiting.cc +0 -142
  795. data/ext/v8/upstream/3.1.8/src/objects-visiting.h +0 -401
  796. data/ext/v8/upstream/3.1.8/src/objects.cc +0 -10044
  797. data/ext/v8/upstream/3.1.8/src/objects.h +0 -6571
  798. data/ext/v8/upstream/3.1.8/src/parser.cc +0 -5165
  799. data/ext/v8/upstream/3.1.8/src/parser.h +0 -802
  800. data/ext/v8/upstream/3.1.8/src/platform-cygwin.cc +0 -745
  801. data/ext/v8/upstream/3.1.8/src/platform-freebsd.cc +0 -702
  802. data/ext/v8/upstream/3.1.8/src/platform-linux.cc +0 -981
  803. data/ext/v8/upstream/3.1.8/src/platform-macos.cc +0 -732
  804. data/ext/v8/upstream/3.1.8/src/platform-nullos.cc +0 -498
  805. data/ext/v8/upstream/3.1.8/src/platform-openbsd.cc +0 -657
  806. data/ext/v8/upstream/3.1.8/src/platform-posix.cc +0 -399
  807. data/ext/v8/upstream/3.1.8/src/platform-solaris.cc +0 -714
  808. data/ext/v8/upstream/3.1.8/src/platform-win32.cc +0 -1974
  809. data/ext/v8/upstream/3.1.8/src/platform.h +0 -636
  810. data/ext/v8/upstream/3.1.8/src/preparse-data.cc +0 -183
  811. data/ext/v8/upstream/3.1.8/src/preparser-api.cc +0 -213
  812. data/ext/v8/upstream/3.1.8/src/preparser.cc +0 -1205
  813. data/ext/v8/upstream/3.1.8/src/prettyprinter.cc +0 -1539
  814. data/ext/v8/upstream/3.1.8/src/prettyprinter.h +0 -223
  815. data/ext/v8/upstream/3.1.8/src/profile-generator.cc +0 -2899
  816. data/ext/v8/upstream/3.1.8/src/profile-generator.h +0 -1151
  817. data/ext/v8/upstream/3.1.8/src/property.cc +0 -96
  818. data/ext/v8/upstream/3.1.8/src/property.h +0 -337
  819. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-irregexp.cc +0 -470
  820. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.cc +0 -257
  821. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.h +0 -231
  822. data/ext/v8/upstream/3.1.8/src/regexp-stack.cc +0 -103
  823. data/ext/v8/upstream/3.1.8/src/regexp-stack.h +0 -123
  824. data/ext/v8/upstream/3.1.8/src/regexp.js +0 -483
  825. data/ext/v8/upstream/3.1.8/src/register-allocator-inl.h +0 -141
  826. data/ext/v8/upstream/3.1.8/src/register-allocator.cc +0 -104
  827. data/ext/v8/upstream/3.1.8/src/register-allocator.h +0 -320
  828. data/ext/v8/upstream/3.1.8/src/rewriter.cc +0 -1023
  829. data/ext/v8/upstream/3.1.8/src/runtime-profiler.cc +0 -443
  830. data/ext/v8/upstream/3.1.8/src/runtime-profiler.h +0 -77
  831. data/ext/v8/upstream/3.1.8/src/runtime.cc +0 -11592
  832. data/ext/v8/upstream/3.1.8/src/runtime.h +0 -582
  833. data/ext/v8/upstream/3.1.8/src/safepoint-table.cc +0 -253
  834. data/ext/v8/upstream/3.1.8/src/safepoint-table.h +0 -263
  835. data/ext/v8/upstream/3.1.8/src/scanner-base.cc +0 -971
  836. data/ext/v8/upstream/3.1.8/src/scanner-base.h +0 -653
  837. data/ext/v8/upstream/3.1.8/src/scanner.cc +0 -586
  838. data/ext/v8/upstream/3.1.8/src/scanner.h +0 -194
  839. data/ext/v8/upstream/3.1.8/src/scopeinfo.cc +0 -636
  840. data/ext/v8/upstream/3.1.8/src/scopeinfo.h +0 -238
  841. data/ext/v8/upstream/3.1.8/src/scopes.cc +0 -1063
  842. data/ext/v8/upstream/3.1.8/src/scopes.h +0 -494
  843. data/ext/v8/upstream/3.1.8/src/serialize.cc +0 -1535
  844. data/ext/v8/upstream/3.1.8/src/serialize.h +0 -584
  845. data/ext/v8/upstream/3.1.8/src/snapshot-common.cc +0 -82
  846. data/ext/v8/upstream/3.1.8/src/snapshot.h +0 -71
  847. data/ext/v8/upstream/3.1.8/src/spaces-inl.h +0 -524
  848. data/ext/v8/upstream/3.1.8/src/spaces.cc +0 -3254
  849. data/ext/v8/upstream/3.1.8/src/spaces.h +0 -2362
  850. data/ext/v8/upstream/3.1.8/src/string-search.cc +0 -40
  851. data/ext/v8/upstream/3.1.8/src/string-search.h +0 -567
  852. data/ext/v8/upstream/3.1.8/src/string-stream.cc +0 -584
  853. data/ext/v8/upstream/3.1.8/src/string.js +0 -915
  854. data/ext/v8/upstream/3.1.8/src/stub-cache.cc +0 -1878
  855. data/ext/v8/upstream/3.1.8/src/stub-cache.h +0 -849
  856. data/ext/v8/upstream/3.1.8/src/token.cc +0 -63
  857. data/ext/v8/upstream/3.1.8/src/token.h +0 -288
  858. data/ext/v8/upstream/3.1.8/src/top.cc +0 -1152
  859. data/ext/v8/upstream/3.1.8/src/top.h +0 -608
  860. data/ext/v8/upstream/3.1.8/src/type-info.cc +0 -406
  861. data/ext/v8/upstream/3.1.8/src/type-info.h +0 -283
  862. data/ext/v8/upstream/3.1.8/src/unicode.cc +0 -1624
  863. data/ext/v8/upstream/3.1.8/src/unicode.h +0 -280
  864. data/ext/v8/upstream/3.1.8/src/utils.h +0 -793
  865. data/ext/v8/upstream/3.1.8/src/v8-counters.cc +0 -55
  866. data/ext/v8/upstream/3.1.8/src/v8-counters.h +0 -290
  867. data/ext/v8/upstream/3.1.8/src/v8.cc +0 -270
  868. data/ext/v8/upstream/3.1.8/src/v8.h +0 -127
  869. data/ext/v8/upstream/3.1.8/src/v8globals.h +0 -480
  870. data/ext/v8/upstream/3.1.8/src/v8natives.js +0 -1252
  871. data/ext/v8/upstream/3.1.8/src/v8threads.cc +0 -440
  872. data/ext/v8/upstream/3.1.8/src/v8threads.h +0 -157
  873. data/ext/v8/upstream/3.1.8/src/v8utils.h +0 -354
  874. data/ext/v8/upstream/3.1.8/src/variables.h +0 -212
  875. data/ext/v8/upstream/3.1.8/src/version.cc +0 -95
  876. data/ext/v8/upstream/3.1.8/src/version.h +0 -64
  877. data/ext/v8/upstream/3.1.8/src/virtual-frame-light-inl.h +0 -170
  878. data/ext/v8/upstream/3.1.8/src/vm-state-inl.h +0 -134
  879. data/ext/v8/upstream/3.1.8/src/vm-state.h +0 -68
  880. data/ext/v8/upstream/3.1.8/src/win32-headers.h +0 -95
  881. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64-inl.h +0 -455
  882. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.cc +0 -3162
  883. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.h +0 -1584
  884. data/ext/v8/upstream/3.1.8/src/x64/builtins-x64.cc +0 -1492
  885. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.cc +0 -5150
  886. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.h +0 -519
  887. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.cc +0 -8835
  888. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.h +0 -750
  889. data/ext/v8/upstream/3.1.8/src/x64/cpu-x64.cc +0 -86
  890. data/ext/v8/upstream/3.1.8/src/x64/debug-x64.cc +0 -316
  891. data/ext/v8/upstream/3.1.8/src/x64/deoptimizer-x64.cc +0 -781
  892. data/ext/v8/upstream/3.1.8/src/x64/disasm-x64.cc +0 -1737
  893. data/ext/v8/upstream/3.1.8/src/x64/frames-x64.h +0 -130
  894. data/ext/v8/upstream/3.1.8/src/x64/full-codegen-x64.cc +0 -3984
  895. data/ext/v8/upstream/3.1.8/src/x64/ic-x64.cc +0 -1761
  896. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.cc +0 -3639
  897. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.h +0 -305
  898. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.cc +0 -2044
  899. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.h +0 -2052
  900. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.cc +0 -2660
  901. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.h +0 -1852
  902. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.cc +0 -1382
  903. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.h +0 -278
  904. data/ext/v8/upstream/3.1.8/src/x64/register-allocator-x64-inl.h +0 -87
  905. data/ext/v8/upstream/3.1.8/src/x64/register-allocator-x64.cc +0 -91
  906. data/ext/v8/upstream/3.1.8/src/x64/simulator-x64.h +0 -71
  907. data/ext/v8/upstream/3.1.8/src/x64/stub-cache-x64.cc +0 -3509
  908. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.cc +0 -1292
  909. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.h +0 -593
  910. data/ext/v8/upstream/3.1.8/src/zone-inl.h +0 -83
  911. data/ext/v8/upstream/3.1.8/src/zone.cc +0 -195
  912. data/ext/v8/upstream/3.1.8/src/zone.h +0 -233
  913. data/ext/v8/upstream/3.1.8/tools/gyp/v8.gyp +0 -869
  914. data/ext/v8/upstream/3.1.8/tools/linux-tick-processor +0 -33
  915. data/ext/v8/upstream/3.1.8/tools/tickprocessor.js +0 -863
  916. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base.vcproj +0 -1296
  917. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_arm.vcproj +0 -1234
  918. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_x64.vcproj +0 -1296
  919. data/ext/v8/upstream/3.1.8/tools/visual_studio/x64.vsprops +0 -17
  920. data/spec/ext/mem_spec.rb +0 -42
data/ext/v8/upstream/v8/src/arm/frames-arm.h
@@ -0,0 +1,168 @@
+ // Copyright 2006-2008 the V8 project authors. All rights reserved.
+ // Redistribution and use in source and binary forms, with or without
+ // modification, are permitted provided that the following conditions are
+ // met:
+ //
+ //     * Redistributions of source code must retain the above copyright
+ //       notice, this list of conditions and the following disclaimer.
+ //     * Redistributions in binary form must reproduce the above
+ //       copyright notice, this list of conditions and the following
+ //       disclaimer in the documentation and/or other materials provided
+ //       with the distribution.
+ //     * Neither the name of Google Inc. nor the names of its
+ //       contributors may be used to endorse or promote products derived
+ //       from this software without specific prior written permission.
+ //
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ #ifndef V8_ARM_FRAMES_ARM_H_
+ #define V8_ARM_FRAMES_ARM_H_
+
+ namespace v8 {
+ namespace internal {
+
+
+ // The ARM ABI does not specify the usage of register r9, which may be reserved
+ // as the static base or thread register on some platforms, in which case we
+ // leave it alone. Adjust the value of kR9Available accordingly:
+ static const int kR9Available = 1;  // 1 if available to us, 0 if reserved
+
+
+ // Register list in load/store instructions
+ // Note that the bit values must match those used in actual instruction encoding
+ static const int kNumRegs = 16;
+
+
+ // Caller-saved/arguments registers
+ static const RegList kJSCallerSaved =
+   1 << 0 |  // r0 a1
+   1 << 1 |  // r1 a2
+   1 << 2 |  // r2 a3
+   1 << 3;   // r3 a4
+
+ static const int kNumJSCallerSaved = 4;
+
+ typedef Object* JSCallerSavedBuffer[kNumJSCallerSaved];
+
+ // Return the code of the n-th caller-saved register available to JavaScript
+ // e.g. JSCallerSavedReg(0) returns r0.code() == 0
+ int JSCallerSavedCode(int n);
+
+
+ // Callee-saved registers preserved when switching from C to JavaScript
+ static const RegList kCalleeSaved =
+   1 << 4 |   // r4 v1
+   1 << 5 |   // r5 v2
+   1 << 6 |   // r6 v3
+   1 << 7 |   // r7 v4
+   1 << 8 |   // r8 v5 (cp in JavaScript code)
+   kR9Available << 9 |  // r9 v6
+   1 << 10 |  // r10 v7
+   1 << 11;   // r11 v8 (fp in JavaScript code)
+
+ static const int kNumCalleeSaved = 7 + kR9Available;
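
An aside, not part of the diff: the RegList values above are ordinary bitmasks with one bit per register code, and the kNum* constants are just their population counts. A minimal standalone sketch of that relationship, with the masks copied from the header and RegList assumed to be a 32-bit integer (V8 defines it elsewhere):

#include <cassert>
#include <stdint.h>

typedef uint32_t RegList;  // Assumed width for this sketch.

static const int kR9Available = 1;
static const RegList kJSCallerSaved = 1 << 0 | 1 << 1 | 1 << 2 | 1 << 3;
static const RegList kCalleeSaved =
    1 << 4 | 1 << 5 | 1 << 6 | 1 << 7 | 1 << 8 |
    kR9Available << 9 | 1 << 10 | 1 << 11;

// Count set bits portably (Kernighan's trick).
static int CountRegs(RegList list) {
  int count = 0;
  while (list != 0) {
    list &= list - 1;  // Clear the lowest set bit.
    ++count;
  }
  return count;
}

int main() {
  assert(CountRegs(kJSCallerSaved) == 4);               // == kNumJSCallerSaved
  assert(CountRegs(kCalleeSaved) == 7 + kR9Available);  // == kNumCalleeSaved
  assert((kJSCallerSaved & kCalleeSaved) == 0);         // The two sets are disjoint.
  return 0;
}
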
+
+
+ // Number of registers for which space is reserved in safepoints. Must be a
+ // multiple of 8.
+ // TODO(regis): Only 8 registers may actually be sufficient. Revisit.
+ static const int kNumSafepointRegisters = 16;
+
+ // Define the list of registers actually saved at safepoints.
+ // Note that the number of saved registers may be smaller than the reserved
+ // space, i.e. kNumSafepointSavedRegisters <= kNumSafepointRegisters.
+ static const RegList kSafepointSavedRegisters = kJSCallerSaved | kCalleeSaved;
+ static const int kNumSafepointSavedRegisters =
+     kNumJSCallerSaved + kNumCalleeSaved;
+
+ // ----------------------------------------------------
+
+
+ class StackHandlerConstants : public AllStatic {
+  public:
+   static const int kNextOffset = 0 * kPointerSize;
+   static const int kStateOffset = 1 * kPointerSize;
+   static const int kFPOffset = 2 * kPointerSize;
+   static const int kPCOffset = 3 * kPointerSize;
+
+   static const int kSize = kPCOffset + kPointerSize;
+ };
+
+
+ class EntryFrameConstants : public AllStatic {
+  public:
+   static const int kCallerFPOffset = -3 * kPointerSize;
+ };
+
+
+ class ExitFrameConstants : public AllStatic {
+  public:
+   static const int kCodeOffset = -2 * kPointerSize;
+   static const int kSPOffset = -1 * kPointerSize;
+
+   // The caller fields are below the frame pointer on the stack.
+   static const int kCallerFPOffset = 0 * kPointerSize;
+   // The calling JS function is below FP.
+   static const int kCallerPCOffset = 1 * kPointerSize;
+
+   // FP-relative displacement of the caller's SP. It points just
+   // below the saved PC.
+   static const int kCallerSPDisplacement = 2 * kPointerSize;
+ };
+
+
+ class StandardFrameConstants : public AllStatic {
+  public:
+   static const int kExpressionsOffset = -3 * kPointerSize;
+   static const int kMarkerOffset = -2 * kPointerSize;
+   static const int kContextOffset = -1 * kPointerSize;
+   static const int kCallerFPOffset = 0 * kPointerSize;
+   static const int kCallerPCOffset = 1 * kPointerSize;
+   static const int kCallerSPOffset = 2 * kPointerSize;
+ };
+
+
+ class JavaScriptFrameConstants : public AllStatic {
+  public:
+   // FP-relative.
+   static const int kLocal0Offset = StandardFrameConstants::kExpressionsOffset;
+   static const int kLastParameterOffset = +2 * kPointerSize;
+   static const int kFunctionOffset = StandardFrameConstants::kMarkerOffset;
+
+   // Caller SP-relative.
+   static const int kParam0Offset = -2 * kPointerSize;
+   static const int kReceiverOffset = -1 * kPointerSize;
+ };
+
+
+ class ArgumentsAdaptorFrameConstants : public AllStatic {
+  public:
+   static const int kLengthOffset = StandardFrameConstants::kExpressionsOffset;
+ };
+
+
+ class InternalFrameConstants : public AllStatic {
+  public:
+   static const int kCodeOffset = StandardFrameConstants::kExpressionsOffset;
+ };
+
+
+ inline Object* JavaScriptFrame::function_slot_object() const {
+   const int offset = JavaScriptFrameConstants::kFunctionOffset;
+   return Memory::Object_at(fp() + offset);
+ }
+
+
+ } }  // namespace v8::internal
+
+ #endif  // V8_ARM_FRAMES_ARM_H_
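
An aside, not part of the diff: the frame-constant classes above are fp-relative byte offsets, so walking a frame chain is just pointer arithmetic. A standalone sketch (not V8 code), with the two offsets copied from StandardFrameConstants, memory simulated by an array, and kPointerSize assumed to be 4 as on ARM:

#include <cstdio>

static const int kPointerSize = 4;
static const int kCallerFPOffset = 0 * kPointerSize;  // Copied from above.
static const int kCallerPCOffset = 1 * kPointerSize;  // Copied from above.

static unsigned char stack[64];  // Toy stack; "addresses" index this array.

static unsigned ReadWord(int addr) {
  return stack[addr] | stack[addr + 1] << 8 |
         stack[addr + 2] << 16 | (unsigned)stack[addr + 3] << 24;
}

static void WriteWord(int addr, unsigned value) {
  for (int i = 0; i < 4; i++) stack[addr + i] = (value >> (8 * i)) & 0xff;
}

int main() {
  // Fake a frame at fp=16 whose caller's frame sits at 32.
  WriteWord(16 + kCallerFPOffset, 32);      // Saved caller fp.
  WriteWord(16 + kCallerPCOffset, 0x1234);  // Saved return address.
  int fp = 16;
  printf("caller fp = %u, caller pc = %#x\n",
         ReadWord(fp + kCallerFPOffset), ReadWord(fp + kCallerPCOffset));
  return 0;
}
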
data/ext/v8/upstream/v8/src/arm/full-codegen-arm.cc
@@ -0,0 +1,4374 @@
+ // Copyright 2011 the V8 project authors. All rights reserved.
+ // Redistribution and use in source and binary forms, with or without
+ // modification, are permitted provided that the following conditions are
+ // met:
+ //
+ //     * Redistributions of source code must retain the above copyright
+ //       notice, this list of conditions and the following disclaimer.
+ //     * Redistributions in binary form must reproduce the above
+ //       copyright notice, this list of conditions and the following
+ //       disclaimer in the documentation and/or other materials provided
+ //       with the distribution.
+ //     * Neither the name of Google Inc. nor the names of its
+ //       contributors may be used to endorse or promote products derived
+ //       from this software without specific prior written permission.
+ //
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ #include "v8.h"
+
+ #if defined(V8_TARGET_ARCH_ARM)
+
+ #include "code-stubs.h"
+ #include "codegen-inl.h"
+ #include "compiler.h"
+ #include "debug.h"
+ #include "full-codegen.h"
+ #include "parser.h"
+ #include "scopes.h"
+ #include "stub-cache.h"
+
+ #include "arm/code-stubs-arm.h"
+
+ namespace v8 {
+ namespace internal {
+
+ #define __ ACCESS_MASM(masm_)
+
+
+ // A patch site is a location in the code which it is possible to patch. This
+ // class has a number of methods to emit the code which is patchable and the
+ // method EmitPatchInfo to record a marker back to the patchable code. This
+ // marker is a cmp rx, #yyy instruction, and x * 0x00000fff + yyy (raw 12 bit
+ // immediate value is used) is the delta from the pc to the first instruction of
+ // the patchable code.
+ class JumpPatchSite BASE_EMBEDDED {
+  public:
+   explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
+ #ifdef DEBUG
+     info_emitted_ = false;
+ #endif
+   }
+
+   ~JumpPatchSite() {
+     ASSERT(patch_site_.is_bound() == info_emitted_);
+   }
+
+   // When initially emitting this ensure that a jump is always generated to skip
+   // the inlined smi code.
+   void EmitJumpIfNotSmi(Register reg, Label* target) {
+     ASSERT(!patch_site_.is_bound() && !info_emitted_);
+     __ bind(&patch_site_);
+     __ cmp(reg, Operand(reg));
+     // Don't use b(al, ...) as that might emit the constant pool right after the
+     // branch. After patching when the branch is no longer unconditional
+     // execution can continue into the constant pool.
+     __ b(eq, target);  // Always taken before patched.
+   }
+
+   // When initially emitting this ensure that a jump is never generated to skip
+   // the inlined smi code.
+   void EmitJumpIfSmi(Register reg, Label* target) {
+     ASSERT(!patch_site_.is_bound() && !info_emitted_);
+     __ bind(&patch_site_);
+     __ cmp(reg, Operand(reg));
+     __ b(ne, target);  // Never taken before patched.
+   }
+
+   void EmitPatchInfo() {
+     int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
+     Register reg;
+     reg.set_code(delta_to_patch_site / kOff12Mask);
+     __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
+ #ifdef DEBUG
+     info_emitted_ = true;
+ #endif
+   }
+
+   bool is_bound() const { return patch_site_.is_bound(); }
+
+  private:
+   MacroAssembler* masm_;
+   Label patch_site_;
+ #ifdef DEBUG
+   bool info_emitted_;
+ #endif
+ };
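
An aside, not part of the diff: EmitPatchInfo above splits the delta back to the patch site into a register code (the quotient) and a raw 12-bit immediate (the remainder), exactly as the class comment describes. A standalone round-trip sketch of that encoding, assuming kOff12Mask = 0xfff based on the "raw 12 bit immediate" wording (V8 defines the real constant in constants-arm.h):

#include <cassert>

static const int kOff12Mask = 0xfff;  // Assumed value for this sketch.

struct PatchInfo { int reg_code; int immediate; };

static PatchInfo Encode(int delta) {
  PatchInfo info;
  info.reg_code = delta / kOff12Mask;   // Stored in the cmp's register field.
  info.immediate = delta % kOff12Mask;  // Stored in the 12-bit immediate.
  return info;
}

static int Decode(const PatchInfo& info) {
  return info.reg_code * kOff12Mask + info.immediate;
}

int main() {
  // In the real encoding the quotient must also fit a register code (0..15).
  for (int delta = 0; delta < 16 * kOff12Mask; delta++) {
    assert(Decode(Encode(delta)) == delta);  // Round-trips exactly.
  }
  return 0;
}
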
+
+
+ // Generate code for a JS function. On entry to the function the receiver
+ // and arguments have been pushed on the stack left to right. The actual
+ // argument count matches the formal parameter count expected by the
+ // function.
+ //
+ // The live registers are:
+ //   o r1: the JS function object being called (ie, ourselves)
+ //   o cp: our context
+ //   o fp: our caller's frame pointer
+ //   o sp: stack pointer
+ //   o lr: return address
+ //
+ // The function builds a JS frame. Please see JavaScriptFrameConstants in
+ // frames-arm.h for its layout.
+ void FullCodeGenerator::Generate(CompilationInfo* info) {
+   ASSERT(info_ == NULL);
+   info_ = info;
+   SetFunctionPosition(function());
+   Comment cmnt(masm_, "[ function compiled by full code generator");
+
+ #ifdef DEBUG
+   if (strlen(FLAG_stop_at) > 0 &&
+       info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
+     __ stop("stop-at");
+   }
+ #endif
+
+   int locals_count = scope()->num_stack_slots();
+
+   __ Push(lr, fp, cp, r1);
+   if (locals_count > 0) {
+     // Load undefined value here, so the value is ready for the loop
+     // below.
+     __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
+   }
+   // Adjust fp to point to caller's fp.
+   __ add(fp, sp, Operand(2 * kPointerSize));
+
+   { Comment cmnt(masm_, "[ Allocate locals");
+     for (int i = 0; i < locals_count; i++) {
+       __ push(ip);
+     }
+   }
+
+   bool function_in_register = true;
+
+   // Possibly allocate a local context.
+   int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
+   if (heap_slots > 0) {
+     Comment cmnt(masm_, "[ Allocate local context");
+     // Argument to NewContext is the function, which is in r1.
+     __ push(r1);
+     if (heap_slots <= FastNewContextStub::kMaximumSlots) {
+       FastNewContextStub stub(heap_slots);
+       __ CallStub(&stub);
+     } else {
+       __ CallRuntime(Runtime::kNewContext, 1);
+     }
+     function_in_register = false;
+     // Context is returned in both r0 and cp. It replaces the context
+     // passed to us. It's saved in the stack and kept live in cp.
+     __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+     // Copy any necessary parameters into the context.
+     int num_parameters = scope()->num_parameters();
+     for (int i = 0; i < num_parameters; i++) {
+       Slot* slot = scope()->parameter(i)->AsSlot();
+       if (slot != NULL && slot->type() == Slot::CONTEXT) {
+         int parameter_offset = StandardFrameConstants::kCallerSPOffset +
+             (num_parameters - 1 - i) * kPointerSize;
+         // Load parameter from stack.
+         __ ldr(r0, MemOperand(fp, parameter_offset));
+         // Store it in the context.
+         __ mov(r1, Operand(Context::SlotOffset(slot->index())));
+         __ str(r0, MemOperand(cp, r1));
+         // Update the write barrier. This clobbers all involved
+         // registers, so we have to use two more registers to avoid
+         // clobbering cp.
+         __ mov(r2, Operand(cp));
+         __ RecordWrite(r2, Operand(r1), r3, r0);
+       }
+     }
+   }
+
+   Variable* arguments = scope()->arguments();
+   if (arguments != NULL) {
+     // Function uses arguments object.
+     Comment cmnt(masm_, "[ Allocate arguments object");
+     if (!function_in_register) {
+       // Load this again, if it's used by the local context below.
+       __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+     } else {
+       __ mov(r3, r1);
+     }
+     // Receiver is just before the parameters on the caller's stack.
+     int offset = scope()->num_parameters() * kPointerSize;
+     __ add(r2, fp,
+            Operand(StandardFrameConstants::kCallerSPOffset + offset));
+     __ mov(r1, Operand(Smi::FromInt(scope()->num_parameters())));
+     __ Push(r3, r2, r1);
+
+     // Arguments to ArgumentsAccessStub:
+     //   function, receiver address, parameter count.
+     // The stub will rewrite receiver and parameter count if the previous
+     // stack frame was an arguments adapter frame.
+     ArgumentsAccessStub stub(
+         is_strict_mode() ? ArgumentsAccessStub::NEW_STRICT
+                          : ArgumentsAccessStub::NEW_NON_STRICT);
+     __ CallStub(&stub);
+
+     Variable* arguments_shadow = scope()->arguments_shadow();
+     if (arguments_shadow != NULL) {
+       // Duplicate the value; move-to-slot operation might clobber registers.
+       __ mov(r3, r0);
+       Move(arguments_shadow->AsSlot(), r3, r1, r2);
+     }
+     Move(arguments->AsSlot(), r0, r1, r2);
+   }
+
+   if (FLAG_trace) {
+     __ CallRuntime(Runtime::kTraceEnter, 0);
+   }
+
+   // Visit the declarations and body unless there is an illegal
+   // redeclaration.
+   if (scope()->HasIllegalRedeclaration()) {
+     Comment cmnt(masm_, "[ Declarations");
+     scope()->VisitIllegalRedeclaration(this);
+
+   } else {
+     { Comment cmnt(masm_, "[ Declarations");
+       // For named function expressions, declare the function name as a
+       // constant.
+       if (scope()->is_function_scope() && scope()->function() != NULL) {
+         EmitDeclaration(scope()->function(), Variable::CONST, NULL);
+       }
+       VisitDeclarations(scope()->declarations());
+     }
+
+     { Comment cmnt(masm_, "[ Stack check");
+       PrepareForBailout(info->function(), NO_REGISTERS);
+       Label ok;
+       __ LoadRoot(ip, Heap::kStackLimitRootIndex);
+       __ cmp(sp, Operand(ip));
+       __ b(hs, &ok);
+       StackCheckStub stub;
+       __ CallStub(&stub);
+       __ bind(&ok);
+     }
+
+     { Comment cmnt(masm_, "[ Body");
+       ASSERT(loop_depth() == 0);
+       VisitStatements(function()->body());
+       ASSERT(loop_depth() == 0);
+     }
+   }
+
+   // Always emit a 'return undefined' in case control fell off the end of
+   // the body.
+   { Comment cmnt(masm_, "[ return <undefined>;");
+     __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
+   }
+   EmitReturnSequence();
+
+   // Force emit the constant pool, so it doesn't get emitted in the middle
+   // of the stack check table.
+   masm()->CheckConstPool(true, false);
+ }
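
An aside, not part of the diff: the prologue above pushes lr, fp, cp and r1 and then sets fp = sp + 2 * kPointerSize, and that arithmetic is what makes the fp-relative offsets declared in frames-arm.h line up. A standalone consistency check (C++11), assuming V8's Push stores its first operand at the highest address:

static const int kPointerSize = 4;

// Slot positions relative to sp right after Push(lr, fp, cp, r1).
static const int kSavedLr = 3 * kPointerSize;  // Caller's return address.
static const int kSavedFp = 2 * kPointerSize;  // Caller's frame pointer.
static const int kSavedCp = 1 * kPointerSize;  // Context.
static const int kSavedR1 = 0 * kPointerSize;  // Function (frame marker).

static const int kFp = 2 * kPointerSize;  // fp = sp + 2 * kPointerSize.

// Expected values copied from StandardFrameConstants / JavaScriptFrameConstants.
static_assert(kSavedLr - kFp == 1 * kPointerSize, "kCallerPCOffset");
static_assert(kSavedFp - kFp == 0 * kPointerSize, "kCallerFPOffset");
static_assert(kSavedCp - kFp == -1 * kPointerSize, "kContextOffset");
static_assert(kSavedR1 - kFp == -2 * kPointerSize, "kFunctionOffset");

int main() { return 0; }
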
+
+
+ void FullCodeGenerator::ClearAccumulator() {
+   __ mov(r0, Operand(Smi::FromInt(0)));
+ }
+
+
+ void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
+   Comment cmnt(masm_, "[ Stack check");
+   Label ok;
+   __ LoadRoot(ip, Heap::kStackLimitRootIndex);
+   __ cmp(sp, Operand(ip));
+   __ b(hs, &ok);
+   StackCheckStub stub;
+   __ CallStub(&stub);
+   // Record a mapping of this PC offset to the OSR id. This is used to find
+   // the AST id from the unoptimized code in order to use it as a key into
+   // the deoptimization input data found in the optimized code.
+   RecordStackCheck(stmt->OsrEntryId());
+
+   __ bind(&ok);
+   PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
+   // Record a mapping of the OSR id to this PC. This is used if the OSR
+   // entry becomes the target of a bailout. We don't expect it to be, but
+   // we want it to work if it is.
+   PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
+ }
+
+
+ void FullCodeGenerator::EmitReturnSequence() {
+   Comment cmnt(masm_, "[ Return sequence");
+   if (return_label_.is_bound()) {
+     __ b(&return_label_);
+   } else {
+     __ bind(&return_label_);
+     if (FLAG_trace) {
+       // Push the return value on the stack as the parameter.
+       // Runtime::TraceExit returns its parameter in r0.
+       __ push(r0);
+       __ CallRuntime(Runtime::kTraceExit, 1);
+     }
+
+ #ifdef DEBUG
+     // Add a label for checking the size of the code used for returning.
+     Label check_exit_codesize;
+     masm_->bind(&check_exit_codesize);
+ #endif
+     // Make sure that the constant pool is not emitted inside of the return
+     // sequence.
+     { Assembler::BlockConstPoolScope block_const_pool(masm_);
+       // Here we use masm_-> instead of the __ macro to avoid the code coverage
+       // tool from instrumenting as we rely on the code size here.
+       int32_t sp_delta = (scope()->num_parameters() + 1) * kPointerSize;
+       CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
+       __ RecordJSReturn();
+       masm_->mov(sp, fp);
+       masm_->ldm(ia_w, sp, fp.bit() | lr.bit());
+       masm_->add(sp, sp, Operand(sp_delta));
+       masm_->Jump(lr);
+     }
+
+ #ifdef DEBUG
+     // Check that the size of the code used for returning is large enough
+     // for the debugger's requirements.
+     ASSERT(Assembler::kJSReturnSequenceInstructions <=
+            masm_->InstructionsGeneratedSince(&check_exit_codesize));
+ #endif
+   }
+ }
+
+
+ void FullCodeGenerator::EffectContext::Plug(Slot* slot) const {
+ }
+
+
+ void FullCodeGenerator::AccumulatorValueContext::Plug(Slot* slot) const {
+   codegen()->Move(result_register(), slot);
+ }
+
+
+ void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const {
+   codegen()->Move(result_register(), slot);
+   __ push(result_register());
+ }
+
+
+ void FullCodeGenerator::TestContext::Plug(Slot* slot) const {
+   // For simplicity we always test the accumulator register.
+   codegen()->Move(result_register(), slot);
+   codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
+   codegen()->DoTest(true_label_, false_label_, fall_through_);
+ }
+
+
+ void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
+ }
+
+
+ void FullCodeGenerator::AccumulatorValueContext::Plug(
+     Heap::RootListIndex index) const {
+   __ LoadRoot(result_register(), index);
+ }
+
+
+ void FullCodeGenerator::StackValueContext::Plug(
+     Heap::RootListIndex index) const {
+   __ LoadRoot(result_register(), index);
+   __ push(result_register());
+ }
+
+
+ void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
+   codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
+                                           true,
+                                           true_label_,
+                                           false_label_);
+   if (index == Heap::kUndefinedValueRootIndex ||
+       index == Heap::kNullValueRootIndex ||
+       index == Heap::kFalseValueRootIndex) {
+     if (false_label_ != fall_through_) __ b(false_label_);
+   } else if (index == Heap::kTrueValueRootIndex) {
+     if (true_label_ != fall_through_) __ b(true_label_);
+   } else {
+     __ LoadRoot(result_register(), index);
+     codegen()->DoTest(true_label_, false_label_, fall_through_);
+   }
+ }
+
+
+ void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
+ }
+
+
+ void FullCodeGenerator::AccumulatorValueContext::Plug(
+     Handle<Object> lit) const {
+   __ mov(result_register(), Operand(lit));
+ }
+
+
+ void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
+   // Immediates cannot be pushed directly.
+   __ mov(result_register(), Operand(lit));
+   __ push(result_register());
+ }
+
+
+ void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
+   codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
+                                           true,
+                                           true_label_,
+                                           false_label_);
+   ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
+   if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
+     if (false_label_ != fall_through_) __ b(false_label_);
+   } else if (lit->IsTrue() || lit->IsJSObject()) {
+     if (true_label_ != fall_through_) __ b(true_label_);
+   } else if (lit->IsString()) {
+     if (String::cast(*lit)->length() == 0) {
+       if (false_label_ != fall_through_) __ b(false_label_);
+       __ b(false_label_);
+     } else {
+       if (true_label_ != fall_through_) __ b(true_label_);
+     }
+   } else if (lit->IsSmi()) {
+     if (Smi::cast(*lit)->value() == 0) {
+       if (false_label_ != fall_through_) __ b(false_label_);
+     } else {
+       if (true_label_ != fall_through_) __ b(true_label_);
+     }
+   } else {
+     // For simplicity we always test the accumulator register.
+     __ mov(result_register(), Operand(lit));
+     codegen()->DoTest(true_label_, false_label_, fall_through_);
+   }
+ }
+
+
+ void FullCodeGenerator::EffectContext::DropAndPlug(int count,
+                                                    Register reg) const {
+   ASSERT(count > 0);
+   __ Drop(count);
+ }
+
+
+ void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
+     int count,
+     Register reg) const {
+   ASSERT(count > 0);
+   __ Drop(count);
+   __ Move(result_register(), reg);
+ }
+
+
+ void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
+                                                        Register reg) const {
+   ASSERT(count > 0);
+   if (count > 1) __ Drop(count - 1);
+   __ str(reg, MemOperand(sp, 0));
+ }
+
+
+ void FullCodeGenerator::TestContext::DropAndPlug(int count,
+                                                  Register reg) const {
+   ASSERT(count > 0);
+   // For simplicity we always test the accumulator register.
+   __ Drop(count);
+   __ Move(result_register(), reg);
+   codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
+   codegen()->DoTest(true_label_, false_label_, fall_through_);
+ }
+
+
+ void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
+                                             Label* materialize_false) const {
+   ASSERT(materialize_true == materialize_false);
+   __ bind(materialize_true);
+ }
+
+
+ void FullCodeGenerator::AccumulatorValueContext::Plug(
+     Label* materialize_true,
+     Label* materialize_false) const {
+   Label done;
+   __ bind(materialize_true);
+   __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
+   __ jmp(&done);
+   __ bind(materialize_false);
+   __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
+   __ bind(&done);
+ }
+
+
+ void FullCodeGenerator::StackValueContext::Plug(
+     Label* materialize_true,
+     Label* materialize_false) const {
+   Label done;
+   __ bind(materialize_true);
+   __ LoadRoot(ip, Heap::kTrueValueRootIndex);
+   __ push(ip);
+   __ jmp(&done);
+   __ bind(materialize_false);
+   __ LoadRoot(ip, Heap::kFalseValueRootIndex);
+   __ push(ip);
+   __ bind(&done);
+ }
+
+
+ void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
+                                           Label* materialize_false) const {
+   ASSERT(materialize_true == true_label_);
+   ASSERT(materialize_false == false_label_);
+ }
+
+
+ void FullCodeGenerator::EffectContext::Plug(bool flag) const {
+ }
+
+
+ void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
+   Heap::RootListIndex value_root_index =
+       flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
+   __ LoadRoot(result_register(), value_root_index);
+ }
+
+
+ void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
+   Heap::RootListIndex value_root_index =
+       flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
+   __ LoadRoot(ip, value_root_index);
+   __ push(ip);
+ }
+
+
+ void FullCodeGenerator::TestContext::Plug(bool flag) const {
+   codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
+                                           true,
+                                           true_label_,
+                                           false_label_);
+   if (flag) {
+     if (true_label_ != fall_through_) __ b(true_label_);
+   } else {
+     if (false_label_ != fall_through_) __ b(false_label_);
+   }
+ }
+
+
+ void FullCodeGenerator::DoTest(Label* if_true,
+                                Label* if_false,
+                                Label* fall_through) {
+   if (CpuFeatures::IsSupported(VFP3)) {
+     CpuFeatures::Scope scope(VFP3);
+     // Emit the inlined tests assumed by the stub.
+     __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
+     __ cmp(result_register(), ip);
+     __ b(eq, if_false);
+     __ LoadRoot(ip, Heap::kTrueValueRootIndex);
+     __ cmp(result_register(), ip);
+     __ b(eq, if_true);
+     __ LoadRoot(ip, Heap::kFalseValueRootIndex);
+     __ cmp(result_register(), ip);
+     __ b(eq, if_false);
+     STATIC_ASSERT(kSmiTag == 0);
+     __ tst(result_register(), result_register());
+     __ b(eq, if_false);
+     __ JumpIfSmi(result_register(), if_true);
+
+     // Call the ToBoolean stub for all other cases.
+     ToBooleanStub stub(result_register());
+     __ CallStub(&stub);
+     __ tst(result_register(), result_register());
+   } else {
+     // Call the runtime to find the boolean value of the source and then
+     // translate it into control flow to the pair of labels.
+     __ push(result_register());
+     __ CallRuntime(Runtime::kToBool, 1);
+     __ LoadRoot(ip, Heap::kFalseValueRootIndex);
+     __ cmp(r0, ip);
+   }
+
+   // The stub returns nonzero for true.
+   Split(ne, if_true, if_false, fall_through);
+ }
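
An aside, not part of the diff: DoTest above inlines the cheap truthiness cases (undefined, true, false, smi zero, other smis) before falling back to the ToBoolean stub. A toy sketch of the same decision order in plain C++; the tag names are illustrative, not V8's, and the stub fallback is deliberately oversimplified:

#include <cassert>

enum ToyTag { kUndefinedVal, kTrueVal, kFalseVal, kSmiVal, kOtherVal };
struct ToyValue { ToyTag tag; int smi_value; };

// Mirrors the order of the inlined checks in DoTest. Anything the inlined
// tests cannot decide goes to the ToBoolean stub, modelled here as "true",
// which is not what the stub actually does for e.g. empty strings.
static bool ToyToBoolean(ToyValue v) {
  if (v.tag == kUndefinedVal) return false;
  if (v.tag == kTrueVal) return true;
  if (v.tag == kFalseVal) return false;
  if (v.tag == kSmiVal) return v.smi_value != 0;
  return true;  // Stub fallback, simplified.
}

int main() {
  ToyValue zero = { kSmiVal, 0 };
  ToyValue one = { kSmiVal, 1 };
  ToyValue undef = { kUndefinedVal, 0 };
  assert(!ToyToBoolean(zero));
  assert(ToyToBoolean(one));
  assert(!ToyToBoolean(undef));
  return 0;
}
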
+
+
+ void FullCodeGenerator::Split(Condition cond,
+                               Label* if_true,
+                               Label* if_false,
+                               Label* fall_through) {
+   if (if_false == fall_through) {
+     __ b(cond, if_true);
+   } else if (if_true == fall_through) {
+     __ b(NegateCondition(cond), if_false);
+   } else {
+     __ b(cond, if_true);
+     __ b(if_false);
+   }
+ }
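
An aside, not part of the diff: Split emits at most one conditional branch plus an optional unconditional one, depending on which target is the fall-through. A standalone sketch of the same selection logic, printing pseudo-branches instead of emitting them:

#include <cstdio>

static void Split(const char* if_true, const char* if_false,
                  const char* fall_through) {
  if (if_false == fall_through) {
    printf("b<cond> %s\n", if_true);    // Fall into if_false.
  } else if (if_true == fall_through) {
    printf("b<!cond> %s\n", if_false);  // Fall into if_true.
  } else {
    printf("b<cond> %s\n", if_true);    // Neither falls through:
    printf("b %s\n", if_false);         // branch both ways.
  }
}

int main() {
  const char* t = "true_label";
  const char* f = "false_label";
  Split(t, f, f);     // One conditional branch.
  Split(t, f, t);     // One negated conditional branch.
  Split(t, f, NULL);  // Conditional plus unconditional branch.
  return 0;
}
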
+
+
+ MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) {
+   switch (slot->type()) {
+     case Slot::PARAMETER:
+     case Slot::LOCAL:
+       return MemOperand(fp, SlotOffset(slot));
+     case Slot::CONTEXT: {
+       int context_chain_length =
+           scope()->ContextChainLength(slot->var()->scope());
+       __ LoadContext(scratch, context_chain_length);
+       return ContextOperand(scratch, slot->index());
+     }
+     case Slot::LOOKUP:
+       UNREACHABLE();
+   }
+   UNREACHABLE();
+   return MemOperand(r0, 0);
+ }
+
+
+ void FullCodeGenerator::Move(Register destination, Slot* source) {
+   // Use destination as scratch.
+   MemOperand slot_operand = EmitSlotSearch(source, destination);
+   __ ldr(destination, slot_operand);
+ }
+
+
+ void FullCodeGenerator::Move(Slot* dst,
+                              Register src,
+                              Register scratch1,
+                              Register scratch2) {
+   ASSERT(dst->type() != Slot::LOOKUP);  // Not yet implemented.
+   ASSERT(!scratch1.is(src) && !scratch2.is(src));
+   MemOperand location = EmitSlotSearch(dst, scratch1);
+   __ str(src, location);
+   // Emit the write barrier code if the location is in the heap.
+   if (dst->type() == Slot::CONTEXT) {
+     __ RecordWrite(scratch1,
+                    Operand(Context::SlotOffset(dst->index())),
+                    scratch2,
+                    src);
+   }
+ }
+
+
+ void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state,
+                                                      bool should_normalize,
+                                                      Label* if_true,
+                                                      Label* if_false) {
+   // Only prepare for bailouts before splits if we're in a test
+   // context. Otherwise, we let the Visit function deal with the
+   // preparation to avoid preparing with the same AST id twice.
+   if (!context()->IsTest() || !info_->IsOptimizable()) return;
+
+   Label skip;
+   if (should_normalize) __ b(&skip);
+
+   ForwardBailoutStack* current = forward_bailout_stack_;
+   while (current != NULL) {
+     PrepareForBailout(current->expr(), state);
+     current = current->parent();
+   }
+
+   if (should_normalize) {
+     __ LoadRoot(ip, Heap::kTrueValueRootIndex);
+     __ cmp(r0, ip);
+     Split(eq, if_true, if_false, NULL);
+     __ bind(&skip);
+   }
+ }
+
+
+ void FullCodeGenerator::EmitDeclaration(Variable* variable,
+                                         Variable::Mode mode,
+                                         FunctionLiteral* function) {
+   Comment cmnt(masm_, "[ Declaration");
+   ASSERT(variable != NULL);  // Must have been resolved.
+   Slot* slot = variable->AsSlot();
+   Property* prop = variable->AsProperty();
+
+   if (slot != NULL) {
+     switch (slot->type()) {
+       case Slot::PARAMETER:
+       case Slot::LOCAL:
+         if (mode == Variable::CONST) {
+           __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+           __ str(ip, MemOperand(fp, SlotOffset(slot)));
+         } else if (function != NULL) {
+           VisitForAccumulatorValue(function);
+           __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
+         }
+         break;
+
+       case Slot::CONTEXT:
+         // We bypass the general EmitSlotSearch because we know more about
+         // this specific context.
+
+         // The variable in the decl always resides in the current function
+         // context.
+         ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
+         if (FLAG_debug_code) {
+           // Check that we're not inside a 'with'.
+           __ ldr(r1, ContextOperand(cp, Context::FCONTEXT_INDEX));
+           __ cmp(r1, cp);
+           __ Check(eq, "Unexpected declaration in current context.");
+         }
+         if (mode == Variable::CONST) {
+           __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+           __ str(ip, ContextOperand(cp, slot->index()));
+           // No write barrier since the_hole_value is in old space.
+         } else if (function != NULL) {
+           VisitForAccumulatorValue(function);
+           __ str(result_register(), ContextOperand(cp, slot->index()));
+           int offset = Context::SlotOffset(slot->index());
+           // We know that we have written a function, which is not a smi.
+           __ mov(r1, Operand(cp));
+           __ RecordWrite(r1, Operand(offset), r2, result_register());
+         }
+         break;
+
+       case Slot::LOOKUP: {
+         __ mov(r2, Operand(variable->name()));
+         // Declaration nodes are always introduced in one of two modes.
+         ASSERT(mode == Variable::VAR ||
+                mode == Variable::CONST);
+         PropertyAttributes attr =
+             (mode == Variable::VAR) ? NONE : READ_ONLY;
+         __ mov(r1, Operand(Smi::FromInt(attr)));
+         // Push initial value, if any.
+         // Note: For variables we must not push an initial value (such as
+         // 'undefined') because we may have a (legal) redeclaration and we
+         // must not destroy the current value.
+         if (mode == Variable::CONST) {
+           __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
+           __ Push(cp, r2, r1, r0);
+         } else if (function != NULL) {
+           __ Push(cp, r2, r1);
+           // Push initial value for function declaration.
+           VisitForStackValue(function);
+         } else {
+           __ mov(r0, Operand(Smi::FromInt(0)));  // No initial value!
+           __ Push(cp, r2, r1, r0);
+         }
+         __ CallRuntime(Runtime::kDeclareContextSlot, 4);
+         break;
+       }
+     }
+
+   } else if (prop != NULL) {
+     if (function != NULL || mode == Variable::CONST) {
+       // We are declaring a function or constant that rewrites to a
+       // property. Use (keyed) IC to set the initial value. We
+       // cannot visit the rewrite because it's shared and we risk
+       // recording duplicate AST IDs for bailouts from optimized code.
+       ASSERT(prop->obj()->AsVariableProxy() != NULL);
+       { AccumulatorValueContext for_object(this);
+         EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
+       }
+       if (function != NULL) {
+         __ push(r0);
+         VisitForAccumulatorValue(function);
+         __ pop(r2);
+       } else {
+         __ mov(r2, r0);
+         __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
+       }
+       ASSERT(prop->key()->AsLiteral() != NULL &&
+              prop->key()->AsLiteral()->handle()->IsSmi());
+       __ mov(r1, Operand(prop->key()->AsLiteral()->handle()));
+
+       Handle<Code> ic = is_strict_mode()
+           ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
+           : isolate()->builtins()->KeyedStoreIC_Initialize();
+       EmitCallIC(ic, RelocInfo::CODE_TARGET);
+       // Value in r0 is ignored (declarations are statements).
+     }
+   }
+ }
+
+
+ void FullCodeGenerator::VisitDeclaration(Declaration* decl) {
+   EmitDeclaration(decl->proxy()->var(), decl->mode(), decl->fun());
+ }
+
+
+ void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
+   // Call the runtime to declare the globals.
+   // The context is the first argument.
+   __ mov(r2, Operand(pairs));
+   __ mov(r1, Operand(Smi::FromInt(is_eval() ? 1 : 0)));
+   __ mov(r0, Operand(Smi::FromInt(strict_mode_flag())));
+   __ Push(cp, r2, r1, r0);
+   __ CallRuntime(Runtime::kDeclareGlobals, 4);
+   // Return value is ignored.
+ }
+
+
+ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
+   Comment cmnt(masm_, "[ SwitchStatement");
+   Breakable nested_statement(this, stmt);
+   SetStatementPosition(stmt);
+
+   // Keep the switch value on the stack until a case matches.
+   VisitForStackValue(stmt->tag());
+   PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
+
+   ZoneList<CaseClause*>* clauses = stmt->cases();
+   CaseClause* default_clause = NULL;  // Can occur anywhere in the list.
+
+   Label next_test;  // Recycled for each test.
+   // Compile all the tests with branches to their bodies.
+   for (int i = 0; i < clauses->length(); i++) {
+     CaseClause* clause = clauses->at(i);
+     clause->body_target()->entry_label()->Unuse();
+
+     // The default is not a test, but remember it as final fall through.
+     if (clause->is_default()) {
+       default_clause = clause;
+       continue;
+     }
+
+     Comment cmnt(masm_, "[ Case comparison");
+     __ bind(&next_test);
+     next_test.Unuse();
+
+     // Compile the label expression.
+     VisitForAccumulatorValue(clause->label());
+
+     // Perform the comparison as if via '==='.
+     __ ldr(r1, MemOperand(sp, 0));  // Switch value.
+     bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
+     JumpPatchSite patch_site(masm_);
+     if (inline_smi_code) {
+       Label slow_case;
+       __ orr(r2, r1, r0);
+       patch_site.EmitJumpIfNotSmi(r2, &slow_case);
+
+       __ cmp(r1, r0);
+       __ b(ne, &next_test);
+       __ Drop(1);  // Switch value is no longer needed.
+       __ b(clause->body_target()->entry_label());
+       __ bind(&slow_case);
+     }
+
+     // Record position before stub call for type feedback.
+     SetSourcePosition(clause->position());
+     Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
+     EmitCallIC(ic, &patch_site);
+     __ cmp(r0, Operand(0));
+     __ b(ne, &next_test);
+     __ Drop(1);  // Switch value is no longer needed.
+     __ b(clause->body_target()->entry_label());
+   }
+
+   // Discard the test value and jump to the default if present, otherwise to
+   // the end of the statement.
+   __ bind(&next_test);
+   __ Drop(1);  // Switch value is no longer needed.
+   if (default_clause == NULL) {
+     __ b(nested_statement.break_target());
+   } else {
+     __ b(default_clause->body_target()->entry_label());
+   }
+
+   // Compile all the case bodies.
+   for (int i = 0; i < clauses->length(); i++) {
+     Comment cmnt(masm_, "[ Case body");
+     CaseClause* clause = clauses->at(i);
+     __ bind(clause->body_target()->entry_label());
+     PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
+     VisitStatements(clause->statements());
+   }
+
+   __ bind(nested_statement.break_target());
+   PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
+ }
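
An aside, not part of the diff: VisitSwitchStatement compiles a switch in two passes, first all the case comparisons (each branching to its body label, with the default handled last), then all the bodies in source order so execution can fall through between cases. A toy standalone sketch of that shape, printing pseudo-instructions:

#include <cstdio>
#include <vector>

struct Clause { bool is_default; int label_value; const char* body; };

static void CompileSwitch(const std::vector<Clause>& clauses) {
  int default_index = -1;
  for (size_t i = 0; i < clauses.size(); i++) {  // Pass 1: tests.
    if (clauses[i].is_default) { default_index = (int)i; continue; }
    printf("  cmp tag, #%d; beq body_%zu\n", clauses[i].label_value, i);
  }
  if (default_index >= 0) {
    printf("  b body_%d\n", default_index);      // No match: go to default.
  } else {
    printf("  b break_target\n");                // No default: exit the switch.
  }
  for (size_t i = 0; i < clauses.size(); i++) {  // Pass 2: bodies.
    printf("body_%zu: ; %s\n", i, clauses[i].body);  // Bodies fall through.
  }
  printf("break_target:\n");
}

int main() {
  std::vector<Clause> clauses;
  Clause c0 = { false, 1, "case 1" };
  Clause c1 = { true, 0, "default" };
  clauses.push_back(c0);
  clauses.push_back(c1);
  CompileSwitch(clauses);
  return 0;
}
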
+
+
+ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
+   Comment cmnt(masm_, "[ ForInStatement");
+   SetStatementPosition(stmt);
+
+   Label loop, exit;
+   ForIn loop_statement(this, stmt);
+   increment_loop_depth();
+
+   // Get the object to enumerate over. Both SpiderMonkey and JSC
+   // ignore null and undefined in contrast to the specification; see
+   // ECMA-262 section 12.6.4.
+   VisitForAccumulatorValue(stmt->enumerable());
+   __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
+   __ cmp(r0, ip);
+   __ b(eq, &exit);
+   Register null_value = r5;
+   __ LoadRoot(null_value, Heap::kNullValueRootIndex);
+   __ cmp(r0, null_value);
+   __ b(eq, &exit);
+
+   // Convert the object to a JS object.
+   Label convert, done_convert;
+   __ JumpIfSmi(r0, &convert);
+   __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
+   __ b(hs, &done_convert);
+   __ bind(&convert);
+   __ push(r0);
+   __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS);
+   __ bind(&done_convert);
+   __ push(r0);
+
+   // Check cache validity in generated code. This is a fast case for
+   // the JSObject::IsSimpleEnum cache validity checks. If we cannot
+   // guarantee cache validity, call the runtime system to check cache
+   // validity or get the property names in a fixed array.
+   Label next, call_runtime;
+   // Preload a couple of values used in the loop.
+   Register empty_fixed_array_value = r6;
+   __ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
+   Register empty_descriptor_array_value = r7;
+   __ LoadRoot(empty_descriptor_array_value,
+               Heap::kEmptyDescriptorArrayRootIndex);
+   __ mov(r1, r0);
+   __ bind(&next);
+
+   // Check that there are no elements. Register r1 contains the
+   // current JS object we've reached through the prototype chain.
+   __ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset));
+   __ cmp(r2, empty_fixed_array_value);
+   __ b(ne, &call_runtime);
+
+   // Check that instance descriptors are not empty so that we can
+   // check for an enum cache. Leave the map in r2 for the subsequent
+   // prototype load.
+   __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
+   __ ldr(r3, FieldMemOperand(r2, Map::kInstanceDescriptorsOffset));
+   __ cmp(r3, empty_descriptor_array_value);
+   __ b(eq, &call_runtime);
+
+   // Check that there is an enum cache in the non-empty instance
+   // descriptors (r3). This is the case if the next enumeration
+   // index field does not contain a smi.
+   __ ldr(r3, FieldMemOperand(r3, DescriptorArray::kEnumerationIndexOffset));
+   __ JumpIfSmi(r3, &call_runtime);
+
+   // For all objects but the receiver, check that the cache is empty.
+   Label check_prototype;
+   __ cmp(r1, r0);
+   __ b(eq, &check_prototype);
+   __ ldr(r3, FieldMemOperand(r3, DescriptorArray::kEnumCacheBridgeCacheOffset));
+   __ cmp(r3, empty_fixed_array_value);
+   __ b(ne, &call_runtime);
+
+   // Load the prototype from the map and loop if non-null.
+   __ bind(&check_prototype);
+   __ ldr(r1, FieldMemOperand(r2, Map::kPrototypeOffset));
+   __ cmp(r1, null_value);
+   __ b(ne, &next);
+
+   // The enum cache is valid. Load the map of the object being
+   // iterated over and use the cache for the iteration.
+   Label use_cache;
+   __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
+   __ b(&use_cache);
+
+   // Get the set of properties to enumerate.
+   __ bind(&call_runtime);
+   __ push(r0);  // Duplicate the enumerable object on the stack.
+   __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
+
+   // If we got a map from the runtime call, we can do a fast
+   // modification check. Otherwise, we got a fixed array, and we have
+   // to do a slow check.
+   Label fixed_array;
+   __ mov(r2, r0);
+   __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
+   __ LoadRoot(ip, Heap::kMetaMapRootIndex);
+   __ cmp(r1, ip);
+   __ b(ne, &fixed_array);
+
+   // We got a map in register r0. Get the enumeration cache from it.
+   __ bind(&use_cache);
+   __ ldr(r1, FieldMemOperand(r0, Map::kInstanceDescriptorsOffset));
+   __ ldr(r1, FieldMemOperand(r1, DescriptorArray::kEnumerationIndexOffset));
+   __ ldr(r2, FieldMemOperand(r1, DescriptorArray::kEnumCacheBridgeCacheOffset));
+
+   // Set up the four remaining stack slots.
+   __ push(r0);  // Map.
+   __ ldr(r1, FieldMemOperand(r2, FixedArray::kLengthOffset));
+   __ mov(r0, Operand(Smi::FromInt(0)));
+   // Push enumeration cache, enumeration cache length (as smi) and zero.
+   __ Push(r2, r1, r0);
+   __ jmp(&loop);
+
+   // We got a fixed array in register r0. Iterate through that.
+   __ bind(&fixed_array);
+   __ mov(r1, Operand(Smi::FromInt(0)));  // Map (0) - force slow check.
+   __ Push(r1, r0);
+   __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
+   __ mov(r0, Operand(Smi::FromInt(0)));
+   __ Push(r1, r0);  // Fixed array length (as smi) and initial index.
+
+   // Generate code for doing the condition check.
+   __ bind(&loop);
+   // Load the current count to r0, load the length to r1.
+   __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
+   __ cmp(r0, r1);  // Compare to the array length.
+   __ b(hs, loop_statement.break_target());
+
+   // Get the current entry of the array into register r3.
+   __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
+   __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+   __ ldr(r3, MemOperand(r2, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
+
+   // Get the expected map from the stack or a zero map in the
+   // permanent slow case into register r2.
+   __ ldr(r2, MemOperand(sp, 3 * kPointerSize));
+
+   // Check if the expected map still matches that of the enumerable.
+   // If not, we have to filter the key.
+   Label update_each;
+   __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
+   __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
+   __ cmp(r4, Operand(r2));
+   __ b(eq, &update_each);
+
+   // Convert the entry to a string or (smi) 0 if it isn't a property
+   // any more. If the property has been removed while iterating, we
+   // just skip it.
+   __ push(r1);  // Enumerable.
+   __ push(r3);  // Current entry.
+   __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_JS);
+   __ mov(r3, Operand(r0), SetCC);
+   __ b(eq, loop_statement.continue_target());
+
+   // Update the 'each' property or variable from the possibly filtered
+   // entry in register r3.
+   __ bind(&update_each);
+   __ mov(result_register(), r3);
+   // Perform the assignment as if via '='.
+   { EffectContext context(this);
+     EmitAssignment(stmt->each(), stmt->AssignmentId());
+   }
+
+   // Generate code for the body of the loop.
+   Visit(stmt->body());
+
+   // Generate code for going to the next element by incrementing
+   // the index (smi) stored on top of the stack.
+   __ bind(loop_statement.continue_target());
+   __ pop(r0);
+   __ add(r0, r0, Operand(Smi::FromInt(1)));
+   __ push(r0);
+
+   EmitStackCheck(stmt);
+   __ b(&loop);
+
+   // Remove the pointers stored on the stack.
+   __ bind(loop_statement.break_target());
+   __ Drop(5);
+
+   // Exit and decrement the loop depth.
+   __ bind(&exit);
+   decrement_loop_depth();
+ }
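
An aside, not part of the diff: throughout the for-in loop above, five values live on the stack (hence the final Drop(5)). The layout can be read directly off the MemOperand displacements in the code; a standalone summary as named constants, with kPointerSize assumed to be 4:

static const int kPointerSize = 4;

// sp-relative offsets of the five for-in stack slots inside the loop,
// inferred from the MemOperand(sp, n * kPointerSize) uses above.
static const int kIndexOffset       = 0 * kPointerSize;  // Current index (smi).
static const int kLengthOffset      = 1 * kPointerSize;  // Cache length (smi).
static const int kEnumCacheOffset   = 2 * kPointerSize;  // Enum cache / fixed array.
static const int kExpectedMapOffset = 3 * kPointerSize;  // Map, or smi 0 in the slow case.
static const int kEnumerableOffset  = 4 * kPointerSize;  // The object being iterated over.

int main() { return 0; }
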
1077
+
1078
+
1079
+ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1080
+ bool pretenure) {
1081
+ // Use the fast case closure allocation code that allocates in new
1082
+ // space for nested functions that don't need literals cloning. If
1083
+ // we're running with the --always-opt or the --prepare-always-opt
1084
+ // flag, we need to use the runtime function so that the new function
1085
+ // we are creating here gets a chance to have its code optimized and
1086
+ // doesn't just get a copy of the existing unoptimized code.
1087
+ if (!FLAG_always_opt &&
1088
+ !FLAG_prepare_always_opt &&
1089
+ !pretenure &&
1090
+ scope()->is_function_scope() &&
1091
+ info->num_literals() == 0) {
1092
+ FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode);
1093
+ __ mov(r0, Operand(info));
1094
+ __ push(r0);
1095
+ __ CallStub(&stub);
1096
+ } else {
1097
+ __ mov(r0, Operand(info));
1098
+ __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
1099
+ : Heap::kFalseValueRootIndex);
1100
+ __ Push(cp, r0, r1);
1101
+ __ CallRuntime(Runtime::kNewClosure, 3);
1102
+ }
1103
+ context()->Plug(r0);
1104
+ }
1105
+
1106
+
1107
+ void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1108
+ Comment cmnt(masm_, "[ VariableProxy");
1109
+ EmitVariableLoad(expr->var());
1110
+ }
1111
+
1112
+
1113
+ MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
1114
+ Slot* slot,
1115
+ Label* slow) {
1116
+ ASSERT(slot->type() == Slot::CONTEXT);
1117
+ Register context = cp;
1118
+ Register next = r3;
1119
+ Register temp = r4;
1120
+
1121
+ for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
1122
+ if (s->num_heap_slots() > 0) {
1123
+ if (s->calls_eval()) {
1124
+ // Check that extension is NULL.
1125
+ __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1126
+ __ tst(temp, temp);
1127
+ __ b(ne, slow);
1128
+ }
1129
+ __ ldr(next, ContextOperand(context, Context::CLOSURE_INDEX));
1130
+ __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
1131
+ // Walk the rest of the chain without clobbering cp.
1132
+ context = next;
1133
+ }
1134
+ }
1135
+ // Check that last extension is NULL.
1136
+ __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1137
+ __ tst(temp, temp);
1138
+ __ b(ne, slow);
1139
+
1140
+ // This function is used only for loads, not stores, so it's safe to
1141
+ // return an cp-based operand (the write barrier cannot be allowed to
1142
+ // destroy the cp register).
1143
+ return ContextOperand(context, slot->index());
1144
+ }
1145
+
1146
+
1147
+ void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
1148
+ Slot* slot,
1149
+ TypeofState typeof_state,
1150
+ Label* slow,
1151
+ Label* done) {
1152
+ // Generate fast-case code for variables that might be shadowed by
1153
+ // eval-introduced variables. Eval is used a lot without
1154
+ // introducing variables. In those cases, we do not want to
1155
+ // perform a runtime call for all variables in the scope
1156
+ // containing the eval.
1157
+ if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
1158
+ EmitLoadGlobalSlotCheckExtensions(slot, typeof_state, slow);
1159
+ __ jmp(done);
1160
+ } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
1161
+ Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot();
1162
+ Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
1163
+ if (potential_slot != NULL) {
1164
+ // Generate fast case for locals that rewrite to slots.
1165
+ __ ldr(r0, ContextSlotOperandCheckExtensions(potential_slot, slow));
1166
+ if (potential_slot->var()->mode() == Variable::CONST) {
1167
+ __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
1168
+ __ cmp(r0, ip);
1169
+ __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1170
+ }
1171
+ __ jmp(done);
1172
+ } else if (rewrite != NULL) {
1173
+ // Generate fast case for calls of an argument function.
1174
+ Property* property = rewrite->AsProperty();
1175
+ if (property != NULL) {
1176
+ VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
1177
+ Literal* key_literal = property->key()->AsLiteral();
1178
+ if (obj_proxy != NULL &&
1179
+ key_literal != NULL &&
1180
+ obj_proxy->IsArguments() &&
1181
+ key_literal->handle()->IsSmi()) {
1182
+ // Load arguments object if there are no eval-introduced
1183
+ // variables. Then load the argument from the arguments
1184
+ // object using keyed load.
1185
+ __ ldr(r1,
1186
+ ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
1187
+ slow));
1188
+ __ mov(r0, Operand(key_literal->handle()));
1189
+ Handle<Code> ic =
1190
+ isolate()->builtins()->KeyedLoadIC_Initialize();
1191
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
1192
+ __ jmp(done);
1193
+ }
1194
+ }
1195
+ }
1196
+ }
1197
+ }
1198
+
1199
+
1200
+ void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
1201
+ Slot* slot,
1202
+ TypeofState typeof_state,
1203
+ Label* slow) {
1204
+ Register current = cp;
1205
+ Register next = r1;
1206
+ Register temp = r2;
1207
+
1208
+ Scope* s = scope();
1209
+ while (s != NULL) {
1210
+ if (s->num_heap_slots() > 0) {
1211
+ if (s->calls_eval()) {
1212
+ // Check that extension is NULL.
1213
+ __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1214
+ __ tst(temp, temp);
1215
+ __ b(ne, slow);
1216
+ }
1217
+ // Load next context in chain.
1218
+ __ ldr(next, ContextOperand(current, Context::CLOSURE_INDEX));
1219
+ __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
1220
+ // Walk the rest of the chain without clobbering cp.
1221
+ current = next;
1222
+ }
1223
+ // If no outer scope calls eval, we do not need to check more
1224
+ // context extensions.
1225
+ if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
1226
+ s = s->outer_scope();
1227
+ }
1228
+
1229
+ if (s->is_eval_scope()) {
1230
+ Label loop, fast;
1231
+ if (!current.is(next)) {
1232
+ __ Move(next, current);
1233
+ }
1234
+ __ bind(&loop);
1235
+ // Terminate at global context.
1236
+ __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1237
+ __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
1238
+ __ cmp(temp, ip);
1239
+ __ b(eq, &fast);
1240
+ // Check that extension is NULL.
1241
+ __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1242
+ __ tst(temp, temp);
1243
+ __ b(ne, slow);
1244
+ // Load next context in chain.
1245
+ __ ldr(next, ContextOperand(next, Context::CLOSURE_INDEX));
1246
+ __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
1247
+ __ b(&loop);
1248
+ __ bind(&fast);
1249
+ }
1250
+
1251
+ __ ldr(r0, GlobalObjectOperand());
1252
+ __ mov(r2, Operand(slot->var()->name()));
1253
+ RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
1254
+ ? RelocInfo::CODE_TARGET
1255
+ : RelocInfo::CODE_TARGET_CONTEXT;
1256
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1257
+ EmitCallIC(ic, mode);
1258
+ }
1259
+
+
+ void FullCodeGenerator::EmitVariableLoad(Variable* var) {
+ // Four cases: non-this global variables, lookup slots, all other
+ // types of slots, and parameters that rewrite to explicit property
+ // accesses on the arguments object.
+ Slot* slot = var->AsSlot();
+ Property* property = var->AsProperty();
+
+ if (var->is_global() && !var->is_this()) {
+ Comment cmnt(masm_, "Global variable");
+ // Use inline caching. Variable name is passed in r2 and the global
+ // object (receiver) in r0.
+ __ ldr(r0, GlobalObjectOperand());
+ __ mov(r2, Operand(var->name()));
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
+ EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
+ context()->Plug(r0);
+
+ } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
+ Label done, slow;
+
+ // Generate code for loading from variables potentially shadowed
+ // by eval-introduced variables.
+ EmitDynamicLoadFromSlotFastCase(slot, NOT_INSIDE_TYPEOF, &slow, &done);
+
+ __ bind(&slow);
+ Comment cmnt(masm_, "Lookup slot");
+ __ mov(r1, Operand(var->name()));
+ __ Push(cp, r1); // Context and name.
+ __ CallRuntime(Runtime::kLoadContextSlot, 2);
+ __ bind(&done);
+
+ context()->Plug(r0);
+
+ } else if (slot != NULL) {
+ Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
+ ? "Context slot"
+ : "Stack slot");
+ if (var->mode() == Variable::CONST) {
+ // Constants may be the hole value if they have not been initialized.
+ // Unhole them.
+ MemOperand slot_operand = EmitSlotSearch(slot, r0);
+ __ ldr(r0, slot_operand);
+ __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+ __ cmp(r0, ip);
+ __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
+ context()->Plug(r0);
+ } else {
+ context()->Plug(slot);
+ }
+ } else {
+ Comment cmnt(masm_, "Rewritten parameter");
+ ASSERT_NOT_NULL(property);
+ // Rewritten parameter accesses are of the form "slot[literal]".
+
+ // Assert that the object is in a slot.
+ Variable* object_var = property->obj()->AsVariableProxy()->AsVariable();
+ ASSERT_NOT_NULL(object_var);
+ Slot* object_slot = object_var->AsSlot();
+ ASSERT_NOT_NULL(object_slot);
+
+ // Load the object.
+ Move(r1, object_slot);
+
+ // Assert that the key is a smi.
+ Literal* key_literal = property->key()->AsLiteral();
+ ASSERT_NOT_NULL(key_literal);
+ ASSERT(key_literal->handle()->IsSmi());
+
+ // Load the key.
+ __ mov(r0, Operand(key_literal->handle()));
+
+ // Call keyed load IC. It has arguments key and receiver in r0 and r1.
+ Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ context()->Plug(r0);
+ }
+ }
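Throughout these loads, FieldMemOperand and ContextOperand fold V8's heap-object tagging into the address arithmetic: heap pointers are tagged by adding 1, so a field at byte offset k lives at object + k - 1, and context slot i sits past a fixed header. A standalone sketch of that math (constants match V8's 32-bit layout; the helper functions are illustrative, not V8 API):

#include <cstdint>
#include <cassert>

const intptr_t kHeapObjectTag = 1;   // low bit set on heap pointers
const int kPointerSize = 4;          // 32-bit ARM build
const int kFixedArrayHeaderSize = 8; // map word + length word

// FieldMemOperand(obj, offset) analog: untag and index in one displacement.
intptr_t FieldAddress(intptr_t tagged_object, int byte_offset) {
  assert((tagged_object & kHeapObjectTag) != 0);
  return tagged_object + byte_offset - kHeapObjectTag;
}

// Context::SlotOffset(i) analog: slots follow the FixedArray header.
int ContextSlotOffset(int index) {
  return kFixedArrayHeaderSize + index * kPointerSize - kHeapObjectTag;
}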
+
+
+ void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
+ Comment cmnt(masm_, "[ RegExpLiteral");
+ Label materialized;
+ // Registers will be used as follows:
+ // r5 = materialized value (RegExp literal)
+ // r4 = JS function, literals array
+ // r3 = literal index
+ // r2 = RegExp pattern
+ // r1 = RegExp flags
+ // r0 = RegExp literal clone
+ __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+ __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
+ int literal_offset =
+ FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
+ __ ldr(r5, FieldMemOperand(r4, literal_offset));
+ __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
+ __ cmp(r5, ip);
+ __ b(ne, &materialized);
+
+ // Create regexp literal using runtime function.
+ // Result will be in r0.
+ __ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
+ __ mov(r2, Operand(expr->pattern()));
+ __ mov(r1, Operand(expr->flags()));
+ __ Push(r4, r3, r2, r1);
+ __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
+ __ mov(r5, r0);
+
+ __ bind(&materialized);
+ int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
+ Label allocated, runtime_allocate;
+ __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
+ __ jmp(&allocated);
+
+ __ bind(&runtime_allocate);
+ __ push(r5);
+ __ mov(r0, Operand(Smi::FromInt(size)));
+ __ push(r0);
+ __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
+ __ pop(r5);
+
+ __ bind(&allocated);
+ // After this, registers are used as follows:
+ // r0: Newly allocated regexp.
+ // r5: Materialized regexp.
+ // r2: temp.
+ __ CopyFields(r0, r5, r2.bit(), size / kPointerSize);
+ context()->Plug(r0);
+ }
+
+
+ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
+ Comment cmnt(masm_, "[ ObjectLiteral");
+ __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+ __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
+ __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
+ __ mov(r1, Operand(expr->constant_properties()));
+ int flags = expr->fast_elements()
+ ? ObjectLiteral::kFastElements
+ : ObjectLiteral::kNoFlags;
+ flags |= expr->has_function()
+ ? ObjectLiteral::kHasFunction
+ : ObjectLiteral::kNoFlags;
+ __ mov(r0, Operand(Smi::FromInt(flags)));
+ __ Push(r3, r2, r1, r0);
+ if (expr->depth() > 1) {
+ __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
+ } else {
+ __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
+ }
+
+ // If result_saved is true the result is on top of the stack. If
+ // result_saved is false the result is in r0.
+ bool result_saved = false;
+
+ // Mark all computed expressions that are bound to a key that
+ // is shadowed by a later occurrence of the same key. For the
+ // marked expressions, no store code is emitted.
+ expr->CalculateEmitStore();
+
+ for (int i = 0; i < expr->properties()->length(); i++) {
+ ObjectLiteral::Property* property = expr->properties()->at(i);
+ if (property->IsCompileTimeValue()) continue;
+
+ Literal* key = property->key();
+ Expression* value = property->value();
+ if (!result_saved) {
+ __ push(r0); // Save result on stack
+ result_saved = true;
+ }
+ switch (property->kind()) {
+ case ObjectLiteral::Property::CONSTANT:
+ UNREACHABLE();
+ case ObjectLiteral::Property::MATERIALIZED_LITERAL:
+ ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
+ // Fall through.
+ case ObjectLiteral::Property::COMPUTED:
+ if (key->handle()->IsSymbol()) {
+ if (property->emit_store()) {
+ VisitForAccumulatorValue(value);
+ __ mov(r2, Operand(key->handle()));
+ __ ldr(r1, MemOperand(sp));
+ Handle<Code> ic = isolate()->builtins()->StoreIC_Initialize();
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ PrepareForBailoutForId(key->id(), NO_REGISTERS);
+ } else {
+ VisitForEffect(value);
+ }
+ break;
+ }
+ // Fall through.
+ case ObjectLiteral::Property::PROTOTYPE:
+ // Duplicate receiver on stack.
+ __ ldr(r0, MemOperand(sp));
+ __ push(r0);
+ VisitForStackValue(key);
+ VisitForStackValue(value);
+ if (property->emit_store()) {
+ __ mov(r0, Operand(Smi::FromInt(NONE))); // PropertyAttributes
+ __ push(r0);
+ __ CallRuntime(Runtime::kSetProperty, 4);
+ } else {
+ __ Drop(3);
+ }
+ break;
+ case ObjectLiteral::Property::GETTER:
+ case ObjectLiteral::Property::SETTER:
+ // Duplicate receiver on stack.
+ __ ldr(r0, MemOperand(sp));
+ __ push(r0);
+ VisitForStackValue(key);
+ __ mov(r1, Operand(property->kind() == ObjectLiteral::Property::SETTER ?
+ Smi::FromInt(1) :
+ Smi::FromInt(0)));
+ __ push(r1);
+ VisitForStackValue(value);
+ __ CallRuntime(Runtime::kDefineAccessor, 4);
+ break;
+ }
+ }
+
+ if (expr->has_function()) {
+ ASSERT(result_saved);
+ __ ldr(r0, MemOperand(sp));
+ __ push(r0);
+ __ CallRuntime(Runtime::kToFastProperties, 1);
+ }
+
+ if (result_saved) {
+ context()->PlugTOS();
+ } else {
+ context()->Plug(r0);
+ }
+ }
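The literal-creation flags above are packed into a single smi before the runtime call. A small self-contained illustration of that packing (the enum values are assumptions mirroring kFastElements/kHasFunction as distinct bits):

#include <cstdio>

enum ObjectLiteralFlags {
  kNoFlags = 0,
  kFastElements = 1,      // elements backing store can stay fast
  kHasFunction = 1 << 1   // literal contains a function-valued property
};

// Smi::FromInt analog on a 32-bit build: shift the payload past the tag bit.
inline int SmiFromInt(int value) { return value << 1; }

int main() {
  int flags = true ? kFastElements : kNoFlags;   // expr->fast_elements()
  flags |= false ? kHasFunction : kNoFlags;      // expr->has_function()
  printf("flags=%d smi=%d\n", flags, SmiFromInt(flags));  // flags=1 smi=2
  return 0;
}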
+
+
+ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
+ Comment cmnt(masm_, "[ ArrayLiteral");
+
+ ZoneList<Expression*>* subexprs = expr->values();
+ int length = subexprs->length();
+
+ __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+ __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
+ __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
+ __ mov(r1, Operand(expr->constant_elements()));
+ __ Push(r3, r2, r1);
+ if (expr->constant_elements()->map() ==
+ isolate()->heap()->fixed_cow_array_map()) {
+ FastCloneShallowArrayStub stub(
+ FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
+ __ CallStub(&stub);
+ __ IncrementCounter(
+ isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
+ } else if (expr->depth() > 1) {
+ __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
+ } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
+ __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
+ } else {
+ FastCloneShallowArrayStub stub(
+ FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
+ __ CallStub(&stub);
+ }
+
+ bool result_saved = false; // Is the result saved to the stack?
+
+ // Emit code to evaluate all the non-constant subexpressions and to store
+ // them into the newly cloned array.
+ for (int i = 0; i < length; i++) {
+ Expression* subexpr = subexprs->at(i);
+ // If the subexpression is a literal or a simple materialized literal it
+ // is already set in the cloned array.
+ if (subexpr->AsLiteral() != NULL ||
+ CompileTimeValue::IsCompileTimeValue(subexpr)) {
+ continue;
+ }
+
+ if (!result_saved) {
+ __ push(r0);
+ result_saved = true;
+ }
+ VisitForAccumulatorValue(subexpr);
+
+ // Store the subexpression value in the array's elements.
+ __ ldr(r1, MemOperand(sp)); // Copy of array literal.
+ __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
+ int offset = FixedArray::kHeaderSize + (i * kPointerSize);
+ __ str(result_register(), FieldMemOperand(r1, offset));
+
+ // Update the write barrier for the array store with r0 as the scratch
+ // register.
+ __ RecordWrite(r1, Operand(offset), r2, result_register());
+
+ PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
+ }
+
+ if (result_saved) {
+ context()->PlugTOS();
+ } else {
+ context()->Plug(r0);
+ }
+ }
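Each element store is followed by RecordWrite so the generational GC learns about a pointer that may now lead from an old-space array into new space. A minimal card-marking sketch of the idea (purely conceptual; V8's actual barrier works differently and the constants here are invented):

#include <cstdint>
#include <cstddef>

const uintptr_t kCardShift = 9;      // hypothetical 512-byte cards
const size_t kCards = 1 << 20;       // hypothetical table size
static uint8_t card_table[kCards];

// After writing a pointer at 'slot', mark its card so the next minor
// GC rescans that region for old-to-new references.
void WriteBarrier(uintptr_t slot) {
  card_table[(slot >> kCardShift) & (kCards - 1)] = 1;
}

void StoreWithBarrier(void** slot, void* value) {
  *slot = value;                                     // the str above
  WriteBarrier(reinterpret_cast<uintptr_t>(slot));   // the RecordWrite above
}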
+
+
+ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
+ Comment cmnt(masm_, "[ Assignment");
+ // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
+ // on the left-hand side.
+ if (!expr->target()->IsValidLeftHandSide()) {
+ VisitForEffect(expr->target());
+ return;
+ }
+
+ // Left-hand side can only be a property, a global or a (parameter or local)
+ // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
+ enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
+ LhsKind assign_type = VARIABLE;
+ Property* property = expr->target()->AsProperty();
+ if (property != NULL) {
+ assign_type = (property->key()->IsPropertyName())
+ ? NAMED_PROPERTY
+ : KEYED_PROPERTY;
+ }
+
+ // Evaluate LHS expression.
+ switch (assign_type) {
+ case VARIABLE:
+ // Nothing to do here.
+ break;
+ case NAMED_PROPERTY:
+ if (expr->is_compound()) {
+ // We need the receiver both on the stack and in the accumulator.
+ VisitForAccumulatorValue(property->obj());
+ __ push(result_register());
+ } else {
+ VisitForStackValue(property->obj());
+ }
+ break;
+ case KEYED_PROPERTY:
+ if (expr->is_compound()) {
+ if (property->is_arguments_access()) {
+ VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
+ __ ldr(r0, EmitSlotSearch(obj_proxy->var()->AsSlot(), r0));
+ __ push(r0);
+ __ mov(r0, Operand(property->key()->AsLiteral()->handle()));
+ } else {
+ VisitForStackValue(property->obj());
+ VisitForAccumulatorValue(property->key());
+ }
+ __ ldr(r1, MemOperand(sp, 0));
+ __ push(r0);
+ } else {
+ if (property->is_arguments_access()) {
+ VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
+ __ ldr(r1, EmitSlotSearch(obj_proxy->var()->AsSlot(), r0));
+ __ mov(r0, Operand(property->key()->AsLiteral()->handle()));
+ __ Push(r1, r0);
+ } else {
+ VisitForStackValue(property->obj());
+ VisitForStackValue(property->key());
+ }
+ }
+ break;
+ }
+
+ // For compound assignments we need another deoptimization point after the
+ // variable/property load.
+ if (expr->is_compound()) {
+ { AccumulatorValueContext context(this);
+ switch (assign_type) {
+ case VARIABLE:
+ EmitVariableLoad(expr->target()->AsVariableProxy()->var());
+ PrepareForBailout(expr->target(), TOS_REG);
+ break;
+ case NAMED_PROPERTY:
+ EmitNamedPropertyLoad(property);
+ PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
+ break;
+ case KEYED_PROPERTY:
+ EmitKeyedPropertyLoad(property);
+ PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
+ break;
+ }
+ }
+
+ Token::Value op = expr->binary_op();
+ __ push(r0); // Left operand goes on the stack.
+ VisitForAccumulatorValue(expr->value());
+
+ OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
+ ? OVERWRITE_RIGHT
+ : NO_OVERWRITE;
+ SetSourcePosition(expr->position() + 1);
+ AccumulatorValueContext context(this);
+ if (ShouldInlineSmiCase(op)) {
+ EmitInlineSmiBinaryOp(expr,
+ op,
+ mode,
+ expr->target(),
+ expr->value());
+ } else {
+ EmitBinaryOp(op, mode);
+ }
+
+ // Deoptimization point in case the binary operation may have side effects.
+ PrepareForBailout(expr->binary_operation(), TOS_REG);
+ } else {
+ VisitForAccumulatorValue(expr->value());
+ }
+
+ // Record source position before possible IC call.
+ SetSourcePosition(expr->position());
+
+ // Store the value.
+ switch (assign_type) {
+ case VARIABLE:
+ EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
+ expr->op());
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
+ context()->Plug(r0);
+ break;
+ case NAMED_PROPERTY:
+ EmitNamedPropertyAssignment(expr);
+ break;
+ case KEYED_PROPERTY:
+ EmitKeyedPropertyAssignment(expr);
+ break;
+ }
+ }
+
+
+ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
+ SetSourcePosition(prop->position());
+ Literal* key = prop->key()->AsLiteral();
+ __ mov(r2, Operand(key->handle()));
+ // Call load IC. It has arguments receiver and property name in r0 and r2.
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ }
+
+
+ void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
+ SetSourcePosition(prop->position());
+ // Call keyed load IC. It has arguments key and receiver in r0 and r1.
+ Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ }
+
+
+ void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
+ Token::Value op,
+ OverwriteMode mode,
+ Expression* left_expr,
+ Expression* right_expr) {
+ Label done, smi_case, stub_call;
+
+ Register scratch1 = r2;
+ Register scratch2 = r3;
+
+ // Get the arguments.
+ Register left = r1;
+ Register right = r0;
+ __ pop(left);
+
+ // Perform combined smi check on both operands.
+ __ orr(scratch1, left, Operand(right));
+ STATIC_ASSERT(kSmiTag == 0);
+ JumpPatchSite patch_site(masm_);
+ patch_site.EmitJumpIfSmi(scratch1, &smi_case);
+
+ __ bind(&stub_call);
+ TypeRecordingBinaryOpStub stub(op, mode);
+ EmitCallIC(stub.GetCode(), &patch_site);
+ __ jmp(&done);
+
+ __ bind(&smi_case);
+ // Smi case. This code works the same way as the smi-smi case in the type
+ // recording binary operation stub, see
+ // TypeRecordingBinaryOpStub::GenerateSmiSmiOperation for comments.
+ switch (op) {
+ case Token::SAR:
+ __ b(&stub_call);
+ __ GetLeastBitsFromSmi(scratch1, right, 5);
+ __ mov(right, Operand(left, ASR, scratch1));
+ __ bic(right, right, Operand(kSmiTagMask));
+ break;
+ case Token::SHL: {
+ __ b(&stub_call);
+ __ SmiUntag(scratch1, left);
+ __ GetLeastBitsFromSmi(scratch2, right, 5);
+ __ mov(scratch1, Operand(scratch1, LSL, scratch2));
+ __ add(scratch2, scratch1, Operand(0x40000000), SetCC);
+ __ b(mi, &stub_call);
+ __ SmiTag(right, scratch1);
+ break;
+ }
+ case Token::SHR: {
+ __ b(&stub_call);
+ __ SmiUntag(scratch1, left);
+ __ GetLeastBitsFromSmi(scratch2, right, 5);
+ __ mov(scratch1, Operand(scratch1, LSR, scratch2));
+ __ tst(scratch1, Operand(0xc0000000));
+ __ b(ne, &stub_call);
+ __ SmiTag(right, scratch1);
+ break;
+ }
+ case Token::ADD:
+ __ add(scratch1, left, Operand(right), SetCC);
+ __ b(vs, &stub_call);
+ __ mov(right, scratch1);
+ break;
+ case Token::SUB:
+ __ sub(scratch1, left, Operand(right), SetCC);
+ __ b(vs, &stub_call);
+ __ mov(right, scratch1);
+ break;
+ case Token::MUL: {
+ __ SmiUntag(ip, right);
+ __ smull(scratch1, scratch2, left, ip);
+ __ mov(ip, Operand(scratch1, ASR, 31));
+ __ cmp(ip, Operand(scratch2));
+ __ b(ne, &stub_call);
+ __ tst(scratch1, Operand(scratch1));
+ __ mov(right, Operand(scratch1), LeaveCC, ne);
+ __ b(ne, &done);
+ __ add(scratch2, right, Operand(left), SetCC);
+ __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
+ __ b(mi, &stub_call);
+ break;
+ }
+ case Token::BIT_OR:
+ __ orr(right, left, Operand(right));
+ break;
+ case Token::BIT_AND:
+ __ and_(right, left, Operand(right));
+ break;
+ case Token::BIT_XOR:
+ __ eor(right, left, Operand(right));
+ break;
+ default:
+ UNREACHABLE();
+ }
+
+ __ bind(&done);
+ context()->Plug(r0);
+ }
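The smi fast path relies on V8's 32-bit smi encoding: a small integer n is stored as n << 1 with tag bit 0, so ADD/SUB work directly on tagged values (overflow caught via the V flag) while MUL untags one operand and validates the 64-bit product. A host-side sketch of those checks, assuming that encoding:

#include <cstdint>

inline int32_t SmiTag(int32_t v) { return v << 1; }
inline int32_t SmiUntag(int32_t s) { return s >> 1; }

// ADD on tagged values: the sum is the tagged result unless it overflows.
bool SmiAdd(int32_t a, int32_t b, int32_t* out) {
  int64_t sum = (int64_t)a + b;                   // add ... SetCC
  if (sum != (int32_t)sum) return false;          // b(vs, &stub_call)
  *out = (int32_t)sum;
  return true;
}

// MUL: untag one side, then require the 64-bit product to fit in 32 bits
// (the smull / ASR-31 comparison above) and reject the -0 case.
bool SmiMul(int32_t a, int32_t b, int32_t* out) {
  int64_t product = (int64_t)a * SmiUntag(b);     // smull
  if ((int32_t)product != product) return false;  // high word mismatch
  if (product == 0 && (a + b) < 0) return false;  // result would be -0
  *out = (int32_t)product;
  return true;
}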
+
+
+ void FullCodeGenerator::EmitBinaryOp(Token::Value op,
+ OverwriteMode mode) {
+ __ pop(r1);
+ TypeRecordingBinaryOpStub stub(op, mode);
+ EmitCallIC(stub.GetCode(), NULL);
+ context()->Plug(r0);
+ }
+
+
+ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
+ // Invalid left-hand sides are rewritten to have a 'throw
+ // ReferenceError' on the left-hand side.
+ if (!expr->IsValidLeftHandSide()) {
+ VisitForEffect(expr);
+ return;
+ }
+
+ // Left-hand side can only be a property, a global or a (parameter or local)
+ // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
+ enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
+ LhsKind assign_type = VARIABLE;
+ Property* prop = expr->AsProperty();
+ if (prop != NULL) {
+ assign_type = (prop->key()->IsPropertyName())
+ ? NAMED_PROPERTY
+ : KEYED_PROPERTY;
+ }
+
+ switch (assign_type) {
+ case VARIABLE: {
+ Variable* var = expr->AsVariableProxy()->var();
+ EffectContext context(this);
+ EmitVariableAssignment(var, Token::ASSIGN);
+ break;
+ }
+ case NAMED_PROPERTY: {
+ __ push(r0); // Preserve value.
+ VisitForAccumulatorValue(prop->obj());
+ __ mov(r1, r0);
+ __ pop(r0); // Restore value.
+ __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
+ Handle<Code> ic = is_strict_mode()
+ ? isolate()->builtins()->StoreIC_Initialize_Strict()
+ : isolate()->builtins()->StoreIC_Initialize();
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ break;
+ }
+ case KEYED_PROPERTY: {
+ __ push(r0); // Preserve value.
+ if (prop->is_synthetic()) {
+ ASSERT(prop->obj()->AsVariableProxy() != NULL);
+ ASSERT(prop->key()->AsLiteral() != NULL);
+ { AccumulatorValueContext for_object(this);
+ EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
+ }
+ __ mov(r2, r0);
+ __ mov(r1, Operand(prop->key()->AsLiteral()->handle()));
+ } else {
+ VisitForStackValue(prop->obj());
+ VisitForAccumulatorValue(prop->key());
+ __ mov(r1, r0);
+ __ pop(r2);
+ }
+ __ pop(r0); // Restore value.
+ Handle<Code> ic = is_strict_mode()
+ ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
+ : isolate()->builtins()->KeyedStoreIC_Initialize();
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ break;
+ }
+ }
+ PrepareForBailoutForId(bailout_ast_id, TOS_REG);
+ context()->Plug(r0);
+ }
+
+
+ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
+ Token::Value op) {
+ // Left-hand sides that rewrite to explicit property accesses do not reach
+ // here.
+ ASSERT(var != NULL);
+ ASSERT(var->is_global() || var->AsSlot() != NULL);
+
+ if (var->is_global()) {
+ ASSERT(!var->is_this());
+ // Assignment to a global variable. Use inline caching for the
+ // assignment. Right-hand-side value is passed in r0, variable name in
+ // r2, and the global object in r1.
+ __ mov(r2, Operand(var->name()));
+ __ ldr(r1, GlobalObjectOperand());
+ Handle<Code> ic = is_strict_mode()
+ ? isolate()->builtins()->StoreIC_Initialize_Strict()
+ : isolate()->builtins()->StoreIC_Initialize();
+ EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
+
+ } else if (op == Token::INIT_CONST) {
+ // Like var declarations, const declarations are hoisted to function
+ // scope. However, unlike var initializers, const initializers are able
+ // to drill a hole to that function context, even from inside a 'with'
+ // context. We thus bypass the normal static scope lookup.
+ Slot* slot = var->AsSlot();
+ Label skip;
+ switch (slot->type()) {
+ case Slot::PARAMETER:
+ // No const parameters.
+ UNREACHABLE();
+ break;
+ case Slot::LOCAL:
+ // Detect const reinitialization by checking for the hole value.
+ __ ldr(r1, MemOperand(fp, SlotOffset(slot)));
+ __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+ __ cmp(r1, ip);
+ __ b(ne, &skip);
+ __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
+ break;
+ case Slot::CONTEXT: {
+ __ ldr(r1, ContextOperand(cp, Context::FCONTEXT_INDEX));
+ __ ldr(r2, ContextOperand(r1, slot->index()));
+ __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+ __ cmp(r2, ip);
+ __ b(ne, &skip);
+ __ str(r0, ContextOperand(r1, slot->index()));
+ int offset = Context::SlotOffset(slot->index());
+ __ mov(r3, r0); // Preserve the stored value in r0.
+ __ RecordWrite(r1, Operand(offset), r3, r2);
+ break;
+ }
+ case Slot::LOOKUP:
+ __ push(r0);
+ __ mov(r0, Operand(slot->var()->name()));
+ __ Push(cp, r0); // Context and name.
+ __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
+ break;
+ }
+ __ bind(&skip);
+
+ } else if (var->mode() != Variable::CONST) {
+ // Perform the assignment for non-const variables. Const assignments
+ // are simply skipped.
+ Slot* slot = var->AsSlot();
+ switch (slot->type()) {
+ case Slot::PARAMETER:
+ case Slot::LOCAL:
+ // Perform the assignment.
+ __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
+ break;
+
+ case Slot::CONTEXT: {
+ MemOperand target = EmitSlotSearch(slot, r1);
+ // Perform the assignment and issue the write barrier.
+ __ str(result_register(), target);
+ // RecordWrite may destroy all its register arguments.
+ __ mov(r3, result_register());
+ int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
+ __ RecordWrite(r1, Operand(offset), r2, r3);
+ break;
+ }
+
+ case Slot::LOOKUP:
+ // Call the runtime for the assignment.
+ __ push(r0); // Value.
+ __ mov(r1, Operand(slot->var()->name()));
+ __ mov(r0, Operand(Smi::FromInt(strict_mode_flag())));
+ __ Push(cp, r1, r0); // Context, name, strict mode.
+ __ CallRuntime(Runtime::kStoreContextSlot, 4);
+ break;
+ }
+ }
+ }
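Const initialization above only stores when the slot still holds the hole sentinel, so re-executing the same const declaration leaves the first value intact. The same pattern in ordinary C++, with a distinguished address standing in for V8's the-hole value:

#include <cstdio>

static int the_hole;             // unique address used as sentinel
static int* slot = &the_hole;    // "uninitialized" const slot

// Analog of the INIT_CONST path: store only if the slot is still the hole.
void InitConst(int* value) {
  if (slot == &the_hole) slot = value;  // cmp against the-hole; skip if set
}

int main() {
  int first = 1, second = 2;
  InitConst(&first);
  InitConst(&second);            // skipped: slot already initialized
  printf("%d\n", *slot);         // prints 1
  return 0;
}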
+
+
+ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
+ // Assignment to a property, using a named store IC.
+ Property* prop = expr->target()->AsProperty();
+ ASSERT(prop != NULL);
+ ASSERT(prop->key()->AsLiteral() != NULL);
+
+ // If the assignment starts a block of assignments to the same object,
+ // change to slow case to avoid the quadratic behavior of repeatedly
+ // adding fast properties.
+ if (expr->starts_initialization_block()) {
+ __ push(result_register());
+ __ ldr(ip, MemOperand(sp, kPointerSize)); // Receiver is now under value.
+ __ push(ip);
+ __ CallRuntime(Runtime::kToSlowProperties, 1);
+ __ pop(result_register());
+ }
+
+ // Record source code position before IC call.
+ SetSourcePosition(expr->position());
+ __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
+ // Load receiver to r1. Leave a copy on the stack if needed for turning the
+ // receiver into fast case.
+ if (expr->ends_initialization_block()) {
+ __ ldr(r1, MemOperand(sp));
+ } else {
+ __ pop(r1);
+ }
+
+ Handle<Code> ic = is_strict_mode()
+ ? isolate()->builtins()->StoreIC_Initialize_Strict()
+ : isolate()->builtins()->StoreIC_Initialize();
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
+
+ // If the assignment ends an initialization block, revert to fast case.
+ if (expr->ends_initialization_block()) {
+ __ push(r0); // Result of assignment, saved even if not needed.
+ // Receiver is under the result value.
+ __ ldr(ip, MemOperand(sp, kPointerSize));
+ __ push(ip);
+ __ CallRuntime(Runtime::kToFastProperties, 1);
+ __ pop(r0);
+ __ Drop(1);
+ }
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
+ context()->Plug(r0);
+ }
+
+
+ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
+ // Assignment to a property, using a keyed store IC.
+
+ // If the assignment starts a block of assignments to the same object,
+ // change to slow case to avoid the quadratic behavior of repeatedly
+ // adding fast properties.
+ if (expr->starts_initialization_block()) {
+ __ push(result_register());
+ // Receiver is now under the key and value.
+ __ ldr(ip, MemOperand(sp, 2 * kPointerSize));
+ __ push(ip);
+ __ CallRuntime(Runtime::kToSlowProperties, 1);
+ __ pop(result_register());
+ }
+
+ // Record source code position before IC call.
+ SetSourcePosition(expr->position());
+ __ pop(r1); // Key.
+ // Load receiver to r2. Leave a copy on the stack if needed for turning the
+ // receiver into fast case.
+ if (expr->ends_initialization_block()) {
+ __ ldr(r2, MemOperand(sp));
+ } else {
+ __ pop(r2);
+ }
+
+ Handle<Code> ic = is_strict_mode()
+ ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
+ : isolate()->builtins()->KeyedStoreIC_Initialize();
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
+
+ // If the assignment ends an initialization block, revert to fast case.
+ if (expr->ends_initialization_block()) {
+ __ push(r0); // Result of assignment, saved even if not needed.
+ // Receiver is under the result value.
+ __ ldr(ip, MemOperand(sp, kPointerSize));
+ __ push(ip);
+ __ CallRuntime(Runtime::kToFastProperties, 1);
+ __ pop(r0);
+ __ Drop(1);
+ }
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
+ context()->Plug(r0);
+ }
+
+
+ void FullCodeGenerator::VisitProperty(Property* expr) {
+ Comment cmnt(masm_, "[ Property");
+ Expression* key = expr->key();
+
+ if (key->IsPropertyName()) {
+ VisitForAccumulatorValue(expr->obj());
+ EmitNamedPropertyLoad(expr);
+ context()->Plug(r0);
+ } else {
+ VisitForStackValue(expr->obj());
+ VisitForAccumulatorValue(expr->key());
+ __ pop(r1);
+ EmitKeyedPropertyLoad(expr);
+ context()->Plug(r0);
+ }
+ }
+
+ void FullCodeGenerator::EmitCallWithIC(Call* expr,
+ Handle<Object> name,
+ RelocInfo::Mode mode) {
+ // Code common for calls using the IC.
+ ZoneList<Expression*>* args = expr->arguments();
+ int arg_count = args->length();
+ { PreservePositionScope scope(masm()->positions_recorder());
+ for (int i = 0; i < arg_count; i++) {
+ VisitForStackValue(args->at(i));
+ }
+ __ mov(r2, Operand(name));
+ }
+ // Record source position for debugger.
+ SetSourcePosition(expr->position());
+ // Call the IC initialization code.
+ InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
+ Handle<Code> ic =
+ isolate()->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
+ EmitCallIC(ic, mode);
+ RecordJSReturnSite(expr);
+ // Restore context register.
+ __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+ context()->Plug(r0);
+ }
+
+
+ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
+ Expression* key,
+ RelocInfo::Mode mode) {
+ // Load the key.
+ VisitForAccumulatorValue(key);
+
+ // Swap the name of the function and the receiver on the stack to follow
+ // the calling convention for call ICs.
+ __ pop(r1);
+ __ push(r0);
+ __ push(r1);
+
+ // Code common for calls using the IC.
+ ZoneList<Expression*>* args = expr->arguments();
+ int arg_count = args->length();
+ { PreservePositionScope scope(masm()->positions_recorder());
+ for (int i = 0; i < arg_count; i++) {
+ VisitForStackValue(args->at(i));
+ }
+ }
+ // Record source position for debugger.
+ SetSourcePosition(expr->position());
+ // Call the IC initialization code.
+ InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
+ Handle<Code> ic =
+ isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count, in_loop);
+ __ ldr(r2, MemOperand(sp, (arg_count + 1) * kPointerSize)); // Key.
+ EmitCallIC(ic, mode);
+ RecordJSReturnSite(expr);
+ // Restore context register.
+ __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+ context()->DropAndPlug(1, r0); // Drop the key still on the stack.
+ }
+
+
+ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
+ // Code common for calls using the call stub.
+ ZoneList<Expression*>* args = expr->arguments();
+ int arg_count = args->length();
+ { PreservePositionScope scope(masm()->positions_recorder());
+ for (int i = 0; i < arg_count; i++) {
+ VisitForStackValue(args->at(i));
+ }
+ }
+ // Record source position for debugger.
+ SetSourcePosition(expr->position());
+ InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
+ CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
+ __ CallStub(&stub);
+ RecordJSReturnSite(expr);
+ // Restore context register.
+ __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+ context()->DropAndPlug(1, r0);
+ }
+
+
+ void FullCodeGenerator::EmitResolvePossiblyDirectEval(ResolveEvalFlag flag,
+ int arg_count) {
+ // Push copy of the first argument or undefined if it doesn't exist.
+ if (arg_count > 0) {
+ __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
+ } else {
+ __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
+ }
+ __ push(r1);
+
+ // Push the receiver of the enclosing function and do runtime call.
+ __ ldr(r1, MemOperand(fp, (2 + scope()->num_parameters()) * kPointerSize));
+ __ push(r1);
+ // Push the strict mode flag.
+ __ mov(r1, Operand(Smi::FromInt(strict_mode_flag())));
+ __ push(r1);
+
+ __ CallRuntime(flag == SKIP_CONTEXT_LOOKUP
+ ? Runtime::kResolvePossiblyDirectEvalNoLookup
+ : Runtime::kResolvePossiblyDirectEval, 4);
+ }
+
+
+ void FullCodeGenerator::VisitCall(Call* expr) {
+ #ifdef DEBUG
+ // We want to verify that RecordJSReturnSite gets called on all paths
+ // through this function. Avoid early returns.
+ expr->return_is_recorded_ = false;
+ #endif
+
+ Comment cmnt(masm_, "[ Call");
+ Expression* fun = expr->expression();
+ Variable* var = fun->AsVariableProxy()->AsVariable();
+
+ if (var != NULL && var->is_possibly_eval()) {
+ // In a call to eval, we first call %ResolvePossiblyDirectEval to
+ // resolve the function we need to call and the receiver of the
+ // call. Then we call the resolved function using the given
+ // arguments.
+ ZoneList<Expression*>* args = expr->arguments();
+ int arg_count = args->length();
+
+ { PreservePositionScope pos_scope(masm()->positions_recorder());
+ VisitForStackValue(fun);
+ __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
+ __ push(r2); // Reserved receiver slot.
+
+ // Push the arguments.
+ for (int i = 0; i < arg_count; i++) {
+ VisitForStackValue(args->at(i));
+ }
+
+ // If we know that eval can only be shadowed by eval-introduced
+ // variables we attempt to load the global eval function directly
+ // in generated code. If we succeed, there is no need to perform a
+ // context lookup in the runtime system.
+ Label done;
+ if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
+ Label slow;
+ EmitLoadGlobalSlotCheckExtensions(var->AsSlot(),
+ NOT_INSIDE_TYPEOF,
+ &slow);
+ // Push the function and resolve eval.
+ __ push(r0);
+ EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count);
+ __ jmp(&done);
+ __ bind(&slow);
+ }
+
+ // Push copy of the function (found below the arguments) and
+ // resolve eval.
+ __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
+ __ push(r1);
+ EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count);
+ if (done.is_linked()) {
+ __ bind(&done);
+ }
+
+ // The runtime call returns a pair of values in r0 (function) and
+ // r1 (receiver). Touch up the stack with the right values.
+ __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
+ __ str(r1, MemOperand(sp, arg_count * kPointerSize));
+ }
+
+ // Record source position for debugger.
+ SetSourcePosition(expr->position());
+ InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
+ CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
+ __ CallStub(&stub);
+ RecordJSReturnSite(expr);
+ // Restore context register.
+ __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+ context()->DropAndPlug(1, r0);
+ } else if (var != NULL && !var->is_this() && var->is_global()) {
+ // Push global object as receiver for the call IC.
+ __ ldr(r0, GlobalObjectOperand());
+ __ push(r0);
+ EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);
+ } else if (var != NULL && var->AsSlot() != NULL &&
+ var->AsSlot()->type() == Slot::LOOKUP) {
+ // Call to a lookup slot (dynamically introduced variable).
+ Label slow, done;
+
+ { PreservePositionScope scope(masm()->positions_recorder());
+ // Generate code for loading from variables potentially shadowed
+ // by eval-introduced variables.
+ EmitDynamicLoadFromSlotFastCase(var->AsSlot(),
+ NOT_INSIDE_TYPEOF,
+ &slow,
+ &done);
+ }
+
+ __ bind(&slow);
+ // Call the runtime to find the function to call (returned in r0)
+ // and the object holding it (returned in r1).
+ __ push(context_register());
+ __ mov(r2, Operand(var->name()));
+ __ push(r2);
+ __ CallRuntime(Runtime::kLoadContextSlot, 2);
+ __ Push(r0, r1); // Function, receiver.
+
+ // If fast case code has been generated, emit code to push the
+ // function and receiver and have the slow path jump around this
+ // code.
+ if (done.is_linked()) {
+ Label call;
+ __ b(&call);
+ __ bind(&done);
+ // Push function.
+ __ push(r0);
+ // Push global receiver.
+ __ ldr(r1, GlobalObjectOperand());
+ __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
+ __ push(r1);
+ __ bind(&call);
+ }
+
+ EmitCallWithStub(expr);
+ } else if (fun->AsProperty() != NULL) {
+ // Call to an object property.
+ Property* prop = fun->AsProperty();
+ Literal* key = prop->key()->AsLiteral();
+ if (key != NULL && key->handle()->IsSymbol()) {
+ // Call to a named property, use call IC.
+ { PreservePositionScope scope(masm()->positions_recorder());
+ VisitForStackValue(prop->obj());
+ }
+ EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
+ } else {
+ // Call to a keyed property.
+ // For a synthetic property use keyed load IC followed by function call,
+ // for a regular property use keyed CallIC.
+ if (prop->is_synthetic()) {
+ // Do not visit the object and key subexpressions (they are shared
+ // by all occurrences of the same rewritten parameter).
+ ASSERT(prop->obj()->AsVariableProxy() != NULL);
+ ASSERT(prop->obj()->AsVariableProxy()->var()->AsSlot() != NULL);
+ Slot* slot = prop->obj()->AsVariableProxy()->var()->AsSlot();
+ MemOperand operand = EmitSlotSearch(slot, r1);
+ __ ldr(r1, operand);
+
+ ASSERT(prop->key()->AsLiteral() != NULL);
+ ASSERT(prop->key()->AsLiteral()->handle()->IsSmi());
+ __ mov(r0, Operand(prop->key()->AsLiteral()->handle()));
+
+ // Record source code position for IC call.
+ SetSourcePosition(prop->position());
+
+ Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ __ ldr(r1, GlobalObjectOperand());
+ __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
+ __ Push(r0, r1); // Function, receiver.
+ EmitCallWithStub(expr);
+ } else {
+ { PreservePositionScope scope(masm()->positions_recorder());
+ VisitForStackValue(prop->obj());
+ }
+ EmitKeyedCallWithIC(expr, prop->key(), RelocInfo::CODE_TARGET);
+ }
+ }
+ } else {
+ { PreservePositionScope scope(masm()->positions_recorder());
+ VisitForStackValue(fun);
+ }
+ // Load global receiver object.
+ __ ldr(r1, GlobalObjectOperand());
+ __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
+ __ push(r1);
+ // Emit function call.
+ EmitCallWithStub(expr);
+ }
+
+ #ifdef DEBUG
+ // RecordJSReturnSite should have been called.
+ ASSERT(expr->return_is_recorded_);
+ #endif
+ }
+
+
+ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
+ Comment cmnt(masm_, "[ CallNew");
+ // According to ECMA-262, section 11.2.2, page 44, the function
+ // expression in new calls must be evaluated before the
+ // arguments.
+
+ // Push constructor on the stack. If it's not a function it's used as
+ // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
+ // ignored.
+ VisitForStackValue(expr->expression());
+
+ // Push the arguments ("left-to-right") on the stack.
+ ZoneList<Expression*>* args = expr->arguments();
+ int arg_count = args->length();
+ for (int i = 0; i < arg_count; i++) {
+ VisitForStackValue(args->at(i));
+ }
+
+ // Call the construct call builtin that handles allocation and
+ // constructor invocation.
+ SetSourcePosition(expr->position());
+
+ // Load function and argument count into r1 and r0.
+ __ mov(r0, Operand(arg_count));
+ __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
+
+ Handle<Code> construct_builtin =
+ isolate()->builtins()->JSConstructCall();
+ __ Call(construct_builtin, RelocInfo::CONSTRUCT_CALL);
+ context()->Plug(r0);
+ }
+
+
+ void FullCodeGenerator::EmitIsSmi(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 1);
+
+ VisitForAccumulatorValue(args->at(0));
+
+ Label materialize_true, materialize_false;
+ Label* if_true = NULL;
+ Label* if_false = NULL;
+ Label* fall_through = NULL;
+ context()->PrepareTest(&materialize_true, &materialize_false,
+ &if_true, &if_false, &fall_through);
+
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
+ __ tst(r0, Operand(kSmiTagMask));
+ Split(eq, if_true, if_false, fall_through);
+
+ context()->Plug(if_true, if_false);
+ }
+
+
+ void FullCodeGenerator::EmitIsNonNegativeSmi(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 1);
+
+ VisitForAccumulatorValue(args->at(0));
+
+ Label materialize_true, materialize_false;
+ Label* if_true = NULL;
+ Label* if_false = NULL;
+ Label* fall_through = NULL;
+ context()->PrepareTest(&materialize_true, &materialize_false,
+ &if_true, &if_false, &fall_through);
+
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
+ __ tst(r0, Operand(kSmiTagMask | 0x80000000));
+ Split(eq, if_true, if_false, fall_through);
+
+ context()->Plug(if_true, if_false);
+ }
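EmitIsNonNegativeSmi folds two questions into one tst: bit 0 is the heap-object tag and bit 31 is the sign, so a value is a non-negative smi exactly when both bits are clear. The equivalent host-side predicate:

#include <cstdint>

// True iff v is a smi (tag bit clear) and non-negative (sign bit clear).
// Mirrors: tst r0, #(kSmiTagMask | 0x80000000); eq => true.
inline bool IsNonNegativeSmi(uint32_t v) {
  return (v & 0x80000001u) == 0;
}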
+
+
+ void FullCodeGenerator::EmitIsObject(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 1);
+
+ VisitForAccumulatorValue(args->at(0));
+
+ Label materialize_true, materialize_false;
+ Label* if_true = NULL;
+ Label* if_false = NULL;
+ Label* fall_through = NULL;
+ context()->PrepareTest(&materialize_true, &materialize_false,
+ &if_true, &if_false, &fall_through);
+
+ __ JumpIfSmi(r0, if_false);
+ __ LoadRoot(ip, Heap::kNullValueRootIndex);
+ __ cmp(r0, ip);
+ __ b(eq, if_true);
+ __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
+ // Undetectable objects behave like undefined when tested with typeof.
+ __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset));
+ __ tst(r1, Operand(1 << Map::kIsUndetectable));
+ __ b(ne, if_false);
+ __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset));
+ __ cmp(r1, Operand(FIRST_JS_OBJECT_TYPE));
+ __ b(lt, if_false);
+ __ cmp(r1, Operand(LAST_JS_OBJECT_TYPE));
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
+ Split(le, if_true, if_false, fall_through);
+
+ context()->Plug(if_true, if_false);
+ }
+
+
+ void FullCodeGenerator::EmitIsSpecObject(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 1);
+
+ VisitForAccumulatorValue(args->at(0));
+
+ Label materialize_true, materialize_false;
+ Label* if_true = NULL;
+ Label* if_false = NULL;
+ Label* fall_through = NULL;
+ context()->PrepareTest(&materialize_true, &materialize_false,
+ &if_true, &if_false, &fall_through);
+
+ __ JumpIfSmi(r0, if_false);
+ __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
+ Split(ge, if_true, if_false, fall_through);
+
+ context()->Plug(if_true, if_false);
+ }
+
+
+ void FullCodeGenerator::EmitIsUndetectableObject(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 1);
+
+ VisitForAccumulatorValue(args->at(0));
+
+ Label materialize_true, materialize_false;
+ Label* if_true = NULL;
+ Label* if_false = NULL;
+ Label* fall_through = NULL;
+ context()->PrepareTest(&materialize_true, &materialize_false,
+ &if_true, &if_false, &fall_through);
+
+ __ JumpIfSmi(r0, if_false);
+ __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
+ __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
+ __ tst(r1, Operand(1 << Map::kIsUndetectable));
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
+ Split(ne, if_true, if_false, fall_through);
+
+ context()->Plug(if_true, if_false);
+ }
+
+
+ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
+ ZoneList<Expression*>* args) {
+
+ ASSERT(args->length() == 1);
+
+ VisitForAccumulatorValue(args->at(0));
+
+ Label materialize_true, materialize_false;
+ Label* if_true = NULL;
+ Label* if_false = NULL;
+ Label* fall_through = NULL;
+ context()->PrepareTest(&materialize_true, &materialize_false,
+ &if_true, &if_false, &fall_through);
+
+ if (FLAG_debug_code) __ AbortIfSmi(r0);
+
+ __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
+ __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset));
+ __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
+ __ b(ne, if_true);
+
+ // Check for fast case object. Generate false result for slow case object.
+ __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset));
+ __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
+ __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
+ __ cmp(r2, ip);
+ __ b(eq, if_false);
+
+ // Look for valueOf symbol in the descriptor array, and indicate false if
+ // found. The type is not checked, so if it is a transition it is a false
+ // negative.
+ __ ldr(r4, FieldMemOperand(r1, Map::kInstanceDescriptorsOffset));
+ __ ldr(r3, FieldMemOperand(r4, FixedArray::kLengthOffset));
+ // r4: descriptor array
+ // r3: length of descriptor array
+ // Calculate the end of the descriptor array.
+ STATIC_ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiTagSize == 1);
+ STATIC_ASSERT(kPointerSize == 4);
+ __ add(r2, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
+
+ // Calculate location of the first key name.
+ __ add(r4,
+ r4,
+ Operand(FixedArray::kHeaderSize - kHeapObjectTag +
+ DescriptorArray::kFirstIndex * kPointerSize));
+ // Loop through all the keys in the descriptor array. If one of these is the
+ // symbol valueOf the result is false.
+ Label entry, loop;
+ // The use of ip to store the valueOf symbol assumes that it is not otherwise
+ // used in the loop below.
+ __ mov(ip, Operand(FACTORY->value_of_symbol()));
+ __ jmp(&entry);
+ __ bind(&loop);
+ __ ldr(r3, MemOperand(r4, 0));
+ __ cmp(r3, ip);
+ __ b(eq, if_false);
+ __ add(r4, r4, Operand(kPointerSize));
+ __ bind(&entry);
+ __ cmp(r4, Operand(r2));
+ __ b(ne, &loop);
+
+ // If a valueOf property is not found on the object, check that its
+ // prototype is the unmodified String prototype. If not, the result is false.
+ __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
+ __ tst(r2, Operand(kSmiTagMask));
+ __ b(eq, if_false);
+ __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
+ __ ldr(r3, ContextOperand(cp, Context::GLOBAL_INDEX));
+ __ ldr(r3, FieldMemOperand(r3, GlobalObject::kGlobalContextOffset));
+ __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
+ __ cmp(r2, r3);
+ __ b(ne, if_false);
+
+ // Set the bit in the map to indicate that it has been checked safe for
+ // default valueOf and set true result.
+ __ ldrb(r2, FieldMemOperand(r4, Map::kBitField2Offset));
+ __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
+ __ strb(r2, FieldMemOperand(r4, Map::kBitField2Offset));
+ __ jmp(if_true);
+
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
+ context()->Plug(if_true, if_false);
+ }
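The loop above is a plain linear scan over descriptor-array keys, comparing each slot against the interned "valueOf" symbol; interning is what makes a pointer compare sufficient. The same scan host-side, with pointer identity standing in for symbol identity (names are illustrative):

#include <cstddef>

// Interned symbols: equal strings share one address, so == compares content.
static const char* const kValueOf = "valueOf";

// True if the descriptor keys contain the valueOf symbol (cmp r3, ip above).
bool HasValueOf(const char* const* keys, size_t count) {
  for (size_t i = 0; i < count; i++) {
    if (keys[i] == kValueOf) return true;
  }
  return false;
}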
+
+
+ void FullCodeGenerator::EmitIsFunction(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 1);
+
+ VisitForAccumulatorValue(args->at(0));
+
+ Label materialize_true, materialize_false;
+ Label* if_true = NULL;
+ Label* if_false = NULL;
+ Label* fall_through = NULL;
+ context()->PrepareTest(&materialize_true, &materialize_false,
+ &if_true, &if_false, &fall_through);
+
+ __ JumpIfSmi(r0, if_false);
+ __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
+ Split(eq, if_true, if_false, fall_through);
+
+ context()->Plug(if_true, if_false);
+ }
+
+
+ void FullCodeGenerator::EmitIsArray(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 1);
+
+ VisitForAccumulatorValue(args->at(0));
+
+ Label materialize_true, materialize_false;
+ Label* if_true = NULL;
+ Label* if_false = NULL;
+ Label* fall_through = NULL;
+ context()->PrepareTest(&materialize_true, &materialize_false,
+ &if_true, &if_false, &fall_through);
+
+ __ JumpIfSmi(r0, if_false);
+ __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
+ Split(eq, if_true, if_false, fall_through);
+
+ context()->Plug(if_true, if_false);
+ }
+
+
+ void FullCodeGenerator::EmitIsRegExp(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 1);
+
+ VisitForAccumulatorValue(args->at(0));
+
+ Label materialize_true, materialize_false;
+ Label* if_true = NULL;
+ Label* if_false = NULL;
+ Label* fall_through = NULL;
+ context()->PrepareTest(&materialize_true, &materialize_false,
+ &if_true, &if_false, &fall_through);
+
+ __ JumpIfSmi(r0, if_false);
+ __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
+ Split(eq, if_true, if_false, fall_through);
+
+ context()->Plug(if_true, if_false);
+ }
+
+
+
+ void FullCodeGenerator::EmitIsConstructCall(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 0);
+
+ Label materialize_true, materialize_false;
+ Label* if_true = NULL;
+ Label* if_false = NULL;
+ Label* fall_through = NULL;
+ context()->PrepareTest(&materialize_true, &materialize_false,
+ &if_true, &if_false, &fall_through);
+
+ // Get the frame pointer for the calling frame.
+ __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+
+ // Skip the arguments adaptor frame if it exists.
+ Label check_frame_marker;
+ __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
+ __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+ __ b(ne, &check_frame_marker);
+ __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset));
+
+ // Check the marker in the calling frame.
+ __ bind(&check_frame_marker);
+ __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
+ __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
+ Split(eq, if_true, if_false, fall_through);
+
+ context()->Plug(if_true, if_false);
+ }
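IsConstructCall answers its question by walking the frame chain: load the caller's fp, hop over an arguments-adaptor frame if one was inserted, then compare the frame marker word. A conceptual sketch with a hypothetical frame layout (the real slot offsets live in StandardFrameConstants):

#include <cstdint>

enum FrameMarker { ARGUMENTS_ADAPTOR = 1, CONSTRUCT = 2 };  // illustrative

struct Frame {
  Frame* caller_fp;            // kCallerFPOffset slot
  intptr_t context_or_marker;  // kContextOffset slot
  intptr_t marker;             // kMarkerOffset slot
};

bool IsConstructCall(Frame* fp) {
  Frame* caller = fp->caller_fp;
  // Skip the arguments adaptor frame if it exists.
  if (caller->context_or_marker == ARGUMENTS_ADAPTOR) {
    caller = caller->caller_fp;
  }
  // Check the marker in the calling frame.
  return caller->marker == CONSTRUCT;
}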
+
+
+ void FullCodeGenerator::EmitObjectEquals(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 2);
+
+ // Load the two objects into registers and perform the comparison.
+ VisitForStackValue(args->at(0));
+ VisitForAccumulatorValue(args->at(1));
+
+ Label materialize_true, materialize_false;
+ Label* if_true = NULL;
+ Label* if_false = NULL;
+ Label* fall_through = NULL;
+ context()->PrepareTest(&materialize_true, &materialize_false,
+ &if_true, &if_false, &fall_through);
+
+ __ pop(r1);
+ __ cmp(r0, r1);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
+ Split(eq, if_true, if_false, fall_through);
+
+ context()->Plug(if_true, if_false);
+ }
+
+
+ void FullCodeGenerator::EmitArguments(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 1);
+
+ // ArgumentsAccessStub expects the key in r1 and the formal
+ // parameter count in r0.
+ VisitForAccumulatorValue(args->at(0));
+ __ mov(r1, r0);
+ __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters())));
+ ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
+ __ CallStub(&stub);
+ context()->Plug(r0);
+ }
+
+
+ void FullCodeGenerator::EmitArgumentsLength(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 0);
+
+ Label exit;
+ // Get the number of formal parameters.
+ __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters())));
+
+ // Check if the calling frame is an arguments adaptor frame.
+ __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+ __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
+ __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+ __ b(ne, &exit);
+
+ // Arguments adaptor case: Read the arguments length from the
+ // adaptor frame.
+ __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
+
+ __ bind(&exit);
+ context()->Plug(r0);
+ }
+
+
+ void FullCodeGenerator::EmitClassOf(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 1);
+ Label done, null, function, non_function_constructor;
+
+ VisitForAccumulatorValue(args->at(0));
+
+ // If the object is a smi, we return null.
+ __ JumpIfSmi(r0, &null);
+
+ // Check that the object is a JS object but take special care of JS
+ // functions to make sure they have 'Function' as their class.
+ __ CompareObjectType(r0, r0, r1, FIRST_JS_OBJECT_TYPE); // Map is now in r0.
+ __ b(lt, &null);
+
+ // As long as JS_FUNCTION_TYPE is the last instance type and it is
+ // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
+ // LAST_JS_OBJECT_TYPE.
+ ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
+ ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
+ __ cmp(r1, Operand(JS_FUNCTION_TYPE));
+ __ b(eq, &function);
+
+ // Check if the constructor in the map is a function.
+ __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset));
+ __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
+ __ b(ne, &non_function_constructor);
+
+ // r0 now contains the constructor function. Grab the
+ // instance class name from there.
+ __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
+ __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
+ __ b(&done);
+
+ // Functions have class 'Function'.
+ __ bind(&function);
+ __ LoadRoot(r0, Heap::kfunction_class_symbolRootIndex);
+ __ jmp(&done);
+
+ // Objects with a non-function constructor have class 'Object'.
+ __ bind(&non_function_constructor);
+ __ LoadRoot(r0, Heap::kObject_symbolRootIndex);
+ __ jmp(&done);
+
+ // Non-JS objects have class null.
+ __ bind(&null);
+ __ LoadRoot(r0, Heap::kNullValueRootIndex);
+
+ // All done.
+ __ bind(&done);
+
+ context()->Plug(r0);
+ }
+
2815
+
2816
+ void FullCodeGenerator::EmitLog(ZoneList<Expression*>* args) {
2817
+ // Conditionally generate a log call.
2818
+ // Args:
2819
+ // 0 (literal string): The type of logging (corresponds to the flags).
2820
+ // This is used to determine whether or not to generate the log call.
2821
+ // 1 (string): Format string. Access the string at argument index 2
2822
+ // with '%2s' (see Logger::LogRuntime for all the formats).
2823
+ // 2 (array): Arguments to the format string.
2824
+ ASSERT_EQ(args->length(), 3);
2825
+ #ifdef ENABLE_LOGGING_AND_PROFILING
2826
+ if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
2827
+ VisitForStackValue(args->at(1));
2828
+ VisitForStackValue(args->at(2));
2829
+ __ CallRuntime(Runtime::kLog, 2);
2830
+ }
2831
+ #endif
2832
+ // Finally, we're expected to leave a value on the top of the stack.
2833
+ __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2834
+ context()->Plug(r0);
2835
+ }
2836
+
2837
+
2838
+ void FullCodeGenerator::EmitRandomHeapNumber(ZoneList<Expression*>* args) {
2839
+ ASSERT(args->length() == 0);
2840
+
2841
+ Label slow_allocate_heapnumber;
2842
+ Label heapnumber_allocated;
2843
+
2844
+ __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
2845
+ __ AllocateHeapNumber(r4, r1, r2, r6, &slow_allocate_heapnumber);
2846
+ __ jmp(&heapnumber_allocated);
2847
+
2848
+ __ bind(&slow_allocate_heapnumber);
2849
+ // Allocate a heap number.
2850
+ __ CallRuntime(Runtime::kNumberAlloc, 0);
2851
+ __ mov(r4, Operand(r0));
2852
+
2853
+ __ bind(&heapnumber_allocated);
2854
+
2855
+ // Convert 32 random bits in r0 to 0.(32 random bits) in a double
2856
+ // by computing:
2857
+ // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
2858
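+ // (0x41300000 00000000 is the IEEE-754 bit pattern of 1.0 x 2^20, so
+ // dropping 32 random bits into the low mantissa word yields
+ // 2^20 + random/2^32; the subtraction below leaves random/2^32 in [0, 1).)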
+ if (CpuFeatures::IsSupported(VFP3)) {
+ __ PrepareCallCFunction(1, r0);
+ __ mov(r0, Operand(ExternalReference::isolate_address()));
+ __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
+
+ CpuFeatures::Scope scope(VFP3);
+ // 0x41300000 is the top half of 1.0 x 2^20 as a double.
+ // Create this constant using mov/orr to avoid PC relative load.
+ __ mov(r1, Operand(0x41000000));
+ __ orr(r1, r1, Operand(0x300000));
+ // Move 0x41300000xxxxxxxx (x = random bits) to VFP.
+ __ vmov(d7, r0, r1);
+ // Move 0x4130000000000000 to VFP.
+ __ mov(r0, Operand(0, RelocInfo::NONE));
+ __ vmov(d8, r0, r1);
+ // Subtract and store the result in the heap number.
+ __ vsub(d7, d7, d8);
+ __ sub(r0, r4, Operand(kHeapObjectTag));
+ __ vstr(d7, r0, HeapNumber::kValueOffset);
+ __ mov(r0, r4);
+ } else {
+ __ PrepareCallCFunction(2, r0);
+ __ mov(r0, Operand(r4));
+ __ mov(r1, Operand(ExternalReference::isolate_address()));
+ __ CallCFunction(
+ ExternalReference::fill_heap_number_with_random_function(isolate()), 2);
+ }
+
+ context()->Plug(r0);
+ }
+
+
+ void FullCodeGenerator::EmitSubString(ZoneList<Expression*>* args) {
+ // Load the arguments on the stack and call the stub.
+ SubStringStub stub;
+ ASSERT(args->length() == 3);
+ VisitForStackValue(args->at(0));
+ VisitForStackValue(args->at(1));
+ VisitForStackValue(args->at(2));
+ __ CallStub(&stub);
+ context()->Plug(r0);
+ }
+
+
+ void FullCodeGenerator::EmitRegExpExec(ZoneList<Expression*>* args) {
+ // Load the arguments on the stack and call the stub.
+ RegExpExecStub stub;
+ ASSERT(args->length() == 4);
+ VisitForStackValue(args->at(0));
+ VisitForStackValue(args->at(1));
+ VisitForStackValue(args->at(2));
+ VisitForStackValue(args->at(3));
+ __ CallStub(&stub);
+ context()->Plug(r0);
+ }
+
+
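+ // %_ValueOf unwraps primitive wrapper objects (e.g. the result of
+ // "new Number(42)"); for any non-wrapper value it is the identity.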
+ void FullCodeGenerator::EmitValueOf(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 1);
+
+ VisitForAccumulatorValue(args->at(0)); // Load the object.
+
+ Label done;
+ // If the object is a smi, return the object.
+ __ JumpIfSmi(r0, &done);
+ // If the object is not a value type, return the object.
+ __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
+ __ b(ne, &done);
+ __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset));
+
+ __ bind(&done);
+ context()->Plug(r0);
+ }
+
+
+ void FullCodeGenerator::EmitMathPow(ZoneList<Expression*>* args) {
+ // Load the arguments on the stack and call the runtime function.
+ ASSERT(args->length() == 2);
+ VisitForStackValue(args->at(0));
+ VisitForStackValue(args->at(1));
+ MathPowStub stub;
+ __ CallStub(&stub);
+ context()->Plug(r0);
+ }
+
+
+ void FullCodeGenerator::EmitSetValueOf(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 2);
+
+ VisitForStackValue(args->at(0)); // Load the object.
+ VisitForAccumulatorValue(args->at(1)); // Load the value.
+ __ pop(r1); // r0 = value. r1 = object.
+
+ Label done;
+ // If the object is a smi, return the value.
+ __ JumpIfSmi(r1, &done);
+
+ // If the object is not a value type, return the value.
+ __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
+ __ b(ne, &done);
+
+ // Store the value.
+ __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
+ // Update the write barrier. Save the value as it will be
+ // overwritten by the write barrier code and is needed afterward.
+ __ RecordWrite(r1, Operand(JSValue::kValueOffset - kHeapObjectTag), r2, r3);
+
+ __ bind(&done);
+ context()->Plug(r0);
+ }
+
+
+ void FullCodeGenerator::EmitNumberToString(ZoneList<Expression*>* args) {
+ ASSERT_EQ(args->length(), 1);
+
+ // Load the argument on the stack and call the stub.
+ VisitForStackValue(args->at(0));
+
+ NumberToStringStub stub;
+ __ CallStub(&stub);
+ context()->Plug(r0);
+ }
+
+
+ void FullCodeGenerator::EmitStringCharFromCode(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 1);
+
+ VisitForAccumulatorValue(args->at(0));
+
+ Label done;
+ StringCharFromCodeGenerator generator(r0, r1);
+ generator.GenerateFast(masm_);
+ __ jmp(&done);
+
+ NopRuntimeCallHelper call_helper;
+ generator.GenerateSlow(masm_, call_helper);
+
+ __ bind(&done);
+ context()->Plug(r1);
+ }
+
+
+ void FullCodeGenerator::EmitStringCharCodeAt(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 2);
+
+ VisitForStackValue(args->at(0));
+ VisitForAccumulatorValue(args->at(1));
+
+ Register object = r1;
+ Register index = r0;
+ Register scratch = r2;
+ Register result = r3;
+
+ __ pop(object);
+
+ Label need_conversion;
+ Label index_out_of_range;
+ Label done;
+ StringCharCodeAtGenerator generator(object,
+ index,
+ scratch,
+ result,
+ &need_conversion,
+ &need_conversion,
+ &index_out_of_range,
+ STRING_INDEX_IS_NUMBER);
+ generator.GenerateFast(masm_);
+ __ jmp(&done);
+
+ __ bind(&index_out_of_range);
+ // When the index is out of range, the spec requires us to return
+ // NaN.
+ __ LoadRoot(result, Heap::kNanValueRootIndex);
+ __ jmp(&done);
+
+ __ bind(&need_conversion);
+ // Load the undefined value into the result register, which will
+ // trigger conversion.
+ __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
+ __ jmp(&done);
+
+ NopRuntimeCallHelper call_helper;
+ generator.GenerateSlow(masm_, call_helper);
+
+ __ bind(&done);
+ context()->Plug(result);
+ }
+
+
+ void FullCodeGenerator::EmitStringCharAt(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 2);
+
+ VisitForStackValue(args->at(0));
+ VisitForAccumulatorValue(args->at(1));
+
+ Register object = r1;
+ Register index = r0;
+ Register scratch1 = r2;
+ Register scratch2 = r3;
+ Register result = r0;
+
+ __ pop(object);
+
+ Label need_conversion;
+ Label index_out_of_range;
+ Label done;
+ StringCharAtGenerator generator(object,
+ index,
+ scratch1,
+ scratch2,
+ result,
+ &need_conversion,
+ &need_conversion,
+ &index_out_of_range,
+ STRING_INDEX_IS_NUMBER);
+ generator.GenerateFast(masm_);
+ __ jmp(&done);
+
+ __ bind(&index_out_of_range);
+ // When the index is out of range, the spec requires us to return
+ // the empty string.
+ __ LoadRoot(result, Heap::kEmptyStringRootIndex);
+ __ jmp(&done);
+
+ __ bind(&need_conversion);
+ // Move smi zero into the result register, which will trigger
+ // conversion.
+ __ mov(result, Operand(Smi::FromInt(0)));
+ __ jmp(&done);
+
+ NopRuntimeCallHelper call_helper;
+ generator.GenerateSlow(masm_, call_helper);
+
+ __ bind(&done);
+ context()->Plug(result);
+ }
+
+
+ void FullCodeGenerator::EmitStringAdd(ZoneList<Expression*>* args) {
+ ASSERT_EQ(2, args->length());
+
+ VisitForStackValue(args->at(0));
+ VisitForStackValue(args->at(1));
+
+ StringAddStub stub(NO_STRING_ADD_FLAGS);
+ __ CallStub(&stub);
+ context()->Plug(r0);
+ }
+
+
+ void FullCodeGenerator::EmitStringCompare(ZoneList<Expression*>* args) {
+ ASSERT_EQ(2, args->length());
+
+ VisitForStackValue(args->at(0));
+ VisitForStackValue(args->at(1));
+
+ StringCompareStub stub;
+ __ CallStub(&stub);
+ context()->Plug(r0);
+ }
+
+
+ void FullCodeGenerator::EmitMathSin(ZoneList<Expression*>* args) {
+ // Load the argument on the stack and call the stub.
+ TranscendentalCacheStub stub(TranscendentalCache::SIN,
+ TranscendentalCacheStub::TAGGED);
+ ASSERT(args->length() == 1);
+ VisitForStackValue(args->at(0));
+ __ CallStub(&stub);
+ context()->Plug(r0);
+ }
+
+
+ void FullCodeGenerator::EmitMathCos(ZoneList<Expression*>* args) {
+ // Load the argument on the stack and call the stub.
+ TranscendentalCacheStub stub(TranscendentalCache::COS,
+ TranscendentalCacheStub::TAGGED);
+ ASSERT(args->length() == 1);
+ VisitForStackValue(args->at(0));
+ __ CallStub(&stub);
+ context()->Plug(r0);
+ }
+
+
+ void FullCodeGenerator::EmitMathLog(ZoneList<Expression*>* args) {
+ // Load the argument on the stack and call the stub.
+ TranscendentalCacheStub stub(TranscendentalCache::LOG,
+ TranscendentalCacheStub::TAGGED);
+ ASSERT(args->length() == 1);
+ VisitForStackValue(args->at(0));
+ __ CallStub(&stub);
+ context()->Plug(r0);
+ }
+
+
+ void FullCodeGenerator::EmitMathSqrt(ZoneList<Expression*>* args) {
+ // Load the argument on the stack and call the runtime function.
+ ASSERT(args->length() == 1);
+ VisitForStackValue(args->at(0));
+ __ CallRuntime(Runtime::kMath_sqrt, 1);
+ context()->Plug(r0);
+ }
+
+
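+ // %_CallFunction(receiver, arg1, ..., argN, function) calls the given
+ // function with the given receiver and arguments.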
+ void FullCodeGenerator::EmitCallFunction(ZoneList<Expression*>* args) {
+ ASSERT(args->length() >= 2);
+
+ int arg_count = args->length() - 2; // For receiver and function.
+ VisitForStackValue(args->at(0)); // Receiver.
+ for (int i = 0; i < arg_count; i++) {
+ VisitForStackValue(args->at(i + 1));
+ }
+ VisitForAccumulatorValue(args->at(arg_count + 1)); // Function.
+
+ // InvokeFunction requires the function in r1. Move it there.
+ if (!result_register().is(r1)) __ mov(r1, result_register());
+ ParameterCount count(arg_count);
+ __ InvokeFunction(r1, count, CALL_FUNCTION);
+ __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+ context()->Plug(r0);
+ }
+
+
+ void FullCodeGenerator::EmitRegExpConstructResult(ZoneList<Expression*>* args) {
+ RegExpConstructResultStub stub;
+ ASSERT(args->length() == 3);
+ VisitForStackValue(args->at(0));
+ VisitForStackValue(args->at(1));
+ VisitForStackValue(args->at(2));
+ __ CallStub(&stub);
+ context()->Plug(r0);
+ }
+
+
+ void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 3);
+ VisitForStackValue(args->at(0));
+ VisitForStackValue(args->at(1));
+ VisitForStackValue(args->at(2));
+ Label done;
+ Label slow_case;
+ Register object = r0;
+ Register index1 = r1;
+ Register index2 = r2;
+ Register elements = r3;
+ Register scratch1 = r4;
+ Register scratch2 = r5;
+
+ __ ldr(object, MemOperand(sp, 2 * kPointerSize));
+ // Fetch the map and check if array is in fast case.
+ // Check that object doesn't require security checks and
+ // has no indexed interceptor.
+ __ CompareObjectType(object, scratch1, scratch2, JS_ARRAY_TYPE);
+ __ b(ne, &slow_case);
+ // Map is now in scratch1.
+
+ __ ldrb(scratch2, FieldMemOperand(scratch1, Map::kBitFieldOffset));
+ __ tst(scratch2, Operand(KeyedLoadIC::kSlowCaseBitFieldMask));
+ __ b(ne, &slow_case);
+
+ // Check the object's elements are in fast case and writable.
+ __ ldr(elements, FieldMemOperand(object, JSObject::kElementsOffset));
+ __ ldr(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
+ __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
+ __ cmp(scratch1, ip);
+ __ b(ne, &slow_case);
+
+ // Check that both indices are smis.
+ __ ldr(index1, MemOperand(sp, 1 * kPointerSize));
+ __ ldr(index2, MemOperand(sp, 0));
+ __ JumpIfNotBothSmi(index1, index2, &slow_case);
+
+ // Check that both indices are valid.
+ __ ldr(scratch1, FieldMemOperand(object, JSArray::kLengthOffset));
+ __ cmp(scratch1, index1);
+ __ cmp(scratch1, index2, hi);
+ __ b(ls, &slow_case);
+
+ // Bring the address of the elements into index1 and index2.
+ __ add(scratch1, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ __ add(index1,
+ scratch1,
+ Operand(index1, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ add(index2,
+ scratch1,
+ Operand(index2, LSL, kPointerSizeLog2 - kSmiTagSize));
+
+ // Swap elements.
+ __ ldr(scratch1, MemOperand(index1, 0));
+ __ ldr(scratch2, MemOperand(index2, 0));
+ __ str(scratch1, MemOperand(index2, 0));
+ __ str(scratch2, MemOperand(index1, 0));
+
+ Label new_space;
+ __ InNewSpace(elements, scratch1, eq, &new_space);
+ // Possible optimization: do a check that both values are smis
+ // (OR them and test against the smi mask).
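+ // (Stores of heap pointers into an old-space object must be recorded so
+ // the generational GC can find old-to-new pointers; if the elements
+ // array itself is in new space, the write barrier can be skipped.)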
+
+ __ mov(scratch1, elements);
+ __ RecordWriteHelper(elements, index1, scratch2);
+ __ RecordWriteHelper(scratch1, index2, scratch2); // scratch1 holds elements.
+
+ __ bind(&new_space);
+ // We are done. Drop elements from the stack, and return undefined.
+ __ Drop(3);
+ __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
+ __ jmp(&done);
+
+ __ bind(&slow_case);
+ __ CallRuntime(Runtime::kSwapElements, 3);
+
+ __ bind(&done);
+ context()->Plug(r0);
+ }
+
+
+ void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
+ ASSERT_EQ(2, args->length());
+
+ ASSERT_NE(NULL, args->at(0)->AsLiteral());
+ int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
+
+ Handle<FixedArray> jsfunction_result_caches(
+ isolate()->global_context()->jsfunction_result_caches());
+ if (jsfunction_result_caches->length() <= cache_id) {
+ __ Abort("Attempt to use undefined cache.");
+ __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
+ context()->Plug(r0);
+ return;
+ }
+
+ VisitForAccumulatorValue(args->at(1));
+
+ Register key = r0;
+ Register cache = r1;
+ __ ldr(cache, ContextOperand(cp, Context::GLOBAL_INDEX));
+ __ ldr(cache, FieldMemOperand(cache, GlobalObject::kGlobalContextOffset));
+ __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
+ __ ldr(cache,
+ FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
+
+ Label done, not_found;
+ ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
+ __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
+ // r2 now holds finger offset as a smi.
+ __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ // r3 now points to the start of fixed array elements.
+ __ ldr(r2, MemOperand(r3, r2, LSL, kPointerSizeLog2 - kSmiTagSize, PreIndex));
+ // Note side effect of PreIndex: r3 now points to the key of the pair.
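+ // (A JSFunctionResultCache is a FixedArray of interleaved (key, value)
+ // pairs; the finger records where the last hit was found. This fast
+ // path only probes the finger's entry and otherwise defers to the
+ // runtime, which does the full search and updates the finger.)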
+ __ cmp(key, r2);
+ __ b(ne, &not_found);
+
+ __ ldr(r0, MemOperand(r3, kPointerSize));
+ __ b(&done);
+
+ __ bind(&not_found);
+ // Call runtime to perform the lookup.
+ __ Push(cache, key);
+ __ CallRuntime(Runtime::kGetFromCache, 2);
+
+ __ bind(&done);
+ context()->Plug(r0);
+ }
+
+
+ void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) {
+ ASSERT_EQ(2, args->length());
+
+ Register right = r0;
+ Register left = r1;
+ Register tmp = r2;
+ Register tmp2 = r3;
+
+ VisitForStackValue(args->at(0));
+ VisitForAccumulatorValue(args->at(1));
+ __ pop(left);
+
+ Label done, fail, ok;
+ __ cmp(left, Operand(right));
+ __ b(eq, &ok);
+ // Fail if either is a non-HeapObject.
+ __ and_(tmp, left, Operand(right));
+ __ tst(tmp, Operand(kSmiTagMask));
+ __ b(eq, &fail);
+ __ ldr(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
+ __ ldrb(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
+ __ cmp(tmp2, Operand(JS_REGEXP_TYPE));
+ __ b(ne, &fail);
+ __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
+ __ cmp(tmp, Operand(tmp2));
+ __ b(ne, &fail);
+ __ ldr(tmp, FieldMemOperand(left, JSRegExp::kDataOffset));
+ __ ldr(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset));
+ __ cmp(tmp, tmp2);
+ __ b(eq, &ok);
+ __ bind(&fail);
+ __ LoadRoot(r0, Heap::kFalseValueRootIndex);
+ __ jmp(&done);
+ __ bind(&ok);
+ __ LoadRoot(r0, Heap::kTrueValueRootIndex);
+ __ bind(&done);
+
+ context()->Plug(r0);
+ }
+
+
+ void FullCodeGenerator::EmitHasCachedArrayIndex(ZoneList<Expression*>* args) {
+ VisitForAccumulatorValue(args->at(0));
+
+ Label materialize_true, materialize_false;
+ Label* if_true = NULL;
+ Label* if_false = NULL;
+ Label* fall_through = NULL;
+ context()->PrepareTest(&materialize_true, &materialize_false,
+ &if_true, &if_false, &fall_through);
+
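+ // (A string's hash field can cache the result of parsing the string as
+ // an array index; the mask below tests the flag bits that say whether
+ // such a cached index is present.)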
+ __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
+ __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
+ Split(eq, if_true, if_false, fall_through);
+
+ context()->Plug(if_true, if_false);
+ }
+
+
+ void FullCodeGenerator::EmitGetCachedArrayIndex(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 1);
+ VisitForAccumulatorValue(args->at(0));
+
+ if (FLAG_debug_code) {
+ __ AbortIfNotString(r0);
+ }
+
+ __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
+ __ IndexFromHash(r0, r0);
+
+ context()->Plug(r0);
+ }
+
+
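+ // Fast path for Array.prototype.join on an array of flat ASCII strings:
+ // sum the string lengths, allocate one sequential ASCII result, then
+ // copy the pieces with a loop specialized on the separator length
+ // (empty, one character, or longer).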
+ void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
+ Label bailout, done, one_char_separator, long_separator,
+ non_trivial_array, not_size_one_array, loop,
+ empty_separator_loop, one_char_separator_loop,
+ one_char_separator_loop_entry, long_separator_loop;
+
+ ASSERT(args->length() == 2);
+ VisitForStackValue(args->at(1));
+ VisitForAccumulatorValue(args->at(0));
+
+ // All aliases of the same register have disjoint lifetimes.
+ Register array = r0;
+ Register elements = no_reg; // Will be r0.
+ Register result = no_reg; // Will be r0.
+ Register separator = r1;
+ Register array_length = r2;
+ Register result_pos = no_reg; // Will be r2.
+ Register string_length = r3;
+ Register string = r4;
+ Register element = r5;
+ Register elements_end = r6;
+ Register scratch1 = r7;
+ Register scratch2 = r9;
+
+ // Separator operand is on the stack.
+ __ pop(separator);
+
+ // Check that the array is a JSArray.
+ __ JumpIfSmi(array, &bailout);
+ __ CompareObjectType(array, scratch1, scratch2, JS_ARRAY_TYPE);
+ __ b(ne, &bailout);
+
+ // Check that the array has fast elements.
+ __ ldrb(scratch2, FieldMemOperand(scratch1, Map::kBitField2Offset));
+ __ tst(scratch2, Operand(1 << Map::kHasFastElements));
+ __ b(eq, &bailout);
+
+ // If the array has length zero, return the empty string.
+ __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
+ __ SmiUntag(array_length, SetCC);
+ __ b(ne, &non_trivial_array);
+ __ LoadRoot(r0, Heap::kEmptyStringRootIndex);
+ __ b(&done);
+
+ __ bind(&non_trivial_array);
+
+ // Get the FixedArray containing array's elements.
+ elements = array;
+ __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
+ array = no_reg; // End of array's live range.
+
+ // Check that all array elements are sequential ASCII strings, and
+ // accumulate the sum of their lengths, as a smi-encoded value.
+ __ mov(string_length, Operand(0));
+ __ add(element,
+ elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
+ // Loop condition: while (element < elements_end).
+ // Live values in registers:
+ // elements: Fixed array of strings.
+ // array_length: Length of the fixed array of strings (not smi).
+ // separator: Separator string.
+ // string_length: Accumulated sum of string lengths (smi).
+ // element: Current array element.
+ // elements_end: Array end.
+ if (FLAG_debug_code) {
+ __ cmp(array_length, Operand(0));
+ __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin");
+ }
+ __ bind(&loop);
+ __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
+ __ JumpIfSmi(string, &bailout);
+ __ ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
+ __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
+ __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
+ __ ldr(scratch1, FieldMemOperand(string, SeqAsciiString::kLengthOffset));
+ __ add(string_length, string_length, Operand(scratch1));
+ __ b(vs, &bailout);
+ __ cmp(element, elements_end);
+ __ b(lt, &loop);
+
+ // If array_length is 1, return elements[0], a string.
+ __ cmp(array_length, Operand(1));
+ __ b(ne, &not_size_one_array);
+ __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
+ __ b(&done);
+
+ __ bind(&not_size_one_array);
+
+ // Live values in registers:
+ // separator: Separator string.
+ // array_length: Length of the array.
+ // string_length: Sum of string lengths (smi).
+ // elements: FixedArray of strings.
+
+ // Check that the separator is a flat ASCII string.
+ __ JumpIfSmi(separator, &bailout);
+ __ ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
+ __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
+ __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
+
+ // Add (separator length times array_length) - separator length to the
+ // string_length to get the length of the result string. array_length is
+ // not a smi but the other values are, so the result is a smi.
+ __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
+ __ sub(string_length, string_length, Operand(scratch1));
+ __ smull(scratch2, ip, array_length, scratch1);
+ // Check for smi overflow. No overflow if the higher 33 bits of the 64-bit
+ // result are zero.
+ __ cmp(ip, Operand(0));
+ __ b(ne, &bailout);
+ __ tst(scratch2, Operand(0x80000000));
+ __ b(ne, &bailout);
+ __ add(string_length, string_length, Operand(scratch2));
+ __ b(vs, &bailout);
+ __ SmiUntag(string_length);
+
+ // Get first element in the array to free up the elements register to be used
+ // for the result.
+ __ add(element,
+ elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ result = elements; // End of live range for elements.
+ elements = no_reg;
+ // Live values in registers:
+ // element: First array element.
+ // separator: Separator string.
+ // string_length: Length of result string (not smi).
+ // array_length: Length of the array.
+ __ AllocateAsciiString(result,
+ string_length,
+ scratch1,
+ scratch2,
+ elements_end,
+ &bailout);
+ // Prepare for looping. Set up elements_end to the end of the array. Set
+ // result_pos to the position in the result where to write the first
+ // character.
+ __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
+ result_pos = array_length; // End of live range for array_length.
+ array_length = no_reg;
+ __ add(result_pos,
+ result,
+ Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+
+ // Check the length of the separator.
+ __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
+ __ cmp(scratch1, Operand(Smi::FromInt(1)));
+ __ b(eq, &one_char_separator);
+ __ b(gt, &long_separator);
+
+ // Empty separator case.
+ __ bind(&empty_separator_loop);
+ // Live values in registers:
+ // result_pos: the position to which we are currently copying characters.
+ // element: Current array element.
+ // elements_end: Array end.
+
+ // Copy next array element to the result.
+ __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
+ __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
+ __ SmiUntag(string_length);
+ __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ __ CopyBytes(string, result_pos, string_length, scratch1);
+ __ cmp(element, elements_end);
+ __ b(lt, &empty_separator_loop); // End while (element < elements_end).
+ ASSERT(result.is(r0));
+ __ b(&done);
+
+ // One-character separator case.
+ __ bind(&one_char_separator);
+ // Replace separator with its ASCII character value.
+ __ ldrb(separator, FieldMemOperand(separator, SeqAsciiString::kHeaderSize));
+ // Jump into the loop after the code that copies the separator, so the first
+ // element is not preceded by a separator.
+ __ jmp(&one_char_separator_loop_entry);
+
+ __ bind(&one_char_separator_loop);
+ // Live values in registers:
+ // result_pos: the position to which we are currently copying characters.
+ // element: Current array element.
+ // elements_end: Array end.
+ // separator: Single separator ASCII char (in lower byte).
+
+ // Copy the separator character to the result.
+ __ strb(separator, MemOperand(result_pos, 1, PostIndex));
+
+ // Copy next array element to the result.
+ __ bind(&one_char_separator_loop_entry);
+ __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
+ __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
+ __ SmiUntag(string_length);
+ __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ __ CopyBytes(string, result_pos, string_length, scratch1);
+ __ cmp(element, elements_end);
+ __ b(lt, &one_char_separator_loop); // End while (element < elements_end).
+ ASSERT(result.is(r0));
+ __ b(&done);
+
+ // Long separator case (separator is more than one character). Entry is at
+ // the label long_separator below.
+ __ bind(&long_separator_loop);
+ // Live values in registers:
+ // result_pos: the position to which we are currently copying characters.
+ // element: Current array element.
+ // elements_end: Array end.
+ // separator: Separator string.
+
+ // Copy the separator to the result.
+ __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
+ __ SmiUntag(string_length);
+ __ add(string,
+ separator,
+ Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ __ CopyBytes(string, result_pos, string_length, scratch1);
+
+ __ bind(&long_separator);
+ __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
+ __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
+ __ SmiUntag(string_length);
+ __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ __ CopyBytes(string, result_pos, string_length, scratch1);
+ __ cmp(element, elements_end);
+ __ b(lt, &long_separator_loop); // End while (element < elements_end).
+ ASSERT(result.is(r0));
+ __ b(&done);
+
+ __ bind(&bailout);
+ __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
+ __ bind(&done);
+ context()->Plug(r0);
+ }
+
+
+ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
+ Handle<String> name = expr->name();
+ if (name->length() > 0 && name->Get(0) == '_') {
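+ // (Runtime names beginning with '_' (written %_Foo(...) in natives
+ // syntax) are intrinsics expanded inline by the Emit* helpers above;
+ // plain %Foo(...) calls go through the C++ runtime.)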
+ Comment cmnt(masm_, "[ InlineRuntimeCall");
+ EmitInlineRuntimeCall(expr);
+ return;
+ }
+
+ Comment cmnt(masm_, "[ CallRuntime");
+ ZoneList<Expression*>* args = expr->arguments();
+
+ if (expr->is_jsruntime()) {
+ // Prepare for calling JS runtime function.
+ __ ldr(r0, GlobalObjectOperand());
+ __ ldr(r0, FieldMemOperand(r0, GlobalObject::kBuiltinsOffset));
+ __ push(r0);
+ }
+
+ // Push the arguments ("left-to-right").
+ int arg_count = args->length();
+ for (int i = 0; i < arg_count; i++) {
+ VisitForStackValue(args->at(i));
+ }
+
+ if (expr->is_jsruntime()) {
+ // Call the JS runtime function.
+ __ mov(r2, Operand(expr->name()));
+ Handle<Code> ic =
+ isolate()->stub_cache()->ComputeCallInitialize(arg_count, NOT_IN_LOOP);
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ // Restore context register.
+ __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+ } else {
+ // Call the C runtime function.
+ __ CallRuntime(expr->function(), arg_count);
+ }
+ context()->Plug(r0);
+ }
+
+
+ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
+ switch (expr->op()) {
+ case Token::DELETE: {
+ Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
+ Property* prop = expr->expression()->AsProperty();
+ Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
+
+ if (prop != NULL) {
+ if (prop->is_synthetic()) {
+ // Result of deleting parameters is false, even when they rewrite
+ // to accesses on the arguments object.
+ context()->Plug(false);
+ } else {
+ VisitForStackValue(prop->obj());
+ VisitForStackValue(prop->key());
+ __ mov(r1, Operand(Smi::FromInt(strict_mode_flag())));
+ __ push(r1);
+ __ InvokeBuiltin(Builtins::DELETE, CALL_JS);
+ context()->Plug(r0);
+ }
+ } else if (var != NULL) {
+ // Delete of an unqualified identifier is disallowed in strict mode,
+ // but "delete this" is allowed.
+ ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this());
+ if (var->is_global()) {
+ __ ldr(r2, GlobalObjectOperand());
+ __ mov(r1, Operand(var->name()));
+ __ mov(r0, Operand(Smi::FromInt(kNonStrictMode)));
+ __ Push(r2, r1, r0);
+ __ InvokeBuiltin(Builtins::DELETE, CALL_JS);
+ context()->Plug(r0);
+ } else if (var->AsSlot() != NULL &&
+ var->AsSlot()->type() != Slot::LOOKUP) {
+ // Result of deleting non-global, non-dynamic variables is false.
+ // The subexpression does not have side effects.
+ context()->Plug(false);
+ } else {
+ // Non-global variable. Call the runtime to try to delete from the
+ // context where the variable was introduced.
+ __ push(context_register());
+ __ mov(r2, Operand(var->name()));
+ __ push(r2);
+ __ CallRuntime(Runtime::kDeleteContextSlot, 2);
+ context()->Plug(r0);
+ }
+ } else {
+ // Result of deleting non-property, non-variable reference is true.
+ // The subexpression may have side effects.
+ VisitForEffect(expr->expression());
+ context()->Plug(true);
+ }
+ break;
+ }
+
+ case Token::VOID: {
+ Comment cmnt(masm_, "[ UnaryOperation (VOID)");
+ VisitForEffect(expr->expression());
+ context()->Plug(Heap::kUndefinedValueRootIndex);
+ break;
+ }
+
+ case Token::NOT: {
+ Comment cmnt(masm_, "[ UnaryOperation (NOT)");
+ if (context()->IsEffect()) {
+ // Unary NOT has no side effects so it's only necessary to visit the
+ // subexpression. Match the optimizing compiler by not branching.
+ VisitForEffect(expr->expression());
+ } else {
+ Label materialize_true, materialize_false;
+ Label* if_true = NULL;
+ Label* if_false = NULL;
+ Label* fall_through = NULL;
+
+ // Notice that the labels are swapped.
+ context()->PrepareTest(&materialize_true, &materialize_false,
+ &if_false, &if_true, &fall_through);
+ if (context()->IsTest()) ForwardBailoutToChild(expr);
+ VisitForControl(expr->expression(), if_true, if_false, fall_through);
+ context()->Plug(if_false, if_true); // Labels swapped.
+ }
+ break;
+ }
+
+ case Token::TYPEOF: {
+ Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
+ { StackValueContext context(this);
+ VisitForTypeofValue(expr->expression());
+ }
+ __ CallRuntime(Runtime::kTypeof, 1);
+ context()->Plug(r0);
+ break;
+ }
+
+ case Token::ADD: {
+ Comment cmt(masm_, "[ UnaryOperation (ADD)");
+ VisitForAccumulatorValue(expr->expression());
+ Label no_conversion;
+ __ tst(result_register(), Operand(kSmiTagMask));
+ __ b(eq, &no_conversion);
+ ToNumberStub convert_stub;
+ __ CallStub(&convert_stub);
+ __ bind(&no_conversion);
+ context()->Plug(result_register());
+ break;
+ }
+
+ case Token::SUB: {
+ Comment cmt(masm_, "[ UnaryOperation (SUB)");
+ bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
+ UnaryOverwriteMode overwrite =
+ can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
+ GenericUnaryOpStub stub(Token::SUB, overwrite, NO_UNARY_FLAGS);
+ // GenericUnaryOpStub expects the argument to be in the
+ // accumulator register r0.
+ VisitForAccumulatorValue(expr->expression());
+ __ CallStub(&stub);
+ context()->Plug(r0);
+ break;
+ }
+
+ case Token::BIT_NOT: {
+ Comment cmt(masm_, "[ UnaryOperation (BIT_NOT)");
+ // The generic unary operation stub expects the argument to be
+ // in the accumulator register r0.
+ VisitForAccumulatorValue(expr->expression());
+ Label done;
+ bool inline_smi_code = ShouldInlineSmiCase(expr->op());
+ if (inline_smi_code) {
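+ // (For a smi, mvn flips the tag bit to 1 along with the payload bits;
+ // clearing the tag bit again leaves exactly the smi encoding of the
+ // bitwise-not of the original value.)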
+ Label call_stub;
+ __ JumpIfNotSmi(r0, &call_stub);
+ __ mvn(r0, Operand(r0));
+ // Bit-clear inverted smi-tag.
+ __ bic(r0, r0, Operand(kSmiTagMask));
+ __ b(&done);
+ __ bind(&call_stub);
+ }
+ bool overwrite = expr->expression()->ResultOverwriteAllowed();
+ UnaryOpFlags flags = inline_smi_code
+ ? NO_UNARY_SMI_CODE_IN_STUB
+ : NO_UNARY_FLAGS;
+ UnaryOverwriteMode mode =
+ overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
+ GenericUnaryOpStub stub(Token::BIT_NOT, mode, flags);
+ __ CallStub(&stub);
+ __ bind(&done);
+ context()->Plug(r0);
+ break;
+ }
+
+ default:
+ UNREACHABLE();
+ }
+ }
+
+
+ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
+ Comment cmnt(masm_, "[ CountOperation");
+ SetSourcePosition(expr->position());
+
+ // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
+ // as the left-hand side.
+ if (!expr->expression()->IsValidLeftHandSide()) {
+ VisitForEffect(expr->expression());
+ return;
+ }
+
+ // Expression can only be a property, a global or a (parameter or local)
+ // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
+ enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
+ LhsKind assign_type = VARIABLE;
+ Property* prop = expr->expression()->AsProperty();
+ // In case of a property we use the uninitialized expression context
+ // of the key to detect a named property.
+ if (prop != NULL) {
+ assign_type =
+ (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
+ }
+
+ // Evaluate expression and get value.
+ if (assign_type == VARIABLE) {
+ ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
+ AccumulatorValueContext context(this);
+ EmitVariableLoad(expr->expression()->AsVariableProxy()->var());
+ } else {
+ // Reserve space for result of postfix operation.
+ if (expr->is_postfix() && !context()->IsEffect()) {
+ __ mov(ip, Operand(Smi::FromInt(0)));
+ __ push(ip);
+ }
+ if (assign_type == NAMED_PROPERTY) {
+ // Put the object both on the stack and in the accumulator.
+ VisitForAccumulatorValue(prop->obj());
+ __ push(r0);
+ EmitNamedPropertyLoad(prop);
+ } else {
+ if (prop->is_arguments_access()) {
+ VariableProxy* obj_proxy = prop->obj()->AsVariableProxy();
+ __ ldr(r0, EmitSlotSearch(obj_proxy->var()->AsSlot(), r0));
+ __ push(r0);
+ __ mov(r0, Operand(prop->key()->AsLiteral()->handle()));
+ } else {
+ VisitForStackValue(prop->obj());
+ VisitForAccumulatorValue(prop->key());
+ }
+ __ ldr(r1, MemOperand(sp, 0));
+ __ push(r0);
+ EmitKeyedPropertyLoad(prop);
+ }
+ }
+
+ // We need a second deoptimization point after loading the value
+ // in case evaluating the property load may have a side effect.
+ if (assign_type == VARIABLE) {
+ PrepareForBailout(expr->expression(), TOS_REG);
+ } else {
+ PrepareForBailout(expr->increment(), TOS_REG);
+ }
+
+ // Call ToNumber only if operand is not a smi.
+ Label no_conversion;
+ __ JumpIfSmi(r0, &no_conversion);
+ ToNumberStub convert_stub;
+ __ CallStub(&convert_stub);
+ __ bind(&no_conversion);
+
+ // Save result for postfix expressions.
+ if (expr->is_postfix()) {
+ if (!context()->IsEffect()) {
+ // Save the result on the stack. If we have a named or keyed property
+ // we store the result under the receiver that is currently on top
+ // of the stack.
+ switch (assign_type) {
+ case VARIABLE:
+ __ push(r0);
+ break;
+ case NAMED_PROPERTY:
+ __ str(r0, MemOperand(sp, kPointerSize));
+ break;
+ case KEYED_PROPERTY:
+ __ str(r0, MemOperand(sp, 2 * kPointerSize));
+ break;
+ }
+ }
+ }
+
+ // Inline smi case if we are in a loop.
+ Label stub_call, done;
+ JumpPatchSite patch_site(masm_);
+
+ int count_value = expr->op() == Token::INC ? 1 : -1;
+ if (ShouldInlineSmiCase(expr->op())) {
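+ // (count_value is added as a smi: the tag bit is zero, so plain
+ // addition implements smi arithmetic, and the V flag catches overflow
+ // out of the 31-bit payload.)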
+ __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
+ __ b(vs, &stub_call);
+ // We could eliminate this smi check if we split the code at
+ // the first smi check before calling ToNumber.
+ patch_site.EmitJumpIfSmi(r0, &done);
+
+ __ bind(&stub_call);
+ // Call stub. Undo operation first.
+ __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
+ }
+ __ mov(r1, Operand(Smi::FromInt(count_value)));
+
+ // Record position before stub call.
+ SetSourcePosition(expr->position());
+
+ TypeRecordingBinaryOpStub stub(Token::ADD, NO_OVERWRITE);
+ EmitCallIC(stub.GetCode(), &patch_site);
+ __ bind(&done);
+
+ // Store the value returned in r0.
+ switch (assign_type) {
+ case VARIABLE:
+ if (expr->is_postfix()) {
+ { EffectContext context(this);
+ EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
+ Token::ASSIGN);
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
+ context.Plug(r0);
+ }
+ // For all contexts except the effect context, we have the result on
+ // top of the stack.
+ if (!context()->IsEffect()) {
+ context()->PlugTOS();
+ }
+ } else {
+ EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
+ Token::ASSIGN);
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
+ context()->Plug(r0);
+ }
+ break;
+ case NAMED_PROPERTY: {
+ __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
+ __ pop(r1);
+ Handle<Code> ic = is_strict_mode()
+ ? isolate()->builtins()->StoreIC_Initialize_Strict()
+ : isolate()->builtins()->StoreIC_Initialize();
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
+ if (expr->is_postfix()) {
+ if (!context()->IsEffect()) {
+ context()->PlugTOS();
+ }
+ } else {
+ context()->Plug(r0);
+ }
+ break;
+ }
+ case KEYED_PROPERTY: {
+ __ pop(r1); // Key.
+ __ pop(r2); // Receiver.
+ Handle<Code> ic = is_strict_mode()
+ ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
+ : isolate()->builtins()->KeyedStoreIC_Initialize();
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
+ if (expr->is_postfix()) {
+ if (!context()->IsEffect()) {
+ context()->PlugTOS();
+ }
+ } else {
+ context()->Plug(r0);
+ }
+ break;
+ }
+ }
+ }
+
+
+ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
+ ASSERT(!context()->IsEffect());
+ ASSERT(!context()->IsTest());
+ VariableProxy* proxy = expr->AsVariableProxy();
+ if (proxy != NULL && !proxy->var()->is_this() && proxy->var()->is_global()) {
+ Comment cmnt(masm_, "Global variable");
+ __ ldr(r0, GlobalObjectOperand());
+ __ mov(r2, Operand(proxy->name()));
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
+ // Use a regular load, not a contextual load, to avoid a reference
+ // error.
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ PrepareForBailout(expr, TOS_REG);
+ context()->Plug(r0);
+ } else if (proxy != NULL &&
+ proxy->var()->AsSlot() != NULL &&
+ proxy->var()->AsSlot()->type() == Slot::LOOKUP) {
+ Label done, slow;
+
+ // Generate code for loading from variables potentially shadowed
+ // by eval-introduced variables.
+ Slot* slot = proxy->var()->AsSlot();
+ EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done);
+
+ __ bind(&slow);
+ __ mov(r0, Operand(proxy->name()));
+ __ Push(cp, r0);
+ __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
+ PrepareForBailout(expr, TOS_REG);
+ __ bind(&done);
+
+ context()->Plug(r0);
+ } else {
+ // This expression cannot throw a reference error at the top level.
+ context()->HandleExpression(expr);
+ }
+ }
+
+
+ bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
+ Expression* left,
+ Expression* right,
+ Label* if_true,
+ Label* if_false,
+ Label* fall_through) {
+ if (op != Token::EQ && op != Token::EQ_STRICT) return false;
+
+ // Check for the pattern: typeof <expression> == <string literal>.
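+ // (This lets an expression like (typeof x == 'number') compile to a
+ // direct map or instance-type check without ever materializing the
+ // typeof result string.)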
+ Literal* right_literal = right->AsLiteral();
+ if (right_literal == NULL) return false;
+ Handle<Object> right_literal_value = right_literal->handle();
+ if (!right_literal_value->IsString()) return false;
+ UnaryOperation* left_unary = left->AsUnaryOperation();
+ if (left_unary == NULL || left_unary->op() != Token::TYPEOF) return false;
+ Handle<String> check = Handle<String>::cast(right_literal_value);
+
+ { AccumulatorValueContext context(this);
+ VisitForTypeofValue(left_unary->expression());
+ }
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
+
+ if (check->Equals(isolate()->heap()->number_symbol())) {
+ __ JumpIfSmi(r0, if_true);
+ __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
+ __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
+ __ cmp(r0, ip);
+ Split(eq, if_true, if_false, fall_through);
+ } else if (check->Equals(isolate()->heap()->string_symbol())) {
+ __ JumpIfSmi(r0, if_false);
+ // Check for undetectable objects => false.
+ __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
+ __ b(ge, if_false);
+ __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
+ __ tst(r1, Operand(1 << Map::kIsUndetectable));
+ Split(eq, if_true, if_false, fall_through);
+ } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
+ __ CompareRoot(r0, Heap::kTrueValueRootIndex);
+ __ b(eq, if_true);
+ __ CompareRoot(r0, Heap::kFalseValueRootIndex);
+ Split(eq, if_true, if_false, fall_through);
+ } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
+ __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
+ __ b(eq, if_true);
+ __ JumpIfSmi(r0, if_false);
+ // Check for undetectable objects => true.
+ __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
+ __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
+ __ tst(r1, Operand(1 << Map::kIsUndetectable));
+ Split(ne, if_true, if_false, fall_through);
+
+ } else if (check->Equals(isolate()->heap()->function_symbol())) {
+ __ JumpIfSmi(r0, if_false);
+ __ CompareObjectType(r0, r1, r0, FIRST_FUNCTION_CLASS_TYPE);
+ Split(ge, if_true, if_false, fall_through);
+
+ } else if (check->Equals(isolate()->heap()->object_symbol())) {
+ __ JumpIfSmi(r0, if_false);
+ __ CompareRoot(r0, Heap::kNullValueRootIndex);
+ __ b(eq, if_true);
+ // Check for JS objects => true.
+ __ CompareObjectType(r0, r0, r1, FIRST_JS_OBJECT_TYPE);
+ __ b(lo, if_false);
+ __ CompareInstanceType(r0, r1, FIRST_FUNCTION_CLASS_TYPE);
+ __ b(hs, if_false);
+ // Check for undetectable objects => false.
+ __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
+ __ tst(r1, Operand(1 << Map::kIsUndetectable));
+ Split(eq, if_true, if_false, fall_through);
+ } else {
+ if (if_false != fall_through) __ jmp(if_false);
+ }
+
+ return true;
+ }
+
+
+ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
+ Comment cmnt(masm_, "[ CompareOperation");
+ SetSourcePosition(expr->position());
+
+ // Always perform the comparison for its control flow. Pack the result
+ // into the expression's context after the comparison is performed.
+
+ Label materialize_true, materialize_false;
+ Label* if_true = NULL;
+ Label* if_false = NULL;
+ Label* fall_through = NULL;
+ context()->PrepareTest(&materialize_true, &materialize_false,
+ &if_true, &if_false, &fall_through);
+
+ // First we try a fast inlined version of the compare when one of
+ // the operands is a literal.
+ Token::Value op = expr->op();
+ Expression* left = expr->left();
+ Expression* right = expr->right();
+ if (TryLiteralCompare(op, left, right, if_true, if_false, fall_through)) {
+ context()->Plug(if_true, if_false);
+ return;
+ }
+
+ VisitForStackValue(expr->left());
+ switch (op) {
+ case Token::IN:
+ VisitForStackValue(expr->right());
+ __ InvokeBuiltin(Builtins::IN, CALL_JS);
+ PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
+ __ LoadRoot(ip, Heap::kTrueValueRootIndex);
+ __ cmp(r0, ip);
+ Split(eq, if_true, if_false, fall_through);
+ break;
+
+ case Token::INSTANCEOF: {
+ VisitForStackValue(expr->right());
+ InstanceofStub stub(InstanceofStub::kNoFlags);
+ __ CallStub(&stub);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
+ // The stub returns 0 for true.
+ __ tst(r0, r0);
+ Split(eq, if_true, if_false, fall_through);
+ break;
+ }
+
+ default: {
+ VisitForAccumulatorValue(expr->right());
+ Condition cond = eq;
+ bool strict = false;
+ switch (op) {
+ case Token::EQ_STRICT:
+ strict = true;
+ // Fall through.
+ case Token::EQ:
+ cond = eq;
+ __ pop(r1);
+ break;
+ case Token::LT:
+ cond = lt;
+ __ pop(r1);
+ break;
+ case Token::GT:
+ // Reverse left and right sides to obtain ECMA-262 conversion order.
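+ // (i.e. a > b is evaluated as b < a with the operands swapped into the
+ // opposite registers, so the same lt/ge conditions serve both orders.)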
+ cond = lt;
+ __ mov(r1, result_register());
+ __ pop(r0);
+ break;
+ case Token::LTE:
+ // Reverse left and right sides to obtain ECMA-262 conversion order.
+ cond = ge;
+ __ mov(r1, result_register());
+ __ pop(r0);
+ break;
+ case Token::GTE:
+ cond = ge;
+ __ pop(r1);
+ break;
+ case Token::IN:
+ case Token::INSTANCEOF:
+ default:
+ UNREACHABLE();
+ }
+
+ bool inline_smi_code = ShouldInlineSmiCase(op);
+ JumpPatchSite patch_site(masm_);
+ if (inline_smi_code) {
+ Label slow_case;
+ __ orr(r2, r0, Operand(r1));
+ patch_site.EmitJumpIfNotSmi(r2, &slow_case);
+ __ cmp(r1, r0);
+ Split(cond, if_true, if_false, NULL);
+ __ bind(&slow_case);
+ }
+
+ // Record position and call the compare IC.
+ SetSourcePosition(expr->position());
+ Handle<Code> ic = CompareIC::GetUninitialized(op);
+ EmitCallIC(ic, &patch_site);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
+ __ cmp(r0, Operand(0));
+ Split(cond, if_true, if_false, fall_through);
+ }
+ }
+
+ // Convert the result of the comparison into one expected for this
+ // expression's context.
+ context()->Plug(if_true, if_false);
+ }
+
+
+ void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) {
+ Comment cmnt(masm_, "[ CompareToNull");
+ Label materialize_true, materialize_false;
+ Label* if_true = NULL;
+ Label* if_false = NULL;
+ Label* fall_through = NULL;
+ context()->PrepareTest(&materialize_true, &materialize_false,
+ &if_true, &if_false, &fall_through);
+
+ VisitForAccumulatorValue(expr->expression());
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
+ __ LoadRoot(r1, Heap::kNullValueRootIndex);
+ __ cmp(r0, r1);
+ if (expr->is_strict()) {
+ Split(eq, if_true, if_false, fall_through);
+ } else {
+ __ b(eq, if_true);
+ __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
+ __ cmp(r0, r1);
+ __ b(eq, if_true);
+ __ tst(r0, Operand(kSmiTagMask));
+ __ b(eq, if_false);
+ // It can be an undetectable object.
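+ // (Undetectable objects, e.g. host objects like document.all in
+ // browsers that emulate it, compare equal to null and undefined
+ // under the non-strict == operator.)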
+ __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
+ __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
+ __ and_(r1, r1, Operand(1 << Map::kIsUndetectable));
+ __ cmp(r1, Operand(1 << Map::kIsUndetectable));
+ Split(eq, if_true, if_false, fall_through);
+ }
+ context()->Plug(if_true, if_false);
+ }
+
+
+ void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
+ __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+ context()->Plug(r0);
+ }
+
+
+ Register FullCodeGenerator::result_register() {
+ return r0;
+ }
+
+
+ Register FullCodeGenerator::context_register() {
+ return cp;
+ }
+
+
+ void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
+ ASSERT(mode == RelocInfo::CODE_TARGET ||
+ mode == RelocInfo::CODE_TARGET_CONTEXT);
+ Counters* counters = isolate()->counters();
+ switch (ic->kind()) {
+ case Code::LOAD_IC:
+ __ IncrementCounter(counters->named_load_full(), 1, r1, r2);
+ break;
+ case Code::KEYED_LOAD_IC:
+ __ IncrementCounter(counters->keyed_load_full(), 1, r1, r2);
+ break;
+ case Code::STORE_IC:
+ __ IncrementCounter(counters->named_store_full(), 1, r1, r2);
+ break;
+ case Code::KEYED_STORE_IC:
+ __ IncrementCounter(counters->keyed_store_full(), 1, r1, r2);
+ default:
+ break;
+ }
+
+ __ Call(ic, mode);
+ }
+
+
+ void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
+ Counters* counters = isolate()->counters();
+ switch (ic->kind()) {
+ case Code::LOAD_IC:
+ __ IncrementCounter(counters->named_load_full(), 1, r1, r2);
+ break;
+ case Code::KEYED_LOAD_IC:
+ __ IncrementCounter(counters->keyed_load_full(), 1, r1, r2);
+ break;
+ case Code::STORE_IC:
+ __ IncrementCounter(counters->named_store_full(), 1, r1, r2);
+ break;
+ case Code::KEYED_STORE_IC:
+ __ IncrementCounter(counters->keyed_store_full(), 1, r1, r2);
+ default:
+ break;
+ }
+
+ __ Call(ic, RelocInfo::CODE_TARGET);
+ if (patch_site != NULL && patch_site->is_bound()) {
+ patch_site->EmitPatchInfo();
+ } else {
+ __ nop(); // Signals no inlined code.
+ }
+ }
+
+
+ void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
+ ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
+ __ str(value, MemOperand(fp, frame_offset));
+ }
+
+
+ void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
+ __ ldr(dst, ContextOperand(cp, context_index));
+ }
+
+
+ // ----------------------------------------------------------------------------
+ // Non-local control flow support.
+
+ void FullCodeGenerator::EnterFinallyBlock() {
+ ASSERT(!result_register().is(r1));
+ // Store result register while executing finally block.
+ __ push(result_register());
+ // Cook return address in link register to stack (smi-encoded Code* delta).
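+ // (A raw return address on the stack would be corrupted if the GC moved
+ // the code object; a smi-encoded offset from the code object's start is
+ // GC-safe and is converted back to an address in ExitFinallyBlock.)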
+ __ sub(r1, lr, Operand(masm_->CodeObject()));
+ ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
+ ASSERT_EQ(0, kSmiTag);
+ __ add(r1, r1, Operand(r1)); // Convert to smi.
+ __ push(r1);
+ }
+
+
+ void FullCodeGenerator::ExitFinallyBlock() {
+ ASSERT(!result_register().is(r1));
+ // Pop the cooked return address into r1 (it was pushed last).
+ __ pop(r1);
+ // Restore result register from stack.
+ __ pop(result_register());
+ // Uncook return address and return.
+ ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
+ __ mov(r1, Operand(r1, ASR, 1)); // Un-smi-tag value.
+ __ add(pc, r1, Operand(masm_->CodeObject()));
+ }
+
+
+ #undef __
+
+ } } // namespace v8::internal
+
+ #endif // V8_TARGET_ARCH_ARM