therubyracer 0.8.1.pre2 → 0.8.1

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of therubyracer might be problematic. Click here for more details.

Files changed (877) hide show
  1. data/Changelog.md +2 -1
  2. data/README.md +6 -3
  3. data/ext/v8/upstream/3.1.8/.gitignore +31 -0
  4. data/ext/v8/upstream/3.1.8/AUTHORS +40 -0
  5. data/ext/v8/upstream/3.1.8/ChangeLog +2566 -0
  6. data/ext/v8/upstream/3.1.8/LICENSE +52 -0
  7. data/ext/v8/upstream/3.1.8/LICENSE.strongtalk +29 -0
  8. data/ext/v8/upstream/3.1.8/LICENSE.v8 +26 -0
  9. data/ext/v8/upstream/3.1.8/LICENSE.valgrind +45 -0
  10. data/ext/v8/upstream/3.1.8/SConstruct +1192 -0
  11. data/ext/v8/upstream/3.1.8/build/README.txt +25 -0
  12. data/ext/v8/upstream/3.1.8/build/all.gyp +18 -0
  13. data/ext/v8/upstream/3.1.8/build/armu.gypi +32 -0
  14. data/ext/v8/upstream/3.1.8/build/common.gypi +82 -0
  15. data/ext/v8/upstream/3.1.8/build/gyp_v8 +145 -0
  16. data/ext/v8/upstream/3.1.8/include/v8-debug.h +384 -0
  17. data/ext/v8/upstream/3.1.8/include/v8-preparser.h +116 -0
  18. data/ext/v8/upstream/3.1.8/include/v8-profiler.h +426 -0
  19. data/ext/v8/upstream/3.1.8/include/v8-testing.h +99 -0
  20. data/ext/v8/upstream/3.1.8/include/v8.h +3846 -0
  21. data/ext/v8/upstream/3.1.8/include/v8stdint.h +53 -0
  22. data/ext/v8/upstream/3.1.8/preparser/preparser-process.cc +206 -0
  23. data/ext/v8/upstream/3.1.8/src/SConscript +356 -0
  24. data/ext/v8/upstream/3.1.8/src/accessors.cc +907 -0
  25. data/ext/v8/upstream/3.1.8/src/accessors.h +121 -0
  26. data/ext/v8/upstream/3.1.8/src/allocation.cc +204 -0
  27. data/ext/v8/upstream/3.1.8/src/allocation.h +176 -0
  28. data/ext/v8/upstream/3.1.8/src/api.cc +5191 -0
  29. data/ext/v8/upstream/3.1.8/src/api.h +508 -0
  30. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/apinatives.js +0 -0
  31. data/ext/v8/upstream/3.1.8/src/apiutils.h +80 -0
  32. data/ext/v8/upstream/3.1.8/src/arguments.h +105 -0
  33. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm-inl.h +352 -0
  34. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.cc +2756 -0
  35. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.h +1294 -0
  36. data/ext/v8/upstream/3.1.8/src/arm/builtins-arm.cc +1628 -0
  37. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.cc +6783 -0
  38. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.h +657 -0
  39. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm-inl.h +48 -0
  40. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.cc +7403 -0
  41. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.h +595 -0
  42. data/ext/v8/upstream/3.1.8/src/arm/constants-arm.cc +152 -0
  43. data/ext/v8/upstream/3.1.8/src/arm/constants-arm.h +769 -0
  44. data/ext/v8/upstream/3.1.8/src/arm/cpu-arm.cc +147 -0
  45. data/ext/v8/upstream/3.1.8/src/arm/debug-arm.cc +315 -0
  46. data/ext/v8/upstream/3.1.8/src/arm/deoptimizer-arm.cc +700 -0
  47. data/ext/v8/upstream/3.1.8/src/arm/disasm-arm.cc +1439 -0
  48. data/ext/v8/upstream/3.1.8/src/arm/frames-arm.cc +45 -0
  49. data/ext/v8/upstream/3.1.8/src/arm/frames-arm.h +168 -0
  50. data/ext/v8/upstream/3.1.8/src/arm/full-codegen-arm.cc +4230 -0
  51. data/ext/v8/upstream/3.1.8/src/arm/ic-arm.cc +1799 -0
  52. data/ext/v8/upstream/3.1.8/src/arm/jump-target-arm.cc +174 -0
  53. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.cc +2041 -0
  54. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.h +2046 -0
  55. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.cc +3822 -0
  56. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.h +312 -0
  57. data/ext/v8/upstream/3.1.8/src/arm/lithium-gap-resolver-arm.cc +303 -0
  58. data/ext/v8/upstream/3.1.8/src/arm/lithium-gap-resolver-arm.h +84 -0
  59. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.cc +2701 -0
  60. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.h +1015 -0
  61. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.cc +1280 -0
  62. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.h +252 -0
  63. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/register-allocator-arm-inl.h +0 -0
  64. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/register-allocator-arm.cc +0 -0
  65. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/register-allocator-arm.h +0 -0
  66. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.cc +3165 -0
  67. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.h +402 -0
  68. data/ext/v8/upstream/3.1.8/src/arm/stub-cache-arm.cc +4077 -0
  69. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/virtual-frame-arm-inl.h +0 -0
  70. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.cc +843 -0
  71. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.h +520 -0
  72. data/ext/v8/upstream/3.1.8/src/array.js +1231 -0
  73. data/ext/v8/upstream/3.1.8/src/assembler.cc +973 -0
  74. data/ext/v8/upstream/3.1.8/src/assembler.h +787 -0
  75. data/ext/v8/upstream/3.1.8/src/ast-inl.h +107 -0
  76. data/ext/v8/upstream/3.1.8/src/ast.cc +1067 -0
  77. data/ext/v8/upstream/3.1.8/src/ast.h +2177 -0
  78. data/ext/v8/upstream/3.1.8/src/atomicops.h +165 -0
  79. data/ext/v8/upstream/3.1.8/src/atomicops_internals_arm_gcc.h +145 -0
  80. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_gcc.cc +126 -0
  81. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_gcc.h +287 -0
  82. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_macosx.h +301 -0
  83. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_msvc.h +203 -0
  84. data/ext/v8/upstream/3.1.8/src/bignum-dtoa.cc +655 -0
  85. data/ext/v8/upstream/3.1.8/src/bignum-dtoa.h +81 -0
  86. data/ext/v8/upstream/3.1.8/src/bignum.cc +768 -0
  87. data/ext/v8/upstream/3.1.8/src/bignum.h +140 -0
  88. data/ext/v8/upstream/3.1.8/src/bootstrapper.cc +1888 -0
  89. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/bootstrapper.h +0 -0
  90. data/ext/v8/upstream/3.1.8/src/builtins.cc +1586 -0
  91. data/ext/v8/upstream/3.1.8/src/builtins.h +339 -0
  92. data/ext/v8/upstream/3.1.8/src/bytecodes-irregexp.h +105 -0
  93. data/ext/v8/upstream/3.1.8/src/cached-powers.cc +177 -0
  94. data/ext/v8/upstream/3.1.8/src/cached-powers.h +65 -0
  95. data/ext/v8/upstream/3.1.8/src/char-predicates-inl.h +94 -0
  96. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/char-predicates.h +0 -0
  97. data/ext/v8/upstream/3.1.8/src/checks.cc +110 -0
  98. data/ext/v8/upstream/3.1.8/src/checks.h +292 -0
  99. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/circular-queue-inl.h +0 -0
  100. data/ext/v8/upstream/3.1.8/src/circular-queue.cc +122 -0
  101. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/circular-queue.h +0 -0
  102. data/ext/v8/upstream/3.1.8/src/code-stubs.cc +230 -0
  103. data/ext/v8/upstream/3.1.8/src/code-stubs.h +950 -0
  104. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/code.h +0 -0
  105. data/ext/v8/upstream/3.1.8/src/codegen-inl.h +64 -0
  106. data/ext/v8/upstream/3.1.8/src/codegen.cc +495 -0
  107. data/ext/v8/upstream/3.1.8/src/codegen.h +245 -0
  108. data/ext/v8/upstream/3.1.8/src/compilation-cache.cc +654 -0
  109. data/ext/v8/upstream/3.1.8/src/compilation-cache.h +112 -0
  110. data/ext/v8/upstream/3.1.8/src/compiler.cc +806 -0
  111. data/ext/v8/upstream/3.1.8/src/compiler.h +290 -0
  112. data/ext/v8/upstream/3.1.8/src/contexts.cc +320 -0
  113. data/ext/v8/upstream/3.1.8/src/contexts.h +376 -0
  114. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/conversions-inl.h +0 -0
  115. data/ext/v8/upstream/3.1.8/src/conversions.cc +1069 -0
  116. data/ext/v8/upstream/3.1.8/src/conversions.h +122 -0
  117. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/counters.cc +0 -0
  118. data/ext/v8/upstream/3.1.8/src/counters.h +242 -0
  119. data/ext/v8/upstream/3.1.8/src/cpu-profiler-inl.h +100 -0
  120. data/ext/v8/upstream/3.1.8/src/cpu-profiler.cc +554 -0
  121. data/ext/v8/upstream/3.1.8/src/cpu-profiler.h +291 -0
  122. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/cpu.h +0 -0
  123. data/ext/v8/upstream/3.1.8/src/d8-debug.cc +367 -0
  124. data/ext/v8/upstream/3.1.8/src/d8-debug.h +157 -0
  125. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/d8-posix.cc +0 -0
  126. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/d8-readline.cc +0 -0
  127. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/d8-windows.cc +0 -0
  128. data/ext/v8/upstream/3.1.8/src/d8.cc +792 -0
  129. data/ext/v8/upstream/3.1.8/src/d8.gyp +85 -0
  130. data/ext/v8/upstream/3.1.8/src/d8.h +231 -0
  131. data/ext/v8/upstream/3.1.8/src/d8.js +2798 -0
  132. data/ext/v8/upstream/3.1.8/src/data-flow.cc +545 -0
  133. data/ext/v8/upstream/3.1.8/src/data-flow.h +379 -0
  134. data/ext/v8/upstream/3.1.8/src/date.js +1103 -0
  135. data/ext/v8/upstream/3.1.8/src/dateparser-inl.h +125 -0
  136. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/dateparser.cc +0 -0
  137. data/ext/v8/upstream/3.1.8/src/dateparser.h +263 -0
  138. data/ext/v8/upstream/3.1.8/src/debug-agent.cc +446 -0
  139. data/ext/v8/upstream/3.1.8/src/debug-agent.h +131 -0
  140. data/ext/v8/upstream/3.1.8/src/debug-debugger.js +2569 -0
  141. data/ext/v8/upstream/3.1.8/src/debug.cc +3085 -0
  142. data/ext/v8/upstream/3.1.8/src/debug.h +1025 -0
  143. data/ext/v8/upstream/3.1.8/src/deoptimizer.cc +1185 -0
  144. data/ext/v8/upstream/3.1.8/src/deoptimizer.h +529 -0
  145. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/disasm.h +0 -0
  146. data/ext/v8/upstream/3.1.8/src/disassembler.cc +338 -0
  147. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/disassembler.h +0 -0
  148. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/diy-fp.cc +0 -0
  149. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/diy-fp.h +0 -0
  150. data/ext/v8/upstream/3.1.8/src/double.h +238 -0
  151. data/ext/v8/upstream/3.1.8/src/dtoa.cc +103 -0
  152. data/ext/v8/upstream/3.1.8/src/dtoa.h +85 -0
  153. data/ext/v8/upstream/3.1.8/src/execution.cc +735 -0
  154. data/ext/v8/upstream/3.1.8/src/execution.h +322 -0
  155. data/ext/v8/upstream/3.1.8/src/extensions/experimental/experimental.gyp +53 -0
  156. data/ext/v8/upstream/3.1.8/src/extensions/experimental/i18n-extension.cc +264 -0
  157. data/ext/v8/upstream/3.1.8/src/extensions/experimental/i18n-extension.h +64 -0
  158. data/ext/v8/upstream/3.1.8/src/extensions/externalize-string-extension.cc +141 -0
  159. data/ext/v8/upstream/3.1.8/src/extensions/externalize-string-extension.h +50 -0
  160. data/ext/v8/upstream/3.1.8/src/extensions/gc-extension.cc +58 -0
  161. data/ext/v8/upstream/3.1.8/src/extensions/gc-extension.h +49 -0
  162. data/ext/v8/upstream/3.1.8/src/factory.cc +1087 -0
  163. data/ext/v8/upstream/3.1.8/src/factory.h +432 -0
  164. data/ext/v8/upstream/3.1.8/src/fast-dtoa.cc +736 -0
  165. data/ext/v8/upstream/3.1.8/src/fast-dtoa.h +83 -0
  166. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/fixed-dtoa.cc +0 -0
  167. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/fixed-dtoa.h +0 -0
  168. data/ext/v8/upstream/3.1.8/src/flag-definitions.h +552 -0
  169. data/ext/v8/upstream/3.1.8/src/flags.cc +551 -0
  170. data/ext/v8/upstream/3.1.8/src/flags.h +79 -0
  171. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/frame-element.cc +0 -0
  172. data/ext/v8/upstream/3.1.8/src/frame-element.h +277 -0
  173. data/ext/v8/upstream/3.1.8/src/frames-inl.h +210 -0
  174. data/ext/v8/upstream/3.1.8/src/frames.cc +1232 -0
  175. data/ext/v8/upstream/3.1.8/src/frames.h +826 -0
  176. data/ext/v8/upstream/3.1.8/src/full-codegen.cc +1382 -0
  177. data/ext/v8/upstream/3.1.8/src/full-codegen.h +751 -0
  178. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.cc +90 -0
  179. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.h +111 -0
  180. data/ext/v8/upstream/3.1.8/src/gdb-jit.cc +1547 -0
  181. data/ext/v8/upstream/3.1.8/src/gdb-jit.h +138 -0
  182. data/ext/v8/upstream/3.1.8/src/global-handles.cc +534 -0
  183. data/ext/v8/upstream/3.1.8/src/global-handles.h +181 -0
  184. data/ext/v8/upstream/3.1.8/src/globals.h +325 -0
  185. data/ext/v8/upstream/3.1.8/src/handles-inl.h +80 -0
  186. data/ext/v8/upstream/3.1.8/src/handles.cc +910 -0
  187. data/ext/v8/upstream/3.1.8/src/handles.h +424 -0
  188. data/ext/v8/upstream/3.1.8/src/hashmap.cc +230 -0
  189. data/ext/v8/upstream/3.1.8/src/hashmap.h +121 -0
  190. data/ext/v8/upstream/3.1.8/src/heap-inl.h +587 -0
  191. data/ext/v8/upstream/3.1.8/src/heap-profiler.cc +1128 -0
  192. data/ext/v8/upstream/3.1.8/src/heap-profiler.h +381 -0
  193. data/ext/v8/upstream/3.1.8/src/heap.cc +5610 -0
  194. data/ext/v8/upstream/3.1.8/src/heap.h +2218 -0
  195. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.cc +1490 -0
  196. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.h +3493 -0
  197. data/ext/v8/upstream/3.1.8/src/hydrogen.cc +6056 -0
  198. data/ext/v8/upstream/3.1.8/src/hydrogen.h +1091 -0
  199. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32-inl.h +429 -0
  200. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.cc +2800 -0
  201. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.h +1093 -0
  202. data/ext/v8/upstream/3.1.8/src/ia32/builtins-ia32.cc +1590 -0
  203. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.cc +6624 -0
  204. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.h +536 -0
  205. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/codegen-ia32-inl.h +0 -0
  206. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.cc +10354 -0
  207. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.h +798 -0
  208. data/ext/v8/upstream/3.1.8/src/ia32/cpu-ia32.cc +87 -0
  209. data/ext/v8/upstream/3.1.8/src/ia32/debug-ia32.cc +309 -0
  210. data/ext/v8/upstream/3.1.8/src/ia32/deoptimizer-ia32.cc +664 -0
  211. data/ext/v8/upstream/3.1.8/src/ia32/disasm-ia32.cc +1597 -0
  212. data/ext/v8/upstream/3.1.8/src/ia32/frames-ia32.cc +45 -0
  213. data/ext/v8/upstream/3.1.8/src/ia32/frames-ia32.h +140 -0
  214. data/ext/v8/upstream/3.1.8/src/ia32/full-codegen-ia32.cc +4278 -0
  215. data/ext/v8/upstream/3.1.8/src/ia32/ic-ia32.cc +1786 -0
  216. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/jump-target-ia32.cc +0 -0
  217. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.cc +3880 -0
  218. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.h +309 -0
  219. data/ext/v8/upstream/3.1.8/src/ia32/lithium-gap-resolver-ia32.cc +460 -0
  220. data/ext/v8/upstream/3.1.8/src/ia32/lithium-gap-resolver-ia32.h +110 -0
  221. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.cc +2095 -0
  222. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.h +2127 -0
  223. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.cc +2031 -0
  224. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.h +798 -0
  225. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.cc +1253 -0
  226. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.h +215 -0
  227. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/register-allocator-ia32-inl.h +0 -0
  228. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/register-allocator-ia32.cc +0 -0
  229. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/register-allocator-ia32.h +0 -0
  230. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/simulator-ia32.cc +0 -0
  231. data/ext/v8/upstream/3.1.8/src/ia32/simulator-ia32.h +72 -0
  232. data/ext/v8/upstream/3.1.8/src/ia32/stub-cache-ia32.cc +3732 -0
  233. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.cc +1360 -0
  234. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.h +646 -0
  235. data/ext/v8/upstream/3.1.8/src/ic-inl.h +129 -0
  236. data/ext/v8/upstream/3.1.8/src/ic.cc +2333 -0
  237. data/ext/v8/upstream/3.1.8/src/ic.h +639 -0
  238. data/ext/v8/upstream/3.1.8/src/inspector.cc +63 -0
  239. data/ext/v8/upstream/3.1.8/src/inspector.h +62 -0
  240. data/ext/v8/upstream/3.1.8/src/interpreter-irregexp.cc +655 -0
  241. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/interpreter-irregexp.h +0 -0
  242. data/ext/v8/upstream/3.1.8/src/json.js +342 -0
  243. data/ext/v8/upstream/3.1.8/src/jsregexp.cc +5340 -0
  244. data/ext/v8/upstream/3.1.8/src/jsregexp.h +1484 -0
  245. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target-heavy-inl.h +0 -0
  246. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.cc +430 -0
  247. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.h +244 -0
  248. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target-inl.h +0 -0
  249. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target-light-inl.h +0 -0
  250. data/ext/v8/upstream/3.1.8/src/jump-target-light.cc +111 -0
  251. data/ext/v8/upstream/3.1.8/src/jump-target-light.h +193 -0
  252. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target.cc +0 -0
  253. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target.h +0 -0
  254. data/ext/v8/upstream/3.1.8/src/list-inl.h +206 -0
  255. data/ext/v8/upstream/3.1.8/src/list.h +164 -0
  256. data/ext/v8/upstream/3.1.8/src/lithium-allocator-inl.h +140 -0
  257. data/ext/v8/upstream/3.1.8/src/lithium-allocator.cc +2093 -0
  258. data/ext/v8/upstream/3.1.8/src/lithium-allocator.h +644 -0
  259. data/ext/v8/upstream/3.1.8/src/lithium.cc +168 -0
  260. data/ext/v8/upstream/3.1.8/src/lithium.h +592 -0
  261. data/ext/v8/upstream/3.1.8/src/liveedit-debugger.js +1082 -0
  262. data/ext/v8/upstream/3.1.8/src/liveedit.cc +1650 -0
  263. data/ext/v8/upstream/3.1.8/src/liveedit.h +174 -0
  264. data/ext/v8/upstream/3.1.8/src/liveobjectlist-inl.h +126 -0
  265. data/ext/v8/upstream/3.1.8/src/liveobjectlist.cc +2527 -0
  266. data/ext/v8/upstream/3.1.8/src/liveobjectlist.h +322 -0
  267. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/log-inl.h +0 -0
  268. data/ext/v8/upstream/3.1.8/src/log-utils.cc +336 -0
  269. data/ext/v8/upstream/3.1.8/src/log-utils.h +232 -0
  270. data/ext/v8/upstream/3.1.8/src/log.cc +1608 -0
  271. data/ext/v8/upstream/3.1.8/src/log.h +379 -0
  272. data/ext/v8/upstream/3.1.8/src/macro-assembler.h +120 -0
  273. data/ext/v8/upstream/3.1.8/src/macros.py +178 -0
  274. data/ext/v8/upstream/3.1.8/src/mark-compact.cc +2957 -0
  275. data/ext/v8/upstream/3.1.8/src/mark-compact.h +433 -0
  276. data/ext/v8/upstream/3.1.8/src/math.js +264 -0
  277. data/ext/v8/upstream/3.1.8/src/memory.h +82 -0
  278. data/ext/v8/upstream/3.1.8/src/messages.cc +164 -0
  279. data/ext/v8/upstream/3.1.8/src/messages.h +114 -0
  280. data/ext/v8/upstream/3.1.8/src/messages.js +1071 -0
  281. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/assembler-mips-inl.h +0 -0
  282. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/assembler-mips.cc +0 -0
  283. data/ext/v8/upstream/3.1.8/src/mips/assembler-mips.h +667 -0
  284. data/ext/v8/upstream/3.1.8/src/mips/builtins-mips.cc +205 -0
  285. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/codegen-mips-inl.h +0 -0
  286. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/codegen-mips.cc +0 -0
  287. data/ext/v8/upstream/3.1.8/src/mips/codegen-mips.h +431 -0
  288. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/constants-mips.cc +0 -0
  289. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/constants-mips.h +0 -0
  290. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/cpu-mips.cc +0 -0
  291. data/ext/v8/upstream/3.1.8/src/mips/debug-mips.cc +127 -0
  292. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/disasm-mips.cc +0 -0
  293. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/fast-codegen-mips.cc +0 -0
  294. data/ext/v8/upstream/3.1.8/src/mips/frames-mips.cc +96 -0
  295. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/frames-mips.h +0 -0
  296. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/full-codegen-mips.cc +0 -0
  297. data/ext/v8/upstream/3.1.8/src/mips/ic-mips.cc +208 -0
  298. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/jump-target-mips.cc +0 -0
  299. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/macro-assembler-mips.cc +0 -0
  300. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/macro-assembler-mips.h +0 -0
  301. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/register-allocator-mips-inl.h +0 -0
  302. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/register-allocator-mips.cc +0 -0
  303. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/register-allocator-mips.h +0 -0
  304. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.cc +1650 -0
  305. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.h +311 -0
  306. data/ext/v8/upstream/3.1.8/src/mips/stub-cache-mips.cc +418 -0
  307. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/virtual-frame-mips.cc +0 -0
  308. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/virtual-frame-mips.h +0 -0
  309. data/ext/v8/upstream/3.1.8/src/mirror-debugger.js +2380 -0
  310. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mksnapshot.cc +0 -0
  311. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/natives.h +0 -0
  312. data/ext/v8/upstream/3.1.8/src/objects-debug.cc +722 -0
  313. data/ext/v8/upstream/3.1.8/src/objects-inl.h +3946 -0
  314. data/ext/v8/upstream/3.1.8/src/objects-printer.cc +801 -0
  315. data/ext/v8/upstream/3.1.8/src/objects-visiting.cc +142 -0
  316. data/ext/v8/upstream/3.1.8/src/objects-visiting.h +401 -0
  317. data/ext/v8/upstream/3.1.8/src/objects.cc +10044 -0
  318. data/ext/v8/upstream/3.1.8/src/objects.h +6571 -0
  319. data/ext/v8/upstream/3.1.8/src/parser.cc +5165 -0
  320. data/ext/v8/upstream/3.1.8/src/parser.h +802 -0
  321. data/ext/v8/upstream/3.1.8/src/platform-cygwin.cc +745 -0
  322. data/ext/v8/upstream/3.1.8/src/platform-freebsd.cc +702 -0
  323. data/ext/v8/upstream/3.1.8/src/platform-linux.cc +981 -0
  324. data/ext/v8/upstream/3.1.8/src/platform-macos.cc +732 -0
  325. data/ext/v8/upstream/3.1.8/src/platform-nullos.cc +498 -0
  326. data/ext/v8/upstream/3.1.8/src/platform-openbsd.cc +657 -0
  327. data/ext/v8/upstream/3.1.8/src/platform-posix.cc +399 -0
  328. data/ext/v8/upstream/3.1.8/src/platform-solaris.cc +714 -0
  329. data/ext/v8/upstream/3.1.8/src/platform-win32.cc +1974 -0
  330. data/ext/v8/upstream/3.1.8/src/platform.h +636 -0
  331. data/ext/v8/upstream/3.1.8/src/preparse-data.cc +183 -0
  332. data/ext/v8/upstream/3.1.8/src/preparse-data.h +249 -0
  333. data/ext/v8/upstream/3.1.8/src/preparser-api.cc +213 -0
  334. data/ext/v8/upstream/3.1.8/src/preparser.cc +1205 -0
  335. data/ext/v8/upstream/3.1.8/src/preparser.h +278 -0
  336. data/ext/v8/upstream/3.1.8/src/prettyprinter.cc +1539 -0
  337. data/ext/v8/upstream/3.1.8/src/prettyprinter.h +223 -0
  338. data/ext/v8/upstream/3.1.8/src/profile-generator-inl.h +128 -0
  339. data/ext/v8/upstream/3.1.8/src/profile-generator.cc +2899 -0
  340. data/ext/v8/upstream/3.1.8/src/profile-generator.h +1151 -0
  341. data/ext/v8/upstream/3.1.8/src/property.cc +96 -0
  342. data/ext/v8/upstream/3.1.8/src/property.h +337 -0
  343. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/regexp-macro-assembler-irregexp-inl.h +0 -0
  344. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-irregexp.cc +470 -0
  345. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-irregexp.h +142 -0
  346. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-tracer.cc +373 -0
  347. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-tracer.h +104 -0
  348. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.cc +257 -0
  349. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.h +231 -0
  350. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/regexp-stack.cc +0 -0
  351. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/regexp-stack.h +0 -0
  352. data/ext/v8/upstream/3.1.8/src/regexp.js +483 -0
  353. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/register-allocator-inl.h +0 -0
  354. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/register-allocator.cc +0 -0
  355. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/register-allocator.h +0 -0
  356. data/ext/v8/upstream/3.1.8/src/rewriter.cc +1023 -0
  357. data/ext/v8/upstream/3.1.8/src/rewriter.h +59 -0
  358. data/ext/v8/upstream/3.1.8/src/runtime-profiler.cc +443 -0
  359. data/ext/v8/upstream/3.1.8/src/runtime-profiler.h +77 -0
  360. data/ext/v8/upstream/3.1.8/src/runtime.cc +11592 -0
  361. data/ext/v8/upstream/3.1.8/src/runtime.h +582 -0
  362. data/ext/v8/upstream/3.1.8/src/runtime.js +643 -0
  363. data/ext/v8/upstream/3.1.8/src/safepoint-table.cc +253 -0
  364. data/ext/v8/upstream/3.1.8/src/safepoint-table.h +263 -0
  365. data/ext/v8/upstream/3.1.8/src/scanner-base.cc +971 -0
  366. data/ext/v8/upstream/3.1.8/src/scanner-base.h +653 -0
  367. data/ext/v8/upstream/3.1.8/src/scanner.cc +586 -0
  368. data/ext/v8/upstream/3.1.8/src/scanner.h +194 -0
  369. data/ext/v8/upstream/3.1.8/src/scopeinfo.cc +636 -0
  370. data/ext/v8/upstream/3.1.8/src/scopeinfo.h +238 -0
  371. data/ext/v8/upstream/3.1.8/src/scopes.cc +1063 -0
  372. data/ext/v8/upstream/3.1.8/src/scopes.h +494 -0
  373. data/ext/v8/upstream/3.1.8/src/serialize.cc +1535 -0
  374. data/ext/v8/upstream/3.1.8/src/serialize.h +584 -0
  375. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/shell.h +0 -0
  376. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/simulator.h +0 -0
  377. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/smart-pointer.h +0 -0
  378. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/snapshot-common.cc +0 -0
  379. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/snapshot-empty.cc +0 -0
  380. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/snapshot.h +0 -0
  381. data/ext/v8/upstream/3.1.8/src/spaces-inl.h +524 -0
  382. data/ext/v8/upstream/3.1.8/src/spaces.cc +3254 -0
  383. data/ext/v8/upstream/3.1.8/src/spaces.h +2362 -0
  384. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/splay-tree-inl.h +0 -0
  385. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/splay-tree.h +0 -0
  386. data/ext/v8/upstream/3.1.8/src/string-search.cc +40 -0
  387. data/ext/v8/upstream/3.1.8/src/string-search.h +567 -0
  388. data/ext/v8/upstream/3.1.8/src/string-stream.cc +584 -0
  389. data/ext/v8/upstream/3.1.8/src/string-stream.h +191 -0
  390. data/ext/v8/upstream/3.1.8/src/string.js +915 -0
  391. data/ext/v8/upstream/3.1.8/src/strtod.cc +440 -0
  392. data/ext/v8/upstream/3.1.8/src/strtod.h +40 -0
  393. data/ext/v8/upstream/3.1.8/src/stub-cache.cc +1878 -0
  394. data/ext/v8/upstream/3.1.8/src/stub-cache.h +849 -0
  395. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/third_party/valgrind/valgrind.h +0 -0
  396. data/ext/v8/upstream/3.1.8/src/token.cc +63 -0
  397. data/ext/v8/upstream/3.1.8/src/token.h +288 -0
  398. data/ext/v8/upstream/3.1.8/src/top.cc +1152 -0
  399. data/ext/v8/upstream/3.1.8/src/top.h +608 -0
  400. data/ext/v8/upstream/3.1.8/src/type-info.cc +406 -0
  401. data/ext/v8/upstream/3.1.8/src/type-info.h +283 -0
  402. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/unbound-queue-inl.h +0 -0
  403. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/unbound-queue.h +0 -0
  404. data/ext/v8/upstream/3.1.8/src/unicode-inl.h +238 -0
  405. data/ext/v8/upstream/3.1.8/src/unicode.cc +1624 -0
  406. data/ext/v8/upstream/3.1.8/src/unicode.h +280 -0
  407. data/ext/v8/upstream/3.1.8/src/uri.js +402 -0
  408. data/ext/v8/upstream/3.1.8/src/utils.cc +371 -0
  409. data/ext/v8/upstream/3.1.8/src/utils.h +793 -0
  410. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/v8-counters.cc +0 -0
  411. data/ext/v8/upstream/3.1.8/src/v8-counters.h +290 -0
  412. data/ext/v8/upstream/3.1.8/src/v8.cc +270 -0
  413. data/ext/v8/upstream/3.1.8/src/v8.h +127 -0
  414. data/ext/v8/upstream/3.1.8/src/v8checks.h +64 -0
  415. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/v8dll-main.cc +0 -0
  416. data/ext/v8/upstream/3.1.8/src/v8globals.h +480 -0
  417. data/ext/v8/upstream/3.1.8/src/v8natives.js +1252 -0
  418. data/ext/v8/upstream/3.1.8/src/v8preparserdll-main.cc +39 -0
  419. data/ext/v8/upstream/3.1.8/src/v8threads.cc +440 -0
  420. data/ext/v8/upstream/3.1.8/src/v8threads.h +157 -0
  421. data/ext/v8/upstream/3.1.8/src/v8utils.h +354 -0
  422. data/ext/v8/upstream/3.1.8/src/variables.cc +132 -0
  423. data/ext/v8/upstream/3.1.8/src/variables.h +212 -0
  424. data/ext/v8/upstream/3.1.8/src/version.cc +95 -0
  425. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/version.h +0 -0
  426. data/ext/v8/upstream/3.1.8/src/virtual-frame-heavy-inl.h +190 -0
  427. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-heavy.cc +0 -0
  428. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-inl.h +0 -0
  429. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-light-inl.h +0 -0
  430. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-light.cc +0 -0
  431. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame.cc +0 -0
  432. data/ext/v8/upstream/3.1.8/src/virtual-frame.h +59 -0
  433. data/ext/v8/upstream/3.1.8/src/vm-state-inl.h +134 -0
  434. data/ext/v8/upstream/3.1.8/src/vm-state.h +68 -0
  435. data/ext/v8/upstream/3.1.8/src/win32-headers.h +95 -0
  436. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64-inl.h +455 -0
  437. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.cc +3162 -0
  438. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.h +1584 -0
  439. data/ext/v8/upstream/3.1.8/src/x64/builtins-x64.cc +1492 -0
  440. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.cc +5150 -0
  441. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.h +519 -0
  442. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64-inl.h +46 -0
  443. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.cc +8835 -0
  444. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.h +750 -0
  445. data/ext/v8/upstream/3.1.8/src/x64/cpu-x64.cc +86 -0
  446. data/ext/v8/upstream/3.1.8/src/x64/debug-x64.cc +316 -0
  447. data/ext/v8/upstream/3.1.8/src/x64/deoptimizer-x64.cc +781 -0
  448. data/ext/v8/upstream/3.1.8/src/x64/disasm-x64.cc +1737 -0
  449. data/ext/v8/upstream/3.1.8/src/x64/frames-x64.cc +45 -0
  450. data/ext/v8/upstream/3.1.8/src/x64/frames-x64.h +130 -0
  451. data/ext/v8/upstream/3.1.8/src/x64/full-codegen-x64.cc +3984 -0
  452. data/ext/v8/upstream/3.1.8/src/x64/ic-x64.cc +1761 -0
  453. data/ext/v8/upstream/3.1.8/src/x64/jump-target-x64.cc +437 -0
  454. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.cc +3639 -0
  455. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.h +305 -0
  456. data/ext/v8/upstream/3.1.8/src/x64/lithium-gap-resolver-x64.cc +320 -0
  457. data/ext/v8/upstream/3.1.8/src/x64/lithium-gap-resolver-x64.h +74 -0
  458. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.cc +2044 -0
  459. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.h +2052 -0
  460. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.cc +2660 -0
  461. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.h +1852 -0
  462. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.cc +1382 -0
  463. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.h +278 -0
  464. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/register-allocator-x64-inl.h +0 -0
  465. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/register-allocator-x64.cc +0 -0
  466. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/register-allocator-x64.h +0 -0
  467. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/simulator-x64.cc +0 -0
  468. data/ext/v8/upstream/3.1.8/src/x64/simulator-x64.h +71 -0
  469. data/ext/v8/upstream/3.1.8/src/x64/stub-cache-x64.cc +3509 -0
  470. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.cc +1292 -0
  471. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.h +593 -0
  472. data/ext/v8/upstream/3.1.8/src/zone-inl.h +83 -0
  473. data/ext/v8/upstream/3.1.8/src/zone.cc +195 -0
  474. data/ext/v8/upstream/3.1.8/src/zone.h +233 -0
  475. data/ext/v8/upstream/3.1.8/tools/codemap.js +265 -0
  476. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/consarray.js +0 -0
  477. data/ext/v8/upstream/3.1.8/tools/csvparser.js +78 -0
  478. data/ext/v8/upstream/3.1.8/tools/disasm.py +92 -0
  479. data/ext/v8/upstream/3.1.8/tools/gc-nvp-trace-processor.py +328 -0
  480. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/generate-ten-powers.scm +0 -0
  481. data/ext/v8/upstream/3.1.8/tools/grokdump.py +840 -0
  482. data/ext/v8/upstream/3.1.8/tools/gyp/v8.gyp +869 -0
  483. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/js2c.py +0 -0
  484. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/jsmin.py +0 -0
  485. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/linux-tick-processor +0 -0
  486. data/ext/v8/upstream/3.1.8/tools/ll_prof.py +919 -0
  487. data/ext/v8/upstream/3.1.8/tools/logreader.js +185 -0
  488. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/mac-nm +0 -0
  489. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/mac-tick-processor +0 -0
  490. data/ext/v8/upstream/3.1.8/tools/oom_dump/README +31 -0
  491. data/ext/v8/upstream/3.1.8/tools/oom_dump/SConstruct +42 -0
  492. data/ext/v8/upstream/3.1.8/tools/oom_dump/oom_dump.cc +288 -0
  493. data/ext/v8/upstream/3.1.8/tools/presubmit.py +305 -0
  494. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/process-heap-prof.py +0 -0
  495. data/ext/v8/upstream/3.1.8/tools/profile.js +751 -0
  496. data/ext/v8/upstream/3.1.8/tools/profile_view.js +219 -0
  497. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/run-valgrind.py +0 -0
  498. data/ext/v8/upstream/3.1.8/tools/splaytree.js +316 -0
  499. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/stats-viewer.py +0 -0
  500. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/tickprocessor-driver.js +0 -0
  501. data/ext/v8/upstream/3.1.8/tools/tickprocessor.js +863 -0
  502. data/ext/v8/upstream/3.1.8/tools/utils.py +96 -0
  503. data/ext/v8/upstream/3.1.8/tools/visual_studio/README.txt +70 -0
  504. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/arm.vsprops +0 -0
  505. data/ext/v8/upstream/3.1.8/tools/visual_studio/common.vsprops +34 -0
  506. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8.vcproj +0 -0
  507. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8_arm.vcproj +0 -0
  508. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8_x64.vcproj +0 -0
  509. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8js2c.cmd +0 -0
  510. data/ext/v8/upstream/3.1.8/tools/visual_studio/debug.vsprops +17 -0
  511. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/ia32.vsprops +0 -0
  512. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/js2c.cmd +0 -0
  513. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/release.vsprops +0 -0
  514. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8.sln +0 -0
  515. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8.vcproj +0 -0
  516. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_arm.sln +0 -0
  517. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_arm.vcproj +0 -0
  518. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base.vcproj +1296 -0
  519. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_arm.vcproj +1234 -0
  520. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_x64.vcproj +1296 -0
  521. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_cctest.vcproj +0 -0
  522. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_cctest_arm.vcproj +0 -0
  523. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_cctest_x64.vcproj +0 -0
  524. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_mksnapshot.vcproj +0 -0
  525. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_mksnapshot_x64.vcproj +0 -0
  526. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_process_sample.vcproj +0 -0
  527. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_process_sample_arm.vcproj +0 -0
  528. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_process_sample_x64.vcproj +0 -0
  529. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_shell_sample.vcproj +147 -0
  530. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_shell_sample_arm.vcproj +147 -0
  531. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_shell_sample_x64.vcproj +163 -0
  532. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot.vcproj +0 -0
  533. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot_cc.vcproj +0 -0
  534. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot_cc_x64.vcproj +0 -0
  535. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot_x64.vcproj +0 -0
  536. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_x64.sln +0 -0
  537. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_x64.vcproj +0 -0
  538. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/x64.vsprops +0 -0
  539. data/ext/v8/upstream/3.1.8/tools/windows-tick-processor.bat +30 -0
  540. data/ext/v8/upstream/Makefile +2 -1
  541. data/ext/v8/v8_template.cpp +2 -2
  542. data/lib/v8/version.rb +1 -1
  543. data/spec/redjs/jsapi_spec.rb +2 -2
  544. metadata +552 -490
  545. data/ext/v8/upstream/2.3.3/.gitignore +0 -26
  546. data/ext/v8/upstream/2.3.3/AUTHORS +0 -31
  547. data/ext/v8/upstream/2.3.3/ChangeLog +0 -1916
  548. data/ext/v8/upstream/2.3.3/LICENSE +0 -55
  549. data/ext/v8/upstream/2.3.3/SConstruct +0 -1154
  550. data/ext/v8/upstream/2.3.3/include/v8-debug.h +0 -381
  551. data/ext/v8/upstream/2.3.3/include/v8-profiler.h +0 -353
  552. data/ext/v8/upstream/2.3.3/include/v8.h +0 -3616
  553. data/ext/v8/upstream/2.3.3/src/SConscript +0 -330
  554. data/ext/v8/upstream/2.3.3/src/accessors.cc +0 -661
  555. data/ext/v8/upstream/2.3.3/src/accessors.h +0 -114
  556. data/ext/v8/upstream/2.3.3/src/allocation.cc +0 -198
  557. data/ext/v8/upstream/2.3.3/src/allocation.h +0 -169
  558. data/ext/v8/upstream/2.3.3/src/api.cc +0 -4795
  559. data/ext/v8/upstream/2.3.3/src/api.h +0 -485
  560. data/ext/v8/upstream/2.3.3/src/apiutils.h +0 -69
  561. data/ext/v8/upstream/2.3.3/src/arguments.h +0 -96
  562. data/ext/v8/upstream/2.3.3/src/arm/assembler-arm-inl.h +0 -305
  563. data/ext/v8/upstream/2.3.3/src/arm/assembler-arm.cc +0 -2580
  564. data/ext/v8/upstream/2.3.3/src/arm/assembler-arm.h +0 -1275
  565. data/ext/v8/upstream/2.3.3/src/arm/builtins-arm.cc +0 -1320
  566. data/ext/v8/upstream/2.3.3/src/arm/codegen-arm-inl.h +0 -48
  567. data/ext/v8/upstream/2.3.3/src/arm/codegen-arm.cc +0 -11398
  568. data/ext/v8/upstream/2.3.3/src/arm/codegen-arm.h +0 -1102
  569. data/ext/v8/upstream/2.3.3/src/arm/constants-arm.cc +0 -154
  570. data/ext/v8/upstream/2.3.3/src/arm/constants-arm.h +0 -388
  571. data/ext/v8/upstream/2.3.3/src/arm/cpu-arm.cc +0 -142
  572. data/ext/v8/upstream/2.3.3/src/arm/debug-arm.cc +0 -309
  573. data/ext/v8/upstream/2.3.3/src/arm/disasm-arm.cc +0 -1459
  574. data/ext/v8/upstream/2.3.3/src/arm/fast-codegen-arm.cc +0 -241
  575. data/ext/v8/upstream/2.3.3/src/arm/frames-arm.cc +0 -123
  576. data/ext/v8/upstream/2.3.3/src/arm/frames-arm.h +0 -162
  577. data/ext/v8/upstream/2.3.3/src/arm/full-codegen-arm.cc +0 -3178
  578. data/ext/v8/upstream/2.3.3/src/arm/ic-arm.cc +0 -2258
  579. data/ext/v8/upstream/2.3.3/src/arm/jump-target-arm.cc +0 -164
  580. data/ext/v8/upstream/2.3.3/src/arm/macro-assembler-arm.cc +0 -1892
  581. data/ext/v8/upstream/2.3.3/src/arm/macro-assembler-arm.h +0 -727
  582. data/ext/v8/upstream/2.3.3/src/arm/regexp-macro-assembler-arm.cc +0 -1261
  583. data/ext/v8/upstream/2.3.3/src/arm/regexp-macro-assembler-arm.h +0 -266
  584. data/ext/v8/upstream/2.3.3/src/arm/simulator-arm.cc +0 -2822
  585. data/ext/v8/upstream/2.3.3/src/arm/simulator-arm.h +0 -361
  586. data/ext/v8/upstream/2.3.3/src/arm/stub-cache-arm.cc +0 -2387
  587. data/ext/v8/upstream/2.3.3/src/arm/virtual-frame-arm.cc +0 -834
  588. data/ext/v8/upstream/2.3.3/src/arm/virtual-frame-arm.h +0 -519
  589. data/ext/v8/upstream/2.3.3/src/array.js +0 -1127
  590. data/ext/v8/upstream/2.3.3/src/assembler.cc +0 -801
  591. data/ext/v8/upstream/2.3.3/src/assembler.h +0 -573
  592. data/ext/v8/upstream/2.3.3/src/ast-inl.h +0 -81
  593. data/ext/v8/upstream/2.3.3/src/ast.cc +0 -1152
  594. data/ext/v8/upstream/2.3.3/src/ast.h +0 -2106
  595. data/ext/v8/upstream/2.3.3/src/bootstrapper.cc +0 -1819
  596. data/ext/v8/upstream/2.3.3/src/builtins.cc +0 -1529
  597. data/ext/v8/upstream/2.3.3/src/builtins.h +0 -263
  598. data/ext/v8/upstream/2.3.3/src/bytecodes-irregexp.h +0 -104
  599. data/ext/v8/upstream/2.3.3/src/cached-powers.h +0 -119
  600. data/ext/v8/upstream/2.3.3/src/char-predicates-inl.h +0 -86
  601. data/ext/v8/upstream/2.3.3/src/checks.cc +0 -100
  602. data/ext/v8/upstream/2.3.3/src/checks.h +0 -310
  603. data/ext/v8/upstream/2.3.3/src/circular-queue.cc +0 -121
  604. data/ext/v8/upstream/2.3.3/src/code-stubs.cc +0 -177
  605. data/ext/v8/upstream/2.3.3/src/code-stubs.h +0 -177
  606. data/ext/v8/upstream/2.3.3/src/codegen-inl.h +0 -60
  607. data/ext/v8/upstream/2.3.3/src/codegen.cc +0 -516
  608. data/ext/v8/upstream/2.3.3/src/codegen.h +0 -897
  609. data/ext/v8/upstream/2.3.3/src/compilation-cache.cc +0 -562
  610. data/ext/v8/upstream/2.3.3/src/compilation-cache.h +0 -102
  611. data/ext/v8/upstream/2.3.3/src/compiler.cc +0 -654
  612. data/ext/v8/upstream/2.3.3/src/compiler.h +0 -299
  613. data/ext/v8/upstream/2.3.3/src/contexts.cc +0 -256
  614. data/ext/v8/upstream/2.3.3/src/contexts.h +0 -342
  615. data/ext/v8/upstream/2.3.3/src/conversions.cc +0 -1119
  616. data/ext/v8/upstream/2.3.3/src/conversions.h +0 -123
  617. data/ext/v8/upstream/2.3.3/src/counters.h +0 -239
  618. data/ext/v8/upstream/2.3.3/src/cpu-profiler-inl.h +0 -100
  619. data/ext/v8/upstream/2.3.3/src/cpu-profiler.cc +0 -538
  620. data/ext/v8/upstream/2.3.3/src/cpu-profiler.h +0 -285
  621. data/ext/v8/upstream/2.3.3/src/d8-debug.cc +0 -356
  622. data/ext/v8/upstream/2.3.3/src/d8-debug.h +0 -155
  623. data/ext/v8/upstream/2.3.3/src/d8.cc +0 -783
  624. data/ext/v8/upstream/2.3.3/src/d8.h +0 -227
  625. data/ext/v8/upstream/2.3.3/src/d8.js +0 -1683
  626. data/ext/v8/upstream/2.3.3/src/data-flow.cc +0 -758
  627. data/ext/v8/upstream/2.3.3/src/data-flow.h +0 -278
  628. data/ext/v8/upstream/2.3.3/src/date.js +0 -1059
  629. data/ext/v8/upstream/2.3.3/src/dateparser-inl.h +0 -123
  630. data/ext/v8/upstream/2.3.3/src/dateparser.h +0 -244
  631. data/ext/v8/upstream/2.3.3/src/debug-agent.cc +0 -427
  632. data/ext/v8/upstream/2.3.3/src/debug-agent.h +0 -129
  633. data/ext/v8/upstream/2.3.3/src/debug-debugger.js +0 -2227
  634. data/ext/v8/upstream/2.3.3/src/debug.cc +0 -3005
  635. data/ext/v8/upstream/2.3.3/src/debug.h +0 -993
  636. data/ext/v8/upstream/2.3.3/src/disassembler.cc +0 -312
  637. data/ext/v8/upstream/2.3.3/src/double.h +0 -169
  638. data/ext/v8/upstream/2.3.3/src/dtoa-config.c +0 -92
  639. data/ext/v8/upstream/2.3.3/src/dtoa.cc +0 -77
  640. data/ext/v8/upstream/2.3.3/src/dtoa.h +0 -81
  641. data/ext/v8/upstream/2.3.3/src/execution.cc +0 -809
  642. data/ext/v8/upstream/2.3.3/src/execution.h +0 -336
  643. data/ext/v8/upstream/2.3.3/src/factory.cc +0 -1003
  644. data/ext/v8/upstream/2.3.3/src/factory.h +0 -410
  645. data/ext/v8/upstream/2.3.3/src/fast-codegen.cc +0 -746
  646. data/ext/v8/upstream/2.3.3/src/fast-codegen.h +0 -161
  647. data/ext/v8/upstream/2.3.3/src/fast-dtoa.cc +0 -505
  648. data/ext/v8/upstream/2.3.3/src/fast-dtoa.h +0 -58
  649. data/ext/v8/upstream/2.3.3/src/flag-definitions.h +0 -455
  650. data/ext/v8/upstream/2.3.3/src/flags.cc +0 -551
  651. data/ext/v8/upstream/2.3.3/src/flags.h +0 -81
  652. data/ext/v8/upstream/2.3.3/src/flow-graph.cc +0 -763
  653. data/ext/v8/upstream/2.3.3/src/flow-graph.h +0 -180
  654. data/ext/v8/upstream/2.3.3/src/frame-element.h +0 -273
  655. data/ext/v8/upstream/2.3.3/src/frames-inl.h +0 -217
  656. data/ext/v8/upstream/2.3.3/src/frames.cc +0 -826
  657. data/ext/v8/upstream/2.3.3/src/frames.h +0 -682
  658. data/ext/v8/upstream/2.3.3/src/full-codegen.cc +0 -1443
  659. data/ext/v8/upstream/2.3.3/src/full-codegen.h +0 -548
  660. data/ext/v8/upstream/2.3.3/src/func-name-inferrer.cc +0 -76
  661. data/ext/v8/upstream/2.3.3/src/func-name-inferrer.h +0 -135
  662. data/ext/v8/upstream/2.3.3/src/global-handles.cc +0 -520
  663. data/ext/v8/upstream/2.3.3/src/global-handles.h +0 -180
  664. data/ext/v8/upstream/2.3.3/src/globals.h +0 -669
  665. data/ext/v8/upstream/2.3.3/src/handles-inl.h +0 -76
  666. data/ext/v8/upstream/2.3.3/src/handles.cc +0 -825
  667. data/ext/v8/upstream/2.3.3/src/handles.h +0 -393
  668. data/ext/v8/upstream/2.3.3/src/hashmap.cc +0 -226
  669. data/ext/v8/upstream/2.3.3/src/hashmap.h +0 -120
  670. data/ext/v8/upstream/2.3.3/src/heap-inl.h +0 -493
  671. data/ext/v8/upstream/2.3.3/src/heap-profiler.cc +0 -779
  672. data/ext/v8/upstream/2.3.3/src/heap-profiler.h +0 -323
  673. data/ext/v8/upstream/2.3.3/src/heap.cc +0 -4994
  674. data/ext/v8/upstream/2.3.3/src/heap.h +0 -1984
  675. data/ext/v8/upstream/2.3.3/src/ia32/assembler-ia32-inl.h +0 -360
  676. data/ext/v8/upstream/2.3.3/src/ia32/assembler-ia32.cc +0 -2600
  677. data/ext/v8/upstream/2.3.3/src/ia32/assembler-ia32.h +0 -969
  678. data/ext/v8/upstream/2.3.3/src/ia32/builtins-ia32.cc +0 -1261
  679. data/ext/v8/upstream/2.3.3/src/ia32/codegen-ia32.cc +0 -13968
  680. data/ext/v8/upstream/2.3.3/src/ia32/codegen-ia32.h +0 -1097
  681. data/ext/v8/upstream/2.3.3/src/ia32/cpu-ia32.cc +0 -83
  682. data/ext/v8/upstream/2.3.3/src/ia32/debug-ia32.cc +0 -309
  683. data/ext/v8/upstream/2.3.3/src/ia32/disasm-ia32.cc +0 -1471
  684. data/ext/v8/upstream/2.3.3/src/ia32/fast-codegen-ia32.cc +0 -954
  685. data/ext/v8/upstream/2.3.3/src/ia32/fast-codegen-ia32.h +0 -155
  686. data/ext/v8/upstream/2.3.3/src/ia32/frames-ia32.cc +0 -115
  687. data/ext/v8/upstream/2.3.3/src/ia32/frames-ia32.h +0 -135
  688. data/ext/v8/upstream/2.3.3/src/ia32/full-codegen-ia32.cc +0 -3281
  689. data/ext/v8/upstream/2.3.3/src/ia32/ic-ia32.cc +0 -1966
  690. data/ext/v8/upstream/2.3.3/src/ia32/macro-assembler-ia32.cc +0 -1610
  691. data/ext/v8/upstream/2.3.3/src/ia32/macro-assembler-ia32.h +0 -610
  692. data/ext/v8/upstream/2.3.3/src/ia32/regexp-macro-assembler-ia32.cc +0 -1247
  693. data/ext/v8/upstream/2.3.3/src/ia32/regexp-macro-assembler-ia32.h +0 -214
  694. data/ext/v8/upstream/2.3.3/src/ia32/simulator-ia32.h +0 -62
  695. data/ext/v8/upstream/2.3.3/src/ia32/stub-cache-ia32.cc +0 -2750
  696. data/ext/v8/upstream/2.3.3/src/ia32/virtual-frame-ia32.cc +0 -1334
  697. data/ext/v8/upstream/2.3.3/src/ia32/virtual-frame-ia32.h +0 -627
  698. data/ext/v8/upstream/2.3.3/src/ic-inl.h +0 -120
  699. data/ext/v8/upstream/2.3.3/src/ic.cc +0 -1827
  700. data/ext/v8/upstream/2.3.3/src/ic.h +0 -515
  701. data/ext/v8/upstream/2.3.3/src/interpreter-irregexp.cc +0 -646
  702. data/ext/v8/upstream/2.3.3/src/json.js +0 -268
  703. data/ext/v8/upstream/2.3.3/src/jsregexp.cc +0 -5283
  704. data/ext/v8/upstream/2.3.3/src/jsregexp.h +0 -1463
  705. data/ext/v8/upstream/2.3.3/src/jump-target-heavy.cc +0 -429
  706. data/ext/v8/upstream/2.3.3/src/jump-target-heavy.h +0 -244
  707. data/ext/v8/upstream/2.3.3/src/jump-target-light.cc +0 -110
  708. data/ext/v8/upstream/2.3.3/src/jump-target-light.h +0 -192
  709. data/ext/v8/upstream/2.3.3/src/list-inl.h +0 -166
  710. data/ext/v8/upstream/2.3.3/src/list.h +0 -159
  711. data/ext/v8/upstream/2.3.3/src/liveedit-debugger.js +0 -1057
  712. data/ext/v8/upstream/2.3.3/src/liveedit.cc +0 -1480
  713. data/ext/v8/upstream/2.3.3/src/liveedit.h +0 -170
  714. data/ext/v8/upstream/2.3.3/src/log-utils.cc +0 -497
  715. data/ext/v8/upstream/2.3.3/src/log-utils.h +0 -289
  716. data/ext/v8/upstream/2.3.3/src/log.cc +0 -1561
  717. data/ext/v8/upstream/2.3.3/src/log.h +0 -384
  718. data/ext/v8/upstream/2.3.3/src/macro-assembler.h +0 -86
  719. data/ext/v8/upstream/2.3.3/src/macros.py +0 -177
  720. data/ext/v8/upstream/2.3.3/src/mark-compact.cc +0 -2330
  721. data/ext/v8/upstream/2.3.3/src/mark-compact.h +0 -451
  722. data/ext/v8/upstream/2.3.3/src/math.js +0 -264
  723. data/ext/v8/upstream/2.3.3/src/memory.h +0 -74
  724. data/ext/v8/upstream/2.3.3/src/messages.cc +0 -183
  725. data/ext/v8/upstream/2.3.3/src/messages.h +0 -113
  726. data/ext/v8/upstream/2.3.3/src/messages.js +0 -982
  727. data/ext/v8/upstream/2.3.3/src/mips/assembler-mips.h +0 -668
  728. data/ext/v8/upstream/2.3.3/src/mips/builtins-mips.cc +0 -205
  729. data/ext/v8/upstream/2.3.3/src/mips/codegen-mips.h +0 -434
  730. data/ext/v8/upstream/2.3.3/src/mips/debug-mips.cc +0 -131
  731. data/ext/v8/upstream/2.3.3/src/mips/frames-mips.cc +0 -102
  732. data/ext/v8/upstream/2.3.3/src/mips/ic-mips.cc +0 -220
  733. data/ext/v8/upstream/2.3.3/src/mips/simulator-mips.cc +0 -1651
  734. data/ext/v8/upstream/2.3.3/src/mips/simulator-mips.h +0 -311
  735. data/ext/v8/upstream/2.3.3/src/mips/stub-cache-mips.cc +0 -403
  736. data/ext/v8/upstream/2.3.3/src/mirror-debugger.js +0 -2380
  737. data/ext/v8/upstream/2.3.3/src/objects-debug.cc +0 -1366
  738. data/ext/v8/upstream/2.3.3/src/objects-inl.h +0 -3333
  739. data/ext/v8/upstream/2.3.3/src/objects.cc +0 -8820
  740. data/ext/v8/upstream/2.3.3/src/objects.h +0 -5373
  741. data/ext/v8/upstream/2.3.3/src/oprofile-agent.cc +0 -108
  742. data/ext/v8/upstream/2.3.3/src/oprofile-agent.h +0 -77
  743. data/ext/v8/upstream/2.3.3/src/parser.cc +0 -5207
  744. data/ext/v8/upstream/2.3.3/src/parser.h +0 -197
  745. data/ext/v8/upstream/2.3.3/src/platform-freebsd.cc +0 -667
  746. data/ext/v8/upstream/2.3.3/src/platform-linux.cc +0 -862
  747. data/ext/v8/upstream/2.3.3/src/platform-macos.cc +0 -665
  748. data/ext/v8/upstream/2.3.3/src/platform-nullos.cc +0 -454
  749. data/ext/v8/upstream/2.3.3/src/platform-openbsd.cc +0 -622
  750. data/ext/v8/upstream/2.3.3/src/platform-posix.cc +0 -362
  751. data/ext/v8/upstream/2.3.3/src/platform-solaris.cc +0 -653
  752. data/ext/v8/upstream/2.3.3/src/platform-win32.cc +0 -1911
  753. data/ext/v8/upstream/2.3.3/src/platform.h +0 -577
  754. data/ext/v8/upstream/2.3.3/src/powers-ten.h +0 -2461
  755. data/ext/v8/upstream/2.3.3/src/prettyprinter.cc +0 -1531
  756. data/ext/v8/upstream/2.3.3/src/prettyprinter.h +0 -221
  757. data/ext/v8/upstream/2.3.3/src/profile-generator-inl.h +0 -148
  758. data/ext/v8/upstream/2.3.3/src/profile-generator.cc +0 -1830
  759. data/ext/v8/upstream/2.3.3/src/profile-generator.h +0 -853
  760. data/ext/v8/upstream/2.3.3/src/property.cc +0 -96
  761. data/ext/v8/upstream/2.3.3/src/property.h +0 -315
  762. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-irregexp.cc +0 -464
  763. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-irregexp.h +0 -141
  764. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-tracer.cc +0 -356
  765. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-tracer.h +0 -103
  766. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler.cc +0 -261
  767. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler.h +0 -228
  768. data/ext/v8/upstream/2.3.3/src/regexp.js +0 -549
  769. data/ext/v8/upstream/2.3.3/src/rewriter.cc +0 -1038
  770. data/ext/v8/upstream/2.3.3/src/rewriter.h +0 -54
  771. data/ext/v8/upstream/2.3.3/src/runtime.cc +0 -10599
  772. data/ext/v8/upstream/2.3.3/src/runtime.h +0 -459
  773. data/ext/v8/upstream/2.3.3/src/runtime.js +0 -629
  774. data/ext/v8/upstream/2.3.3/src/scanner.cc +0 -1346
  775. data/ext/v8/upstream/2.3.3/src/scanner.h +0 -503
  776. data/ext/v8/upstream/2.3.3/src/scopeinfo.cc +0 -637
  777. data/ext/v8/upstream/2.3.3/src/scopeinfo.h +0 -233
  778. data/ext/v8/upstream/2.3.3/src/scopes.cc +0 -962
  779. data/ext/v8/upstream/2.3.3/src/scopes.h +0 -400
  780. data/ext/v8/upstream/2.3.3/src/serialize.cc +0 -1461
  781. data/ext/v8/upstream/2.3.3/src/serialize.h +0 -581
  782. data/ext/v8/upstream/2.3.3/src/spaces-inl.h +0 -483
  783. data/ext/v8/upstream/2.3.3/src/spaces.cc +0 -2901
  784. data/ext/v8/upstream/2.3.3/src/spaces.h +0 -2197
  785. data/ext/v8/upstream/2.3.3/src/string-stream.cc +0 -584
  786. data/ext/v8/upstream/2.3.3/src/string-stream.h +0 -189
  787. data/ext/v8/upstream/2.3.3/src/string.js +0 -1006
  788. data/ext/v8/upstream/2.3.3/src/stub-cache.cc +0 -1379
  789. data/ext/v8/upstream/2.3.3/src/stub-cache.h +0 -756
  790. data/ext/v8/upstream/2.3.3/src/third_party/dtoa/COPYING +0 -15
  791. data/ext/v8/upstream/2.3.3/src/third_party/dtoa/dtoa.c +0 -3334
  792. data/ext/v8/upstream/2.3.3/src/token.cc +0 -56
  793. data/ext/v8/upstream/2.3.3/src/token.h +0 -270
  794. data/ext/v8/upstream/2.3.3/src/top.cc +0 -1067
  795. data/ext/v8/upstream/2.3.3/src/top.h +0 -463
  796. data/ext/v8/upstream/2.3.3/src/type-info.cc +0 -53
  797. data/ext/v8/upstream/2.3.3/src/type-info.h +0 -244
  798. data/ext/v8/upstream/2.3.3/src/unicode-inl.h +0 -238
  799. data/ext/v8/upstream/2.3.3/src/unicode.cc +0 -749
  800. data/ext/v8/upstream/2.3.3/src/unicode.h +0 -279
  801. data/ext/v8/upstream/2.3.3/src/uri.js +0 -415
  802. data/ext/v8/upstream/2.3.3/src/utils.cc +0 -285
  803. data/ext/v8/upstream/2.3.3/src/utils.h +0 -745
  804. data/ext/v8/upstream/2.3.3/src/v8-counters.h +0 -250
  805. data/ext/v8/upstream/2.3.3/src/v8.cc +0 -228
  806. data/ext/v8/upstream/2.3.3/src/v8.h +0 -121
  807. data/ext/v8/upstream/2.3.3/src/v8natives.js +0 -1188
  808. data/ext/v8/upstream/2.3.3/src/v8threads.cc +0 -461
  809. data/ext/v8/upstream/2.3.3/src/v8threads.h +0 -159
  810. data/ext/v8/upstream/2.3.3/src/variables.cc +0 -119
  811. data/ext/v8/upstream/2.3.3/src/variables.h +0 -205
  812. data/ext/v8/upstream/2.3.3/src/version.cc +0 -88
  813. data/ext/v8/upstream/2.3.3/src/virtual-frame-heavy-inl.h +0 -192
  814. data/ext/v8/upstream/2.3.3/src/virtual-frame.h +0 -46
  815. data/ext/v8/upstream/2.3.3/src/vm-state-inl.h +0 -137
  816. data/ext/v8/upstream/2.3.3/src/vm-state.cc +0 -39
  817. data/ext/v8/upstream/2.3.3/src/vm-state.h +0 -77
  818. data/ext/v8/upstream/2.3.3/src/x64/assembler-x64-inl.h +0 -400
  819. data/ext/v8/upstream/2.3.3/src/x64/assembler-x64.cc +0 -2963
  820. data/ext/v8/upstream/2.3.3/src/x64/assembler-x64.h +0 -1438
  821. data/ext/v8/upstream/2.3.3/src/x64/builtins-x64.cc +0 -1296
  822. data/ext/v8/upstream/2.3.3/src/x64/codegen-x64-inl.h +0 -46
  823. data/ext/v8/upstream/2.3.3/src/x64/codegen-x64.cc +0 -12491
  824. data/ext/v8/upstream/2.3.3/src/x64/codegen-x64.h +0 -1090
  825. data/ext/v8/upstream/2.3.3/src/x64/cpu-x64.cc +0 -83
  826. data/ext/v8/upstream/2.3.3/src/x64/debug-x64.cc +0 -267
  827. data/ext/v8/upstream/2.3.3/src/x64/disasm-x64.cc +0 -1696
  828. data/ext/v8/upstream/2.3.3/src/x64/fast-codegen-x64.cc +0 -250
  829. data/ext/v8/upstream/2.3.3/src/x64/frames-x64.cc +0 -113
  830. data/ext/v8/upstream/2.3.3/src/x64/frames-x64.h +0 -125
  831. data/ext/v8/upstream/2.3.3/src/x64/full-codegen-x64.cc +0 -3270
  832. data/ext/v8/upstream/2.3.3/src/x64/ic-x64.cc +0 -1907
  833. data/ext/v8/upstream/2.3.3/src/x64/jump-target-x64.cc +0 -437
  834. data/ext/v8/upstream/2.3.3/src/x64/macro-assembler-x64.cc +0 -2793
  835. data/ext/v8/upstream/2.3.3/src/x64/macro-assembler-x64.h +0 -916
  836. data/ext/v8/upstream/2.3.3/src/x64/regexp-macro-assembler-x64.cc +0 -1374
  837. data/ext/v8/upstream/2.3.3/src/x64/regexp-macro-assembler-x64.h +0 -277
  838. data/ext/v8/upstream/2.3.3/src/x64/simulator-x64.h +0 -63
  839. data/ext/v8/upstream/2.3.3/src/x64/stub-cache-x64.cc +0 -2560
  840. data/ext/v8/upstream/2.3.3/src/x64/virtual-frame-x64.cc +0 -1264
  841. data/ext/v8/upstream/2.3.3/src/x64/virtual-frame-x64.h +0 -590
  842. data/ext/v8/upstream/2.3.3/src/zone-inl.h +0 -82
  843. data/ext/v8/upstream/2.3.3/src/zone.cc +0 -194
  844. data/ext/v8/upstream/2.3.3/src/zone.h +0 -221
  845. data/ext/v8/upstream/2.3.3/tools/codemap.js +0 -270
  846. data/ext/v8/upstream/2.3.3/tools/csvparser.js +0 -83
  847. data/ext/v8/upstream/2.3.3/tools/gc-nvp-trace-processor.py +0 -317
  848. data/ext/v8/upstream/2.3.3/tools/gyp/v8.gyp +0 -749
  849. data/ext/v8/upstream/2.3.3/tools/linux-tick-processor.py +0 -78
  850. data/ext/v8/upstream/2.3.3/tools/logreader.js +0 -338
  851. data/ext/v8/upstream/2.3.3/tools/oprofile/annotate +0 -7
  852. data/ext/v8/upstream/2.3.3/tools/oprofile/common +0 -19
  853. data/ext/v8/upstream/2.3.3/tools/oprofile/dump +0 -7
  854. data/ext/v8/upstream/2.3.3/tools/oprofile/report +0 -7
  855. data/ext/v8/upstream/2.3.3/tools/oprofile/reset +0 -7
  856. data/ext/v8/upstream/2.3.3/tools/oprofile/run +0 -14
  857. data/ext/v8/upstream/2.3.3/tools/oprofile/shutdown +0 -7
  858. data/ext/v8/upstream/2.3.3/tools/oprofile/start +0 -7
  859. data/ext/v8/upstream/2.3.3/tools/presubmit.py +0 -299
  860. data/ext/v8/upstream/2.3.3/tools/profile.js +0 -691
  861. data/ext/v8/upstream/2.3.3/tools/profile_view.js +0 -224
  862. data/ext/v8/upstream/2.3.3/tools/splaytree.js +0 -322
  863. data/ext/v8/upstream/2.3.3/tools/splaytree.py +0 -226
  864. data/ext/v8/upstream/2.3.3/tools/tickprocessor.js +0 -862
  865. data/ext/v8/upstream/2.3.3/tools/tickprocessor.py +0 -571
  866. data/ext/v8/upstream/2.3.3/tools/utils.py +0 -88
  867. data/ext/v8/upstream/2.3.3/tools/visual_studio/README.txt +0 -71
  868. data/ext/v8/upstream/2.3.3/tools/visual_studio/common.vsprops +0 -34
  869. data/ext/v8/upstream/2.3.3/tools/visual_studio/debug.vsprops +0 -17
  870. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_base.vcproj +0 -1143
  871. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_base_arm.vcproj +0 -1115
  872. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_base_x64.vcproj +0 -1096
  873. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_shell_sample.vcproj +0 -145
  874. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_shell_sample_arm.vcproj +0 -145
  875. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_shell_sample_x64.vcproj +0 -161
  876. data/ext/v8/upstream/2.3.3/tools/windows-tick-processor.bat +0 -29
  877. data/ext/v8/upstream/2.3.3/tools/windows-tick-processor.py +0 -137
@@ -0,0 +1,402 @@
1
+ // Copyright 2009 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+
29
+ // Declares a Simulator for ARM instructions if we are not generating a native
30
+ // ARM binary. This Simulator allows us to run and debug ARM code generation on
31
+ // regular desktop machines.
32
+ // V8 calls into generated code by "calling" the CALL_GENERATED_CODE macro,
33
+ // which will start execution in the Simulator or forwards to the real entry
34
+ // on a ARM HW platform.
35
+
36
+ #ifndef V8_ARM_SIMULATOR_ARM_H_
37
+ #define V8_ARM_SIMULATOR_ARM_H_
38
+
39
+ #include "allocation.h"
40
+
41
+ #if !defined(USE_SIMULATOR)
42
+ // Running without a simulator on a native arm platform.
43
+
44
+ namespace v8 {
45
+ namespace internal {
46
+
47
+ // When running without a simulator we call the entry directly.
48
+ #define CALL_GENERATED_CODE(entry, p0, p1, p2, p3, p4) \
49
+ (entry(p0, p1, p2, p3, p4))
50
+
51
+ typedef int (*arm_regexp_matcher)(String*, int, const byte*, const byte*,
52
+ void*, int*, Address, int);
53
+
54
+
55
+ // Call the generated regexp code directly. The code at the entry address
56
+ // should act as a function matching the type arm_regexp_matcher.
57
+ // The fifth argument is a dummy that reserves the space used for
58
+ // the return address added by the ExitFrame in native calls.
59
+ #define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6) \
60
+ (FUNCTION_CAST<arm_regexp_matcher>(entry)(p0, p1, p2, p3, NULL, p4, p5, p6))
61
+
62
+ #define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
63
+ (reinterpret_cast<TryCatch*>(try_catch_address))
64
+
65
+ // The stack limit beyond which we will throw stack overflow errors in
66
+ // generated code. Because generated code on arm uses the C stack, we
67
+ // just use the C stack limit.
68
+ class SimulatorStack : public v8::internal::AllStatic {
69
+ public:
70
+ static inline uintptr_t JsLimitFromCLimit(uintptr_t c_limit) {
71
+ return c_limit;
72
+ }
73
+
74
+ static inline uintptr_t RegisterCTryCatch(uintptr_t try_catch_address) {
75
+ return try_catch_address;
76
+ }
77
+
78
+ static inline void UnregisterCTryCatch() { }
79
+ };
80
+
81
+ } } // namespace v8::internal
82
+
83
+ #else // !defined(USE_SIMULATOR)
84
+ // Running with a simulator.
85
+
86
+ #include "constants-arm.h"
87
+ #include "hashmap.h"
88
+ #include "assembler.h"
89
+
90
+ namespace v8 {
91
+ namespace internal {
92
+
93
+ class CachePage {
94
+ public:
95
+ static const int LINE_VALID = 0;
96
+ static const int LINE_INVALID = 1;
97
+
98
+ static const int kPageShift = 12;
99
+ static const int kPageSize = 1 << kPageShift;
100
+ static const int kPageMask = kPageSize - 1;
101
+ static const int kLineShift = 2; // The cache line is only 4 bytes right now.
102
+ static const int kLineLength = 1 << kLineShift;
103
+ static const int kLineMask = kLineLength - 1;
104
+
105
+ CachePage() {
106
+ memset(&validity_map_, LINE_INVALID, sizeof(validity_map_));
107
+ }
108
+
109
+ char* ValidityByte(int offset) {
110
+ return &validity_map_[offset >> kLineShift];
111
+ }
112
+
113
+ char* CachedData(int offset) {
114
+ return &data_[offset];
115
+ }
116
+
117
+ private:
118
+ char data_[kPageSize]; // The cached data.
119
+ static const int kValidityMapSize = kPageSize >> kLineShift;
120
+ char validity_map_[kValidityMapSize]; // One byte per line.
121
+ };
122
+
123
+
124
+ class Simulator {
125
+ public:
126
+ friend class Debugger;
127
+ enum Register {
128
+ no_reg = -1,
129
+ r0 = 0, r1, r2, r3, r4, r5, r6, r7,
130
+ r8, r9, r10, r11, r12, r13, r14, r15,
131
+ num_registers,
132
+ sp = 13,
133
+ lr = 14,
134
+ pc = 15,
135
+ s0 = 0, s1, s2, s3, s4, s5, s6, s7,
136
+ s8, s9, s10, s11, s12, s13, s14, s15,
137
+ s16, s17, s18, s19, s20, s21, s22, s23,
138
+ s24, s25, s26, s27, s28, s29, s30, s31,
139
+ num_s_registers = 32,
140
+ d0 = 0, d1, d2, d3, d4, d5, d6, d7,
141
+ d8, d9, d10, d11, d12, d13, d14, d15,
142
+ num_d_registers = 16
143
+ };
144
+
145
+ Simulator();
146
+ ~Simulator();
147
+
148
+ // The currently executing Simulator instance. Potentially there can be one
149
+ // for each native thread.
150
+ static Simulator* current();
151
+
152
+ // Accessors for register state. Reading the pc value adheres to the ARM
153
+ // architecture specification and is off by a 8 from the currently executing
154
+ // instruction.
155
+ void set_register(int reg, int32_t value);
156
+ int32_t get_register(int reg) const;
157
+ void set_dw_register(int dreg, const int* dbl);
158
+
159
+ // Support for VFP.
160
+ void set_s_register(int reg, unsigned int value);
161
+ unsigned int get_s_register(int reg) const;
162
+ void set_d_register_from_double(int dreg, const double& dbl);
163
+ double get_double_from_d_register(int dreg);
164
+ void set_s_register_from_float(int sreg, const float dbl);
165
+ float get_float_from_s_register(int sreg);
166
+ void set_s_register_from_sinteger(int reg, const int value);
167
+ int get_sinteger_from_s_register(int reg);
168
+
169
+ // Special case of set_register and get_register to access the raw PC value.
170
+ void set_pc(int32_t value);
171
+ int32_t get_pc() const;
172
+
173
+ // Accessor to the internal simulator stack area.
174
+ uintptr_t StackLimit() const;
175
+
176
+ // Executes ARM instructions until the PC reaches end_sim_pc.
177
+ void Execute();
178
+
179
+ // Call on program start.
180
+ static void Initialize();
181
+
182
+ // V8 generally calls into generated JS code with 5 parameters and into
183
+ // generated RegExp code with 7 parameters. This is a convenience function,
184
+ // which sets up the simulator state and grabs the result on return.
185
+ int32_t Call(byte* entry, int argument_count, ...);
186
+
187
+ // Push an address onto the JS stack.
188
+ uintptr_t PushAddress(uintptr_t address);
189
+
190
+ // Pop an address from the JS stack.
191
+ uintptr_t PopAddress();
192
+
193
+ // ICache checking.
194
+ static void FlushICache(void* start, size_t size);
195
+
196
+ // Returns true if pc register contains one of the 'special_values' defined
197
+ // below (bad_lr, end_sim_pc).
198
+ bool has_bad_pc() const;
199
+
200
+ private:
201
+ enum special_values {
202
+ // Known bad pc value to ensure that the simulator does not execute
203
+ // without being properly setup.
204
+ bad_lr = -1,
205
+ // A pc value used to signal the simulator to stop execution. Generally
206
+ // the lr is set to this value on transition from native C code to
207
+ // simulated execution, so that the simulator can "return" to the native
208
+ // C code.
209
+ end_sim_pc = -2
210
+ };
211
+
212
+ // Unsupported instructions use Format to print an error and stop execution.
213
+ void Format(Instruction* instr, const char* format);
214
+
215
+ // Checks if the current instruction should be executed based on its
216
+ // condition bits.
217
+ bool ConditionallyExecute(Instruction* instr);
218
+
219
+ // Helper functions to set the conditional flags in the architecture state.
220
+ void SetNZFlags(int32_t val);
221
+ void SetCFlag(bool val);
222
+ void SetVFlag(bool val);
223
+ bool CarryFrom(int32_t left, int32_t right);
224
+ bool BorrowFrom(int32_t left, int32_t right);
225
+ bool OverflowFrom(int32_t alu_out,
226
+ int32_t left,
227
+ int32_t right,
228
+ bool addition);
229
+
230
+ // Support for VFP.
231
+ void Compute_FPSCR_Flags(double val1, double val2);
232
+ void Copy_FPSCR_to_APSR();
233
+
234
+ // Helper functions to decode common "addressing" modes
235
+ int32_t GetShiftRm(Instruction* instr, bool* carry_out);
236
+ int32_t GetImm(Instruction* instr, bool* carry_out);
237
+ void HandleRList(Instruction* instr, bool load);
238
+ void SoftwareInterrupt(Instruction* instr);
239
+
240
+ // Stop helper functions.
241
+ inline bool isStopInstruction(Instruction* instr);
242
+ inline bool isWatchedStop(uint32_t bkpt_code);
243
+ inline bool isEnabledStop(uint32_t bkpt_code);
244
+ inline void EnableStop(uint32_t bkpt_code);
245
+ inline void DisableStop(uint32_t bkpt_code);
246
+ inline void IncreaseStopCounter(uint32_t bkpt_code);
247
+ void PrintStopInfo(uint32_t code);
248
+
249
+ // Read and write memory.
250
+ inline uint8_t ReadBU(int32_t addr);
251
+ inline int8_t ReadB(int32_t addr);
252
+ inline void WriteB(int32_t addr, uint8_t value);
253
+ inline void WriteB(int32_t addr, int8_t value);
254
+
255
+ inline uint16_t ReadHU(int32_t addr, Instruction* instr);
256
+ inline int16_t ReadH(int32_t addr, Instruction* instr);
257
+ // Note: Overloaded on the sign of the value.
258
+ inline void WriteH(int32_t addr, uint16_t value, Instruction* instr);
259
+ inline void WriteH(int32_t addr, int16_t value, Instruction* instr);
260
+
261
+ inline int ReadW(int32_t addr, Instruction* instr);
262
+ inline void WriteW(int32_t addr, int value, Instruction* instr);
263
+
264
+ int32_t* ReadDW(int32_t addr);
265
+ void WriteDW(int32_t addr, int32_t value1, int32_t value2);
266
+
267
+ // Executing is handled based on the instruction type.
268
+ // Both type 0 and type 1 rolled into one.
269
+ void DecodeType01(Instruction* instr);
270
+ void DecodeType2(Instruction* instr);
271
+ void DecodeType3(Instruction* instr);
272
+ void DecodeType4(Instruction* instr);
273
+ void DecodeType5(Instruction* instr);
274
+ void DecodeType6(Instruction* instr);
275
+ void DecodeType7(Instruction* instr);
276
+
277
+ // Support for VFP.
278
+ void DecodeTypeVFP(Instruction* instr);
279
+ void DecodeType6CoprocessorIns(Instruction* instr);
280
+
281
+ void DecodeVMOVBetweenCoreAndSinglePrecisionRegisters(Instruction* instr);
282
+ void DecodeVCMP(Instruction* instr);
283
+ void DecodeVCVTBetweenDoubleAndSingle(Instruction* instr);
284
+ void DecodeVCVTBetweenFloatingPointAndInteger(Instruction* instr);
285
+
286
+ // Executes one instruction.
287
+ void InstructionDecode(Instruction* instr);
288
+
289
+ // ICache.
290
+ static void CheckICache(Instruction* instr);
291
+ static void FlushOnePage(intptr_t start, int size);
292
+ static CachePage* GetCachePage(void* page);
293
+
294
+ // Runtime call support.
295
+ static void* RedirectExternalReference(
296
+ void* external_function,
297
+ v8::internal::ExternalReference::Type type);
298
+
299
+ // For use in calls that take two double values, constructed from r0, r1, r2
300
+ // and r3.
301
+ void GetFpArgs(double* x, double* y);
302
+ void SetFpResult(const double& result);
303
+ void TrashCallerSaveRegisters();
304
+
305
+ // Architecture state.
306
+ // Saturating instructions require a Q flag to indicate saturation.
307
+ // There is currently no way to read the CPSR directly, and thus read the Q
308
+ // flag, so this is left unimplemented.
309
+ int32_t registers_[16];
310
+ bool n_flag_;
311
+ bool z_flag_;
312
+ bool c_flag_;
313
+ bool v_flag_;
314
+
315
+ // VFP architecture state.
316
+ unsigned int vfp_register[num_s_registers];
317
+ bool n_flag_FPSCR_;
318
+ bool z_flag_FPSCR_;
319
+ bool c_flag_FPSCR_;
320
+ bool v_flag_FPSCR_;
321
+
322
+ // VFP rounding mode. See ARM DDI 0406B Page A2-29.
323
+ VFPRoundingMode FPSCR_rounding_mode_;
324
+
325
+ // VFP FP exception flags architecture state.
326
+ bool inv_op_vfp_flag_;
327
+ bool div_zero_vfp_flag_;
328
+ bool overflow_vfp_flag_;
329
+ bool underflow_vfp_flag_;
330
+ bool inexact_vfp_flag_;
331
+
332
+ // Simulator support.
333
+ char* stack_;
334
+ bool pc_modified_;
335
+ int icount_;
336
+ static bool initialized_;
337
+
338
+ // Icache simulation
339
+ static v8::internal::HashMap* i_cache_;
340
+
341
+ // Registered breakpoints.
342
+ Instruction* break_pc_;
343
+ Instr break_instr_;
344
+
345
+ // A stop is watched if its code is less than kNumOfWatchedStops.
346
+ // Only watched stops support enabling/disabling and the counter feature.
347
+ static const uint32_t kNumOfWatchedStops = 256;
348
+
349
+ // Breakpoint is disabled if bit 31 is set.
350
+ static const uint32_t kStopDisabledBit = 1 << 31;
351
+
352
+ // A stop is enabled, meaning the simulator will stop when meeting the
353
+ // instruction, if bit 31 of watched_stops[code].count is unset.
354
+ // The value watched_stops[code].count & ~(1 << 31) indicates how many times
355
+ // the breakpoint was hit or gone through.
356
+ struct StopCountAndDesc {
357
+ uint32_t count;
358
+ char* desc;
359
+ };
360
+ StopCountAndDesc watched_stops[kNumOfWatchedStops];
361
+ };
362
+
363
+
364
+ // When running with the simulator transition into simulated execution at this
365
+ // point.
366
+ #define CALL_GENERATED_CODE(entry, p0, p1, p2, p3, p4) \
367
+ reinterpret_cast<Object*>(Simulator::current()->Call( \
368
+ FUNCTION_ADDR(entry), 5, p0, p1, p2, p3, p4))
369
+
370
+ #define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6) \
371
+ Simulator::current()->Call(entry, 8, p0, p1, p2, p3, NULL, p4, p5, p6)
372
+
373
+ #define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
374
+ try_catch_address == \
375
+ NULL ? NULL : *(reinterpret_cast<TryCatch**>(try_catch_address))
376
+
377
+
378
+ // The simulator has its own stack. Thus it has a different stack limit from
379
+ // the C-based native code. Setting the c_limit to indicate a very small
380
+ // stack causes stack overflow errors, since the simulator ignores the input.
381
+ // This is unlikely to be an issue in practice, though it might cause testing
382
+ // trouble down the line.
383
+ class SimulatorStack : public v8::internal::AllStatic {
384
+ public:
385
+ static inline uintptr_t JsLimitFromCLimit(uintptr_t c_limit) {
386
+ return Simulator::current()->StackLimit();
387
+ }
388
+
389
+ static inline uintptr_t RegisterCTryCatch(uintptr_t try_catch_address) {
390
+ Simulator* sim = Simulator::current();
391
+ return sim->PushAddress(try_catch_address);
392
+ }
393
+
394
+ static inline void UnregisterCTryCatch() {
395
+ Simulator::current()->PopAddress();
396
+ }
397
+ };
398
+
399
+ } } // namespace v8::internal
400
+
401
+ #endif // !defined(USE_SIMULATOR)
402
+ #endif // V8_ARM_SIMULATOR_ARM_H_
@@ -0,0 +1,4077 @@
1
+ // Copyright 2006-2009 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+ #include "v8.h"
29
+
30
+ #if defined(V8_TARGET_ARCH_ARM)
31
+
32
+ #include "ic-inl.h"
33
+ #include "codegen-inl.h"
34
+ #include "stub-cache.h"
35
+
36
+ namespace v8 {
37
+ namespace internal {
38
+
39
+ #define __ ACCESS_MASM(masm)
40
+
41
+
42
// Probes one table (primary or secondary) of the stub cache. If the key at
// 'offset' matches 'name' and the cached code object's flags match 'flags',
// control jumps into the cached code stub; otherwise execution falls through
// at the bound 'miss' label. 'scratch' and 'scratch2' are clobbered; 'offset'
// is only overwritten on the hit path, so on a miss the caller may reuse it
// for a second probe.
static void ProbeTable(MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register name,
                       Register offset,
                       Register scratch,
                       Register scratch2) {
  ExternalReference key_offset(SCTableReference::keyReference(table));
  ExternalReference value_offset(SCTableReference::valueReference(table));

  uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
  uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());

  // Check the relative positions of the address fields: the value table must
  // sit after the key table at a word-aligned distance small enough to encode
  // as an immediate operand below.
  ASSERT(value_off_addr > key_off_addr);
  ASSERT((value_off_addr - key_off_addr) % 4 == 0);
  ASSERT((value_off_addr - key_off_addr) < (256 * 4));

  Label miss;
  Register offsets_base_addr = scratch;

  // Check that the key in the entry matches the name.
  __ mov(offsets_base_addr, Operand(key_offset));
  __ ldr(ip, MemOperand(offsets_base_addr, offset, LSL, 1));
  __ cmp(name, ip);
  __ b(ne, &miss);

  // Get the code entry from the cache.
  __ add(offsets_base_addr, offsets_base_addr,
         Operand(value_off_addr - key_off_addr));
  __ ldr(scratch2, MemOperand(offsets_base_addr, offset, LSL, 1));

  // Check that the flags match what we're looking for.
  __ ldr(scratch2, FieldMemOperand(scratch2, Code::kFlagsOffset));
  __ bic(scratch2, scratch2, Operand(Code::kFlagsNotUsedInLookup));
  __ cmp(scratch2, Operand(flags));
  __ b(ne, &miss);

  // Re-load code entry from cache.
  __ ldr(offset, MemOperand(offsets_base_addr, offset, LSL, 1));

  // Jump to the first instruction in the code stub.
  __ add(offset, offset, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(offset);

  // Miss: fall through.
  __ bind(&miss);
}
90
+
91
+
92
+ // Helper function used to check that the dictionary doesn't contain
93
+ // the property. This function may return false negatives, so miss_label
94
+ // must always call a backup property check that is complete.
95
+ // This function is safe to call if the receiver has fast properties.
96
+ // Name must be a symbol and receiver must be a heap object.
97
static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                             Label* miss_label,
                                             Register receiver,
                                             String* name,
                                             Register scratch0,
                                             Register scratch1) {
  ASSERT(name->IsSymbol());
  // Both counters are bumped up front; negative_lookups_miss is decremented
  // again at 'done' if the negative lookup actually succeeds.
  __ IncrementCounter(&Counters::negative_lookups, 1, scratch0, scratch1);
  __ IncrementCounter(&Counters::negative_lookups_miss, 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  Register map = scratch1;
  __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ tst(scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ b(ne, miss_label);

  // Check that receiver is a JSObject.
  __ ldrb(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ cmp(scratch0, Operand(FIRST_JS_OBJECT_TYPE));
  __ b(lt, miss_label);

  // Load properties array.
  Register properties = scratch0;
  __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ ldr(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ cmp(map, tmp);
  __ b(ne, miss_label);

  // Restore the temporarily used register ('properties' was clobbered by the
  // LoadRoot above because it aliases 'tmp').
  __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));

  // Compute the capacity mask.
  const int kCapacityOffset =
      StringDictionary::kHeaderSize +
      StringDictionary::kCapacityIndex * kPointerSize;

  // Generate an unrolled loop that performs a few probes before
  // giving up.
  static const int kProbes = 4;
  const int kElementsStartOffset =
      StringDictionary::kHeaderSize +
      StringDictionary::kElementsStartIndex * kPointerSize;

  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the null value).
  for (int i = 0; i < kProbes; i++) {
    // scratch0 points to properties hash.
    // Compute the masked index: (hash + i + i * i) & mask.
    Register index = scratch1;
    // Capacity is smi 2^n, so capacity - 1 is a mask over the smi-tagged hash.
    __ ldr(index, FieldMemOperand(properties, kCapacityOffset));
    __ sub(index, index, Operand(1));
    __ and_(index, index, Operand(
        Smi::FromInt(name->Hash() + StringDictionary::GetProbeOffset(i))));

    // Scale the index by multiplying by the entry size.
    ASSERT(StringDictionary::kEntrySize == 3);
    __ add(index, index, Operand(index, LSL, 1));  // index *= 3.

    Register entity_name = scratch1;
    // Having undefined at this place means the name is not contained.
    ASSERT_EQ(kSmiTagSize, 1);
    Register tmp = properties;
    __ add(tmp, properties, Operand(index, LSL, 1));
    __ ldr(entity_name, FieldMemOperand(tmp, kElementsStartOffset));

    ASSERT(!tmp.is(entity_name));
    __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex);
    __ cmp(entity_name, tmp);
    if (i != kProbes - 1) {
      __ b(eq, &done);

      // Stop if found the property.
      __ cmp(entity_name, Operand(Handle<String>(name)));
      __ b(eq, miss_label);

      // Check if the entry name is not a symbol.
      __ ldr(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset));
      __ ldrb(entity_name,
              FieldMemOperand(entity_name, Map::kInstanceTypeOffset));
      __ tst(entity_name, Operand(kIsSymbolMask));
      __ b(eq, miss_label);

      // Restore the properties (clobbered via the 'tmp' alias above).
      __ ldr(properties,
             FieldMemOperand(receiver, JSObject::kPropertiesOffset));
    } else {
      // Give up probing if still not found the undefined value.
      __ b(ne, miss_label);
    }
  }
  __ bind(&done);
  // Negative lookup succeeded without a runtime miss; undo the pessimistic
  // increment from the top of the function.
  __ DecrementCounter(&Counters::negative_lookups_miss, 1, scratch0, scratch1);
}
203
+
204
+
205
// Probes the stub cache for a code stub keyed on (receiver map, name, flags).
// Probes the primary table first and, on a primary miss, the secondary table.
// On a hit control jumps into the cached stub; on a full miss execution falls
// through so the caller can enter the runtime system.
void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2) {
  Label miss;

  // Make sure that code is valid. The shifting code relies on the
  // entry size being 8.
  ASSERT(sizeof(Entry) == 8);

  // Make sure the flags does not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));
  ASSERT(!extra.is(receiver));
  ASSERT(!extra.is(name));
  ASSERT(!extra.is(scratch));
  ASSERT(!extra2.is(receiver));
  ASSERT(!extra2.is(name));
  ASSERT(!extra2.is(scratch));
  ASSERT(!extra2.is(extra));

  // Check scratch, extra and extra2 registers are valid.
  ASSERT(!scratch.is(no_reg));
  ASSERT(!extra.is(no_reg));
  ASSERT(!extra2.is(no_reg));

  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Get the map of the receiver and compute the hash.
  __ ldr(scratch, FieldMemOperand(name, String::kHashFieldOffset));
  __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ add(scratch, scratch, Operand(ip));
  __ eor(scratch, scratch, Operand(flags));
  __ and_(scratch,
          scratch,
          Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the primary table.
  ProbeTable(masm, flags, kPrimary, name, scratch, extra, extra2);

  // Primary miss: Compute hash for secondary probe.
  __ sub(scratch, scratch, Operand(name));
  __ add(scratch, scratch, Operand(flags));
  __ and_(scratch,
          scratch,
          Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the secondary table.
  ProbeTable(masm, flags, kSecondary, name, scratch, extra, extra2);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
}
267
+
268
+
269
// Loads, into 'prototype', the prototype of the global function stored at
// context slot 'index', by chasing the chain
// cp -> global object -> global context -> function -> initial map ->
// prototype. Only 'prototype' is clobbered.
void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  __ ldr(prototype,
         FieldMemOperand(prototype, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  __ ldr(prototype, MemOperand(prototype, Context::SlotOffset(index)));
  // Load the initial map. The global functions all have initial maps.
  __ ldr(prototype,
         FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}
285
+
286
+
287
// Like GenerateLoadGlobalFunctionPrototype, but resolves the global function
// and its initial map at compile time, embedding the map as a constant. Must
// therefore verify at run time that we are still in the same context, and
// jumps to 'miss' if not.
void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm, int index, Register prototype, Label* miss) {
  // Check we're still in the same context.
  __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ Move(ip, Top::global());
  __ cmp(prototype, ip);
  __ b(ne, miss);
  // Get the global function with the given index.
  JSFunction* function = JSFunction::cast(Top::global_context()->get(index));
  // Load its initial map. The global functions all have initial maps.
  __ Move(prototype, Handle<Map>(function->initial_map()));
  // Load the prototype from the initial map.
  __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}
301
+
302
+
303
+ // Load a fast property out of a holder object (src). In-object properties
304
+ // are loaded directly otherwise the property is loaded from the properties
305
+ // fixed array.
306
void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst, Register src,
                                            JSObject* holder, int index) {
  // Adjust for the number of properties stored in the holder. A negative
  // adjusted index selects an in-object slot; a non-negative one indexes the
  // out-of-object properties fixed array.
  index -= holder->map()->inobject_properties();
  if (index < 0) {
    // Get the property straight out of the holder.
    int offset = holder->map()->instance_size() + (index * kPointerSize);
    __ ldr(dst, FieldMemOperand(src, offset));
  } else {
    // Calculate the offset into the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    __ ldr(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
    __ ldr(dst, FieldMemOperand(dst, offset));
  }
}
322
+
323
+
324
// Emits code that returns (in r0) the length of a JS array held in
// 'receiver', or branches to 'miss_label' if the receiver is a smi or not a
// JS array. Emits its own return; does not fall through on success.
void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, miss_label);

  // Check that the object is a JS array.
  __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
  __ b(ne, miss_label);

  // Load length directly from the JS array.
  __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ Ret();
}
340
+
341
+
342
+ // Generate code to check if an object is a string. If the object is a
343
+ // heap object, its map's instance type is left in the scratch1 register.
344
+ // If this is not needed, scratch1 and scratch2 may be the same register.
345
static void GenerateStringCheck(MacroAssembler* masm,
                                Register receiver,
                                Register scratch1,
                                Register scratch2,
                                Label* smi,
                                Label* non_string_object) {
  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, smi);

  // Check that the object is a string; on fall-through scratch1 holds the
  // instance type, scratch2 the masked string-tag bits.
  __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ and_(scratch2, scratch1, Operand(kIsNotStringMask));
  // The cast is to resolve the overload for the argument of 0x0.
  __ cmp(scratch2, Operand(static_cast<int32_t>(kStringTag)));
  __ b(ne, non_string_object);
}
363
+
364
+
365
+ // Generate code to load the length from a string object and return the length.
366
+ // If the receiver object is not a string or a wrapped string object the
367
+ // execution continues at the miss label. The register containing the
368
+ // receiver is potentially clobbered.
369
void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss,
                                            bool support_wrappers) {
  Label check_wrapper;

  // Check if the object is a string leaving the instance type in the
  // scratch1 register.
  GenerateStringCheck(masm, receiver, scratch1, scratch2, miss,
                      support_wrappers ? &check_wrapper : miss);

  // Load length directly from the string; result is returned in r0.
  __ ldr(r0, FieldMemOperand(receiver, String::kLengthOffset));
  __ Ret();

  if (support_wrappers) {
    // Check if the object is a JSValue wrapper.
    __ bind(&check_wrapper);
    __ cmp(scratch1, Operand(JS_VALUE_TYPE));
    __ b(ne, miss);

    // Unwrap the value and check if the wrapped value is a string.
    __ ldr(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset));
    GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss);
    __ ldr(r0, FieldMemOperand(scratch1, String::kLengthOffset));
    __ Ret();
  }
}
399
+
400
+
401
// Emits code that returns (in r0) the prototype of the function object in
// 'receiver', branching to 'miss_label' if it cannot be fetched. Emits its
// own return; does not fall through.
void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ mov(r0, scratch1);
  __ Ret();
}
410
+
411
+
412
+ // Generate StoreField code, value is passed in r0 register.
413
+ // When leaving generated code after success, the receiver_reg and name_reg
414
+ // may be clobbered. Upon branch to miss_label, the receiver and name
415
+ // registers have their original values.
416
void StubCompiler::GenerateStoreField(MacroAssembler* masm,
                                      JSObject* object,
                                      int index,
                                      Map* transition,
                                      Register receiver_reg,
                                      Register name_reg,
                                      Register scratch,
                                      Label* miss_label) {
  // r0 : value
  Label exit;

  // Check that the receiver isn't a smi.
  __ tst(receiver_reg, Operand(kSmiTagMask));
  __ b(eq, miss_label);

  // Check that the map of the receiver hasn't changed.
  __ ldr(scratch, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
  __ cmp(scratch, Operand(Handle<Map>(object->map())));
  __ b(ne, miss_label);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ push(receiver_reg);
    __ mov(r2, Operand(Handle<Map>(transition)));
    __ Push(r2, r0);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage)),
        3, 1);
    // Tail call above: nothing more to emit on this path.
    return;
  }

  if (transition != NULL) {
    // Update the map of the object; no write barrier updating is
    // needed because the map is never in new space.
    __ mov(ip, Operand(Handle<Map>(transition)));
    __ str(ip, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
  }

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ str(r0, FieldMemOperand(receiver_reg, offset));

    // Skip updating write barrier if storing a smi.
    __ tst(r0, Operand(kSmiTagMask));
    __ b(eq, &exit);

    // Update the write barrier for the array address.
    // Pass the now unused name_reg as a scratch register.
    __ RecordWrite(receiver_reg, Operand(offset), name_reg, scratch);
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array
    __ ldr(scratch, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ str(r0, FieldMemOperand(scratch, offset));

    // Skip updating write barrier if storing a smi.
    __ tst(r0, Operand(kSmiTagMask));
    __ b(eq, &exit);

    // Update the write barrier for the array address.
    // Ok to clobber receiver_reg and name_reg, since we return.
    __ RecordWrite(scratch, Operand(offset), name_reg, receiver_reg);
  }

  // Return the value (register r0).
  __ bind(&exit);
  __ Ret();
}
502
+
503
+
504
+ void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
505
+ ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
506
+ Code* code = NULL;
507
+ if (kind == Code::LOAD_IC) {
508
+ code = Builtins::builtin(Builtins::LoadIC_Miss);
509
+ } else {
510
+ code = Builtins::builtin(Builtins::KeyedLoadIC_Miss);
511
+ }
512
+
513
+ Handle<Code> ic(code);
514
+ __ Jump(ic, RelocInfo::CODE_TARGET);
515
+ }
516
+
517
+
518
// Invokes the function in r1 (receiver in r0) after verifying it really is a
// JS function; branches to 'miss' otherwise. For global objects the receiver
// slot on the stack is first patched with the global proxy.
static void GenerateCallFunction(MacroAssembler* masm,
                                 Object* object,
                                 const ParameterCount& arguments,
                                 Label* miss) {
  // ----------- S t a t e -------------
  //  -- r0: receiver
  //  -- r1: function to call
  // -----------------------------------

  // Check that the function really is a function.
  __ JumpIfSmi(r1, miss);
  __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
  __ b(ne, miss);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
    __ str(r3, MemOperand(sp, arguments.immediate() * kPointerSize));
  }

  // Invoke the function.
  __ InvokeFunction(r1, arguments, JUMP_FUNCTION);
}
542
+
543
+
544
// Pushes the five words an interceptor call expects, in order: name,
// interceptor info, receiver, holder, interceptor data. Note that 'name' is
// reused as a scratch register after its value has been pushed.
static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     JSObject* holder_obj) {
  __ push(name);
  InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
  // The interceptor must not be in new space so it can be embedded as an
  // immortal immovable constant.
  ASSERT(!Heap::InNewSpace(interceptor));
  Register scratch = name;
  __ mov(scratch, Operand(Handle<Object>(interceptor)));
  __ push(scratch);
  __ push(receiver);
  __ push(holder);
  __ ldr(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset));
  __ push(scratch);
}
560
+
561
+
562
// Emits a runtime call to IC::kLoadPropertyWithInterceptorOnly with the five
// interceptor arguments pushed by PushInterceptorArguments.
static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
                                                   Register receiver,
                                                   Register holder,
                                                   Register name,
                                                   JSObject* holder_obj) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly));
  // CEntryStub calling convention: r0 = argc (matches the five words pushed
  // above), r1 = C entry point.
  __ mov(r0, Operand(5));
  __ mov(r1, Operand(ref));

  CEntryStub stub(1);
  __ CallStub(&stub);
}
577
+
578
+ static const int kFastApiCallArguments = 3;
579
+
580
+ // Reserves space for the extra arguments to FastHandleApiCall in the
581
+ // caller's frame.
582
+ //
583
+ // These arguments are set by CheckPrototypes and GenerateFastApiDirectCall.
584
+ static void ReserveSpaceForFastApiCall(MacroAssembler* masm,
585
+ Register scratch) {
586
+ __ mov(scratch, Operand(Smi::FromInt(0)));
587
+ for (int i = 0; i < kFastApiCallArguments; i++) {
588
+ __ push(scratch);
589
+ }
590
+ }
591
+
592
+
593
// Undoes the effects of ReserveSpaceForFastApiCall by dropping the
// kFastApiCallArguments reserved stack slots.
static void FreeSpaceForFastApiCall(MacroAssembler* masm) {
  __ Drop(kFastApiCallArguments);
}
597
+
598
+
599
// Emits a direct call to a simple (fast) API function. Expects the stack
// slots reserved by ReserveSpaceForFastApiCall, fills in the callee and call
// data, builds a v8::Arguments structure inside an exit frame and calls the
// API function through TryCallApiFunctionAndReturn. Returns the result of
// that call, which may be an allocation failure (see comment below).
static MaybeObject* GenerateFastApiDirectCall(MacroAssembler* masm,
                                              const CallOptimization& optimization,
                                              int argc) {
  // ----------- S t a t e -------------
  //  -- sp[0]              : holder (set by CheckPrototypes)
  //  -- sp[4]              : callee js function
  //  -- sp[8]              : call data
  //  -- sp[12]             : last js argument
  //  -- ...
  //  -- sp[(argc + 3) * 4] : first js argument
  //  -- sp[(argc + 4) * 4] : receiver
  // -----------------------------------
  // Get the function and setup the context.
  JSFunction* function = optimization.constant_function();
  __ mov(r5, Operand(Handle<JSFunction>(function)));
  __ ldr(cp, FieldMemOperand(r5, JSFunction::kContextOffset));

  // Pass the additional arguments FastHandleApiCall expects.
  Object* call_data = optimization.api_call_info()->data();
  Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info());
  if (Heap::InNewSpace(call_data)) {
    // Call data lives in new space and cannot be embedded directly in code;
    // load it from the (old-space) CallHandlerInfo instead.
    __ Move(r0, api_call_info_handle);
    __ ldr(r6, FieldMemOperand(r0, CallHandlerInfo::kDataOffset));
  } else {
    __ Move(r6, Handle<Object>(call_data));
  }
  // Store js function and call data into the two stack slots above sp[0]
  // (the callee and call data slots in the layout above); 'ib' increments
  // the base before each store, leaving sp itself unchanged.
  __ stm(ib, sp, r5.bit() | r6.bit());

  // r2 points to call data as expected by Arguments
  // (refer to layout above).
  __ add(r2, sp, Operand(2 * kPointerSize));

  Object* callback = optimization.api_call_info()->callback();
  Address api_function_address = v8::ToCData<Address>(callback);
  ApiFunction fun(api_function_address);

  const int kApiStackSpace = 4;
  __ EnterExitFrame(false, kApiStackSpace);

  // r0 = v8::Arguments&
  // Arguments is after the return address.
  __ add(r0, sp, Operand(1 * kPointerSize));
  // v8::Arguments::implicit_args = data
  __ str(r2, MemOperand(r0, 0 * kPointerSize));
  // v8::Arguments::values = last argument
  __ add(ip, r2, Operand(argc * kPointerSize));
  __ str(ip, MemOperand(r0, 1 * kPointerSize));
  // v8::Arguments::length_ = argc
  __ mov(ip, Operand(argc));
  __ str(ip, MemOperand(r0, 2 * kPointerSize));
  // v8::Arguments::is_construct_call = 0
  __ mov(ip, Operand(0));
  __ str(ip, MemOperand(r0, 3 * kPointerSize));

  // Emitting a stub call may try to allocate (if the code is not
  // already generated). Do not allow the assembler to perform a
  // garbage collection but instead return the allocation failure
  // object.
  const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
  ExternalReference ref =
      ExternalReference(&fun, ExternalReference::DIRECT_API_CALL);
  return masm->TryCallApiFunctionAndReturn(ref, kStackUnwindSpace);
}
663
+
664
// Helper for compiling call stubs on objects with a named interceptor.
// Invokes the interceptor first; if it yields no result, falls back to a
// cached constant function (possibly via the fast API call path) or to the
// generic runtime call.
class CallInterceptorCompiler BASE_EMBEDDED {
 public:
  CallInterceptorCompiler(StubCompiler* stub_compiler,
                          const ParameterCount& arguments,
                          Register name)
      : stub_compiler_(stub_compiler),
        arguments_(arguments),
        name_(name) {}

  // Entry point. Chooses between the cacheable (constant-function) path and
  // the regular runtime path depending on the lookup result.
  MaybeObject* Compile(MacroAssembler* masm,
                       JSObject* object,
                       JSObject* holder,
                       String* name,
                       LookupResult* lookup,
                       Register receiver,
                       Register scratch1,
                       Register scratch2,
                       Register scratch3,
                       Label* miss) {
    ASSERT(holder->HasNamedInterceptor());
    ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());

    // Check that the receiver isn't a smi.
    __ JumpIfSmi(receiver, miss);

    CallOptimization optimization(lookup);

    if (optimization.is_constant_call()) {
      return CompileCacheable(masm,
                              object,
                              receiver,
                              scratch1,
                              scratch2,
                              scratch3,
                              holder,
                              lookup,
                              name,
                              optimization,
                              miss);
    } else {
      CompileRegular(masm,
                     object,
                     receiver,
                     scratch1,
                     scratch2,
                     scratch3,
                     name,
                     holder,
                     miss);
      return Heap::undefined_value();
    }
  }

 private:
  // Path for calls that resolve to a cached constant function. May use the
  // fast API call mechanism when the prototype depth checks allow it.
  // Returns a Failure if code emission needed to allocate and could not.
  MaybeObject* CompileCacheable(MacroAssembler* masm,
                                JSObject* object,
                                Register receiver,
                                Register scratch1,
                                Register scratch2,
                                Register scratch3,
                                JSObject* interceptor_holder,
                                LookupResult* lookup,
                                String* name,
                                const CallOptimization& optimization,
                                Label* miss_label) {
    ASSERT(optimization.is_constant_call());
    ASSERT(!lookup->holder()->IsGlobalObject());

    // Probe the prototype depths on both sides of the interceptor holder to
    // decide whether the fast API call path can be used.
    int depth1 = kInvalidProtoDepth;
    int depth2 = kInvalidProtoDepth;
    bool can_do_fast_api_call = false;
    if (optimization.is_simple_api_call() &&
        !lookup->holder()->IsGlobalObject()) {
      depth1 =
          optimization.GetPrototypeDepthOfExpectedType(object,
                                                       interceptor_holder);
      if (depth1 == kInvalidProtoDepth) {
        depth2 =
            optimization.GetPrototypeDepthOfExpectedType(interceptor_holder,
                                                         lookup->holder());
      }
      can_do_fast_api_call = (depth1 != kInvalidProtoDepth) ||
                             (depth2 != kInvalidProtoDepth);
    }

    __ IncrementCounter(&Counters::call_const_interceptor, 1,
                        scratch1, scratch2);

    if (can_do_fast_api_call) {
      __ IncrementCounter(&Counters::call_const_interceptor_fast_api, 1,
                          scratch1, scratch2);
      ReserveSpaceForFastApiCall(masm, scratch1);
    }

    // Check that the maps from receiver to interceptor's holder
    // haven't changed and thus we can invoke interceptor.
    // On the fast path a miss must first free the reserved stack slots.
    Label miss_cleanup;
    Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver,
                                        interceptor_holder, scratch1,
                                        scratch2, scratch3, name, depth1, miss);

    // Invoke an interceptor and if it provides a value,
    // branch to |regular_invoke|.
    Label regular_invoke;
    LoadWithInterceptor(masm, receiver, holder, interceptor_holder, scratch2,
                        &regular_invoke);

    // Interceptor returned nothing for this property. Try to use cached
    // constant function.

    // Check that the maps from interceptor's holder to constant function's
    // holder haven't changed and thus we can use cached constant function.
    if (interceptor_holder != lookup->holder()) {
      stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
                                      lookup->holder(), scratch1,
                                      scratch2, scratch3, name, depth2, miss);
    } else {
      // CheckPrototypes has a side effect of fetching a 'holder'
      // for API (object which is instanceof for the signature). It's
      // safe to omit it here, as if present, it should be fetched
      // by the previous CheckPrototypes.
      ASSERT(depth2 == kInvalidProtoDepth);
    }

    // Invoke function.
    if (can_do_fast_api_call) {
      MaybeObject* result = GenerateFastApiDirectCall(masm,
                                                      optimization,
                                                      arguments_.immediate());
      if (result->IsFailure()) return result;
    } else {
      __ InvokeFunction(optimization.constant_function(), arguments_,
                        JUMP_FUNCTION);
    }

    // Deferred code for fast API call case---clean preallocated space.
    if (can_do_fast_api_call) {
      __ bind(&miss_cleanup);
      FreeSpaceForFastApiCall(masm);
      __ b(miss_label);
    }

    // Invoke a regular function.
    __ bind(&regular_invoke);
    if (can_do_fast_api_call) {
      FreeSpaceForFastApiCall(masm);
    }

    return Heap::undefined_value();
  }

  // Path for calls that cannot be cached: checks the prototype chain and
  // calls the LoadPropertyWithInterceptorForCall runtime function.
  void CompileRegular(MacroAssembler* masm,
                      JSObject* object,
                      Register receiver,
                      Register scratch1,
                      Register scratch2,
                      Register scratch3,
                      String* name,
                      JSObject* interceptor_holder,
                      Label* miss_label) {
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                        scratch1, scratch2, scratch3, name,
                                        miss_label);

    // Call a runtime function to load the interceptor property.
    __ EnterInternalFrame();
    // Save the name_ register across the call.
    __ push(name_);

    PushInterceptorArguments(masm,
                             receiver,
                             holder,
                             name_,
                             interceptor_holder);

    __ CallExternalReference(
        ExternalReference(
            IC_Utility(IC::kLoadPropertyWithInterceptorForCall)),
        5);

    // Restore the name_ register.
    __ pop(name_);
    __ LeaveInternalFrame();
  }

  // Invokes the interceptor getter inside an internal frame. Jumps to
  // |interceptor_succeeded| if the interceptor produced a value (i.e. r0 is
  // not the no-interceptor-result sentinel).
  void LoadWithInterceptor(MacroAssembler* masm,
                           Register receiver,
                           Register holder,
                           JSObject* holder_obj,
                           Register scratch,
                           Label* interceptor_succeeded) {
    __ EnterInternalFrame();
    __ Push(holder, name_);

    CompileCallLoadPropertyWithInterceptor(masm,
                                           receiver,
                                           holder,
                                           name_,
                                           holder_obj);

    __ pop(name_);  // Restore the name.
    // NOTE: the saved holder value is popped into the |receiver| register.
    __ pop(receiver);  // Restore the holder.
    __ LeaveInternalFrame();

    // If interceptor returns no-result sentinel, call the constant function.
    __ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
    __ cmp(r0, scratch);
    __ b(ne, interceptor_succeeded);
  }

  StubCompiler* stub_compiler_;    // Owner used for prototype chain checks.
  const ParameterCount& arguments_;  // Call argument count.
  Register name_;                  // Register holding the property name.
};
881
+
882
+
883
// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
// Returns the cell on success, or the allocation failure from
// EnsurePropertyCell. Clobbers |scratch|; jumps to |miss| if the cell
// holds anything but the hole.
MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell(
    MacroAssembler* masm,
    GlobalObject* global,
    String* name,
    Register scratch,
    Label* miss) {
  Object* probe;
  { MaybeObject* maybe_probe = global->EnsurePropertyCell(name);
    if (!maybe_probe->ToObject(&probe)) return maybe_probe;
  }
  JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe);
  // At compile time the property must be absent, i.e. the cell holds the hole.
  ASSERT(cell->value()->IsTheHole());
  __ mov(scratch, Operand(Handle<Object>(cell)));
  __ ldr(scratch,
         FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(scratch, ip);
  __ b(ne, miss);
  return cell;
}
906
+
907
// Calls GenerateCheckPropertyCell for each global object in the prototype chain
// from object to (but not including) holder.
// Returns the first failure encountered, or NULL when all checks were
// emitted successfully. Clobbers |scratch|.
MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCells(
    MacroAssembler* masm,
    JSObject* object,
    JSObject* holder,
    String* name,
    Register scratch,
    Label* miss) {
  JSObject* current = object;
  while (current != holder) {
    if (current->IsGlobalObject()) {
      // Returns a cell or a failure.
      MaybeObject* result = GenerateCheckPropertyCell(
          masm,
          GlobalObject::cast(current),
          name,
          scratch,
          miss);
      if (result->IsFailure()) return result;
    }
    ASSERT(current->IsJSObject());
    current = JSObject::cast(current->GetPrototype());
  }
  return NULL;
}
933
+
934
+
935
// Convert and store int passed in register ival to IEEE 754 single precision
// floating point value at memory location (dst + 4 * wordoffset)
// If VFP3 is available use it for conversion.
// Without VFP3 the single-precision value is assembled manually from sign,
// exponent and mantissa bits in |fval|. Clobbers ival, fval, scratch1,
// scratch2.
static void StoreIntAsFloat(MacroAssembler* masm,
                            Register dst,
                            Register wordoffset,
                            Register ival,
                            Register fval,
                            Register scratch1,
                            Register scratch2) {
  if (CpuFeatures::IsSupported(VFP3)) {
    CpuFeatures::Scope scope(VFP3);
    __ vmov(s0, ival);
    __ add(scratch1, dst, Operand(wordoffset, LSL, 2));
    __ vcvt_f32_s32(s0, s0);
    __ vstr(s0, scratch1, 0);
  } else {
    Label not_special, done;
    // Move sign bit from source to destination.  This works because the sign
    // bit in the exponent word of the double has the same position and polarity
    // as the 2's complement sign bit in a Smi.
    ASSERT(kBinary32SignMask == 0x80000000u);

    __ and_(fval, ival, Operand(kBinary32SignMask), SetCC);
    // Negate value if it is negative.
    __ rsb(ival, ival, Operand(0, RelocInfo::NONE), LeaveCC, ne);

    // We have -1, 0 or 1, which we treat specially. Register ival contains
    // absolute value: it is either equal to 1 (special case of -1 and 1),
    // greater than 1 (not a special case) or less than 1 (special case of 0).
    __ cmp(ival, Operand(1));
    __ b(gt, &not_special);

    // For 1 or -1 we need to or in the 0 exponent (biased).
    static const uint32_t exponent_word_for_1 =
        kBinary32ExponentBias << kBinary32ExponentShift;

    __ orr(fval, fval, Operand(exponent_word_for_1), LeaveCC, eq);
    __ b(&done);

    __ bind(&not_special);
    // Count leading zeros.
    // Gets the wrong answer for 0, but we already checked for that case above.
    Register zeros = scratch2;
    __ CountLeadingZeros(zeros, ival, scratch1);

    // Compute exponent and or it into the exponent register.
    __ rsb(scratch1,
           zeros,
           Operand((kBitsPerInt - 1) + kBinary32ExponentBias));

    __ orr(fval,
           fval,
           Operand(scratch1, LSL, kBinary32ExponentShift));

    // Shift up the source chopping the top bit off.
    // The implicit leading 1 of the mantissa is not stored.
    __ add(zeros, zeros, Operand(1));
    // This wouldn't work for 1 and -1 as the shift would be 32 which means 0.
    __ mov(ival, Operand(ival, LSL, zeros));
    // And the top (top 20 bits).
    __ orr(fval,
           fval,
           Operand(ival, LSR, kBitsPerInt - kBinary32MantissaBits));

    __ bind(&done);
    __ str(fval, MemOperand(dst, wordoffset, LSL, 2));
  }
}
1003
+
1004
+
1005
// Convert unsigned integer with specified number of leading zeroes in binary
// representation to IEEE 754 double.
// Integer to convert is passed in register hiword.
// Resulting double is returned in registers hiword:loword.
// This functions does not work correctly for 0.
// |leading_zeroes| is a compile-time constant, so the shift amounts and the
// biased exponent are computed at stub-compilation time.
static void GenerateUInt2Double(MacroAssembler* masm,
                                Register hiword,
                                Register loword,
                                Register scratch,
                                int leading_zeroes) {
  const int meaningful_bits = kBitsPerInt - leading_zeroes - 1;
  const int biased_exponent = HeapNumber::kExponentBias + meaningful_bits;

  const int mantissa_shift_for_hi_word =
      meaningful_bits - HeapNumber::kMantissaBitsInTopWord;

  const int mantissa_shift_for_lo_word =
      kBitsPerInt - mantissa_shift_for_hi_word;

  __ mov(scratch, Operand(biased_exponent << HeapNumber::kExponentShift));
  if (mantissa_shift_for_hi_word > 0) {
    // Mantissa spills into the low word.
    __ mov(loword, Operand(hiword, LSL, mantissa_shift_for_lo_word));
    __ orr(hiword, scratch, Operand(hiword, LSR, mantissa_shift_for_hi_word));
  } else {
    // Mantissa fits entirely in the high word; low word is zero.
    __ mov(loword, Operand(0, RelocInfo::NONE));
    __ orr(hiword, scratch, Operand(hiword, LSL, mantissa_shift_for_hi_word));
  }

  // If least significant bit of biased exponent was not 1 it was corrupted
  // by most significant bit of mantissa so we should fix that.
  if (!(biased_exponent & 1)) {
    __ bic(hiword, hiword, Operand(1 << HeapNumber::kExponentShift));
  }
}
1039
+
1040
+
1041
+ #undef __
1042
+ #define __ ACCESS_MASM(masm())
1043
+
1044
+
1045
+ Register StubCompiler::CheckPrototypes(JSObject* object,
1046
+ Register object_reg,
1047
+ JSObject* holder,
1048
+ Register holder_reg,
1049
+ Register scratch1,
1050
+ Register scratch2,
1051
+ String* name,
1052
+ int save_at_depth,
1053
+ Label* miss) {
1054
+ // Make sure there's no overlap between holder and object registers.
1055
+ ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
1056
+ ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
1057
+ && !scratch2.is(scratch1));
1058
+
1059
+ // Keep track of the current object in register reg.
1060
+ Register reg = object_reg;
1061
+ int depth = 0;
1062
+
1063
+ if (save_at_depth == depth) {
1064
+ __ str(reg, MemOperand(sp));
1065
+ }
1066
+
1067
+ // Check the maps in the prototype chain.
1068
+ // Traverse the prototype chain from the object and do map checks.
1069
+ JSObject* current = object;
1070
+ while (current != holder) {
1071
+ depth++;
1072
+
1073
+ // Only global objects and objects that do not require access
1074
+ // checks are allowed in stubs.
1075
+ ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
1076
+
1077
+ ASSERT(current->GetPrototype()->IsJSObject());
1078
+ JSObject* prototype = JSObject::cast(current->GetPrototype());
1079
+ if (!current->HasFastProperties() &&
1080
+ !current->IsJSGlobalObject() &&
1081
+ !current->IsJSGlobalProxy()) {
1082
+ if (!name->IsSymbol()) {
1083
+ MaybeObject* maybe_lookup_result = Heap::LookupSymbol(name);
1084
+ Object* lookup_result = NULL; // Initialization to please compiler.
1085
+ if (!maybe_lookup_result->ToObject(&lookup_result)) {
1086
+ set_failure(Failure::cast(maybe_lookup_result));
1087
+ return reg;
1088
+ }
1089
+ name = String::cast(lookup_result);
1090
+ }
1091
+ ASSERT(current->property_dictionary()->FindEntry(name) ==
1092
+ StringDictionary::kNotFound);
1093
+
1094
+ GenerateDictionaryNegativeLookup(masm(),
1095
+ miss,
1096
+ reg,
1097
+ name,
1098
+ scratch1,
1099
+ scratch2);
1100
+ __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
1101
+ reg = holder_reg; // from now the object is in holder_reg
1102
+ __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
1103
+ } else if (Heap::InNewSpace(prototype)) {
1104
+ // Get the map of the current object.
1105
+ __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
1106
+ __ cmp(scratch1, Operand(Handle<Map>(current->map())));
1107
+
1108
+ // Branch on the result of the map check.
1109
+ __ b(ne, miss);
1110
+
1111
+ // Check access rights to the global object. This has to happen
1112
+ // after the map check so that we know that the object is
1113
+ // actually a global object.
1114
+ if (current->IsJSGlobalProxy()) {
1115
+ __ CheckAccessGlobalProxy(reg, scratch1, miss);
1116
+ // Restore scratch register to be the map of the object. In the
1117
+ // new space case below, we load the prototype from the map in
1118
+ // the scratch register.
1119
+ __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
1120
+ }
1121
+
1122
+ reg = holder_reg; // from now the object is in holder_reg
1123
+ // The prototype is in new space; we cannot store a reference
1124
+ // to it in the code. Load it from the map.
1125
+ __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
1126
+ } else {
1127
+ // Check the map of the current object.
1128
+ __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
1129
+ __ cmp(scratch1, Operand(Handle<Map>(current->map())));
1130
+ // Branch on the result of the map check.
1131
+ __ b(ne, miss);
1132
+ // Check access rights to the global object. This has to happen
1133
+ // after the map check so that we know that the object is
1134
+ // actually a global object.
1135
+ if (current->IsJSGlobalProxy()) {
1136
+ __ CheckAccessGlobalProxy(reg, scratch1, miss);
1137
+ }
1138
+ // The prototype is in old space; load it directly.
1139
+ reg = holder_reg; // from now the object is in holder_reg
1140
+ __ mov(reg, Operand(Handle<JSObject>(prototype)));
1141
+ }
1142
+
1143
+ if (save_at_depth == depth) {
1144
+ __ str(reg, MemOperand(sp));
1145
+ }
1146
+
1147
+ // Go to the next object in the prototype chain.
1148
+ current = prototype;
1149
+ }
1150
+
1151
+ // Check the holder map.
1152
+ __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
1153
+ __ cmp(scratch1, Operand(Handle<Map>(current->map())));
1154
+ __ b(ne, miss);
1155
+
1156
+ // Log the check depth.
1157
+ LOG(IntEvent("check-maps-depth", depth + 1));
1158
+
1159
+ // Perform security check for access to the global object.
1160
+ ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
1161
+ if (holder->IsJSGlobalProxy()) {
1162
+ __ CheckAccessGlobalProxy(reg, scratch1, miss);
1163
+ };
1164
+
1165
+ // If we've skipped any global objects, it's not enough to verify
1166
+ // that their maps haven't changed. We also need to check that the
1167
+ // property cell for the property is still empty.
1168
+ MaybeObject* result = GenerateCheckPropertyCells(masm(),
1169
+ object,
1170
+ holder,
1171
+ name,
1172
+ scratch1,
1173
+ miss);
1174
+ if (result->IsFailure()) set_failure(Failure::cast(result));
1175
+
1176
+ // Return the register containing the holder.
1177
+ return reg;
1178
+ }
1179
+
1180
+
1181
+ void StubCompiler::GenerateLoadField(JSObject* object,
1182
+ JSObject* holder,
1183
+ Register receiver,
1184
+ Register scratch1,
1185
+ Register scratch2,
1186
+ Register scratch3,
1187
+ int index,
1188
+ String* name,
1189
+ Label* miss) {
1190
+ // Check that the receiver isn't a smi.
1191
+ __ tst(receiver, Operand(kSmiTagMask));
1192
+ __ b(eq, miss);
1193
+
1194
+ // Check that the maps haven't changed.
1195
+ Register reg =
1196
+ CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3,
1197
+ name, miss);
1198
+ GenerateFastPropertyLoad(masm(), r0, reg, holder, index);
1199
+ __ Ret();
1200
+ }
1201
+
1202
+
1203
+ void StubCompiler::GenerateLoadConstant(JSObject* object,
1204
+ JSObject* holder,
1205
+ Register receiver,
1206
+ Register scratch1,
1207
+ Register scratch2,
1208
+ Register scratch3,
1209
+ Object* value,
1210
+ String* name,
1211
+ Label* miss) {
1212
+ // Check that the receiver isn't a smi.
1213
+ __ tst(receiver, Operand(kSmiTagMask));
1214
+ __ b(eq, miss);
1215
+
1216
+ // Check that the maps haven't changed.
1217
+ Register reg =
1218
+ CheckPrototypes(object, receiver, holder,
1219
+ scratch1, scratch2, scratch3, name, miss);
1220
+
1221
+ // Return the constant value.
1222
+ __ mov(r0, Operand(Handle<Object>(value)));
1223
+ __ Ret();
1224
+ }
1225
+
1226
+
1227
// Emits a load through an AccessorInfo callback: validates the prototype
// chain, builds the AccessorInfo argument list on the stack, enters an exit
// frame and calls the getter directly via TryCallApiFunctionAndReturn.
// Returns the result of that call, which may be an allocation failure.
MaybeObject* StubCompiler::GenerateLoadCallback(JSObject* object,
                                                JSObject* holder,
                                                Register receiver,
                                                Register name_reg,
                                                Register scratch1,
                                                Register scratch2,
                                                Register scratch3,
                                                AccessorInfo* callback,
                                                String* name,
                                                Label* miss) {
  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, miss);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3,
                      name, miss);

  // Build AccessorInfo::args_ list on the stack and push property name below
  // the exit frame to make GC aware of them and store pointers to them.
  __ push(receiver);
  __ mov(scratch2, sp);  // scratch2 = AccessorInfo::args_
  Handle<AccessorInfo> callback_handle(callback);
  if (Heap::InNewSpace(callback_handle->data())) {
    // Callback data is in new space and cannot be embedded in code; load it
    // from the AccessorInfo object instead.
    __ Move(scratch3, callback_handle);
    __ ldr(scratch3, FieldMemOperand(scratch3, AccessorInfo::kDataOffset));
  } else {
    __ Move(scratch3, Handle<Object>(callback_handle->data()));
  }
  __ Push(reg, scratch3, name_reg);
  __ mov(r0, sp);  // r0 = Handle<String>

  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);

  const int kApiStackSpace = 1;
  __ EnterExitFrame(false, kApiStackSpace);
  // Create AccessorInfo instance on the stack above the exit frame with
  // scratch2 (internal::Object **args_) as the data.
  __ str(scratch2, MemOperand(sp, 1 * kPointerSize));
  __ add(r1, sp, Operand(1 * kPointerSize));  // r1 = AccessorInfo&

  // Emitting a stub call may try to allocate (if the code is not
  // already generated).  Do not allow the assembler to perform a
  // garbage collection but instead return the allocation failure
  // object.
  const int kStackUnwindSpace = 4;
  ExternalReference ref =
      ExternalReference(&fun, ExternalReference::DIRECT_GETTER_CALL);
  return masm()->TryCallApiFunctionAndReturn(ref, kStackUnwindSpace);
}
1279
+
1280
+
1281
// Emits a load through a named interceptor.  When the post-interceptor
// lookup is a simple FIELD or CALLBACKS property, the interceptor call and
// the follow-up load are inlined; otherwise everything is delegated to the
// runtime.  Jumps to |miss| on any failed map check.
void StubCompiler::GenerateLoadInterceptor(JSObject* object,
                                           JSObject* interceptor_holder,
                                           LookupResult* lookup,
                                           Register receiver,
                                           Register name_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           String* name,
                                           Label* miss) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // So far the most popular follow ups for interceptor loads are FIELD
  // and CALLBACKS, so inline only them, other cases may be added
  // later.
  bool compile_followup_inline = false;
  if (lookup->IsProperty() && lookup->IsCacheable()) {
    if (lookup->type() == FIELD) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
        lookup->GetCallbackObject()->IsAccessorInfo() &&
        AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL) {
      compile_followup_inline = true;
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // Check that the maps haven't changed.
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
                                          name, miss);
    ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));

    // Save necessary data before invoking an interceptor.
    // Requires a frame to make GC aware of pushed pointers.
    __ EnterInternalFrame();

    if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
      // CALLBACKS case needs a receiver to be passed into C++ callback.
      __ Push(receiver, holder_reg, name_reg);
    } else {
      __ Push(holder_reg, name_reg);
    }

    // Invoke an interceptor.  Note: map checks from receiver to
    // interceptor's holder has been compiled before (see a caller
    // of this method.)
    CompileCallLoadPropertyWithInterceptor(masm(),
                                           receiver,
                                           holder_reg,
                                           name_reg,
                                           interceptor_holder);

    // Check if interceptor provided a value for property.  If it's
    // the case, return immediately.
    Label interceptor_failed;
    __ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex);
    __ cmp(r0, scratch1);
    __ b(eq, &interceptor_failed);
    __ LeaveInternalFrame();
    __ Ret();

    // Interceptor found nothing: restore the saved registers and fall
    // through to the inlined follow-up load.
    __ bind(&interceptor_failed);
    __ pop(name_reg);
    __ pop(holder_reg);
    if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
      __ pop(receiver);
    }

    __ LeaveInternalFrame();

    // Check that the maps from interceptor's holder to lookup's holder
    // haven't changed.  And load lookup's holder into |holder| register.
    if (interceptor_holder != lookup->holder()) {
      holder_reg = CheckPrototypes(interceptor_holder,
                                   holder_reg,
                                   lookup->holder(),
                                   scratch1,
                                   scratch2,
                                   scratch3,
                                   name,
                                   miss);
    }

    if (lookup->type() == FIELD) {
      // We found FIELD property in prototype chain of interceptor's holder.
      // Retrieve a field from field's holder.
      GenerateFastPropertyLoad(masm(), r0, holder_reg,
                               lookup->holder(), lookup->GetFieldIndex());
      __ Ret();
    } else {
      // We found CALLBACKS property in prototype chain of interceptor's
      // holder.
      ASSERT(lookup->type() == CALLBACKS);
      ASSERT(lookup->GetCallbackObject()->IsAccessorInfo());
      AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
      ASSERT(callback != NULL);
      ASSERT(callback->getter() != NULL);

      // Tail call to runtime.
      // Important invariant in CALLBACKS case: the code above must be
      // structured to never clobber |receiver| register.
      __ Move(scratch2, Handle<AccessorInfo>(callback));
      // holder_reg is either receiver or scratch1.
      if (!receiver.is(holder_reg)) {
        ASSERT(scratch1.is(holder_reg));
        __ Push(receiver, holder_reg);
        __ ldr(scratch3,
               FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
        __ Push(scratch3, scratch2, name_reg);
      } else {
        __ push(receiver);
        __ ldr(scratch3,
               FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
        __ Push(holder_reg, scratch3, scratch2, name_reg);
      }

      ExternalReference ref =
          ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
      __ TailCallExternalReference(ref, 5, 1);
    }
  } else {  // !compile_followup_inline
    // Call the runtime system to load the interceptor.
    // Check that the maps haven't changed.
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
                                          name, miss);
    PushInterceptorArguments(masm(), receiver, holder_reg,
                             name_reg, interceptor_holder);

    ExternalReference ref = ExternalReference(
        IC_Utility(IC::kLoadPropertyWithInterceptorForLoad));
    __ TailCallExternalReference(ref, 5, 1);
  }
}
1422
+
1423
+
1424
+ void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
1425
+ if (kind_ == Code::KEYED_CALL_IC) {
1426
+ __ cmp(r2, Operand(Handle<String>(name)));
1427
+ __ b(ne, miss);
1428
+ }
1429
+ }
1430
+
1431
+
1432
// Loads the receiver from the stack into r0 and verifies the prototype
// chain maps from |object| to the global |holder|.  Jumps to |miss| on
// any failed check.
void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object,
                                                   JSObject* holder,
                                                   String* name,
                                                   Label* miss) {
  ASSERT(holder->IsGlobalObject());

  // Get the number of arguments.
  const int argc = arguments().immediate();

  // Get the receiver from the stack.
  __ ldr(r0, MemOperand(sp, argc * kPointerSize));

  // If the object is the holder then we know that it's a global
  // object which can only happen for contextual calls. In this case,
  // the receiver cannot be a smi.
  if (object != holder) {
    __ tst(r0, Operand(kSmiTagMask));
    __ b(eq, miss);
  }

  // Check that the maps haven't changed.
  CheckPrototypes(object, r0, holder, r3, r1, r4, name, miss);
}
1455
+
1456
+
1457
// Loads the function from a global property cell into r1 and verifies it is
// still the expected |function|.  For new-space functions the check is done
// via the (old-space) SharedFunctionInfo rather than the function pointer
// itself.  Jumps to |miss| on mismatch.  Clobbers r3 and r4.
void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
                                                    JSFunction* function,
                                                    Label* miss) {
  // Get the value from the cell.
  __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell)));
  __ ldr(r1, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));

  // Check that the cell contains the same function.
  if (Heap::InNewSpace(function)) {
    // We can't embed a pointer to a function in new space so we have
    // to verify that the shared function info is unchanged. This has
    // the nice side effect that multiple closures based on the same
    // function can all use this call IC. Before we load through the
    // function, we have to verify that it still is a function.
    __ tst(r1, Operand(kSmiTagMask));
    __ b(eq, miss);
    __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
    __ b(ne, miss);

    // Check the shared function info. Make sure it hasn't changed.
    __ Move(r3, Handle<SharedFunctionInfo>(function->shared()));
    __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
    __ cmp(r4, r3);
    __ b(ne, miss);
  } else {
    __ cmp(r1, Operand(Handle<JSFunction>(function)));
    __ b(ne, miss);
  }
}
1486
+
1487
+
1488
// Emits a tail jump to the call-miss stub for this call kind and argument
// count.  Returns the miss stub's code object on success, or the failure
// (e.g. allocation failure) from lazily compiling that stub.
MaybeObject* CallStubCompiler::GenerateMissBranch() {
  MaybeObject* maybe_obj = StubCache::ComputeCallMiss(arguments().immediate(),
                                                      kind_);
  Object* obj;
  if (!maybe_obj->ToObject(&obj)) return maybe_obj;  // Propagate failure.
  __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
  return obj;
}
1496
+
1497
+
1498
// Compiles a call stub for a function stored in an in-object/property
// field of |object| (found through |holder| at |index|): loads the field
// value and calls it.  Misses to the generic stub when the receiver is a
// smi or any map on the prototype chain has changed.
MaybeObject* CallStubCompiler::CompileCallField(JSObject* object,
                                                JSObject* holder,
                                                int index,
                                                String* name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  GenerateNameCheck(name, &miss);

  const int argc = arguments().immediate();

  // Get the receiver of the function from the stack into r0.
  __ ldr(r0, MemOperand(sp, argc * kPointerSize));
  // Check that the receiver isn't a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Do the right check and compute the holder register.
  Register reg = CheckPrototypes(object, r0, holder, r1, r3, r4, name, &miss);
  // Load the field value (the callee) into r1.
  GenerateFastPropertyLoad(masm(), r1, reg, holder, index);

  GenerateCallFunction(masm(), object, arguments(), &miss);

  // Handle call cache miss.
  __ bind(&miss);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(FIELD, name);
}
1534
+
1535
+
1536
// Specialized call stub for Array.prototype.push on a JSArray receiver
// with fast, writable elements.  Fast-cases argc == 0 (return length) and
// argc == 1 (store into existing capacity, or grow a new-space elements
// array in place by kAllocationDelta slots); everything else tail-calls
// the C++ builtin.  Returns undefined to bail out to the regular call
// compiler when the receiver is not a JSArray or a cell is supplied.
MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
                                                    JSObject* holder,
                                                    JSGlobalPropertyCell* cell,
                                                    JSFunction* function,
                                                    String* name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If object is not an array, bail out to regular call.
  if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value();

  Label miss;

  GenerateNameCheck(name, &miss);

  Register receiver = r1;

  // Get the receiver from the stack
  const int argc = arguments().immediate();
  __ ldr(receiver, MemOperand(sp, argc * kPointerSize));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Check that the maps haven't changed.
  CheckPrototypes(JSObject::cast(object), receiver,
                  holder, r3, r0, r4, name, &miss);

  if (argc == 0) {
    // Nothing to do, just return the length.
    __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
    __ Drop(argc + 1);
    __ Ret();
  } else {
    Label call_builtin;

    Register elements = r3;
    Register end_elements = r5;

    // Get the elements array of the object.
    __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));

    // Check that the elements are in fast mode and writable.
    __ CheckMap(elements, r0,
                Heap::kFixedArrayMapRootIndex, &call_builtin, true);

    if (argc == 1) {  // Otherwise fall through to call the builtin.
      Label exit, with_write_barrier, attempt_to_grow_elements;

      // Get the array's length into r0 and calculate new length.
      __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
      STATIC_ASSERT(kSmiTagSize == 1);
      STATIC_ASSERT(kSmiTag == 0);
      __ add(r0, r0, Operand(Smi::FromInt(argc)));

      // Get the element's length.
      __ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));

      // Check if we could survive without allocation.
      __ cmp(r0, r4);
      __ b(gt, &attempt_to_grow_elements);

      // Save new length.
      __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));

      // Push the element.
      __ ldr(r4, MemOperand(sp, (argc - 1) * kPointerSize));
      // We may need a register containing the address end_elements below,
      // so write back the value in end_elements.
      __ add(end_elements, elements,
             Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
      const int kEndElementsOffset =
          FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize;
      __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex));

      // Check for a smi.  Smi stores need no write barrier.
      __ JumpIfNotSmi(r4, &with_write_barrier);
      __ bind(&exit);
      __ Drop(argc + 1);
      __ Ret();

      __ bind(&with_write_barrier);
      // Elements still in new space need no barrier either.
      __ InNewSpace(elements, r4, eq, &exit);
      __ RecordWriteHelper(elements, end_elements, r4);
      __ Drop(argc + 1);
      __ Ret();

      __ bind(&attempt_to_grow_elements);
      // r0: array's length + 1.
      // r4: elements' length.

      if (!FLAG_inline_new) {
        __ b(&call_builtin);
      }

      ExternalReference new_space_allocation_top =
          ExternalReference::new_space_allocation_top_address();
      ExternalReference new_space_allocation_limit =
          ExternalReference::new_space_allocation_limit_address();

      const int kAllocationDelta = 4;
      // Load top and check if it is the end of elements.  In-place growth
      // is only possible when the elements array abuts the allocation top.
      __ add(end_elements, elements,
             Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
      __ add(end_elements, end_elements, Operand(kEndElementsOffset));
      __ mov(r7, Operand(new_space_allocation_top));
      __ ldr(r6, MemOperand(r7));
      __ cmp(end_elements, r6);
      __ b(ne, &call_builtin);

      __ mov(r9, Operand(new_space_allocation_limit));
      __ ldr(r9, MemOperand(r9));
      __ add(r6, r6, Operand(kAllocationDelta * kPointerSize));
      __ cmp(r6, r9);
      __ b(hi, &call_builtin);

      // We fit and could grow elements.
      // Update new_space_allocation_top.
      __ str(r6, MemOperand(r7));
      // Push the argument.
      __ ldr(r6, MemOperand(sp, (argc - 1) * kPointerSize));
      __ str(r6, MemOperand(end_elements));
      // Fill the rest with holes.
      __ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
      for (int i = 1; i < kAllocationDelta; i++) {
        __ str(r6, MemOperand(end_elements, i * kPointerSize));
      }

      // Update elements' and array's sizes.
      __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
      __ add(r4, r4, Operand(Smi::FromInt(kAllocationDelta)));
      __ str(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));

      // Elements are in new space, so write barrier is not required.
      __ Drop(argc + 1);
      __ Ret();
    }
    __ bind(&call_builtin);
    __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush),
                                 argc + 1,
                                 1);
  }

  // Handle call cache miss.
  __ bind(&miss);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(function);
}
1694
+
1695
+
1696
// Specialized call stub for Array.prototype.pop on a JSArray receiver
// with fast, writable elements.  The fast path removes the last element
// and writes the hole in its place; it falls back to the C++ builtin when
// the last element is the hole (may require a prototype lookup).  Returns
// undefined to bail out to the regular call compiler when the receiver is
// not a JSArray or a cell is supplied.
MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object,
                                                   JSObject* holder,
                                                   JSGlobalPropertyCell* cell,
                                                   JSFunction* function,
                                                   String* name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If object is not an array, bail out to regular call.
  if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value();

  Label miss, return_undefined, call_builtin;

  Register receiver = r1;
  Register elements = r3;

  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack
  const int argc = arguments().immediate();
  __ ldr(receiver, MemOperand(sp, argc * kPointerSize));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Check that the maps haven't changed.
  CheckPrototypes(JSObject::cast(object),
                  receiver, holder, elements, r4, r0, name, &miss);

  // Get the elements array of the object.
  __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));

  // Check that the elements are in fast mode and writable.
  __ CheckMap(elements, r0, Heap::kFixedArrayMapRootIndex, &call_builtin, true);

  // Get the array's length into r4 and calculate new length.
  // An empty array (new length < 0) returns undefined.
  __ ldr(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ sub(r4, r4, Operand(Smi::FromInt(1)), SetCC);
  __ b(lt, &return_undefined);

  // Get the last element.
  __ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);
  // We can't address the last element in one operation. Compute the more
  // expensive shift first, and use an offset later on.
  __ add(elements, elements, Operand(r4, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ ldr(r0, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
  // A hole means the slot is unset; let the builtin handle the lookup.
  __ cmp(r0, r6);
  __ b(eq, &call_builtin);

  // Set the array's length.
  __ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));

  // Fill with the hole.
  __ str(r6, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
  __ Drop(argc + 1);
  __ Ret();

  __ bind(&return_undefined);
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ Drop(argc + 1);
  __ Ret();

  __ bind(&call_builtin);
  __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop),
                               argc + 1,
                               1);

  // Handle call cache miss.
  __ bind(&miss);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(function);
}
1780
+
1781
+
1782
// Specialized call stub for String.prototype.charCodeAt on a string
// receiver.  Uses StringCharCodeAtGenerator for the fast path; an
// out-of-range index yields NaN (or a miss for the default string stub).
// Returns undefined to bail out to the regular call compiler when the
// receiver is not a string or a cell is supplied.
MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  // ----------- S t a t e -------------
  //  -- r2                     : function name
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If object is not a string, bail out to regular call.
  if (!object->IsString() || cell != NULL) return Heap::undefined_value();

  const int argc = arguments().immediate();

  Label miss;
  Label name_miss;
  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;

  // The default string stub treats out-of-range like any other miss.
  if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) {
    index_out_of_range_label = &miss;
  }

  GenerateNameCheck(name, &name_miss);

  // Check that the maps starting from the prototype haven't changed.
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            r0,
                                            &miss);
  ASSERT(object != holder);
  CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder,
                  r1, r3, r4, name, &miss);

  Register receiver = r1;
  Register index = r4;
  Register scratch = r3;
  Register result = r0;
  __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
  if (argc > 0) {
    __ ldr(index, MemOperand(sp, (argc - 1) * kPointerSize));
  } else {
    // No index argument: charCodeAt(undefined), i.e. index 0.
    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
  }

  StringCharCodeAtGenerator char_code_at_generator(receiver,
                                                   index,
                                                   scratch,
                                                   result,
                                                   &miss,  // When not a string.
                                                   &miss,  // When not a number.
                                                   index_out_of_range_label,
                                                   STRING_INDEX_IS_NUMBER);
  char_code_at_generator.GenerateFast(masm());
  __ Drop(argc + 1);
  __ Ret();

  StubRuntimeCallHelper call_helper;
  char_code_at_generator.GenerateSlow(masm(), call_helper);

  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    __ LoadRoot(r0, Heap::kNanValueRootIndex);
    __ Drop(argc + 1);
    __ Ret();
  }

  __ bind(&miss);
  // Restore function name in r2.
  __ Move(r2, Handle<String>(name));
  __ bind(&name_miss);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(function);
}
1866
+
1867
+
1868
// Specialized call stub for String.prototype.charAt on a string receiver.
// Uses StringCharAtGenerator for the fast path; an out-of-range index
// yields the empty string (or a miss for the default string stub).
// Returns undefined to bail out to the regular call compiler when the
// receiver is not a string or a cell is supplied.
MaybeObject* CallStubCompiler::CompileStringCharAtCall(
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  // ----------- S t a t e -------------
  //  -- r2                     : function name
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If object is not a string, bail out to regular call.
  if (!object->IsString() || cell != NULL) return Heap::undefined_value();

  const int argc = arguments().immediate();

  Label miss;
  Label name_miss;
  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;

  // The default string stub treats out-of-range like any other miss.
  if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) {
    index_out_of_range_label = &miss;
  }

  GenerateNameCheck(name, &name_miss);

  // Check that the maps starting from the prototype haven't changed.
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            r0,
                                            &miss);
  ASSERT(object != holder);
  CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder,
                  r1, r3, r4, name, &miss);

  Register receiver = r0;
  Register index = r4;
  Register scratch1 = r1;
  Register scratch2 = r3;
  Register result = r0;
  __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
  if (argc > 0) {
    __ ldr(index, MemOperand(sp, (argc - 1) * kPointerSize));
  } else {
    // No index argument: charAt(undefined), i.e. index 0.
    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
  }

  StringCharAtGenerator char_at_generator(receiver,
                                          index,
                                          scratch1,
                                          scratch2,
                                          result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          index_out_of_range_label,
                                          STRING_INDEX_IS_NUMBER);
  char_at_generator.GenerateFast(masm());
  __ Drop(argc + 1);
  __ Ret();

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm(), call_helper);

  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    __ LoadRoot(r0, Heap::kEmptyStringRootIndex);
    __ Drop(argc + 1);
    __ Ret();
  }

  __ bind(&miss);
  // Restore function name in r2.
  __ Move(r2, Handle<String>(name));
  __ bind(&name_miss);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(function);
}
1954
+
1955
+
1956
// Specialized call stub for String.fromCharCode with exactly one
// argument.  Fast path handles a smi char code (masked to uint16) via
// StringCharFromCodeGenerator; a non-smi argument tail-calls the full
// function.  Returns undefined to bail out to the regular call compiler
// for a non-JSObject receiver or a different argument count.
MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall(
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  // ----------- S t a t e -------------
  //  -- r2                     : function name
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell == NULL) {
    // Non-global case: check the receiver directly.
    __ ldr(r1, MemOperand(sp, 1 * kPointerSize));

    STATIC_ASSERT(kSmiTag == 0);
    __ tst(r1, Operand(kSmiTagMask));
    __ b(eq, &miss);

    CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
                    &miss);
  } else {
    // Contextual (global) call: verify the cell still holds |function|.
    ASSERT(cell->value() == function);
    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the char code argument.
  Register code = r1;
  __ ldr(code, MemOperand(sp, 0 * kPointerSize));

  // Check the code is a smi.
  Label slow;
  STATIC_ASSERT(kSmiTag == 0);
  __ tst(code, Operand(kSmiTagMask));
  __ b(ne, &slow);

  // Convert the smi code to uint16.
  __ and_(code, code, Operand(Smi::FromInt(0xffff)));

  StringCharFromCodeGenerator char_from_code_generator(code, r0);
  char_from_code_generator.GenerateFast(masm());
  __ Drop(argc + 1);
  __ Ret();

  StubRuntimeCallHelper call_helper;
  char_from_code_generator.GenerateSlow(masm(), call_helper);

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  __ bind(&miss);
  // r2: function name.
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
}
2030
+
2031
+
2032
+ MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object,
2033
+ JSObject* holder,
2034
+ JSGlobalPropertyCell* cell,
2035
+ JSFunction* function,
2036
+ String* name) {
2037
+ // ----------- S t a t e -------------
2038
+ // -- r2 : function name
2039
+ // -- lr : return address
2040
+ // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
2041
+ // -- ...
2042
+ // -- sp[argc * 4] : receiver
2043
+ // -----------------------------------
2044
+
2045
+ if (!CpuFeatures::IsSupported(VFP3)) return Heap::undefined_value();
2046
+ CpuFeatures::Scope scope_vfp3(VFP3);
2047
+
2048
+ const int argc = arguments().immediate();
2049
+
2050
+ // If the object is not a JSObject or we got an unexpected number of
2051
+ // arguments, bail out to the regular call.
2052
+ if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();
2053
+
2054
+ Label miss, slow;
2055
+ GenerateNameCheck(name, &miss);
2056
+
2057
+ if (cell == NULL) {
2058
+ __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
2059
+
2060
+ STATIC_ASSERT(kSmiTag == 0);
2061
+ __ JumpIfSmi(r1, &miss);
2062
+
2063
+ CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
2064
+ &miss);
2065
+ } else {
2066
+ ASSERT(cell->value() == function);
2067
+ GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
2068
+ GenerateLoadFunctionFromCell(cell, function, &miss);
2069
+ }
2070
+
2071
+ // Load the (only) argument into r0.
2072
+ __ ldr(r0, MemOperand(sp, 0 * kPointerSize));
2073
+
2074
+ // If the argument is a smi, just return.
2075
+ STATIC_ASSERT(kSmiTag == 0);
2076
+ __ tst(r0, Operand(kSmiTagMask));
2077
+ __ Drop(argc + 1, eq);
2078
+ __ Ret(eq);
2079
+
2080
+ __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, &slow, true);
2081
+
2082
+ Label wont_fit_smi, no_vfp_exception, restore_fpscr_and_return;
2083
+
2084
+ // If vfp3 is enabled, we use the fpu rounding with the RM (round towards
2085
+ // minus infinity) mode.
2086
+
2087
+ // Load the HeapNumber value.
2088
+ // We will need access to the value in the core registers, so we load it
2089
+ // with ldrd and move it to the fpu. It also spares a sub instruction for
2090
+ // updating the HeapNumber value address, as vldr expects a multiple
2091
+ // of 4 offset.
2092
+ __ Ldrd(r4, r5, FieldMemOperand(r0, HeapNumber::kValueOffset));
2093
+ __ vmov(d1, r4, r5);
2094
+
2095
+ // Backup FPSCR.
2096
+ __ vmrs(r3);
2097
+ // Set custom FPCSR:
2098
+ // - Set rounding mode to "Round towards Minus Infinity"
2099
+ // (ie bits [23:22] = 0b10).
2100
+ // - Clear vfp cumulative exception flags (bits [3:0]).
2101
+ // - Make sure Flush-to-zero mode control bit is unset (bit 22).
2102
+ __ bic(r9, r3,
2103
+ Operand(kVFPExceptionMask | kVFPRoundingModeMask | kVFPFlushToZeroMask));
2104
+ __ orr(r9, r9, Operand(kRoundToMinusInf));
2105
+ __ vmsr(r9);
2106
+
2107
+ // Convert the argument to an integer.
2108
+ __ vcvt_s32_f64(s0, d1, kFPSCRRounding);
2109
+
2110
+ // Use vcvt latency to start checking for special cases.
2111
+ // Get the argument exponent and clear the sign bit.
2112
+ __ bic(r6, r5, Operand(HeapNumber::kSignMask));
2113
+ __ mov(r6, Operand(r6, LSR, HeapNumber::kMantissaBitsInTopWord));
2114
+
2115
+ // Retrieve FPSCR and check for vfp exceptions.
2116
+ __ vmrs(r9);
2117
+ __ tst(r9, Operand(kVFPExceptionMask));
2118
+ __ b(&no_vfp_exception, eq);
2119
+
2120
+ // Check for NaN, Infinity, and -Infinity.
2121
+ // They are invariant through a Math.Floor call, so just
2122
+ // return the original argument.
2123
+ __ sub(r7, r6, Operand(HeapNumber::kExponentMask
2124
+ >> HeapNumber::kMantissaBitsInTopWord), SetCC);
2125
+ __ b(&restore_fpscr_and_return, eq);
2126
+ // We had an overflow or underflow in the conversion. Check if we
2127
+ // have a big exponent.
2128
+ __ cmp(r7, Operand(HeapNumber::kMantissaBits));
2129
+ // If greater or equal, the argument is already round and in r0.
2130
+ __ b(&restore_fpscr_and_return, ge);
2131
+ __ b(&wont_fit_smi);
2132
+
2133
+ __ bind(&no_vfp_exception);
2134
+ // Move the result back to general purpose register r0.
2135
+ __ vmov(r0, s0);
2136
+ // Check if the result fits into a smi.
2137
+ __ add(r1, r0, Operand(0x40000000), SetCC);
2138
+ __ b(&wont_fit_smi, mi);
2139
+ // Tag the result.
2140
+ STATIC_ASSERT(kSmiTag == 0);
2141
+ __ mov(r0, Operand(r0, LSL, kSmiTagSize));
2142
+
2143
+ // Check for -0.
2144
+ __ cmp(r0, Operand(0, RelocInfo::NONE));
2145
+ __ b(&restore_fpscr_and_return, ne);
2146
+ // r5 already holds the HeapNumber exponent.
2147
+ __ tst(r5, Operand(HeapNumber::kSignMask));
2148
+ // If our HeapNumber is negative it was -0, so load its address and return.
2149
+ // Else r0 is loaded with 0, so we can also just return.
2150
+ __ ldr(r0, MemOperand(sp, 0 * kPointerSize), ne);
2151
+
2152
+ __ bind(&restore_fpscr_and_return);
2153
+ // Restore FPSCR and return.
2154
+ __ vmsr(r3);
2155
+ __ Drop(argc + 1);
2156
+ __ Ret();
2157
+
2158
+ __ bind(&wont_fit_smi);
2159
+ // Restore FPCSR and fall to slow case.
2160
+ __ vmsr(r3);
2161
+
2162
+ __ bind(&slow);
2163
+ // Tail call the full function. We do not have to patch the receiver
2164
+ // because the function makes no use of it.
2165
+ __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
2166
+
2167
+ __ bind(&miss);
2168
+ // r2: function name.
2169
+ MaybeObject* obj = GenerateMissBranch();
2170
+ if (obj->IsFailure()) return obj;
2171
+
2172
+ // Return the generated code.
2173
+ return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
2174
+ }
2175
+
2176
+
2177
// Specialized call stub for Math.abs with exactly one argument.  A smi is
// negated branch-free with an eor/sub pair (the most negative smi goes to
// the slow case); a heap number has its sign bit cleared into a freshly
// allocated heap number.  Anything else tail-calls the full function.
// Returns undefined to bail out to the regular call compiler for a
// non-JSObject receiver or a different argument count.
MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
                                                  JSObject* holder,
                                                  JSGlobalPropertyCell* cell,
                                                  JSFunction* function,
                                                  String* name) {
  // ----------- S t a t e -------------
  //  -- r2                     : function name
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell == NULL) {
    // Non-global case: check the receiver directly.
    __ ldr(r1, MemOperand(sp, 1 * kPointerSize));

    STATIC_ASSERT(kSmiTag == 0);
    __ tst(r1, Operand(kSmiTagMask));
    __ b(eq, &miss);

    CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
                    &miss);
  } else {
    // Contextual (global) call: verify the cell still holds |function|.
    ASSERT(cell->value() == function);
    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the (only) argument into r0.
  __ ldr(r0, MemOperand(sp, 0 * kPointerSize));

  // Check if the argument is a smi.
  Label not_smi;
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfNotSmi(r0, &not_smi);

  // Do bitwise not or do nothing depending on the sign of the
  // argument.
  __ eor(r1, r0, Operand(r0, ASR, kBitsPerInt - 1));

  // Add 1 or do nothing depending on the sign of the argument.
  __ sub(r0, r1, Operand(r0, ASR, kBitsPerInt - 1), SetCC);

  // If the result is still negative, go to the slow case.
  // This only happens for the most negative smi.
  Label slow;
  __ b(mi, &slow);

  // Smi case done.
  __ Drop(argc + 1);
  __ Ret();

  // Check if the argument is a heap number and load its exponent and
  // sign.
  __ bind(&not_smi);
  __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, &slow, true);
  __ ldr(r1, FieldMemOperand(r0, HeapNumber::kExponentOffset));

  // Check the sign of the argument. If the argument is positive,
  // just return it.
  Label negative_sign;
  __ tst(r1, Operand(HeapNumber::kSignMask));
  __ b(ne, &negative_sign);
  __ Drop(argc + 1);
  __ Ret();

  // If the argument is negative, clear the sign, and return a new
  // number.
  __ bind(&negative_sign);
  __ eor(r1, r1, Operand(HeapNumber::kSignMask));
  __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
  __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
  __ AllocateHeapNumber(r0, r4, r5, r6, &slow);
  __ str(r1, FieldMemOperand(r0, HeapNumber::kExponentOffset));
  __ str(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
  __ Drop(argc + 1);
  __ Ret();

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  __ bind(&miss);
  // r2: function name.
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
}
2279
+
2280
+
2281
// Compiles a call stub for a call to the constant |function| found on
// |object|'s prototype chain through |holder|.  First tries a builtin
// custom stub (Array.push/pop, charAt, Math.floor, ...).  Otherwise the
// receiver is checked according to |check| (map, string, number or
// boolean); simple API calls may use the fast API call path when the
// prototype depth is known.  Non-strict non-builtin functions called on a
// value receiver miss, because the receiver would need boxing.
MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
                                                   JSObject* holder,
                                                   JSFunction* function,
                                                   String* name,
                                                   CheckType check) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  SharedFunctionInfo* function_info = function->shared();
  if (function_info->HasBuiltinFunctionId()) {
    BuiltinFunctionId id = function_info->builtin_function_id();
    MaybeObject* maybe_result = CompileCustomCall(
        id, object, holder, NULL, function, name);
    Object* result;
    if (!maybe_result->ToObject(&result)) return maybe_result;
    // undefined means bail out to regular compiler.
    if (!result->IsUndefined()) {
      return result;
    }
  }

  Label miss_in_smi_check;

  GenerateNameCheck(name, &miss_in_smi_check);

  // Get the receiver from the stack
  const int argc = arguments().immediate();
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  // Check that the receiver isn't a smi.
  // (NUMBER_CHECK accepts smis, so skip the check in that case.)
  if (check != NUMBER_CHECK) {
    __ tst(r1, Operand(kSmiTagMask));
    __ b(eq, &miss_in_smi_check);
  }

  // Make sure that it's okay not to patch the on stack receiver
  // unless we're doing a receiver map check.
  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);

  CallOptimization optimization(function);
  int depth = kInvalidProtoDepth;
  Label miss;

  switch (check) {
    case RECEIVER_MAP_CHECK:
      __ IncrementCounter(&Counters::call_const, 1, r0, r3);

      if (optimization.is_simple_api_call() && !object->IsGlobalObject()) {
        depth = optimization.GetPrototypeDepthOfExpectedType(
            JSObject::cast(object), holder);
      }

      if (depth != kInvalidProtoDepth) {
        __ IncrementCounter(&Counters::call_const_fast_api, 1, r0, r3);
        ReserveSpaceForFastApiCall(masm(), r0);
      }

      // Check that the maps haven't changed.
      CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
                      depth, &miss);

      // Patch the receiver on the stack with the global proxy if
      // necessary.
      if (object->IsGlobalObject()) {
        ASSERT(depth == kInvalidProtoDepth);
        __ ldr(r3, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
        __ str(r3, MemOperand(sp, argc * kPointerSize));
      }
      break;

    case STRING_CHECK:
      if (!function->IsBuiltin() && !function_info->strict_mode()) {
        // Calling non-strict non-builtins with a value as the receiver
        // requires boxing.
        __ jmp(&miss);
      } else {
        // Check that the object is a two-byte string or a symbol.
        __ CompareObjectType(r1, r3, r3, FIRST_NONSTRING_TYPE);
        __ b(hs, &miss);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::STRING_FUNCTION_INDEX, r0, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder, r3,
                        r1, r4, name, &miss);
      }
      break;

    case NUMBER_CHECK: {
      if (!function->IsBuiltin() && !function_info->strict_mode()) {
        // Calling non-strict non-builtins with a value as the receiver
        // requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a smi or a heap number.
        __ tst(r1, Operand(kSmiTagMask));
        __ b(eq, &fast);
        __ CompareObjectType(r1, r0, r0, HEAP_NUMBER_TYPE);
        __ b(ne, &miss);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::NUMBER_FUNCTION_INDEX, r0, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder, r3,
                        r1, r4, name, &miss);
      }
      break;
    }

    case BOOLEAN_CHECK: {
      if (!function->IsBuiltin() && !function_info->strict_mode()) {
        // Calling non-strict non-builtins with a value as the receiver
        // requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a boolean.
        __ LoadRoot(ip, Heap::kTrueValueRootIndex);
        __ cmp(r1, ip);
        __ b(eq, &fast);
        __ LoadRoot(ip, Heap::kFalseValueRootIndex);
        __ cmp(r1, ip);
        __ b(ne, &miss);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::BOOLEAN_FUNCTION_INDEX, r0, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder, r3,
                        r1, r4, name, &miss);
      }
      break;
    }

    default:
      UNREACHABLE();
  }

  if (depth != kInvalidProtoDepth) {
    // Fast API call path (simple API call with known prototype depth).
    MaybeObject* result = GenerateFastApiDirectCall(masm(), optimization, argc);
    if (result->IsFailure()) return result;
  } else {
    __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
  }

  // Handle call cache miss.
  __ bind(&miss);
  if (depth != kInvalidProtoDepth) {
    // Undo the stack space reserved for the fast API call.
    FreeSpaceForFastApiCall(masm());
  }

  __ bind(&miss_in_smi_check);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(function);
}
2441
+
2442
+
2443
// Compiles a call IC stub for a property reached through an interceptor on
// |holder|: the interceptor (or the property found behind it) is resolved at
// run time, the resulting function ends up in r0, and is then invoked.
// Jumps to the generic miss handler whenever the cached shape does not match.
MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
                                                      JSObject* holder,
                                                      String* name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the number of arguments.
  const int argc = arguments().immediate();

  LookupResult lookup;
  LookupPostInterceptor(holder, name, &lookup);

  // Get the receiver from the stack.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  CallInterceptorCompiler compiler(this, arguments(), r2);
  // Compile may allocate and therefore fail; propagate the failure so the
  // caller can retry after a GC.
  MaybeObject* result = compiler.Compile(masm(),
                                         object,
                                         holder,
                                         name,
                                         &lookup,
                                         r1,
                                         r3,
                                         r4,
                                         r0,
                                         &miss);
  if (result->IsFailure()) {
    return result;
  }

  // Move returned value, the function to call, to r1.
  __ mov(r1, r0);
  // Restore receiver.
  __ ldr(r0, MemOperand(sp, argc * kPointerSize));

  GenerateCallFunction(masm(), object, arguments(), &miss);

  // Handle call cache miss.
  __ bind(&miss);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}
2496
+
2497
+
2498
// Compiles a call IC stub for a function stored in a global property cell.
// First gives the custom (builtin-specific) call compilers a chance; if they
// decline (return undefined), emits the generic fast path: check receiver,
// load the function from |cell|, verify it is still |function|, and tail-call
// its code.
MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
                                                 GlobalObject* holder,
                                                 JSGlobalPropertyCell* cell,
                                                 JSFunction* function,
                                                 String* name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  SharedFunctionInfo* function_info = function->shared();
  if (function_info->HasBuiltinFunctionId()) {
    BuiltinFunctionId id = function_info->builtin_function_id();
    MaybeObject* maybe_result = CompileCustomCall(
        id, object, holder, cell, function, name);
    Object* result;
    if (!maybe_result->ToObject(&result)) return maybe_result;
    // undefined means bail out to regular compiler.
    if (!result->IsUndefined()) return result;
  }

  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the number of arguments.
  const int argc = arguments().immediate();

  GenerateGlobalReceiverCheck(object, holder, name, &miss);

  GenerateLoadFunctionFromCell(cell, function, &miss);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
    __ str(r3, MemOperand(sp, argc * kPointerSize));
  }

  // Setup the context (function already in r1).
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // Jump to the cached code (tail call).
  __ IncrementCounter(&Counters::call_global_inline, 1, r3, r4);
  ASSERT(function->is_compiled());
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  if (V8::UseCrankshaft()) {
    // TODO(kasperl): For now, we always call indirectly through the
    // code field in the function to allow recompilation to take effect
    // without changing any of the call sites.
    __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
    __ InvokeCode(r3, expected, arguments(), JUMP_FUNCTION);
  } else {
    __ InvokeCode(code, expected, arguments(),
                  RelocInfo::CODE_TARGET, JUMP_FUNCTION);
  }

  // Handle call cache miss.
  __ bind(&miss);
  __ IncrementCounter(&Counters::call_global_inline_miss, 1, r1, r3);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(NORMAL, name);
}
2567
+
2568
+
2569
// Compiles a store IC stub that writes to in-object/backing-store field
// |index| of |object|, optionally performing the map transition |transition|
// (NULL means a plain field store). Falls through to the StoreIC miss builtin
// if the receiver's shape does not match.
MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
                                                  int index,
                                                  Map* transition,
                                                  String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  GenerateStoreField(masm(),
                     object,
                     index,
                     transition,
                     r1, r2, r3,
                     &miss);
  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code. The stub type records whether this store
  // also transitions the receiver's map.
  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
}
2594
+
2595
+
2596
// Compiles a store IC stub for a property backed by an AccessorInfo
// |callback|. After verifying the receiver's map (and the global security
// token when needed), pushes (receiver, callback info, name, value) and
// tail-calls the kStoreCallbackProperty runtime entry to run the setter.
MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
                                                     AccessorInfo* callback,
                                                     String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Check that the object isn't a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the map of the object hasn't changed.
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r3, Operand(Handle<Map>(object->map())));
  __ b(ne, &miss);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(r1, r3, &miss);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  __ push(r1);  // receiver
  __ mov(ip, Operand(Handle<AccessorInfo>(callback)));  // callback info
  __ Push(ip, r2, r0);

  // Do tail-call to the runtime system. Four values were pushed above.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty));
  __ TailCallExternalReference(store_callback_property, 4, 1);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}
2642
+
2643
+
2644
// Compiles a store IC stub for a receiver with a named-property interceptor.
// After the map (and optional security-token) checks, pushes the receiver,
// name, value and the current strict-mode flag, then tail-calls the
// kStoreInterceptorProperty runtime entry.
MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
                                                        String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Check that the object isn't a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the map of the object hasn't changed.
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r3, Operand(Handle<Map>(receiver->map())));
  __ b(ne, &miss);

  // Perform global security token check if needed.
  if (receiver->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(r1, r3, &miss);
  }

  // Stub is never generated for non-global objects that require access
  // checks.
  ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());

  __ Push(r1, r2, r0);  // Receiver, name, value.

  // The runtime call needs to know whether this store is in strict mode.
  __ mov(r0, Operand(Smi::FromInt(strict_mode_)));
  __ push(r0);  // strict mode

  // Do tail-call to the runtime system. Four values were pushed above.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
  __ TailCallExternalReference(store_ic_property, 4, 1);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}
2690
+
2691
+
2692
// Compiles a store IC stub that writes directly into a global property
// |cell|. Bails out to the miss handler if the global's map changed or the
// cell currently holds the hole (i.e. the property was deleted and must be
// re-introduced via the runtime).
MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
                                                   JSGlobalPropertyCell* cell,
                                                   String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Check that the map of the global has not changed.
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r3, Operand(Handle<Map>(object->map())));
  __ b(ne, &miss);

  // Check that the value in the cell is not the hole. If it is, this
  // cell could have been deleted and reintroducing the global needs
  // to update the property details in the property dictionary of the
  // global object. We bail out to the runtime system to do that.
  __ mov(r4, Operand(Handle<JSGlobalPropertyCell>(cell)));
  __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);
  __ ldr(r6, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset));
  __ cmp(r5, r6);
  __ b(eq, &miss);

  // Store the value in the cell.
  __ str(r0, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset));

  __ IncrementCounter(&Counters::named_store_global_inline, 1, r4, r3);
  __ Ret();

  // Handle store cache miss.
  __ bind(&miss);
  __ IncrementCounter(&Counters::named_store_global_inline_miss, 1, r4, r3);
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, name);
}
2733
+
2734
+
2735
// Compiles a load IC stub that proves the property |name| does NOT exist on
// |object| or anywhere on its prototype chain up to |last|, and returns
// undefined. If |last| is a global object, additionally verifies that no
// property cell for |name| holds a value.
MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
                                                      JSObject* object,
                                                      JSObject* last) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Check that receiver is not a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check the maps of the full prototype chain.
  CheckPrototypes(object, r0, last, r3, r1, r4, name, &miss);

  // If the last object in the prototype chain is a global object,
  // check that the global property cell is empty.
  if (last->IsGlobalObject()) {
    MaybeObject* cell = GenerateCheckPropertyCell(masm(),
                                                  GlobalObject::cast(last),
                                                  name,
                                                  r1,
                                                  &miss);
    if (cell->IsFailure()) {
      // Allocation failed: unbind the label before abandoning the code.
      miss.Unuse();
      return cell;
    }
  }

  // Return undefined if maps of the full prototype chain are still the
  // same and no global property with this name contains a value.
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ Ret();

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(NONEXISTENT, Heap::empty_string());
}
2776
+
2777
+
2778
// Compiles a load IC stub that reads field |index| from |holder|, reached
// from receiver |object| through a checked prototype chain.
MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object,
                                                JSObject* holder,
                                                int index,
                                                String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  GenerateLoadField(object, holder, r0, r3, r1, r4, index, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(FIELD, name);
}
2796
+
2797
+
2798
// Compiles a load IC stub for a property backed by an AccessorInfo
// |callback| on |holder|. Code generation itself may fail (allocation), in
// which case the failure is propagated to the caller.
MaybeObject* LoadStubCompiler::CompileLoadCallback(String* name,
                                                   JSObject* object,
                                                   JSObject* holder,
                                                   AccessorInfo* callback) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  MaybeObject* result = GenerateLoadCallback(object, holder, r0, r2, r3, r1, r4,
                                             callback, name, &miss);
  if (result->IsFailure()) {
    // Unbind the label before abandoning the half-built code.
    miss.Unuse();
    return result;
  }

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}
2822
+
2823
+
2824
// Compiles a load IC stub that returns the constant |value| cached for the
// property, after checking the receiver/prototype-chain maps.
MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object,
                                                   JSObject* holder,
                                                   Object* value,
                                                   String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  GenerateLoadConstant(object, holder, r0, r3, r1, r4, value, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(CONSTANT_FUNCTION, name);
}
2842
+
2843
+
2844
// Compiles a load IC stub for a receiver with a named-property interceptor,
// using the post-interceptor lookup result to optimize the common case.
MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* object,
                                                      JSObject* holder,
                                                      String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  LookupResult lookup;
  LookupPostInterceptor(holder, name, &lookup);
  GenerateLoadInterceptor(object,
                          holder,
                          &lookup,
                          r0,
                          r2,
                          r3,
                          r1,
                          r4,
                          name,
                          &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}
2872
+
2873
+
2874
// Compiles a load IC stub that reads a global property straight out of its
// property |cell|. When the property is deletable (|is_dont_delete| false),
// a hole value in the cell forces a miss so the runtime can handle the
// deleted property.
MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
                                                 GlobalObject* holder,
                                                 JSGlobalPropertyCell* cell,
                                                 String* name,
                                                 bool is_dont_delete) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // If the object is the holder then we know that it's a global
  // object which can only happen for contextual calls. In this case,
  // the receiver cannot be a smi.
  if (object != holder) {
    __ tst(r0, Operand(kSmiTagMask));
    __ b(eq, &miss);
  }

  // Check that the map of the global has not changed.
  CheckPrototypes(object, r0, holder, r3, r4, r1, name, &miss);

  // Get the value from the cell.
  __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell)));
  __ ldr(r4, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (!is_dont_delete) {
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(r4, ip);
    __ b(eq, &miss);
  }

  __ mov(r0, r4);
  __ IncrementCounter(&Counters::named_load_global_stub, 1, r1, r3);
  __ Ret();

  __ bind(&miss);
  __ IncrementCounter(&Counters::named_load_global_stub_miss, 1, r1, r3);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(NORMAL, name);
}
2919
+
2920
+
2921
// Compiles a keyed load IC stub specialized for the string key |name|:
// verifies the key matches, then loads field |index| from |holder|.
MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
                                                     JSObject* receiver,
                                                     JSObject* holder,
                                                     int index) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  // Check the key is the cached one.
  __ cmp(r0, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  GenerateLoadField(receiver, holder, r1, r2, r3, r4, index, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(FIELD, name);
}
2942
+
2943
+
2944
// Compiles a keyed load IC stub specialized for key |name| whose property is
// backed by an AccessorInfo |callback|. Propagates code-generation failures.
MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback(
    String* name,
    JSObject* receiver,
    JSObject* holder,
    AccessorInfo* callback) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  // Check the key is the cached one.
  __ cmp(r0, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  MaybeObject* result = GenerateLoadCallback(receiver, holder, r1, r0, r2, r3,
                                             r4, callback, name, &miss);
  if (result->IsFailure()) {
    // Unbind the label before abandoning the half-built code.
    miss.Unuse();
    return result;
  }

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(CALLBACKS, name);
}
2972
+
2973
+
2974
// Compiles a keyed load IC stub specialized for key |name| that returns the
// cached constant |value| after the usual shape checks.
MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
                                                        JSObject* receiver,
                                                        JSObject* holder,
                                                        Object* value) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  // Check the key is the cached one.
  __ cmp(r0, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  GenerateLoadConstant(receiver, holder, r1, r2, r3, r4, value, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(CONSTANT_FUNCTION, name);
}
2996
+
2997
+
2998
// Compiles a keyed load IC stub specialized for key |name| on a receiver
// with a named-property interceptor.
MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
                                                           JSObject* holder,
                                                           String* name) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  // Check the key is the cached one.
  __ cmp(r0, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  LookupResult lookup;
  LookupPostInterceptor(holder, name, &lookup);
  GenerateLoadInterceptor(receiver,
                          holder,
                          &lookup,
                          r1,
                          r0,
                          r2,
                          r3,
                          r4,
                          name,
                          &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(INTERCEPTOR, name);
}
3029
+
3030
+
3031
// Compiles a keyed load IC stub specialized for the key |name| (expected to
// be "length" by convention — not verifiable here) that loads a JSArray's
// length.
MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  // Check the key is the cached one.
  __ cmp(r0, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  GenerateLoadArrayLength(masm(), r1, r2, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(CALLBACKS, name);
}
3049
+
3050
+
3051
// Compiles a keyed load IC stub specialized for key |name| that loads a
// string receiver's length. Counter bookkeeping: incremented on entry,
// decremented again on the miss path so it only counts successful loads.
MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;
  __ IncrementCounter(&Counters::keyed_load_string_length, 1, r2, r3);

  // Check the key is the cached one.
  __ cmp(r0, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  GenerateLoadStringLength(masm(), r1, r2, r3, &miss, true);
  __ bind(&miss);
  __ DecrementCounter(&Counters::keyed_load_string_length, 1, r2, r3);

  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(CALLBACKS, name);
}
3072
+
3073
+
3074
// Compiles a keyed load IC stub specialized for key |name| that loads a
// function receiver's prototype. Counter is decremented on the miss path so
// it nets out to successful loads only.
MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  __ IncrementCounter(&Counters::keyed_load_function_prototype, 1, r2, r3);

  // Check the name hasn't changed.
  __ cmp(r0, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  GenerateLoadFunctionPrototype(masm(), r1, r2, r3, &miss);
  __ bind(&miss);
  __ DecrementCounter(&Counters::keyed_load_function_prototype, 1, r2, r3);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(CALLBACKS, name);
}
3095
+
3096
+
3097
// Compiles a keyed load IC stub specialized for fast-elements receivers with
// |receiver|'s map: checks the map, that the key is a smi within bounds,
// then loads the element directly from the FixedArray backing store,
// missing if the slot holds the hole.
MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  // Check that the receiver isn't a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the map matches.
  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r2, Operand(Handle<Map>(receiver->map())));
  __ b(ne, &miss);

  // Check that the key is a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(ne, &miss);

  // Get the elements array.
  __ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset));
  __ AssertFastElements(r2);

  // Check that the key is within bounds. Both key and length are smis, so
  // an unsigned compare also rejects negative keys.
  __ ldr(r3, FieldMemOperand(r2, FixedArray::kLengthOffset));
  __ cmp(r0, Operand(r3));
  __ b(hs, &miss);

  // Load the result and make sure it's not the hole.
  __ add(r3, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // A smi key scaled by (kPointerSizeLog2 - kSmiTagSize) indexes pointers.
  ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
  __ ldr(r4,
         MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(r4, ip);
  __ b(eq, &miss);
  __ mov(r0, r4);
  __ Ret();

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(NORMAL, NULL);
}
3144
+
3145
+
3146
// Compiles a keyed load IC stub for receivers backed by a pixel array:
// checks the receiver map, then delegates the element load to
// GenerateFastPixelArrayLoad, sharing one miss label for all failure paths.
MaybeObject* KeyedLoadStubCompiler::CompileLoadPixelArray(JSObject* receiver) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  // Check that the map matches.
  __ CheckMap(r1, r2, Handle<Map>(receiver->map()), &miss, false);

  GenerateFastPixelArrayLoad(masm(),
                             r1,
                             r0,
                             r2,
                             r3,
                             r4,
                             r5,
                             r0,
                             &miss,
                             &miss,
                             &miss);

  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, NULL);
}
3176
+
3177
+
3178
// Compiles a keyed store IC stub specialized for the string key |name| that
// writes field |index| of |object|, optionally with map |transition|.
// Note the register layout differs from the named StoreIC: here r1 is the
// name and r2 the receiver.
MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
                                                       int index,
                                                       Map* transition,
                                                       String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : name
  //  -- r2    : receiver
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  __ IncrementCounter(&Counters::keyed_store_field, 1, r3, r4);

  // Check that the name has not changed.
  __ cmp(r1, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  // r3 is used as scratch register. r1 and r2 keep their values if a jump to
  // the miss label is generated.
  GenerateStoreField(masm(),
                     object,
                     index,
                     transition,
                     r2, r1, r3,
                     &miss);
  __ bind(&miss);

  __ DecrementCounter(&Counters::keyed_store_field, 1, r3, r4);
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));

  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code. The stub type records whether this store
  // also transitions the receiver's map.
  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
}
3214
+
3215
+
3216
// Compiles a keyed store IC stub specialized for fast-elements receivers
// with |receiver|'s map: checks map, smi key, non-COW FixedArray backing
// store and bounds (JSArray length vs. elements length), then stores the
// value and emits the write barrier.
MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
    JSObject* receiver) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : key
  //  -- r2    : receiver
  //  -- lr    : return address
  //  -- r3    : scratch
  //  -- r4    : scratch (elements)
  // -----------------------------------
  Label miss;

  // Symbolic names for the fixed register assignment above.
  Register value_reg = r0;
  Register key_reg = r1;
  Register receiver_reg = r2;
  Register scratch = r3;
  Register elements_reg = r4;

  // Check that the receiver isn't a smi.
  __ tst(receiver_reg, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the map matches.
  __ ldr(scratch, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
  __ cmp(scratch, Operand(Handle<Map>(receiver->map())));
  __ b(ne, &miss);

  // Check that the key is a smi.
  __ tst(key_reg, Operand(kSmiTagMask));
  __ b(ne, &miss);

  // Get the elements array and make sure it is a fast element array, not 'cow'
  // (copy-on-write arrays must not be written through this fast path).
  __ ldr(elements_reg,
         FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
  __ ldr(scratch, FieldMemOperand(elements_reg, HeapObject::kMapOffset));
  __ cmp(scratch, Operand(Handle<Map>(Factory::fixed_array_map())));
  __ b(ne, &miss);

  // Check that the key is within bounds. For a JSArray the relevant bound is
  // the array length, otherwise the backing store's length.
  if (receiver->IsJSArray()) {
    __ ldr(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
  } else {
    __ ldr(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
  }
  // Compare smis; unsigned 'hs' also rejects negative keys.
  __ cmp(key_reg, scratch);
  __ b(hs, &miss);

  __ add(scratch,
         elements_reg, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
  __ str(value_reg,
         MemOperand(scratch, key_reg, LSL, kPointerSizeLog2 - kSmiTagSize));
  // Write barrier for the stored pointer.
  __ RecordWrite(scratch,
                 Operand(key_reg, LSL, kPointerSizeLog2 - kSmiTagSize),
                 receiver_reg , elements_reg);

  // value_reg (r0) is preserved.
  // Done.
  __ Ret();

  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, NULL);
}
3284
+
3285
+
3286
// Compiles a keyed store IC stub for receivers backed by a pixel array:
// checks the receiver map, then delegates the clamped element store to
// GenerateFastPixelArrayStore, sharing one miss label for all failure paths.
MaybeObject* KeyedStoreStubCompiler::CompileStorePixelArray(
    JSObject* receiver) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : key
  //  -- r2    : receiver
  //  -- r3    : scratch
  //  -- r4    : scratch
  //  -- r5    : scratch
  //  -- r6    : scratch
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Check that the map matches.
  __ CheckMap(r2, r6, Handle<Map>(receiver->map()), &miss, false);

  GenerateFastPixelArrayStore(masm(),
                              r2,
                              r1,
                              r0,
                              r3,
                              r4,
                              r5,
                              r6,
                              true,
                              true,
                              &miss,
                              &miss,
                              NULL,
                              &miss);

  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, NULL);
}
3325
+
3326
+
3327
+ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
3328
+ // ----------- S t a t e -------------
3329
+ // -- r0 : argc
3330
+ // -- r1 : constructor
3331
+ // -- lr : return address
3332
+ // -- [sp] : last argument
3333
+ // -----------------------------------
3334
+ Label generic_stub_call;
3335
+
3336
+ // Use r7 for holding undefined which is used in several places below.
3337
+ __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
3338
+
3339
+ #ifdef ENABLE_DEBUGGER_SUPPORT
3340
+ // Check to see whether there are any break points in the function code. If
3341
+ // there are jump to the generic constructor stub which calls the actual
3342
+ // code for the function thereby hitting the break points.
3343
+ __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
3344
+ __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kDebugInfoOffset));
3345
+ __ cmp(r2, r7);
3346
+ __ b(ne, &generic_stub_call);
3347
+ #endif
3348
+
3349
+ // Load the initial map and verify that it is in fact a map.
3350
+ // r1: constructor function
3351
+ // r7: undefined
3352
+ __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
3353
+ __ tst(r2, Operand(kSmiTagMask));
3354
+ __ b(eq, &generic_stub_call);
3355
+ __ CompareObjectType(r2, r3, r4, MAP_TYPE);
3356
+ __ b(ne, &generic_stub_call);
3357
+
3358
+ #ifdef DEBUG
3359
+ // Cannot construct functions this way.
3360
+ // r0: argc
3361
+ // r1: constructor function
3362
+ // r2: initial map
3363
+ // r7: undefined
3364
+ __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
3365
+ __ Check(ne, "Function constructed by construct stub.");
3366
+ #endif
3367
+
3368
+ // Now allocate the JSObject in new space.
3369
+ // r0: argc
3370
+ // r1: constructor function
3371
+ // r2: initial map
3372
+ // r7: undefined
3373
+ __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
3374
+ __ AllocateInNewSpace(r3,
3375
+ r4,
3376
+ r5,
3377
+ r6,
3378
+ &generic_stub_call,
3379
+ SIZE_IN_WORDS);
3380
+
3381
+ // Allocated the JSObject, now initialize the fields. Map is set to initial
3382
+ // map and properties and elements are set to empty fixed array.
3383
+ // r0: argc
3384
+ // r1: constructor function
3385
+ // r2: initial map
3386
+ // r3: object size (in words)
3387
+ // r4: JSObject (not tagged)
3388
+ // r7: undefined
3389
+ __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
3390
+ __ mov(r5, r4);
3391
+ ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
3392
+ __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
3393
+ ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
3394
+ __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
3395
+ ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
3396
+ __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
3397
+
3398
+ // Calculate the location of the first argument. The stack contains only the
3399
+ // argc arguments.
3400
+ __ add(r1, sp, Operand(r0, LSL, kPointerSizeLog2));
3401
+
3402
+ // Fill all the in-object properties with undefined.
3403
+ // r0: argc
3404
+ // r1: first argument
3405
+ // r3: object size (in words)
3406
+ // r4: JSObject (not tagged)
3407
+ // r5: First in-object property of JSObject (not tagged)
3408
+ // r7: undefined
3409
+ // Fill the initialized properties with a constant value or a passed argument
3410
+ // depending on the this.x = ...; assignment in the function.
3411
+ SharedFunctionInfo* shared = function->shared();
3412
+ for (int i = 0; i < shared->this_property_assignments_count(); i++) {
3413
+ if (shared->IsThisPropertyAssignmentArgument(i)) {
3414
+ Label not_passed, next;
3415
+ // Check if the argument assigned to the property is actually passed.
3416
+ int arg_number = shared->GetThisPropertyAssignmentArgument(i);
3417
+ __ cmp(r0, Operand(arg_number));
3418
+ __ b(le, &not_passed);
3419
+ // Argument passed - find it on the stack.
3420
+ __ ldr(r2, MemOperand(r1, (arg_number + 1) * -kPointerSize));
3421
+ __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
3422
+ __ b(&next);
3423
+ __ bind(&not_passed);
3424
+ // Set the property to undefined.
3425
+ __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
3426
+ __ bind(&next);
3427
+ } else {
3428
+ // Set the property to the constant value.
3429
+ Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
3430
+ __ mov(r2, Operand(constant));
3431
+ __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
3432
+ }
3433
+ }
3434
+
3435
+ // Fill the unused in-object property fields with undefined.
3436
+ ASSERT(function->has_initial_map());
3437
+ for (int i = shared->this_property_assignments_count();
3438
+ i < function->initial_map()->inobject_properties();
3439
+ i++) {
3440
+ __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
3441
+ }
3442
+
3443
+ // r0: argc
3444
+ // r4: JSObject (not tagged)
3445
+ // Move argc to r1 and the JSObject to return to r0 and tag it.
3446
+ __ mov(r1, r0);
3447
+ __ mov(r0, r4);
3448
+ __ orr(r0, r0, Operand(kHeapObjectTag));
3449
+
3450
+ // r0: JSObject
3451
+ // r1: argc
3452
+ // Remove caller arguments and receiver from the stack and return.
3453
+ __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2));
3454
+ __ add(sp, sp, Operand(kPointerSize));
3455
+ __ IncrementCounter(&Counters::constructed_objects, 1, r1, r2);
3456
+ __ IncrementCounter(&Counters::constructed_objects_stub, 1, r1, r2);
3457
+ __ Jump(lr);
3458
+
3459
+ // Jump to the generic stub in case the specialized code cannot handle the
3460
+ // construction.
3461
+ __ bind(&generic_stub_call);
3462
+ Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
3463
+ Handle<Code> generic_construct_stub(code);
3464
+ __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
3465
+
3466
+ // Return the generated code.
3467
+ return GetCode();
3468
+ }
3469
+
3470
+
3471
+ static bool IsElementTypeSigned(ExternalArrayType array_type) {
3472
+ switch (array_type) {
3473
+ case kExternalByteArray:
3474
+ case kExternalShortArray:
3475
+ case kExternalIntArray:
3476
+ return true;
3477
+
3478
+ case kExternalUnsignedByteArray:
3479
+ case kExternalUnsignedShortArray:
3480
+ case kExternalUnsignedIntArray:
3481
+ return false;
3482
+
3483
+ default:
3484
+ UNREACHABLE();
3485
+ return false;
3486
+ }
3487
+ }
3488
+
3489
+
3490
// Generates a keyed-load IC stub for an external (typed) array of
// |array_type|.  On entry r0 holds the key (a smi) and r1 the receiver;
// on success the element is returned in r0, either tagged as a smi or as
// a freshly allocated HeapNumber.  Any failed check — smi/type/access/
// bounds or heap-number allocation — tail-calls the generic
// Runtime::kKeyedGetProperty with the original key and receiver intact.
MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
    ExternalArrayType array_type, Code::Flags flags) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------
  // NOTE(review): |failed_allocation| is never bound or jumped to in this
  // function — allocation failures branch to |slow| instead.  Confirm
  // before removing.
  Label slow, failed_allocation;

  Register key = r0;
  Register receiver = r1;

  // Check that the object isn't a smi
  __ JumpIfSmi(receiver, &slow);

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &slow);

  // Check that the object is a JS object. Load map into r2.
  __ CompareObjectType(receiver, r2, r3, FIRST_JS_OBJECT_TYPE);
  __ b(lt, &slow);

  // Check that the receiver does not require access checks.  We need
  // to check this explicitly since this generic stub does not perform
  // map checks.
  __ ldrb(r3, FieldMemOperand(r2, Map::kBitFieldOffset));
  __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &slow);

  // Check that the elements array is the appropriate type of
  // ExternalArray.
  __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::RootIndexForExternalArrayType(array_type));
  __ cmp(r2, ip);
  __ b(ne, &slow);

  // Check that the index is in range.
  __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset));
  __ cmp(ip, Operand(key, ASR, kSmiTagSize));
  // Unsigned comparison catches both negative and too-large values.
  __ b(lo, &slow);

  // r3: elements array
  __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
  // r3: base pointer of external storage

  // We are not untagging smi key and instead work with it
  // as if it was premultiplied by 2.
  ASSERT((kSmiTag == 0) && (kSmiTagSize == 1));

  Register value = r2;
  switch (array_type) {
    case kExternalByteArray:
      // 1-byte elements: LSR 1 turns the smi key (index * 2) back into
      // the byte offset.
      __ ldrsb(value, MemOperand(r3, key, LSR, 1));
      break;
    case kExternalUnsignedByteArray:
      __ ldrb(value, MemOperand(r3, key, LSR, 1));
      break;
    case kExternalShortArray:
      // 2-byte elements: the smi key already equals index * 2.
      __ ldrsh(value, MemOperand(r3, key, LSL, 0));
      break;
    case kExternalUnsignedShortArray:
      __ ldrh(value, MemOperand(r3, key, LSL, 0));
      break;
    case kExternalIntArray:
    case kExternalUnsignedIntArray:
      // 4-byte elements: index * 4 == smi key << 1.
      __ ldr(value, MemOperand(r3, key, LSL, 1));
      break;
    case kExternalFloatArray:
      if (CpuFeatures::IsSupported(VFP3)) {
        CpuFeatures::Scope scope(VFP3);
        // vldr has no register-offset form, so compute the address first.
        __ add(r2, r3, Operand(key, LSL, 1));
        __ vldr(s0, r2, 0);
      } else {
        // Without VFP3 keep the raw binary32 bit pattern in an integer
        // register for the manual conversion below.
        __ ldr(value, MemOperand(r3, key, LSL, 1));
      }
      break;
    default:
      UNREACHABLE();
      break;
  }

  // For integer array types:
  // r2: value
  // For floating-point array type
  // s0: value (if VFP3 is supported)
  // r2: value (if VFP3 is not supported)

  if (array_type == kExternalIntArray) {
    // For the Int and UnsignedInt array types, we need to see whether
    // the value can be represented in a Smi. If not, we need to convert
    // it to a HeapNumber.
    // value + 0x40000000 has its sign bit set exactly when value lies
    // outside the smi range [-2^30, 2^30), hence the cmp/mi pair.
    Label box_int;
    __ cmp(value, Operand(0xC0000000));
    __ b(mi, &box_int);
    // Tag integer as smi and return it.
    __ mov(r0, Operand(value, LSL, kSmiTagSize));
    __ Ret();

    __ bind(&box_int);
    // Allocate a HeapNumber for the result and perform int-to-double
    // conversion.  Don't touch r0 or r1 as they are needed if allocation
    // fails.
    __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(r5, r3, r4, r6, &slow);
    // Now we can use r0 for the result as key is not needed any more.
    __ mov(r0, r5);

    if (CpuFeatures::IsSupported(VFP3)) {
      CpuFeatures::Scope scope(VFP3);
      __ vmov(s0, value);
      __ vcvt_f64_s32(d0, s0);
      // vstr needs an untagged base address (offset must stay a
      // multiple of 4, so the tag cannot be folded into it).
      __ sub(r3, r0, Operand(kHeapObjectTag));
      __ vstr(d0, r3, HeapNumber::kValueOffset);
      __ Ret();
    } else {
      // Software int32 -> double conversion; the stub returns directly.
      WriteInt32ToHeapNumberStub stub(value, r0, r3);
      __ TailCallStub(&stub);
    }
  } else if (array_type == kExternalUnsignedIntArray) {
    // The test is different for unsigned int values.  Since we need
    // the value to be in the range of a positive smi, we can't
    // handle either of the top two bits being set in the value.
    if (CpuFeatures::IsSupported(VFP3)) {
      CpuFeatures::Scope scope(VFP3);
      // NOTE(review): |done| is declared but unused on this VFP3 path.
      Label box_int, done;
      __ tst(value, Operand(0xC0000000));
      __ b(ne, &box_int);
      // Tag integer as smi and return it.
      __ mov(r0, Operand(value, LSL, kSmiTagSize));
      __ Ret();

      __ bind(&box_int);
      // Save the value in a VFP register before AllocateHeapNumber
      // clobbers the integer registers.
      __ vmov(s0, value);
      // Allocate a HeapNumber for the result and perform int-to-double
      // conversion.  Don't use r0 and r1 as AllocateHeapNumber clobbers all
      // registers - also when jumping due to exhausted young space.
      __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
      __ AllocateHeapNumber(r2, r3, r4, r6, &slow);

      // Unsigned conversion this time (vcvt_f64_u32).
      __ vcvt_f64_u32(d0, s0);
      __ sub(r1, r2, Operand(kHeapObjectTag));
      __ vstr(d0, r1, HeapNumber::kValueOffset);

      __ mov(r0, r2);
      __ Ret();
    } else {
      // Check whether unsigned integer fits into smi.
      Label box_int_0, box_int_1, done;
      __ tst(value, Operand(0x80000000));
      __ b(ne, &box_int_0);
      __ tst(value, Operand(0x40000000));
      __ b(ne, &box_int_1);
      // Tag integer as smi and return it.
      __ mov(r0, Operand(value, LSL, kSmiTagSize));
      __ Ret();

      // Manual uint32 -> IEEE-754 double conversion; the leading-zero
      // count decides the exponent adjustment in GenerateUInt2Double.
      Register hiword = value;  // r2.
      Register loword = r3;

      __ bind(&box_int_0);
      // Integer does not have leading zeros.
      GenerateUInt2Double(masm(), hiword, loword, r4, 0);
      __ b(&done);

      __ bind(&box_int_1);
      // Integer has one leading zero.
      GenerateUInt2Double(masm(), hiword, loword, r4, 1);


      __ bind(&done);
      // Integer was converted to double in registers hiword:loword.
      // Wrap it into a HeapNumber. Don't use r0 and r1 as AllocateHeapNumber
      // clobbers all registers - also when jumping due to exhausted young
      // space.
      __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
      __ AllocateHeapNumber(r4, r5, r7, r6, &slow);

      __ str(hiword, FieldMemOperand(r4, HeapNumber::kExponentOffset));
      __ str(loword, FieldMemOperand(r4, HeapNumber::kMantissaOffset));

      __ mov(r0, r4);
      __ Ret();
    }
  } else if (array_type == kExternalFloatArray) {
    // For the floating-point array type, we need to always allocate a
    // HeapNumber.
    if (CpuFeatures::IsSupported(VFP3)) {
      CpuFeatures::Scope scope(VFP3);
      // Allocate a HeapNumber for the result. Don't use r0 and r1 as
      // AllocateHeapNumber clobbers all registers - also when jumping due to
      // exhausted young space.
      __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
      __ AllocateHeapNumber(r2, r3, r4, r6, &slow);
      // Widen binary32 -> binary64 and store into the new HeapNumber.
      __ vcvt_f64_f32(d0, s0);
      __ sub(r1, r2, Operand(kHeapObjectTag));
      __ vstr(d0, r1, HeapNumber::kValueOffset);

      __ mov(r0, r2);
      __ Ret();
    } else {
      // Allocate a HeapNumber for the result. Don't use r0 and r1 as
      // AllocateHeapNumber clobbers all registers - also when jumping due to
      // exhausted young space.
      __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
      __ AllocateHeapNumber(r3, r4, r5, r6, &slow);
      // VFP is not available, do manual single to double conversion.

      // r2: floating point value (binary32)
      // r3: heap number for result

      // Extract mantissa to r0. OK to clobber r0 now as there are no jumps to
      // the slow case from here.
      __ and_(r0, value, Operand(kBinary32MantissaMask));

      // Extract exponent to r1. OK to clobber r1 now as there are no jumps to
      // the slow case from here.
      __ mov(r1, Operand(value, LSR, kBinary32MantissaBits));
      __ and_(r1, r1, Operand(kBinary32ExponentMask >> kBinary32MantissaBits));

      Label exponent_rebiased;
      // Exponent 0x00 (zero/denormal): keep the zero exponent as-is.
      __ teq(r1, Operand(0x00));
      __ b(eq, &exponent_rebiased);

      // Exponent 0xff (Inf/NaN): map to the double all-ones exponent.
      __ teq(r1, Operand(0xff));
      __ mov(r1, Operand(0x7ff), LeaveCC, eq);
      __ b(eq, &exponent_rebiased);

      // Rebias exponent.
      __ add(r1,
             r1,
             Operand(-kBinary32ExponentBias + HeapNumber::kExponentBias));

      __ bind(&exponent_rebiased);
      __ and_(r2, value, Operand(kBinary32SignMask));
      // The raw bits in r2 are consumed here; make accidental reuse fail.
      value = no_reg;
      __ orr(r2, r2, Operand(r1, LSL, HeapNumber::kMantissaBitsInTopWord));

      // Shift mantissa.
      // The binary32 mantissa is split across the double's hi/lo words.
      static const int kMantissaShiftForHiWord =
          kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;

      static const int kMantissaShiftForLoWord =
          kBitsPerInt - kMantissaShiftForHiWord;

      __ orr(r2, r2, Operand(r0, LSR, kMantissaShiftForHiWord));
      __ mov(r0, Operand(r0, LSL, kMantissaShiftForLoWord));

      __ str(r2, FieldMemOperand(r3, HeapNumber::kExponentOffset));
      __ str(r0, FieldMemOperand(r3, HeapNumber::kMantissaOffset));

      __ mov(r0, r3);
      __ Ret();
    }

  } else {
    // Byte and short element types always fit in a smi.
    // Tag integer as smi and return it.
    __ mov(r0, Operand(value, LSL, kSmiTagSize));
    __ Ret();
  }

  // Slow case, key and receiver still in r0 and r1.
  __ bind(&slow);
  __ IncrementCounter(&Counters::keyed_load_external_array_slow, 1, r2, r3);

  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------

  __ Push(r1, r0);

  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);

  return GetCode(flags);
}
3768
+
3769
+
3770
// Generates a keyed-store IC stub for an external (typed) array of
// |array_type|.  On entry r0 holds the value (smi or HeapNumber), r1 the
// smi key, and r2 the receiver.  Entry registers are left intact on the
// fast path, so r0 doubles as the return value.  Any failed check falls
// through to Runtime::kSetProperty with the strict-mode flag extracted
// from |flags|.
MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
    ExternalArrayType array_type, Code::Flags flags) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------
  Label slow, check_heap_number;

  // Register usage.
  Register value = r0;
  Register key = r1;
  Register receiver = r2;
  // r3 mostly holds the elements array or the destination external array.

  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, &slow);

  // Check that the object is a JS object. Load map into r3.
  // NOTE(review): this uses le (which also rejects FIRST_JS_OBJECT_TYPE
  // itself) where the load stub uses lt — confirm the asymmetry is
  // intentional.
  __ CompareObjectType(receiver, r3, r4, FIRST_JS_OBJECT_TYPE);
  __ b(le, &slow);

  // Check that the receiver does not require access checks.  We need
  // to do this because this generic stub does not perform map checks.
  __ ldrb(ip, FieldMemOperand(r3, Map::kBitFieldOffset));
  __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &slow);

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &slow);

  // Check that the elements array is the appropriate type of ExternalArray.
  __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ ldr(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::RootIndexForExternalArrayType(array_type));
  __ cmp(r4, ip);
  __ b(ne, &slow);

  // Check that the index is in range.
  __ mov(r4, Operand(key, ASR, kSmiTagSize));  // Untag the index.
  __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset));
  __ cmp(r4, ip);
  // Unsigned comparison catches both negative and too-large values.
  __ b(hs, &slow);

  // Handle both smis and HeapNumbers in the fast path. Go to the
  // runtime for all other kinds of values.
  // r3: external array.
  // r4: key (integer).
  __ JumpIfNotSmi(value, &check_heap_number);
  __ mov(r5, Operand(value, ASR, kSmiTagSize));  // Untag the value.
  __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));

  // r3: base pointer of external storage.
  // r4: key (integer).
  // r5: value (integer).
  // The store truncates the untagged integer to the element width.
  switch (array_type) {
    case kExternalByteArray:
    case kExternalUnsignedByteArray:
      __ strb(r5, MemOperand(r3, r4, LSL, 0));
      break;
    case kExternalShortArray:
    case kExternalUnsignedShortArray:
      __ strh(r5, MemOperand(r3, r4, LSL, 1));
      break;
    case kExternalIntArray:
    case kExternalUnsignedIntArray:
      __ str(r5, MemOperand(r3, r4, LSL, 2));
      break;
    case kExternalFloatArray:
      // Perform int-to-float conversion and store to memory.
      StoreIntAsFloat(masm(), r3, r4, r5, r6, r7, r9);
      break;
    default:
      UNREACHABLE();
      break;
  }

  // Entry registers are intact, r0 holds the value which is the return value.
  __ Ret();


  // r3: external array.
  // r4: index (integer).
  __ bind(&check_heap_number);
  __ CompareObjectType(value, r5, r6, HEAP_NUMBER_TYPE);
  __ b(ne, &slow);

  __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));

  // r3: base pointer of external storage.
  // r4: key (integer).

  // The WebGL specification leaves the behavior of storing NaN and
  // +/-Infinity into integer arrays basically undefined. For more
  // reproducible behavior, convert these to zero.
  if (CpuFeatures::IsSupported(VFP3)) {
    CpuFeatures::Scope scope(VFP3);


    if (array_type == kExternalFloatArray) {
      // vldr requires offset to be a multiple of 4 so we can not
      // include -kHeapObjectTag into it.
      __ sub(r5, r0, Operand(kHeapObjectTag));
      __ vldr(d0, r5, HeapNumber::kValueOffset);
      __ add(r5, r3, Operand(r4, LSL, 2));
      // Narrow binary64 -> binary32 before storing.
      __ vcvt_f32_f64(s0, d0);
      __ vstr(s0, r5, 0);
    } else {
      // Need to perform float-to-int conversion.
      // Test for NaN or infinity (both give zero).
      __ ldr(r6, FieldMemOperand(value, HeapNumber::kExponentOffset));

      // Hoisted load. vldr requires offset to be a multiple of 4 so we can not
      // include -kHeapObjectTag into it.
      __ sub(r5, value, Operand(kHeapObjectTag));
      __ vldr(d0, r5, HeapNumber::kValueOffset);

      __ Sbfx(r6, r6, HeapNumber::kExponentShift, HeapNumber::kExponentBits);
      // NaNs and Infinities have all-one exponents so they sign extend to -1.
      __ cmp(r6, Operand(-1));
      __ mov(r5, Operand(0), LeaveCC, eq);

      // Not infinity or NaN simply convert to int.
      // The ne-conditional conversions below are skipped when the
      // exponent was all ones, leaving the zero written above in r5.
      if (IsElementTypeSigned(array_type)) {
        __ vcvt_s32_f64(s0, d0, kDefaultRoundToZero, ne);
      } else {
        __ vcvt_u32_f64(s0, d0, kDefaultRoundToZero, ne);
      }
      __ vmov(r5, s0, ne);

      switch (array_type) {
        case kExternalByteArray:
        case kExternalUnsignedByteArray:
          __ strb(r5, MemOperand(r3, r4, LSL, 0));
          break;
        case kExternalShortArray:
        case kExternalUnsignedShortArray:
          __ strh(r5, MemOperand(r3, r4, LSL, 1));
          break;
        case kExternalIntArray:
        case kExternalUnsignedIntArray:
          __ str(r5, MemOperand(r3, r4, LSL, 2));
          break;
        default:
          UNREACHABLE();
          break;
      }
    }

    // Entry registers are intact, r0 holds the value which is the return value.
    __ Ret();
  } else {
    // VFP3 is not available do manual conversions.
    // r5/r6 hold the raw exponent/mantissa words of the HeapNumber.
    __ ldr(r5, FieldMemOperand(value, HeapNumber::kExponentOffset));
    __ ldr(r6, FieldMemOperand(value, HeapNumber::kMantissaOffset));

    if (array_type == kExternalFloatArray) {
      // Manual binary64 -> binary32 narrowing done on the raw bits.
      Label done, nan_or_infinity_or_zero;
      static const int kMantissaInHiWordShift =
          kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;

      static const int kMantissaInLoWordShift =
          kBitsPerInt - kMantissaInHiWordShift;

      // Test for all special exponent values: zeros, subnormal numbers, NaNs
      // and infinities. All these should be converted to 0.
      __ mov(r7, Operand(HeapNumber::kExponentMask));
      __ and_(r9, r5, Operand(r7), SetCC);
      __ b(eq, &nan_or_infinity_or_zero);

      __ teq(r9, Operand(r7));
      __ mov(r9, Operand(kBinary32ExponentMask), LeaveCC, eq);
      __ b(eq, &nan_or_infinity_or_zero);

      // Rebias exponent.
      __ mov(r9, Operand(r9, LSR, HeapNumber::kExponentShift));
      __ add(r9,
             r9,
             Operand(kBinary32ExponentBias - HeapNumber::kExponentBias));

      // Overflow: result is a signed infinity in binary32.
      __ cmp(r9, Operand(kBinary32MaxExponent));
      __ and_(r5, r5, Operand(HeapNumber::kSignMask), LeaveCC, gt);
      __ orr(r5, r5, Operand(kBinary32ExponentMask), LeaveCC, gt);
      __ b(gt, &done);

      // Underflow: result is a signed zero in binary32.
      __ cmp(r9, Operand(kBinary32MinExponent));
      __ and_(r5, r5, Operand(HeapNumber::kSignMask), LeaveCC, lt);
      __ b(lt, &done);

      // Normal case: reassemble sign | exponent | narrowed mantissa.
      __ and_(r7, r5, Operand(HeapNumber::kSignMask));
      __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
      __ orr(r7, r7, Operand(r5, LSL, kMantissaInHiWordShift));
      __ orr(r7, r7, Operand(r6, LSR, kMantissaInLoWordShift));
      __ orr(r5, r7, Operand(r9, LSL, kBinary32ExponentShift));

      __ bind(&done);
      __ str(r5, MemOperand(r3, r4, LSL, 2));
      // Entry registers are intact, r0 holds the value which is the return
      // value.
      __ Ret();

      __ bind(&nan_or_infinity_or_zero);
      // Preserve sign (and, for NaN/Inf, the top mantissa bits) while
      // forcing the special binary32 exponent already placed in r9.
      __ and_(r7, r5, Operand(HeapNumber::kSignMask));
      __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
      __ orr(r9, r9, r7);
      __ orr(r9, r9, Operand(r5, LSL, kMantissaInHiWordShift));
      __ orr(r5, r9, Operand(r6, LSR, kMantissaInLoWordShift));
      __ b(&done);
    } else {
      // Manual double -> integer truncation on the raw bit pattern.
      bool is_signed_type = IsElementTypeSigned(array_type);
      int meaningfull_bits = is_signed_type ? (kBitsPerInt - 1) : kBitsPerInt;
      int32_t min_value = is_signed_type ? 0x80000000 : 0x00000000;

      Label done, sign;

      // Test for all special exponent values: zeros, subnormal numbers, NaNs
      // and infinities. All these should be converted to 0.
      __ mov(r7, Operand(HeapNumber::kExponentMask));
      __ and_(r9, r5, Operand(r7), SetCC);
      __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, eq);
      __ b(eq, &done);

      __ teq(r9, Operand(r7));
      __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, eq);
      __ b(eq, &done);

      // Unbias exponent.
      __ mov(r9, Operand(r9, LSR, HeapNumber::kExponentShift));
      __ sub(r9, r9, Operand(HeapNumber::kExponentBias), SetCC);
      // If exponent is negative then result is 0.
      __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, mi);
      __ b(mi, &done);

      // If exponent is too big then result is minimal value.
      __ cmp(r9, Operand(meaningfull_bits - 1));
      __ mov(r5, Operand(min_value), LeaveCC, ge);
      __ b(ge, &done);

      // Isolate sign in r7, restore the implicit leading 1 of the
      // mantissa in r5.
      __ and_(r7, r5, Operand(HeapNumber::kSignMask), SetCC);
      __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
      __ orr(r5, r5, Operand(1u << HeapNumber::kMantissaBitsInTopWord));

      // If the shift amount is non-negative (pl) the hi word alone holds
      // the integer; otherwise bits from the lo word are needed too.
      __ rsb(r9, r9, Operand(HeapNumber::kMantissaBitsInTopWord), SetCC);
      __ mov(r5, Operand(r5, LSR, r9), LeaveCC, pl);
      __ b(pl, &sign);

      __ rsb(r9, r9, Operand(0, RelocInfo::NONE));
      __ mov(r5, Operand(r5, LSL, r9));
      __ rsb(r9, r9, Operand(meaningfull_bits));
      __ orr(r5, r5, Operand(r6, LSR, r9));

      __ bind(&sign);
      // Negate the magnitude when the original sign bit was set.
      __ teq(r7, Operand(0, RelocInfo::NONE));
      __ rsb(r5, r5, Operand(0, RelocInfo::NONE), LeaveCC, ne);

      __ bind(&done);
      switch (array_type) {
        case kExternalByteArray:
        case kExternalUnsignedByteArray:
          __ strb(r5, MemOperand(r3, r4, LSL, 0));
          break;
        case kExternalShortArray:
        case kExternalUnsignedShortArray:
          __ strh(r5, MemOperand(r3, r4, LSL, 1));
          break;
        case kExternalIntArray:
        case kExternalUnsignedIntArray:
          __ str(r5, MemOperand(r3, r4, LSL, 2));
          break;
        default:
          UNREACHABLE();
          break;
      }
    }
  }

  // Slow case: call runtime.
  __ bind(&slow);

  // Entry registers are intact.
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------

  // Push receiver, key and value for runtime call.
  __ Push(r2, r1, r0);

  // kSetProperty additionally takes the property attributes and the
  // strict-mode flag (recovered from the code flags).
  __ mov(r1, Operand(Smi::FromInt(NONE)));  // PropertyAttributes
  __ mov(r0, Operand(Smi::FromInt(
      Code::ExtractExtraICStateFromFlags(flags) & kStrictMode)));
  __ Push(r1, r0);

  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);

  return GetCode(flags);
}
4071
+
4072
+
4073
+ #undef __
4074
+
4075
+ } } // namespace v8::internal
4076
+
4077
+ #endif // V8_TARGET_ARCH_ARM