therubyracer 0.9.0beta2 → 0.9.0beta3

Potentially problematic release: this version of therubyracer might be problematic.
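
For anyone who still wants to try it, the prerelease has to be requested explicitly: RubyGems treats any version containing a letter, such as 0.9.0beta3, as a prerelease, and Bundler will not resolve one unless the requirement names it. A minimal Gemfile sketch (the source URL is the standard RubyGems host, not taken from this page):

    # Gemfile
    source 'https://rubygems.org'

    # Prerelease versions are only selected when the requirement
    # pins them explicitly.
    gem 'therubyracer', '0.9.0beta3'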

Files changed (920)
  1. data/.gitmodules +3 -0
  2. data/ext/v8/upstream/Makefile +1 -2
  3. data/ext/v8/upstream/v8/.gitignore +33 -0
  4. data/ext/v8/upstream/v8/AUTHORS +42 -0
  5. data/ext/v8/upstream/v8/ChangeLog +2663 -0
  6. data/ext/v8/upstream/{3.1.8 → v8}/LICENSE +0 -0
  7. data/ext/v8/upstream/{3.1.8 → v8}/LICENSE.strongtalk +0 -0
  8. data/ext/v8/upstream/{3.1.8 → v8}/LICENSE.v8 +0 -0
  9. data/ext/v8/upstream/{3.1.8 → v8}/LICENSE.valgrind +0 -0
  10. data/ext/v8/upstream/v8/SConstruct +1473 -0
  11. data/ext/v8/upstream/{3.1.8 → v8}/build/README.txt +0 -0
  12. data/ext/v8/upstream/{3.1.8 → v8}/build/all.gyp +0 -0
  13. data/ext/v8/upstream/{3.1.8 → v8}/build/armu.gypi +0 -0
  14. data/ext/v8/upstream/{3.1.8 → v8}/build/common.gypi +0 -0
  15. data/ext/v8/upstream/{3.1.8 → v8}/build/gyp_v8 +0 -0
  16. data/ext/v8/upstream/v8/include/v8-debug.h +394 -0
  17. data/ext/v8/upstream/v8/include/v8-preparser.h +116 -0
  18. data/ext/v8/upstream/v8/include/v8-profiler.h +505 -0
  19. data/ext/v8/upstream/v8/include/v8-testing.h +104 -0
  20. data/ext/v8/upstream/v8/include/v8.h +4000 -0
  21. data/ext/v8/upstream/{3.1.8 → v8}/include/v8stdint.h +0 -0
  22. data/ext/v8/upstream/v8/preparser/SConscript +38 -0
  23. data/ext/v8/upstream/v8/preparser/preparser-process.cc +169 -0
  24. data/ext/v8/upstream/v8/src/SConscript +380 -0
  25. data/ext/v8/upstream/v8/src/accessors.cc +766 -0
  26. data/ext/v8/upstream/{3.1.8 → v8}/src/accessors.h +0 -0
  27. data/ext/v8/upstream/v8/src/allocation-inl.h +49 -0
  28. data/ext/v8/upstream/v8/src/allocation.cc +122 -0
  29. data/ext/v8/upstream/v8/src/allocation.h +143 -0
  30. data/ext/v8/upstream/v8/src/api.cc +5678 -0
  31. data/ext/v8/upstream/v8/src/api.h +572 -0
  32. data/ext/v8/upstream/{3.1.8 → v8}/src/apinatives.js +0 -0
  33. data/ext/v8/upstream/v8/src/apiutils.h +73 -0
  34. data/ext/v8/upstream/v8/src/arguments.h +116 -0
  35. data/ext/v8/upstream/v8/src/arm/assembler-arm-inl.h +353 -0
  36. data/ext/v8/upstream/v8/src/arm/assembler-arm.cc +2877 -0
  37. data/ext/v8/upstream/v8/src/arm/assembler-arm.h +1382 -0
  38. data/ext/v8/upstream/v8/src/arm/builtins-arm.cc +1634 -0
  39. data/ext/v8/upstream/v8/src/arm/code-stubs-arm.cc +6917 -0
  40. data/ext/v8/upstream/v8/src/arm/code-stubs-arm.h +623 -0
  41. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/codegen-arm-inl.h +0 -0
  42. data/ext/v8/upstream/v8/src/arm/codegen-arm.cc +7437 -0
  43. data/ext/v8/upstream/v8/src/arm/codegen-arm.h +595 -0
  44. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/constants-arm.cc +0 -0
  45. data/ext/v8/upstream/v8/src/arm/constants-arm.h +778 -0
  46. data/ext/v8/upstream/v8/src/arm/cpu-arm.cc +149 -0
  47. data/ext/v8/upstream/v8/src/arm/debug-arm.cc +317 -0
  48. data/ext/v8/upstream/v8/src/arm/deoptimizer-arm.cc +737 -0
  49. data/ext/v8/upstream/v8/src/arm/disasm-arm.cc +1503 -0
  50. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/frames-arm.cc +0 -0
  51. data/ext/v8/upstream/v8/src/arm/frames-arm.h +168 -0
  52. data/ext/v8/upstream/v8/src/arm/full-codegen-arm.cc +4374 -0
  53. data/ext/v8/upstream/v8/src/arm/ic-arm.cc +1793 -0
  54. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/jump-target-arm.cc +0 -0
  55. data/ext/v8/upstream/v8/src/arm/lithium-arm.cc +2120 -0
  56. data/ext/v8/upstream/v8/src/arm/lithium-arm.h +2179 -0
  57. data/ext/v8/upstream/v8/src/arm/lithium-codegen-arm.cc +4132 -0
  58. data/ext/v8/upstream/v8/src/arm/lithium-codegen-arm.h +329 -0
  59. data/ext/v8/upstream/v8/src/arm/lithium-gap-resolver-arm.cc +305 -0
  60. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/lithium-gap-resolver-arm.h +0 -0
  61. data/ext/v8/upstream/v8/src/arm/macro-assembler-arm.cc +2939 -0
  62. data/ext/v8/upstream/v8/src/arm/macro-assembler-arm.h +1071 -0
  63. data/ext/v8/upstream/v8/src/arm/regexp-macro-assembler-arm.cc +1287 -0
  64. data/ext/v8/upstream/v8/src/arm/regexp-macro-assembler-arm.h +253 -0
  65. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/register-allocator-arm-inl.h +0 -0
  66. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/register-allocator-arm.cc +0 -0
  67. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/register-allocator-arm.h +0 -0
  68. data/ext/v8/upstream/v8/src/arm/simulator-arm.cc +3288 -0
  69. data/ext/v8/upstream/v8/src/arm/simulator-arm.h +413 -0
  70. data/ext/v8/upstream/v8/src/arm/stub-cache-arm.cc +4034 -0
  71. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/virtual-frame-arm-inl.h +0 -0
  72. data/ext/v8/upstream/v8/src/arm/virtual-frame-arm.cc +843 -0
  73. data/ext/v8/upstream/v8/src/arm/virtual-frame-arm.h +523 -0
  74. data/ext/v8/upstream/v8/src/array.js +1249 -0
  75. data/ext/v8/upstream/v8/src/assembler.cc +1067 -0
  76. data/ext/v8/upstream/v8/src/assembler.h +823 -0
  77. data/ext/v8/upstream/v8/src/ast-inl.h +112 -0
  78. data/ext/v8/upstream/v8/src/ast.cc +1078 -0
  79. data/ext/v8/upstream/v8/src/ast.h +2234 -0
  80. data/ext/v8/upstream/v8/src/atomicops.h +167 -0
  81. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_arm_gcc.h +0 -0
  82. data/ext/v8/upstream/v8/src/atomicops_internals_mips_gcc.h +169 -0
  83. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_x86_gcc.cc +0 -0
  84. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_x86_gcc.h +0 -0
  85. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_x86_macosx.h +0 -0
  86. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_x86_msvc.h +0 -0
  87. data/ext/v8/upstream/{3.1.8 → v8}/src/bignum-dtoa.cc +0 -0
  88. data/ext/v8/upstream/{3.1.8 → v8}/src/bignum-dtoa.h +0 -0
  89. data/ext/v8/upstream/{3.1.8 → v8}/src/bignum.cc +0 -0
  90. data/ext/v8/upstream/{3.1.8 → v8}/src/bignum.h +0 -0
  91. data/ext/v8/upstream/v8/src/bootstrapper.cc +2138 -0
  92. data/ext/v8/upstream/v8/src/bootstrapper.h +185 -0
  93. data/ext/v8/upstream/v8/src/builtins.cc +1708 -0
  94. data/ext/v8/upstream/v8/src/builtins.h +368 -0
  95. data/ext/v8/upstream/{3.1.8 → v8}/src/bytecodes-irregexp.h +0 -0
  96. data/ext/v8/upstream/{3.1.8 → v8}/src/cached-powers.cc +0 -0
  97. data/ext/v8/upstream/{3.1.8 → v8}/src/cached-powers.h +0 -0
  98. data/ext/v8/upstream/{3.1.8 → v8}/src/char-predicates-inl.h +0 -0
  99. data/ext/v8/upstream/{3.1.8 → v8}/src/char-predicates.h +0 -0
  100. data/ext/v8/upstream/v8/src/checks.cc +110 -0
  101. data/ext/v8/upstream/v8/src/checks.h +296 -0
  102. data/ext/v8/upstream/{3.1.8 → v8}/src/circular-queue-inl.h +0 -0
  103. data/ext/v8/upstream/{3.1.8 → v8}/src/circular-queue.cc +0 -0
  104. data/ext/v8/upstream/{3.1.8 → v8}/src/circular-queue.h +0 -0
  105. data/ext/v8/upstream/v8/src/code-stubs.cc +240 -0
  106. data/ext/v8/upstream/v8/src/code-stubs.h +971 -0
  107. data/ext/v8/upstream/{3.1.8 → v8}/src/code.h +0 -0
  108. data/ext/v8/upstream/v8/src/codegen-inl.h +68 -0
  109. data/ext/v8/upstream/v8/src/codegen.cc +505 -0
  110. data/ext/v8/upstream/v8/src/codegen.h +245 -0
  111. data/ext/v8/upstream/v8/src/compilation-cache.cc +540 -0
  112. data/ext/v8/upstream/v8/src/compilation-cache.h +287 -0
  113. data/ext/v8/upstream/v8/src/compiler.cc +792 -0
  114. data/ext/v8/upstream/v8/src/compiler.h +307 -0
  115. data/ext/v8/upstream/v8/src/contexts.cc +327 -0
  116. data/ext/v8/upstream/v8/src/contexts.h +382 -0
  117. data/ext/v8/upstream/{3.1.8 → v8}/src/conversions-inl.h +0 -0
  118. data/ext/v8/upstream/v8/src/conversions.cc +1125 -0
  119. data/ext/v8/upstream/{3.1.8 → v8}/src/conversions.h +0 -0
  120. data/ext/v8/upstream/v8/src/counters.cc +93 -0
  121. data/ext/v8/upstream/v8/src/counters.h +254 -0
  122. data/ext/v8/upstream/v8/src/cpu-profiler-inl.h +101 -0
  123. data/ext/v8/upstream/v8/src/cpu-profiler.cc +606 -0
  124. data/ext/v8/upstream/v8/src/cpu-profiler.h +305 -0
  125. data/ext/v8/upstream/v8/src/cpu.h +67 -0
  126. data/ext/v8/upstream/v8/src/d8-debug.cc +367 -0
  127. data/ext/v8/upstream/v8/src/d8-debug.h +158 -0
  128. data/ext/v8/upstream/v8/src/d8-posix.cc +695 -0
  129. data/ext/v8/upstream/{3.1.8 → v8}/src/d8-readline.cc +0 -0
  130. data/ext/v8/upstream/{3.1.8 → v8}/src/d8-windows.cc +0 -0
  131. data/ext/v8/upstream/v8/src/d8.cc +796 -0
  132. data/ext/v8/upstream/v8/src/d8.gyp +88 -0
  133. data/ext/v8/upstream/{3.1.8 → v8}/src/d8.h +0 -0
  134. data/ext/v8/upstream/{3.1.8 → v8}/src/d8.js +0 -0
  135. data/ext/v8/upstream/{3.1.8 → v8}/src/data-flow.cc +0 -0
  136. data/ext/v8/upstream/v8/src/data-flow.h +379 -0
  137. data/ext/v8/upstream/{3.1.8 → v8}/src/date.js +0 -0
  138. data/ext/v8/upstream/{3.1.8 → v8}/src/dateparser-inl.h +0 -0
  139. data/ext/v8/upstream/{3.1.8 → v8}/src/dateparser.cc +0 -0
  140. data/ext/v8/upstream/v8/src/dateparser.h +265 -0
  141. data/ext/v8/upstream/v8/src/debug-agent.cc +447 -0
  142. data/ext/v8/upstream/v8/src/debug-agent.h +129 -0
  143. data/ext/v8/upstream/{3.1.8 → v8}/src/debug-debugger.js +0 -0
  144. data/ext/v8/upstream/v8/src/debug.cc +3188 -0
  145. data/ext/v8/upstream/v8/src/debug.h +1055 -0
  146. data/ext/v8/upstream/v8/src/deoptimizer.cc +1296 -0
  147. data/ext/v8/upstream/v8/src/deoptimizer.h +629 -0
  148. data/ext/v8/upstream/v8/src/disasm.h +80 -0
  149. data/ext/v8/upstream/v8/src/disassembler.cc +339 -0
  150. data/ext/v8/upstream/{3.1.8 → v8}/src/disassembler.h +0 -0
  151. data/ext/v8/upstream/{3.1.8 → v8}/src/diy-fp.cc +0 -0
  152. data/ext/v8/upstream/{3.1.8 → v8}/src/diy-fp.h +0 -0
  153. data/ext/v8/upstream/{3.1.8 → v8}/src/double.h +0 -0
  154. data/ext/v8/upstream/{3.1.8 → v8}/src/dtoa.cc +0 -0
  155. data/ext/v8/upstream/{3.1.8 → v8}/src/dtoa.h +0 -0
  156. data/ext/v8/upstream/v8/src/execution.cc +791 -0
  157. data/ext/v8/upstream/v8/src/execution.h +291 -0
  158. data/ext/v8/upstream/v8/src/extensions/experimental/break-iterator.cc +250 -0
  159. data/ext/v8/upstream/v8/src/extensions/experimental/break-iterator.h +89 -0
  160. data/ext/v8/upstream/v8/src/extensions/experimental/experimental.gyp +55 -0
  161. data/ext/v8/upstream/v8/src/extensions/experimental/i18n-extension.cc +284 -0
  162. data/ext/v8/upstream/{3.1.8 → v8}/src/extensions/experimental/i18n-extension.h +0 -0
  163. data/ext/v8/upstream/v8/src/extensions/externalize-string-extension.cc +141 -0
  164. data/ext/v8/upstream/{3.1.8 → v8}/src/extensions/externalize-string-extension.h +0 -0
  165. data/ext/v8/upstream/v8/src/extensions/gc-extension.cc +58 -0
  166. data/ext/v8/upstream/{3.1.8 → v8}/src/extensions/gc-extension.h +0 -0
  167. data/ext/v8/upstream/v8/src/factory.cc +1194 -0
  168. data/ext/v8/upstream/v8/src/factory.h +436 -0
  169. data/ext/v8/upstream/{3.1.8 → v8}/src/fast-dtoa.cc +0 -0
  170. data/ext/v8/upstream/{3.1.8 → v8}/src/fast-dtoa.h +0 -0
  171. data/ext/v8/upstream/{3.1.8 → v8}/src/fixed-dtoa.cc +0 -0
  172. data/ext/v8/upstream/{3.1.8 → v8}/src/fixed-dtoa.h +0 -0
  173. data/ext/v8/upstream/v8/src/flag-definitions.h +556 -0
  174. data/ext/v8/upstream/{3.1.8 → v8}/src/flags.cc +0 -0
  175. data/ext/v8/upstream/{3.1.8 → v8}/src/flags.h +0 -0
  176. data/ext/v8/upstream/v8/src/frame-element.cc +37 -0
  177. data/ext/v8/upstream/v8/src/frame-element.h +269 -0
  178. data/ext/v8/upstream/v8/src/frames-inl.h +236 -0
  179. data/ext/v8/upstream/v8/src/frames.cc +1273 -0
  180. data/ext/v8/upstream/v8/src/frames.h +854 -0
  181. data/ext/v8/upstream/v8/src/full-codegen.cc +1385 -0
  182. data/ext/v8/upstream/v8/src/full-codegen.h +753 -0
  183. data/ext/v8/upstream/v8/src/func-name-inferrer.cc +91 -0
  184. data/ext/v8/upstream/v8/src/func-name-inferrer.h +111 -0
  185. data/ext/v8/upstream/v8/src/gdb-jit.cc +1548 -0
  186. data/ext/v8/upstream/{3.1.8 → v8}/src/gdb-jit.h +0 -0
  187. data/ext/v8/upstream/v8/src/global-handles.cc +596 -0
  188. data/ext/v8/upstream/v8/src/global-handles.h +239 -0
  189. data/ext/v8/upstream/v8/src/globals.h +325 -0
  190. data/ext/v8/upstream/v8/src/handles-inl.h +177 -0
  191. data/ext/v8/upstream/v8/src/handles.cc +965 -0
  192. data/ext/v8/upstream/v8/src/handles.h +372 -0
  193. data/ext/v8/upstream/{3.1.8 → v8}/src/hashmap.cc +0 -0
  194. data/ext/v8/upstream/v8/src/hashmap.h +121 -0
  195. data/ext/v8/upstream/v8/src/heap-inl.h +703 -0
  196. data/ext/v8/upstream/v8/src/heap-profiler.cc +1173 -0
  197. data/ext/v8/upstream/v8/src/heap-profiler.h +396 -0
  198. data/ext/v8/upstream/v8/src/heap.cc +5856 -0
  199. data/ext/v8/upstream/v8/src/heap.h +2264 -0
  200. data/ext/v8/upstream/v8/src/hydrogen-instructions.cc +1639 -0
  201. data/ext/v8/upstream/v8/src/hydrogen-instructions.h +3657 -0
  202. data/ext/v8/upstream/v8/src/hydrogen.cc +6011 -0
  203. data/ext/v8/upstream/v8/src/hydrogen.h +1137 -0
  204. data/ext/v8/upstream/v8/src/ia32/assembler-ia32-inl.h +430 -0
  205. data/ext/v8/upstream/v8/src/ia32/assembler-ia32.cc +2846 -0
  206. data/ext/v8/upstream/v8/src/ia32/assembler-ia32.h +1159 -0
  207. data/ext/v8/upstream/v8/src/ia32/builtins-ia32.cc +1596 -0
  208. data/ext/v8/upstream/v8/src/ia32/code-stubs-ia32.cc +6549 -0
  209. data/ext/v8/upstream/v8/src/ia32/code-stubs-ia32.h +495 -0
  210. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/codegen-ia32-inl.h +0 -0
  211. data/ext/v8/upstream/v8/src/ia32/codegen-ia32.cc +10385 -0
  212. data/ext/v8/upstream/v8/src/ia32/codegen-ia32.h +801 -0
  213. data/ext/v8/upstream/v8/src/ia32/cpu-ia32.cc +88 -0
  214. data/ext/v8/upstream/v8/src/ia32/debug-ia32.cc +312 -0
  215. data/ext/v8/upstream/v8/src/ia32/deoptimizer-ia32.cc +774 -0
  216. data/ext/v8/upstream/v8/src/ia32/disasm-ia32.cc +1620 -0
  217. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/frames-ia32.cc +0 -0
  218. data/ext/v8/upstream/v8/src/ia32/frames-ia32.h +140 -0
  219. data/ext/v8/upstream/v8/src/ia32/full-codegen-ia32.cc +4357 -0
  220. data/ext/v8/upstream/v8/src/ia32/ic-ia32.cc +1779 -0
  221. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/jump-target-ia32.cc +0 -0
  222. data/ext/v8/upstream/v8/src/ia32/lithium-codegen-ia32.cc +4158 -0
  223. data/ext/v8/upstream/v8/src/ia32/lithium-codegen-ia32.h +318 -0
  224. data/ext/v8/upstream/v8/src/ia32/lithium-gap-resolver-ia32.cc +466 -0
  225. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/lithium-gap-resolver-ia32.h +0 -0
  226. data/ext/v8/upstream/v8/src/ia32/lithium-ia32.cc +2181 -0
  227. data/ext/v8/upstream/v8/src/ia32/lithium-ia32.h +2235 -0
  228. data/ext/v8/upstream/v8/src/ia32/macro-assembler-ia32.cc +2056 -0
  229. data/ext/v8/upstream/v8/src/ia32/macro-assembler-ia32.h +807 -0
  230. data/ext/v8/upstream/v8/src/ia32/regexp-macro-assembler-ia32.cc +1264 -0
  231. data/ext/v8/upstream/v8/src/ia32/regexp-macro-assembler-ia32.h +216 -0
  232. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/register-allocator-ia32-inl.h +0 -0
  233. data/ext/v8/upstream/v8/src/ia32/register-allocator-ia32.cc +157 -0
  234. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/register-allocator-ia32.h +0 -0
  235. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/simulator-ia32.cc +0 -0
  236. data/ext/v8/upstream/v8/src/ia32/simulator-ia32.h +72 -0
  237. data/ext/v8/upstream/v8/src/ia32/stub-cache-ia32.cc +3711 -0
  238. data/ext/v8/upstream/v8/src/ia32/virtual-frame-ia32.cc +1366 -0
  239. data/ext/v8/upstream/v8/src/ia32/virtual-frame-ia32.h +650 -0
  240. data/ext/v8/upstream/v8/src/ic-inl.h +130 -0
  241. data/ext/v8/upstream/v8/src/ic.cc +2389 -0
  242. data/ext/v8/upstream/v8/src/ic.h +675 -0
  243. data/ext/v8/upstream/{3.1.8 → v8}/src/inspector.cc +0 -0
  244. data/ext/v8/upstream/{3.1.8 → v8}/src/inspector.h +0 -0
  245. data/ext/v8/upstream/v8/src/interpreter-irregexp.cc +659 -0
  246. data/ext/v8/upstream/v8/src/interpreter-irregexp.h +49 -0
  247. data/ext/v8/upstream/v8/src/isolate.cc +883 -0
  248. data/ext/v8/upstream/v8/src/isolate.h +1306 -0
  249. data/ext/v8/upstream/v8/src/json.js +342 -0
  250. data/ext/v8/upstream/v8/src/jsregexp.cc +5371 -0
  251. data/ext/v8/upstream/v8/src/jsregexp.h +1483 -0
  252. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target-heavy-inl.h +0 -0
  253. data/ext/v8/upstream/v8/src/jump-target-heavy.cc +427 -0
  254. data/ext/v8/upstream/v8/src/jump-target-heavy.h +238 -0
  255. data/ext/v8/upstream/v8/src/jump-target-inl.h +48 -0
  256. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target-light-inl.h +0 -0
  257. data/ext/v8/upstream/v8/src/jump-target-light.cc +111 -0
  258. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target-light.h +0 -0
  259. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target.cc +0 -0
  260. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target.h +0 -0
  261. data/ext/v8/upstream/{3.1.8 → v8}/src/list-inl.h +0 -0
  262. data/ext/v8/upstream/{3.1.8 → v8}/src/list.h +0 -0
  263. data/ext/v8/upstream/v8/src/lithium-allocator-inl.h +142 -0
  264. data/ext/v8/upstream/v8/src/lithium-allocator.cc +2105 -0
  265. data/ext/v8/upstream/v8/src/lithium-allocator.h +630 -0
  266. data/ext/v8/upstream/v8/src/lithium.cc +169 -0
  267. data/ext/v8/upstream/{3.1.8 → v8}/src/lithium.h +0 -0
  268. data/ext/v8/upstream/{3.1.8 → v8}/src/liveedit-debugger.js +0 -0
  269. data/ext/v8/upstream/v8/src/liveedit.cc +1693 -0
  270. data/ext/v8/upstream/v8/src/liveedit.h +179 -0
  271. data/ext/v8/upstream/{3.1.8 → v8}/src/liveobjectlist-inl.h +0 -0
  272. data/ext/v8/upstream/v8/src/liveobjectlist.cc +2589 -0
  273. data/ext/v8/upstream/v8/src/liveobjectlist.h +322 -0
  274. data/ext/v8/upstream/{3.1.8 → v8}/src/log-inl.h +0 -0
  275. data/ext/v8/upstream/v8/src/log-utils.cc +423 -0
  276. data/ext/v8/upstream/v8/src/log-utils.h +229 -0
  277. data/ext/v8/upstream/v8/src/log.cc +1666 -0
  278. data/ext/v8/upstream/v8/src/log.h +446 -0
  279. data/ext/v8/upstream/{3.1.8 → v8}/src/macro-assembler.h +0 -0
  280. data/ext/v8/upstream/{3.1.8 → v8}/src/macros.py +0 -0
  281. data/ext/v8/upstream/v8/src/mark-compact.cc +3092 -0
  282. data/ext/v8/upstream/v8/src/mark-compact.h +506 -0
  283. data/ext/v8/upstream/{3.1.8 → v8}/src/math.js +0 -0
  284. data/ext/v8/upstream/v8/src/messages.cc +166 -0
  285. data/ext/v8/upstream/{3.1.8 → v8}/src/messages.h +0 -0
  286. data/ext/v8/upstream/v8/src/messages.js +1090 -0
  287. data/ext/v8/upstream/v8/src/mips/assembler-mips-inl.h +335 -0
  288. data/ext/v8/upstream/v8/src/mips/assembler-mips.cc +2093 -0
  289. data/ext/v8/upstream/v8/src/mips/assembler-mips.h +1066 -0
  290. data/ext/v8/upstream/v8/src/mips/builtins-mips.cc +148 -0
  291. data/ext/v8/upstream/v8/src/mips/code-stubs-mips.cc +752 -0
  292. data/ext/v8/upstream/v8/src/mips/code-stubs-mips.h +511 -0
  293. data/ext/v8/upstream/v8/src/mips/codegen-mips-inl.h +64 -0
  294. data/ext/v8/upstream/v8/src/mips/codegen-mips.cc +1213 -0
  295. data/ext/v8/upstream/v8/src/mips/codegen-mips.h +633 -0
  296. data/ext/v8/upstream/v8/src/mips/constants-mips.cc +352 -0
  297. data/ext/v8/upstream/v8/src/mips/constants-mips.h +723 -0
  298. data/ext/v8/upstream/v8/src/mips/cpu-mips.cc +90 -0
  299. data/ext/v8/upstream/v8/src/mips/debug-mips.cc +155 -0
  300. data/ext/v8/upstream/v8/src/mips/deoptimizer-mips.cc +91 -0
  301. data/ext/v8/upstream/v8/src/mips/disasm-mips.cc +1023 -0
  302. data/ext/v8/upstream/v8/src/mips/frames-mips.cc +48 -0
  303. data/ext/v8/upstream/v8/src/mips/frames-mips.h +179 -0
  304. data/ext/v8/upstream/v8/src/mips/full-codegen-mips.cc +727 -0
  305. data/ext/v8/upstream/v8/src/mips/ic-mips.cc +244 -0
  306. data/ext/v8/upstream/v8/src/mips/jump-target-mips.cc +80 -0
  307. data/ext/v8/upstream/v8/src/mips/lithium-codegen-mips.h +65 -0
  308. data/ext/v8/upstream/v8/src/mips/lithium-mips.h +304 -0
  309. data/ext/v8/upstream/v8/src/mips/macro-assembler-mips.cc +3327 -0
  310. data/ext/v8/upstream/v8/src/mips/macro-assembler-mips.h +1058 -0
  311. data/ext/v8/upstream/v8/src/mips/regexp-macro-assembler-mips.cc +478 -0
  312. data/ext/v8/upstream/v8/src/mips/regexp-macro-assembler-mips.h +250 -0
  313. data/ext/v8/upstream/v8/src/mips/register-allocator-mips-inl.h +134 -0
  314. data/ext/v8/upstream/{3.1.8 → v8}/src/mips/register-allocator-mips.cc +0 -0
  315. data/ext/v8/upstream/v8/src/mips/register-allocator-mips.h +47 -0
  316. data/ext/v8/upstream/v8/src/mips/simulator-mips.cc +2438 -0
  317. data/ext/v8/upstream/v8/src/mips/simulator-mips.h +394 -0
  318. data/ext/v8/upstream/v8/src/mips/stub-cache-mips.cc +601 -0
  319. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips-inl.h +58 -0
  320. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips.cc +307 -0
  321. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips.h +530 -0
  322. data/ext/v8/upstream/v8/src/mirror-debugger.js +2381 -0
  323. data/ext/v8/upstream/v8/src/mksnapshot.cc +256 -0
  324. data/ext/v8/upstream/{3.1.8 → v8}/src/natives.h +0 -0
  325. data/ext/v8/upstream/v8/src/objects-debug.cc +722 -0
  326. data/ext/v8/upstream/v8/src/objects-inl.h +4166 -0
  327. data/ext/v8/upstream/v8/src/objects-printer.cc +801 -0
  328. data/ext/v8/upstream/v8/src/objects-visiting.cc +142 -0
  329. data/ext/v8/upstream/v8/src/objects-visiting.h +422 -0
  330. data/ext/v8/upstream/v8/src/objects.cc +10296 -0
  331. data/ext/v8/upstream/v8/src/objects.h +6662 -0
  332. data/ext/v8/upstream/v8/src/parser.cc +5168 -0
  333. data/ext/v8/upstream/v8/src/parser.h +823 -0
  334. data/ext/v8/upstream/v8/src/platform-cygwin.cc +811 -0
  335. data/ext/v8/upstream/v8/src/platform-freebsd.cc +854 -0
  336. data/ext/v8/upstream/v8/src/platform-linux.cc +1120 -0
  337. data/ext/v8/upstream/v8/src/platform-macos.cc +865 -0
  338. data/ext/v8/upstream/v8/src/platform-nullos.cc +504 -0
  339. data/ext/v8/upstream/v8/src/platform-openbsd.cc +672 -0
  340. data/ext/v8/upstream/v8/src/platform-posix.cc +424 -0
  341. data/ext/v8/upstream/v8/src/platform-solaris.cc +796 -0
  342. data/ext/v8/upstream/v8/src/platform-tls-mac.h +62 -0
  343. data/ext/v8/upstream/v8/src/platform-tls-win32.h +62 -0
  344. data/ext/v8/upstream/v8/src/platform-tls.h +50 -0
  345. data/ext/v8/upstream/v8/src/platform-win32.cc +2072 -0
  346. data/ext/v8/upstream/v8/src/platform.h +693 -0
  347. data/ext/v8/upstream/v8/src/preparse-data.cc +185 -0
  348. data/ext/v8/upstream/{3.1.8 → v8}/src/preparse-data.h +0 -0
  349. data/ext/v8/upstream/v8/src/preparser-api.cc +219 -0
  350. data/ext/v8/upstream/v8/src/preparser.cc +1205 -0
  351. data/ext/v8/upstream/{3.1.8 → v8}/src/preparser.h +0 -0
  352. data/ext/v8/upstream/v8/src/prettyprinter.cc +1530 -0
  353. data/ext/v8/upstream/v8/src/prettyprinter.h +223 -0
  354. data/ext/v8/upstream/{3.1.8 → v8}/src/profile-generator-inl.h +0 -0
  355. data/ext/v8/upstream/v8/src/profile-generator.cc +3095 -0
  356. data/ext/v8/upstream/v8/src/profile-generator.h +1125 -0
  357. data/ext/v8/upstream/v8/src/property.cc +102 -0
  358. data/ext/v8/upstream/v8/src/property.h +348 -0
  359. data/ext/v8/upstream/{3.1.8 → v8}/src/regexp-macro-assembler-irregexp-inl.h +0 -0
  360. data/ext/v8/upstream/v8/src/regexp-macro-assembler-irregexp.cc +470 -0
  361. data/ext/v8/upstream/{3.1.8 → v8}/src/regexp-macro-assembler-irregexp.h +0 -0
  362. data/ext/v8/upstream/{3.1.8 → v8}/src/regexp-macro-assembler-tracer.cc +0 -0
  363. data/ext/v8/upstream/{3.1.8 → v8}/src/regexp-macro-assembler-tracer.h +0 -0
  364. data/ext/v8/upstream/v8/src/regexp-macro-assembler.cc +266 -0
  365. data/ext/v8/upstream/v8/src/regexp-macro-assembler.h +236 -0
  366. data/ext/v8/upstream/v8/src/regexp-stack.cc +111 -0
  367. data/ext/v8/upstream/v8/src/regexp-stack.h +147 -0
  368. data/ext/v8/upstream/v8/src/regexp.js +483 -0
  369. data/ext/v8/upstream/v8/src/register-allocator-inl.h +141 -0
  370. data/ext/v8/upstream/v8/src/register-allocator.cc +98 -0
  371. data/ext/v8/upstream/v8/src/register-allocator.h +310 -0
  372. data/ext/v8/upstream/v8/src/rewriter.cc +1024 -0
  373. data/ext/v8/upstream/{3.1.8 → v8}/src/rewriter.h +0 -0
  374. data/ext/v8/upstream/v8/src/runtime-profiler.cc +478 -0
  375. data/ext/v8/upstream/v8/src/runtime-profiler.h +192 -0
  376. data/ext/v8/upstream/v8/src/runtime.cc +11949 -0
  377. data/ext/v8/upstream/v8/src/runtime.h +643 -0
  378. data/ext/v8/upstream/{3.1.8 → v8}/src/runtime.js +0 -0
  379. data/ext/v8/upstream/v8/src/safepoint-table.cc +256 -0
  380. data/ext/v8/upstream/v8/src/safepoint-table.h +269 -0
  381. data/ext/v8/upstream/v8/src/scanner-base.cc +964 -0
  382. data/ext/v8/upstream/v8/src/scanner-base.h +664 -0
  383. data/ext/v8/upstream/v8/src/scanner.cc +584 -0
  384. data/ext/v8/upstream/v8/src/scanner.h +196 -0
  385. data/ext/v8/upstream/v8/src/scopeinfo.cc +631 -0
  386. data/ext/v8/upstream/v8/src/scopeinfo.h +249 -0
  387. data/ext/v8/upstream/v8/src/scopes.cc +1093 -0
  388. data/ext/v8/upstream/v8/src/scopes.h +508 -0
  389. data/ext/v8/upstream/v8/src/serialize.cc +1574 -0
  390. data/ext/v8/upstream/v8/src/serialize.h +589 -0
  391. data/ext/v8/upstream/{3.1.8 → v8}/src/shell.h +0 -0
  392. data/ext/v8/upstream/{3.1.8 → v8}/src/simulator.h +0 -0
  393. data/ext/v8/upstream/v8/src/small-pointer-list.h +163 -0
  394. data/ext/v8/upstream/{3.1.8 → v8}/src/smart-pointer.h +0 -0
  395. data/ext/v8/upstream/v8/src/snapshot-common.cc +82 -0
  396. data/ext/v8/upstream/{3.1.8 → v8}/src/snapshot-empty.cc +0 -0
  397. data/ext/v8/upstream/v8/src/snapshot.h +73 -0
  398. data/ext/v8/upstream/v8/src/spaces-inl.h +529 -0
  399. data/ext/v8/upstream/v8/src/spaces.cc +3147 -0
  400. data/ext/v8/upstream/v8/src/spaces.h +2368 -0
  401. data/ext/v8/upstream/{3.1.8 → v8}/src/splay-tree-inl.h +0 -0
  402. data/ext/v8/upstream/{3.1.8 → v8}/src/splay-tree.h +0 -0
  403. data/ext/v8/upstream/v8/src/string-search.cc +41 -0
  404. data/ext/v8/upstream/v8/src/string-search.h +568 -0
  405. data/ext/v8/upstream/v8/src/string-stream.cc +592 -0
  406. data/ext/v8/upstream/{3.1.8 → v8}/src/string-stream.h +0 -0
  407. data/ext/v8/upstream/v8/src/string.js +915 -0
  408. data/ext/v8/upstream/{3.1.8 → v8}/src/strtod.cc +0 -0
  409. data/ext/v8/upstream/{3.1.8 → v8}/src/strtod.h +0 -0
  410. data/ext/v8/upstream/v8/src/stub-cache.cc +1940 -0
  411. data/ext/v8/upstream/v8/src/stub-cache.h +866 -0
  412. data/ext/v8/upstream/{3.1.8 → v8}/src/third_party/valgrind/valgrind.h +0 -0
  413. data/ext/v8/upstream/v8/src/token.cc +63 -0
  414. data/ext/v8/upstream/v8/src/token.h +288 -0
  415. data/ext/v8/upstream/v8/src/top.cc +983 -0
  416. data/ext/v8/upstream/v8/src/type-info.cc +472 -0
  417. data/ext/v8/upstream/v8/src/type-info.h +290 -0
  418. data/ext/v8/upstream/{3.1.8 → v8}/src/unbound-queue-inl.h +0 -0
  419. data/ext/v8/upstream/{3.1.8 → v8}/src/unbound-queue.h +0 -0
  420. data/ext/v8/upstream/{3.1.8 → v8}/src/unicode-inl.h +0 -0
  421. data/ext/v8/upstream/v8/src/unicode.cc +1624 -0
  422. data/ext/v8/upstream/v8/src/unicode.h +280 -0
  423. data/ext/v8/upstream/{3.1.8 → v8}/src/uri.js +0 -0
  424. data/ext/v8/upstream/{3.1.8 → v8}/src/utils.cc +0 -0
  425. data/ext/v8/upstream/v8/src/utils.h +796 -0
  426. data/ext/v8/upstream/v8/src/v8-counters.cc +62 -0
  427. data/ext/v8/upstream/v8/src/v8-counters.h +311 -0
  428. data/ext/v8/upstream/v8/src/v8.cc +215 -0
  429. data/ext/v8/upstream/v8/src/v8.h +130 -0
  430. data/ext/v8/upstream/{3.1.8 → v8}/src/v8checks.h +0 -0
  431. data/ext/v8/upstream/{3.1.8 → v8}/src/v8dll-main.cc +0 -0
  432. data/ext/v8/upstream/v8/src/v8globals.h +486 -0
  433. data/ext/v8/upstream/{3.1.8/src/memory.h → v8/src/v8memory.h} +0 -0
  434. data/ext/v8/upstream/v8/src/v8natives.js +1293 -0
  435. data/ext/v8/upstream/{3.1.8 → v8}/src/v8preparserdll-main.cc +0 -0
  436. data/ext/v8/upstream/v8/src/v8threads.cc +453 -0
  437. data/ext/v8/upstream/v8/src/v8threads.h +164 -0
  438. data/ext/v8/upstream/v8/src/v8utils.h +317 -0
  439. data/ext/v8/upstream/{3.1.8 → v8}/src/variables.cc +0 -0
  440. data/ext/v8/upstream/v8/src/variables.h +212 -0
  441. data/ext/v8/upstream/v8/src/version.cc +116 -0
  442. data/ext/v8/upstream/v8/src/version.h +68 -0
  443. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame-heavy-inl.h +0 -0
  444. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame-heavy.cc +0 -0
  445. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame-inl.h +0 -0
  446. data/ext/v8/upstream/v8/src/virtual-frame-light-inl.h +171 -0
  447. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame-light.cc +0 -0
  448. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame.cc +0 -0
  449. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame.h +0 -0
  450. data/ext/v8/upstream/v8/src/vm-state-inl.h +138 -0
  451. data/ext/v8/upstream/v8/src/vm-state.h +70 -0
  452. data/ext/v8/upstream/v8/src/win32-headers.h +96 -0
  453. data/ext/v8/upstream/v8/src/x64/assembler-x64-inl.h +456 -0
  454. data/ext/v8/upstream/v8/src/x64/assembler-x64.cc +2954 -0
  455. data/ext/v8/upstream/v8/src/x64/assembler-x64.h +1630 -0
  456. data/ext/v8/upstream/v8/src/x64/builtins-x64.cc +1493 -0
  457. data/ext/v8/upstream/v8/src/x64/code-stubs-x64.cc +5132 -0
  458. data/ext/v8/upstream/v8/src/x64/code-stubs-x64.h +477 -0
  459. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/codegen-x64-inl.h +0 -0
  460. data/ext/v8/upstream/v8/src/x64/codegen-x64.cc +8843 -0
  461. data/ext/v8/upstream/v8/src/x64/codegen-x64.h +753 -0
  462. data/ext/v8/upstream/v8/src/x64/cpu-x64.cc +88 -0
  463. data/ext/v8/upstream/v8/src/x64/debug-x64.cc +318 -0
  464. data/ext/v8/upstream/v8/src/x64/deoptimizer-x64.cc +815 -0
  465. data/ext/v8/upstream/v8/src/x64/disasm-x64.cc +1752 -0
  466. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/frames-x64.cc +0 -0
  467. data/ext/v8/upstream/v8/src/x64/frames-x64.h +130 -0
  468. data/ext/v8/upstream/v8/src/x64/full-codegen-x64.cc +4339 -0
  469. data/ext/v8/upstream/v8/src/x64/ic-x64.cc +1752 -0
  470. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/jump-target-x64.cc +0 -0
  471. data/ext/v8/upstream/v8/src/x64/lithium-codegen-x64.cc +3970 -0
  472. data/ext/v8/upstream/v8/src/x64/lithium-codegen-x64.h +318 -0
  473. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/lithium-gap-resolver-x64.cc +0 -0
  474. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/lithium-gap-resolver-x64.h +0 -0
  475. data/ext/v8/upstream/v8/src/x64/lithium-x64.cc +2115 -0
  476. data/ext/v8/upstream/v8/src/x64/lithium-x64.h +2161 -0
  477. data/ext/v8/upstream/v8/src/x64/macro-assembler-x64.cc +2911 -0
  478. data/ext/v8/upstream/v8/src/x64/macro-assembler-x64.h +1984 -0
  479. data/ext/v8/upstream/v8/src/x64/regexp-macro-assembler-x64.cc +1398 -0
  480. data/ext/v8/upstream/v8/src/x64/regexp-macro-assembler-x64.h +282 -0
  481. data/ext/v8/upstream/v8/src/x64/register-allocator-x64-inl.h +87 -0
  482. data/ext/v8/upstream/v8/src/x64/register-allocator-x64.cc +95 -0
  483. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/register-allocator-x64.h +0 -0
  484. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/simulator-x64.cc +0 -0
  485. data/ext/v8/upstream/v8/src/x64/simulator-x64.h +71 -0
  486. data/ext/v8/upstream/v8/src/x64/stub-cache-x64.cc +3460 -0
  487. data/ext/v8/upstream/v8/src/x64/virtual-frame-x64.cc +1296 -0
  488. data/ext/v8/upstream/v8/src/x64/virtual-frame-x64.h +597 -0
  489. data/ext/v8/upstream/v8/src/zone-inl.h +129 -0
  490. data/ext/v8/upstream/v8/src/zone.cc +196 -0
  491. data/ext/v8/upstream/v8/src/zone.h +236 -0
  492. data/ext/v8/upstream/{3.1.8 → v8}/tools/codemap.js +0 -0
  493. data/ext/v8/upstream/{3.1.8 → v8}/tools/consarray.js +0 -0
  494. data/ext/v8/upstream/{3.1.8 → v8}/tools/csvparser.js +0 -0
  495. data/ext/v8/upstream/{3.1.8 → v8}/tools/disasm.py +0 -0
  496. data/ext/v8/upstream/v8/tools/freebsd-tick-processor +10 -0
  497. data/ext/v8/upstream/{3.1.8 → v8}/tools/gc-nvp-trace-processor.py +0 -0
  498. data/ext/v8/upstream/{3.1.8 → v8}/tools/generate-ten-powers.scm +0 -0
  499. data/ext/v8/upstream/{3.1.8 → v8}/tools/grokdump.py +0 -0
  500. data/ext/v8/upstream/v8/tools/gyp/v8.gyp +844 -0
  501. data/ext/v8/upstream/{3.1.8 → v8}/tools/js2c.py +0 -0
  502. data/ext/v8/upstream/{3.1.8 → v8}/tools/jsmin.py +0 -0
  503. data/ext/v8/upstream/v8/tools/linux-tick-processor +35 -0
  504. data/ext/v8/upstream/{3.1.8 → v8}/tools/ll_prof.py +0 -0
  505. data/ext/v8/upstream/{3.1.8 → v8}/tools/logreader.js +0 -0
  506. data/ext/v8/upstream/{3.1.8 → v8}/tools/mac-nm +0 -0
  507. data/ext/v8/upstream/{3.1.8 → v8}/tools/mac-tick-processor +0 -0
  508. data/ext/v8/upstream/{3.1.8 → v8}/tools/oom_dump/README +0 -0
  509. data/ext/v8/upstream/{3.1.8 → v8}/tools/oom_dump/SConstruct +0 -0
  510. data/ext/v8/upstream/{3.1.8 → v8}/tools/oom_dump/oom_dump.cc +0 -0
  511. data/ext/v8/upstream/{3.1.8 → v8}/tools/presubmit.py +0 -0
  512. data/ext/v8/upstream/{3.1.8 → v8}/tools/process-heap-prof.py +0 -0
  513. data/ext/v8/upstream/{3.1.8 → v8}/tools/profile.js +0 -0
  514. data/ext/v8/upstream/{3.1.8 → v8}/tools/profile_view.js +0 -0
  515. data/ext/v8/upstream/{3.1.8 → v8}/tools/run-valgrind.py +0 -0
  516. data/ext/v8/upstream/{3.1.8 → v8}/tools/splaytree.js +0 -0
  517. data/ext/v8/upstream/{3.1.8 → v8}/tools/stats-viewer.py +0 -0
  518. data/ext/v8/upstream/v8/tools/test.py +1490 -0
  519. data/ext/v8/upstream/{3.1.8 → v8}/tools/tickprocessor-driver.js +0 -0
  520. data/ext/v8/upstream/v8/tools/tickprocessor.js +877 -0
  521. data/ext/v8/upstream/{3.1.8 → v8}/tools/utils.py +0 -0
  522. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/README.txt +0 -0
  523. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/arm.vsprops +0 -0
  524. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/common.vsprops +0 -0
  525. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/d8.vcproj +0 -0
  526. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/d8_arm.vcproj +0 -0
  527. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/d8_x64.vcproj +0 -0
  528. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/d8js2c.cmd +0 -0
  529. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/debug.vsprops +0 -0
  530. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/ia32.vsprops +0 -0
  531. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/js2c.cmd +0 -0
  532. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/release.vsprops +0 -0
  533. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8.sln +0 -0
  534. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8.vcproj +0 -0
  535. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_arm.sln +0 -0
  536. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_arm.vcproj +0 -0
  537. data/ext/v8/upstream/v8/tools/visual_studio/v8_base.vcproj +1308 -0
  538. data/ext/v8/upstream/v8/tools/visual_studio/v8_base_arm.vcproj +1238 -0
  539. data/ext/v8/upstream/v8/tools/visual_studio/v8_base_x64.vcproj +1300 -0
  540. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_cctest.vcproj +0 -0
  541. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_cctest_arm.vcproj +0 -0
  542. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_cctest_x64.vcproj +0 -0
  543. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_mksnapshot.vcproj +0 -0
  544. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_mksnapshot_x64.vcproj +0 -0
  545. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_process_sample.vcproj +0 -0
  546. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_process_sample_arm.vcproj +0 -0
  547. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_process_sample_x64.vcproj +0 -0
  548. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_shell_sample.vcproj +0 -0
  549. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_shell_sample_arm.vcproj +0 -0
  550. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_shell_sample_x64.vcproj +0 -0
  551. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_snapshot.vcproj +0 -0
  552. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_snapshot_cc.vcproj +0 -0
  553. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_snapshot_cc_x64.vcproj +0 -0
  554. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_snapshot_x64.vcproj +0 -0
  555. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_x64.sln +0 -0
  556. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_x64.vcproj +0 -0
  557. data/ext/v8/upstream/v8/tools/visual_studio/x64.vsprops +18 -0
  558. data/ext/v8/upstream/{3.1.8 → v8}/tools/windows-tick-processor.bat +0 -0
  559. data/ext/v8/v8_callbacks.cpp +52 -92
  560. data/ext/v8/v8_date.cpp +2 -3
  561. data/ext/v8/v8_object.cpp +4 -0
  562. data/ext/v8/v8_template.cpp +2 -2
  563. data/ext/v8/v8_try_catch.cpp +8 -38
  564. data/lib/v8/version.rb +1 -1
  565. data/spec/ext/ext_spec_helper.rb +2 -20
  566. data/spec/ext/object_spec.rb +0 -12
  567. data/spec/ext/try_catch_spec.rb +29 -1
  568. data/spec/spec_helper.rb +1 -0
  569. data/spec/v8/portal/proxies_spec.rb +1 -84
  570. data/specmem/handle_memspec.rb +41 -0
  571. data/specmem/object_memspec.rb +16 -0
  572. data/specmem/proxies_memspec.rb +86 -0
  573. data/specmem/spec_helper.rb +24 -0
  574. data/therubyracer.gemspec +7 -2
  575. metadata +564 -541
  576. data/ext/v8/upstream/3.1.8/.gitignore +0 -31
  577. data/ext/v8/upstream/3.1.8/AUTHORS +0 -40
  578. data/ext/v8/upstream/3.1.8/ChangeLog +0 -2566
  579. data/ext/v8/upstream/3.1.8/SConstruct +0 -1192
  580. data/ext/v8/upstream/3.1.8/include/v8-debug.h +0 -384
  581. data/ext/v8/upstream/3.1.8/include/v8-preparser.h +0 -116
  582. data/ext/v8/upstream/3.1.8/include/v8-profiler.h +0 -426
  583. data/ext/v8/upstream/3.1.8/include/v8-testing.h +0 -99
  584. data/ext/v8/upstream/3.1.8/include/v8.h +0 -3846
  585. data/ext/v8/upstream/3.1.8/preparser/preparser-process.cc +0 -206
  586. data/ext/v8/upstream/3.1.8/src/SConscript +0 -356
  587. data/ext/v8/upstream/3.1.8/src/accessors.cc +0 -907
  588. data/ext/v8/upstream/3.1.8/src/allocation.cc +0 -204
  589. data/ext/v8/upstream/3.1.8/src/allocation.h +0 -176
  590. data/ext/v8/upstream/3.1.8/src/api.cc +0 -5191
  591. data/ext/v8/upstream/3.1.8/src/api.h +0 -508
  592. data/ext/v8/upstream/3.1.8/src/apiutils.h +0 -80
  593. data/ext/v8/upstream/3.1.8/src/arguments.h +0 -105
  594. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm-inl.h +0 -352
  595. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.cc +0 -2756
  596. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.h +0 -1294
  597. data/ext/v8/upstream/3.1.8/src/arm/builtins-arm.cc +0 -1628
  598. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.cc +0 -6783
  599. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.h +0 -657
  600. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.cc +0 -7403
  601. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.h +0 -595
  602. data/ext/v8/upstream/3.1.8/src/arm/constants-arm.h +0 -769
  603. data/ext/v8/upstream/3.1.8/src/arm/cpu-arm.cc +0 -147
  604. data/ext/v8/upstream/3.1.8/src/arm/debug-arm.cc +0 -315
  605. data/ext/v8/upstream/3.1.8/src/arm/deoptimizer-arm.cc +0 -700
  606. data/ext/v8/upstream/3.1.8/src/arm/disasm-arm.cc +0 -1439
  607. data/ext/v8/upstream/3.1.8/src/arm/frames-arm.h +0 -168
  608. data/ext/v8/upstream/3.1.8/src/arm/full-codegen-arm.cc +0 -4230
  609. data/ext/v8/upstream/3.1.8/src/arm/ic-arm.cc +0 -1799
  610. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.cc +0 -2041
  611. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.h +0 -2046
  612. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.cc +0 -3822
  613. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.h +0 -312
  614. data/ext/v8/upstream/3.1.8/src/arm/lithium-gap-resolver-arm.cc +0 -303
  615. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.cc +0 -2701
  616. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.h +0 -1015
  617. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.cc +0 -1280
  618. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.h +0 -252
  619. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.cc +0 -3165
  620. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.h +0 -402
  621. data/ext/v8/upstream/3.1.8/src/arm/stub-cache-arm.cc +0 -4077
  622. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.cc +0 -843
  623. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.h +0 -520
  624. data/ext/v8/upstream/3.1.8/src/array.js +0 -1231
  625. data/ext/v8/upstream/3.1.8/src/assembler.cc +0 -973
  626. data/ext/v8/upstream/3.1.8/src/assembler.h +0 -787
  627. data/ext/v8/upstream/3.1.8/src/ast-inl.h +0 -107
  628. data/ext/v8/upstream/3.1.8/src/ast.cc +0 -1067
  629. data/ext/v8/upstream/3.1.8/src/ast.h +0 -2177
  630. data/ext/v8/upstream/3.1.8/src/atomicops.h +0 -165
  631. data/ext/v8/upstream/3.1.8/src/bootstrapper.cc +0 -1888
  632. data/ext/v8/upstream/3.1.8/src/bootstrapper.h +0 -118
  633. data/ext/v8/upstream/3.1.8/src/builtins.cc +0 -1586
  634. data/ext/v8/upstream/3.1.8/src/builtins.h +0 -339
  635. data/ext/v8/upstream/3.1.8/src/checks.cc +0 -110
  636. data/ext/v8/upstream/3.1.8/src/checks.h +0 -292
  637. data/ext/v8/upstream/3.1.8/src/code-stubs.cc +0 -230
  638. data/ext/v8/upstream/3.1.8/src/code-stubs.h +0 -950
  639. data/ext/v8/upstream/3.1.8/src/codegen-inl.h +0 -64
  640. data/ext/v8/upstream/3.1.8/src/codegen.cc +0 -495
  641. data/ext/v8/upstream/3.1.8/src/codegen.h +0 -245
  642. data/ext/v8/upstream/3.1.8/src/compilation-cache.cc +0 -654
  643. data/ext/v8/upstream/3.1.8/src/compilation-cache.h +0 -112
  644. data/ext/v8/upstream/3.1.8/src/compiler.cc +0 -806
  645. data/ext/v8/upstream/3.1.8/src/compiler.h +0 -290
  646. data/ext/v8/upstream/3.1.8/src/contexts.cc +0 -320
  647. data/ext/v8/upstream/3.1.8/src/contexts.h +0 -376
  648. data/ext/v8/upstream/3.1.8/src/conversions.cc +0 -1069
  649. data/ext/v8/upstream/3.1.8/src/counters.cc +0 -78
  650. data/ext/v8/upstream/3.1.8/src/counters.h +0 -242
  651. data/ext/v8/upstream/3.1.8/src/cpu-profiler-inl.h +0 -100
  652. data/ext/v8/upstream/3.1.8/src/cpu-profiler.cc +0 -554
  653. data/ext/v8/upstream/3.1.8/src/cpu-profiler.h +0 -291
  654. data/ext/v8/upstream/3.1.8/src/cpu.h +0 -65
  655. data/ext/v8/upstream/3.1.8/src/d8-debug.cc +0 -367
  656. data/ext/v8/upstream/3.1.8/src/d8-debug.h +0 -157
  657. data/ext/v8/upstream/3.1.8/src/d8-posix.cc +0 -693
  658. data/ext/v8/upstream/3.1.8/src/d8.cc +0 -792
  659. data/ext/v8/upstream/3.1.8/src/d8.gyp +0 -85
  660. data/ext/v8/upstream/3.1.8/src/data-flow.h +0 -379
  661. data/ext/v8/upstream/3.1.8/src/dateparser.h +0 -263
  662. data/ext/v8/upstream/3.1.8/src/debug-agent.cc +0 -446
  663. data/ext/v8/upstream/3.1.8/src/debug-agent.h +0 -131
  664. data/ext/v8/upstream/3.1.8/src/debug.cc +0 -3085
  665. data/ext/v8/upstream/3.1.8/src/debug.h +0 -1025
  666. data/ext/v8/upstream/3.1.8/src/deoptimizer.cc +0 -1185
  667. data/ext/v8/upstream/3.1.8/src/deoptimizer.h +0 -529
  668. data/ext/v8/upstream/3.1.8/src/disasm.h +0 -77
  669. data/ext/v8/upstream/3.1.8/src/disassembler.cc +0 -338
  670. data/ext/v8/upstream/3.1.8/src/execution.cc +0 -735
  671. data/ext/v8/upstream/3.1.8/src/execution.h +0 -322
  672. data/ext/v8/upstream/3.1.8/src/extensions/experimental/experimental.gyp +0 -53
  673. data/ext/v8/upstream/3.1.8/src/extensions/experimental/i18n-extension.cc +0 -264
  674. data/ext/v8/upstream/3.1.8/src/extensions/externalize-string-extension.cc +0 -141
  675. data/ext/v8/upstream/3.1.8/src/extensions/gc-extension.cc +0 -58
  676. data/ext/v8/upstream/3.1.8/src/factory.cc +0 -1087
  677. data/ext/v8/upstream/3.1.8/src/factory.h +0 -432
  678. data/ext/v8/upstream/3.1.8/src/flag-definitions.h +0 -552
  679. data/ext/v8/upstream/3.1.8/src/frame-element.cc +0 -42
  680. data/ext/v8/upstream/3.1.8/src/frame-element.h +0 -277
  681. data/ext/v8/upstream/3.1.8/src/frames-inl.h +0 -210
  682. data/ext/v8/upstream/3.1.8/src/frames.cc +0 -1232
  683. data/ext/v8/upstream/3.1.8/src/frames.h +0 -826
  684. data/ext/v8/upstream/3.1.8/src/full-codegen.cc +0 -1382
  685. data/ext/v8/upstream/3.1.8/src/full-codegen.h +0 -751
  686. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.cc +0 -90
  687. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.h +0 -111
  688. data/ext/v8/upstream/3.1.8/src/gdb-jit.cc +0 -1547
  689. data/ext/v8/upstream/3.1.8/src/global-handles.cc +0 -534
  690. data/ext/v8/upstream/3.1.8/src/global-handles.h +0 -181
  691. data/ext/v8/upstream/3.1.8/src/globals.h +0 -325
  692. data/ext/v8/upstream/3.1.8/src/handles-inl.h +0 -80
  693. data/ext/v8/upstream/3.1.8/src/handles.cc +0 -910
  694. data/ext/v8/upstream/3.1.8/src/handles.h +0 -424
  695. data/ext/v8/upstream/3.1.8/src/hashmap.h +0 -121
  696. data/ext/v8/upstream/3.1.8/src/heap-inl.h +0 -587
  697. data/ext/v8/upstream/3.1.8/src/heap-profiler.cc +0 -1128
  698. data/ext/v8/upstream/3.1.8/src/heap-profiler.h +0 -381
  699. data/ext/v8/upstream/3.1.8/src/heap.cc +0 -5610
  700. data/ext/v8/upstream/3.1.8/src/heap.h +0 -2218
  701. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.cc +0 -1490
  702. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.h +0 -3493
  703. data/ext/v8/upstream/3.1.8/src/hydrogen.cc +0 -6056
  704. data/ext/v8/upstream/3.1.8/src/hydrogen.h +0 -1091
  705. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32-inl.h +0 -429
  706. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.cc +0 -2800
  707. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.h +0 -1093
  708. data/ext/v8/upstream/3.1.8/src/ia32/builtins-ia32.cc +0 -1590
  709. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.cc +0 -6624
  710. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.h +0 -536
  711. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.cc +0 -10354
  712. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.h +0 -798
  713. data/ext/v8/upstream/3.1.8/src/ia32/cpu-ia32.cc +0 -87
  714. data/ext/v8/upstream/3.1.8/src/ia32/debug-ia32.cc +0 -309
  715. data/ext/v8/upstream/3.1.8/src/ia32/deoptimizer-ia32.cc +0 -664
  716. data/ext/v8/upstream/3.1.8/src/ia32/disasm-ia32.cc +0 -1597
  717. data/ext/v8/upstream/3.1.8/src/ia32/frames-ia32.h +0 -140
  718. data/ext/v8/upstream/3.1.8/src/ia32/full-codegen-ia32.cc +0 -4278
  719. data/ext/v8/upstream/3.1.8/src/ia32/ic-ia32.cc +0 -1786
  720. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.cc +0 -3880
  721. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.h +0 -309
  722. data/ext/v8/upstream/3.1.8/src/ia32/lithium-gap-resolver-ia32.cc +0 -460
  723. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.cc +0 -2095
  724. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.h +0 -2127
  725. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.cc +0 -2031
  726. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.h +0 -798
  727. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.cc +0 -1253
  728. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.h +0 -215
  729. data/ext/v8/upstream/3.1.8/src/ia32/register-allocator-ia32.cc +0 -157
  730. data/ext/v8/upstream/3.1.8/src/ia32/simulator-ia32.h +0 -72
  731. data/ext/v8/upstream/3.1.8/src/ia32/stub-cache-ia32.cc +0 -3732
  732. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.cc +0 -1360
  733. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.h +0 -646
  734. data/ext/v8/upstream/3.1.8/src/ic-inl.h +0 -129
  735. data/ext/v8/upstream/3.1.8/src/ic.cc +0 -2333
  736. data/ext/v8/upstream/3.1.8/src/ic.h +0 -639
  737. data/ext/v8/upstream/3.1.8/src/interpreter-irregexp.cc +0 -655
  738. data/ext/v8/upstream/3.1.8/src/interpreter-irregexp.h +0 -48
  739. data/ext/v8/upstream/3.1.8/src/json.js +0 -342
  740. data/ext/v8/upstream/3.1.8/src/jsregexp.cc +0 -5340
  741. data/ext/v8/upstream/3.1.8/src/jsregexp.h +0 -1484
  742. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.cc +0 -430
  743. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.h +0 -244
  744. data/ext/v8/upstream/3.1.8/src/jump-target-inl.h +0 -48
  745. data/ext/v8/upstream/3.1.8/src/jump-target-light.cc +0 -111
  746. data/ext/v8/upstream/3.1.8/src/lithium-allocator-inl.h +0 -140
  747. data/ext/v8/upstream/3.1.8/src/lithium-allocator.cc +0 -2093
  748. data/ext/v8/upstream/3.1.8/src/lithium-allocator.h +0 -644
  749. data/ext/v8/upstream/3.1.8/src/lithium.cc +0 -168
  750. data/ext/v8/upstream/3.1.8/src/liveedit.cc +0 -1650
  751. data/ext/v8/upstream/3.1.8/src/liveedit.h +0 -174
  752. data/ext/v8/upstream/3.1.8/src/liveobjectlist.cc +0 -2527
  753. data/ext/v8/upstream/3.1.8/src/liveobjectlist.h +0 -322
  754. data/ext/v8/upstream/3.1.8/src/log-utils.cc +0 -336
  755. data/ext/v8/upstream/3.1.8/src/log-utils.h +0 -232
  756. data/ext/v8/upstream/3.1.8/src/log.cc +0 -1608
  757. data/ext/v8/upstream/3.1.8/src/log.h +0 -379
  758. data/ext/v8/upstream/3.1.8/src/mark-compact.cc +0 -2957
  759. data/ext/v8/upstream/3.1.8/src/mark-compact.h +0 -433
  760. data/ext/v8/upstream/3.1.8/src/messages.cc +0 -164
  761. data/ext/v8/upstream/3.1.8/src/messages.js +0 -1071
  762. data/ext/v8/upstream/3.1.8/src/mips/assembler-mips-inl.h +0 -215
  763. data/ext/v8/upstream/3.1.8/src/mips/assembler-mips.cc +0 -1219
  764. data/ext/v8/upstream/3.1.8/src/mips/assembler-mips.h +0 -667
  765. data/ext/v8/upstream/3.1.8/src/mips/builtins-mips.cc +0 -205
  766. data/ext/v8/upstream/3.1.8/src/mips/codegen-mips-inl.h +0 -70
  767. data/ext/v8/upstream/3.1.8/src/mips/codegen-mips.cc +0 -1437
  768. data/ext/v8/upstream/3.1.8/src/mips/codegen-mips.h +0 -431
  769. data/ext/v8/upstream/3.1.8/src/mips/constants-mips.cc +0 -328
  770. data/ext/v8/upstream/3.1.8/src/mips/constants-mips.h +0 -525
  771. data/ext/v8/upstream/3.1.8/src/mips/cpu-mips.cc +0 -73
  772. data/ext/v8/upstream/3.1.8/src/mips/debug-mips.cc +0 -127
  773. data/ext/v8/upstream/3.1.8/src/mips/disasm-mips.cc +0 -787
  774. data/ext/v8/upstream/3.1.8/src/mips/fast-codegen-mips.cc +0 -77
  775. data/ext/v8/upstream/3.1.8/src/mips/frames-mips.cc +0 -96
  776. data/ext/v8/upstream/3.1.8/src/mips/frames-mips.h +0 -164
  777. data/ext/v8/upstream/3.1.8/src/mips/full-codegen-mips.cc +0 -277
  778. data/ext/v8/upstream/3.1.8/src/mips/ic-mips.cc +0 -208
  779. data/ext/v8/upstream/3.1.8/src/mips/jump-target-mips.cc +0 -175
  780. data/ext/v8/upstream/3.1.8/src/mips/macro-assembler-mips.cc +0 -1326
  781. data/ext/v8/upstream/3.1.8/src/mips/macro-assembler-mips.h +0 -461
  782. data/ext/v8/upstream/3.1.8/src/mips/register-allocator-mips-inl.h +0 -137
  783. data/ext/v8/upstream/3.1.8/src/mips/register-allocator-mips.h +0 -46
  784. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.cc +0 -1650
  785. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.h +0 -311
  786. data/ext/v8/upstream/3.1.8/src/mips/stub-cache-mips.cc +0 -418
  787. data/ext/v8/upstream/3.1.8/src/mips/virtual-frame-mips.cc +0 -319
  788. data/ext/v8/upstream/3.1.8/src/mips/virtual-frame-mips.h +0 -548
  789. data/ext/v8/upstream/3.1.8/src/mirror-debugger.js +0 -2380
  790. data/ext/v8/upstream/3.1.8/src/mksnapshot.cc +0 -256
  791. data/ext/v8/upstream/3.1.8/src/objects-debug.cc +0 -722
  792. data/ext/v8/upstream/3.1.8/src/objects-inl.h +0 -3946
  793. data/ext/v8/upstream/3.1.8/src/objects-printer.cc +0 -801
  794. data/ext/v8/upstream/3.1.8/src/objects-visiting.cc +0 -142
  795. data/ext/v8/upstream/3.1.8/src/objects-visiting.h +0 -401
  796. data/ext/v8/upstream/3.1.8/src/objects.cc +0 -10044
  797. data/ext/v8/upstream/3.1.8/src/objects.h +0 -6571
  798. data/ext/v8/upstream/3.1.8/src/parser.cc +0 -5165
  799. data/ext/v8/upstream/3.1.8/src/parser.h +0 -802
  800. data/ext/v8/upstream/3.1.8/src/platform-cygwin.cc +0 -745
  801. data/ext/v8/upstream/3.1.8/src/platform-freebsd.cc +0 -702
  802. data/ext/v8/upstream/3.1.8/src/platform-linux.cc +0 -981
  803. data/ext/v8/upstream/3.1.8/src/platform-macos.cc +0 -732
  804. data/ext/v8/upstream/3.1.8/src/platform-nullos.cc +0 -498
  805. data/ext/v8/upstream/3.1.8/src/platform-openbsd.cc +0 -657
  806. data/ext/v8/upstream/3.1.8/src/platform-posix.cc +0 -399
  807. data/ext/v8/upstream/3.1.8/src/platform-solaris.cc +0 -714
  808. data/ext/v8/upstream/3.1.8/src/platform-win32.cc +0 -1974
  809. data/ext/v8/upstream/3.1.8/src/platform.h +0 -636
  810. data/ext/v8/upstream/3.1.8/src/preparse-data.cc +0 -183
  811. data/ext/v8/upstream/3.1.8/src/preparser-api.cc +0 -213
  812. data/ext/v8/upstream/3.1.8/src/preparser.cc +0 -1205
  813. data/ext/v8/upstream/3.1.8/src/prettyprinter.cc +0 -1539
  814. data/ext/v8/upstream/3.1.8/src/prettyprinter.h +0 -223
  815. data/ext/v8/upstream/3.1.8/src/profile-generator.cc +0 -2899
  816. data/ext/v8/upstream/3.1.8/src/profile-generator.h +0 -1151
  817. data/ext/v8/upstream/3.1.8/src/property.cc +0 -96
  818. data/ext/v8/upstream/3.1.8/src/property.h +0 -337
  819. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-irregexp.cc +0 -470
  820. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.cc +0 -257
  821. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.h +0 -231
  822. data/ext/v8/upstream/3.1.8/src/regexp-stack.cc +0 -103
  823. data/ext/v8/upstream/3.1.8/src/regexp-stack.h +0 -123
  824. data/ext/v8/upstream/3.1.8/src/regexp.js +0 -483
  825. data/ext/v8/upstream/3.1.8/src/register-allocator-inl.h +0 -141
  826. data/ext/v8/upstream/3.1.8/src/register-allocator.cc +0 -104
  827. data/ext/v8/upstream/3.1.8/src/register-allocator.h +0 -320
  828. data/ext/v8/upstream/3.1.8/src/rewriter.cc +0 -1023
  829. data/ext/v8/upstream/3.1.8/src/runtime-profiler.cc +0 -443
  830. data/ext/v8/upstream/3.1.8/src/runtime-profiler.h +0 -77
  831. data/ext/v8/upstream/3.1.8/src/runtime.cc +0 -11592
  832. data/ext/v8/upstream/3.1.8/src/runtime.h +0 -582
  833. data/ext/v8/upstream/3.1.8/src/safepoint-table.cc +0 -253
  834. data/ext/v8/upstream/3.1.8/src/safepoint-table.h +0 -263
  835. data/ext/v8/upstream/3.1.8/src/scanner-base.cc +0 -971
  836. data/ext/v8/upstream/3.1.8/src/scanner-base.h +0 -653
  837. data/ext/v8/upstream/3.1.8/src/scanner.cc +0 -586
  838. data/ext/v8/upstream/3.1.8/src/scanner.h +0 -194
  839. data/ext/v8/upstream/3.1.8/src/scopeinfo.cc +0 -636
  840. data/ext/v8/upstream/3.1.8/src/scopeinfo.h +0 -238
  841. data/ext/v8/upstream/3.1.8/src/scopes.cc +0 -1063
  842. data/ext/v8/upstream/3.1.8/src/scopes.h +0 -494
  843. data/ext/v8/upstream/3.1.8/src/serialize.cc +0 -1535
  844. data/ext/v8/upstream/3.1.8/src/serialize.h +0 -584
  845. data/ext/v8/upstream/3.1.8/src/snapshot-common.cc +0 -82
  846. data/ext/v8/upstream/3.1.8/src/snapshot.h +0 -71
  847. data/ext/v8/upstream/3.1.8/src/spaces-inl.h +0 -524
  848. data/ext/v8/upstream/3.1.8/src/spaces.cc +0 -3254
  849. data/ext/v8/upstream/3.1.8/src/spaces.h +0 -2362
  850. data/ext/v8/upstream/3.1.8/src/string-search.cc +0 -40
  851. data/ext/v8/upstream/3.1.8/src/string-search.h +0 -567
  852. data/ext/v8/upstream/3.1.8/src/string-stream.cc +0 -584
  853. data/ext/v8/upstream/3.1.8/src/string.js +0 -915
  854. data/ext/v8/upstream/3.1.8/src/stub-cache.cc +0 -1878
  855. data/ext/v8/upstream/3.1.8/src/stub-cache.h +0 -849
  856. data/ext/v8/upstream/3.1.8/src/token.cc +0 -63
  857. data/ext/v8/upstream/3.1.8/src/token.h +0 -288
  858. data/ext/v8/upstream/3.1.8/src/top.cc +0 -1152
  859. data/ext/v8/upstream/3.1.8/src/top.h +0 -608
  860. data/ext/v8/upstream/3.1.8/src/type-info.cc +0 -406
  861. data/ext/v8/upstream/3.1.8/src/type-info.h +0 -283
  862. data/ext/v8/upstream/3.1.8/src/unicode.cc +0 -1624
  863. data/ext/v8/upstream/3.1.8/src/unicode.h +0 -280
  864. data/ext/v8/upstream/3.1.8/src/utils.h +0 -793
  865. data/ext/v8/upstream/3.1.8/src/v8-counters.cc +0 -55
  866. data/ext/v8/upstream/3.1.8/src/v8-counters.h +0 -290
  867. data/ext/v8/upstream/3.1.8/src/v8.cc +0 -270
  868. data/ext/v8/upstream/3.1.8/src/v8.h +0 -127
  869. data/ext/v8/upstream/3.1.8/src/v8globals.h +0 -480
  870. data/ext/v8/upstream/3.1.8/src/v8natives.js +0 -1252
  871. data/ext/v8/upstream/3.1.8/src/v8threads.cc +0 -440
  872. data/ext/v8/upstream/3.1.8/src/v8threads.h +0 -157
  873. data/ext/v8/upstream/3.1.8/src/v8utils.h +0 -354
  874. data/ext/v8/upstream/3.1.8/src/variables.h +0 -212
  875. data/ext/v8/upstream/3.1.8/src/version.cc +0 -95
  876. data/ext/v8/upstream/3.1.8/src/version.h +0 -64
  877. data/ext/v8/upstream/3.1.8/src/virtual-frame-light-inl.h +0 -170
  878. data/ext/v8/upstream/3.1.8/src/vm-state-inl.h +0 -134
  879. data/ext/v8/upstream/3.1.8/src/vm-state.h +0 -68
  880. data/ext/v8/upstream/3.1.8/src/win32-headers.h +0 -95
  881. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64-inl.h +0 -455
  882. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.cc +0 -3162
  883. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.h +0 -1584
  884. data/ext/v8/upstream/3.1.8/src/x64/builtins-x64.cc +0 -1492
  885. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.cc +0 -5150
  886. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.h +0 -519
  887. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.cc +0 -8835
  888. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.h +0 -750
  889. data/ext/v8/upstream/3.1.8/src/x64/cpu-x64.cc +0 -86
  890. data/ext/v8/upstream/3.1.8/src/x64/debug-x64.cc +0 -316
  891. data/ext/v8/upstream/3.1.8/src/x64/deoptimizer-x64.cc +0 -781
  892. data/ext/v8/upstream/3.1.8/src/x64/disasm-x64.cc +0 -1737
  893. data/ext/v8/upstream/3.1.8/src/x64/frames-x64.h +0 -130
  894. data/ext/v8/upstream/3.1.8/src/x64/full-codegen-x64.cc +0 -3984
  895. data/ext/v8/upstream/3.1.8/src/x64/ic-x64.cc +0 -1761
  896. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.cc +0 -3639
  897. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.h +0 -305
  898. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.cc +0 -2044
  899. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.h +0 -2052
  900. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.cc +0 -2660
  901. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.h +0 -1852
  902. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.cc +0 -1382
  903. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.h +0 -278
  904. data/ext/v8/upstream/3.1.8/src/x64/register-allocator-x64-inl.h +0 -87
  905. data/ext/v8/upstream/3.1.8/src/x64/register-allocator-x64.cc +0 -91
  906. data/ext/v8/upstream/3.1.8/src/x64/simulator-x64.h +0 -71
  907. data/ext/v8/upstream/3.1.8/src/x64/stub-cache-x64.cc +0 -3509
  908. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.cc +0 -1292
  909. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.h +0 -593
  910. data/ext/v8/upstream/3.1.8/src/zone-inl.h +0 -83
  911. data/ext/v8/upstream/3.1.8/src/zone.cc +0 -195
  912. data/ext/v8/upstream/3.1.8/src/zone.h +0 -233
  913. data/ext/v8/upstream/3.1.8/tools/gyp/v8.gyp +0 -869
  914. data/ext/v8/upstream/3.1.8/tools/linux-tick-processor +0 -33
  915. data/ext/v8/upstream/3.1.8/tools/tickprocessor.js +0 -863
  916. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base.vcproj +0 -1296
  917. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_arm.vcproj +0 -1234
  918. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_x64.vcproj +0 -1296
  919. data/ext/v8/upstream/3.1.8/tools/visual_studio/x64.vsprops +0 -17
  920. data/spec/ext/mem_spec.rb +0 -42
@@ -0,0 +1,2939 @@
+ // Copyright 2011 the V8 project authors. All rights reserved.
+ // Redistribution and use in source and binary forms, with or without
+ // modification, are permitted provided that the following conditions are
+ // met:
+ //
+ //     * Redistributions of source code must retain the above copyright
+ //       notice, this list of conditions and the following disclaimer.
+ //     * Redistributions in binary form must reproduce the above
+ //       copyright notice, this list of conditions and the following
+ //       disclaimer in the documentation and/or other materials provided
+ //       with the distribution.
+ //     * Neither the name of Google Inc. nor the names of its
+ //       contributors may be used to endorse or promote products derived
+ //       from this software without specific prior written permission.
+ //
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ #include <limits.h>  // For LONG_MIN, LONG_MAX.
+
+ #include "v8.h"
+
+ #if defined(V8_TARGET_ARCH_ARM)
+
+ #include "bootstrapper.h"
+ #include "codegen-inl.h"
+ #include "debug.h"
+ #include "runtime.h"
+
+ namespace v8 {
+ namespace internal {
+
+ MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
+     : Assembler(arg_isolate, buffer, size),
+       generating_stub_(false),
+       allow_stub_calls_(true) {
+   if (isolate() != NULL) {
+     code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
+                                   isolate());
+   }
+ }
+
+
+ // We always generate arm code, never thumb code, even if V8 is compiled to
+ // thumb, so we require inter-working support.
+ #if defined(__thumb__) && !defined(USE_THUMB_INTERWORK)
+ #error "flag -mthumb-interwork missing"
+ #endif
+
+
+ // We do not support thumb inter-working with an arm architecture not supporting
+ // the blx instruction (below v5t). If you know what CPU you are compiling for
+ // you can use -march=armv7 or similar.
+ #if defined(USE_THUMB_INTERWORK) && !defined(CAN_USE_THUMB_INSTRUCTIONS)
+ # error "For thumb inter-working we require an architecture which supports blx"
+ #endif
+
+
+ // Using bx does not yield better code, so use it only when required.
+ #if defined(USE_THUMB_INTERWORK)
+ #define USE_BX 1
+ #endif
+
+
+ void MacroAssembler::Jump(Register target, Condition cond) {
+ #if USE_BX
+   bx(target, cond);
+ #else
+   mov(pc, Operand(target), LeaveCC, cond);
+ #endif
+ }
+
+
+ void MacroAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
+                           Condition cond) {
+ #if USE_BX
+   mov(ip, Operand(target, rmode), LeaveCC, cond);
+   bx(ip, cond);
+ #else
+   mov(pc, Operand(target, rmode), LeaveCC, cond);
+ #endif
+ }
+
+
+ void MacroAssembler::Jump(byte* target, RelocInfo::Mode rmode,
+                           Condition cond) {
+   ASSERT(!RelocInfo::IsCodeTarget(rmode));
+   Jump(reinterpret_cast<intptr_t>(target), rmode, cond);
+ }
+
+
+ void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
+                           Condition cond) {
+   ASSERT(RelocInfo::IsCodeTarget(rmode));
+   // 'code' is always generated ARM code, never THUMB code.
+   Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
+ }
+
+
+ int MacroAssembler::CallSize(Register target, Condition cond) {
+ #if USE_BLX
+   return kInstrSize;
+ #else
+   return 2 * kInstrSize;
+ #endif
+ }
+
+
+ void MacroAssembler::Call(Register target, Condition cond) {
+   // Block constant pool for the call instruction sequence.
+   BlockConstPoolScope block_const_pool(this);
+ #ifdef DEBUG
+   int pre_position = pc_offset();
+ #endif
+
+ #if USE_BLX
+   blx(target, cond);
+ #else
+   // Set lr for return at current pc + 8.
+   mov(lr, Operand(pc), LeaveCC, cond);
+   mov(pc, Operand(target), LeaveCC, cond);
+ #endif
+
+ #ifdef DEBUG
+   int post_position = pc_offset();
+   CHECK_EQ(pre_position + CallSize(target, cond), post_position);
+ #endif
+ }
+
+
+ int MacroAssembler::CallSize(
+     intptr_t target, RelocInfo::Mode rmode, Condition cond) {
+   int size = 2 * kInstrSize;
+   Instr mov_instr = cond | MOV | LeaveCC;
+   if (!Operand(target, rmode).is_single_instruction(mov_instr)) {
+     size += kInstrSize;
+   }
+   return size;
+ }
+
+
+ void MacroAssembler::Call(
+     intptr_t target, RelocInfo::Mode rmode, Condition cond) {
+   // Block constant pool for the call instruction sequence.
+   BlockConstPoolScope block_const_pool(this);
+ #ifdef DEBUG
+   int pre_position = pc_offset();
+ #endif
+
+ #if USE_BLX
+   // On ARMv5 and after the recommended call sequence is:
+   //   ldr ip, [pc, #...]
+   //   blx ip
+
+   // Statement positions are expected to be recorded when the target
+   // address is loaded. The mov method will automatically record
+   // positions when pc is the target. Since this is not the case here,
+   // we have to do it explicitly.
+   positions_recorder()->WriteRecordedPositions();
+
+   mov(ip, Operand(target, rmode), LeaveCC, cond);
+   blx(ip, cond);
+
+   ASSERT(kCallTargetAddressOffset == 2 * kInstrSize);
+ #else
+   // Set lr for return at current pc + 8.
+   mov(lr, Operand(pc), LeaveCC, cond);
+   // Emit a ldr<cond> pc, [pc + offset of target in constant pool].
+   mov(pc, Operand(target, rmode), LeaveCC, cond);
+   ASSERT(kCallTargetAddressOffset == kInstrSize);
+ #endif
+
+ #ifdef DEBUG
+   int post_position = pc_offset();
+   CHECK_EQ(pre_position + CallSize(target, rmode, cond), post_position);
+ #endif
+ }
+
+
+ int MacroAssembler::CallSize(
+     byte* target, RelocInfo::Mode rmode, Condition cond) {
+   return CallSize(reinterpret_cast<intptr_t>(target), rmode);
+ }
+
+
+ void MacroAssembler::Call(
+     byte* target, RelocInfo::Mode rmode, Condition cond) {
+ #ifdef DEBUG
+   int pre_position = pc_offset();
+ #endif
+
+   ASSERT(!RelocInfo::IsCodeTarget(rmode));
+   Call(reinterpret_cast<intptr_t>(target), rmode, cond);
+
+ #ifdef DEBUG
+   int post_position = pc_offset();
+   CHECK_EQ(pre_position + CallSize(target, rmode, cond), post_position);
+ #endif
+ }
+
+
+ int MacroAssembler::CallSize(
+     Handle<Code> code, RelocInfo::Mode rmode, Condition cond) {
+   return CallSize(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
+ }
+
+
+ void MacroAssembler::Call(
+     Handle<Code> code, RelocInfo::Mode rmode, Condition cond) {
+ #ifdef DEBUG
+   int pre_position = pc_offset();
+ #endif
+
+   ASSERT(RelocInfo::IsCodeTarget(rmode));
+   // 'code' is always generated ARM code, never THUMB code.
+   Call(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
+
+ #ifdef DEBUG
+   int post_position = pc_offset();
+   CHECK_EQ(pre_position + CallSize(code, rmode, cond), post_position);
+ #endif
+ }
+
+
+ void MacroAssembler::Ret(Condition cond) {
+ #if USE_BX
+   bx(lr, cond);
+ #else
+   mov(pc, Operand(lr), LeaveCC, cond);
+ #endif
+ }
+
+
+ void MacroAssembler::Drop(int count, Condition cond) {
+   if (count > 0) {
+     add(sp, sp, Operand(count * kPointerSize), LeaveCC, cond);
+   }
+ }
+
+
+ void MacroAssembler::Ret(int drop, Condition cond) {
+   Drop(drop, cond);
+   Ret(cond);
+ }
+
+
+ void MacroAssembler::Swap(Register reg1,
+                           Register reg2,
+                           Register scratch,
+                           Condition cond) {
+   if (scratch.is(no_reg)) {
+     eor(reg1, reg1, Operand(reg2), LeaveCC, cond);
+     eor(reg2, reg2, Operand(reg1), LeaveCC, cond);
+     eor(reg1, reg1, Operand(reg2), LeaveCC, cond);
+   } else {
+     mov(scratch, reg1, LeaveCC, cond);
+     mov(reg1, reg2, LeaveCC, cond);
+     mov(reg2, scratch, LeaveCC, cond);
+   }
+ }
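The scratch-less branch of Swap above is the classic three-XOR exchange; it only works because reg1 and reg2 are distinct registers, since a value XORed with itself becomes zero. A standalone C++ sketch of the same trick (a toy helper, not part of this diff):

    #include <cassert>
    #include <cstdint>

    // Swap two values without a temporary, as Swap() does with eor when no
    // scratch register is free. a and b must not alias: if they did, the
    // first XOR would zero both.
    void XorSwap(uint32_t& a, uint32_t& b) {
      a ^= b;  // a = a0 ^ b0
      b ^= a;  // b = b0 ^ (a0 ^ b0) = a0
      a ^= b;  // a = (a0 ^ b0) ^ a0 = b0
    }

    int main() {
      uint32_t x = 7, y = 42;
      XorSwap(x, y);
      assert(x == 42 && y == 7);
    }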
+
+
+ void MacroAssembler::Call(Label* target) {
+   bl(target);
+ }
+
+
+ void MacroAssembler::Move(Register dst, Handle<Object> value) {
+   mov(dst, Operand(value));
+ }
+
+
+ void MacroAssembler::Move(Register dst, Register src) {
+   if (!dst.is(src)) {
+     mov(dst, src);
+   }
+ }
+
+
+ void MacroAssembler::And(Register dst, Register src1, const Operand& src2,
+                          Condition cond) {
+   if (!src2.is_reg() &&
+       !src2.must_use_constant_pool() &&
+       src2.immediate() == 0) {
+     mov(dst, Operand(0, RelocInfo::NONE), LeaveCC, cond);
+
+   } else if (!src2.is_single_instruction() &&
+              !src2.must_use_constant_pool() &&
+              CpuFeatures::IsSupported(ARMv7) &&
+              IsPowerOf2(src2.immediate() + 1)) {
+     ubfx(dst, src1, 0, WhichPowerOf2(src2.immediate() + 1), cond);
+
+   } else {
+     and_(dst, src1, src2, LeaveCC, cond);
+   }
+ }
+
+
+ void MacroAssembler::Ubfx(Register dst, Register src1, int lsb, int width,
+                           Condition cond) {
+   ASSERT(lsb < 32);
+   if (!CpuFeatures::IsSupported(ARMv7)) {
+     int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
+     and_(dst, src1, Operand(mask), LeaveCC, cond);
+     if (lsb != 0) {
+       mov(dst, Operand(dst, LSR, lsb), LeaveCC, cond);
+     }
+   } else {
+     ubfx(dst, src1, lsb, width, cond);
+   }
+ }
+
+
+ void MacroAssembler::Sbfx(Register dst, Register src1, int lsb, int width,
+                           Condition cond) {
+   ASSERT(lsb < 32);
+   if (!CpuFeatures::IsSupported(ARMv7)) {
+     int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
+     and_(dst, src1, Operand(mask), LeaveCC, cond);
+     int shift_up = 32 - lsb - width;
+     int shift_down = lsb + shift_up;
+     if (shift_up != 0) {
+       mov(dst, Operand(dst, LSL, shift_up), LeaveCC, cond);
+     }
+     if (shift_down != 0) {
+       mov(dst, Operand(dst, ASR, shift_down), LeaveCC, cond);
+     }
+   } else {
+     sbfx(dst, src1, lsb, width, cond);
+   }
+ }
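Ubfx and Sbfx above fall back to mask-and-shift sequences on pre-ARMv7 cores. The same arithmetic in portable C++ (function names are ours; this assumes lsb + width < 32 and two's-complement arithmetic right shift, which the ARM target provides):

    #include <cassert>
    #include <cstdint>

    // Unsigned bit-field extract: width bits starting at lsb.
    // Mirrors the pre-ARMv7 path: mask, then shift right.
    uint32_t Ubfx(uint32_t x, int lsb, int width) {
      uint32_t mask = ((1u << (width + lsb)) - 1) - ((1u << lsb) - 1);
      return (x & mask) >> lsb;
    }

    // Signed variant: shift the field to the top of the word, then
    // arithmetic-shift back down so its top bit is sign-extended.
    int32_t Sbfx(uint32_t x, int lsb, int width) {
      int shift_up = 32 - lsb - width;  // Left shift discards higher bits.
      return static_cast<int32_t>(x << shift_up) >> (lsb + shift_up);
    }

    int main() {
      assert(Ubfx(0xABCD1234u, 8, 8) == 0x12);
      assert(Sbfx(0x0000F000u, 12, 4) == -1);  // Field 0xF sign-extends.
    }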
+
+
+ void MacroAssembler::Bfi(Register dst,
+                          Register src,
+                          Register scratch,
+                          int lsb,
+                          int width,
+                          Condition cond) {
+   ASSERT(0 <= lsb && lsb < 32);
+   ASSERT(0 <= width && width < 32);
+   ASSERT(lsb + width < 32);
+   ASSERT(!scratch.is(dst));
+   if (width == 0) return;
+   if (!CpuFeatures::IsSupported(ARMv7)) {
+     int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
+     bic(dst, dst, Operand(mask));
+     and_(scratch, src, Operand((1 << width) - 1));
+     mov(scratch, Operand(scratch, LSL, lsb));
+     orr(dst, dst, scratch);
+   } else {
+     bfi(dst, src, lsb, width, cond);
+   }
+ }
+
+
+ void MacroAssembler::Bfc(Register dst, int lsb, int width, Condition cond) {
+   ASSERT(lsb < 32);
+   if (!CpuFeatures::IsSupported(ARMv7)) {
+     int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
+     bic(dst, dst, Operand(mask));
+   } else {
+     bfc(dst, lsb, width, cond);
+   }
+ }
+
+
+ void MacroAssembler::Usat(Register dst, int satpos, const Operand& src,
+                           Condition cond) {
+   if (!CpuFeatures::IsSupported(ARMv7)) {
+     ASSERT(!dst.is(pc) && !src.rm().is(pc));
+     ASSERT((satpos >= 0) && (satpos <= 31));
+
+     // These asserts are required to ensure compatibility with the ARMv7
+     // implementation.
+     ASSERT((src.shift_op() == ASR) || (src.shift_op() == LSL));
+     ASSERT(src.rs().is(no_reg));
+
+     Label done;
+     int satval = (1 << satpos) - 1;
+
+     if (cond != al) {
+       b(NegateCondition(cond), &done);  // Skip saturate if !condition.
+     }
+     if (!(src.is_reg() && dst.is(src.rm()))) {
+       mov(dst, src);
+     }
+     tst(dst, Operand(~satval));
+     b(eq, &done);
+     mov(dst, Operand(0, RelocInfo::NONE), LeaveCC, mi);  // 0 if negative.
+     mov(dst, Operand(satval), LeaveCC, pl);  // satval if positive.
+     bind(&done);
+   } else {
+     usat(dst, satpos, src, cond);
+   }
+ }
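The pre-ARMv7 Usat path clamps a signed value into [0, 2^satpos - 1]: negative inputs become 0, and anything with bits above the field becomes the maximum. A hedged C++ equivalent (the naming is ours):

    #include <cassert>
    #include <cstdint>

    // Unsigned saturate to satpos bits, matching the pre-ARMv7 Usat
    // fallback: 0 when negative, satval when any bit above the field is set.
    uint32_t Usat(int32_t x, int satpos) {
      uint32_t satval = (1u << satpos) - 1;
      if (x < 0) return 0;                                // The mi branch.
      if ((static_cast<uint32_t>(x) & ~satval) != 0) return satval;  // pl.
      return static_cast<uint32_t>(x);                    // Already in range.
    }

    int main() {
      assert(Usat(-5, 8) == 0);
      assert(Usat(300, 8) == 255);
      assert(Usat(77, 8) == 77);
    }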
+
+
+ void MacroAssembler::SmiJumpTable(Register index, Vector<Label*> targets) {
+   // Empty the const pool.
+   CheckConstPool(true, true);
+   add(pc, pc, Operand(index,
+                       LSL,
+                       Instruction::kInstrSizeLog2 - kSmiTagSize));
+   BlockConstPoolBefore(pc_offset() + (targets.length() + 1) * kInstrSize);
+   nop();  // Jump table alignment.
+   for (int i = 0; i < targets.length(); i++) {
+     b(targets[i]);
+   }
+ }
+
+
+ void MacroAssembler::LoadRoot(Register destination,
+                               Heap::RootListIndex index,
+                               Condition cond) {
+   ldr(destination, MemOperand(roots, index << kPointerSizeLog2), cond);
+ }
+
+
+ void MacroAssembler::StoreRoot(Register source,
+                                Heap::RootListIndex index,
+                                Condition cond) {
+   str(source, MemOperand(roots, index << kPointerSizeLog2), cond);
+ }
+
+
+ void MacroAssembler::RecordWriteHelper(Register object,
+                                        Register address,
+                                        Register scratch) {
+   if (emit_debug_code()) {
+     // Check that the object is not in new space.
+     Label not_in_new_space;
+     InNewSpace(object, scratch, ne, &not_in_new_space);
+     Abort("new-space object passed to RecordWriteHelper");
+     bind(&not_in_new_space);
+   }
+
+   // Calculate page address.
+   Bfc(object, 0, kPageSizeBits);
+
+   // Calculate region number.
+   Ubfx(address, address, Page::kRegionSizeLog2,
+        kPageSizeBits - Page::kRegionSizeLog2);
+
+   // Mark region dirty.
+   ldr(scratch, MemOperand(object, Page::kDirtyFlagOffset));
+   mov(ip, Operand(1));
+   orr(scratch, scratch, Operand(ip, LSL, address));
+   str(scratch, MemOperand(object, Page::kDirtyFlagOffset));
+ }
+
+
+ void MacroAssembler::InNewSpace(Register object,
+                                 Register scratch,
+                                 Condition cond,
+                                 Label* branch) {
+   ASSERT(cond == eq || cond == ne);
+   and_(scratch, object, Operand(ExternalReference::new_space_mask(isolate())));
+   cmp(scratch, Operand(ExternalReference::new_space_start(isolate())));
+   b(cond, branch);
+ }
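InNewSpace relies on the new space being a single aligned block, so membership is one mask and one compare rather than a table lookup. A toy sketch under that assumption; the base and size constants here are made up for illustration, not V8's real values:

    #include <cassert>
    #include <cstdint>

    // New space as one power-of-two-aligned region: membership is a
    // mask-and-compare, exactly the shape of the and_/cmp pair above.
    const uintptr_t kNewSpaceStart = 0x40000000u;  // Hypothetical base.
    const uintptr_t kNewSpaceSize  = 0x00800000u;  // 8 MB, power of two.
    const uintptr_t kNewSpaceMask  = ~(kNewSpaceSize - 1);

    bool InNewSpace(uintptr_t addr) {
      return (addr & kNewSpaceMask) == kNewSpaceStart;
    }

    int main() {
      assert(InNewSpace(kNewSpaceStart + 0x1234));
      assert(!InNewSpace(kNewSpaceStart + kNewSpaceSize));
    }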
+
+
+ // Will clobber 4 registers: object, offset, scratch, ip. The
+ // register 'object' contains a heap object pointer. The heap object
+ // tag is shifted away.
+ void MacroAssembler::RecordWrite(Register object,
+                                  Operand offset,
+                                  Register scratch0,
+                                  Register scratch1) {
+   // The compiled code assumes that record write doesn't change the
+   // context register, so we check that none of the clobbered
+   // registers are cp.
+   ASSERT(!object.is(cp) && !scratch0.is(cp) && !scratch1.is(cp));
+
+   Label done;
+
+   // First, test that the object is not in the new space. We cannot set
+   // region marks for new space pages.
+   InNewSpace(object, scratch0, eq, &done);
+
+   // Add offset into the object.
+   add(scratch0, object, offset);
+
+   // Record the actual write.
+   RecordWriteHelper(object, scratch0, scratch1);
+
+   bind(&done);
+
+   // Clobber all input registers when running with the debug-code flag
+   // turned on to provoke errors.
+   if (emit_debug_code()) {
+     mov(object, Operand(BitCast<int32_t>(kZapValue)));
+     mov(scratch0, Operand(BitCast<int32_t>(kZapValue)));
+     mov(scratch1, Operand(BitCast<int32_t>(kZapValue)));
+   }
+ }
+
+
+ // Will clobber 4 registers: object, address, scratch, ip. The
+ // register 'object' contains a heap object pointer. The heap object
+ // tag is shifted away.
+ void MacroAssembler::RecordWrite(Register object,
+                                  Register address,
+                                  Register scratch) {
+   // The compiled code assumes that record write doesn't change the
+   // context register, so we check that none of the clobbered
+   // registers are cp.
+   ASSERT(!object.is(cp) && !address.is(cp) && !scratch.is(cp));
+
+   Label done;
+
+   // First, test that the object is not in the new space. We cannot set
+   // region marks for new space pages.
+   InNewSpace(object, scratch, eq, &done);
+
+   // Record the actual write.
+   RecordWriteHelper(object, address, scratch);
+
+   bind(&done);
+
+   // Clobber all input registers when running with the debug-code flag
+   // turned on to provoke errors.
+   if (emit_debug_code()) {
+     mov(object, Operand(BitCast<int32_t>(kZapValue)));
+     mov(address, Operand(BitCast<int32_t>(kZapValue)));
+     mov(scratch, Operand(BitCast<int32_t>(kZapValue)));
+   }
+ }
+
+
+ // Push and pop all registers that can hold pointers.
+ void MacroAssembler::PushSafepointRegisters() {
+   // Safepoints expect a block of contiguous register values starting with r0:
+   ASSERT(((1 << kNumSafepointSavedRegisters) - 1) == kSafepointSavedRegisters);
+   // Safepoints expect a block of kNumSafepointRegisters values on the
+   // stack, so adjust the stack for unsaved registers.
+   const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
+   ASSERT(num_unsaved >= 0);
+   sub(sp, sp, Operand(num_unsaved * kPointerSize));
+   stm(db_w, sp, kSafepointSavedRegisters);
+ }
+
+
+ void MacroAssembler::PopSafepointRegisters() {
+   const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
+   ldm(ia_w, sp, kSafepointSavedRegisters);
+   add(sp, sp, Operand(num_unsaved * kPointerSize));
+ }
+
+
+ void MacroAssembler::PushSafepointRegistersAndDoubles() {
+   PushSafepointRegisters();
+   sub(sp, sp, Operand(DwVfpRegister::kNumAllocatableRegisters *
+                       kDoubleSize));
+   for (int i = 0; i < DwVfpRegister::kNumAllocatableRegisters; i++) {
+     vstr(DwVfpRegister::FromAllocationIndex(i), sp, i * kDoubleSize);
+   }
+ }
+
+
+ void MacroAssembler::PopSafepointRegistersAndDoubles() {
+   for (int i = 0; i < DwVfpRegister::kNumAllocatableRegisters; i++) {
+     vldr(DwVfpRegister::FromAllocationIndex(i), sp, i * kDoubleSize);
+   }
+   add(sp, sp, Operand(DwVfpRegister::kNumAllocatableRegisters *
+                       kDoubleSize));
+   PopSafepointRegisters();
+ }
+
+ void MacroAssembler::StoreToSafepointRegistersAndDoublesSlot(Register src,
+                                                              Register dst) {
+   str(src, SafepointRegistersAndDoublesSlot(dst));
+ }
+
+
+ void MacroAssembler::StoreToSafepointRegisterSlot(Register src, Register dst) {
+   str(src, SafepointRegisterSlot(dst));
+ }
+
+
+ void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
+   ldr(dst, SafepointRegisterSlot(src));
+ }
+
+
+ int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
+   // The registers are pushed starting with the highest encoding,
+   // which means that lowest encodings are closest to the stack pointer.
+   ASSERT(reg_code >= 0 && reg_code < kNumSafepointRegisters);
+   return reg_code;
+ }
+
+
+ MemOperand MacroAssembler::SafepointRegisterSlot(Register reg) {
+   return MemOperand(sp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
+ }
+
+
+ MemOperand MacroAssembler::SafepointRegistersAndDoublesSlot(Register reg) {
+   // General purpose registers are pushed last on the stack.
+   int doubles_size = DwVfpRegister::kNumAllocatableRegisters * kDoubleSize;
+   int register_offset = SafepointRegisterStackIndex(reg.code()) * kPointerSize;
+   return MemOperand(sp, doubles_size + register_offset);
+ }
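SafepointRegistersAndDoublesSlot encodes the stack layout directly: the doubles block is pushed after the GP registers, so a GP register's slot is its safepoint index times the pointer size, offset past the doubles. A small sketch with stand-in constants (the register counts are assumptions for illustration, not values taken from this diff):

    #include <cassert>

    // Illustrative layout math for SafepointRegistersAndDoublesSlot.
    const int kPointerSize = 4;
    const int kDoubleSize = 8;
    const int kNumAllocatableDoubles = 15;  // Assumed VFP register count.

    int SafepointRegistersAndDoublesSlotOffset(int reg_code) {
      int doubles_size = kNumAllocatableDoubles * kDoubleSize;
      return doubles_size + reg_code * kPointerSize;  // Past the doubles.
    }

    int main() {
      // r0 lands immediately above the saved doubles; r1 one pointer higher.
      assert(SafepointRegistersAndDoublesSlotOffset(0) == 120);
      assert(SafepointRegistersAndDoublesSlotOffset(1) == 124);
    }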
+
+
+ void MacroAssembler::Ldrd(Register dst1, Register dst2,
+                           const MemOperand& src, Condition cond) {
+   ASSERT(src.rm().is(no_reg));
+   ASSERT(!dst1.is(lr));  // r14.
+   ASSERT_EQ(0, dst1.code() % 2);
+   ASSERT_EQ(dst1.code() + 1, dst2.code());
+
+   // Generate two ldr instructions if ldrd is not available.
+   if (CpuFeatures::IsSupported(ARMv7)) {
+     CpuFeatures::Scope scope(ARMv7);
+     ldrd(dst1, dst2, src, cond);
+   } else {
+     MemOperand src2(src);
+     src2.set_offset(src2.offset() + 4);
+     if (dst1.is(src.rn())) {
+       ldr(dst2, src2, cond);
+       ldr(dst1, src, cond);
+     } else {
+       ldr(dst1, src, cond);
+       ldr(dst2, src2, cond);
+     }
+   }
+ }
+
+
+ void MacroAssembler::Strd(Register src1, Register src2,
+                           const MemOperand& dst, Condition cond) {
+   ASSERT(dst.rm().is(no_reg));
+   ASSERT(!src1.is(lr));  // r14.
+   ASSERT_EQ(0, src1.code() % 2);
+   ASSERT_EQ(src1.code() + 1, src2.code());
+
+   // Generate two str instructions if strd is not available.
+   if (CpuFeatures::IsSupported(ARMv7)) {
+     CpuFeatures::Scope scope(ARMv7);
+     strd(src1, src2, dst, cond);
+   } else {
+     MemOperand dst2(dst);
+     dst2.set_offset(dst2.offset() + 4);
+     str(src1, dst, cond);
+     str(src2, dst2, cond);
+   }
+ }
+
+
+ void MacroAssembler::ClearFPSCRBits(const uint32_t bits_to_clear,
+                                     const Register scratch,
+                                     const Condition cond) {
+   vmrs(scratch, cond);
+   bic(scratch, scratch, Operand(bits_to_clear), LeaveCC, cond);
+   vmsr(scratch, cond);
+ }
+
+
+ void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1,
+                                            const DwVfpRegister src2,
+                                            const Condition cond) {
+   // Compare and move FPSCR flags to the normal condition flags.
+   VFPCompareAndLoadFlags(src1, src2, pc, cond);
+ }
+
+ void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1,
+                                            const double src2,
+                                            const Condition cond) {
+   // Compare and move FPSCR flags to the normal condition flags.
+   VFPCompareAndLoadFlags(src1, src2, pc, cond);
+ }
+
+
+ void MacroAssembler::VFPCompareAndLoadFlags(const DwVfpRegister src1,
+                                             const DwVfpRegister src2,
+                                             const Register fpscr_flags,
+                                             const Condition cond) {
+   // Compare and load FPSCR.
+   vcmp(src1, src2, cond);
+   vmrs(fpscr_flags, cond);
+ }
+
+ void MacroAssembler::VFPCompareAndLoadFlags(const DwVfpRegister src1,
+                                             const double src2,
+                                             const Register fpscr_flags,
+                                             const Condition cond) {
+   // Compare and load FPSCR.
+   vcmp(src1, src2, cond);
+   vmrs(fpscr_flags, cond);
+ }
+
+
+ void MacroAssembler::EnterFrame(StackFrame::Type type) {
+   // r0-r3: preserved
+   stm(db_w, sp, cp.bit() | fp.bit() | lr.bit());
+   mov(ip, Operand(Smi::FromInt(type)));
+   push(ip);
+   mov(ip, Operand(CodeObject()));
+   push(ip);
+   add(fp, sp, Operand(3 * kPointerSize));  // Adjust FP to point to saved FP.
+ }
+
+
+ void MacroAssembler::LeaveFrame(StackFrame::Type type) {
+   // r0: preserved
+   // r1: preserved
+   // r2: preserved
+
+   // Drop the execution stack down to the frame pointer and restore
+   // the caller frame pointer and return address.
+   mov(sp, fp);
+   ldm(ia_w, sp, fp.bit() | lr.bit());
+ }
+
+
+ void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space) {
+   // Set up the frame structure on the stack.
+   ASSERT_EQ(2 * kPointerSize, ExitFrameConstants::kCallerSPDisplacement);
+   ASSERT_EQ(1 * kPointerSize, ExitFrameConstants::kCallerPCOffset);
+   ASSERT_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset);
+   Push(lr, fp);
+   mov(fp, Operand(sp));  // Set up new frame pointer.
+   // Reserve room for saved entry sp and code object.
+   sub(sp, sp, Operand(2 * kPointerSize));
+   if (emit_debug_code()) {
+     mov(ip, Operand(0));
+     str(ip, MemOperand(fp, ExitFrameConstants::kSPOffset));
+   }
+   mov(ip, Operand(CodeObject()));
+   str(ip, MemOperand(fp, ExitFrameConstants::kCodeOffset));
+
+   // Save the frame pointer and the context in top.
+   mov(ip, Operand(ExternalReference(Isolate::k_c_entry_fp_address, isolate())));
+   str(fp, MemOperand(ip));
+   mov(ip, Operand(ExternalReference(Isolate::k_context_address, isolate())));
+   str(cp, MemOperand(ip));
+
+   // Optionally save all double registers.
+   if (save_doubles) {
+     DwVfpRegister first = d0;
+     DwVfpRegister last =
+         DwVfpRegister::from_code(DwVfpRegister::kNumRegisters - 1);
+     vstm(db_w, sp, first, last);
+     // Note that d0 will be accessible at
+     // fp - 2 * kPointerSize - DwVfpRegister::kNumRegisters * kDoubleSize,
+     // since the sp slot and code slot were pushed after the fp.
+   }
+
+   // Reserve place for the return address and stack space and align the frame
+   // preparing for calling the runtime function.
+   const int frame_alignment = MacroAssembler::ActivationFrameAlignment();
+   sub(sp, sp, Operand((stack_space + 1) * kPointerSize));
+   if (frame_alignment > 0) {
+     ASSERT(IsPowerOf2(frame_alignment));
+     and_(sp, sp, Operand(-frame_alignment));
+   }
+
+   // Set the exit frame sp value to point just before the return address
+   // location.
+   add(ip, sp, Operand(kPointerSize));
+   str(ip, MemOperand(fp, ExitFrameConstants::kSPOffset));
+ }
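The frame-alignment step works because the alignment is a power of two: and-ing sp with -frame_alignment clears the low bits and rounds the stack pointer down. The same rounding in plain C++:

    #include <cassert>
    #include <cstdint>

    // Round an address down to a power-of-two alignment, as the
    // and_(sp, sp, Operand(-frame_alignment)) above does for the stack.
    uintptr_t AlignDown(uintptr_t sp, uintptr_t alignment) {
      // -alignment in two's complement is a mask with the low bits clear.
      return sp & ~(alignment - 1);
    }

    int main() {
      assert(AlignDown(0x1007, 8) == 0x1000);
      assert(AlignDown(0x1010, 8) == 0x1010);  // Already aligned: unchanged.
    }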
+
+
+ void MacroAssembler::InitializeNewString(Register string,
+                                          Register length,
+                                          Heap::RootListIndex map_index,
+                                          Register scratch1,
+                                          Register scratch2) {
+   mov(scratch1, Operand(length, LSL, kSmiTagSize));
+   LoadRoot(scratch2, map_index);
+   str(scratch1, FieldMemOperand(string, String::kLengthOffset));
+   mov(scratch1, Operand(String::kEmptyHashField));
+   str(scratch2, FieldMemOperand(string, HeapObject::kMapOffset));
+   str(scratch1, FieldMemOperand(string, String::kHashFieldOffset));
+ }
+
+
+ int MacroAssembler::ActivationFrameAlignment() {
+ #if defined(V8_HOST_ARCH_ARM)
+   // Running on the real platform. Use the alignment as mandated by the local
+   // environment.
+   // Note: This will break if we ever start generating snapshots on one ARM
+   // platform for another ARM platform with a different alignment.
+   return OS::ActivationFrameAlignment();
+ #else  // defined(V8_HOST_ARCH_ARM)
+   // If we are using the simulator then we should always align to the expected
+   // alignment. As the simulator is used to generate snapshots we do not know
+   // if the target platform will need alignment, so this is controlled from a
+   // flag.
+   return FLAG_sim_stack_alignment;
+ #endif  // defined(V8_HOST_ARCH_ARM)
+ }
+
+
+ void MacroAssembler::LeaveExitFrame(bool save_doubles,
+                                     Register argument_count) {
+   // Optionally restore all double registers.
+   if (save_doubles) {
+     // Calculate the stack location of the saved doubles and restore them.
+     const int offset = 2 * kPointerSize;
+     sub(r3, fp, Operand(offset + DwVfpRegister::kNumRegisters * kDoubleSize));
+     DwVfpRegister first = d0;
+     DwVfpRegister last =
+         DwVfpRegister::from_code(DwVfpRegister::kNumRegisters - 1);
+     vldm(ia, r3, first, last);
+   }
+
+   // Clear top frame.
+   mov(r3, Operand(0, RelocInfo::NONE));
+   mov(ip, Operand(ExternalReference(Isolate::k_c_entry_fp_address, isolate())));
+   str(r3, MemOperand(ip));
+
+   // Restore current context from top and clear it in debug mode.
+   mov(ip, Operand(ExternalReference(Isolate::k_context_address, isolate())));
+   ldr(cp, MemOperand(ip));
+ #ifdef DEBUG
+   str(r3, MemOperand(ip));
+ #endif
+
+   // Tear down the exit frame, pop the arguments, and return.
+   mov(sp, Operand(fp));
+   ldm(ia_w, sp, fp.bit() | lr.bit());
+   if (argument_count.is_valid()) {
+     add(sp, sp, Operand(argument_count, LSL, kPointerSizeLog2));
+   }
+ }
+
+ void MacroAssembler::GetCFunctionDoubleResult(const DoubleRegister dst) {
+ #if !defined(USE_ARM_EABI)
+   UNREACHABLE();
+ #else
+   vmov(dst, r0, r1);
+ #endif
+ }
+
+
+ void MacroAssembler::InvokePrologue(const ParameterCount& expected,
+                                     const ParameterCount& actual,
+                                     Handle<Code> code_constant,
+                                     Register code_reg,
+                                     Label* done,
+                                     InvokeFlag flag,
+                                     CallWrapper* call_wrapper) {
+   bool definitely_matches = false;
+   Label regular_invoke;
+
+   // Check whether the expected and actual arguments count match. If not,
+   // set up registers according to contract with ArgumentsAdaptorTrampoline:
+   //  r0: actual arguments count
+   //  r1: function (passed through to callee)
+   //  r2: expected arguments count
+   //  r3: callee code entry
+
+   // The code below is made a lot easier because the calling code already sets
+   // up actual and expected registers according to the contract if values are
+   // passed in registers.
+   ASSERT(actual.is_immediate() || actual.reg().is(r0));
+   ASSERT(expected.is_immediate() || expected.reg().is(r2));
+   ASSERT((!code_constant.is_null() && code_reg.is(no_reg)) || code_reg.is(r3));
+
+   if (expected.is_immediate()) {
+     ASSERT(actual.is_immediate());
+     if (expected.immediate() == actual.immediate()) {
+       definitely_matches = true;
+     } else {
+       mov(r0, Operand(actual.immediate()));
+       const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
+       if (expected.immediate() == sentinel) {
+         // Don't worry about adapting arguments for builtins that
+         // don't want that done. Skip adaptation code by making it look
+         // like we have a match between expected and actual number of
+         // arguments.
+         definitely_matches = true;
+       } else {
+         mov(r2, Operand(expected.immediate()));
+       }
+     }
+   } else {
+     if (actual.is_immediate()) {
+       cmp(expected.reg(), Operand(actual.immediate()));
+       b(eq, &regular_invoke);
+       mov(r0, Operand(actual.immediate()));
+     } else {
+       cmp(expected.reg(), Operand(actual.reg()));
+       b(eq, &regular_invoke);
+     }
+   }
+
+   if (!definitely_matches) {
+     if (!code_constant.is_null()) {
+       mov(r3, Operand(code_constant));
+       add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
+     }
+
+     Handle<Code> adaptor =
+         isolate()->builtins()->ArgumentsAdaptorTrampoline();
+     if (flag == CALL_FUNCTION) {
+       if (call_wrapper != NULL) {
+         call_wrapper->BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
+       }
+       Call(adaptor, RelocInfo::CODE_TARGET);
+       if (call_wrapper != NULL) call_wrapper->AfterCall();
+       b(done);
+     } else {
+       Jump(adaptor, RelocInfo::CODE_TARGET);
+     }
+     bind(&regular_invoke);
+   }
+ }
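InvokePrologue's branching reduces to a three-way decision: call directly when the counts match, call directly when the callee opted out via kDontAdaptArgumentsSentinel, and otherwise route through the ArgumentsAdaptorTrampoline. A compact sketch of that decision for the immediate/immediate case (the types and the sentinel value below are stand-ins of ours):

    #include <cassert>

    // Stand-in for SharedFunctionInfo::kDontAdaptArgumentsSentinel.
    const int kDontAdaptArgumentsSentinel = -1;

    enum InvokeTarget { kCallDirect, kCallThroughAdaptor };

    // Mirrors the InvokePrologue decision when both counts are immediates.
    InvokeTarget ChooseInvokeTarget(int expected, int actual) {
      if (expected == actual) return kCallDirect;
      if (expected == kDontAdaptArgumentsSentinel) return kCallDirect;
      return kCallThroughAdaptor;  // Counts differ: adapt the frame.
    }

    int main() {
      assert(ChooseInvokeTarget(2, 2) == kCallDirect);
      assert(ChooseInvokeTarget(kDontAdaptArgumentsSentinel, 5) == kCallDirect);
      assert(ChooseInvokeTarget(2, 3) == kCallThroughAdaptor);
    }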
+
+
+ void MacroAssembler::InvokeCode(Register code,
+                                 const ParameterCount& expected,
+                                 const ParameterCount& actual,
+                                 InvokeFlag flag,
+                                 CallWrapper* call_wrapper) {
+   Label done;
+
+   InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag,
+                  call_wrapper);
+   if (flag == CALL_FUNCTION) {
+     if (call_wrapper != NULL) call_wrapper->BeforeCall(CallSize(code));
+     Call(code);
+     if (call_wrapper != NULL) call_wrapper->AfterCall();
+   } else {
+     ASSERT(flag == JUMP_FUNCTION);
+     Jump(code);
+   }
+
+   // Continue here if InvokePrologue does handle the invocation due to
+   // mismatched parameter counts.
+   bind(&done);
+ }
+
+
+ void MacroAssembler::InvokeCode(Handle<Code> code,
+                                 const ParameterCount& expected,
+                                 const ParameterCount& actual,
+                                 RelocInfo::Mode rmode,
+                                 InvokeFlag flag) {
+   Label done;
+
+   InvokePrologue(expected, actual, code, no_reg, &done, flag);
+   if (flag == CALL_FUNCTION) {
+     Call(code, rmode);
+   } else {
+     Jump(code, rmode);
+   }
+
+   // Continue here if InvokePrologue does handle the invocation due to
+   // mismatched parameter counts.
+   bind(&done);
+ }
+
+
+ void MacroAssembler::InvokeFunction(Register fun,
+                                     const ParameterCount& actual,
+                                     InvokeFlag flag,
+                                     CallWrapper* call_wrapper) {
+   // Contract with called JS functions requires that function is passed in r1.
+   ASSERT(fun.is(r1));
+
+   Register expected_reg = r2;
+   Register code_reg = r3;
+
+   ldr(code_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
+   ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
+   ldr(expected_reg,
+       FieldMemOperand(code_reg,
+                       SharedFunctionInfo::kFormalParameterCountOffset));
+   mov(expected_reg, Operand(expected_reg, ASR, kSmiTagSize));
+   ldr(code_reg,
+       FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
+
+   ParameterCount expected(expected_reg);
+   InvokeCode(code_reg, expected, actual, flag, call_wrapper);
+ }
+
+
+ void MacroAssembler::InvokeFunction(JSFunction* function,
+                                     const ParameterCount& actual,
+                                     InvokeFlag flag) {
+   ASSERT(function->is_compiled());
+
+   // Get the function and set up the context.
+   mov(r1, Operand(Handle<JSFunction>(function)));
+   ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
+
+   // Invoke the cached code.
+   Handle<Code> code(function->code());
+   ParameterCount expected(function->shared()->formal_parameter_count());
+   if (V8::UseCrankshaft()) {
+     // TODO(kasperl): For now, we always call indirectly through the
+     // code field in the function to allow recompilation to take effect
+     // without changing any of the call sites.
+     ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
+     InvokeCode(r3, expected, actual, flag);
+   } else {
+     InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
+   }
+ }
+
+
+ void MacroAssembler::IsObjectJSObjectType(Register heap_object,
+                                           Register map,
+                                           Register scratch,
+                                           Label* fail) {
+   ldr(map, FieldMemOperand(heap_object, HeapObject::kMapOffset));
+   IsInstanceJSObjectType(map, scratch, fail);
+ }
+
+
+ void MacroAssembler::IsInstanceJSObjectType(Register map,
+                                             Register scratch,
+                                             Label* fail) {
+   ldrb(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset));
+   cmp(scratch, Operand(FIRST_JS_OBJECT_TYPE));
+   b(lt, fail);
+   cmp(scratch, Operand(LAST_JS_OBJECT_TYPE));
+   b(gt, fail);
+ }
+
+
+ void MacroAssembler::IsObjectJSStringType(Register object,
+                                           Register scratch,
+                                           Label* fail) {
+   ASSERT(kNotStringTag != 0);
+
+   ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
+   ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
+   tst(scratch, Operand(kIsNotStringMask));
+   b(ne, fail);
+ }
+
+
+ #ifdef ENABLE_DEBUGGER_SUPPORT
+ void MacroAssembler::DebugBreak() {
+   ASSERT(allow_stub_calls());
+   mov(r0, Operand(0, RelocInfo::NONE));
+   mov(r1, Operand(ExternalReference(Runtime::kDebugBreak, isolate())));
+   CEntryStub ces(1);
+   Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
+ }
+ #endif
+
+
+ void MacroAssembler::PushTryHandler(CodeLocation try_location,
+                                     HandlerType type) {
+   // Adjust this code if not the case.
+   ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
+   // The pc (return address) is passed in register lr.
+   if (try_location == IN_JAVASCRIPT) {
+     if (type == TRY_CATCH_HANDLER) {
+       mov(r3, Operand(StackHandler::TRY_CATCH));
+     } else {
+       mov(r3, Operand(StackHandler::TRY_FINALLY));
+     }
+     ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize
+            && StackHandlerConstants::kFPOffset == 2 * kPointerSize
+            && StackHandlerConstants::kPCOffset == 3 * kPointerSize);
+     stm(db_w, sp, r3.bit() | fp.bit() | lr.bit());
+     // Save the current handler as the next handler.
+     mov(r3, Operand(ExternalReference(Isolate::k_handler_address, isolate())));
+     ldr(r1, MemOperand(r3));
+     ASSERT(StackHandlerConstants::kNextOffset == 0);
+     push(r1);
+     // Link this handler as the new current one.
+     str(sp, MemOperand(r3));
+   } else {
+     // Must preserve r0-r4, r5-r7 are available.
+     ASSERT(try_location == IN_JS_ENTRY);
+     // The frame pointer does not point to a JS frame so we save NULL
+     // for fp. We expect the code throwing an exception to check fp
+     // before dereferencing it to restore the context.
+     mov(ip, Operand(0, RelocInfo::NONE));  // To save a NULL frame pointer.
+     mov(r6, Operand(StackHandler::ENTRY));
+     ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize
+            && StackHandlerConstants::kFPOffset == 2 * kPointerSize
+            && StackHandlerConstants::kPCOffset == 3 * kPointerSize);
+     stm(db_w, sp, r6.bit() | ip.bit() | lr.bit());
+     // Save the current handler as the next handler.
+     mov(r7, Operand(ExternalReference(Isolate::k_handler_address, isolate())));
+     ldr(r6, MemOperand(r7));
+     ASSERT(StackHandlerConstants::kNextOffset == 0);
+     push(r6);
+     // Link this handler as the new current one.
+     str(sp, MemOperand(r7));
+   }
+ }
+
+
+ void MacroAssembler::PopTryHandler() {
+   ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
+   pop(r1);
+   mov(ip, Operand(ExternalReference(Isolate::k_handler_address, isolate())));
+   add(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize));
+   str(r1, MemOperand(ip));
+ }
+
+
+ void MacroAssembler::Throw(Register value) {
+   // r0 is expected to hold the exception.
+   if (!value.is(r0)) {
+     mov(r0, value);
+   }
+
+   // Adjust this code if not the case.
+   STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
+
+   // Drop the sp to the top of the handler.
+   mov(r3, Operand(ExternalReference(Isolate::k_handler_address, isolate())));
+   ldr(sp, MemOperand(r3));
+
+   // Restore the next handler and frame pointer, discard handler state.
+   STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
+   pop(r2);
+   str(r2, MemOperand(r3));
+   STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
+   ldm(ia_w, sp, r3.bit() | fp.bit());  // r3: discarded state.
+
+   // Before returning we restore the context from the frame pointer if
+   // not NULL. The frame pointer is NULL in the exception handler of a
+   // JS entry frame.
+   cmp(fp, Operand(0, RelocInfo::NONE));
+   // Set cp to NULL if fp is NULL.
+   mov(cp, Operand(0, RelocInfo::NONE), LeaveCC, eq);
+   // Restore cp otherwise.
+   ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);
+ #ifdef DEBUG
+   if (emit_debug_code()) {
+     mov(lr, Operand(pc));
+   }
+ #endif
+   STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
+   pop(pc);
+ }
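Throw above walks a singly linked list of stack handlers threaded through the stack: sp drops to the innermost handler, the saved next pointer is re-published as the current handler, and the saved pc is popped to resume. A toy C++ model of that list discipline (the types and global below are illustrative only):

    #include <cassert>

    // Toy model of the stack-handler chain used by PushTryHandler,
    // PopTryHandler, and Throw: a singly linked list whose head lives at a
    // fixed "handler address".
    struct StackHandler {
      StackHandler* next;
      int state;  // e.g. TRY_CATCH, TRY_FINALLY, ENTRY.
    };

    StackHandler* g_top_handler = nullptr;  // Isolate::k_handler_address.

    void PushHandler(StackHandler* h) {
      h->next = g_top_handler;  // push(r1): save current handler as next.
      g_top_handler = h;        // str(sp, ...): link as the new current one.
    }

    void PopHandler() {
      g_top_handler = g_top_handler->next;  // pop(r1); str(r1, ...).
    }

    int main() {
      StackHandler outer = {nullptr, 0}, inner = {nullptr, 1};
      PushHandler(&outer);
      PushHandler(&inner);
      assert(g_top_handler == &inner);
      PopHandler();  // What Throw does after dropping sp to the handler.
      assert(g_top_handler == &outer);
    }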
+
+
+ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
+                                       Register value) {
+   // Adjust this code if not the case.
+   STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
+
+   // r0 is expected to hold the exception.
+   if (!value.is(r0)) {
+     mov(r0, value);
+   }
+
+   // Drop sp to the top stack handler.
+   mov(r3, Operand(ExternalReference(Isolate::k_handler_address, isolate())));
+   ldr(sp, MemOperand(r3));
+
+   // Unwind the handlers until the ENTRY handler is found.
+   Label loop, done;
+   bind(&loop);
+   // Load the type of the current stack handler.
+   const int kStateOffset = StackHandlerConstants::kStateOffset;
+   ldr(r2, MemOperand(sp, kStateOffset));
+   cmp(r2, Operand(StackHandler::ENTRY));
+   b(eq, &done);
+   // Fetch the next handler in the list.
+   const int kNextOffset = StackHandlerConstants::kNextOffset;
+   ldr(sp, MemOperand(sp, kNextOffset));
+   jmp(&loop);
+   bind(&done);
+
+   // Set the top handler address to next handler past the current ENTRY handler.
+   STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
+   pop(r2);
+   str(r2, MemOperand(r3));
+
+   if (type == OUT_OF_MEMORY) {
+     // Set external caught exception to false.
+     ExternalReference external_caught(
+         Isolate::k_external_caught_exception_address, isolate());
+     mov(r0, Operand(false, RelocInfo::NONE));
+     mov(r2, Operand(external_caught));
+     str(r0, MemOperand(r2));
+
+     // Set pending exception and r0 to out of memory exception.
+     Failure* out_of_memory = Failure::OutOfMemoryException();
+     mov(r0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
+     mov(r2, Operand(ExternalReference(Isolate::k_pending_exception_address,
+                                       isolate())));
+     str(r0, MemOperand(r2));
+   }
+
+   // Stack layout at this point. See also StackHandlerConstants.
+   // sp ->   state (ENTRY)
+   //         fp
+   //         lr
+
+   // Discard handler state (r2 is not used) and restore frame pointer.
+   STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
+   ldm(ia_w, sp, r2.bit() | fp.bit());  // r2: discarded state.
+   // Before returning we restore the context from the frame pointer if
+   // not NULL. The frame pointer is NULL in the exception handler of a
+   // JS entry frame.
+   cmp(fp, Operand(0, RelocInfo::NONE));
+   // Set cp to NULL if fp is NULL.
+   mov(cp, Operand(0, RelocInfo::NONE), LeaveCC, eq);
+   // Restore cp otherwise.
+   ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);
+ #ifdef DEBUG
+   if (emit_debug_code()) {
+     mov(lr, Operand(pc));
+   }
+ #endif
+   STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
+   pop(pc);
+ }
+
+
+ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
+                                             Register scratch,
+                                             Label* miss) {
+   Label same_contexts;
+
+   ASSERT(!holder_reg.is(scratch));
+   ASSERT(!holder_reg.is(ip));
+   ASSERT(!scratch.is(ip));
+
+   // Load current lexical context from the stack frame.
+   ldr(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset));
+   // In debug mode, make sure the lexical context is set.
+ #ifdef DEBUG
+   cmp(scratch, Operand(0, RelocInfo::NONE));
+   Check(ne, "we should not have an empty lexical context");
+ #endif
+
+   // Load the global context of the current context.
+   int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
+   ldr(scratch, FieldMemOperand(scratch, offset));
+   ldr(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset));
+
+   // Check the context is a global context.
+   if (emit_debug_code()) {
+     // TODO(119): avoid push(holder_reg)/pop(holder_reg)
+     // Cannot use ip as a temporary in this verification code, because
+     // ip is clobbered as part of cmp with an object Operand.
+     push(holder_reg);  // Temporarily save holder on the stack.
+     // Read the first word and compare to the global_context_map.
+     ldr(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
+     LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
+     cmp(holder_reg, ip);
+     Check(eq, "JSGlobalObject::global_context should be a global context.");
+     pop(holder_reg);  // Restore holder.
+   }
+
+   // Check if both contexts are the same.
+   ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset));
+   cmp(scratch, Operand(ip));
+   b(eq, &same_contexts);
+
+   // Check the context is a global context.
+   if (emit_debug_code()) {
+     // TODO(119): avoid push(holder_reg)/pop(holder_reg)
+     // Cannot use ip as a temporary in this verification code, because
+     // ip is clobbered as part of cmp with an object Operand.
+     push(holder_reg);  // Temporarily save holder on the stack.
+     mov(holder_reg, ip);  // Move ip to its holding place.
+     LoadRoot(ip, Heap::kNullValueRootIndex);
+     cmp(holder_reg, ip);
+     Check(ne, "JSGlobalProxy::context() should not be null.");
+
+     ldr(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
+     LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
+     cmp(holder_reg, ip);
+     Check(eq, "JSGlobalObject::global_context should be a global context.");
+     // Restoring ip is not needed. ip is reloaded below.
+     pop(holder_reg);  // Restore holder.
+     // Restore ip to holder's context.
+     ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset));
+   }
+
+   // Check that the security token in the calling global object is
+   // compatible with the security token in the receiving global
+   // object.
+   int token_offset = Context::kHeaderSize +
+                      Context::SECURITY_TOKEN_INDEX * kPointerSize;
+
+   ldr(scratch, FieldMemOperand(scratch, token_offset));
+   ldr(ip, FieldMemOperand(ip, token_offset));
+   cmp(scratch, Operand(ip));
+   b(ne, miss);
+
+   bind(&same_contexts);
+ }
+
+
+ void MacroAssembler::AllocateInNewSpace(int object_size,
+                                         Register result,
+                                         Register scratch1,
+                                         Register scratch2,
+                                         Label* gc_required,
+                                         AllocationFlags flags) {
+   if (!FLAG_inline_new) {
+     if (emit_debug_code()) {
+       // Trash the registers to simulate an allocation failure.
+       mov(result, Operand(0x7091));
+       mov(scratch1, Operand(0x7191));
+       mov(scratch2, Operand(0x7291));
+     }
+     jmp(gc_required);
+     return;
+   }
+
+   ASSERT(!result.is(scratch1));
+   ASSERT(!result.is(scratch2));
+   ASSERT(!scratch1.is(scratch2));
+   ASSERT(!scratch1.is(ip));
+   ASSERT(!scratch2.is(ip));
+
+   // Make object size into bytes.
+   if ((flags & SIZE_IN_WORDS) != 0) {
+     object_size *= kPointerSize;
+   }
+   ASSERT_EQ(0, object_size & kObjectAlignmentMask);
+
+   // Check relative positions of allocation top and limit addresses.
+   // The values must be adjacent in memory to allow the use of LDM.
+   // Also, assert that the registers are numbered such that the values
+   // are loaded in the correct order.
+   ExternalReference new_space_allocation_top =
+       ExternalReference::new_space_allocation_top_address(isolate());
+   ExternalReference new_space_allocation_limit =
+       ExternalReference::new_space_allocation_limit_address(isolate());
+   intptr_t top =
+       reinterpret_cast<intptr_t>(new_space_allocation_top.address());
+   intptr_t limit =
+       reinterpret_cast<intptr_t>(new_space_allocation_limit.address());
+   ASSERT((limit - top) == kPointerSize);
+   ASSERT(result.code() < ip.code());
+
+   // Set up allocation top address and object size registers.
+   Register topaddr = scratch1;
+   Register obj_size_reg = scratch2;
+   mov(topaddr, Operand(new_space_allocation_top));
+   mov(obj_size_reg, Operand(object_size));
+
+   // This code stores a temporary value in ip. This is OK, as the code below
+   // does not need ip for implicit literal generation.
+   if ((flags & RESULT_CONTAINS_TOP) == 0) {
+     // Load allocation top into result and allocation limit into ip.
+     ldm(ia, topaddr, result.bit() | ip.bit());
+   } else {
+     if (emit_debug_code()) {
+       // Assert that result actually contains top on entry. ip is used
+       // immediately below so this use of ip does not cause difference with
+       // respect to register content between debug and release mode.
+       ldr(ip, MemOperand(topaddr));
+       cmp(result, ip);
+       Check(eq, "Unexpected allocation top");
+     }
+     // Load allocation limit into ip. Result already contains allocation top.
+     ldr(ip, MemOperand(topaddr, limit - top));
+   }
+
+   // Calculate new top and bail out if new space is exhausted. Use result
+   // to calculate the new top.
+   add(scratch2, result, Operand(obj_size_reg), SetCC);
+   b(cs, gc_required);
+   cmp(scratch2, Operand(ip));
+   b(hi, gc_required);
+   str(scratch2, MemOperand(topaddr));
+
+   // Tag object if requested.
+   if ((flags & TAG_OBJECT) != 0) {
+     add(result, result, Operand(kHeapObjectTag));
+   }
+ }
+
+
+ void MacroAssembler::AllocateInNewSpace(Register object_size,
+                                         Register result,
+                                         Register scratch1,
+                                         Register scratch2,
+                                         Label* gc_required,
+                                         AllocationFlags flags) {
+   if (!FLAG_inline_new) {
+     if (emit_debug_code()) {
+       // Trash the registers to simulate an allocation failure.
+       mov(result, Operand(0x7091));
+       mov(scratch1, Operand(0x7191));
+       mov(scratch2, Operand(0x7291));
+     }
+     jmp(gc_required);
+     return;
+   }
+
+   // Assert that the register arguments are different and that none of
+   // them are ip. ip is used explicitly in the code generated below.
+   ASSERT(!result.is(scratch1));
+   ASSERT(!result.is(scratch2));
+   ASSERT(!scratch1.is(scratch2));
+   ASSERT(!result.is(ip));
+   ASSERT(!scratch1.is(ip));
+   ASSERT(!scratch2.is(ip));
+
+   // Check relative positions of allocation top and limit addresses.
+   // The values must be adjacent in memory to allow the use of LDM.
+   // Also, assert that the registers are numbered such that the values
+   // are loaded in the correct order.
+   ExternalReference new_space_allocation_top =
+       ExternalReference::new_space_allocation_top_address(isolate());
+   ExternalReference new_space_allocation_limit =
+       ExternalReference::new_space_allocation_limit_address(isolate());
+   intptr_t top =
+       reinterpret_cast<intptr_t>(new_space_allocation_top.address());
+   intptr_t limit =
+       reinterpret_cast<intptr_t>(new_space_allocation_limit.address());
+   ASSERT((limit - top) == kPointerSize);
+   ASSERT(result.code() < ip.code());
+
+   // Set up allocation top address.
+   Register topaddr = scratch1;
+   mov(topaddr, Operand(new_space_allocation_top));
+
+   // This code stores a temporary value in ip. This is OK, as the code below
+   // does not need ip for implicit literal generation.
+   if ((flags & RESULT_CONTAINS_TOP) == 0) {
+     // Load allocation top into result and allocation limit into ip.
+     ldm(ia, topaddr, result.bit() | ip.bit());
+   } else {
+     if (emit_debug_code()) {
+       // Assert that result actually contains top on entry. ip is used
+       // immediately below so this use of ip does not cause difference with
+       // respect to register content between debug and release mode.
+       ldr(ip, MemOperand(topaddr));
+       cmp(result, ip);
+       Check(eq, "Unexpected allocation top");
+     }
+     // Load allocation limit into ip. Result already contains allocation top.
+     ldr(ip, MemOperand(topaddr, limit - top));
+   }
+
+   // Calculate new top and bail out if new space is exhausted. Use result
+   // to calculate the new top. Object size may be in words so a shift is
+   // required to get the number of bytes.
+   if ((flags & SIZE_IN_WORDS) != 0) {
+     add(scratch2, result, Operand(object_size, LSL, kPointerSizeLog2), SetCC);
+   } else {
+     add(scratch2, result, Operand(object_size), SetCC);
+   }
+   b(cs, gc_required);
+   cmp(scratch2, Operand(ip));
+   b(hi, gc_required);
+
+   // Update allocation top. result temporarily holds the new top.
+   if (emit_debug_code()) {
+     tst(scratch2, Operand(kObjectAlignmentMask));
+     Check(eq, "Unaligned allocation in new space");
+   }
+   str(scratch2, MemOperand(topaddr));
+
+   // Tag object if requested.
+   if ((flags & TAG_OBJECT) != 0) {
+     add(result, result, Operand(kHeapObjectTag));
+   }
+ }
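Both AllocateInNewSpace variants implement bump-pointer allocation: load the current top, add the object size, bail to gc_required on carry or when the limit is exceeded, and otherwise store the new top back. A single-threaded C++ sketch of the same control flow (names and types are ours):

    #include <cassert>
    #include <cstdint>

    // Bump-pointer allocator over a fixed arena, mirroring the
    // add/carry-check/limit-check/store sequence above.
    struct NewSpace {
      uintptr_t top;
      uintptr_t limit;
    };

    // Returns the object address, or 0 when the caller must take the
    // gc_required path.
    uintptr_t Allocate(NewSpace* space, uintptr_t size) {
      uintptr_t new_top = space->top + size;
      if (new_top < space->top) return 0;    // b(cs, gc_required): overflow.
      if (new_top > space->limit) return 0;  // b(hi, gc_required): exhausted.
      uintptr_t result = space->top;
      space->top = new_top;                  // str(scratch2, MemOperand(topaddr))
      return result;
    }

    int main() {
      NewSpace space = {0x1000, 0x1040};
      assert(Allocate(&space, 0x20) == 0x1000);
      assert(Allocate(&space, 0x20) == 0x1020);
      assert(Allocate(&space, 0x20) == 0);  // Limit reached: GC required.
    }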
+
+
+ void MacroAssembler::UndoAllocationInNewSpace(Register object,
+                                               Register scratch) {
+   ExternalReference new_space_allocation_top =
+       ExternalReference::new_space_allocation_top_address(isolate());
+
+   // Make sure the object has no tag before resetting top.
+   and_(object, object, Operand(~kHeapObjectTagMask));
+ #ifdef DEBUG
+   // Check that the object un-allocated is below the current top.
+   mov(scratch, Operand(new_space_allocation_top));
+   ldr(scratch, MemOperand(scratch));
+   cmp(object, scratch);
+   Check(lt, "Undo allocation of non allocated memory");
+ #endif
+   // Write the address of the object to un-allocate as the current top.
+   mov(scratch, Operand(new_space_allocation_top));
+   str(object, MemOperand(scratch));
+ }
+
+
+ void MacroAssembler::AllocateTwoByteString(Register result,
+                                            Register length,
+                                            Register scratch1,
+                                            Register scratch2,
+                                            Register scratch3,
+                                            Label* gc_required) {
+   // Calculate the number of bytes needed for the characters in the string while
+   // observing object alignment.
+   ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
+   mov(scratch1, Operand(length, LSL, 1));  // Length in bytes, not chars.
+   add(scratch1, scratch1,
+       Operand(kObjectAlignmentMask + SeqTwoByteString::kHeaderSize));
+   and_(scratch1, scratch1, Operand(~kObjectAlignmentMask));
+
+   // Allocate two-byte string in new space.
+   AllocateInNewSpace(scratch1,
+                      result,
+                      scratch2,
+                      scratch3,
+                      gc_required,
+                      TAG_OBJECT);
+
+   // Set the map, length and hash field.
+   InitializeNewString(result,
+                       length,
+                       Heap::kStringMapRootIndex,
+                       scratch1,
+                       scratch2);
+ }
+
+
+ void MacroAssembler::AllocateAsciiString(Register result,
+                                          Register length,
+                                          Register scratch1,
+                                          Register scratch2,
+                                          Register scratch3,
+                                          Label* gc_required) {
+   // Calculate the number of bytes needed for the characters in the string while
+   // observing object alignment.
+   ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
+   ASSERT(kCharSize == 1);
+   add(scratch1, length,
+       Operand(kObjectAlignmentMask + SeqAsciiString::kHeaderSize));
+   and_(scratch1, scratch1, Operand(~kObjectAlignmentMask));
+
+   // Allocate ASCII string in new space.
+   AllocateInNewSpace(scratch1,
+                      result,
+                      scratch2,
+                      scratch3,
+                      gc_required,
+                      TAG_OBJECT);
+
+   // Set the map, length and hash field.
+   InitializeNewString(result,
+                       length,
+                       Heap::kAsciiStringMapRootIndex,
+                       scratch1,
+                       scratch2);
+ }
1556
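+ // The size computation in both allocators is the usual round-up trick
+ // (size + kObjectAlignmentMask) & ~kObjectAlignmentMask. Assuming the
+ // 8-byte object alignment of this 32-bit target (mask == 7), a raw size
+ // of 21 bytes rounds up to 24, the next multiple of 8.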
+
+
+ void MacroAssembler::AllocateTwoByteConsString(Register result,
+ Register length,
+ Register scratch1,
+ Register scratch2,
+ Label* gc_required) {
+ AllocateInNewSpace(ConsString::kSize,
+ result,
+ scratch1,
+ scratch2,
+ gc_required,
+ TAG_OBJECT);
+
+ InitializeNewString(result,
+ length,
+ Heap::kConsStringMapRootIndex,
+ scratch1,
+ scratch2);
+ }
+
+
+ void MacroAssembler::AllocateAsciiConsString(Register result,
+ Register length,
+ Register scratch1,
+ Register scratch2,
+ Label* gc_required) {
+ AllocateInNewSpace(ConsString::kSize,
+ result,
+ scratch1,
+ scratch2,
+ gc_required,
+ TAG_OBJECT);
+
+ InitializeNewString(result,
+ length,
+ Heap::kConsAsciiStringMapRootIndex,
+ scratch1,
+ scratch2);
+ }
+
+
+ void MacroAssembler::CompareObjectType(Register object,
+ Register map,
+ Register type_reg,
+ InstanceType type) {
+ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
+ CompareInstanceType(map, type_reg, type);
+ }
+
+
+ void MacroAssembler::CompareInstanceType(Register map,
+ Register type_reg,
+ InstanceType type) {
+ ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
+ cmp(type_reg, Operand(type));
+ }
+
+
+ void MacroAssembler::CompareRoot(Register obj,
+ Heap::RootListIndex index) {
+ ASSERT(!obj.is(ip));
+ LoadRoot(ip, index);
+ cmp(obj, ip);
+ }
+
+
+ void MacroAssembler::CheckMap(Register obj,
+ Register scratch,
+ Handle<Map> map,
+ Label* fail,
+ bool is_heap_object) {
+ if (!is_heap_object) {
+ JumpIfSmi(obj, fail);
+ }
+ ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
+ mov(ip, Operand(map));
+ cmp(scratch, ip);
+ b(ne, fail);
+ }
+
+
+ void MacroAssembler::CheckMap(Register obj,
+ Register scratch,
+ Heap::RootListIndex index,
+ Label* fail,
+ bool is_heap_object) {
+ if (!is_heap_object) {
+ JumpIfSmi(obj, fail);
+ }
+ ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
+ LoadRoot(ip, index);
+ cmp(scratch, ip);
+ b(ne, fail);
+ }
+
+
+ void MacroAssembler::TryGetFunctionPrototype(Register function,
+ Register result,
+ Register scratch,
+ Label* miss) {
+ // Check that the receiver isn't a smi.
+ JumpIfSmi(function, miss);
+
+ // Check that the function really is a function. Load map into result reg.
+ CompareObjectType(function, result, scratch, JS_FUNCTION_TYPE);
+ b(ne, miss);
+
+ // Make sure that the function has an instance prototype.
+ Label non_instance;
+ ldrb(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
+ tst(scratch, Operand(1 << Map::kHasNonInstancePrototype));
+ b(ne, &non_instance);
+
+ // Get the prototype or initial map from the function.
+ ldr(result,
+ FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
+
+ // If the prototype or initial map is the hole, don't return it and
+ // simply miss the cache instead. This will allow us to allocate a
+ // prototype object on-demand in the runtime system.
+ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+ cmp(result, ip);
+ b(eq, miss);
+
+ // If the function does not have an initial map, we're done.
+ Label done;
+ CompareObjectType(result, scratch, scratch, MAP_TYPE);
+ b(ne, &done);
+
+ // Get the prototype from the initial map.
+ ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));
+ jmp(&done);
+
+ // Non-instance prototype: Fetch prototype from constructor field
+ // in initial map.
+ bind(&non_instance);
+ ldr(result, FieldMemOperand(result, Map::kConstructorOffset));
+
+ // All done.
+ bind(&done);
+ }
+
+
+ void MacroAssembler::CallStub(CodeStub* stub, Condition cond) {
+ ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs.
+ Call(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
+ }
+
+
+ void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) {
+ ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs.
+ Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
+ }
+
+
+ MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub, Condition cond) {
+ ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs.
+ Object* result;
+ { MaybeObject* maybe_result = stub->TryGetCode();
+ if (!maybe_result->ToObject(&result)) return maybe_result;
+ }
+ Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
+ return result;
+ }
+
+
+ static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
+ return ref0.address() - ref1.address();
+ }
+
+
+ MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(
+ ExternalReference function, int stack_space) {
+ ExternalReference next_address =
+ ExternalReference::handle_scope_next_address();
+ const int kNextOffset = 0;
+ const int kLimitOffset = AddressOffset(
+ ExternalReference::handle_scope_limit_address(),
+ next_address);
+ const int kLevelOffset = AddressOffset(
+ ExternalReference::handle_scope_level_address(),
+ next_address);
+
+ // Allocate HandleScope in callee-save registers.
+ mov(r7, Operand(next_address));
+ ldr(r4, MemOperand(r7, kNextOffset));
+ ldr(r5, MemOperand(r7, kLimitOffset));
+ ldr(r6, MemOperand(r7, kLevelOffset));
+ add(r6, r6, Operand(1));
+ str(r6, MemOperand(r7, kLevelOffset));
+
+ // The native call returns to the DirectCEntry stub, which redirects to
+ // the return address pushed on the stack (it could have moved after a
+ // GC). The DirectCEntry stub itself is generated early and never moves.
+ DirectCEntryStub stub;
+ stub.GenerateCall(this, function);
+
+ Label promote_scheduled_exception;
+ Label delete_allocated_handles;
+ Label leave_exit_frame;
+
+ // If result is non-zero, dereference it to get the result value;
+ // otherwise set it to undefined.
+ cmp(r0, Operand(0));
+ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
+ ldr(r0, MemOperand(r0), ne);
+
+ // No more valid handles (the result handle was the last one). Restore
+ // the previous handle scope.
+ str(r4, MemOperand(r7, kNextOffset));
+ if (emit_debug_code()) {
+ ldr(r1, MemOperand(r7, kLevelOffset));
+ cmp(r1, r6);
+ Check(eq, "Unexpected level after return from api call");
+ }
+ sub(r6, r6, Operand(1));
+ str(r6, MemOperand(r7, kLevelOffset));
+ ldr(ip, MemOperand(r7, kLimitOffset));
+ cmp(r5, ip);
+ b(ne, &delete_allocated_handles);
+
+ // Check if the function scheduled an exception.
+ bind(&leave_exit_frame);
+ LoadRoot(r4, Heap::kTheHoleValueRootIndex);
+ mov(ip, Operand(ExternalReference::scheduled_exception_address(isolate())));
+ ldr(r5, MemOperand(ip));
+ cmp(r4, r5);
+ b(ne, &promote_scheduled_exception);
+
+ // LeaveExitFrame expects unwind space to be in a register.
+ mov(r4, Operand(stack_space));
+ LeaveExitFrame(false, r4);
+ mov(pc, lr);
+
+ bind(&promote_scheduled_exception);
+ MaybeObject* result
+ = TryTailCallExternalReference(
+ ExternalReference(Runtime::kPromoteScheduledException, isolate()),
+ 0,
+ 1);
+ if (result->IsFailure()) {
+ return result;
+ }
+
+ // HandleScope limit has changed. Delete allocated extensions.
+ bind(&delete_allocated_handles);
+ str(r5, MemOperand(r7, kLimitOffset));
+ mov(r4, r0);
+ PrepareCallCFunction(1, r5);
+ mov(r0, Operand(ExternalReference::isolate_address()));
+ CallCFunction(
+ ExternalReference::delete_handle_scope_extensions(isolate()), 1);
+ mov(r0, r4);
+ jmp(&leave_exit_frame);
+
+ return result;
+ }
+
+
+ void MacroAssembler::IllegalOperation(int num_arguments) {
+ if (num_arguments > 0) {
+ add(sp, sp, Operand(num_arguments * kPointerSize));
+ }
+ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
+ }
+
+
+ void MacroAssembler::IndexFromHash(Register hash, Register index) {
+ // If the hash field contains an array index, pick it out. The assert
+ // checks that the constants for the maximum number of digits for an array
+ // index cached in the hash field and the number of bits reserved for it
+ // do not conflict.
+ ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
+ (1 << String::kArrayIndexValueBits));
+ // We want the smi-tagged index in the index register. kArrayIndexValueMask
+ // has zeros in the low kHashShift bits.
+ STATIC_ASSERT(kSmiTag == 0);
+ Ubfx(hash, hash, String::kHashShift, String::kArrayIndexValueBits);
+ mov(index, Operand(hash, LSL, kSmiTagSize));
+ }
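+ // For illustration: with kSmiTagSize == 1 on this 32-bit target, a hash
+ // word whose cached-index field holds 42 yields the smi 84 (42 << 1) in
+ // index, after Ubfx has isolated the kArrayIndexValueBits-wide field
+ // starting at kHashShift.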
+
+
+ void MacroAssembler::IntegerToDoubleConversionWithVFP3(Register inReg,
+ Register outHighReg,
+ Register outLowReg) {
+ // ARMv7 VFP3 instructions to implement integer to double conversion.
+ mov(r7, Operand(inReg, ASR, kSmiTagSize));
+ vmov(s15, r7);
+ vcvt_f64_s32(d7, s15);
+ vmov(outLowReg, outHighReg, d7);
+ }
+
+
+ void MacroAssembler::ObjectToDoubleVFPRegister(Register object,
+ DwVfpRegister result,
+ Register scratch1,
+ Register scratch2,
+ Register heap_number_map,
+ SwVfpRegister scratch3,
+ Label* not_number,
+ ObjectToDoubleFlags flags) {
+ Label done;
+ if ((flags & OBJECT_NOT_SMI) == 0) {
+ Label not_smi;
+ JumpIfNotSmi(object, &not_smi);
+ // Remove smi tag and convert to double.
+ mov(scratch1, Operand(object, ASR, kSmiTagSize));
+ vmov(scratch3, scratch1);
+ vcvt_f64_s32(result, scratch3);
+ b(&done);
+ bind(&not_smi);
+ }
+ // Check for heap number and load double value from it.
+ ldr(scratch1, FieldMemOperand(object, HeapObject::kMapOffset));
+ sub(scratch2, object, Operand(kHeapObjectTag));
+ cmp(scratch1, heap_number_map);
+ b(ne, not_number);
+ if ((flags & AVOID_NANS_AND_INFINITIES) != 0) {
+ // If the exponent is all ones the number is either a NaN or +/-Infinity.
+ ldr(scratch1, FieldMemOperand(object, HeapNumber::kExponentOffset));
+ Sbfx(scratch1,
+ scratch1,
+ HeapNumber::kExponentShift,
+ HeapNumber::kExponentBits);
+ // An all-ones value sign-extends to -1.
+ cmp(scratch1, Operand(-1));
+ b(eq, not_number);
+ }
+ vldr(result, scratch2, HeapNumber::kValueOffset);
+ bind(&done);
+ }
+
+
+ void MacroAssembler::SmiToDoubleVFPRegister(Register smi,
+ DwVfpRegister value,
+ Register scratch1,
+ SwVfpRegister scratch2) {
+ mov(scratch1, Operand(smi, ASR, kSmiTagSize));
+ vmov(scratch2, scratch1);
+ vcvt_f64_s32(value, scratch2);
+ }
+
+
+ // Tries to get a signed int32 out of a double precision floating point heap
+ // number. Rounds towards 0. Branches to 'not_int32' if the double is out of
+ // the signed 32-bit integer range.
+ void MacroAssembler::ConvertToInt32(Register source,
+ Register dest,
+ Register scratch,
+ Register scratch2,
+ DwVfpRegister double_scratch,
+ Label *not_int32) {
+ if (CpuFeatures::IsSupported(VFP3)) {
+ CpuFeatures::Scope scope(VFP3);
+ sub(scratch, source, Operand(kHeapObjectTag));
+ vldr(double_scratch, scratch, HeapNumber::kValueOffset);
+ vcvt_s32_f64(double_scratch.low(), double_scratch);
+ vmov(dest, double_scratch.low());
+ // The signed vcvt instruction will saturate to the minimum (0x80000000)
+ // or maximum (0x7fffffff) signed 32-bit integer when the double is out
+ // of range. When subtracting one, the minimum signed integer becomes
+ // the maximum signed integer.
+ sub(scratch, dest, Operand(1));
+ cmp(scratch, Operand(LONG_MAX - 1));
+ // If equal then dest was LONG_MAX, if greater dest was LONG_MIN.
+ b(ge, not_int32);
+ } else {
+ // This code is faster for doubles that are in the ranges -0x7fffffff to
+ // -0x40000000 or 0x40000000 to 0x7fffffff. This corresponds almost
+ // exactly to the range of signed int32 values that are not Smis. Jumps
+ // to the label 'not_int32' if the double isn't in the range
+ // -0x80000000.0 to 0x80000000.0 (excluding the endpoints).
+ Label right_exponent, done;
+ // Get exponent word.
+ ldr(scratch, FieldMemOperand(source, HeapNumber::kExponentOffset));
+ // Get exponent alone in scratch2.
+ Ubfx(scratch2,
+ scratch,
+ HeapNumber::kExponentShift,
+ HeapNumber::kExponentBits);
+ // Load dest with zero. We use this either for the final shift or
+ // for the answer.
+ mov(dest, Operand(0, RelocInfo::NONE));
+ // Check whether the exponent matches a 32-bit signed int that is not a
+ // Smi. A non-Smi integer is 1.xxx * 2^30 so the exponent is 30 (biased).
+ // This is the exponent that we are fastest at and also the highest
+ // exponent we can handle here.
+ const uint32_t non_smi_exponent = HeapNumber::kExponentBias + 30;
+ // The non_smi_exponent, 0x41d, is too big for ARM's immediate field so we
+ // split it up to avoid a constant pool entry. You can't do that in general
+ // for cmp because of the overflow flag, but we know the exponent is in the
+ // range 0-2047 so there is no overflow.
+ int fudge_factor = 0x400;
+ sub(scratch2, scratch2, Operand(fudge_factor));
+ cmp(scratch2, Operand(non_smi_exponent - fudge_factor));
+ // If we have a match of the int32-but-not-Smi exponent then skip some
+ // logic.
+ b(eq, &right_exponent);
+ // If the exponent is higher than that then go to the slow case. This
+ // catches numbers that don't fit in a signed int32, infinities and NaNs.
+ b(gt, not_int32);
+
+ // We know the exponent is smaller than 30 (biased). If it is less than
+ // 0 (biased) then the number is smaller in magnitude than 1.0 * 2^0, i.e.
+ // it rounds to zero.
+ const uint32_t zero_exponent = HeapNumber::kExponentBias + 0;
+ sub(scratch2, scratch2, Operand(zero_exponent - fudge_factor), SetCC);
+ // Dest already has a Smi zero.
+ b(lt, &done);
+
+ // We have an exponent between 0 and 30 in scratch2. Subtract from 30 to
+ // get how much to shift down.
+ rsb(dest, scratch2, Operand(30));
+
+ bind(&right_exponent);
+ // Get the top bits of the mantissa.
+ and_(scratch2, scratch, Operand(HeapNumber::kMantissaMask));
+ // Put back the implicit 1.
+ orr(scratch2, scratch2, Operand(1 << HeapNumber::kExponentShift));
+ // Shift up the mantissa bits to take up the space the exponent used to
+ // take. We just ORed in the implicit bit so that took care of one, and
+ // we want to leave the sign bit 0 so we subtract 2 bits from the shift
+ // distance.
+ const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2;
+ mov(scratch2, Operand(scratch2, LSL, shift_distance));
+ // Put sign in zero flag.
+ tst(scratch, Operand(HeapNumber::kSignMask));
+ // Get the second half of the double. For some exponents we don't
+ // actually need this because the bits get shifted out again, but
+ // it's probably slower to test than just to do it.
+ ldr(scratch, FieldMemOperand(source, HeapNumber::kMantissaOffset));
+ // Shift down 22 bits to get the last 10 bits.
+ orr(scratch, scratch2, Operand(scratch, LSR, 32 - shift_distance));
+ // Move down according to the exponent.
+ mov(dest, Operand(scratch, LSR, dest));
+ // Fix sign if sign bit was set.
+ rsb(dest, dest, Operand(0, RelocInfo::NONE), LeaveCC, ne);
+ bind(&done);
+ }
+ }
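+ // Worked example of the saturation check in the VFP3 path above: when the
+ // double is out of range, vcvt yields 0x80000000 or 0x7fffffff. After
+ // subtracting one, both cases compare >= LONG_MAX - 1 (0x7ffffffe), so a
+ // single 'ge' branch catches both saturated results, while an in-range
+ // value such as 42 becomes 41, compares below, and falls through.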
+
+
+ void MacroAssembler::EmitVFPTruncate(VFPRoundingMode rounding_mode,
+ SwVfpRegister result,
+ DwVfpRegister double_input,
+ Register scratch1,
+ Register scratch2,
+ CheckForInexactConversion check_inexact) {
+ ASSERT(CpuFeatures::IsSupported(VFP3));
+ CpuFeatures::Scope scope(VFP3);
+ Register prev_fpscr = scratch1;
+ Register scratch = scratch2;
+
+ int32_t check_inexact_conversion =
+ (check_inexact == kCheckForInexactConversion) ? kVFPInexactExceptionBit : 0;
+
+ // Set custom FPSCR:
+ // - Set rounding mode.
+ // - Clear VFP cumulative exception flags.
+ // - Make sure Flush-to-zero mode control bit is unset.
+ vmrs(prev_fpscr);
+ bic(scratch,
+ prev_fpscr,
+ Operand(kVFPExceptionMask |
+ check_inexact_conversion |
+ kVFPRoundingModeMask |
+ kVFPFlushToZeroMask));
+ // 'Round To Nearest' is encoded by 0b00 so no bits need to be set.
+ if (rounding_mode != kRoundToNearest) {
+ orr(scratch, scratch, Operand(rounding_mode));
+ }
+ vmsr(scratch);
+
+ // Convert the argument to an integer.
+ vcvt_s32_f64(result,
+ double_input,
+ (rounding_mode == kRoundToZero) ? kDefaultRoundToZero
+ : kFPSCRRounding);
+
+ // Retrieve FPSCR.
+ vmrs(scratch);
+ // Restore FPSCR.
+ vmsr(prev_fpscr);
+ // Check for VFP exceptions.
+ tst(scratch, Operand(kVFPExceptionMask | check_inexact_conversion));
+ }
+
+
+ void MacroAssembler::EmitOutOfInt32RangeTruncate(Register result,
+ Register input_high,
+ Register input_low,
+ Register scratch) {
+ Label done, normal_exponent, restore_sign;
+
+ // Extract the biased exponent in result.
+ Ubfx(result,
+ input_high,
+ HeapNumber::kExponentShift,
+ HeapNumber::kExponentBits);
+
+ // Check for Infinity and NaNs, which should return 0.
+ cmp(result, Operand(HeapNumber::kExponentMask));
+ mov(result, Operand(0), LeaveCC, eq);
+ b(eq, &done);
+
+ // Express exponent as delta to (number of mantissa bits + 31).
+ sub(result,
+ result,
+ Operand(HeapNumber::kExponentBias + HeapNumber::kMantissaBits + 31),
+ SetCC);
+
+ // If the delta is strictly positive, all bits would be shifted away,
+ // which means that we can return 0.
+ b(le, &normal_exponent);
+ mov(result, Operand(0));
+ b(&done);
+
+ bind(&normal_exponent);
+ const int kShiftBase = HeapNumber::kNonMantissaBitsInTopWord - 1;
+ // Calculate shift.
+ add(scratch, result, Operand(kShiftBase + HeapNumber::kMantissaBits), SetCC);
+
+ // Save the sign.
+ Register sign = result;
+ result = no_reg;
+ and_(sign, input_high, Operand(HeapNumber::kSignMask));
+
+ // Set the implicit 1 before the mantissa part in input_high.
+ orr(input_high,
+ input_high,
+ Operand(1 << HeapNumber::kMantissaBitsInTopWord));
+ // Shift the mantissa bits to the correct position.
+ // We don't need to clear non-mantissa bits as they will be shifted away.
+ // If they weren't, it would mean that the answer is in the 32-bit range.
+ mov(input_high, Operand(input_high, LSL, scratch));
+
+ // Replace the shifted bits with bits from the lower mantissa word.
+ Label pos_shift, shift_done;
+ rsb(scratch, scratch, Operand(32), SetCC);
+ b(&pos_shift, ge);
+
+ // Negate scratch.
+ rsb(scratch, scratch, Operand(0));
+ mov(input_low, Operand(input_low, LSL, scratch));
+ b(&shift_done);
+
+ bind(&pos_shift);
+ mov(input_low, Operand(input_low, LSR, scratch));
+
+ bind(&shift_done);
+ orr(input_high, input_high, Operand(input_low));
+ // Restore sign if necessary.
+ cmp(sign, Operand(0));
+ result = sign;
+ sign = no_reg;
+ rsb(result, input_high, Operand(0), LeaveCC, ne);
+ mov(result, input_high, LeaveCC, eq);
+ bind(&done);
+ }
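+ // A concrete check of the shift arithmetic above, assuming the usual
+ // IEEE-754 layout: for the double 2^31 the biased exponent is 1054, so
+ // the delta computed into result is 1054 - (1023 + 52 + 31) = -52, and
+ // the shift becomes -52 + kShiftBase + kMantissaBits = 11. Shifting
+ // input_high (implicit mantissa bit set at bit 20) left by 11 puts that
+ // bit at bit 31, giving 0x80000000 -- the ECMA ToInt32 result for 2^31.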
+
+
+ void MacroAssembler::EmitECMATruncate(Register result,
+ DwVfpRegister double_input,
+ SwVfpRegister single_scratch,
+ Register scratch,
+ Register input_high,
+ Register input_low) {
+ CpuFeatures::Scope scope(VFP3);
+ ASSERT(!input_high.is(result));
+ ASSERT(!input_low.is(result));
+ ASSERT(!input_low.is(input_high));
+ ASSERT(!scratch.is(result) &&
+ !scratch.is(input_high) &&
+ !scratch.is(input_low));
+ ASSERT(!single_scratch.is(double_input.low()) &&
+ !single_scratch.is(double_input.high()));
+
+ Label done;
+
+ // Clear cumulative exception flags.
+ ClearFPSCRBits(kVFPExceptionMask, scratch);
+ // Try a conversion to a signed integer.
+ vcvt_s32_f64(single_scratch, double_input);
+ vmov(result, single_scratch);
+ // Retrieve the FPSCR.
+ vmrs(scratch);
+ // Check for overflow and NaNs.
+ tst(scratch, Operand(kVFPOverflowExceptionBit |
+ kVFPUnderflowExceptionBit |
+ kVFPInvalidOpExceptionBit));
+ // If we had no exceptions we are done.
+ b(eq, &done);
+
+ // Load the double value and perform a manual truncation.
+ vmov(input_low, input_high, double_input);
+ EmitOutOfInt32RangeTruncate(result,
+ input_high,
+ input_low,
+ scratch);
+ bind(&done);
+ }
+
+
+ void MacroAssembler::GetLeastBitsFromSmi(Register dst,
+ Register src,
+ int num_least_bits) {
+ if (CpuFeatures::IsSupported(ARMv7)) {
+ ubfx(dst, src, kSmiTagSize, num_least_bits);
+ } else {
+ mov(dst, Operand(src, ASR, kSmiTagSize));
+ and_(dst, dst, Operand((1 << num_least_bits) - 1));
+ }
+ }
+
+
+ void MacroAssembler::GetLeastBitsFromInt32(Register dst,
+ Register src,
+ int num_least_bits) {
+ and_(dst, src, Operand((1 << num_least_bits) - 1));
+ }
+
+
+ void MacroAssembler::CallRuntime(const Runtime::Function* f,
+ int num_arguments) {
+ // All parameters are on the stack. r0 has the return value after the call.
+
+ // If the expected number of arguments of the runtime function is
+ // constant, we check that the actual number of arguments matches the
+ // expectation.
+ if (f->nargs >= 0 && f->nargs != num_arguments) {
+ IllegalOperation(num_arguments);
+ return;
+ }
+
+ // TODO(1236192): Most runtime routines don't need the number of
+ // arguments passed in because it is constant. At some point we
+ // should remove this need and make the runtime routine entry code
+ // smarter.
+ mov(r0, Operand(num_arguments));
+ mov(r1, Operand(ExternalReference(f, isolate())));
+ CEntryStub stub(1);
+ CallStub(&stub);
+ }
+
+
+ void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) {
+ CallRuntime(Runtime::FunctionForId(fid), num_arguments);
+ }
+
+
+ void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
+ const Runtime::Function* function = Runtime::FunctionForId(id);
+ mov(r0, Operand(function->nargs));
+ mov(r1, Operand(ExternalReference(function, isolate())));
+ CEntryStub stub(1);
+ stub.SaveDoubles();
+ CallStub(&stub);
+ }
+
+
+ void MacroAssembler::CallExternalReference(const ExternalReference& ext,
+ int num_arguments) {
+ mov(r0, Operand(num_arguments));
+ mov(r1, Operand(ext));
+
+ CEntryStub stub(1);
+ CallStub(&stub);
+ }
+
+
+ void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
+ int num_arguments,
+ int result_size) {
+ // TODO(1236192): Most runtime routines don't need the number of
+ // arguments passed in because it is constant. At some point we
+ // should remove this need and make the runtime routine entry code
+ // smarter.
+ mov(r0, Operand(num_arguments));
+ JumpToExternalReference(ext);
+ }
+
+
+ MaybeObject* MacroAssembler::TryTailCallExternalReference(
+ const ExternalReference& ext, int num_arguments, int result_size) {
+ // TODO(1236192): Most runtime routines don't need the number of
+ // arguments passed in because it is constant. At some point we
+ // should remove this need and make the runtime routine entry code
+ // smarter.
+ mov(r0, Operand(num_arguments));
+ return TryJumpToExternalReference(ext);
+ }
+
+
+ void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
+ int num_arguments,
+ int result_size) {
+ TailCallExternalReference(ExternalReference(fid, isolate()),
+ num_arguments,
+ result_size);
+ }
+
+
+ void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) {
+ #if defined(__thumb__)
+ // Thumb mode builtin.
+ ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1);
+ #endif
+ mov(r1, Operand(builtin));
+ CEntryStub stub(1);
+ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
+ }
+
+
+ MaybeObject* MacroAssembler::TryJumpToExternalReference(
+ const ExternalReference& builtin) {
+ #if defined(__thumb__)
+ // Thumb mode builtin.
+ ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1);
+ #endif
+ mov(r1, Operand(builtin));
+ CEntryStub stub(1);
+ return TryTailCallStub(&stub);
+ }
+
+
+ void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
+ InvokeJSFlags flags,
+ CallWrapper* call_wrapper) {
+ GetBuiltinEntry(r2, id);
+ if (flags == CALL_JS) {
+ if (call_wrapper != NULL) call_wrapper->BeforeCall(CallSize(r2));
+ Call(r2);
+ if (call_wrapper != NULL) call_wrapper->AfterCall();
+ } else {
+ ASSERT(flags == JUMP_JS);
+ Jump(r2);
+ }
+ }
+
+
+ void MacroAssembler::GetBuiltinFunction(Register target,
+ Builtins::JavaScript id) {
+ // Load the builtins object into target register.
+ ldr(target, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ ldr(target, FieldMemOperand(target, GlobalObject::kBuiltinsOffset));
+ // Load the JavaScript builtin function from the builtins object.
+ ldr(target, FieldMemOperand(target,
+ JSBuiltinsObject::OffsetOfFunctionWithId(id)));
+ }
+
+
+ void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
+ ASSERT(!target.is(r1));
+ GetBuiltinFunction(r1, id);
+ // Load the code entry point from the builtins object.
+ ldr(target, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
+ }
+
+
+ void MacroAssembler::SetCounter(StatsCounter* counter, int value,
+ Register scratch1, Register scratch2) {
+ if (FLAG_native_code_counters && counter->Enabled()) {
+ mov(scratch1, Operand(value));
+ mov(scratch2, Operand(ExternalReference(counter)));
+ str(scratch1, MemOperand(scratch2));
+ }
+ }
+
+
+ void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
+ Register scratch1, Register scratch2) {
+ ASSERT(value > 0);
+ if (FLAG_native_code_counters && counter->Enabled()) {
+ mov(scratch2, Operand(ExternalReference(counter)));
+ ldr(scratch1, MemOperand(scratch2));
+ add(scratch1, scratch1, Operand(value));
+ str(scratch1, MemOperand(scratch2));
+ }
+ }
+
+
+ void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
+ Register scratch1, Register scratch2) {
+ ASSERT(value > 0);
+ if (FLAG_native_code_counters && counter->Enabled()) {
+ mov(scratch2, Operand(ExternalReference(counter)));
+ ldr(scratch1, MemOperand(scratch2));
+ sub(scratch1, scratch1, Operand(value));
+ str(scratch1, MemOperand(scratch2));
+ }
+ }
+
+
+ void MacroAssembler::Assert(Condition cond, const char* msg) {
+ if (emit_debug_code())
+ Check(cond, msg);
+ }
+
+
+ void MacroAssembler::AssertRegisterIsRoot(Register reg,
+ Heap::RootListIndex index) {
+ if (emit_debug_code()) {
+ LoadRoot(ip, index);
+ cmp(reg, ip);
+ Check(eq, "Register did not match expected root");
+ }
+ }
+
+
+ void MacroAssembler::AssertFastElements(Register elements) {
+ if (emit_debug_code()) {
+ ASSERT(!elements.is(ip));
+ Label ok;
+ push(elements);
+ ldr(elements, FieldMemOperand(elements, HeapObject::kMapOffset));
+ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
+ cmp(elements, ip);
+ b(eq, &ok);
+ LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
+ cmp(elements, ip);
+ b(eq, &ok);
+ Abort("JSObject with fast elements map has slow elements");
+ bind(&ok);
+ pop(elements);
+ }
+ }
+
+
+ void MacroAssembler::Check(Condition cond, const char* msg) {
+ Label L;
+ b(cond, &L);
+ Abort(msg);
+ // Will not return here.
+ bind(&L);
+ }
+
+
+ void MacroAssembler::Abort(const char* msg) {
+ Label abort_start;
+ bind(&abort_start);
+ // We want to pass the msg string like a smi to avoid GC problems;
+ // however, msg is not guaranteed to be aligned properly. Instead, we
+ // pass an aligned pointer that is a proper v8 smi, but also pass the
+ // alignment difference from the real pointer as a smi.
+ intptr_t p1 = reinterpret_cast<intptr_t>(msg);
+ intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
+ ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
+ #ifdef DEBUG
+ if (msg != NULL) {
+ RecordComment("Abort message: ");
+ RecordComment(msg);
+ }
+ #endif
+ // Disable stub call restrictions to always allow calls to abort.
+ AllowStubCallsScope allow_scope(this, true);
+
+ mov(r0, Operand(p0));
+ push(r0);
+ mov(r0, Operand(Smi::FromInt(p1 - p0)));
+ push(r0);
+ CallRuntime(Runtime::kAbort, 2);
+ // Will not return here.
+ if (is_const_pool_blocked()) {
+ // If the calling code cares about the exact number of
+ // instructions generated, we insert padding here to keep the size
+ // of the Abort macro constant.
+ static const int kExpectedAbortInstructions = 10;
+ int abort_instructions = InstructionsGeneratedSince(&abort_start);
+ ASSERT(abort_instructions <= kExpectedAbortInstructions);
+ while (abort_instructions++ < kExpectedAbortInstructions) {
+ nop();
+ }
+ }
+ }
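+ // The arithmetic above only strips the low tag bit: with kSmiTag == 0
+ // and kSmiTagMask == 1, a message pointer such as 0x1235 becomes
+ // p0 == 0x1234 (a valid-looking smi) and the difference 1 is passed
+ // separately as Smi::FromInt(1), so the runtime can reconstruct the
+ // exact char* without the GC ever seeing an unaligned "pointer".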
+
+
+ void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
+ if (context_chain_length > 0) {
+ // Move up the chain of contexts to the context containing the slot.
+ ldr(dst, MemOperand(cp, Context::SlotOffset(Context::CLOSURE_INDEX)));
+ // Load the function context (which is the incoming, outer context).
+ ldr(dst, FieldMemOperand(dst, JSFunction::kContextOffset));
+ for (int i = 1; i < context_chain_length; i++) {
+ ldr(dst, MemOperand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
+ ldr(dst, FieldMemOperand(dst, JSFunction::kContextOffset));
+ }
+ } else {
+ // Slot is in the current function context. Move it into the
+ // destination register in case we store into it (the write barrier
+ // cannot be allowed to destroy the context in cp).
+ mov(dst, cp);
+ }
+
+ // We should not have found a 'with' context by walking the context chain
+ // (i.e., the static scope chain and runtime context chain do not agree).
+ // A variable occurring in such a scope should have slot type LOOKUP and
+ // not CONTEXT.
+ if (emit_debug_code()) {
+ ldr(ip, MemOperand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
+ cmp(dst, ip);
+ Check(eq, "Yo dawg, I heard you liked function contexts "
+ "so I put function contexts in all your contexts");
+ }
+ }
+
+
+ void MacroAssembler::LoadGlobalFunction(int index, Register function) {
+ // Load the global or builtins object from the current context.
+ ldr(function, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ // Load the global context from the global or builtins object.
+ ldr(function, FieldMemOperand(function,
+ GlobalObject::kGlobalContextOffset));
+ // Load the function from the global context.
+ ldr(function, MemOperand(function, Context::SlotOffset(index)));
+ }
+
+
+ void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
+ Register map,
+ Register scratch) {
+ // Load the initial map. The global functions all have initial maps.
+ ldr(map, FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
+ if (emit_debug_code()) {
+ Label ok, fail;
+ CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, false);
+ b(&ok);
+ bind(&fail);
+ Abort("Global functions must have initial map");
+ bind(&ok);
+ }
+ }
+
+
+ void MacroAssembler::JumpIfNotPowerOfTwoOrZero(
+ Register reg,
+ Register scratch,
+ Label* not_power_of_two_or_zero) {
+ sub(scratch, reg, Operand(1), SetCC);
+ b(mi, not_power_of_two_or_zero);
+ tst(scratch, reg);
+ b(ne, not_power_of_two_or_zero);
+ }
+
+
+ void MacroAssembler::JumpIfNotPowerOfTwoOrZeroAndNeg(
+ Register reg,
+ Register scratch,
+ Label* zero_and_neg,
+ Label* not_power_of_two) {
+ sub(scratch, reg, Operand(1), SetCC);
+ b(mi, zero_and_neg);
+ tst(scratch, reg);
+ b(ne, not_power_of_two);
+ }
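+ // Both helpers rely on the classic identity that x & (x - 1) clears the
+ // lowest set bit: the result is zero exactly when x is zero or a power
+ // of two (8 & 7 == 0, while 6 & 5 == 4), and the SetCC on the
+ // subtraction flags x == 0 (and negative values) via 'mi'.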
+
+
+ void MacroAssembler::JumpIfNotBothSmi(Register reg1,
+ Register reg2,
+ Label* on_not_both_smi) {
+ STATIC_ASSERT(kSmiTag == 0);
+ tst(reg1, Operand(kSmiTagMask));
+ tst(reg2, Operand(kSmiTagMask), eq);
+ b(ne, on_not_both_smi);
+ }
+
+
+ void MacroAssembler::JumpIfEitherSmi(Register reg1,
+ Register reg2,
+ Label* on_either_smi) {
+ STATIC_ASSERT(kSmiTag == 0);
+ tst(reg1, Operand(kSmiTagMask));
+ tst(reg2, Operand(kSmiTagMask), ne);
+ b(eq, on_either_smi);
+ }
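+ // Note how both checks chain ARM conditional execution instead of
+ // branching: the second tst is predicated on the outcome of the first,
+ // so the flags reaching the final branch summarize both registers. In
+ // JumpIfNotBothSmi the second tst only runs when reg1 was a smi (eq);
+ // in JumpIfEitherSmi it only runs when reg1 was not a smi (ne).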
+
+
+ void MacroAssembler::AbortIfSmi(Register object) {
+ STATIC_ASSERT(kSmiTag == 0);
+ tst(object, Operand(kSmiTagMask));
+ Assert(ne, "Operand is a smi");
+ }
+
+
+ void MacroAssembler::AbortIfNotSmi(Register object) {
+ STATIC_ASSERT(kSmiTag == 0);
+ tst(object, Operand(kSmiTagMask));
+ Assert(eq, "Operand is not smi");
+ }
+
+
+ void MacroAssembler::AbortIfNotString(Register object) {
+ STATIC_ASSERT(kSmiTag == 0);
+ tst(object, Operand(kSmiTagMask));
+ Assert(ne, "Operand is not a string");
+ push(object);
+ ldr(object, FieldMemOperand(object, HeapObject::kMapOffset));
+ CompareInstanceType(object, object, FIRST_NONSTRING_TYPE);
+ pop(object);
+ Assert(lo, "Operand is not a string");
+ }
+
+
+ void MacroAssembler::AbortIfNotRootValue(Register src,
+ Heap::RootListIndex root_value_index,
+ const char* message) {
+ CompareRoot(src, root_value_index);
+ Assert(eq, message);
+ }
+
+
+ void MacroAssembler::JumpIfNotHeapNumber(Register object,
+ Register heap_number_map,
+ Register scratch,
+ Label* on_not_heap_number) {
+ ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
+ AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
+ cmp(scratch, heap_number_map);
+ b(ne, on_not_heap_number);
+ }
+
+
+ void MacroAssembler::JumpIfNonSmisNotBothSequentialAsciiStrings(
+ Register first,
+ Register second,
+ Register scratch1,
+ Register scratch2,
+ Label* failure) {
+ // Test that both first and second are sequential ASCII strings.
+ // Assume that they are non-smis.
+ ldr(scratch1, FieldMemOperand(first, HeapObject::kMapOffset));
+ ldr(scratch2, FieldMemOperand(second, HeapObject::kMapOffset));
+ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
+ ldrb(scratch2, FieldMemOperand(scratch2, Map::kInstanceTypeOffset));
+
+ JumpIfBothInstanceTypesAreNotSequentialAscii(scratch1,
+ scratch2,
+ scratch1,
+ scratch2,
+ failure);
+ }
+
+ void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first,
+ Register second,
+ Register scratch1,
+ Register scratch2,
+ Label* failure) {
+ // Check that neither is a smi.
+ STATIC_ASSERT(kSmiTag == 0);
+ and_(scratch1, first, Operand(second));
+ tst(scratch1, Operand(kSmiTagMask));
+ b(eq, failure);
+ JumpIfNonSmisNotBothSequentialAsciiStrings(first,
+ second,
+ scratch1,
+ scratch2,
+ failure);
+ }
+
+
+ // Allocates a heap number or jumps to the gc_required label if the young
+ // space is full and a scavenge is needed.
+ void MacroAssembler::AllocateHeapNumber(Register result,
+ Register scratch1,
+ Register scratch2,
+ Register heap_number_map,
+ Label* gc_required) {
+ // Allocate an object in the heap for the heap number and tag it as a heap
+ // object.
+ AllocateInNewSpace(HeapNumber::kSize,
+ result,
+ scratch1,
+ scratch2,
+ gc_required,
+ TAG_OBJECT);
+
+ // Store heap number map in the allocated object.
+ AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
+ str(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset));
+ }
+
+
+ void MacroAssembler::AllocateHeapNumberWithValue(Register result,
+ DwVfpRegister value,
+ Register scratch1,
+ Register scratch2,
+ Register heap_number_map,
+ Label* gc_required) {
+ AllocateHeapNumber(result, scratch1, scratch2, heap_number_map, gc_required);
+ sub(scratch1, result, Operand(kHeapObjectTag));
+ vstr(value, scratch1, HeapNumber::kValueOffset);
+ }
+
+
+ // Copies a fixed number of fields of heap objects from src to dst.
+ void MacroAssembler::CopyFields(Register dst,
+ Register src,
+ RegList temps,
+ int field_count) {
+ // At least one bit set in the first 15 registers.
+ ASSERT((temps & ((1 << 15) - 1)) != 0);
+ ASSERT((temps & dst.bit()) == 0);
+ ASSERT((temps & src.bit()) == 0);
+ // Primitive implementation using only one temporary register.
+
+ Register tmp = no_reg;
+ // Find a temp register in temps list.
+ for (int i = 0; i < 15; i++) {
+ if ((temps & (1 << i)) != 0) {
+ tmp.set_code(i);
+ break;
+ }
+ }
+ ASSERT(!tmp.is(no_reg));
+
+ for (int i = 0; i < field_count; i++) {
+ ldr(tmp, FieldMemOperand(src, i * kPointerSize));
+ str(tmp, FieldMemOperand(dst, i * kPointerSize));
+ }
+ }
+
+
+ void MacroAssembler::CopyBytes(Register src,
+ Register dst,
+ Register length,
+ Register scratch) {
+ Label align_loop, align_loop_1, word_loop, byte_loop, byte_loop_1, done;
+
+ // Align src before copying in word size chunks.
+ bind(&align_loop);
+ cmp(length, Operand(0));
+ b(eq, &done);
+ bind(&align_loop_1);
+ tst(src, Operand(kPointerSize - 1));
+ b(eq, &word_loop);
+ ldrb(scratch, MemOperand(src, 1, PostIndex));
+ strb(scratch, MemOperand(dst, 1, PostIndex));
+ sub(length, length, Operand(1), SetCC);
+ b(ne, &byte_loop_1);
+
+ // Copy bytes in word size chunks.
+ bind(&word_loop);
+ if (emit_debug_code()) {
+ tst(src, Operand(kPointerSize - 1));
+ Assert(eq, "Expecting alignment for CopyBytes");
+ }
+ cmp(length, Operand(kPointerSize));
+ b(lt, &byte_loop);
+ ldr(scratch, MemOperand(src, kPointerSize, PostIndex));
+ #if CAN_USE_UNALIGNED_ACCESSES
+ str(scratch, MemOperand(dst, kPointerSize, PostIndex));
+ #else
+ strb(scratch, MemOperand(dst, 1, PostIndex));
+ mov(scratch, Operand(scratch, LSR, 8));
+ strb(scratch, MemOperand(dst, 1, PostIndex));
+ mov(scratch, Operand(scratch, LSR, 8));
+ strb(scratch, MemOperand(dst, 1, PostIndex));
+ mov(scratch, Operand(scratch, LSR, 8));
+ strb(scratch, MemOperand(dst, 1, PostIndex));
+ #endif
+ sub(length, length, Operand(kPointerSize));
+ b(&word_loop);
+
+ // Copy the last bytes if any left.
+ bind(&byte_loop);
+ cmp(length, Operand(0));
+ b(eq, &done);
+ bind(&byte_loop_1);
+ ldrb(scratch, MemOperand(src, 1, PostIndex));
+ strb(scratch, MemOperand(dst, 1, PostIndex));
+ sub(length, length, Operand(1), SetCC);
+ b(ne, &byte_loop_1);
+ bind(&done);
+ }
+
+
+ void MacroAssembler::CountLeadingZeros(Register zeros, // Answer.
+ Register source, // Input.
+ Register scratch) {
+ ASSERT(!zeros.is(source) || !source.is(scratch));
+ ASSERT(!zeros.is(scratch));
+ ASSERT(!scratch.is(ip));
+ ASSERT(!source.is(ip));
+ ASSERT(!zeros.is(ip));
+ #ifdef CAN_USE_ARMV5_INSTRUCTIONS
+ clz(zeros, source); // This instruction is only supported on ARMv5 and later.
+ #else
+ mov(zeros, Operand(0, RelocInfo::NONE));
+ Move(scratch, source);
+ // Top 16.
+ tst(scratch, Operand(0xffff0000));
+ add(zeros, zeros, Operand(16), LeaveCC, eq);
+ mov(scratch, Operand(scratch, LSL, 16), LeaveCC, eq);
+ // Top 8.
+ tst(scratch, Operand(0xff000000));
+ add(zeros, zeros, Operand(8), LeaveCC, eq);
+ mov(scratch, Operand(scratch, LSL, 8), LeaveCC, eq);
+ // Top 4.
+ tst(scratch, Operand(0xf0000000));
+ add(zeros, zeros, Operand(4), LeaveCC, eq);
+ mov(scratch, Operand(scratch, LSL, 4), LeaveCC, eq);
+ // Top 2.
+ tst(scratch, Operand(0xc0000000));
+ add(zeros, zeros, Operand(2), LeaveCC, eq);
+ mov(scratch, Operand(scratch, LSL, 2), LeaveCC, eq);
+ // Top bit.
+ tst(scratch, Operand(0x80000000u));
+ add(zeros, zeros, Operand(1), LeaveCC, eq);
+ #endif
+ }
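+ // The pre-ARMv5 fallback above is a branch-free binary search: each tst
+ // asks whether the top half/quarter/... of the remaining word is empty,
+ // and the conditional add/shift pair runs only when it is. For
+ // source == 0x00010000 the five steps add 0, 8, 4, 2 and 1 leading
+ // zeros respectively, giving zeros == 15, matching clz.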
+
+
+ void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
+ Register first,
+ Register second,
+ Register scratch1,
+ Register scratch2,
+ Label* failure) {
+ int kFlatAsciiStringMask =
+ kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
+ int kFlatAsciiStringTag = ASCII_STRING_TYPE;
+ and_(scratch1, first, Operand(kFlatAsciiStringMask));
+ and_(scratch2, second, Operand(kFlatAsciiStringMask));
+ cmp(scratch1, Operand(kFlatAsciiStringTag));
+ // Ignore second test if first test failed.
+ cmp(scratch2, Operand(kFlatAsciiStringTag), eq);
+ b(ne, failure);
+ }
+
+
+ void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(Register type,
+ Register scratch,
+ Label* failure) {
+ int kFlatAsciiStringMask =
+ kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
+ int kFlatAsciiStringTag = ASCII_STRING_TYPE;
+ and_(scratch, type, Operand(kFlatAsciiStringMask));
+ cmp(scratch, Operand(kFlatAsciiStringTag));
+ b(ne, failure);
+ }
+
+ static const int kRegisterPassedArguments = 4;
+
+ void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
+ int frame_alignment = ActivationFrameAlignment();
+
+ // Up to four simple arguments are passed in registers r0..r3.
+ int stack_passed_arguments = (num_arguments <= kRegisterPassedArguments) ?
+ 0 : num_arguments - kRegisterPassedArguments;
+ if (frame_alignment > kPointerSize) {
+ // Make stack end at alignment and make room for num_arguments - 4 words
+ // and the original value of sp.
+ mov(scratch, sp);
+ sub(sp, sp, Operand((stack_passed_arguments + 1) * kPointerSize));
+ ASSERT(IsPowerOf2(frame_alignment));
+ and_(sp, sp, Operand(-frame_alignment));
+ str(scratch, MemOperand(sp, stack_passed_arguments * kPointerSize));
+ } else {
+ sub(sp, sp, Operand(stack_passed_arguments * kPointerSize));
+ }
+ }
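+ // This follows the ARM AAPCS calling convention: the first four integer
+ // arguments travel in r0-r3, and the stack must be 8-byte aligned at
+ // public call boundaries, which is why sp is saved, bumped and masked
+ // down to frame_alignment rather than simply decremented.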
+
+
+ void MacroAssembler::CallCFunction(ExternalReference function,
+ int num_arguments) {
+ CallCFunctionHelper(no_reg, function, ip, num_arguments);
+ }
+
+ void MacroAssembler::CallCFunction(Register function,
+ Register scratch,
+ int num_arguments) {
+ CallCFunctionHelper(function,
+ ExternalReference::the_hole_value_location(isolate()),
+ scratch,
+ num_arguments);
+ }
+
+
+ void MacroAssembler::CallCFunctionHelper(Register function,
+ ExternalReference function_reference,
+ Register scratch,
+ int num_arguments) {
+ // Make sure that the stack is aligned before calling a C function unless
+ // running in the simulator. The simulator has its own alignment check which
+ // provides more information.
+ #if defined(V8_HOST_ARCH_ARM)
+ if (emit_debug_code()) {
+ int frame_alignment = OS::ActivationFrameAlignment();
+ int frame_alignment_mask = frame_alignment - 1;
+ if (frame_alignment > kPointerSize) {
+ ASSERT(IsPowerOf2(frame_alignment));
+ Label alignment_as_expected;
+ tst(sp, Operand(frame_alignment_mask));
+ b(eq, &alignment_as_expected);
+ // Don't use Check here, as it will call Runtime_Abort, possibly
+ // re-entering here.
+ stop("Unexpected alignment");
+ bind(&alignment_as_expected);
+ }
+ }
+ #endif
+
+ // Just call directly. The function called cannot cause a GC, or
+ // allow preemption, so the return address in the link register
+ // stays correct.
+ if (function.is(no_reg)) {
+ mov(scratch, Operand(function_reference));
+ function = scratch;
+ }
+ Call(function);
+ int stack_passed_arguments = (num_arguments <= kRegisterPassedArguments) ?
+ 0 : num_arguments - kRegisterPassedArguments;
+ if (OS::ActivationFrameAlignment() > kPointerSize) {
+ ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize));
+ } else {
+ add(sp, sp, Operand(stack_passed_arguments * sizeof(kPointerSize)));
+ }
+ }
+
+
+ void MacroAssembler::GetRelocatedValueLocation(Register ldr_location,
+ Register result) {
+ const uint32_t kLdrOffsetMask = (1 << 12) - 1;
+ const int32_t kPCRegOffset = 2 * kPointerSize;
+ ldr(result, MemOperand(ldr_location));
+ if (emit_debug_code()) {
+ // Check that the instruction is a 'ldr reg, [pc + offset]'.
+ and_(result, result, Operand(kLdrPCPattern));
+ cmp(result, Operand(kLdrPCPattern));
+ Check(eq, "The instruction to patch should be a load from pc.");
+ // Result was clobbered. Restore it.
+ ldr(result, MemOperand(ldr_location));
+ }
+ // Get the address of the constant.
+ and_(result, result, Operand(kLdrOffsetMask));
+ add(result, ldr_location, Operand(result));
+ add(result, result, Operand(kPCRegOffset));
+ }
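+ // kPCRegOffset == 2 * kPointerSize reflects the classic ARM pipeline
+ // quirk: when a 'ldr reg, [pc, #offset]' executes, reading pc yields the
+ // address of the instruction plus 8, so the constant lives at
+ // ldr_location + offset + 8. The offset itself is the low 12 bits of the
+ // instruction, hence kLdrOffsetMask == (1 << 12) - 1.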
+
+
+ CodePatcher::CodePatcher(byte* address, int instructions)
+ : address_(address),
+ instructions_(instructions),
+ size_(instructions * Assembler::kInstrSize),
+ masm_(Isolate::Current(), address, size_ + Assembler::kGap) {
+ // Create a new macro assembler pointing to the address of the code to patch.
+ // The size is adjusted with kGap in order for the assembler to generate
+ // size bytes of instructions without failing with buffer size constraints.
+ ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
+ }
+
+
+ CodePatcher::~CodePatcher() {
+ // Indicate that code has changed.
+ CPU::FlushICache(address_, size_);
+
+ // Check that the code was patched as expected.
+ ASSERT(masm_.pc_ == address_ + size_);
+ ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
+ }
+
+
+ void CodePatcher::Emit(Instr instr) {
+ masm()->emit(instr);
+ }
+
+
+ void CodePatcher::Emit(Address addr) {
+ masm()->emit(reinterpret_cast<Instr>(addr));
+ }
+
+
+ void CodePatcher::EmitCondition(Condition cond) {
+ Instr instr = Assembler::instr_at(masm_.pc_);
+ instr = (instr & ~kCondMask) | cond;
+ masm_.emit(instr);
+ }
+
+
+ } } // namespace v8::internal
+
+ #endif // V8_TARGET_ARCH_ARM