therubyracer 0.9.0beta2 → 0.9.0beta3

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of therubyracer might be problematic. Click here for more details.

Files changed (920)
  1. data/.gitmodules +3 -0
  2. data/ext/v8/upstream/Makefile +1 -2
  3. data/ext/v8/upstream/v8/.gitignore +33 -0
  4. data/ext/v8/upstream/v8/AUTHORS +42 -0
  5. data/ext/v8/upstream/v8/ChangeLog +2663 -0
  6. data/ext/v8/upstream/{3.1.8 → v8}/LICENSE +0 -0
  7. data/ext/v8/upstream/{3.1.8 → v8}/LICENSE.strongtalk +0 -0
  8. data/ext/v8/upstream/{3.1.8 → v8}/LICENSE.v8 +0 -0
  9. data/ext/v8/upstream/{3.1.8 → v8}/LICENSE.valgrind +0 -0
  10. data/ext/v8/upstream/v8/SConstruct +1473 -0
  11. data/ext/v8/upstream/{3.1.8 → v8}/build/README.txt +0 -0
  12. data/ext/v8/upstream/{3.1.8 → v8}/build/all.gyp +0 -0
  13. data/ext/v8/upstream/{3.1.8 → v8}/build/armu.gypi +0 -0
  14. data/ext/v8/upstream/{3.1.8 → v8}/build/common.gypi +0 -0
  15. data/ext/v8/upstream/{3.1.8 → v8}/build/gyp_v8 +0 -0
  16. data/ext/v8/upstream/v8/include/v8-debug.h +394 -0
  17. data/ext/v8/upstream/v8/include/v8-preparser.h +116 -0
  18. data/ext/v8/upstream/v8/include/v8-profiler.h +505 -0
  19. data/ext/v8/upstream/v8/include/v8-testing.h +104 -0
  20. data/ext/v8/upstream/v8/include/v8.h +4000 -0
  21. data/ext/v8/upstream/{3.1.8 → v8}/include/v8stdint.h +0 -0
  22. data/ext/v8/upstream/v8/preparser/SConscript +38 -0
  23. data/ext/v8/upstream/v8/preparser/preparser-process.cc +169 -0
  24. data/ext/v8/upstream/v8/src/SConscript +380 -0
  25. data/ext/v8/upstream/v8/src/accessors.cc +766 -0
  26. data/ext/v8/upstream/{3.1.8 → v8}/src/accessors.h +0 -0
  27. data/ext/v8/upstream/v8/src/allocation-inl.h +49 -0
  28. data/ext/v8/upstream/v8/src/allocation.cc +122 -0
  29. data/ext/v8/upstream/v8/src/allocation.h +143 -0
  30. data/ext/v8/upstream/v8/src/api.cc +5678 -0
  31. data/ext/v8/upstream/v8/src/api.h +572 -0
  32. data/ext/v8/upstream/{3.1.8 → v8}/src/apinatives.js +0 -0
  33. data/ext/v8/upstream/v8/src/apiutils.h +73 -0
  34. data/ext/v8/upstream/v8/src/arguments.h +116 -0
  35. data/ext/v8/upstream/v8/src/arm/assembler-arm-inl.h +353 -0
  36. data/ext/v8/upstream/v8/src/arm/assembler-arm.cc +2877 -0
  37. data/ext/v8/upstream/v8/src/arm/assembler-arm.h +1382 -0
  38. data/ext/v8/upstream/v8/src/arm/builtins-arm.cc +1634 -0
  39. data/ext/v8/upstream/v8/src/arm/code-stubs-arm.cc +6917 -0
  40. data/ext/v8/upstream/v8/src/arm/code-stubs-arm.h +623 -0
  41. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/codegen-arm-inl.h +0 -0
  42. data/ext/v8/upstream/v8/src/arm/codegen-arm.cc +7437 -0
  43. data/ext/v8/upstream/v8/src/arm/codegen-arm.h +595 -0
  44. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/constants-arm.cc +0 -0
  45. data/ext/v8/upstream/v8/src/arm/constants-arm.h +778 -0
  46. data/ext/v8/upstream/v8/src/arm/cpu-arm.cc +149 -0
  47. data/ext/v8/upstream/v8/src/arm/debug-arm.cc +317 -0
  48. data/ext/v8/upstream/v8/src/arm/deoptimizer-arm.cc +737 -0
  49. data/ext/v8/upstream/v8/src/arm/disasm-arm.cc +1503 -0
  50. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/frames-arm.cc +0 -0
  51. data/ext/v8/upstream/v8/src/arm/frames-arm.h +168 -0
  52. data/ext/v8/upstream/v8/src/arm/full-codegen-arm.cc +4374 -0
  53. data/ext/v8/upstream/v8/src/arm/ic-arm.cc +1793 -0
  54. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/jump-target-arm.cc +0 -0
  55. data/ext/v8/upstream/v8/src/arm/lithium-arm.cc +2120 -0
  56. data/ext/v8/upstream/v8/src/arm/lithium-arm.h +2179 -0
  57. data/ext/v8/upstream/v8/src/arm/lithium-codegen-arm.cc +4132 -0
  58. data/ext/v8/upstream/v8/src/arm/lithium-codegen-arm.h +329 -0
  59. data/ext/v8/upstream/v8/src/arm/lithium-gap-resolver-arm.cc +305 -0
  60. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/lithium-gap-resolver-arm.h +0 -0
  61. data/ext/v8/upstream/v8/src/arm/macro-assembler-arm.cc +2939 -0
  62. data/ext/v8/upstream/v8/src/arm/macro-assembler-arm.h +1071 -0
  63. data/ext/v8/upstream/v8/src/arm/regexp-macro-assembler-arm.cc +1287 -0
  64. data/ext/v8/upstream/v8/src/arm/regexp-macro-assembler-arm.h +253 -0
  65. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/register-allocator-arm-inl.h +0 -0
  66. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/register-allocator-arm.cc +0 -0
  67. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/register-allocator-arm.h +0 -0
  68. data/ext/v8/upstream/v8/src/arm/simulator-arm.cc +3288 -0
  69. data/ext/v8/upstream/v8/src/arm/simulator-arm.h +413 -0
  70. data/ext/v8/upstream/v8/src/arm/stub-cache-arm.cc +4034 -0
  71. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/virtual-frame-arm-inl.h +0 -0
  72. data/ext/v8/upstream/v8/src/arm/virtual-frame-arm.cc +843 -0
  73. data/ext/v8/upstream/v8/src/arm/virtual-frame-arm.h +523 -0
  74. data/ext/v8/upstream/v8/src/array.js +1249 -0
  75. data/ext/v8/upstream/v8/src/assembler.cc +1067 -0
  76. data/ext/v8/upstream/v8/src/assembler.h +823 -0
  77. data/ext/v8/upstream/v8/src/ast-inl.h +112 -0
  78. data/ext/v8/upstream/v8/src/ast.cc +1078 -0
  79. data/ext/v8/upstream/v8/src/ast.h +2234 -0
  80. data/ext/v8/upstream/v8/src/atomicops.h +167 -0
  81. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_arm_gcc.h +0 -0
  82. data/ext/v8/upstream/v8/src/atomicops_internals_mips_gcc.h +169 -0
  83. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_x86_gcc.cc +0 -0
  84. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_x86_gcc.h +0 -0
  85. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_x86_macosx.h +0 -0
  86. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_x86_msvc.h +0 -0
  87. data/ext/v8/upstream/{3.1.8 → v8}/src/bignum-dtoa.cc +0 -0
  88. data/ext/v8/upstream/{3.1.8 → v8}/src/bignum-dtoa.h +0 -0
  89. data/ext/v8/upstream/{3.1.8 → v8}/src/bignum.cc +0 -0
  90. data/ext/v8/upstream/{3.1.8 → v8}/src/bignum.h +0 -0
  91. data/ext/v8/upstream/v8/src/bootstrapper.cc +2138 -0
  92. data/ext/v8/upstream/v8/src/bootstrapper.h +185 -0
  93. data/ext/v8/upstream/v8/src/builtins.cc +1708 -0
  94. data/ext/v8/upstream/v8/src/builtins.h +368 -0
  95. data/ext/v8/upstream/{3.1.8 → v8}/src/bytecodes-irregexp.h +0 -0
  96. data/ext/v8/upstream/{3.1.8 → v8}/src/cached-powers.cc +0 -0
  97. data/ext/v8/upstream/{3.1.8 → v8}/src/cached-powers.h +0 -0
  98. data/ext/v8/upstream/{3.1.8 → v8}/src/char-predicates-inl.h +0 -0
  99. data/ext/v8/upstream/{3.1.8 → v8}/src/char-predicates.h +0 -0
  100. data/ext/v8/upstream/v8/src/checks.cc +110 -0
  101. data/ext/v8/upstream/v8/src/checks.h +296 -0
  102. data/ext/v8/upstream/{3.1.8 → v8}/src/circular-queue-inl.h +0 -0
  103. data/ext/v8/upstream/{3.1.8 → v8}/src/circular-queue.cc +0 -0
  104. data/ext/v8/upstream/{3.1.8 → v8}/src/circular-queue.h +0 -0
  105. data/ext/v8/upstream/v8/src/code-stubs.cc +240 -0
  106. data/ext/v8/upstream/v8/src/code-stubs.h +971 -0
  107. data/ext/v8/upstream/{3.1.8 → v8}/src/code.h +0 -0
  108. data/ext/v8/upstream/v8/src/codegen-inl.h +68 -0
  109. data/ext/v8/upstream/v8/src/codegen.cc +505 -0
  110. data/ext/v8/upstream/v8/src/codegen.h +245 -0
  111. data/ext/v8/upstream/v8/src/compilation-cache.cc +540 -0
  112. data/ext/v8/upstream/v8/src/compilation-cache.h +287 -0
  113. data/ext/v8/upstream/v8/src/compiler.cc +792 -0
  114. data/ext/v8/upstream/v8/src/compiler.h +307 -0
  115. data/ext/v8/upstream/v8/src/contexts.cc +327 -0
  116. data/ext/v8/upstream/v8/src/contexts.h +382 -0
  117. data/ext/v8/upstream/{3.1.8 → v8}/src/conversions-inl.h +0 -0
  118. data/ext/v8/upstream/v8/src/conversions.cc +1125 -0
  119. data/ext/v8/upstream/{3.1.8 → v8}/src/conversions.h +0 -0
  120. data/ext/v8/upstream/v8/src/counters.cc +93 -0
  121. data/ext/v8/upstream/v8/src/counters.h +254 -0
  122. data/ext/v8/upstream/v8/src/cpu-profiler-inl.h +101 -0
  123. data/ext/v8/upstream/v8/src/cpu-profiler.cc +606 -0
  124. data/ext/v8/upstream/v8/src/cpu-profiler.h +305 -0
  125. data/ext/v8/upstream/v8/src/cpu.h +67 -0
  126. data/ext/v8/upstream/v8/src/d8-debug.cc +367 -0
  127. data/ext/v8/upstream/v8/src/d8-debug.h +158 -0
  128. data/ext/v8/upstream/v8/src/d8-posix.cc +695 -0
  129. data/ext/v8/upstream/{3.1.8 → v8}/src/d8-readline.cc +0 -0
  130. data/ext/v8/upstream/{3.1.8 → v8}/src/d8-windows.cc +0 -0
  131. data/ext/v8/upstream/v8/src/d8.cc +796 -0
  132. data/ext/v8/upstream/v8/src/d8.gyp +88 -0
  133. data/ext/v8/upstream/{3.1.8 → v8}/src/d8.h +0 -0
  134. data/ext/v8/upstream/{3.1.8 → v8}/src/d8.js +0 -0
  135. data/ext/v8/upstream/{3.1.8 → v8}/src/data-flow.cc +0 -0
  136. data/ext/v8/upstream/v8/src/data-flow.h +379 -0
  137. data/ext/v8/upstream/{3.1.8 → v8}/src/date.js +0 -0
  138. data/ext/v8/upstream/{3.1.8 → v8}/src/dateparser-inl.h +0 -0
  139. data/ext/v8/upstream/{3.1.8 → v8}/src/dateparser.cc +0 -0
  140. data/ext/v8/upstream/v8/src/dateparser.h +265 -0
  141. data/ext/v8/upstream/v8/src/debug-agent.cc +447 -0
  142. data/ext/v8/upstream/v8/src/debug-agent.h +129 -0
  143. data/ext/v8/upstream/{3.1.8 → v8}/src/debug-debugger.js +0 -0
  144. data/ext/v8/upstream/v8/src/debug.cc +3188 -0
  145. data/ext/v8/upstream/v8/src/debug.h +1055 -0
  146. data/ext/v8/upstream/v8/src/deoptimizer.cc +1296 -0
  147. data/ext/v8/upstream/v8/src/deoptimizer.h +629 -0
  148. data/ext/v8/upstream/v8/src/disasm.h +80 -0
  149. data/ext/v8/upstream/v8/src/disassembler.cc +339 -0
  150. data/ext/v8/upstream/{3.1.8 → v8}/src/disassembler.h +0 -0
  151. data/ext/v8/upstream/{3.1.8 → v8}/src/diy-fp.cc +0 -0
  152. data/ext/v8/upstream/{3.1.8 → v8}/src/diy-fp.h +0 -0
  153. data/ext/v8/upstream/{3.1.8 → v8}/src/double.h +0 -0
  154. data/ext/v8/upstream/{3.1.8 → v8}/src/dtoa.cc +0 -0
  155. data/ext/v8/upstream/{3.1.8 → v8}/src/dtoa.h +0 -0
  156. data/ext/v8/upstream/v8/src/execution.cc +791 -0
  157. data/ext/v8/upstream/v8/src/execution.h +291 -0
  158. data/ext/v8/upstream/v8/src/extensions/experimental/break-iterator.cc +250 -0
  159. data/ext/v8/upstream/v8/src/extensions/experimental/break-iterator.h +89 -0
  160. data/ext/v8/upstream/v8/src/extensions/experimental/experimental.gyp +55 -0
  161. data/ext/v8/upstream/v8/src/extensions/experimental/i18n-extension.cc +284 -0
  162. data/ext/v8/upstream/{3.1.8 → v8}/src/extensions/experimental/i18n-extension.h +0 -0
  163. data/ext/v8/upstream/v8/src/extensions/externalize-string-extension.cc +141 -0
  164. data/ext/v8/upstream/{3.1.8 → v8}/src/extensions/externalize-string-extension.h +0 -0
  165. data/ext/v8/upstream/v8/src/extensions/gc-extension.cc +58 -0
  166. data/ext/v8/upstream/{3.1.8 → v8}/src/extensions/gc-extension.h +0 -0
  167. data/ext/v8/upstream/v8/src/factory.cc +1194 -0
  168. data/ext/v8/upstream/v8/src/factory.h +436 -0
  169. data/ext/v8/upstream/{3.1.8 → v8}/src/fast-dtoa.cc +0 -0
  170. data/ext/v8/upstream/{3.1.8 → v8}/src/fast-dtoa.h +0 -0
  171. data/ext/v8/upstream/{3.1.8 → v8}/src/fixed-dtoa.cc +0 -0
  172. data/ext/v8/upstream/{3.1.8 → v8}/src/fixed-dtoa.h +0 -0
  173. data/ext/v8/upstream/v8/src/flag-definitions.h +556 -0
  174. data/ext/v8/upstream/{3.1.8 → v8}/src/flags.cc +0 -0
  175. data/ext/v8/upstream/{3.1.8 → v8}/src/flags.h +0 -0
  176. data/ext/v8/upstream/v8/src/frame-element.cc +37 -0
  177. data/ext/v8/upstream/v8/src/frame-element.h +269 -0
  178. data/ext/v8/upstream/v8/src/frames-inl.h +236 -0
  179. data/ext/v8/upstream/v8/src/frames.cc +1273 -0
  180. data/ext/v8/upstream/v8/src/frames.h +854 -0
  181. data/ext/v8/upstream/v8/src/full-codegen.cc +1385 -0
  182. data/ext/v8/upstream/v8/src/full-codegen.h +753 -0
  183. data/ext/v8/upstream/v8/src/func-name-inferrer.cc +91 -0
  184. data/ext/v8/upstream/v8/src/func-name-inferrer.h +111 -0
  185. data/ext/v8/upstream/v8/src/gdb-jit.cc +1548 -0
  186. data/ext/v8/upstream/{3.1.8 → v8}/src/gdb-jit.h +0 -0
  187. data/ext/v8/upstream/v8/src/global-handles.cc +596 -0
  188. data/ext/v8/upstream/v8/src/global-handles.h +239 -0
  189. data/ext/v8/upstream/v8/src/globals.h +325 -0
  190. data/ext/v8/upstream/v8/src/handles-inl.h +177 -0
  191. data/ext/v8/upstream/v8/src/handles.cc +965 -0
  192. data/ext/v8/upstream/v8/src/handles.h +372 -0
  193. data/ext/v8/upstream/{3.1.8 → v8}/src/hashmap.cc +0 -0
  194. data/ext/v8/upstream/v8/src/hashmap.h +121 -0
  195. data/ext/v8/upstream/v8/src/heap-inl.h +703 -0
  196. data/ext/v8/upstream/v8/src/heap-profiler.cc +1173 -0
  197. data/ext/v8/upstream/v8/src/heap-profiler.h +396 -0
  198. data/ext/v8/upstream/v8/src/heap.cc +5856 -0
  199. data/ext/v8/upstream/v8/src/heap.h +2264 -0
  200. data/ext/v8/upstream/v8/src/hydrogen-instructions.cc +1639 -0
  201. data/ext/v8/upstream/v8/src/hydrogen-instructions.h +3657 -0
  202. data/ext/v8/upstream/v8/src/hydrogen.cc +6011 -0
  203. data/ext/v8/upstream/v8/src/hydrogen.h +1137 -0
  204. data/ext/v8/upstream/v8/src/ia32/assembler-ia32-inl.h +430 -0
  205. data/ext/v8/upstream/v8/src/ia32/assembler-ia32.cc +2846 -0
  206. data/ext/v8/upstream/v8/src/ia32/assembler-ia32.h +1159 -0
  207. data/ext/v8/upstream/v8/src/ia32/builtins-ia32.cc +1596 -0
  208. data/ext/v8/upstream/v8/src/ia32/code-stubs-ia32.cc +6549 -0
  209. data/ext/v8/upstream/v8/src/ia32/code-stubs-ia32.h +495 -0
  210. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/codegen-ia32-inl.h +0 -0
  211. data/ext/v8/upstream/v8/src/ia32/codegen-ia32.cc +10385 -0
  212. data/ext/v8/upstream/v8/src/ia32/codegen-ia32.h +801 -0
  213. data/ext/v8/upstream/v8/src/ia32/cpu-ia32.cc +88 -0
  214. data/ext/v8/upstream/v8/src/ia32/debug-ia32.cc +312 -0
  215. data/ext/v8/upstream/v8/src/ia32/deoptimizer-ia32.cc +774 -0
  216. data/ext/v8/upstream/v8/src/ia32/disasm-ia32.cc +1620 -0
  217. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/frames-ia32.cc +0 -0
  218. data/ext/v8/upstream/v8/src/ia32/frames-ia32.h +140 -0
  219. data/ext/v8/upstream/v8/src/ia32/full-codegen-ia32.cc +4357 -0
  220. data/ext/v8/upstream/v8/src/ia32/ic-ia32.cc +1779 -0
  221. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/jump-target-ia32.cc +0 -0
  222. data/ext/v8/upstream/v8/src/ia32/lithium-codegen-ia32.cc +4158 -0
  223. data/ext/v8/upstream/v8/src/ia32/lithium-codegen-ia32.h +318 -0
  224. data/ext/v8/upstream/v8/src/ia32/lithium-gap-resolver-ia32.cc +466 -0
  225. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/lithium-gap-resolver-ia32.h +0 -0
  226. data/ext/v8/upstream/v8/src/ia32/lithium-ia32.cc +2181 -0
  227. data/ext/v8/upstream/v8/src/ia32/lithium-ia32.h +2235 -0
  228. data/ext/v8/upstream/v8/src/ia32/macro-assembler-ia32.cc +2056 -0
  229. data/ext/v8/upstream/v8/src/ia32/macro-assembler-ia32.h +807 -0
  230. data/ext/v8/upstream/v8/src/ia32/regexp-macro-assembler-ia32.cc +1264 -0
  231. data/ext/v8/upstream/v8/src/ia32/regexp-macro-assembler-ia32.h +216 -0
  232. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/register-allocator-ia32-inl.h +0 -0
  233. data/ext/v8/upstream/v8/src/ia32/register-allocator-ia32.cc +157 -0
  234. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/register-allocator-ia32.h +0 -0
  235. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/simulator-ia32.cc +0 -0
  236. data/ext/v8/upstream/v8/src/ia32/simulator-ia32.h +72 -0
  237. data/ext/v8/upstream/v8/src/ia32/stub-cache-ia32.cc +3711 -0
  238. data/ext/v8/upstream/v8/src/ia32/virtual-frame-ia32.cc +1366 -0
  239. data/ext/v8/upstream/v8/src/ia32/virtual-frame-ia32.h +650 -0
  240. data/ext/v8/upstream/v8/src/ic-inl.h +130 -0
  241. data/ext/v8/upstream/v8/src/ic.cc +2389 -0
  242. data/ext/v8/upstream/v8/src/ic.h +675 -0
  243. data/ext/v8/upstream/{3.1.8 → v8}/src/inspector.cc +0 -0
  244. data/ext/v8/upstream/{3.1.8 → v8}/src/inspector.h +0 -0
  245. data/ext/v8/upstream/v8/src/interpreter-irregexp.cc +659 -0
  246. data/ext/v8/upstream/v8/src/interpreter-irregexp.h +49 -0
  247. data/ext/v8/upstream/v8/src/isolate.cc +883 -0
  248. data/ext/v8/upstream/v8/src/isolate.h +1306 -0
  249. data/ext/v8/upstream/v8/src/json.js +342 -0
  250. data/ext/v8/upstream/v8/src/jsregexp.cc +5371 -0
  251. data/ext/v8/upstream/v8/src/jsregexp.h +1483 -0
  252. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target-heavy-inl.h +0 -0
  253. data/ext/v8/upstream/v8/src/jump-target-heavy.cc +427 -0
  254. data/ext/v8/upstream/v8/src/jump-target-heavy.h +238 -0
  255. data/ext/v8/upstream/v8/src/jump-target-inl.h +48 -0
  256. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target-light-inl.h +0 -0
  257. data/ext/v8/upstream/v8/src/jump-target-light.cc +111 -0
  258. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target-light.h +0 -0
  259. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target.cc +0 -0
  260. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target.h +0 -0
  261. data/ext/v8/upstream/{3.1.8 → v8}/src/list-inl.h +0 -0
  262. data/ext/v8/upstream/{3.1.8 → v8}/src/list.h +0 -0
  263. data/ext/v8/upstream/v8/src/lithium-allocator-inl.h +142 -0
  264. data/ext/v8/upstream/v8/src/lithium-allocator.cc +2105 -0
  265. data/ext/v8/upstream/v8/src/lithium-allocator.h +630 -0
  266. data/ext/v8/upstream/v8/src/lithium.cc +169 -0
  267. data/ext/v8/upstream/{3.1.8 → v8}/src/lithium.h +0 -0
  268. data/ext/v8/upstream/{3.1.8 → v8}/src/liveedit-debugger.js +0 -0
  269. data/ext/v8/upstream/v8/src/liveedit.cc +1693 -0
  270. data/ext/v8/upstream/v8/src/liveedit.h +179 -0
  271. data/ext/v8/upstream/{3.1.8 → v8}/src/liveobjectlist-inl.h +0 -0
  272. data/ext/v8/upstream/v8/src/liveobjectlist.cc +2589 -0
  273. data/ext/v8/upstream/v8/src/liveobjectlist.h +322 -0
  274. data/ext/v8/upstream/{3.1.8 → v8}/src/log-inl.h +0 -0
  275. data/ext/v8/upstream/v8/src/log-utils.cc +423 -0
  276. data/ext/v8/upstream/v8/src/log-utils.h +229 -0
  277. data/ext/v8/upstream/v8/src/log.cc +1666 -0
  278. data/ext/v8/upstream/v8/src/log.h +446 -0
  279. data/ext/v8/upstream/{3.1.8 → v8}/src/macro-assembler.h +0 -0
  280. data/ext/v8/upstream/{3.1.8 → v8}/src/macros.py +0 -0
  281. data/ext/v8/upstream/v8/src/mark-compact.cc +3092 -0
  282. data/ext/v8/upstream/v8/src/mark-compact.h +506 -0
  283. data/ext/v8/upstream/{3.1.8 → v8}/src/math.js +0 -0
  284. data/ext/v8/upstream/v8/src/messages.cc +166 -0
  285. data/ext/v8/upstream/{3.1.8 → v8}/src/messages.h +0 -0
  286. data/ext/v8/upstream/v8/src/messages.js +1090 -0
  287. data/ext/v8/upstream/v8/src/mips/assembler-mips-inl.h +335 -0
  288. data/ext/v8/upstream/v8/src/mips/assembler-mips.cc +2093 -0
  289. data/ext/v8/upstream/v8/src/mips/assembler-mips.h +1066 -0
  290. data/ext/v8/upstream/v8/src/mips/builtins-mips.cc +148 -0
  291. data/ext/v8/upstream/v8/src/mips/code-stubs-mips.cc +752 -0
  292. data/ext/v8/upstream/v8/src/mips/code-stubs-mips.h +511 -0
  293. data/ext/v8/upstream/v8/src/mips/codegen-mips-inl.h +64 -0
  294. data/ext/v8/upstream/v8/src/mips/codegen-mips.cc +1213 -0
  295. data/ext/v8/upstream/v8/src/mips/codegen-mips.h +633 -0
  296. data/ext/v8/upstream/v8/src/mips/constants-mips.cc +352 -0
  297. data/ext/v8/upstream/v8/src/mips/constants-mips.h +723 -0
  298. data/ext/v8/upstream/v8/src/mips/cpu-mips.cc +90 -0
  299. data/ext/v8/upstream/v8/src/mips/debug-mips.cc +155 -0
  300. data/ext/v8/upstream/v8/src/mips/deoptimizer-mips.cc +91 -0
  301. data/ext/v8/upstream/v8/src/mips/disasm-mips.cc +1023 -0
  302. data/ext/v8/upstream/v8/src/mips/frames-mips.cc +48 -0
  303. data/ext/v8/upstream/v8/src/mips/frames-mips.h +179 -0
  304. data/ext/v8/upstream/v8/src/mips/full-codegen-mips.cc +727 -0
  305. data/ext/v8/upstream/v8/src/mips/ic-mips.cc +244 -0
  306. data/ext/v8/upstream/v8/src/mips/jump-target-mips.cc +80 -0
  307. data/ext/v8/upstream/v8/src/mips/lithium-codegen-mips.h +65 -0
  308. data/ext/v8/upstream/v8/src/mips/lithium-mips.h +304 -0
  309. data/ext/v8/upstream/v8/src/mips/macro-assembler-mips.cc +3327 -0
  310. data/ext/v8/upstream/v8/src/mips/macro-assembler-mips.h +1058 -0
  311. data/ext/v8/upstream/v8/src/mips/regexp-macro-assembler-mips.cc +478 -0
  312. data/ext/v8/upstream/v8/src/mips/regexp-macro-assembler-mips.h +250 -0
  313. data/ext/v8/upstream/v8/src/mips/register-allocator-mips-inl.h +134 -0
  314. data/ext/v8/upstream/{3.1.8 → v8}/src/mips/register-allocator-mips.cc +0 -0
  315. data/ext/v8/upstream/v8/src/mips/register-allocator-mips.h +47 -0
  316. data/ext/v8/upstream/v8/src/mips/simulator-mips.cc +2438 -0
  317. data/ext/v8/upstream/v8/src/mips/simulator-mips.h +394 -0
  318. data/ext/v8/upstream/v8/src/mips/stub-cache-mips.cc +601 -0
  319. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips-inl.h +58 -0
  320. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips.cc +307 -0
  321. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips.h +530 -0
  322. data/ext/v8/upstream/v8/src/mirror-debugger.js +2381 -0
  323. data/ext/v8/upstream/v8/src/mksnapshot.cc +256 -0
  324. data/ext/v8/upstream/{3.1.8 → v8}/src/natives.h +0 -0
  325. data/ext/v8/upstream/v8/src/objects-debug.cc +722 -0
  326. data/ext/v8/upstream/v8/src/objects-inl.h +4166 -0
  327. data/ext/v8/upstream/v8/src/objects-printer.cc +801 -0
  328. data/ext/v8/upstream/v8/src/objects-visiting.cc +142 -0
  329. data/ext/v8/upstream/v8/src/objects-visiting.h +422 -0
  330. data/ext/v8/upstream/v8/src/objects.cc +10296 -0
  331. data/ext/v8/upstream/v8/src/objects.h +6662 -0
  332. data/ext/v8/upstream/v8/src/parser.cc +5168 -0
  333. data/ext/v8/upstream/v8/src/parser.h +823 -0
  334. data/ext/v8/upstream/v8/src/platform-cygwin.cc +811 -0
  335. data/ext/v8/upstream/v8/src/platform-freebsd.cc +854 -0
  336. data/ext/v8/upstream/v8/src/platform-linux.cc +1120 -0
  337. data/ext/v8/upstream/v8/src/platform-macos.cc +865 -0
  338. data/ext/v8/upstream/v8/src/platform-nullos.cc +504 -0
  339. data/ext/v8/upstream/v8/src/platform-openbsd.cc +672 -0
  340. data/ext/v8/upstream/v8/src/platform-posix.cc +424 -0
  341. data/ext/v8/upstream/v8/src/platform-solaris.cc +796 -0
  342. data/ext/v8/upstream/v8/src/platform-tls-mac.h +62 -0
  343. data/ext/v8/upstream/v8/src/platform-tls-win32.h +62 -0
  344. data/ext/v8/upstream/v8/src/platform-tls.h +50 -0
  345. data/ext/v8/upstream/v8/src/platform-win32.cc +2072 -0
  346. data/ext/v8/upstream/v8/src/platform.h +693 -0
  347. data/ext/v8/upstream/v8/src/preparse-data.cc +185 -0
  348. data/ext/v8/upstream/{3.1.8 → v8}/src/preparse-data.h +0 -0
  349. data/ext/v8/upstream/v8/src/preparser-api.cc +219 -0
  350. data/ext/v8/upstream/v8/src/preparser.cc +1205 -0
  351. data/ext/v8/upstream/{3.1.8 → v8}/src/preparser.h +0 -0
  352. data/ext/v8/upstream/v8/src/prettyprinter.cc +1530 -0
  353. data/ext/v8/upstream/v8/src/prettyprinter.h +223 -0
  354. data/ext/v8/upstream/{3.1.8 → v8}/src/profile-generator-inl.h +0 -0
  355. data/ext/v8/upstream/v8/src/profile-generator.cc +3095 -0
  356. data/ext/v8/upstream/v8/src/profile-generator.h +1125 -0
  357. data/ext/v8/upstream/v8/src/property.cc +102 -0
  358. data/ext/v8/upstream/v8/src/property.h +348 -0
  359. data/ext/v8/upstream/{3.1.8 → v8}/src/regexp-macro-assembler-irregexp-inl.h +0 -0
  360. data/ext/v8/upstream/v8/src/regexp-macro-assembler-irregexp.cc +470 -0
  361. data/ext/v8/upstream/{3.1.8 → v8}/src/regexp-macro-assembler-irregexp.h +0 -0
  362. data/ext/v8/upstream/{3.1.8 → v8}/src/regexp-macro-assembler-tracer.cc +0 -0
  363. data/ext/v8/upstream/{3.1.8 → v8}/src/regexp-macro-assembler-tracer.h +0 -0
  364. data/ext/v8/upstream/v8/src/regexp-macro-assembler.cc +266 -0
  365. data/ext/v8/upstream/v8/src/regexp-macro-assembler.h +236 -0
  366. data/ext/v8/upstream/v8/src/regexp-stack.cc +111 -0
  367. data/ext/v8/upstream/v8/src/regexp-stack.h +147 -0
  368. data/ext/v8/upstream/v8/src/regexp.js +483 -0
  369. data/ext/v8/upstream/v8/src/register-allocator-inl.h +141 -0
  370. data/ext/v8/upstream/v8/src/register-allocator.cc +98 -0
  371. data/ext/v8/upstream/v8/src/register-allocator.h +310 -0
  372. data/ext/v8/upstream/v8/src/rewriter.cc +1024 -0
  373. data/ext/v8/upstream/{3.1.8 → v8}/src/rewriter.h +0 -0
  374. data/ext/v8/upstream/v8/src/runtime-profiler.cc +478 -0
  375. data/ext/v8/upstream/v8/src/runtime-profiler.h +192 -0
  376. data/ext/v8/upstream/v8/src/runtime.cc +11949 -0
  377. data/ext/v8/upstream/v8/src/runtime.h +643 -0
  378. data/ext/v8/upstream/{3.1.8 → v8}/src/runtime.js +0 -0
  379. data/ext/v8/upstream/v8/src/safepoint-table.cc +256 -0
  380. data/ext/v8/upstream/v8/src/safepoint-table.h +269 -0
  381. data/ext/v8/upstream/v8/src/scanner-base.cc +964 -0
  382. data/ext/v8/upstream/v8/src/scanner-base.h +664 -0
  383. data/ext/v8/upstream/v8/src/scanner.cc +584 -0
  384. data/ext/v8/upstream/v8/src/scanner.h +196 -0
  385. data/ext/v8/upstream/v8/src/scopeinfo.cc +631 -0
  386. data/ext/v8/upstream/v8/src/scopeinfo.h +249 -0
  387. data/ext/v8/upstream/v8/src/scopes.cc +1093 -0
  388. data/ext/v8/upstream/v8/src/scopes.h +508 -0
  389. data/ext/v8/upstream/v8/src/serialize.cc +1574 -0
  390. data/ext/v8/upstream/v8/src/serialize.h +589 -0
  391. data/ext/v8/upstream/{3.1.8 → v8}/src/shell.h +0 -0
  392. data/ext/v8/upstream/{3.1.8 → v8}/src/simulator.h +0 -0
  393. data/ext/v8/upstream/v8/src/small-pointer-list.h +163 -0
  394. data/ext/v8/upstream/{3.1.8 → v8}/src/smart-pointer.h +0 -0
  395. data/ext/v8/upstream/v8/src/snapshot-common.cc +82 -0
  396. data/ext/v8/upstream/{3.1.8 → v8}/src/snapshot-empty.cc +0 -0
  397. data/ext/v8/upstream/v8/src/snapshot.h +73 -0
  398. data/ext/v8/upstream/v8/src/spaces-inl.h +529 -0
  399. data/ext/v8/upstream/v8/src/spaces.cc +3147 -0
  400. data/ext/v8/upstream/v8/src/spaces.h +2368 -0
  401. data/ext/v8/upstream/{3.1.8 → v8}/src/splay-tree-inl.h +0 -0
  402. data/ext/v8/upstream/{3.1.8 → v8}/src/splay-tree.h +0 -0
  403. data/ext/v8/upstream/v8/src/string-search.cc +41 -0
  404. data/ext/v8/upstream/v8/src/string-search.h +568 -0
  405. data/ext/v8/upstream/v8/src/string-stream.cc +592 -0
  406. data/ext/v8/upstream/{3.1.8 → v8}/src/string-stream.h +0 -0
  407. data/ext/v8/upstream/v8/src/string.js +915 -0
  408. data/ext/v8/upstream/{3.1.8 → v8}/src/strtod.cc +0 -0
  409. data/ext/v8/upstream/{3.1.8 → v8}/src/strtod.h +0 -0
  410. data/ext/v8/upstream/v8/src/stub-cache.cc +1940 -0
  411. data/ext/v8/upstream/v8/src/stub-cache.h +866 -0
  412. data/ext/v8/upstream/{3.1.8 → v8}/src/third_party/valgrind/valgrind.h +0 -0
  413. data/ext/v8/upstream/v8/src/token.cc +63 -0
  414. data/ext/v8/upstream/v8/src/token.h +288 -0
  415. data/ext/v8/upstream/v8/src/top.cc +983 -0
  416. data/ext/v8/upstream/v8/src/type-info.cc +472 -0
  417. data/ext/v8/upstream/v8/src/type-info.h +290 -0
  418. data/ext/v8/upstream/{3.1.8 → v8}/src/unbound-queue-inl.h +0 -0
  419. data/ext/v8/upstream/{3.1.8 → v8}/src/unbound-queue.h +0 -0
  420. data/ext/v8/upstream/{3.1.8 → v8}/src/unicode-inl.h +0 -0
  421. data/ext/v8/upstream/v8/src/unicode.cc +1624 -0
  422. data/ext/v8/upstream/v8/src/unicode.h +280 -0
  423. data/ext/v8/upstream/{3.1.8 → v8}/src/uri.js +0 -0
  424. data/ext/v8/upstream/{3.1.8 → v8}/src/utils.cc +0 -0
  425. data/ext/v8/upstream/v8/src/utils.h +796 -0
  426. data/ext/v8/upstream/v8/src/v8-counters.cc +62 -0
  427. data/ext/v8/upstream/v8/src/v8-counters.h +311 -0
  428. data/ext/v8/upstream/v8/src/v8.cc +215 -0
  429. data/ext/v8/upstream/v8/src/v8.h +130 -0
  430. data/ext/v8/upstream/{3.1.8 → v8}/src/v8checks.h +0 -0
  431. data/ext/v8/upstream/{3.1.8 → v8}/src/v8dll-main.cc +0 -0
  432. data/ext/v8/upstream/v8/src/v8globals.h +486 -0
  433. data/ext/v8/upstream/{3.1.8/src/memory.h → v8/src/v8memory.h} +0 -0
  434. data/ext/v8/upstream/v8/src/v8natives.js +1293 -0
  435. data/ext/v8/upstream/{3.1.8 → v8}/src/v8preparserdll-main.cc +0 -0
  436. data/ext/v8/upstream/v8/src/v8threads.cc +453 -0
  437. data/ext/v8/upstream/v8/src/v8threads.h +164 -0
  438. data/ext/v8/upstream/v8/src/v8utils.h +317 -0
  439. data/ext/v8/upstream/{3.1.8 → v8}/src/variables.cc +0 -0
  440. data/ext/v8/upstream/v8/src/variables.h +212 -0
  441. data/ext/v8/upstream/v8/src/version.cc +116 -0
  442. data/ext/v8/upstream/v8/src/version.h +68 -0
  443. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame-heavy-inl.h +0 -0
  444. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame-heavy.cc +0 -0
  445. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame-inl.h +0 -0
  446. data/ext/v8/upstream/v8/src/virtual-frame-light-inl.h +171 -0
  447. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame-light.cc +0 -0
  448. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame.cc +0 -0
  449. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame.h +0 -0
  450. data/ext/v8/upstream/v8/src/vm-state-inl.h +138 -0
  451. data/ext/v8/upstream/v8/src/vm-state.h +70 -0
  452. data/ext/v8/upstream/v8/src/win32-headers.h +96 -0
  453. data/ext/v8/upstream/v8/src/x64/assembler-x64-inl.h +456 -0
  454. data/ext/v8/upstream/v8/src/x64/assembler-x64.cc +2954 -0
  455. data/ext/v8/upstream/v8/src/x64/assembler-x64.h +1630 -0
  456. data/ext/v8/upstream/v8/src/x64/builtins-x64.cc +1493 -0
  457. data/ext/v8/upstream/v8/src/x64/code-stubs-x64.cc +5132 -0
  458. data/ext/v8/upstream/v8/src/x64/code-stubs-x64.h +477 -0
  459. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/codegen-x64-inl.h +0 -0
  460. data/ext/v8/upstream/v8/src/x64/codegen-x64.cc +8843 -0
  461. data/ext/v8/upstream/v8/src/x64/codegen-x64.h +753 -0
  462. data/ext/v8/upstream/v8/src/x64/cpu-x64.cc +88 -0
  463. data/ext/v8/upstream/v8/src/x64/debug-x64.cc +318 -0
  464. data/ext/v8/upstream/v8/src/x64/deoptimizer-x64.cc +815 -0
  465. data/ext/v8/upstream/v8/src/x64/disasm-x64.cc +1752 -0
  466. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/frames-x64.cc +0 -0
  467. data/ext/v8/upstream/v8/src/x64/frames-x64.h +130 -0
  468. data/ext/v8/upstream/v8/src/x64/full-codegen-x64.cc +4339 -0
  469. data/ext/v8/upstream/v8/src/x64/ic-x64.cc +1752 -0
  470. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/jump-target-x64.cc +0 -0
  471. data/ext/v8/upstream/v8/src/x64/lithium-codegen-x64.cc +3970 -0
  472. data/ext/v8/upstream/v8/src/x64/lithium-codegen-x64.h +318 -0
  473. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/lithium-gap-resolver-x64.cc +0 -0
  474. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/lithium-gap-resolver-x64.h +0 -0
  475. data/ext/v8/upstream/v8/src/x64/lithium-x64.cc +2115 -0
  476. data/ext/v8/upstream/v8/src/x64/lithium-x64.h +2161 -0
  477. data/ext/v8/upstream/v8/src/x64/macro-assembler-x64.cc +2911 -0
  478. data/ext/v8/upstream/v8/src/x64/macro-assembler-x64.h +1984 -0
  479. data/ext/v8/upstream/v8/src/x64/regexp-macro-assembler-x64.cc +1398 -0
  480. data/ext/v8/upstream/v8/src/x64/regexp-macro-assembler-x64.h +282 -0
  481. data/ext/v8/upstream/v8/src/x64/register-allocator-x64-inl.h +87 -0
  482. data/ext/v8/upstream/v8/src/x64/register-allocator-x64.cc +95 -0
  483. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/register-allocator-x64.h +0 -0
  484. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/simulator-x64.cc +0 -0
  485. data/ext/v8/upstream/v8/src/x64/simulator-x64.h +71 -0
  486. data/ext/v8/upstream/v8/src/x64/stub-cache-x64.cc +3460 -0
  487. data/ext/v8/upstream/v8/src/x64/virtual-frame-x64.cc +1296 -0
  488. data/ext/v8/upstream/v8/src/x64/virtual-frame-x64.h +597 -0
  489. data/ext/v8/upstream/v8/src/zone-inl.h +129 -0
  490. data/ext/v8/upstream/v8/src/zone.cc +196 -0
  491. data/ext/v8/upstream/v8/src/zone.h +236 -0
  492. data/ext/v8/upstream/{3.1.8 → v8}/tools/codemap.js +0 -0
  493. data/ext/v8/upstream/{3.1.8 → v8}/tools/consarray.js +0 -0
  494. data/ext/v8/upstream/{3.1.8 → v8}/tools/csvparser.js +0 -0
  495. data/ext/v8/upstream/{3.1.8 → v8}/tools/disasm.py +0 -0
  496. data/ext/v8/upstream/v8/tools/freebsd-tick-processor +10 -0
  497. data/ext/v8/upstream/{3.1.8 → v8}/tools/gc-nvp-trace-processor.py +0 -0
  498. data/ext/v8/upstream/{3.1.8 → v8}/tools/generate-ten-powers.scm +0 -0
  499. data/ext/v8/upstream/{3.1.8 → v8}/tools/grokdump.py +0 -0
  500. data/ext/v8/upstream/v8/tools/gyp/v8.gyp +844 -0
  501. data/ext/v8/upstream/{3.1.8 → v8}/tools/js2c.py +0 -0
  502. data/ext/v8/upstream/{3.1.8 → v8}/tools/jsmin.py +0 -0
  503. data/ext/v8/upstream/v8/tools/linux-tick-processor +35 -0
  504. data/ext/v8/upstream/{3.1.8 → v8}/tools/ll_prof.py +0 -0
  505. data/ext/v8/upstream/{3.1.8 → v8}/tools/logreader.js +0 -0
  506. data/ext/v8/upstream/{3.1.8 → v8}/tools/mac-nm +0 -0
  507. data/ext/v8/upstream/{3.1.8 → v8}/tools/mac-tick-processor +0 -0
  508. data/ext/v8/upstream/{3.1.8 → v8}/tools/oom_dump/README +0 -0
  509. data/ext/v8/upstream/{3.1.8 → v8}/tools/oom_dump/SConstruct +0 -0
  510. data/ext/v8/upstream/{3.1.8 → v8}/tools/oom_dump/oom_dump.cc +0 -0
  511. data/ext/v8/upstream/{3.1.8 → v8}/tools/presubmit.py +0 -0
  512. data/ext/v8/upstream/{3.1.8 → v8}/tools/process-heap-prof.py +0 -0
  513. data/ext/v8/upstream/{3.1.8 → v8}/tools/profile.js +0 -0
  514. data/ext/v8/upstream/{3.1.8 → v8}/tools/profile_view.js +0 -0
  515. data/ext/v8/upstream/{3.1.8 → v8}/tools/run-valgrind.py +0 -0
  516. data/ext/v8/upstream/{3.1.8 → v8}/tools/splaytree.js +0 -0
  517. data/ext/v8/upstream/{3.1.8 → v8}/tools/stats-viewer.py +0 -0
  518. data/ext/v8/upstream/v8/tools/test.py +1490 -0
  519. data/ext/v8/upstream/{3.1.8 → v8}/tools/tickprocessor-driver.js +0 -0
  520. data/ext/v8/upstream/v8/tools/tickprocessor.js +877 -0
  521. data/ext/v8/upstream/{3.1.8 → v8}/tools/utils.py +0 -0
  522. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/README.txt +0 -0
  523. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/arm.vsprops +0 -0
  524. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/common.vsprops +0 -0
  525. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/d8.vcproj +0 -0
  526. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/d8_arm.vcproj +0 -0
  527. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/d8_x64.vcproj +0 -0
  528. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/d8js2c.cmd +0 -0
  529. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/debug.vsprops +0 -0
  530. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/ia32.vsprops +0 -0
  531. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/js2c.cmd +0 -0
  532. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/release.vsprops +0 -0
  533. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8.sln +0 -0
  534. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8.vcproj +0 -0
  535. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_arm.sln +0 -0
  536. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_arm.vcproj +0 -0
  537. data/ext/v8/upstream/v8/tools/visual_studio/v8_base.vcproj +1308 -0
  538. data/ext/v8/upstream/v8/tools/visual_studio/v8_base_arm.vcproj +1238 -0
  539. data/ext/v8/upstream/v8/tools/visual_studio/v8_base_x64.vcproj +1300 -0
  540. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_cctest.vcproj +0 -0
  541. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_cctest_arm.vcproj +0 -0
  542. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_cctest_x64.vcproj +0 -0
  543. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_mksnapshot.vcproj +0 -0
  544. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_mksnapshot_x64.vcproj +0 -0
  545. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_process_sample.vcproj +0 -0
  546. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_process_sample_arm.vcproj +0 -0
  547. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_process_sample_x64.vcproj +0 -0
  548. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_shell_sample.vcproj +0 -0
  549. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_shell_sample_arm.vcproj +0 -0
  550. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_shell_sample_x64.vcproj +0 -0
  551. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_snapshot.vcproj +0 -0
  552. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_snapshot_cc.vcproj +0 -0
  553. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_snapshot_cc_x64.vcproj +0 -0
  554. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_snapshot_x64.vcproj +0 -0
  555. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_x64.sln +0 -0
  556. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_x64.vcproj +0 -0
  557. data/ext/v8/upstream/v8/tools/visual_studio/x64.vsprops +18 -0
  558. data/ext/v8/upstream/{3.1.8 → v8}/tools/windows-tick-processor.bat +0 -0
  559. data/ext/v8/v8_callbacks.cpp +52 -92
  560. data/ext/v8/v8_date.cpp +2 -3
  561. data/ext/v8/v8_object.cpp +4 -0
  562. data/ext/v8/v8_template.cpp +2 -2
  563. data/ext/v8/v8_try_catch.cpp +8 -38
  564. data/lib/v8/version.rb +1 -1
  565. data/spec/ext/ext_spec_helper.rb +2 -20
  566. data/spec/ext/object_spec.rb +0 -12
  567. data/spec/ext/try_catch_spec.rb +29 -1
  568. data/spec/spec_helper.rb +1 -0
  569. data/spec/v8/portal/proxies_spec.rb +1 -84
  570. data/specmem/handle_memspec.rb +41 -0
  571. data/specmem/object_memspec.rb +16 -0
  572. data/specmem/proxies_memspec.rb +86 -0
  573. data/specmem/spec_helper.rb +24 -0
  574. data/therubyracer.gemspec +7 -2
  575. metadata +564 -541
  576. data/ext/v8/upstream/3.1.8/.gitignore +0 -31
  577. data/ext/v8/upstream/3.1.8/AUTHORS +0 -40
  578. data/ext/v8/upstream/3.1.8/ChangeLog +0 -2566
  579. data/ext/v8/upstream/3.1.8/SConstruct +0 -1192
  580. data/ext/v8/upstream/3.1.8/include/v8-debug.h +0 -384
  581. data/ext/v8/upstream/3.1.8/include/v8-preparser.h +0 -116
  582. data/ext/v8/upstream/3.1.8/include/v8-profiler.h +0 -426
  583. data/ext/v8/upstream/3.1.8/include/v8-testing.h +0 -99
  584. data/ext/v8/upstream/3.1.8/include/v8.h +0 -3846
  585. data/ext/v8/upstream/3.1.8/preparser/preparser-process.cc +0 -206
  586. data/ext/v8/upstream/3.1.8/src/SConscript +0 -356
  587. data/ext/v8/upstream/3.1.8/src/accessors.cc +0 -907
  588. data/ext/v8/upstream/3.1.8/src/allocation.cc +0 -204
  589. data/ext/v8/upstream/3.1.8/src/allocation.h +0 -176
  590. data/ext/v8/upstream/3.1.8/src/api.cc +0 -5191
  591. data/ext/v8/upstream/3.1.8/src/api.h +0 -508
  592. data/ext/v8/upstream/3.1.8/src/apiutils.h +0 -80
  593. data/ext/v8/upstream/3.1.8/src/arguments.h +0 -105
  594. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm-inl.h +0 -352
  595. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.cc +0 -2756
  596. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.h +0 -1294
  597. data/ext/v8/upstream/3.1.8/src/arm/builtins-arm.cc +0 -1628
  598. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.cc +0 -6783
  599. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.h +0 -657
  600. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.cc +0 -7403
  601. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.h +0 -595
  602. data/ext/v8/upstream/3.1.8/src/arm/constants-arm.h +0 -769
  603. data/ext/v8/upstream/3.1.8/src/arm/cpu-arm.cc +0 -147
  604. data/ext/v8/upstream/3.1.8/src/arm/debug-arm.cc +0 -315
  605. data/ext/v8/upstream/3.1.8/src/arm/deoptimizer-arm.cc +0 -700
  606. data/ext/v8/upstream/3.1.8/src/arm/disasm-arm.cc +0 -1439
  607. data/ext/v8/upstream/3.1.8/src/arm/frames-arm.h +0 -168
  608. data/ext/v8/upstream/3.1.8/src/arm/full-codegen-arm.cc +0 -4230
  609. data/ext/v8/upstream/3.1.8/src/arm/ic-arm.cc +0 -1799
  610. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.cc +0 -2041
  611. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.h +0 -2046
  612. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.cc +0 -3822
  613. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.h +0 -312
  614. data/ext/v8/upstream/3.1.8/src/arm/lithium-gap-resolver-arm.cc +0 -303
  615. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.cc +0 -2701
  616. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.h +0 -1015
  617. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.cc +0 -1280
  618. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.h +0 -252
  619. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.cc +0 -3165
  620. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.h +0 -402
  621. data/ext/v8/upstream/3.1.8/src/arm/stub-cache-arm.cc +0 -4077
  622. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.cc +0 -843
  623. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.h +0 -520
  624. data/ext/v8/upstream/3.1.8/src/array.js +0 -1231
  625. data/ext/v8/upstream/3.1.8/src/assembler.cc +0 -973
  626. data/ext/v8/upstream/3.1.8/src/assembler.h +0 -787
  627. data/ext/v8/upstream/3.1.8/src/ast-inl.h +0 -107
  628. data/ext/v8/upstream/3.1.8/src/ast.cc +0 -1067
  629. data/ext/v8/upstream/3.1.8/src/ast.h +0 -2177
  630. data/ext/v8/upstream/3.1.8/src/atomicops.h +0 -165
  631. data/ext/v8/upstream/3.1.8/src/bootstrapper.cc +0 -1888
  632. data/ext/v8/upstream/3.1.8/src/bootstrapper.h +0 -118
  633. data/ext/v8/upstream/3.1.8/src/builtins.cc +0 -1586
  634. data/ext/v8/upstream/3.1.8/src/builtins.h +0 -339
  635. data/ext/v8/upstream/3.1.8/src/checks.cc +0 -110
  636. data/ext/v8/upstream/3.1.8/src/checks.h +0 -292
  637. data/ext/v8/upstream/3.1.8/src/code-stubs.cc +0 -230
  638. data/ext/v8/upstream/3.1.8/src/code-stubs.h +0 -950
  639. data/ext/v8/upstream/3.1.8/src/codegen-inl.h +0 -64
  640. data/ext/v8/upstream/3.1.8/src/codegen.cc +0 -495
  641. data/ext/v8/upstream/3.1.8/src/codegen.h +0 -245
  642. data/ext/v8/upstream/3.1.8/src/compilation-cache.cc +0 -654
  643. data/ext/v8/upstream/3.1.8/src/compilation-cache.h +0 -112
  644. data/ext/v8/upstream/3.1.8/src/compiler.cc +0 -806
  645. data/ext/v8/upstream/3.1.8/src/compiler.h +0 -290
  646. data/ext/v8/upstream/3.1.8/src/contexts.cc +0 -320
  647. data/ext/v8/upstream/3.1.8/src/contexts.h +0 -376
  648. data/ext/v8/upstream/3.1.8/src/conversions.cc +0 -1069
  649. data/ext/v8/upstream/3.1.8/src/counters.cc +0 -78
  650. data/ext/v8/upstream/3.1.8/src/counters.h +0 -242
  651. data/ext/v8/upstream/3.1.8/src/cpu-profiler-inl.h +0 -100
  652. data/ext/v8/upstream/3.1.8/src/cpu-profiler.cc +0 -554
  653. data/ext/v8/upstream/3.1.8/src/cpu-profiler.h +0 -291
  654. data/ext/v8/upstream/3.1.8/src/cpu.h +0 -65
  655. data/ext/v8/upstream/3.1.8/src/d8-debug.cc +0 -367
  656. data/ext/v8/upstream/3.1.8/src/d8-debug.h +0 -157
  657. data/ext/v8/upstream/3.1.8/src/d8-posix.cc +0 -693
  658. data/ext/v8/upstream/3.1.8/src/d8.cc +0 -792
  659. data/ext/v8/upstream/3.1.8/src/d8.gyp +0 -85
  660. data/ext/v8/upstream/3.1.8/src/data-flow.h +0 -379
  661. data/ext/v8/upstream/3.1.8/src/dateparser.h +0 -263
  662. data/ext/v8/upstream/3.1.8/src/debug-agent.cc +0 -446
  663. data/ext/v8/upstream/3.1.8/src/debug-agent.h +0 -131
  664. data/ext/v8/upstream/3.1.8/src/debug.cc +0 -3085
  665. data/ext/v8/upstream/3.1.8/src/debug.h +0 -1025
  666. data/ext/v8/upstream/3.1.8/src/deoptimizer.cc +0 -1185
  667. data/ext/v8/upstream/3.1.8/src/deoptimizer.h +0 -529
  668. data/ext/v8/upstream/3.1.8/src/disasm.h +0 -77
  669. data/ext/v8/upstream/3.1.8/src/disassembler.cc +0 -338
  670. data/ext/v8/upstream/3.1.8/src/execution.cc +0 -735
  671. data/ext/v8/upstream/3.1.8/src/execution.h +0 -322
  672. data/ext/v8/upstream/3.1.8/src/extensions/experimental/experimental.gyp +0 -53
  673. data/ext/v8/upstream/3.1.8/src/extensions/experimental/i18n-extension.cc +0 -264
  674. data/ext/v8/upstream/3.1.8/src/extensions/externalize-string-extension.cc +0 -141
  675. data/ext/v8/upstream/3.1.8/src/extensions/gc-extension.cc +0 -58
  676. data/ext/v8/upstream/3.1.8/src/factory.cc +0 -1087
  677. data/ext/v8/upstream/3.1.8/src/factory.h +0 -432
  678. data/ext/v8/upstream/3.1.8/src/flag-definitions.h +0 -552
  679. data/ext/v8/upstream/3.1.8/src/frame-element.cc +0 -42
  680. data/ext/v8/upstream/3.1.8/src/frame-element.h +0 -277
  681. data/ext/v8/upstream/3.1.8/src/frames-inl.h +0 -210
  682. data/ext/v8/upstream/3.1.8/src/frames.cc +0 -1232
  683. data/ext/v8/upstream/3.1.8/src/frames.h +0 -826
  684. data/ext/v8/upstream/3.1.8/src/full-codegen.cc +0 -1382
  685. data/ext/v8/upstream/3.1.8/src/full-codegen.h +0 -751
  686. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.cc +0 -90
  687. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.h +0 -111
  688. data/ext/v8/upstream/3.1.8/src/gdb-jit.cc +0 -1547
  689. data/ext/v8/upstream/3.1.8/src/global-handles.cc +0 -534
  690. data/ext/v8/upstream/3.1.8/src/global-handles.h +0 -181
  691. data/ext/v8/upstream/3.1.8/src/globals.h +0 -325
  692. data/ext/v8/upstream/3.1.8/src/handles-inl.h +0 -80
  693. data/ext/v8/upstream/3.1.8/src/handles.cc +0 -910
  694. data/ext/v8/upstream/3.1.8/src/handles.h +0 -424
  695. data/ext/v8/upstream/3.1.8/src/hashmap.h +0 -121
  696. data/ext/v8/upstream/3.1.8/src/heap-inl.h +0 -587
  697. data/ext/v8/upstream/3.1.8/src/heap-profiler.cc +0 -1128
  698. data/ext/v8/upstream/3.1.8/src/heap-profiler.h +0 -381
  699. data/ext/v8/upstream/3.1.8/src/heap.cc +0 -5610
  700. data/ext/v8/upstream/3.1.8/src/heap.h +0 -2218
  701. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.cc +0 -1490
  702. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.h +0 -3493
  703. data/ext/v8/upstream/3.1.8/src/hydrogen.cc +0 -6056
  704. data/ext/v8/upstream/3.1.8/src/hydrogen.h +0 -1091
  705. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32-inl.h +0 -429
  706. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.cc +0 -2800
  707. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.h +0 -1093
  708. data/ext/v8/upstream/3.1.8/src/ia32/builtins-ia32.cc +0 -1590
  709. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.cc +0 -6624
  710. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.h +0 -536
  711. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.cc +0 -10354
  712. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.h +0 -798
  713. data/ext/v8/upstream/3.1.8/src/ia32/cpu-ia32.cc +0 -87
  714. data/ext/v8/upstream/3.1.8/src/ia32/debug-ia32.cc +0 -309
  715. data/ext/v8/upstream/3.1.8/src/ia32/deoptimizer-ia32.cc +0 -664
  716. data/ext/v8/upstream/3.1.8/src/ia32/disasm-ia32.cc +0 -1597
  717. data/ext/v8/upstream/3.1.8/src/ia32/frames-ia32.h +0 -140
  718. data/ext/v8/upstream/3.1.8/src/ia32/full-codegen-ia32.cc +0 -4278
  719. data/ext/v8/upstream/3.1.8/src/ia32/ic-ia32.cc +0 -1786
  720. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.cc +0 -3880
  721. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.h +0 -309
  722. data/ext/v8/upstream/3.1.8/src/ia32/lithium-gap-resolver-ia32.cc +0 -460
  723. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.cc +0 -2095
  724. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.h +0 -2127
  725. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.cc +0 -2031
  726. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.h +0 -798
  727. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.cc +0 -1253
  728. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.h +0 -215
  729. data/ext/v8/upstream/3.1.8/src/ia32/register-allocator-ia32.cc +0 -157
  730. data/ext/v8/upstream/3.1.8/src/ia32/simulator-ia32.h +0 -72
  731. data/ext/v8/upstream/3.1.8/src/ia32/stub-cache-ia32.cc +0 -3732
  732. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.cc +0 -1360
  733. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.h +0 -646
  734. data/ext/v8/upstream/3.1.8/src/ic-inl.h +0 -129
  735. data/ext/v8/upstream/3.1.8/src/ic.cc +0 -2333
  736. data/ext/v8/upstream/3.1.8/src/ic.h +0 -639
  737. data/ext/v8/upstream/3.1.8/src/interpreter-irregexp.cc +0 -655
  738. data/ext/v8/upstream/3.1.8/src/interpreter-irregexp.h +0 -48
  739. data/ext/v8/upstream/3.1.8/src/json.js +0 -342
  740. data/ext/v8/upstream/3.1.8/src/jsregexp.cc +0 -5340
  741. data/ext/v8/upstream/3.1.8/src/jsregexp.h +0 -1484
  742. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.cc +0 -430
  743. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.h +0 -244
  744. data/ext/v8/upstream/3.1.8/src/jump-target-inl.h +0 -48
  745. data/ext/v8/upstream/3.1.8/src/jump-target-light.cc +0 -111
  746. data/ext/v8/upstream/3.1.8/src/lithium-allocator-inl.h +0 -140
  747. data/ext/v8/upstream/3.1.8/src/lithium-allocator.cc +0 -2093
  748. data/ext/v8/upstream/3.1.8/src/lithium-allocator.h +0 -644
  749. data/ext/v8/upstream/3.1.8/src/lithium.cc +0 -168
  750. data/ext/v8/upstream/3.1.8/src/liveedit.cc +0 -1650
  751. data/ext/v8/upstream/3.1.8/src/liveedit.h +0 -174
  752. data/ext/v8/upstream/3.1.8/src/liveobjectlist.cc +0 -2527
  753. data/ext/v8/upstream/3.1.8/src/liveobjectlist.h +0 -322
  754. data/ext/v8/upstream/3.1.8/src/log-utils.cc +0 -336
  755. data/ext/v8/upstream/3.1.8/src/log-utils.h +0 -232
  756. data/ext/v8/upstream/3.1.8/src/log.cc +0 -1608
  757. data/ext/v8/upstream/3.1.8/src/log.h +0 -379
  758. data/ext/v8/upstream/3.1.8/src/mark-compact.cc +0 -2957
  759. data/ext/v8/upstream/3.1.8/src/mark-compact.h +0 -433
  760. data/ext/v8/upstream/3.1.8/src/messages.cc +0 -164
  761. data/ext/v8/upstream/3.1.8/src/messages.js +0 -1071
  762. data/ext/v8/upstream/3.1.8/src/mips/assembler-mips-inl.h +0 -215
  763. data/ext/v8/upstream/3.1.8/src/mips/assembler-mips.cc +0 -1219
  764. data/ext/v8/upstream/3.1.8/src/mips/assembler-mips.h +0 -667
  765. data/ext/v8/upstream/3.1.8/src/mips/builtins-mips.cc +0 -205
  766. data/ext/v8/upstream/3.1.8/src/mips/codegen-mips-inl.h +0 -70
  767. data/ext/v8/upstream/3.1.8/src/mips/codegen-mips.cc +0 -1437
  768. data/ext/v8/upstream/3.1.8/src/mips/codegen-mips.h +0 -431
  769. data/ext/v8/upstream/3.1.8/src/mips/constants-mips.cc +0 -328
  770. data/ext/v8/upstream/3.1.8/src/mips/constants-mips.h +0 -525
  771. data/ext/v8/upstream/3.1.8/src/mips/cpu-mips.cc +0 -73
  772. data/ext/v8/upstream/3.1.8/src/mips/debug-mips.cc +0 -127
  773. data/ext/v8/upstream/3.1.8/src/mips/disasm-mips.cc +0 -787
  774. data/ext/v8/upstream/3.1.8/src/mips/fast-codegen-mips.cc +0 -77
  775. data/ext/v8/upstream/3.1.8/src/mips/frames-mips.cc +0 -96
  776. data/ext/v8/upstream/3.1.8/src/mips/frames-mips.h +0 -164
  777. data/ext/v8/upstream/3.1.8/src/mips/full-codegen-mips.cc +0 -277
  778. data/ext/v8/upstream/3.1.8/src/mips/ic-mips.cc +0 -208
  779. data/ext/v8/upstream/3.1.8/src/mips/jump-target-mips.cc +0 -175
  780. data/ext/v8/upstream/3.1.8/src/mips/macro-assembler-mips.cc +0 -1326
  781. data/ext/v8/upstream/3.1.8/src/mips/macro-assembler-mips.h +0 -461
  782. data/ext/v8/upstream/3.1.8/src/mips/register-allocator-mips-inl.h +0 -137
  783. data/ext/v8/upstream/3.1.8/src/mips/register-allocator-mips.h +0 -46
  784. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.cc +0 -1650
  785. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.h +0 -311
  786. data/ext/v8/upstream/3.1.8/src/mips/stub-cache-mips.cc +0 -418
  787. data/ext/v8/upstream/3.1.8/src/mips/virtual-frame-mips.cc +0 -319
  788. data/ext/v8/upstream/3.1.8/src/mips/virtual-frame-mips.h +0 -548
  789. data/ext/v8/upstream/3.1.8/src/mirror-debugger.js +0 -2380
  790. data/ext/v8/upstream/3.1.8/src/mksnapshot.cc +0 -256
  791. data/ext/v8/upstream/3.1.8/src/objects-debug.cc +0 -722
  792. data/ext/v8/upstream/3.1.8/src/objects-inl.h +0 -3946
  793. data/ext/v8/upstream/3.1.8/src/objects-printer.cc +0 -801
  794. data/ext/v8/upstream/3.1.8/src/objects-visiting.cc +0 -142
  795. data/ext/v8/upstream/3.1.8/src/objects-visiting.h +0 -401
  796. data/ext/v8/upstream/3.1.8/src/objects.cc +0 -10044
  797. data/ext/v8/upstream/3.1.8/src/objects.h +0 -6571
  798. data/ext/v8/upstream/3.1.8/src/parser.cc +0 -5165
  799. data/ext/v8/upstream/3.1.8/src/parser.h +0 -802
  800. data/ext/v8/upstream/3.1.8/src/platform-cygwin.cc +0 -745
  801. data/ext/v8/upstream/3.1.8/src/platform-freebsd.cc +0 -702
  802. data/ext/v8/upstream/3.1.8/src/platform-linux.cc +0 -981
  803. data/ext/v8/upstream/3.1.8/src/platform-macos.cc +0 -732
  804. data/ext/v8/upstream/3.1.8/src/platform-nullos.cc +0 -498
  805. data/ext/v8/upstream/3.1.8/src/platform-openbsd.cc +0 -657
  806. data/ext/v8/upstream/3.1.8/src/platform-posix.cc +0 -399
  807. data/ext/v8/upstream/3.1.8/src/platform-solaris.cc +0 -714
  808. data/ext/v8/upstream/3.1.8/src/platform-win32.cc +0 -1974
  809. data/ext/v8/upstream/3.1.8/src/platform.h +0 -636
  810. data/ext/v8/upstream/3.1.8/src/preparse-data.cc +0 -183
  811. data/ext/v8/upstream/3.1.8/src/preparser-api.cc +0 -213
  812. data/ext/v8/upstream/3.1.8/src/preparser.cc +0 -1205
  813. data/ext/v8/upstream/3.1.8/src/prettyprinter.cc +0 -1539
  814. data/ext/v8/upstream/3.1.8/src/prettyprinter.h +0 -223
  815. data/ext/v8/upstream/3.1.8/src/profile-generator.cc +0 -2899
  816. data/ext/v8/upstream/3.1.8/src/profile-generator.h +0 -1151
  817. data/ext/v8/upstream/3.1.8/src/property.cc +0 -96
  818. data/ext/v8/upstream/3.1.8/src/property.h +0 -337
  819. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-irregexp.cc +0 -470
  820. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.cc +0 -257
  821. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.h +0 -231
  822. data/ext/v8/upstream/3.1.8/src/regexp-stack.cc +0 -103
  823. data/ext/v8/upstream/3.1.8/src/regexp-stack.h +0 -123
  824. data/ext/v8/upstream/3.1.8/src/regexp.js +0 -483
  825. data/ext/v8/upstream/3.1.8/src/register-allocator-inl.h +0 -141
  826. data/ext/v8/upstream/3.1.8/src/register-allocator.cc +0 -104
  827. data/ext/v8/upstream/3.1.8/src/register-allocator.h +0 -320
  828. data/ext/v8/upstream/3.1.8/src/rewriter.cc +0 -1023
  829. data/ext/v8/upstream/3.1.8/src/runtime-profiler.cc +0 -443
  830. data/ext/v8/upstream/3.1.8/src/runtime-profiler.h +0 -77
  831. data/ext/v8/upstream/3.1.8/src/runtime.cc +0 -11592
  832. data/ext/v8/upstream/3.1.8/src/runtime.h +0 -582
  833. data/ext/v8/upstream/3.1.8/src/safepoint-table.cc +0 -253
  834. data/ext/v8/upstream/3.1.8/src/safepoint-table.h +0 -263
  835. data/ext/v8/upstream/3.1.8/src/scanner-base.cc +0 -971
  836. data/ext/v8/upstream/3.1.8/src/scanner-base.h +0 -653
  837. data/ext/v8/upstream/3.1.8/src/scanner.cc +0 -586
  838. data/ext/v8/upstream/3.1.8/src/scanner.h +0 -194
  839. data/ext/v8/upstream/3.1.8/src/scopeinfo.cc +0 -636
  840. data/ext/v8/upstream/3.1.8/src/scopeinfo.h +0 -238
  841. data/ext/v8/upstream/3.1.8/src/scopes.cc +0 -1063
  842. data/ext/v8/upstream/3.1.8/src/scopes.h +0 -494
  843. data/ext/v8/upstream/3.1.8/src/serialize.cc +0 -1535
  844. data/ext/v8/upstream/3.1.8/src/serialize.h +0 -584
  845. data/ext/v8/upstream/3.1.8/src/snapshot-common.cc +0 -82
  846. data/ext/v8/upstream/3.1.8/src/snapshot.h +0 -71
  847. data/ext/v8/upstream/3.1.8/src/spaces-inl.h +0 -524
  848. data/ext/v8/upstream/3.1.8/src/spaces.cc +0 -3254
  849. data/ext/v8/upstream/3.1.8/src/spaces.h +0 -2362
  850. data/ext/v8/upstream/3.1.8/src/string-search.cc +0 -40
  851. data/ext/v8/upstream/3.1.8/src/string-search.h +0 -567
  852. data/ext/v8/upstream/3.1.8/src/string-stream.cc +0 -584
  853. data/ext/v8/upstream/3.1.8/src/string.js +0 -915
  854. data/ext/v8/upstream/3.1.8/src/stub-cache.cc +0 -1878
  855. data/ext/v8/upstream/3.1.8/src/stub-cache.h +0 -849
  856. data/ext/v8/upstream/3.1.8/src/token.cc +0 -63
  857. data/ext/v8/upstream/3.1.8/src/token.h +0 -288
  858. data/ext/v8/upstream/3.1.8/src/top.cc +0 -1152
  859. data/ext/v8/upstream/3.1.8/src/top.h +0 -608
  860. data/ext/v8/upstream/3.1.8/src/type-info.cc +0 -406
  861. data/ext/v8/upstream/3.1.8/src/type-info.h +0 -283
  862. data/ext/v8/upstream/3.1.8/src/unicode.cc +0 -1624
  863. data/ext/v8/upstream/3.1.8/src/unicode.h +0 -280
  864. data/ext/v8/upstream/3.1.8/src/utils.h +0 -793
  865. data/ext/v8/upstream/3.1.8/src/v8-counters.cc +0 -55
  866. data/ext/v8/upstream/3.1.8/src/v8-counters.h +0 -290
  867. data/ext/v8/upstream/3.1.8/src/v8.cc +0 -270
  868. data/ext/v8/upstream/3.1.8/src/v8.h +0 -127
  869. data/ext/v8/upstream/3.1.8/src/v8globals.h +0 -480
  870. data/ext/v8/upstream/3.1.8/src/v8natives.js +0 -1252
  871. data/ext/v8/upstream/3.1.8/src/v8threads.cc +0 -440
  872. data/ext/v8/upstream/3.1.8/src/v8threads.h +0 -157
  873. data/ext/v8/upstream/3.1.8/src/v8utils.h +0 -354
  874. data/ext/v8/upstream/3.1.8/src/variables.h +0 -212
  875. data/ext/v8/upstream/3.1.8/src/version.cc +0 -95
  876. data/ext/v8/upstream/3.1.8/src/version.h +0 -64
  877. data/ext/v8/upstream/3.1.8/src/virtual-frame-light-inl.h +0 -170
  878. data/ext/v8/upstream/3.1.8/src/vm-state-inl.h +0 -134
  879. data/ext/v8/upstream/3.1.8/src/vm-state.h +0 -68
  880. data/ext/v8/upstream/3.1.8/src/win32-headers.h +0 -95
  881. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64-inl.h +0 -455
  882. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.cc +0 -3162
  883. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.h +0 -1584
  884. data/ext/v8/upstream/3.1.8/src/x64/builtins-x64.cc +0 -1492
  885. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.cc +0 -5150
  886. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.h +0 -519
  887. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.cc +0 -8835
  888. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.h +0 -750
  889. data/ext/v8/upstream/3.1.8/src/x64/cpu-x64.cc +0 -86
  890. data/ext/v8/upstream/3.1.8/src/x64/debug-x64.cc +0 -316
  891. data/ext/v8/upstream/3.1.8/src/x64/deoptimizer-x64.cc +0 -781
  892. data/ext/v8/upstream/3.1.8/src/x64/disasm-x64.cc +0 -1737
  893. data/ext/v8/upstream/3.1.8/src/x64/frames-x64.h +0 -130
  894. data/ext/v8/upstream/3.1.8/src/x64/full-codegen-x64.cc +0 -3984
  895. data/ext/v8/upstream/3.1.8/src/x64/ic-x64.cc +0 -1761
  896. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.cc +0 -3639
  897. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.h +0 -305
  898. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.cc +0 -2044
  899. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.h +0 -2052
  900. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.cc +0 -2660
  901. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.h +0 -1852
  902. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.cc +0 -1382
  903. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.h +0 -278
  904. data/ext/v8/upstream/3.1.8/src/x64/register-allocator-x64-inl.h +0 -87
  905. data/ext/v8/upstream/3.1.8/src/x64/register-allocator-x64.cc +0 -91
  906. data/ext/v8/upstream/3.1.8/src/x64/simulator-x64.h +0 -71
  907. data/ext/v8/upstream/3.1.8/src/x64/stub-cache-x64.cc +0 -3509
  908. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.cc +0 -1292
  909. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.h +0 -593
  910. data/ext/v8/upstream/3.1.8/src/zone-inl.h +0 -83
  911. data/ext/v8/upstream/3.1.8/src/zone.cc +0 -195
  912. data/ext/v8/upstream/3.1.8/src/zone.h +0 -233
  913. data/ext/v8/upstream/3.1.8/tools/gyp/v8.gyp +0 -869
  914. data/ext/v8/upstream/3.1.8/tools/linux-tick-processor +0 -33
  915. data/ext/v8/upstream/3.1.8/tools/tickprocessor.js +0 -863
  916. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base.vcproj +0 -1296
  917. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_arm.vcproj +0 -1234
  918. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_x64.vcproj +0 -1296
  919. data/ext/v8/upstream/3.1.8/tools/visual_studio/x64.vsprops +0 -17
  920. data/spec/ext/mem_spec.rb +0 -42
@@ -0,0 +1,3327 @@
1
+ // Copyright 2011 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+ #include <limits.h> // For LONG_MIN, LONG_MAX
29
+
30
+ #include "v8.h"
31
+
32
+ #if defined(V8_TARGET_ARCH_MIPS)
33
+
34
+ #include "bootstrapper.h"
35
+ #include "codegen-inl.h"
36
+ #include "debug.h"
37
+ #include "runtime.h"
38
+
39
+ namespace v8 {
40
+ namespace internal {
41
+
42
+ MacroAssembler::MacroAssembler(void* buffer, int size)
43
+ : Assembler(buffer, size),
44
+ generating_stub_(false),
45
+ allow_stub_calls_(true),
46
+ code_object_(HEAP->undefined_value()) {
47
+ }
48
+
49
+
50
+ // Arguments macros
51
+ #define COND_TYPED_ARGS Condition cond, Register r1, const Operand& r2
52
+ #define COND_ARGS cond, r1, r2
53
+
54
+ #define REGISTER_TARGET_BODY(Name) \
55
+ void MacroAssembler::Name(Register target, \
56
+ BranchDelaySlot bd) { \
57
+ Name(Operand(target), bd); \
58
+ } \
59
+ void MacroAssembler::Name(Register target, COND_TYPED_ARGS, \
60
+ BranchDelaySlot bd) { \
61
+ Name(Operand(target), COND_ARGS, bd); \
62
+ }
63
+
64
+
65
+ #define INT_PTR_TARGET_BODY(Name) \
66
+ void MacroAssembler::Name(intptr_t target, RelocInfo::Mode rmode, \
67
+ BranchDelaySlot bd) { \
68
+ Name(Operand(target, rmode), bd); \
69
+ } \
70
+ void MacroAssembler::Name(intptr_t target, \
71
+ RelocInfo::Mode rmode, \
72
+ COND_TYPED_ARGS, \
73
+ BranchDelaySlot bd) { \
74
+ Name(Operand(target, rmode), COND_ARGS, bd); \
75
+ }
76
+
77
+
78
+ #define BYTE_PTR_TARGET_BODY(Name) \
79
+ void MacroAssembler::Name(byte* target, RelocInfo::Mode rmode, \
80
+ BranchDelaySlot bd) { \
81
+ Name(reinterpret_cast<intptr_t>(target), rmode, bd); \
82
+ } \
83
+ void MacroAssembler::Name(byte* target, \
84
+ RelocInfo::Mode rmode, \
85
+ COND_TYPED_ARGS, \
86
+ BranchDelaySlot bd) { \
87
+ Name(reinterpret_cast<intptr_t>(target), rmode, COND_ARGS, bd); \
88
+ }
89
+
90
+
91
+ #define CODE_TARGET_BODY(Name) \
92
+ void MacroAssembler::Name(Handle<Code> target, RelocInfo::Mode rmode, \
93
+ BranchDelaySlot bd) { \
94
+ Name(reinterpret_cast<intptr_t>(target.location()), rmode, bd); \
95
+ } \
96
+ void MacroAssembler::Name(Handle<Code> target, \
97
+ RelocInfo::Mode rmode, \
98
+ COND_TYPED_ARGS, \
99
+ BranchDelaySlot bd) { \
100
+ Name(reinterpret_cast<intptr_t>(target.location()), rmode, COND_ARGS, bd); \
101
+ }
102
+
103
+
104
+ REGISTER_TARGET_BODY(Jump)
105
+ REGISTER_TARGET_BODY(Call)
106
+ INT_PTR_TARGET_BODY(Jump)
107
+ INT_PTR_TARGET_BODY(Call)
108
+ BYTE_PTR_TARGET_BODY(Jump)
109
+ BYTE_PTR_TARGET_BODY(Call)
110
+ CODE_TARGET_BODY(Jump)
111
+ CODE_TARGET_BODY(Call)
112
+
113
+ #undef COND_TYPED_ARGS
114
+ #undef COND_ARGS
115
+ #undef REGISTER_TARGET_BODY
116
+ #undef BYTE_PTR_TARGET_BODY
117
+ #undef CODE_TARGET_BODY
118
+
119
+
120
+ void MacroAssembler::Ret(BranchDelaySlot bd) {
121
+ Jump(Operand(ra), bd);
122
+ }
123
+
124
+
125
+ void MacroAssembler::Ret(Condition cond, Register r1, const Operand& r2,
126
+ BranchDelaySlot bd) {
127
+ Jump(Operand(ra), cond, r1, r2, bd);
128
+ }
129
+
130
+
131
+ void MacroAssembler::LoadRoot(Register destination,
132
+ Heap::RootListIndex index) {
133
+ lw(destination, MemOperand(s6, index << kPointerSizeLog2));
134
+ }
135
+
136
+
137
+ void MacroAssembler::LoadRoot(Register destination,
138
+ Heap::RootListIndex index,
139
+ Condition cond,
140
+ Register src1, const Operand& src2) {
141
+ Branch(2, NegateCondition(cond), src1, src2);
142
+ lw(destination, MemOperand(s6, index << kPointerSizeLog2));
143
+ }
144
+
145
+
146
// Store 'source' into the heap root slot at 'index' (s6 is the roots-array
// base pointer).
void MacroAssembler::StoreRoot(Register source,
                               Heap::RootListIndex index) {
  sw(source, MemOperand(s6, index << kPointerSizeLog2));
}
150
+
151
+
152
// Conditionally store 'source' into the heap root slot at 'index': the
// store is skipped when 'cond' does NOT hold for src1 vs src2.
void MacroAssembler::StoreRoot(Register source,
                               Heap::RootListIndex index,
                               Condition cond,
                               Register src1, const Operand& src2) {
  // Skip the single sw below when the negated condition is taken.
  Branch(2, NegateCondition(cond), src1, src2);
  sw(source, MemOperand(s6, index << kPointerSizeLog2));
}
159
+
160
+
161
// Mark the page region containing 'address' dirty so the GC will rescan it
// for pointers. Clobbers 'object', 'address', 'scratch' and 'at'.
// Precondition: 'object' must not be in new space (checked in debug code).
void MacroAssembler::RecordWriteHelper(Register object,
                                       Register address,
                                       Register scratch) {
  if (FLAG_debug_code) {
    // Check that the object is not in new space.
    Label not_in_new_space;
    InNewSpace(object, scratch, ne, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Calculate page address: Clear bits from 0 to kPageSizeBits.
  if (mips32r2) {
    Ins(object, zero_reg, 0, kPageSizeBits);
  } else {
    // The Ins macro is slow on r1, so use shifts instead.
    srl(object, object, kPageSizeBits);
    sll(object, object, kPageSizeBits);
  }

  // Calculate region number from the page offset of 'address'.
  Ext(address, address, Page::kRegionSizeLog2,
      kPageSizeBits - Page::kRegionSizeLog2);

  // Mark region dirty: set bit number 'address' in the page's
  // dirty-flags word (read-modify-write through 'scratch').
  lw(scratch, MemOperand(object, Page::kDirtyFlagOffset));
  li(at, Operand(1));
  sllv(at, at, address);
  or_(scratch, scratch, at);
  sw(scratch, MemOperand(object, Page::kDirtyFlagOffset));
}
192
+
193
+
194
// Branch to 'branch' if 'object' is (cc == eq) or is not (cc == ne) in the
// new space. Masks the address with the new-space mask and compares against
// the new-space start. Clobbers 'scratch'.
void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch) {
  ASSERT(cc == eq || cc == ne);
  And(scratch, object, Operand(ExternalReference::new_space_mask(isolate())));
  Branch(branch, cc, scratch,
         Operand(ExternalReference::new_space_start(isolate())));
}
203
+
204
+
205
// Will clobber 4 registers: object, scratch0, scratch1, at. The
// register 'object' contains a heap object pointer. The heap object
// tag is shifted away. Records that a pointer was written at
// object + offset so the GC region marks stay correct.
void MacroAssembler::RecordWrite(Register object,
                                 Operand offset,
                                 Register scratch0,
                                 Register scratch1) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are cp.
  ASSERT(!object.is(cp) && !scratch0.is(cp) && !scratch1.is(cp));

  Label done;

  // First, test that the object is not in the new space. We cannot set
  // region marks for new space pages.
  InNewSpace(object, scratch0, eq, &done);

  // Add offset into the object.
  Addu(scratch0, object, offset);

  // Record the actual write.
  RecordWriteHelper(object, scratch0, scratch1);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    li(object, Operand(BitCast<int32_t>(kZapValue)));
    li(scratch0, Operand(BitCast<int32_t>(kZapValue)));
    li(scratch1, Operand(BitCast<int32_t>(kZapValue)));
  }
}
239
+
240
+
241
// Will clobber 4 registers: object, address, scratch, ip. The
// register 'object' contains a heap object pointer. The heap object
// tag is shifted away. Variant taking the already-computed write
// address instead of an offset.
void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register scratch) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are cp.
  ASSERT(!object.is(cp) && !address.is(cp) && !scratch.is(cp));

  Label done;

  // First, test that the object is not in the new space. We cannot set
  // region marks for new space pages.
  InNewSpace(object, scratch, eq, &done);

  // Record the actual write.
  RecordWriteHelper(object, address, scratch);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    li(object, Operand(BitCast<int32_t>(kZapValue)));
    li(address, Operand(BitCast<int32_t>(kZapValue)));
    li(scratch, Operand(BitCast<int32_t>(kZapValue)));
  }
}
271
+
272
+
273
+ // -----------------------------------------------------------------------------
274
+ // Allocation support
275
+
276
+
277
// Security check for cross-context access: verifies that the calling
// context may access 'holder_reg' (a JSGlobalProxy). Falls through on
// success; branches to 'miss' when the security tokens differ.
// Clobbers 'scratch' and 'at'; 'holder_reg' is preserved.
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));
  ASSERT(!holder_reg.is(at));
  ASSERT(!scratch.is(at));

  // Load current lexical context from the stack frame.
  lw(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset));
  // In debug mode, make sure the lexical context is set.
#ifdef DEBUG
  Check(ne, "we should not have an empty lexical context",
      scratch, Operand(zero_reg));
#endif

  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  lw(scratch, FieldMemOperand(scratch, offset));
  lw(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    // TODO(119): Avoid push(holder_reg)/pop(holder_reg).
    Push(holder_reg);  // Temporarily save holder on the stack.
    // Read the first word and compare to the global_context_map.
    lw(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
    LoadRoot(at, Heap::kGlobalContextMapRootIndex);
    Check(eq, "JSGlobalObject::global_context should be a global context.",
          holder_reg, Operand(at));
    Pop(holder_reg);  // Restore holder.
  }

  // Check if both contexts are the same; if so, access is allowed.
  lw(at, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset));
  Branch(&same_contexts, eq, scratch, Operand(at));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    // TODO(119): Avoid push(holder_reg)/pop(holder_reg).
    Push(holder_reg);  // Temporarily save holder on the stack.
    mov(holder_reg, at);  // Move at to its holding place.
    LoadRoot(at, Heap::kNullValueRootIndex);
    Check(ne, "JSGlobalProxy::context() should not be null.",
          holder_reg, Operand(at));

    lw(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
    LoadRoot(at, Heap::kGlobalContextMapRootIndex);
    Check(eq, "JSGlobalObject::global_context should be a global context.",
          holder_reg, Operand(at));
    // Restore at is not needed. at is reloaded below.
    Pop(holder_reg);  // Restore holder.
    // Restore at to holder's context.
    lw(at, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset));
  }

  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;

  lw(scratch, FieldMemOperand(scratch, token_offset));
  lw(at, FieldMemOperand(at, token_offset));
  Branch(miss, ne, scratch, Operand(at));

  bind(&same_contexts);
}
346
+
347
+
348
+ // ---------------------------------------------------------------------------
349
+ // Instruction macros
350
+
351
+ void MacroAssembler::Addu(Register rd, Register rs, const Operand& rt) {
352
+ if (rt.is_reg()) {
353
+ addu(rd, rs, rt.rm());
354
+ } else {
355
+ if (is_int16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
356
+ addiu(rd, rs, rt.imm32_);
357
+ } else {
358
+ // li handles the relocation.
359
+ ASSERT(!rs.is(at));
360
+ li(at, rt);
361
+ addu(rd, rs, at);
362
+ }
363
+ }
364
+ }
365
+
366
+
367
+ void MacroAssembler::Subu(Register rd, Register rs, const Operand& rt) {
368
+ if (rt.is_reg()) {
369
+ subu(rd, rs, rt.rm());
370
+ } else {
371
+ if (is_int16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
372
+ addiu(rd, rs, -rt.imm32_); // No subiu instr, use addiu(x, y, -imm).
373
+ } else {
374
+ // li handles the relocation.
375
+ ASSERT(!rs.is(at));
376
+ li(at, rt);
377
+ subu(rd, rs, at);
378
+ }
379
+ }
380
+ }
381
+
382
+
383
+ void MacroAssembler::Mul(Register rd, Register rs, const Operand& rt) {
384
+ if (rt.is_reg()) {
385
+ mul(rd, rs, rt.rm());
386
+ } else {
387
+ // li handles the relocation.
388
+ ASSERT(!rs.is(at));
389
+ li(at, rt);
390
+ mul(rd, rs, at);
391
+ }
392
+ }
393
+
394
+
395
+ void MacroAssembler::Mult(Register rs, const Operand& rt) {
396
+ if (rt.is_reg()) {
397
+ mult(rs, rt.rm());
398
+ } else {
399
+ // li handles the relocation.
400
+ ASSERT(!rs.is(at));
401
+ li(at, rt);
402
+ mult(rs, at);
403
+ }
404
+ }
405
+
406
+
407
+ void MacroAssembler::Multu(Register rs, const Operand& rt) {
408
+ if (rt.is_reg()) {
409
+ multu(rs, rt.rm());
410
+ } else {
411
+ // li handles the relocation.
412
+ ASSERT(!rs.is(at));
413
+ li(at, rt);
414
+ multu(rs, at);
415
+ }
416
+ }
417
+
418
+
419
+ void MacroAssembler::Div(Register rs, const Operand& rt) {
420
+ if (rt.is_reg()) {
421
+ div(rs, rt.rm());
422
+ } else {
423
+ // li handles the relocation.
424
+ ASSERT(!rs.is(at));
425
+ li(at, rt);
426
+ div(rs, at);
427
+ }
428
+ }
429
+
430
+
431
+ void MacroAssembler::Divu(Register rs, const Operand& rt) {
432
+ if (rt.is_reg()) {
433
+ divu(rs, rt.rm());
434
+ } else {
435
+ // li handles the relocation.
436
+ ASSERT(!rs.is(at));
437
+ li(at, rt);
438
+ divu(rs, at);
439
+ }
440
+ }
441
+
442
+
443
+ void MacroAssembler::And(Register rd, Register rs, const Operand& rt) {
444
+ if (rt.is_reg()) {
445
+ and_(rd, rs, rt.rm());
446
+ } else {
447
+ if (is_uint16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
448
+ andi(rd, rs, rt.imm32_);
449
+ } else {
450
+ // li handles the relocation.
451
+ ASSERT(!rs.is(at));
452
+ li(at, rt);
453
+ and_(rd, rs, at);
454
+ }
455
+ }
456
+ }
457
+
458
+
459
+ void MacroAssembler::Or(Register rd, Register rs, const Operand& rt) {
460
+ if (rt.is_reg()) {
461
+ or_(rd, rs, rt.rm());
462
+ } else {
463
+ if (is_uint16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
464
+ ori(rd, rs, rt.imm32_);
465
+ } else {
466
+ // li handles the relocation.
467
+ ASSERT(!rs.is(at));
468
+ li(at, rt);
469
+ or_(rd, rs, at);
470
+ }
471
+ }
472
+ }
473
+
474
+
475
+ void MacroAssembler::Xor(Register rd, Register rs, const Operand& rt) {
476
+ if (rt.is_reg()) {
477
+ xor_(rd, rs, rt.rm());
478
+ } else {
479
+ if (is_uint16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
480
+ xori(rd, rs, rt.imm32_);
481
+ } else {
482
+ // li handles the relocation.
483
+ ASSERT(!rs.is(at));
484
+ li(at, rt);
485
+ xor_(rd, rs, at);
486
+ }
487
+ }
488
+ }
489
+
490
+
491
+ void MacroAssembler::Nor(Register rd, Register rs, const Operand& rt) {
492
+ if (rt.is_reg()) {
493
+ nor(rd, rs, rt.rm());
494
+ } else {
495
+ // li handles the relocation.
496
+ ASSERT(!rs.is(at));
497
+ li(at, rt);
498
+ nor(rd, rs, at);
499
+ }
500
+ }
501
+
502
+
503
+ void MacroAssembler::Slt(Register rd, Register rs, const Operand& rt) {
504
+ if (rt.is_reg()) {
505
+ slt(rd, rs, rt.rm());
506
+ } else {
507
+ if (is_int16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
508
+ slti(rd, rs, rt.imm32_);
509
+ } else {
510
+ // li handles the relocation.
511
+ ASSERT(!rs.is(at));
512
+ li(at, rt);
513
+ slt(rd, rs, at);
514
+ }
515
+ }
516
+ }
517
+
518
+
519
+ void MacroAssembler::Sltu(Register rd, Register rs, const Operand& rt) {
520
+ if (rt.is_reg()) {
521
+ sltu(rd, rs, rt.rm());
522
+ } else {
523
+ if (is_uint16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
524
+ sltiu(rd, rs, rt.imm32_);
525
+ } else {
526
+ // li handles the relocation.
527
+ ASSERT(!rs.is(at));
528
+ li(at, rt);
529
+ sltu(rd, rs, at);
530
+ }
531
+ }
532
+ }
533
+
534
+
535
// Rotate right: rd = rs rotated right by rt (register or immediate).
// On MIPS32R2 the native rotr/rotrv instructions are used; on R1 the
// rotate is synthesized from a pair of shifts OR-ed together.
// The R1 register path clobbers 'at'.
void MacroAssembler::Ror(Register rd, Register rs, const Operand& rt) {
  if (mips32r2) {
    if (rt.is_reg()) {
      rotrv(rd, rs, rt.rm());
    } else {
      rotr(rd, rs, rt.imm32_);
    }
  } else {
    if (rt.is_reg()) {
      // Left-shift amount is (32 - rt) mod 32, computed as 0 - rt
      // (sllv only looks at the low 5 bits of the shift register).
      subu(at, zero_reg, rt.rm());
      sllv(at, rs, at);
      srlv(rd, rs, rt.rm());
      or_(rd, rd, at);
    } else {
      if (rt.imm32_ == 0) {
        // Rotate by zero is a plain move.
        srl(rd, rs, 0);
      } else {
        // rd = (rs >> imm) | (rs << (32 - imm)); the '& 0x1f' keeps the
        // shift amount encodable for any imm32_ in 1..31.
        srl(at, rs, rt.imm32_);
        sll(rd, rs, (0x20 - rt.imm32_) & 0x1f);
        or_(rd, rd, at);
      }
    }
  }
}
559
+
560
+
561
+ //------------Pseudo-instructions-------------
562
+
563
// Load a 32-bit immediate (with optional relocation info) into 'rd'.
// When the value needs relocation, or when gen2instr is true, a fixed
// two-instruction sequence is always emitted so the code can later be
// patched with a different value without changing its size.
void MacroAssembler::li(Register rd, Operand j, bool gen2instr) {
  ASSERT(!j.is_reg());
  BlockTrampolinePoolScope block_trampoline_pool(this);
  if (!MustUseReg(j.rmode_) && !gen2instr) {
    // Normal load of an immediate value which does not need Relocation Info.
    if (is_int16(j.imm32_)) {
      addiu(rd, zero_reg, j.imm32_);
    } else if (!(j.imm32_ & kHiMask)) {
      // Upper half is zero: a single ori suffices.
      ori(rd, zero_reg, j.imm32_);
    } else if (!(j.imm32_ & kImm16Mask)) {
      // Lower half is zero: a single lui suffices.
      lui(rd, (j.imm32_ & kHiMask) >> kLuiShift);
    } else {
      lui(rd, (j.imm32_ & kHiMask) >> kLuiShift);
      ori(rd, rd, (j.imm32_ & kImm16Mask));
    }
  } else if (MustUseReg(j.rmode_) || gen2instr) {
    if (MustUseReg(j.rmode_)) {
      RecordRelocInfo(j.rmode_, j.imm32_);
    }
    // We need always the same number of instructions as we may need to patch
    // this code to load another value which may need 2 instructions to load.
    // A nop pads the one-instruction cases up to two instructions.
    if (is_int16(j.imm32_)) {
      nop();
      addiu(rd, zero_reg, j.imm32_);
    } else if (!(j.imm32_ & kHiMask)) {
      nop();
      ori(rd, zero_reg, j.imm32_);
    } else if (!(j.imm32_ & kImm16Mask)) {
      nop();
      lui(rd, (j.imm32_ & kHiMask) >> kLuiShift);
    } else {
      lui(rd, (j.imm32_ & kHiMask) >> kLuiShift);
      ori(rd, rd, (j.imm32_ & kImm16Mask));
    }
  }
}
599
+
600
+
601
// Exception-generating instructions and debugging support

// Halt execution with a breakpoint. 'msg' is currently unused.
void MacroAssembler::stop(const char* msg) {
  // TO_UPGRADE: Just a break for now. Maybe we could upgrade it.
  // We use the 0x54321 value to be able to find it easily when reading memory.
  break_(0x54321);
}
607
+
608
+
609
+ void MacroAssembler::MultiPush(RegList regs) {
610
+ int16_t NumSaved = 0;
611
+ int16_t NumToPush = NumberOfBitsSet(regs);
612
+
613
+ addiu(sp, sp, -4 * NumToPush);
614
+ for (int16_t i = kNumRegisters; i > 0; i--) {
615
+ if ((regs & (1 << i)) != 0) {
616
+ sw(ToRegister(i), MemOperand(sp, 4 * (NumToPush - ++NumSaved)));
617
+ }
618
+ }
619
+ }
620
+
621
+
622
+ void MacroAssembler::MultiPushReversed(RegList regs) {
623
+ int16_t NumSaved = 0;
624
+ int16_t NumToPush = NumberOfBitsSet(regs);
625
+
626
+ addiu(sp, sp, -4 * NumToPush);
627
+ for (int16_t i = 0; i < kNumRegisters; i++) {
628
+ if ((regs & (1 << i)) != 0) {
629
+ sw(ToRegister(i), MemOperand(sp, 4 * (NumToPush - ++NumSaved)));
630
+ }
631
+ }
632
+ }
633
+
634
+
635
+ void MacroAssembler::MultiPop(RegList regs) {
636
+ int16_t NumSaved = 0;
637
+
638
+ for (int16_t i = 0; i < kNumRegisters; i++) {
639
+ if ((regs & (1 << i)) != 0) {
640
+ lw(ToRegister(i), MemOperand(sp, 4 * (NumSaved++)));
641
+ }
642
+ }
643
+ addiu(sp, sp, 4 * NumSaved);
644
+ }
645
+
646
+
647
+ void MacroAssembler::MultiPopReversed(RegList regs) {
648
+ int16_t NumSaved = 0;
649
+
650
+ for (int16_t i = kNumRegisters; i > 0; i--) {
651
+ if ((regs & (1 << i)) != 0) {
652
+ lw(ToRegister(i), MemOperand(sp, 4 * (NumSaved++)));
653
+ }
654
+ }
655
+ addiu(sp, sp, 4 * NumSaved);
656
+ }
657
+
658
+
659
// Extract 'size' bits starting at bit 'pos' from rs into the low bits of
// rt (zero-extended). Uses the native ext_ on MIPS32R2; otherwise a
// left-then-right shift pair.
// NOTE(review): the assert rejects fields that extend exactly to bit 31
// (pos + size == 32) even though both code paths would emit a shift of 0
// for that case — confirm whether the bound should be pos + size <= 32.
void MacroAssembler::Ext(Register rt,
                         Register rs,
                         uint16_t pos,
                         uint16_t size) {
  ASSERT(pos < 32);
  ASSERT(pos + size < 32);

  if (mips32r2) {
    ext_(rt, rs, pos, size);
  } else {
    // Move rs to rt and shift it left then right to get the
    // desired bitfield on the right side and zeroes on the left.
    sll(rt, rs, 32 - (pos + size));
    srl(rt, rt, 32 - size);
  }
}
675
+
676
+
677
// Insert the low 'size' bits of rs into rt at bit position 'pos', leaving
// the other bits of rt unchanged. Uses the native ins_ on MIPS32R2; the
// R1 fallback reassembles rt from its outer chunks plus the new field,
// clobbering 'at' and 't8'.
void MacroAssembler::Ins(Register rt,
                         Register rs,
                         uint16_t pos,
                         uint16_t size) {
  ASSERT(pos < 32);
  // The R1 fallback shifts by pos + size, so the field may not reach bit 31.
  ASSERT(pos + size < 32);

  if (mips32r2) {
    ins_(rt, rs, pos, size);
  } else {
    ASSERT(!rt.is(t8) && !rs.is(t8));

    srl(t8, rt, pos + size);
    // The left chunk from rt that needs to
    // be saved is on the right side of t8.
    sll(at, t8, pos + size);
    // The 'at' register now contains the left chunk on
    // the left (proper position) and zeroes.
    sll(t8, rt, 32 - pos);
    // t8 now contains the right chunk on the left and zeroes.
    srl(t8, t8, 32 - pos);
    // t8 now contains the right chunk on
    // the right (proper position) and zeroes.
    or_(rt, at, t8);
    // rt now contains the left and right chunks from the original rt
    // in their proper position and zeroes in the middle.
    sll(t8, rs, 32 - size);
    // t8 now contains the chunk from rs on the left and zeroes.
    srl(t8, t8, 32 - size - pos);
    // t8 now contains the original chunk from rs in
    // the middle (proper position).
    or_(rt, rt, t8);
    // rt now contains the result of the ins instruction in R2 mode.
  }
}
712
+
713
+
714
+ void MacroAssembler::Cvt_d_uw(FPURegister fd, FPURegister fs) {
715
+ // Move the data from fs to t4.
716
+ mfc1(t4, fs);
717
+ return Cvt_d_uw(fd, t4);
718
+ }
719
+
720
+
721
// Convert the unsigned 32-bit integer in 'rs' to a double in 'fd'.
// cvt_d_w treats its input as signed, so the MSB is stripped before the
// conversion and its weight (2^31 - 1, then + 1) is added back afterwards.
// Clobbers t8, t9 and f20.
void MacroAssembler::Cvt_d_uw(FPURegister fd, Register rs) {
  // Convert rs to a FP value in fd (and fd + 1).
  // We do this by converting rs minus the MSB to avoid sign conversion,
  // then adding 2^31-1 and 1 to the result.

  ASSERT(!fd.is(f20));
  ASSERT(!rs.is(t9));
  ASSERT(!rs.is(t8));

  // Save rs's MSB to t8
  And(t8, rs, 0x80000000);
  // Remove rs's MSB.
  And(t9, rs, 0x7FFFFFFF);
  // Move t9 to fd
  mtc1(t9, fd);

  // Convert fd to a real FP value (signed conversion is now safe).
  cvt_d_w(fd, fd);

  Label conversion_done;

  // If rs's MSB was 0, it's done.
  // Otherwise we need to add that to the FP register.
  Branch(&conversion_done, eq, t8, Operand(zero_reg));

  // First load 2^31 - 1 into f20.
  Or(t9, zero_reg, 0x7FFFFFFF);
  mtc1(t9, f20);

  // Convert it to FP and add it to fd.
  cvt_d_w(f20, f20);
  add_d(fd, fd, f20);
  // Now add 1 (2^31 - 1 + 1 == 2^31, the MSB's weight).
  Or(t9, zero_reg, 1);
  mtc1(t9, f20);

  cvt_d_w(f20, f20);
  add_d(fd, fd, f20);
  bind(&conversion_done);
}
761
+
762
+
763
// Truncate the double in 'fs' to an unsigned 32-bit integer and leave the
// result in FPU register 'fd'. The GPR-output overload does the work into
// t4 (note: its first parameter is the source double), then the result is
// moved into fd. Clobbers t4 plus whatever the overload clobbers.
void MacroAssembler::Trunc_uw_d(FPURegister fd, FPURegister fs) {
  Trunc_uw_d(fs, t4);
  mtc1(t4, fd);
}
767
+
768
+
769
// Truncate the double in 'fd' (the SOURCE, despite the name) to an
// unsigned 32-bit integer in GPR 'rs' (the DESTINATION). Values >= 2^31
// are handled by subtracting 2^31 before the signed truncate and OR-ing
// the MSB back in. Clobbers t6 and f22.
void MacroAssembler::Trunc_uw_d(FPURegister fd, Register rs) {
  ASSERT(!fd.is(f22));
  ASSERT(!rs.is(t6));

  // Load 2^31 into f22.
  Or(t6, zero_reg, 0x80000000);
  Cvt_d_uw(f22, t6);

  // Test if f22 > fd.
  c(OLT, D, fd, f22);

  Label simple_convert;
  // If fd < 2^31 we can convert it normally.
  bc1t(&simple_convert);

  // First we subtract 2^31 from fd, then trunc it to rs
  // and add 2^31 to rs.

  sub_d(f22, fd, f22);
  trunc_w_d(f22, f22);
  mfc1(rs, f22);
  // t6 still holds 0x80000000: restore the subtracted MSB.
  or_(rs, rs, t6);

  Label done;
  Branch(&done);
  // Simple conversion.
  bind(&simple_convert);
  trunc_w_d(f22, fd);
  mfc1(rs, f22);

  bind(&done);
}
801
+
802
+
803
// Tries to get a signed int32 out of a double precision floating point heap
// number. Rounds towards 0. Branch to 'not_int32' if the double is out of the
// 32bits signed integer range.
// This method implementation differs from the ARM version for performance
// reasons.
// With FPU support the truncation is done in hardware; without it the
// mantissa is reassembled and shifted manually.
void MacroAssembler::ConvertToInt32(Register source,
                                    Register dest,
                                    Register scratch,
                                    Register scratch2,
                                    FPURegister double_scratch,
                                    Label *not_int32) {
  Label right_exponent, done;
  // Get exponent word (ENDIAN issues).
  lw(scratch, FieldMemOperand(source, HeapNumber::kExponentOffset));
  // Get exponent alone in scratch2.
  And(scratch2, scratch, Operand(HeapNumber::kExponentMask));
  // Load dest with zero. We use this either for the final shift or
  // for the answer.
  mov(dest, zero_reg);
  // Check whether the exponent matches a 32 bit signed int that is not a Smi.
  // A non-Smi integer is 1.xxx * 2^30 so the exponent is 30 (biased). This is
  // the exponent that we are fastest at and also the highest exponent we can
  // handle here.
  const uint32_t non_smi_exponent =
      (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift;
  // If we have a match of the int32-but-not-Smi exponent then skip some logic.
  Branch(&right_exponent, eq, scratch2, Operand(non_smi_exponent));
  // If the exponent is higher than that then go to not_int32 case. This
  // catches numbers that don't fit in a signed int32, infinities and NaNs.
  Branch(not_int32, gt, scratch2, Operand(non_smi_exponent));

  // We know the exponent is smaller than 30 (biased). If it is less than
  // 0 (biased) then the number is smaller in magnitude than 1.0 * 2^0, ie
  // it rounds to zero.
  const uint32_t zero_exponent =
      (HeapNumber::kExponentBias + 0) << HeapNumber::kExponentShift;
  Subu(scratch2, scratch2, Operand(zero_exponent));
  // Dest already has a Smi zero.
  Branch(&done, lt, scratch2, Operand(zero_reg));
  if (!Isolate::Current()->cpu_features()->IsSupported(FPU)) {
    // Software path setup: compute the final right-shift amount in dest.
    // We have a shifted exponent between 0 and 30 in scratch2.
    srl(dest, scratch2, HeapNumber::kExponentShift);
    // We now have the exponent in dest. Subtract from 30 to get
    // how much to shift down.
    li(at, Operand(30));
    subu(dest, at, dest);
  }
  bind(&right_exponent);
  if (Isolate::Current()->cpu_features()->IsSupported(FPU)) {
    CpuFeatures::Scope scope(FPU);
    // MIPS FPU instructions implementing double precision to integer
    // conversion using round to zero. Since the FP value was qualified
    // above, the resulting integer should be a legal int32.
    // The original 'Exponent' word is still in scratch.
    lwc1(double_scratch, FieldMemOperand(source, HeapNumber::kMantissaOffset));
    mtc1(scratch, FPURegister::from_code(double_scratch.code() + 1));
    trunc_w_d(double_scratch, double_scratch);
    mfc1(dest, double_scratch);
  } else {
    // On entry, dest has final downshift, scratch has original sign/exp/mant.
    // Save sign bit in top bit of dest.
    And(scratch2, scratch, Operand(0x80000000));
    Or(dest, dest, Operand(scratch2));
    // Put back the implicit 1, just above mantissa field.
    Or(scratch, scratch, Operand(1 << HeapNumber::kExponentShift));

    // Shift up the mantissa bits to take up the space the exponent used to
    // take. We just orred in the implicit bit so that took care of one and
    // we want to leave the sign bit 0 so we subtract 2 bits from the shift
    // distance. But we want to clear the sign-bit so shift one more bit
    // left, then shift right one bit.
    const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2;
    sll(scratch, scratch, shift_distance + 1);
    srl(scratch, scratch, 1);

    // Get the second half of the double. For some exponents we don't
    // actually need this because the bits get shifted out again, but
    // it's probably slower to test than just to do it.
    lw(scratch2, FieldMemOperand(source, HeapNumber::kMantissaOffset));
    // Extract the top 10 bits, and insert those bottom 10 bits of scratch.
    // The width of the field here is the same as the shift amount above.
    const int field_width = shift_distance;
    Ext(scratch2, scratch2, 32-shift_distance, field_width);
    Ins(scratch, scratch2, 0, field_width);
    // Move down according to the exponent.
    srlv(scratch, scratch, dest);
    // Prepare the negative version of our integer.
    subu(scratch2, zero_reg, scratch);
    // Trick to check sign bit (msb) held in dest, count leading zero.
    // 0 indicates negative, save negative version with conditional move.
    clz(dest, dest);
    movz(scratch, scratch2, dest);
    mov(dest, scratch);
  }
  bind(&done);
}
899
+
900
+
901
// Emulated conditional branches do not emit a nop in the branch delay slot.
//
// BRANCH_ARGS_CHECK checks that conditional jump arguments are correct:
// an unconditional branch must pass zero_reg for both comparands, and a
// conditional branch must pass at least one non-zero comparand.
#define BRANCH_ARGS_CHECK(cond, rs, rt) ASSERT(                        \
    (cond == cc_always && rs.is(zero_reg) && rt.rm().is(zero_reg)) ||  \
    (cond != cc_always && (!rs.is(zero_reg) || !rt.rm().is(zero_reg))))
907
+
908
+
909
// Unconditional branch by a pre-shifted 16-bit offset. With PROTECT, a nop
// fills the branch delay slot; with USE_DELAY_SLOT the caller supplies it.
void MacroAssembler::Branch(int16_t offset, BranchDelaySlot bdslot) {
  b(offset);

  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();
}
916
+
917
+
918
+ void MacroAssembler::Branch(int16_t offset, Condition cond, Register rs,
919
+ const Operand& rt,
920
+ BranchDelaySlot bdslot) {
921
+ BRANCH_ARGS_CHECK(cond, rs, rt);
922
+ ASSERT(!rs.is(zero_reg));
923
+ Register r2 = no_reg;
924
+ Register scratch = at;
925
+
926
+ if (rt.is_reg()) {
927
+ // We don't want any other register but scratch clobbered.
928
+ ASSERT(!scratch.is(rs) && !scratch.is(rt.rm_));
929
+ r2 = rt.rm_;
930
+ switch (cond) {
931
+ case cc_always:
932
+ b(offset);
933
+ break;
934
+ case eq:
935
+ beq(rs, r2, offset);
936
+ break;
937
+ case ne:
938
+ bne(rs, r2, offset);
939
+ break;
940
+ // Signed comparison
941
+ case greater:
942
+ if (r2.is(zero_reg)) {
943
+ bgtz(rs, offset);
944
+ } else {
945
+ slt(scratch, r2, rs);
946
+ bne(scratch, zero_reg, offset);
947
+ }
948
+ break;
949
+ case greater_equal:
950
+ if (r2.is(zero_reg)) {
951
+ bgez(rs, offset);
952
+ } else {
953
+ slt(scratch, rs, r2);
954
+ beq(scratch, zero_reg, offset);
955
+ }
956
+ break;
957
+ case less:
958
+ if (r2.is(zero_reg)) {
959
+ bltz(rs, offset);
960
+ } else {
961
+ slt(scratch, rs, r2);
962
+ bne(scratch, zero_reg, offset);
963
+ }
964
+ break;
965
+ case less_equal:
966
+ if (r2.is(zero_reg)) {
967
+ blez(rs, offset);
968
+ } else {
969
+ slt(scratch, r2, rs);
970
+ beq(scratch, zero_reg, offset);
971
+ }
972
+ break;
973
+ // Unsigned comparison.
974
+ case Ugreater:
975
+ if (r2.is(zero_reg)) {
976
+ bgtz(rs, offset);
977
+ } else {
978
+ sltu(scratch, r2, rs);
979
+ bne(scratch, zero_reg, offset);
980
+ }
981
+ break;
982
+ case Ugreater_equal:
983
+ if (r2.is(zero_reg)) {
984
+ bgez(rs, offset);
985
+ } else {
986
+ sltu(scratch, rs, r2);
987
+ beq(scratch, zero_reg, offset);
988
+ }
989
+ break;
990
+ case Uless:
991
+ if (r2.is(zero_reg)) {
992
+ b(offset);
993
+ } else {
994
+ sltu(scratch, rs, r2);
995
+ bne(scratch, zero_reg, offset);
996
+ }
997
+ break;
998
+ case Uless_equal:
999
+ if (r2.is(zero_reg)) {
1000
+ b(offset);
1001
+ } else {
1002
+ sltu(scratch, r2, rs);
1003
+ beq(scratch, zero_reg, offset);
1004
+ }
1005
+ break;
1006
+ default:
1007
+ UNREACHABLE();
1008
+ }
1009
+ } else {
1010
+ // Be careful to always use shifted_branch_offset only just before the
1011
+ // branch instruction, as the location will be remember for patching the
1012
+ // target.
1013
+ switch (cond) {
1014
+ case cc_always:
1015
+ b(offset);
1016
+ break;
1017
+ case eq:
1018
+ // We don't want any other register but scratch clobbered.
1019
+ ASSERT(!scratch.is(rs));
1020
+ r2 = scratch;
1021
+ li(r2, rt);
1022
+ beq(rs, r2, offset);
1023
+ break;
1024
+ case ne:
1025
+ // We don't want any other register but scratch clobbered.
1026
+ ASSERT(!scratch.is(rs));
1027
+ r2 = scratch;
1028
+ li(r2, rt);
1029
+ bne(rs, r2, offset);
1030
+ break;
1031
+ // Signed comparison
1032
+ case greater:
1033
+ if (rt.imm32_ == 0) {
1034
+ bgtz(rs, offset);
1035
+ } else {
1036
+ r2 = scratch;
1037
+ li(r2, rt);
1038
+ slt(scratch, r2, rs);
1039
+ bne(scratch, zero_reg, offset);
1040
+ }
1041
+ break;
1042
+ case greater_equal:
1043
+ if (rt.imm32_ == 0) {
1044
+ bgez(rs, offset);
1045
+ } else if (is_int16(rt.imm32_)) {
1046
+ slti(scratch, rs, rt.imm32_);
1047
+ beq(scratch, zero_reg, offset);
1048
+ } else {
1049
+ r2 = scratch;
1050
+ li(r2, rt);
1051
+ sltu(scratch, rs, r2);
1052
+ beq(scratch, zero_reg, offset);
1053
+ }
1054
+ break;
1055
+ case less:
1056
+ if (rt.imm32_ == 0) {
1057
+ bltz(rs, offset);
1058
+ } else if (is_int16(rt.imm32_)) {
1059
+ slti(scratch, rs, rt.imm32_);
1060
+ bne(scratch, zero_reg, offset);
1061
+ } else {
1062
+ r2 = scratch;
1063
+ li(r2, rt);
1064
+ slt(scratch, rs, r2);
1065
+ bne(scratch, zero_reg, offset);
1066
+ }
1067
+ break;
1068
+ case less_equal:
1069
+ if (rt.imm32_ == 0) {
1070
+ blez(rs, offset);
1071
+ } else {
1072
+ r2 = scratch;
1073
+ li(r2, rt);
1074
+ slt(scratch, r2, rs);
1075
+ beq(scratch, zero_reg, offset);
1076
+ }
1077
+ break;
1078
+ // Unsigned comparison.
1079
+ case Ugreater:
1080
+ if (rt.imm32_ == 0) {
1081
+ bgtz(rs, offset);
1082
+ } else {
1083
+ r2 = scratch;
1084
+ li(r2, rt);
1085
+ sltu(scratch, r2, rs);
1086
+ bne(scratch, zero_reg, offset);
1087
+ }
1088
+ break;
1089
+ case Ugreater_equal:
1090
+ if (rt.imm32_ == 0) {
1091
+ bgez(rs, offset);
1092
+ } else if (is_int16(rt.imm32_)) {
1093
+ sltiu(scratch, rs, rt.imm32_);
1094
+ beq(scratch, zero_reg, offset);
1095
+ } else {
1096
+ r2 = scratch;
1097
+ li(r2, rt);
1098
+ sltu(scratch, rs, r2);
1099
+ beq(scratch, zero_reg, offset);
1100
+ }
1101
+ break;
1102
+ case Uless:
1103
+ if (rt.imm32_ == 0) {
1104
+ b(offset);
1105
+ } else if (is_int16(rt.imm32_)) {
1106
+ sltiu(scratch, rs, rt.imm32_);
1107
+ bne(scratch, zero_reg, offset);
1108
+ } else {
1109
+ r2 = scratch;
1110
+ li(r2, rt);
1111
+ sltu(scratch, rs, r2);
1112
+ bne(scratch, zero_reg, offset);
1113
+ }
1114
+ break;
1115
+ case Uless_equal:
1116
+ if (rt.imm32_ == 0) {
1117
+ b(offset);
1118
+ } else {
1119
+ r2 = scratch;
1120
+ li(r2, rt);
1121
+ sltu(scratch, r2, rs);
1122
+ beq(scratch, zero_reg, offset);
1123
+ }
1124
+ break;
1125
+ default:
1126
+ UNREACHABLE();
1127
+ }
1128
+ }
1129
+ // Emit a nop in the branch delay slot if required.
1130
+ if (bdslot == PROTECT)
1131
+ nop();
1132
+ }
1133
+
1134
+
1135
// Unconditional branch to a label. With PROTECT, a nop fills the branch
// delay slot.
void MacroAssembler::Branch(Label* L, BranchDelaySlot bdslot) {
  // We use branch_offset as an argument for the branch instructions to be sure
  // it is called just before generating the branch instruction, as needed.

  b(shifted_branch_offset(L, false));

  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();
}
1145
+
1146
+
1147
+ void MacroAssembler::Branch(Label* L, Condition cond, Register rs,
1148
+ const Operand& rt,
1149
+ BranchDelaySlot bdslot) {
1150
+ BRANCH_ARGS_CHECK(cond, rs, rt);
1151
+
1152
+ int32_t offset;
1153
+ Register r2 = no_reg;
1154
+ Register scratch = at;
1155
+ if (rt.is_reg()) {
1156
+ r2 = rt.rm_;
1157
+ // Be careful to always use shifted_branch_offset only just before the
1158
+ // branch instruction, as the location will be remember for patching the
1159
+ // target.
1160
+ switch (cond) {
1161
+ case cc_always:
1162
+ offset = shifted_branch_offset(L, false);
1163
+ b(offset);
1164
+ break;
1165
+ case eq:
1166
+ offset = shifted_branch_offset(L, false);
1167
+ beq(rs, r2, offset);
1168
+ break;
1169
+ case ne:
1170
+ offset = shifted_branch_offset(L, false);
1171
+ bne(rs, r2, offset);
1172
+ break;
1173
+ // Signed comparison
1174
+ case greater:
1175
+ if (r2.is(zero_reg)) {
1176
+ offset = shifted_branch_offset(L, false);
1177
+ bgtz(rs, offset);
1178
+ } else {
1179
+ slt(scratch, r2, rs);
1180
+ offset = shifted_branch_offset(L, false);
1181
+ bne(scratch, zero_reg, offset);
1182
+ }
1183
+ break;
1184
+ case greater_equal:
1185
+ if (r2.is(zero_reg)) {
1186
+ offset = shifted_branch_offset(L, false);
1187
+ bgez(rs, offset);
1188
+ } else {
1189
+ slt(scratch, rs, r2);
1190
+ offset = shifted_branch_offset(L, false);
1191
+ beq(scratch, zero_reg, offset);
1192
+ }
1193
+ break;
1194
+ case less:
1195
+ if (r2.is(zero_reg)) {
1196
+ offset = shifted_branch_offset(L, false);
1197
+ bltz(rs, offset);
1198
+ } else {
1199
+ slt(scratch, rs, r2);
1200
+ offset = shifted_branch_offset(L, false);
1201
+ bne(scratch, zero_reg, offset);
1202
+ }
1203
+ break;
1204
+ case less_equal:
1205
+ if (r2.is(zero_reg)) {
1206
+ offset = shifted_branch_offset(L, false);
1207
+ blez(rs, offset);
1208
+ } else {
1209
+ slt(scratch, r2, rs);
1210
+ offset = shifted_branch_offset(L, false);
1211
+ beq(scratch, zero_reg, offset);
1212
+ }
1213
+ break;
1214
+ // Unsigned comparison.
1215
+ case Ugreater:
1216
+ if (r2.is(zero_reg)) {
1217
+ offset = shifted_branch_offset(L, false);
1218
+ bgtz(rs, offset);
1219
+ } else {
1220
+ sltu(scratch, r2, rs);
1221
+ offset = shifted_branch_offset(L, false);
1222
+ bne(scratch, zero_reg, offset);
1223
+ }
1224
+ break;
1225
+ case Ugreater_equal:
1226
+ if (r2.is(zero_reg)) {
1227
+ offset = shifted_branch_offset(L, false);
1228
+ bgez(rs, offset);
1229
+ } else {
1230
+ sltu(scratch, rs, r2);
1231
+ offset = shifted_branch_offset(L, false);
1232
+ beq(scratch, zero_reg, offset);
1233
+ }
1234
+ break;
1235
+ case Uless:
1236
+ if (r2.is(zero_reg)) {
1237
+ offset = shifted_branch_offset(L, false);
1238
+ b(offset);
1239
+ } else {
1240
+ sltu(scratch, rs, r2);
1241
+ offset = shifted_branch_offset(L, false);
1242
+ bne(scratch, zero_reg, offset);
1243
+ }
1244
+ break;
1245
+ case Uless_equal:
1246
+ if (r2.is(zero_reg)) {
1247
+ offset = shifted_branch_offset(L, false);
1248
+ b(offset);
1249
+ } else {
1250
+ sltu(scratch, r2, rs);
1251
+ offset = shifted_branch_offset(L, false);
1252
+ beq(scratch, zero_reg, offset);
1253
+ }
1254
+ break;
1255
+ default:
1256
+ UNREACHABLE();
1257
+ }
1258
+ } else {
1259
+ // Be careful to always use shifted_branch_offset only just before the
1260
+ // branch instruction, as the location will be remember for patching the
1261
+ // target.
1262
+ switch (cond) {
1263
+ case cc_always:
1264
+ offset = shifted_branch_offset(L, false);
1265
+ b(offset);
1266
+ break;
1267
+ case eq:
1268
+ r2 = scratch;
1269
+ li(r2, rt);
1270
+ offset = shifted_branch_offset(L, false);
1271
+ beq(rs, r2, offset);
1272
+ break;
1273
+ case ne:
1274
+ r2 = scratch;
1275
+ li(r2, rt);
1276
+ offset = shifted_branch_offset(L, false);
1277
+ bne(rs, r2, offset);
1278
+ break;
1279
+ // Signed comparison
1280
+ case greater:
1281
+ if (rt.imm32_ == 0) {
1282
+ offset = shifted_branch_offset(L, false);
1283
+ bgtz(rs, offset);
1284
+ } else {
1285
+ r2 = scratch;
1286
+ li(r2, rt);
1287
+ slt(scratch, r2, rs);
1288
+ offset = shifted_branch_offset(L, false);
1289
+ bne(scratch, zero_reg, offset);
1290
+ }
1291
+ break;
1292
+ case greater_equal:
1293
+ if (rt.imm32_ == 0) {
1294
+ offset = shifted_branch_offset(L, false);
1295
+ bgez(rs, offset);
1296
+ } else if (is_int16(rt.imm32_)) {
1297
+ slti(scratch, rs, rt.imm32_);
1298
+ offset = shifted_branch_offset(L, false);
1299
+ beq(scratch, zero_reg, offset);
1300
+ } else {
1301
+ r2 = scratch;
1302
+ li(r2, rt);
1303
+ sltu(scratch, rs, r2);
1304
+ offset = shifted_branch_offset(L, false);
1305
+ beq(scratch, zero_reg, offset);
1306
+ }
1307
+ break;
1308
+ case less:
1309
+ if (rt.imm32_ == 0) {
1310
+ offset = shifted_branch_offset(L, false);
1311
+ bltz(rs, offset);
1312
+ } else if (is_int16(rt.imm32_)) {
1313
+ slti(scratch, rs, rt.imm32_);
1314
+ offset = shifted_branch_offset(L, false);
1315
+ bne(scratch, zero_reg, offset);
1316
+ } else {
1317
+ r2 = scratch;
1318
+ li(r2, rt);
1319
+ slt(scratch, rs, r2);
1320
+ offset = shifted_branch_offset(L, false);
1321
+ bne(scratch, zero_reg, offset);
1322
+ }
1323
+ break;
1324
+ case less_equal:
1325
+ if (rt.imm32_ == 0) {
1326
+ offset = shifted_branch_offset(L, false);
1327
+ blez(rs, offset);
1328
+ } else {
1329
+ r2 = scratch;
1330
+ li(r2, rt);
1331
+ slt(scratch, r2, rs);
1332
+ offset = shifted_branch_offset(L, false);
1333
+ beq(scratch, zero_reg, offset);
1334
+ }
1335
+ break;
1336
+ // Unsigned comparison.
1337
+ case Ugreater:
1338
+ if (rt.imm32_ == 0) {
1339
+ offset = shifted_branch_offset(L, false);
1340
+ bgtz(rs, offset);
1341
+ } else {
1342
+ r2 = scratch;
1343
+ li(r2, rt);
1344
+ sltu(scratch, r2, rs);
1345
+ offset = shifted_branch_offset(L, false);
1346
+ bne(scratch, zero_reg, offset);
1347
+ }
1348
+ break;
1349
+ case Ugreater_equal:
1350
+ if (rt.imm32_ == 0) {
1351
+ offset = shifted_branch_offset(L, false);
1352
+ bgez(rs, offset);
1353
+ } else if (is_int16(rt.imm32_)) {
1354
+ sltiu(scratch, rs, rt.imm32_);
1355
+ offset = shifted_branch_offset(L, false);
1356
+ beq(scratch, zero_reg, offset);
1357
+ } else {
1358
+ r2 = scratch;
1359
+ li(r2, rt);
1360
+ sltu(scratch, rs, r2);
1361
+ offset = shifted_branch_offset(L, false);
1362
+ beq(scratch, zero_reg, offset);
1363
+ }
1364
+ break;
1365
+ case Uless:
1366
+ if (rt.imm32_ == 0) {
1367
+ offset = shifted_branch_offset(L, false);
1368
+ b(offset);
1369
+ } else if (is_int16(rt.imm32_)) {
1370
+ sltiu(scratch, rs, rt.imm32_);
1371
+ offset = shifted_branch_offset(L, false);
1372
+ bne(scratch, zero_reg, offset);
1373
+ } else {
1374
+ r2 = scratch;
1375
+ li(r2, rt);
1376
+ sltu(scratch, rs, r2);
1377
+ offset = shifted_branch_offset(L, false);
1378
+ bne(scratch, zero_reg, offset);
1379
+ }
1380
+ break;
1381
+ case Uless_equal:
1382
+ if (rt.imm32_ == 0) {
1383
+ offset = shifted_branch_offset(L, false);
1384
+ b(offset);
1385
+ } else {
1386
+ r2 = scratch;
1387
+ li(r2, rt);
1388
+ sltu(scratch, r2, rs);
1389
+ offset = shifted_branch_offset(L, false);
1390
+ beq(scratch, zero_reg, offset);
1391
+ }
1392
+ break;
1393
+ default:
1394
+ UNREACHABLE();
1395
+ }
1396
+ }
1397
+ // Check that offset could actually hold on an int16_t.
1398
+ ASSERT(is_int16(offset));
1399
+ // Emit a nop in the branch delay slot if required.
1400
+ if (bdslot == PROTECT)
1401
+ nop();
1402
+ }
1403
+
1404
+
1405
+ // We need to use a bgezal or bltzal, but they can't be used directly with the
1406
+ // slt instructions. We could use sub or add instead but we would miss overflow
1407
+ // cases, so we keep slt and add an intermediate third instruction.
1408
+ void MacroAssembler::BranchAndLink(int16_t offset,
1409
+ BranchDelaySlot bdslot) {
1410
+ bal(offset);
1411
+
1412
+ // Emit a nop in the branch delay slot if required.
1413
+ if (bdslot == PROTECT)
1414
+ nop();
1415
+ }
1416
+
1417
+
1418
+ void MacroAssembler::BranchAndLink(int16_t offset, Condition cond, Register rs,
1419
+ const Operand& rt,
1420
+ BranchDelaySlot bdslot) {
1421
+ BRANCH_ARGS_CHECK(cond, rs, rt);
1422
+ Register r2 = no_reg;
1423
+ Register scratch = at;
1424
+
1425
+ if (rt.is_reg()) {
1426
+ r2 = rt.rm_;
1427
+ } else if (cond != cc_always) {
1428
+ r2 = scratch;
1429
+ li(r2, rt);
1430
+ }
1431
+
1432
+ switch (cond) {
1433
+ case cc_always:
1434
+ bal(offset);
1435
+ break;
1436
+ case eq:
1437
+ bne(rs, r2, 2);
1438
+ nop();
1439
+ bal(offset);
1440
+ break;
1441
+ case ne:
1442
+ beq(rs, r2, 2);
1443
+ nop();
1444
+ bal(offset);
1445
+ break;
1446
+
1447
+ // Signed comparison
1448
+ case greater:
1449
+ slt(scratch, r2, rs);
1450
+ addiu(scratch, scratch, -1);
1451
+ bgezal(scratch, offset);
1452
+ break;
1453
+ case greater_equal:
1454
+ slt(scratch, rs, r2);
1455
+ addiu(scratch, scratch, -1);
1456
+ bltzal(scratch, offset);
1457
+ break;
1458
+ case less:
1459
+ slt(scratch, rs, r2);
1460
+ addiu(scratch, scratch, -1);
1461
+ bgezal(scratch, offset);
1462
+ break;
1463
+ case less_equal:
1464
+ slt(scratch, r2, rs);
1465
+ addiu(scratch, scratch, -1);
1466
+ bltzal(scratch, offset);
1467
+ break;
1468
+
1469
+ // Unsigned comparison.
1470
+ case Ugreater:
1471
+ sltu(scratch, r2, rs);
1472
+ addiu(scratch, scratch, -1);
1473
+ bgezal(scratch, offset);
1474
+ break;
1475
+ case Ugreater_equal:
1476
+ sltu(scratch, rs, r2);
1477
+ addiu(scratch, scratch, -1);
1478
+ bltzal(scratch, offset);
1479
+ break;
1480
+ case Uless:
1481
+ sltu(scratch, rs, r2);
1482
+ addiu(scratch, scratch, -1);
1483
+ bgezal(scratch, offset);
1484
+ break;
1485
+ case Uless_equal:
1486
+ sltu(scratch, r2, rs);
1487
+ addiu(scratch, scratch, -1);
1488
+ bltzal(scratch, offset);
1489
+ break;
1490
+
1491
+ default:
1492
+ UNREACHABLE();
1493
+ }
1494
+ // Emit a nop in the branch delay slot if required.
1495
+ if (bdslot == PROTECT)
1496
+ nop();
1497
+ }
1498
+
1499
+
1500
+ void MacroAssembler::BranchAndLink(Label* L, BranchDelaySlot bdslot) {
1501
+ bal(shifted_branch_offset(L, false));
1502
+
1503
+ // Emit a nop in the branch delay slot if required.
1504
+ if (bdslot == PROTECT)
1505
+ nop();
1506
+ }
1507
+
1508
+
1509
+ void MacroAssembler::BranchAndLink(Label* L, Condition cond, Register rs,
1510
+ const Operand& rt,
1511
+ BranchDelaySlot bdslot) {
1512
+ BRANCH_ARGS_CHECK(cond, rs, rt);
1513
+
1514
+ int32_t offset;
1515
+ Register r2 = no_reg;
1516
+ Register scratch = at;
1517
+ if (rt.is_reg()) {
1518
+ r2 = rt.rm_;
1519
+ } else if (cond != cc_always) {
1520
+ r2 = scratch;
1521
+ li(r2, rt);
1522
+ }
1523
+
1524
+ switch (cond) {
1525
+ case cc_always:
1526
+ offset = shifted_branch_offset(L, false);
1527
+ bal(offset);
1528
+ break;
1529
+ case eq:
1530
+ bne(rs, r2, 2);
1531
+ nop();
1532
+ offset = shifted_branch_offset(L, false);
1533
+ bal(offset);
1534
+ break;
1535
+ case ne:
1536
+ beq(rs, r2, 2);
1537
+ nop();
1538
+ offset = shifted_branch_offset(L, false);
1539
+ bal(offset);
1540
+ break;
1541
+
1542
+ // Signed comparison
1543
+ case greater:
1544
+ slt(scratch, r2, rs);
1545
+ addiu(scratch, scratch, -1);
1546
+ offset = shifted_branch_offset(L, false);
1547
+ bgezal(scratch, offset);
1548
+ break;
1549
+ case greater_equal:
1550
+ slt(scratch, rs, r2);
1551
+ addiu(scratch, scratch, -1);
1552
+ offset = shifted_branch_offset(L, false);
1553
+ bltzal(scratch, offset);
1554
+ break;
1555
+ case less:
1556
+ slt(scratch, rs, r2);
1557
+ addiu(scratch, scratch, -1);
1558
+ offset = shifted_branch_offset(L, false);
1559
+ bgezal(scratch, offset);
1560
+ break;
1561
+ case less_equal:
1562
+ slt(scratch, r2, rs);
1563
+ addiu(scratch, scratch, -1);
1564
+ offset = shifted_branch_offset(L, false);
1565
+ bltzal(scratch, offset);
1566
+ break;
1567
+
1568
+ // Unsigned comparison.
1569
+ case Ugreater:
1570
+ sltu(scratch, r2, rs);
1571
+ addiu(scratch, scratch, -1);
1572
+ offset = shifted_branch_offset(L, false);
1573
+ bgezal(scratch, offset);
1574
+ break;
1575
+ case Ugreater_equal:
1576
+ sltu(scratch, rs, r2);
1577
+ addiu(scratch, scratch, -1);
1578
+ offset = shifted_branch_offset(L, false);
1579
+ bltzal(scratch, offset);
1580
+ break;
1581
+ case Uless:
1582
+ sltu(scratch, rs, r2);
1583
+ addiu(scratch, scratch, -1);
1584
+ offset = shifted_branch_offset(L, false);
1585
+ bgezal(scratch, offset);
1586
+ break;
1587
+ case Uless_equal:
1588
+ sltu(scratch, r2, rs);
1589
+ addiu(scratch, scratch, -1);
1590
+ offset = shifted_branch_offset(L, false);
1591
+ bltzal(scratch, offset);
1592
+ break;
1593
+
1594
+ default:
1595
+ UNREACHABLE();
1596
+ }
1597
+
1598
+ // Check that offset could actually hold on an int16_t.
1599
+ ASSERT(is_int16(offset));
1600
+
1601
+ // Emit a nop in the branch delay slot if required.
1602
+ if (bdslot == PROTECT)
1603
+ nop();
1604
+ }
1605
+
1606
+
1607
+ void MacroAssembler::Jump(const Operand& target, BranchDelaySlot bdslot) {
1608
+ BlockTrampolinePoolScope block_trampoline_pool(this);
1609
+ if (target.is_reg()) {
1610
+ jr(target.rm());
1611
+ } else {
1612
+ if (!MustUseReg(target.rmode_)) {
1613
+ j(target.imm32_);
1614
+ } else {
1615
+ li(t9, target);
1616
+ jr(t9);
1617
+ }
1618
+ }
1619
+ // Emit a nop in the branch delay slot if required.
1620
+ if (bdslot == PROTECT)
1621
+ nop();
1622
+ }
1623
+
1624
+
1625
+ void MacroAssembler::Jump(const Operand& target,
1626
+ Condition cond, Register rs, const Operand& rt,
1627
+ BranchDelaySlot bdslot) {
1628
+ BlockTrampolinePoolScope block_trampoline_pool(this);
1629
+ BRANCH_ARGS_CHECK(cond, rs, rt);
1630
+ if (target.is_reg()) {
1631
+ if (cond == cc_always) {
1632
+ jr(target.rm());
1633
+ } else {
1634
+ Branch(2, NegateCondition(cond), rs, rt);
1635
+ jr(target.rm());
1636
+ }
1637
+ } else { // Not register target.
1638
+ if (!MustUseReg(target.rmode_)) {
1639
+ if (cond == cc_always) {
1640
+ j(target.imm32_);
1641
+ } else {
1642
+ Branch(2, NegateCondition(cond), rs, rt);
1643
+ j(target.imm32_); // Will generate only one instruction.
1644
+ }
1645
+ } else { // MustUseReg(target)
1646
+ li(t9, target);
1647
+ if (cond == cc_always) {
1648
+ jr(t9);
1649
+ } else {
1650
+ Branch(2, NegateCondition(cond), rs, rt);
1651
+ jr(t9); // Will generate only one instruction.
1652
+ }
1653
+ }
1654
+ }
1655
+ // Emit a nop in the branch delay slot if required.
1656
+ if (bdslot == PROTECT)
1657
+ nop();
1658
+ }
1659
+
1660
+
1661
+ // Note: To call gcc-compiled C code on mips, you must call thru t9.
1662
+ void MacroAssembler::Call(const Operand& target, BranchDelaySlot bdslot) {
1663
+ BlockTrampolinePoolScope block_trampoline_pool(this);
1664
+ if (target.is_reg()) {
1665
+ jalr(target.rm());
1666
+ } else { // !target.is_reg()
1667
+ if (!MustUseReg(target.rmode_)) {
1668
+ jal(target.imm32_);
1669
+ } else { // MustUseReg(target)
1670
+ li(t9, target);
1671
+ jalr(t9);
1672
+ }
1673
+ }
1674
+ // Emit a nop in the branch delay slot if required.
1675
+ if (bdslot == PROTECT)
1676
+ nop();
1677
+ }
1678
+
1679
+
1680
+ // Note: To call gcc-compiled C code on mips, you must call thru t9.
1681
+ void MacroAssembler::Call(const Operand& target,
1682
+ Condition cond, Register rs, const Operand& rt,
1683
+ BranchDelaySlot bdslot) {
1684
+ BlockTrampolinePoolScope block_trampoline_pool(this);
1685
+ BRANCH_ARGS_CHECK(cond, rs, rt);
1686
+ if (target.is_reg()) {
1687
+ if (cond == cc_always) {
1688
+ jalr(target.rm());
1689
+ } else {
1690
+ Branch(2, NegateCondition(cond), rs, rt);
1691
+ jalr(target.rm());
1692
+ }
1693
+ } else { // !target.is_reg()
1694
+ if (!MustUseReg(target.rmode_)) {
1695
+ if (cond == cc_always) {
1696
+ jal(target.imm32_);
1697
+ } else {
1698
+ Branch(2, NegateCondition(cond), rs, rt);
1699
+ jal(target.imm32_); // Will generate only one instruction.
1700
+ }
1701
+ } else { // MustUseReg(target)
1702
+ li(t9, target);
1703
+ if (cond == cc_always) {
1704
+ jalr(t9);
1705
+ } else {
1706
+ Branch(2, NegateCondition(cond), rs, rt);
1707
+ jalr(t9); // Will generate only one instruction.
1708
+ }
1709
+ }
1710
+ }
1711
+ // Emit a nop in the branch delay slot if required.
1712
+ if (bdslot == PROTECT)
1713
+ nop();
1714
+ }
1715
+
1716
+
1717
+ void MacroAssembler::Drop(int count,
1718
+ Condition cond,
1719
+ Register reg,
1720
+ const Operand& op) {
1721
+ if (count <= 0) {
1722
+ return;
1723
+ }
1724
+
1725
+ Label skip;
1726
+
1727
+ if (cond != al) {
1728
+ Branch(&skip, NegateCondition(cond), reg, op);
1729
+ }
1730
+
1731
+ if (count > 0) {
1732
+ addiu(sp, sp, count * kPointerSize);
1733
+ }
1734
+
1735
+ if (cond != al) {
1736
+ bind(&skip);
1737
+ }
1738
+ }
1739
+
1740
+
1741
+ void MacroAssembler::DropAndRet(int drop,
1742
+ Condition cond,
1743
+ Register r1,
1744
+ const Operand& r2) {
1745
+ // This is a workaround to make sure only one branch instruction is
1746
+ // generated. It relies on Drop and Ret not creating branches if
1747
+ // cond == cc_always.
1748
+ Label skip;
1749
+ if (cond != cc_always) {
1750
+ Branch(&skip, NegateCondition(cond), r1, r2);
1751
+ }
1752
+
1753
+ Drop(drop);
1754
+ Ret();
1755
+
1756
+ if (cond != cc_always) {
1757
+ bind(&skip);
1758
+ }
1759
+ }
1760
+
1761
+
1762
+ void MacroAssembler::Swap(Register reg1,
1763
+ Register reg2,
1764
+ Register scratch) {
1765
+ if (scratch.is(no_reg)) {
1766
+ Xor(reg1, reg1, Operand(reg2));
1767
+ Xor(reg2, reg2, Operand(reg1));
1768
+ Xor(reg1, reg1, Operand(reg2));
1769
+ } else {
1770
+ mov(scratch, reg1);
1771
+ mov(reg1, reg2);
1772
+ mov(reg2, scratch);
1773
+ }
1774
+ }
1775
+
1776
+
1777
+ void MacroAssembler::Call(Label* target) {
1778
+ BranchAndLink(target);
1779
+ }
1780
+
1781
+
1782
+ void MacroAssembler::Move(Register dst, Register src) {
1783
+ if (!dst.is(src)) {
1784
+ mov(dst, src);
1785
+ }
1786
+ }
1787
+
1788
+
1789
#ifdef ENABLE_DEBUGGER_SUPPORT

void MacroAssembler::DebugBreak() {
  // Call the kDebugBreak runtime entry with zero arguments through a
  // CEntry stub.
  ASSERT(allow_stub_calls());
  mov(a0, zero_reg);  // No arguments.
  li(a1, Operand(ExternalReference(Runtime::kDebugBreak, isolate())));
  CEntryStub ces(1);
  Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}

#endif  // ENABLE_DEBUGGER_SUPPORT
1800
+
1801
+
1802
+ // ---------------------------------------------------------------------------
1803
+ // Exception handling
1804
+
1805
+ void MacroAssembler::PushTryHandler(CodeLocation try_location,
1806
+ HandlerType type) {
1807
+ // Adjust this code if not the case.
1808
+ ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
1809
+ // The return address is passed in register ra.
1810
+ if (try_location == IN_JAVASCRIPT) {
1811
+ if (type == TRY_CATCH_HANDLER) {
1812
+ li(t0, Operand(StackHandler::TRY_CATCH));
1813
+ } else {
1814
+ li(t0, Operand(StackHandler::TRY_FINALLY));
1815
+ }
1816
+ ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize
1817
+ && StackHandlerConstants::kFPOffset == 2 * kPointerSize
1818
+ && StackHandlerConstants::kPCOffset == 3 * kPointerSize
1819
+ && StackHandlerConstants::kNextOffset == 0 * kPointerSize);
1820
+ // Save the current handler as the next handler.
1821
+ li(t2, Operand(ExternalReference(Isolate::k_handler_address, isolate())));
1822
+ lw(t1, MemOperand(t2));
1823
+
1824
+ addiu(sp, sp, -StackHandlerConstants::kSize);
1825
+ sw(ra, MemOperand(sp, 12));
1826
+ sw(fp, MemOperand(sp, 8));
1827
+ sw(t0, MemOperand(sp, 4));
1828
+ sw(t1, MemOperand(sp, 0));
1829
+
1830
+ // Link this handler as the new current one.
1831
+ sw(sp, MemOperand(t2));
1832
+
1833
+ } else {
1834
+ // Must preserve a0-a3, and s0 (argv).
1835
+ ASSERT(try_location == IN_JS_ENTRY);
1836
+ ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize
1837
+ && StackHandlerConstants::kFPOffset == 2 * kPointerSize
1838
+ && StackHandlerConstants::kPCOffset == 3 * kPointerSize
1839
+ && StackHandlerConstants::kNextOffset == 0 * kPointerSize);
1840
+
1841
+ // The frame pointer does not point to a JS frame so we save NULL
1842
+ // for fp. We expect the code throwing an exception to check fp
1843
+ // before dereferencing it to restore the context.
1844
+ li(t0, Operand(StackHandler::ENTRY));
1845
+
1846
+ // Save the current handler as the next handler.
1847
+ li(t2, Operand(ExternalReference(Isolate::k_handler_address, isolate())));
1848
+ lw(t1, MemOperand(t2));
1849
+
1850
+ addiu(sp, sp, -StackHandlerConstants::kSize);
1851
+ sw(ra, MemOperand(sp, 12));
1852
+ sw(zero_reg, MemOperand(sp, 8));
1853
+ sw(t0, MemOperand(sp, 4));
1854
+ sw(t1, MemOperand(sp, 0));
1855
+
1856
+ // Link this handler as the new current one.
1857
+ sw(sp, MemOperand(t2));
1858
+ }
1859
+ }
1860
+
1861
+
1862
+ void MacroAssembler::PopTryHandler() {
1863
+ ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
1864
+ pop(a1);
1865
+ Addu(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize));
1866
+ li(at, Operand(ExternalReference(Isolate::k_handler_address, isolate())));
1867
+ sw(a1, MemOperand(at));
1868
+ }
1869
+
1870
+
1871
+ void MacroAssembler::AllocateInNewSpace(int object_size,
1872
+ Register result,
1873
+ Register scratch1,
1874
+ Register scratch2,
1875
+ Label* gc_required,
1876
+ AllocationFlags flags) {
1877
+ if (!FLAG_inline_new) {
1878
+ if (FLAG_debug_code) {
1879
+ // Trash the registers to simulate an allocation failure.
1880
+ li(result, 0x7091);
1881
+ li(scratch1, 0x7191);
1882
+ li(scratch2, 0x7291);
1883
+ }
1884
+ jmp(gc_required);
1885
+ return;
1886
+ }
1887
+
1888
+ ASSERT(!result.is(scratch1));
1889
+ ASSERT(!result.is(scratch2));
1890
+ ASSERT(!scratch1.is(scratch2));
1891
+ ASSERT(!scratch1.is(t9));
1892
+ ASSERT(!scratch2.is(t9));
1893
+ ASSERT(!result.is(t9));
1894
+
1895
+ // Make object size into bytes.
1896
+ if ((flags & SIZE_IN_WORDS) != 0) {
1897
+ object_size *= kPointerSize;
1898
+ }
1899
+ ASSERT_EQ(0, object_size & kObjectAlignmentMask);
1900
+
1901
+ // Check relative positions of allocation top and limit addresses.
1902
+ // ARM adds additional checks to make sure the ldm instruction can be
1903
+ // used. On MIPS we don't have ldm so we don't need additional checks either.
1904
+ ExternalReference new_space_allocation_top =
1905
+ ExternalReference::new_space_allocation_top_address(isolate());
1906
+ ExternalReference new_space_allocation_limit =
1907
+ ExternalReference::new_space_allocation_limit_address(isolate());
1908
+ intptr_t top =
1909
+ reinterpret_cast<intptr_t>(new_space_allocation_top.address());
1910
+ intptr_t limit =
1911
+ reinterpret_cast<intptr_t>(new_space_allocation_limit.address());
1912
+ ASSERT((limit - top) == kPointerSize);
1913
+
1914
+ // Set up allocation top address and object size registers.
1915
+ Register topaddr = scratch1;
1916
+ Register obj_size_reg = scratch2;
1917
+ li(topaddr, Operand(new_space_allocation_top));
1918
+ li(obj_size_reg, Operand(object_size));
1919
+
1920
+ // This code stores a temporary value in t9.
1921
+ if ((flags & RESULT_CONTAINS_TOP) == 0) {
1922
+ // Load allocation top into result and allocation limit into t9.
1923
+ lw(result, MemOperand(topaddr));
1924
+ lw(t9, MemOperand(topaddr, kPointerSize));
1925
+ } else {
1926
+ if (FLAG_debug_code) {
1927
+ // Assert that result actually contains top on entry. t9 is used
1928
+ // immediately below so this use of t9 does not cause difference with
1929
+ // respect to register content between debug and release mode.
1930
+ lw(t9, MemOperand(topaddr));
1931
+ Check(eq, "Unexpected allocation top", result, Operand(t9));
1932
+ }
1933
+ // Load allocation limit into t9. Result already contains allocation top.
1934
+ lw(t9, MemOperand(topaddr, limit - top));
1935
+ }
1936
+
1937
+ // Calculate new top and bail out if new space is exhausted. Use result
1938
+ // to calculate the new top.
1939
+ Addu(scratch2, result, Operand(obj_size_reg));
1940
+ Branch(gc_required, Ugreater, scratch2, Operand(t9));
1941
+ sw(scratch2, MemOperand(topaddr));
1942
+
1943
+ // Tag object if requested.
1944
+ if ((flags & TAG_OBJECT) != 0) {
1945
+ Addu(result, result, Operand(kHeapObjectTag));
1946
+ }
1947
+ }
1948
+
1949
+
1950
+ void MacroAssembler::AllocateInNewSpace(Register object_size,
1951
+ Register result,
1952
+ Register scratch1,
1953
+ Register scratch2,
1954
+ Label* gc_required,
1955
+ AllocationFlags flags) {
1956
+ if (!FLAG_inline_new) {
1957
+ if (FLAG_debug_code) {
1958
+ // Trash the registers to simulate an allocation failure.
1959
+ li(result, 0x7091);
1960
+ li(scratch1, 0x7191);
1961
+ li(scratch2, 0x7291);
1962
+ }
1963
+ jmp(gc_required);
1964
+ return;
1965
+ }
1966
+
1967
+ ASSERT(!result.is(scratch1));
1968
+ ASSERT(!result.is(scratch2));
1969
+ ASSERT(!scratch1.is(scratch2));
1970
+ ASSERT(!scratch1.is(t9) && !scratch2.is(t9) && !result.is(t9));
1971
+
1972
+ // Check relative positions of allocation top and limit addresses.
1973
+ // ARM adds additional checks to make sure the ldm instruction can be
1974
+ // used. On MIPS we don't have ldm so we don't need additional checks either.
1975
+ ExternalReference new_space_allocation_top =
1976
+ ExternalReference::new_space_allocation_top_address(isolate());
1977
+ ExternalReference new_space_allocation_limit =
1978
+ ExternalReference::new_space_allocation_limit_address(isolate());
1979
+ intptr_t top =
1980
+ reinterpret_cast<intptr_t>(new_space_allocation_top.address());
1981
+ intptr_t limit =
1982
+ reinterpret_cast<intptr_t>(new_space_allocation_limit.address());
1983
+ ASSERT((limit - top) == kPointerSize);
1984
+
1985
+ // Set up allocation top address and object size registers.
1986
+ Register topaddr = scratch1;
1987
+ li(topaddr, Operand(new_space_allocation_top));
1988
+
1989
+ // This code stores a temporary value in t9.
1990
+ if ((flags & RESULT_CONTAINS_TOP) == 0) {
1991
+ // Load allocation top into result and allocation limit into t9.
1992
+ lw(result, MemOperand(topaddr));
1993
+ lw(t9, MemOperand(topaddr, kPointerSize));
1994
+ } else {
1995
+ if (FLAG_debug_code) {
1996
+ // Assert that result actually contains top on entry. t9 is used
1997
+ // immediately below so this use of t9 does not cause difference with
1998
+ // respect to register content between debug and release mode.
1999
+ lw(t9, MemOperand(topaddr));
2000
+ Check(eq, "Unexpected allocation top", result, Operand(t9));
2001
+ }
2002
+ // Load allocation limit into t9. Result already contains allocation top.
2003
+ lw(t9, MemOperand(topaddr, limit - top));
2004
+ }
2005
+
2006
+ // Calculate new top and bail out if new space is exhausted. Use result
2007
+ // to calculate the new top. Object size may be in words so a shift is
2008
+ // required to get the number of bytes.
2009
+ if ((flags & SIZE_IN_WORDS) != 0) {
2010
+ sll(scratch2, object_size, kPointerSizeLog2);
2011
+ Addu(scratch2, result, scratch2);
2012
+ } else {
2013
+ Addu(scratch2, result, Operand(object_size));
2014
+ }
2015
+ Branch(gc_required, Ugreater, scratch2, Operand(t9));
2016
+
2017
+ // Update allocation top. result temporarily holds the new top.
2018
+ if (FLAG_debug_code) {
2019
+ And(t9, scratch2, Operand(kObjectAlignmentMask));
2020
+ Check(eq, "Unaligned allocation in new space", t9, Operand(zero_reg));
2021
+ }
2022
+ sw(scratch2, MemOperand(topaddr));
2023
+
2024
+ // Tag object if requested.
2025
+ if ((flags & TAG_OBJECT) != 0) {
2026
+ Addu(result, result, Operand(kHeapObjectTag));
2027
+ }
2028
+ }
2029
+
2030
+
2031
+ void MacroAssembler::UndoAllocationInNewSpace(Register object,
2032
+ Register scratch) {
2033
+ ExternalReference new_space_allocation_top =
2034
+ ExternalReference::new_space_allocation_top_address(isolate());
2035
+
2036
+ // Make sure the object has no tag before resetting top.
2037
+ And(object, object, Operand(~kHeapObjectTagMask));
2038
+ #ifdef DEBUG
2039
+ // Check that the object un-allocated is below the current top.
2040
+ li(scratch, Operand(new_space_allocation_top));
2041
+ lw(scratch, MemOperand(scratch));
2042
+ Check(less, "Undo allocation of non allocated memory",
2043
+ object, Operand(scratch));
2044
+ #endif
2045
+ // Write the address of the object to un-allocate as the current top.
2046
+ li(scratch, Operand(new_space_allocation_top));
2047
+ sw(object, MemOperand(scratch));
2048
+ }
2049
+
2050
+
2051
+ void MacroAssembler::AllocateTwoByteString(Register result,
2052
+ Register length,
2053
+ Register scratch1,
2054
+ Register scratch2,
2055
+ Register scratch3,
2056
+ Label* gc_required) {
2057
+ // Calculate the number of bytes needed for the characters in the string while
2058
+ // observing object alignment.
2059
+ ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
2060
+ sll(scratch1, length, 1); // Length in bytes, not chars.
2061
+ addiu(scratch1, scratch1,
2062
+ kObjectAlignmentMask + SeqTwoByteString::kHeaderSize);
2063
+ And(scratch1, scratch1, Operand(~kObjectAlignmentMask));
2064
+
2065
+ // Allocate two-byte string in new space.
2066
+ AllocateInNewSpace(scratch1,
2067
+ result,
2068
+ scratch2,
2069
+ scratch3,
2070
+ gc_required,
2071
+ TAG_OBJECT);
2072
+
2073
+ // Set the map, length and hash field.
2074
+ InitializeNewString(result,
2075
+ length,
2076
+ Heap::kStringMapRootIndex,
2077
+ scratch1,
2078
+ scratch2);
2079
+ }
2080
+
2081
+
2082
+ void MacroAssembler::AllocateAsciiString(Register result,
2083
+ Register length,
2084
+ Register scratch1,
2085
+ Register scratch2,
2086
+ Register scratch3,
2087
+ Label* gc_required) {
2088
+ // Calculate the number of bytes needed for the characters in the string
2089
+ // while observing object alignment.
2090
+ ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
2091
+ ASSERT(kCharSize == 1);
2092
+ addiu(scratch1, length, kObjectAlignmentMask + SeqAsciiString::kHeaderSize);
2093
+ And(scratch1, scratch1, Operand(~kObjectAlignmentMask));
2094
+
2095
+ // Allocate ASCII string in new space.
2096
+ AllocateInNewSpace(scratch1,
2097
+ result,
2098
+ scratch2,
2099
+ scratch3,
2100
+ gc_required,
2101
+ TAG_OBJECT);
2102
+
2103
+ // Set the map, length and hash field.
2104
+ InitializeNewString(result,
2105
+ length,
2106
+ Heap::kAsciiStringMapRootIndex,
2107
+ scratch1,
2108
+ scratch2);
2109
+ }
2110
+
2111
+
2112
+ void MacroAssembler::AllocateTwoByteConsString(Register result,
2113
+ Register length,
2114
+ Register scratch1,
2115
+ Register scratch2,
2116
+ Label* gc_required) {
2117
+ AllocateInNewSpace(ConsString::kSize,
2118
+ result,
2119
+ scratch1,
2120
+ scratch2,
2121
+ gc_required,
2122
+ TAG_OBJECT);
2123
+ InitializeNewString(result,
2124
+ length,
2125
+ Heap::kConsStringMapRootIndex,
2126
+ scratch1,
2127
+ scratch2);
2128
+ }
2129
+
2130
+
2131
+ void MacroAssembler::AllocateAsciiConsString(Register result,
2132
+ Register length,
2133
+ Register scratch1,
2134
+ Register scratch2,
2135
+ Label* gc_required) {
2136
+ AllocateInNewSpace(ConsString::kSize,
2137
+ result,
2138
+ scratch1,
2139
+ scratch2,
2140
+ gc_required,
2141
+ TAG_OBJECT);
2142
+ InitializeNewString(result,
2143
+ length,
2144
+ Heap::kConsAsciiStringMapRootIndex,
2145
+ scratch1,
2146
+ scratch2);
2147
+ }
2148
+
2149
+
2150
+ // Allocates a heap number or jumps to the label if the young space is full and
2151
+ // a scavenge is needed.
2152
+ void MacroAssembler::AllocateHeapNumber(Register result,
2153
+ Register scratch1,
2154
+ Register scratch2,
2155
+ Register heap_number_map,
2156
+ Label* need_gc) {
2157
+ // Allocate an object in the heap for the heap number and tag it as a heap
2158
+ // object.
2159
+ AllocateInNewSpace(HeapNumber::kSize,
2160
+ result,
2161
+ scratch1,
2162
+ scratch2,
2163
+ need_gc,
2164
+ TAG_OBJECT);
2165
+
2166
+ // Store heap number map in the allocated object.
2167
+ AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
2168
+ sw(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset));
2169
+ }
2170
+
2171
+
2172
+ void MacroAssembler::AllocateHeapNumberWithValue(Register result,
2173
+ FPURegister value,
2174
+ Register scratch1,
2175
+ Register scratch2,
2176
+ Label* gc_required) {
2177
+ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
2178
+ AllocateHeapNumber(result, scratch1, scratch2, t6, gc_required);
2179
+ sdc1(value, FieldMemOperand(result, HeapNumber::kValueOffset));
2180
+ }
2181
+
2182
+
2183
+ // Copies a fixed number of fields of heap objects from src to dst.
2184
+ void MacroAssembler::CopyFields(Register dst,
2185
+ Register src,
2186
+ RegList temps,
2187
+ int field_count) {
2188
+ ASSERT((temps & dst.bit()) == 0);
2189
+ ASSERT((temps & src.bit()) == 0);
2190
+ // Primitive implementation using only one temporary register.
2191
+
2192
+ Register tmp = no_reg;
2193
+ // Find a temp register in temps list.
2194
+ for (int i = 0; i < kNumRegisters; i++) {
2195
+ if ((temps & (1 << i)) != 0) {
2196
+ tmp.code_ = i;
2197
+ break;
2198
+ }
2199
+ }
2200
+ ASSERT(!tmp.is(no_reg));
2201
+
2202
+ for (int i = 0; i < field_count; i++) {
2203
+ lw(tmp, FieldMemOperand(src, i * kPointerSize));
2204
+ sw(tmp, FieldMemOperand(dst, i * kPointerSize));
2205
+ }
2206
+ }
2207
+
2208
+
2209
// Branches to |fail| unless the map of |obj| equals the expected |map|
// handle. When |is_heap_object| is false, |obj| is first checked for being
// a smi (smis have no map) and |fail| is taken in that case too.
// Clobbers |scratch| and the at register.
void MacroAssembler::CheckMap(Register obj,
                              Register scratch,
                              Handle<Map> map,
                              Label* fail,
                              bool is_heap_object) {
  if (!is_heap_object) {
    JumpIfSmi(obj, fail);
  }
  lw(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  li(at, Operand(map));
  Branch(fail, ne, scratch, Operand(at));
}
2221
+
2222
+
2223
// Branches to |fail| unless the map of |obj| equals the root-list map at
// |index|. When |is_heap_object| is false, a smi |obj| also takes |fail|.
// Clobbers |scratch| and the at register.
void MacroAssembler::CheckMap(Register obj,
                              Register scratch,
                              Heap::RootListIndex index,
                              Label* fail,
                              bool is_heap_object) {
  if (!is_heap_object) {
    JumpIfSmi(obj, fail);
  }
  lw(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  LoadRoot(at, index);
  Branch(fail, ne, scratch, Operand(at));
}
2235
+
2236
+
2237
+ // -----------------------------------------------------------------------------
2238
+ // JavaScript invokes
2239
+
2240
// Shared prologue for the InvokeCode/InvokeFunction family. Compares the
// actual argument count against the expected one; on mismatch it calls or
// tail-jumps to the ArgumentsAdaptorTrampoline using the register contract
// documented below. On a match, control falls through to the caller's
// invoke sequence; |done| is only jumped to after an adapted CALL_FUNCTION.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_reg,
                                    Label* done,
                                    InvokeFlag flag,
                                    PostCallGenerator* post_call_generator) {
  bool definitely_matches = false;
  Label regular_invoke;

  // Check whether the expected and actual arguments count match. If not,
  // setup registers according to contract with ArgumentsAdaptorTrampoline:
  // a0: actual arguments count
  // a1: function (passed through to callee)
  // a2: expected arguments count
  // a3: callee code entry

  // The code below is made a lot easier because the calling code already sets
  // up actual and expected registers according to the contract if values are
  // passed in registers.
  ASSERT(actual.is_immediate() || actual.reg().is(a0));
  ASSERT(expected.is_immediate() || expected.reg().is(a2));
  ASSERT((!code_constant.is_null() && code_reg.is(no_reg)) || code_reg.is(a3));

  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      // Counts are compile-time constants and equal: no adaption needed.
      definitely_matches = true;
    } else {
      li(a0, Operand(actual.immediate()));
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        li(a2, Operand(expected.immediate()));
      }
    }
  } else {
    // Expected count only known at run time: branch past the adaptor
    // when the dynamic comparison shows a match.
    if (actual.is_immediate()) {
      Branch(&regular_invoke, eq, expected.reg(), Operand(actual.immediate()));
      li(a0, Operand(actual.immediate()));
    } else {
      Branch(&regular_invoke, eq, expected.reg(), Operand(actual.reg()));
    }
  }

  if (!definitely_matches) {
    if (!code_constant.is_null()) {
      // Materialize the callee code entry in a3 for the adaptor.
      li(a3, Operand(code_constant));
      addiu(a3, a3, Code::kHeaderSize - kHeapObjectTag);
    }

    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (flag == CALL_FUNCTION) {
      Call(adaptor, RelocInfo::CODE_TARGET);
      if (post_call_generator != NULL) post_call_generator->Generate();
      jmp(done);
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&regular_invoke);
  }
}
2308
+
2309
+
2310
// Invokes the code held in |code| (a register), adapting the argument
// count first via InvokePrologue if necessary. CALL_FUNCTION emits a call,
// JUMP_FUNCTION a tail jump.
void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag,
                                PostCallGenerator* post_call_generator) {
  Label done;

  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag,
                 post_call_generator);
  if (flag == CALL_FUNCTION) {
    Call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    Jump(code);
  }
  // Continue here if InvokePrologue does handle the invocation due to
  // mismatched parameter counts.
  bind(&done);
}
2329
+
2330
+
2331
// Invokes the code object |code| (a compile-time handle) with relocation
// mode |rmode|, adapting the argument count first if necessary.
// NOTE: unlike the register overload, this variant takes no
// PostCallGenerator.
void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;

  InvokePrologue(expected, actual, code, no_reg, &done, flag);
  if (flag == CALL_FUNCTION) {
    Call(code, rmode);
  } else {
    Jump(code, rmode);
  }
  // Continue here if InvokePrologue does handle the invocation due to
  // mismatched parameter counts.
  bind(&done);
}
2348
+
2349
+
2350
// Invokes the JSFunction in |function| (must be a1 per the JS calling
// convention). Loads cp from the function's context, the expected argument
// count into a2, and the code entry into a3, then delegates to InvokeCode.
void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    PostCallGenerator* post_call_generator) {
  // Contract with called JS functions requires that function is passed in a1.
  ASSERT(function.is(a1));
  Register expected_reg = a2;
  Register code_reg = a3;

  lw(code_reg, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
  lw(expected_reg,
     FieldMemOperand(code_reg,
                     SharedFunctionInfo::kFormalParameterCountOffset));
  // The formal parameter count is stored as a smi; untag it.
  sra(expected_reg, expected_reg, kSmiTagSize);
  lw(code_reg, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));

  ParameterCount expected(expected_reg);
  InvokeCode(code_reg, expected, actual, flag, post_call_generator);
}
2370
+
2371
+
2372
// Invokes a compile-time-known, already-compiled JSFunction: loads the
// function into a1, sets up cp, and invokes its cached code.
// Crankshaft support is not implemented for MIPS here.
void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function->is_compiled());

  // Get the function and setup the context.
  li(a1, Operand(Handle<JSFunction>(function)));
  lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // Invoke the cached code.
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  if (V8::UseCrankshaft()) {
    UNIMPLEMENTED_MIPS();
  } else {
    InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
  }
}
2390
+
2391
+
2392
// Branches to |fail| unless |heap_object| is a JS object. Loads the map
// into |map| as a side effect; clobbers |scratch|.
void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                          Register map,
                                          Register scratch,
                                          Label* fail) {
  lw(map, FieldMemOperand(heap_object, HeapObject::kMapOffset));
  IsInstanceJSObjectType(map, scratch, fail);
}
2399
+
2400
+
2401
// Branches to |fail| unless the instance type in |map| is within the
// [FIRST_JS_OBJECT_TYPE, LAST_JS_OBJECT_TYPE] range. Clobbers |scratch|.
void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  lbu(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset));
  Branch(fail, lt, scratch, Operand(FIRST_JS_OBJECT_TYPE));
  Branch(fail, gt, scratch, Operand(LAST_JS_OBJECT_TYPE));
}
2408
+
2409
+
2410
// Branches to |fail| unless |object| is a string (its instance type has the
// string bit clear in kIsNotStringMask). |object| must be a heap object;
// smis are not handled here. Clobbers |scratch|.
void MacroAssembler::IsObjectJSStringType(Register object,
                                          Register scratch,
                                          Label* fail) {
  ASSERT(kNotStringTag != 0);

  lw(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  lbu(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  And(scratch, scratch, Operand(kIsNotStringMask));
  Branch(fail, ne, scratch, Operand(zero_reg));
}
2420
+
2421
+
2422
+ // ---------------------------------------------------------------------------
2423
+ // Support functions.
2424
+
2425
+
2426
// Loads the prototype of the JSFunction in |function| into |result|,
// following the initial map when the function has one. Jumps to |miss| if
// |function| is not a function, or if its prototype slot holds the hole
// (prototype not yet materialized). Clobbers |scratch| and t8.
void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  JumpIfSmi(function, miss);

  // Check that the function really is a function. Load map into result reg.
  GetObjectType(function, result, scratch);
  Branch(miss, ne, scratch, Operand(JS_FUNCTION_TYPE));

  // Make sure that the function has an instance prototype.
  Label non_instance;
  lbu(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
  And(scratch, scratch, Operand(1 << Map::kHasNonInstancePrototype));
  Branch(&non_instance, ne, scratch, Operand(zero_reg));

  // Get the prototype or initial map from the function.
  lw(result,
     FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  LoadRoot(t8, Heap::kTheHoleValueRootIndex);
  Branch(miss, eq, result, Operand(t8));

  // If the function does not have an initial map, we're done.
  Label done;
  GetObjectType(result, scratch, scratch);
  Branch(&done, ne, scratch, Operand(MAP_TYPE));

  // Get the prototype from the initial map.
  lw(result, FieldMemOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  lw(result, FieldMemOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}
2470
+
2471
+
2472
// Loads the map of |object| into |map| and its instance type byte into
// |type_reg|. |object| must be a heap object (not a smi).
void MacroAssembler::GetObjectType(Register object,
                                   Register map,
                                   Register type_reg) {
  lw(map, FieldMemOperand(object, HeapObject::kMapOffset));
  lbu(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
}
2478
+
2479
+
2480
+ // -----------------------------------------------------------------------------
2481
+ // Runtime calls
2482
+
2483
// Calls a code stub, optionally conditionally (|cond| compares |r1| with
// |r2|). Note |r1|/|r2| are comparison operands here, not the MIPS
// registers of the same names.
void MacroAssembler::CallStub(CodeStub* stub, Condition cond,
                              Register r1, const Operand& r2) {
  ASSERT(allow_stub_calls());  // Stub calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, cond, r1, r2);
}
2488
+
2489
+
2490
// Tail-calls a code stub: jumps to the stub without pushing a return
// address, so the stub returns directly to our caller.
void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Stub calls are not allowed in some stubs.
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}
2494
+
2495
+
2496
// Handles an illegal runtime-call arity: drops |num_arguments| stack slots
// and places undefined in the return-value register v0.
void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addiu(sp, sp, num_arguments * kPointerSize);
  }
  LoadRoot(v0, Heap::kUndefinedValueRootIndex);
}
2502
+
2503
+
2504
// Extracts the cached array index from a string hash field in |hash| and
// leaves it smi-tagged in |index|. The caller must already know the hash
// field contains an array index. |hash| is clobbered.
void MacroAssembler::IndexFromHash(Register hash,
                                   Register index) {
  // If the hash field contains an array index pick it out. The assert checks
  // that the constants for the maximum number of digits for an array index
  // cached in the hash field and the number of bits reserved for it does not
  // conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in key. kArrayIndexValueMask has zeros in
  // the low kHashShift bits.
  STATIC_ASSERT(kSmiTag == 0);
  Ext(hash, hash, String::kHashShift, String::kArrayIndexValueBits);
  sll(index, hash, kSmiTagSize);
}
2518
+
2519
+
2520
// Converts |object| (smi or heap number) to a double in |result|.
// Jumps to |not_number| if it is neither; with AVOID_NANS_AND_INFINITIES
// set, NaN and +/-Infinity are also routed to |not_number|. OBJECT_NOT_SMI
// skips the smi fast path. Clobbers both scratch registers.
void MacroAssembler::ObjectToDoubleFPURegister(Register object,
                                               FPURegister result,
                                               Register scratch1,
                                               Register scratch2,
                                               Register heap_number_map,
                                               Label* not_number,
                                               ObjectToDoubleFlags flags) {
  Label done;
  if ((flags & OBJECT_NOT_SMI) == 0) {
    Label not_smi;
    JumpIfNotSmi(object, &not_smi);
    // Remove smi tag and convert to double.
    sra(scratch1, object, kSmiTagSize);
    mtc1(scratch1, result);
    cvt_d_w(result, result);
    Branch(&done);
    bind(&not_smi);
  }
  // Check for heap number and load double value from it.
  lw(scratch1, FieldMemOperand(object, HeapObject::kMapOffset));
  Branch(not_number, ne, scratch1, Operand(heap_number_map));

  if ((flags & AVOID_NANS_AND_INFINITIES) != 0) {
    // If exponent is all ones the number is either a NaN or +/-Infinity.
    Register exponent = scratch1;
    Register mask_reg = scratch2;
    lw(exponent, FieldMemOperand(object, HeapNumber::kExponentOffset));
    li(mask_reg, HeapNumber::kExponentMask);

    And(exponent, exponent, mask_reg);
    Branch(not_number, eq, exponent, Operand(mask_reg));
  }
  ldc1(result, FieldMemOperand(object, HeapNumber::kValueOffset));
  bind(&done);
}
2555
+
2556
+
2557
+
2558
// Converts the smi in |smi| to a double in |value|: untag, move to the
// FPU, convert word to double. Clobbers |scratch1|.
void MacroAssembler::SmiToDoubleFPURegister(Register smi,
                                            FPURegister value,
                                            Register scratch1) {
  sra(scratch1, smi, kSmiTagSize);
  mtc1(scratch1, value);
  cvt_d_w(value, value);
}
2565
+
2566
+
2567
// Calls runtime function |f| with |num_arguments| stack arguments through
// the CEntry stub. On an arity mismatch for fixed-arity runtime functions,
// emits the IllegalOperation sequence instead of calling.
void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments) {
  // All parameters are on the stack. v0 has the return value after call.

  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  li(a0, num_arguments);
  li(a1, Operand(ExternalReference(f, isolate())));
  CEntryStub stub(1);
  CallStub(&stub);
}
2588
+
2589
+
2590
// Calls runtime function |id| through a CEntry stub configured to save the
// double (FPU) registers across the call.
void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
  const Runtime::Function* function = Runtime::FunctionForId(id);
  li(a0, Operand(function->nargs));
  li(a1, Operand(ExternalReference(function, isolate())));
  CEntryStub stub(1);
  stub.SaveDoubles();
  CallStub(&stub);
}
2598
+
2599
+
2600
// Convenience overload: resolves |fid| to its Runtime::Function and calls it.
void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(fid), num_arguments);
}
2603
+
2604
+
2605
// Calls the C function behind |ext| through the CEntry stub, passing the
// argument count in a0 and the target in a1.
void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  li(a0, Operand(num_arguments));
  li(a1, Operand(ext));

  CEntryStub stub(1);
  CallStub(&stub);
}
2613
+
2614
+
2615
// Tail-calls the C function behind |ext|: sets up the argument count in a0
// and jumps (no return address pushed). |result_size| is currently unused
// on this path.
void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  li(a0, Operand(num_arguments));
  JumpToExternalReference(ext);
}
2625
+
2626
+
2627
// Tail-calls runtime function |fid| by resolving it to an external
// reference and delegating to TailCallExternalReference.
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid, isolate()),
                            num_arguments,
                            result_size);
}
2634
+
2635
+
2636
// Jumps to |builtin| via the CEntry stub with the target in a1. The caller
// is responsible for having set up a0 (argument count) beforehand.
void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) {
  li(a1, Operand(builtin));
  CEntryStub stub(1);
  Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
}
2641
+
2642
+
2643
// Invokes the JavaScript builtin |id|: loads its code entry into t9 and
// either calls it (CALL_JS, with optional post-call generation) or
// tail-jumps to it (JUMP_JS). Clobbers t9 and a1 (via GetBuiltinEntry).
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeJSFlags flags,
                                   PostCallGenerator* post_call_generator) {
  GetBuiltinEntry(t9, id);
  if (flags == CALL_JS) {
    Call(t9);
    if (post_call_generator != NULL) post_call_generator->Generate();
  } else {
    ASSERT(flags == JUMP_JS);
    Jump(t9);
  }
}
2655
+
2656
+
2657
// Loads the JSFunction for builtin |id| into |target| by chasing
// cp -> global object -> builtins object -> function slot.
void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the builtins object into target register.
  lw(target, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  lw(target, FieldMemOperand(target, GlobalObject::kBuiltinsOffset));
  // Load the JavaScript builtin function from the builtins object.
  lw(target, FieldMemOperand(target,
                             JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}
2666
+
2667
+
2668
// Loads the code entry point of builtin |id| into |target|, leaving the
// builtin's JSFunction in a1 (the JS call convention's function register).
void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(a1));
  GetBuiltinFunction(a1, id);
  // Load the code entry point from the builtins object.
  lw(target, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
}
2674
+
2675
+
2676
// Stores |value| into the native-code stats counter, when counters are
// enabled. Clobbers both scratch registers in that case.
void MacroAssembler::SetCounter(StatsCounter* counter, int value,
                                Register scratch1, Register scratch2) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    li(scratch1, Operand(value));
    li(scratch2, Operand(ExternalReference(counter)));
    sw(scratch1, MemOperand(scratch2));
  }
}
2684
+
2685
+
2686
// Adds |value| (> 0) to the native-code stats counter via a
// load/add/store sequence, when counters are enabled.
void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    li(scratch2, Operand(ExternalReference(counter)));
    lw(scratch1, MemOperand(scratch2));
    Addu(scratch1, scratch1, Operand(value));
    sw(scratch1, MemOperand(scratch2));
  }
}
2696
+
2697
+
2698
// Subtracts |value| (> 0) from the native-code stats counter via a
// load/subtract/store sequence, when counters are enabled.
void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    li(scratch2, Operand(ExternalReference(counter)));
    lw(scratch1, MemOperand(scratch2));
    Subu(scratch1, scratch1, Operand(value));
    sw(scratch1, MemOperand(scratch2));
  }
}
2708
+
2709
+
2710
+ // -----------------------------------------------------------------------------
2711
+ // Debugging
2712
+
2713
// Emits a Check (abort if |cc| applied to |rs|, |rt| fails) only when
// debug code generation is enabled; otherwise emits nothing.
void MacroAssembler::Assert(Condition cc, const char* msg,
                            Register rs, Operand rt) {
  if (FLAG_debug_code)
    Check(cc, msg, rs, rt);
}
2718
+
2719
+
2720
// Debug-only check that |reg| holds the root value at |index|.
// Clobbers the at register when debug code is enabled.
void MacroAssembler::AssertRegisterIsRoot(Register reg,
                                          Heap::RootListIndex index) {
  if (FLAG_debug_code) {
    LoadRoot(at, index);
    Check(eq, "Register did not match expected root", reg, Operand(at));
  }
}
2727
+
2728
+
2729
// Debug-only check that |elements| is a FixedArray or copy-on-write
// FixedArray (i.e. a fast-elements backing store). |elements| is saved and
// restored around the check; the at register is clobbered.
void MacroAssembler::AssertFastElements(Register elements) {
  if (FLAG_debug_code) {
    ASSERT(!elements.is(at));
    Label ok;
    Push(elements);
    lw(elements, FieldMemOperand(elements, HeapObject::kMapOffset));
    LoadRoot(at, Heap::kFixedArrayMapRootIndex);
    Branch(&ok, eq, elements, Operand(at));
    LoadRoot(at, Heap::kFixedCOWArrayMapRootIndex);
    Branch(&ok, eq, elements, Operand(at));
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
    Pop(elements);
  }
}
2744
+
2745
+
2746
// Emits code that aborts with |msg| unless the condition |cc| holds for
// |rs| compared against |rt|. Always emitted (unlike Assert).
void MacroAssembler::Check(Condition cc, const char* msg,
                           Register rs, Operand rt) {
  Label L;
  Branch(&L, cc, rs, rt);
  Abort(msg);
  // Will not return here.
  bind(&L);
}
2754
+
2755
+
2756
// Emits code that aborts execution via Runtime::kAbort, encoding the
// message pointer as a smi-safe (pointer, misalignment) pair so the GC
// never scans it as a heap pointer. When the trampoline pool is blocked,
// the sequence is padded with nops to a fixed instruction count so callers
// that rely on exact code size keep working.
void MacroAssembler::Abort(const char* msg) {
  Label abort_start;
  bind(&abort_start);
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  AllowStubCallsScope allow_scope(this, true);

  li(a0, Operand(p0));
  Push(a0);
  li(a0, Operand(Smi::FromInt(p1 - p0)));
  Push(a0);
  CallRuntime(Runtime::kAbort, 2);
  // Will not return here.
  if (is_trampoline_pool_blocked()) {
    // If the calling code cares about the exact number of
    // instructions generated, we insert padding here to keep the size
    // of the Abort macro constant.
    // Currently in debug mode with debug_code enabled the number of
    // generated instructions is 14, so we use this as a maximum value.
    static const int kExpectedAbortInstructions = 14;
    int abort_instructions = InstructionsGeneratedSince(&abort_start);
    ASSERT(abort_instructions <= kExpectedAbortInstructions);
    while (abort_instructions++ < kExpectedAbortInstructions) {
      nop();
    }
  }
}
2796
+
2797
+
2798
// Loads into |dst| the function context |context_chain_length| levels up
// the context chain from cp, resolving intermediate contexts to their
// enclosing function context at each step.
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    lw(dst, MemOperand(cp, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    lw(dst, FieldMemOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      lw(dst, MemOperand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      lw(dst, FieldMemOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    lw(dst, MemOperand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {  // Slot is in the current function context.
    // The context may be an intermediate context, not a function context.
    lw(dst, MemOperand(cp, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  }
}
2815
+
2816
+
2817
// Loads the global function at context slot |index| into |function| by
// chasing cp -> global object -> global context -> slot.
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  lw(function, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  lw(function, FieldMemOperand(function,
                               GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  lw(function, MemOperand(function, Context::SlotOffset(index)));
}
2826
+
2827
+
2828
// Loads the initial map of the global function in |function| into |map|.
// In debug-code builds additionally verifies that the loaded value really
// is a map (its map is the meta map), aborting otherwise.
void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map,
                                                  Register scratch) {
  // Load the initial map. The global functions all have initial maps.
  lw(map, FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (FLAG_debug_code) {
    Label ok, fail;
    CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, false);
    Branch(&ok);
    bind(&fail);
    Abort("Global functions must have initial map");
    bind(&ok);
  }
}
2842
+
2843
+
2844
// Builds a standard internal stack frame of the given |type|: pushes
// ra, fp, cp, the smi-encoded frame type, and the code object, then points
// fp at the saved-fp slot. Clobbers t8 and t9.
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  addiu(sp, sp, -5 * kPointerSize);
  li(t8, Operand(Smi::FromInt(type)));
  li(t9, Operand(CodeObject()));
  sw(ra, MemOperand(sp, 4 * kPointerSize));
  sw(fp, MemOperand(sp, 3 * kPointerSize));
  sw(cp, MemOperand(sp, 2 * kPointerSize));
  sw(t8, MemOperand(sp, 1 * kPointerSize));
  sw(t9, MemOperand(sp, 0 * kPointerSize));
  // fp points at the saved fp slot, matching the layout EnterFrame created.
  addiu(fp, sp, 3 * kPointerSize);
}
2855
+
2856
+
2857
// Tears down a frame built by EnterFrame: restores fp and ra from the
// frame and pops them. |type| is unused on this restore path (the layout
// is the same for all frame types built by EnterFrame).
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  mov(sp, fp);
  lw(fp, MemOperand(sp, 0 * kPointerSize));
  lw(ra, MemOperand(sp, 1 * kPointerSize));
  addiu(sp, sp, 2 * kPointerSize);
}
2863
+
2864
+
2865
// Builds an exit frame for calling out from JS to C++. On entry a0 holds
// argc and a1 the builtin function. Computes the argv pointer, saves the
// callee's sp (in t9) plus ra/fp, publishes fp and cp in the isolate's
// c_entry_fp/context slots, and stashes argc/argv/function in the given
// (presumably callee-saved — see callers) registers. With |save_doubles|
// it additionally spills the FPU registers below the frame marker.
// Clobbers t8 and t9.
void MacroAssembler::EnterExitFrame(Register hold_argc,
                                    Register hold_argv,
                                    Register hold_function,
                                    bool save_doubles) {
  // a0 is argc.
  sll(t8, a0, kPointerSizeLog2);
  addu(hold_argv, sp, t8);
  // argv points at the last argument, one word below sp + argc words.
  addiu(hold_argv, hold_argv, -kPointerSize);

  // Compute callee's stack pointer before making changes and save it as
  // t9 register so that it is restored as sp register on exit, thereby
  // popping the args.
  // t9 = sp + kPointerSize * #args
  addu(t9, sp, t8);

  // Compute the argv pointer and keep it in a callee-saved register.
  // This only seems to be needed for crankshaft and may cause problems
  // so it's disabled for now.
  // Subu(s6, t9, Operand(kPointerSize));

  // Align the stack at this point.
  AlignStack(0);

  // Save registers.
  addiu(sp, sp, -12);
  sw(t9, MemOperand(sp, 8));
  sw(ra, MemOperand(sp, 4));
  sw(fp, MemOperand(sp, 0));
  mov(fp, sp);  // Setup new frame pointer.

  li(t8, Operand(CodeObject()));
  Push(t8);  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  li(t8, Operand(ExternalReference(Isolate::k_c_entry_fp_address, isolate())));
  sw(fp, MemOperand(t8));
  li(t8, Operand(ExternalReference(Isolate::k_context_address, isolate())));
  sw(cp, MemOperand(t8));

  // Setup argc and the builtin function in callee-saved registers.
  mov(hold_argc, a0);
  mov(hold_function, a1);

  // Optionally save all double registers.
  if (save_doubles) {
#ifdef DEBUG
    int frame_alignment = ActivationFrameAlignment();
#endif
    // The stack alignment code above made sp unaligned, so add space for one
    // more double register and use aligned addresses.
    ASSERT(kDoubleSize == frame_alignment);
    // Mark the frame as containing doubles by pushing a non-valid return
    // address, i.e. 0.
    ASSERT(ExitFrameConstants::kMarkerOffset == -2 * kPointerSize);
    push(zero_reg);  // Marker and alignment word.
    int space = FPURegister::kNumRegisters * kDoubleSize + kPointerSize;
    Subu(sp, sp, Operand(space));
    // Remember: we only need to save every 2nd double FPU value.
    for (int i = 0; i < FPURegister::kNumRegisters; i += 2) {
      FPURegister reg = FPURegister::from_code(i);
      sdc1(reg, MemOperand(sp, i * kDoubleSize + kPointerSize));
    }
    // Note that f0 will be accessible at fp - 2*kPointerSize -
    // FPURegister::kNumRegisters * kDoubleSize, since the code slot and the
    // alignment word were pushed after the fp.
  }
}
2932
+
2933
+
2934
// Tears down an exit frame built by EnterExitFrame and returns to the
// caller. Optionally restores the saved FPU registers, clears the
// isolate's c_entry_fp slot, restores cp, then pops fp/ra/sp and jumps
// through ra. Note this emits the return itself (jr ra).
void MacroAssembler::LeaveExitFrame(bool save_doubles) {
  // Optionally restore all double registers.
  if (save_doubles) {
    // TODO(regis): Use vldrm instruction.
    // Remember: we only need to restore every 2nd double FPU value.
    for (int i = 0; i < FPURegister::kNumRegisters; i += 2) {
      FPURegister reg = FPURegister::from_code(i);
      // Register f30-f31 is just below the marker.
      const int offset = ExitFrameConstants::kMarkerOffset;
      ldc1(reg, MemOperand(fp,
          (i - FPURegister::kNumRegisters) * kDoubleSize + offset));
    }
  }

  // Clear top frame.
  li(t8, Operand(ExternalReference(Isolate::k_c_entry_fp_address, isolate())));
  sw(zero_reg, MemOperand(t8));

  // Restore current context from top and clear it in debug mode.
  li(t8, Operand(ExternalReference(Isolate::k_context_address, isolate())));
  lw(cp, MemOperand(t8));
#ifdef DEBUG
  // NOTE(review): clobbers the saved context slot with whatever is in a3 —
  // presumably a deliberate debug-mode poison value; confirm against the
  // ARM port before relying on a3 here.
  sw(a3, MemOperand(t8));
#endif

  // Pop the arguments, restore registers, and return.
  mov(sp, fp);  // Respect ABI stack constraint.
  lw(fp, MemOperand(sp, 0));
  lw(ra, MemOperand(sp, 4));
  lw(sp, MemOperand(sp, 8));
  jr(ra);
  nop();  // Branch delay slot nop.
}
2967
+
2968
+
2969
// Initializes the header of a freshly allocated string object: smi-encoded
// length, the map at |map_index|, and the empty hash field. Clobbers both
// scratch registers.
void MacroAssembler::InitializeNewString(Register string,
                                         Register length,
                                         Heap::RootListIndex map_index,
                                         Register scratch1,
                                         Register scratch2) {
  // Smi-tag the length before storing it.
  sll(scratch1, length, kSmiTagSize);
  LoadRoot(scratch2, map_index);
  sw(scratch1, FieldMemOperand(string, String::kLengthOffset));
  li(scratch1, Operand(String::kEmptyHashField));
  sw(scratch2, FieldMemOperand(string, HeapObject::kMapOffset));
  sw(scratch1, FieldMemOperand(string, String::kHashFieldOffset));
}
2981
+
2982
+
2983
// Returns the stack alignment (in bytes) required for activation frames:
// the host OS value when generating code on real MIPS hardware, otherwise
// the simulator's configured alignment flag.
int MacroAssembler::ActivationFrameAlignment() {
#if defined(V8_HOST_ARCH_MIPS)
  // Running on the real platform. Use the alignment as mandated by the local
  // environment.
  // Note: This will break if we ever start generating snapshots on one Mips
  // platform for another Mips platform with a different alignment.
  return OS::ActivationFrameAlignment();
#else  // defined(V8_HOST_ARCH_MIPS)
  // If we are using the simulator then we should always align to the expected
  // alignment. As the simulator is used to generate snapshots we do not know
  // if the target platform will need alignment, so this is controlled from a
  // flag.
  return FLAG_sim_stack_alignment;
#endif  // defined(V8_HOST_ARCH_MIPS)
}
2998
+
2999
+
3000
// Aligns sp to the activation frame alignment by conditionally pushing a
// zero word. |offset| selects the target residue: 0 aligns sp to 0 mod 8,
// 1 aligns to 4 mod 8. Clobbers t8. Only supports 8-byte alignment.
void MacroAssembler::AlignStack(int offset) {
  // On MIPS an offset of 0 aligns to 0 modulo 8 bytes,
  // and an offset of 1 aligns to 4 modulo 8 bytes.
#if defined(V8_HOST_ARCH_MIPS)
  // Running on the real platform. Use the alignment as mandated by the local
  // environment.
  // Note: This will break if we ever start generating snapshots on one MIPS
  // platform for another MIPS platform with a different alignment.
  int activation_frame_alignment = OS::ActivationFrameAlignment();
#else  // defined(V8_HOST_ARCH_MIPS)
  // If we are using the simulator then we should always align to the expected
  // alignment. As the simulator is used to generate snapshots we do not know
  // if the target platform will need alignment, so we will always align at
  // this point here.
  int activation_frame_alignment = 2 * kPointerSize;
#endif  // defined(V8_HOST_ARCH_MIPS)
  if (activation_frame_alignment != kPointerSize) {
    // This code needs to be made more general if this assert doesn't hold.
    ASSERT(activation_frame_alignment == 2 * kPointerSize);
    if (offset == 0) {
      // Push a padding word only when sp is misaligned (conditional push).
      andi(t8, sp, activation_frame_alignment - 1);
      Push(zero_reg, eq, t8, zero_reg);
    } else {
      andi(t8, sp, activation_frame_alignment - 1);
      addiu(t8, t8, -4);
      Push(zero_reg, eq, t8, zero_reg);
    }
  }
}
3029
+
3030
+
3031
+
3032
// Branches to |not_power_of_two_or_zero| unless |reg| holds a positive
// power of two, using the (reg & (reg - 1)) == 0 trick. Clobbers |scratch|
// and at; the and_ executes in the first branch's delay slot.
void MacroAssembler::JumpIfNotPowerOfTwoOrZero(
    Register reg,
    Register scratch,
    Label* not_power_of_two_or_zero) {
  Subu(scratch, reg, Operand(1));
  Branch(USE_DELAY_SLOT, not_power_of_two_or_zero, lt,
         scratch, Operand(zero_reg));
  and_(at, scratch, reg);  // In the delay slot.
  Branch(not_power_of_two_or_zero, ne, at, Operand(zero_reg));
}
3042
+
3043
+
3044
// Branches to |on_not_both_smi| unless both registers hold smis (the OR of
// their tag bits is zero). Clobbers the at register.
void MacroAssembler::JumpIfNotBothSmi(Register reg1,
                                      Register reg2,
                                      Label* on_not_both_smi) {
  STATIC_ASSERT(kSmiTag == 0);
  ASSERT_EQ(1, kSmiTagMask);
  or_(at, reg1, reg2);
  andi(at, at, kSmiTagMask);
  Branch(on_not_both_smi, ne, at, Operand(zero_reg));
}
3053
+
3054
+
3055
// Branches to |on_either_smi| if at least one of the registers holds a smi
// (the AND of their tag bits is zero). Clobbers the at register.
void MacroAssembler::JumpIfEitherSmi(Register reg1,
                                     Register reg2,
                                     Label* on_either_smi) {
  STATIC_ASSERT(kSmiTag == 0);
  ASSERT_EQ(1, kSmiTagMask);
  // Both Smi tags must be 1 (not Smi).
  and_(at, reg1, reg2);
  andi(at, at, kSmiTagMask);
  Branch(on_either_smi, eq, at, Operand(zero_reg));
}
3065
+
3066
+
3067
// Debug-only check that aborts when |object| IS a smi.
// Clobbers the at register.
void MacroAssembler::AbortIfSmi(Register object) {
  STATIC_ASSERT(kSmiTag == 0);
  andi(at, object, kSmiTagMask);
  Assert(ne, "Operand is a smi", at, Operand(zero_reg));
}
3072
+
3073
+
3074
+ void MacroAssembler::AbortIfNotSmi(Register object) {
3075
+ STATIC_ASSERT(kSmiTag == 0);
3076
+ andi(at, object, kSmiTagMask);
3077
+ Assert(eq, "Operand is a smi", at, Operand(zero_reg));
3078
+ }
3079
+
3080
+
3081
// Debug-only check that |src| holds the root value at |root_value_index|,
// aborting with |message| otherwise. Clobbers the at register, so |src|
// must not be at.
void MacroAssembler::AbortIfNotRootValue(Register src,
                                         Heap::RootListIndex root_value_index,
                                         const char* message) {
  ASSERT(!src.is(at));
  LoadRoot(at, root_value_index);
  Assert(eq, message, src, Operand(at));
}
3088
+
3089
+
3090
// Jumps to on_not_heap_number unless object's map is the heap-number map.
// heap_number_map must already contain that map (checked in debug code).
// object is assumed to be a heap object (not a smi); scratch is clobbered.
void MacroAssembler::JumpIfNotHeapNumber(Register object,
                                         Register heap_number_map,
                                         Register scratch,
                                         Label* on_not_heap_number) {
  lw(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  Branch(on_not_heap_number, ne, scratch, Operand(heap_number_map));
}
3098
+
3099
+
3100
// Jumps to failure unless both first and second are sequential ASCII
// strings.  Callers must have already excluded smis.  scratch1 and
// scratch2 are clobbered (they end up holding the instance types).
void MacroAssembler::JumpIfNonSmisNotBothSequentialAsciiStrings(
    Register first,
    Register second,
    Register scratch1,
    Register scratch2,
    Label* failure) {
  // Test that both first and second are sequential ASCII strings.
  // Assume that they are non-smis.
  lw(scratch1, FieldMemOperand(first, HeapObject::kMapOffset));
  lw(scratch2, FieldMemOperand(second, HeapObject::kMapOffset));
  lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  lbu(scratch2, FieldMemOperand(scratch2, Map::kInstanceTypeOffset));

  JumpIfBothInstanceTypesAreNotSequentialAscii(scratch1,
                                               scratch2,
                                               scratch1,
                                               scratch2,
                                               failure);
}
3119
+
3120
+
3121
// Jumps to failure unless both first and second are non-smi sequential
// ASCII strings.  scratch1 and scratch2 are clobbered.
void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first,
                                                         Register second,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that neither is a smi: AND-ing the values keeps tag bit 1 only
  // if both are heap objects; a zero tag bit means at least one smi.
  STATIC_ASSERT(kSmiTag == 0);
  And(scratch1, first, Operand(second));
  And(scratch1, scratch1, Operand(kSmiTagMask));
  Branch(failure, eq, scratch1, Operand(zero_reg));
  JumpIfNonSmisNotBothSequentialAsciiStrings(first,
                                             second,
                                             scratch1,
                                             scratch2,
                                             failure);
}
3137
+
3138
+
3139
// Jumps to failure unless both instance-type bytes (in first and second)
// describe sequential ASCII strings.  scratch1/scratch2 receive the
// masked type bits; first/second may alias them.
void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
    Register first,
    Register second,
    Register scratch1,
    Register scratch2,
    Label* failure) {
  // Mask selects the string/encoding/representation bits; a sequential
  // ASCII string must match ASCII_STRING_TYPE exactly under this mask.
  int kFlatAsciiStringMask =
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
  int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  ASSERT(kFlatAsciiStringTag <= 0xffff);  // Ensure this fits 16-bit immed.
  andi(scratch1, first, kFlatAsciiStringMask);
  Branch(failure, ne, scratch1, Operand(kFlatAsciiStringTag));
  andi(scratch2, second, kFlatAsciiStringMask);
  Branch(failure, ne, scratch2, Operand(kFlatAsciiStringTag));
}
3154
+
3155
+
3156
// Jumps to failure unless the instance-type byte in 'type' describes a
// sequential ASCII string.  scratch receives the masked type bits.
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(Register type,
                                                            Register scratch,
                                                            Label* failure) {
  int kFlatAsciiStringMask =
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
  int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  And(scratch, type, Operand(kFlatAsciiStringMask));
  Branch(failure, ne, scratch, Operand(kFlatAsciiStringTag));
}
3165
+
3166
+
3167
// Number of C-call arguments passed in registers (a0..a3) under the MIPS
// O32 calling convention; further arguments go on the stack.
static const int kRegisterPassedArguments = 4;
3168
+
3169
// Reserves and aligns stack space for a subsequent CallCFunction:
// the four mandatory MIPS argument slots, any stack-passed arguments,
// and one hidden slot for the Isolate pointer appended by
// CallCFunctionHelper.  scratch is clobbered.
void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = ActivationFrameAlignment();

  // Reserve space for the Isolate address, which is always passed (by
  // CallCFunctionHelper) as the last parameter.
  num_arguments += 1;

  // Up to four simple arguments are passed in registers a0..a3.
  // Those four arguments must have reserved argument slots on the stack for
  // mips, even though those argument slots are not normally used.
  // Remaining arguments are pushed on the stack, above (higher address than)
  // the argument slots.
  ASSERT(StandardFrameConstants::kCArgsSlotsSize % kPointerSize == 0);
  int stack_passed_arguments = ((num_arguments <= kRegisterPassedArguments) ?
                                0 : num_arguments - kRegisterPassedArguments) +
                               (StandardFrameConstants::kCArgsSlotsSize /
                                kPointerSize);
  if (frame_alignment > kPointerSize) {
    // Make stack end at alignment and make room for num_arguments - 4 words
    // and the original value of sp, which CallCFunctionHelper reloads into
    // sp after the call.
    mov(scratch, sp);
    Subu(sp, sp, Operand((stack_passed_arguments + 1) * kPointerSize));
    ASSERT(IsPowerOf2(frame_alignment));
    And(sp, sp, Operand(-frame_alignment));
    sw(scratch, MemOperand(sp, stack_passed_arguments * kPointerSize));
  } else {
    Subu(sp, sp, Operand(stack_passed_arguments * kPointerSize));
  }
}
3197
+
3198
+
3199
// Calls a C function identified by an external reference; 'at' serves as
// the scratch register used to materialize the reference.
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  CallCFunctionHelper(no_reg, function, at, num_arguments);
}
3203
+
3204
+
3205
// Calls a C function whose address is already in 'function'.  The
// the_hole_value_location reference is only a placeholder for the unused
// ExternalReference parameter of the helper.
void MacroAssembler::CallCFunction(Register function,
                                   Register scratch,
                                   int num_arguments) {
  CallCFunctionHelper(function,
                      ExternalReference::the_hole_value_location(isolate()),
                      scratch,
                      num_arguments);
}
3213
+
3214
+
3215
+ void MacroAssembler::CallCFunctionHelper(Register function,
3216
+ ExternalReference function_reference,
3217
+ Register scratch,
3218
+ int num_arguments) {
3219
+ // Push Isolate address as the last argument.
3220
+ if (num_arguments < kRegisterPassedArguments) {
3221
+ Register arg_to_reg[] = {a0, a1, a2, a3};
3222
+ Register r = arg_to_reg[num_arguments];
3223
+ li(r, Operand(ExternalReference::isolate_address()));
3224
+ } else {
3225
+ int stack_passed_arguments = num_arguments - kRegisterPassedArguments +
3226
+ (StandardFrameConstants::kCArgsSlotsSize /
3227
+ kPointerSize);
3228
+ // Push Isolate address on the stack after the arguments.
3229
+ li(scratch, Operand(ExternalReference::isolate_address()));
3230
+ sw(scratch, MemOperand(sp, stack_passed_arguments * kPointerSize));
3231
+ }
3232
+ num_arguments += 1;
3233
+
3234
+ // Make sure that the stack is aligned before calling a C function unless
3235
+ // running in the simulator. The simulator has its own alignment check which
3236
+ // provides more information.
3237
+ // The argument stots are presumed to have been set up by
3238
+ // PrepareCallCFunction. The C function must be called via t9, for mips ABI.
3239
+
3240
+ #if defined(V8_HOST_ARCH_MIPS)
3241
+ if (emit_debug_code()) {
3242
+ int frame_alignment = OS::ActivationFrameAlignment();
3243
+ int frame_alignment_mask = frame_alignment - 1;
3244
+ if (frame_alignment > kPointerSize) {
3245
+ ASSERT(IsPowerOf2(frame_alignment));
3246
+ Label alignment_as_expected;
3247
+ And(at, sp, Operand(frame_alignment_mask));
3248
+ Branch(&alignment_as_expected, eq, at, Operand(zero_reg));
3249
+ // Don't use Check here, as it will call Runtime_Abort possibly
3250
+ // re-entering here.
3251
+ stop("Unexpected alignment in CallCFunction");
3252
+ bind(&alignment_as_expected);
3253
+ }
3254
+ }
3255
+ #endif // V8_HOST_ARCH_MIPS
3256
+
3257
+ // Just call directly. The function called cannot cause a GC, or
3258
+ // allow preemption, so the return address in the link register
3259
+ // stays correct.
3260
+ if (!function.is(t9)) {
3261
+ mov(t9, function);
3262
+ function = t9;
3263
+ }
3264
+
3265
+ if (function.is(no_reg)) {
3266
+ li(t9, Operand(function_reference));
3267
+ function = t9;
3268
+ }
3269
+
3270
+ Call(function);
3271
+
3272
+ ASSERT(StandardFrameConstants::kCArgsSlotsSize % kPointerSize == 0);
3273
+ int stack_passed_arguments = ((num_arguments <= kRegisterPassedArguments) ?
3274
+ 0 : num_arguments - kRegisterPassedArguments) +
3275
+ (StandardFrameConstants::kCArgsSlotsSize /
3276
+ kPointerSize);
3277
+
3278
+ if (OS::ActivationFrameAlignment() > kPointerSize) {
3279
+ lw(sp, MemOperand(sp, stack_passed_arguments * kPointerSize));
3280
+ } else {
3281
+ Addu(sp, sp, Operand(stack_passed_arguments * sizeof(kPointerSize)));
3282
+ }
3283
+ }
3284
+
3285
+
3286
+ #undef BRANCH_ARGS_CHECK
3287
+
3288
+
3289
+ #ifdef ENABLE_DEBUGGER_SUPPORT
3290
// Creates a macro assembler that writes directly over 'instructions'
// existing instructions starting at 'address', for in-place code patching.
CodePatcher::CodePatcher(byte* address, int instructions)
    : address_(address),
      instructions_(instructions),
      size_(instructions * Assembler::kInstrSize),
      masm_(address, size_ + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate
  // size bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
3300
+
3301
+
3302
// Flushes the instruction cache over the patched region and verifies (in
// debug builds) that exactly the expected number of bytes was emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
3310
+
3311
+
3312
// Emits one raw instruction word into the patched region.
void CodePatcher::Emit(Instr x) {
  masm()->emit(x);
}
3315
+
3316
+
3317
// Emits an address as raw instruction-sized data into the patched region.
void CodePatcher::Emit(Address addr) {
  masm()->emit(reinterpret_cast<Instr>(addr));
}
3320
+
3321
+
3322
+ #endif // ENABLE_DEBUGGER_SUPPORT
3323
+
3324
+
3325
+ } } // namespace v8::internal
3326
+
3327
+ #endif // V8_TARGET_ARCH_MIPS