therubyracer 0.8.1.pre2 → 0.8.1

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of therubyracer might be problematic. Click here for more details.

Files changed (877)
  1. data/Changelog.md +2 -1
  2. data/README.md +6 -3
  3. data/ext/v8/upstream/3.1.8/.gitignore +31 -0
  4. data/ext/v8/upstream/3.1.8/AUTHORS +40 -0
  5. data/ext/v8/upstream/3.1.8/ChangeLog +2566 -0
  6. data/ext/v8/upstream/3.1.8/LICENSE +52 -0
  7. data/ext/v8/upstream/3.1.8/LICENSE.strongtalk +29 -0
  8. data/ext/v8/upstream/3.1.8/LICENSE.v8 +26 -0
  9. data/ext/v8/upstream/3.1.8/LICENSE.valgrind +45 -0
  10. data/ext/v8/upstream/3.1.8/SConstruct +1192 -0
  11. data/ext/v8/upstream/3.1.8/build/README.txt +25 -0
  12. data/ext/v8/upstream/3.1.8/build/all.gyp +18 -0
  13. data/ext/v8/upstream/3.1.8/build/armu.gypi +32 -0
  14. data/ext/v8/upstream/3.1.8/build/common.gypi +82 -0
  15. data/ext/v8/upstream/3.1.8/build/gyp_v8 +145 -0
  16. data/ext/v8/upstream/3.1.8/include/v8-debug.h +384 -0
  17. data/ext/v8/upstream/3.1.8/include/v8-preparser.h +116 -0
  18. data/ext/v8/upstream/3.1.8/include/v8-profiler.h +426 -0
  19. data/ext/v8/upstream/3.1.8/include/v8-testing.h +99 -0
  20. data/ext/v8/upstream/3.1.8/include/v8.h +3846 -0
  21. data/ext/v8/upstream/3.1.8/include/v8stdint.h +53 -0
  22. data/ext/v8/upstream/3.1.8/preparser/preparser-process.cc +206 -0
  23. data/ext/v8/upstream/3.1.8/src/SConscript +356 -0
  24. data/ext/v8/upstream/3.1.8/src/accessors.cc +907 -0
  25. data/ext/v8/upstream/3.1.8/src/accessors.h +121 -0
  26. data/ext/v8/upstream/3.1.8/src/allocation.cc +204 -0
  27. data/ext/v8/upstream/3.1.8/src/allocation.h +176 -0
  28. data/ext/v8/upstream/3.1.8/src/api.cc +5191 -0
  29. data/ext/v8/upstream/3.1.8/src/api.h +508 -0
  30. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/apinatives.js +0 -0
  31. data/ext/v8/upstream/3.1.8/src/apiutils.h +80 -0
  32. data/ext/v8/upstream/3.1.8/src/arguments.h +105 -0
  33. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm-inl.h +352 -0
  34. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.cc +2756 -0
  35. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.h +1294 -0
  36. data/ext/v8/upstream/3.1.8/src/arm/builtins-arm.cc +1628 -0
  37. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.cc +6783 -0
  38. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.h +657 -0
  39. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm-inl.h +48 -0
  40. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.cc +7403 -0
  41. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.h +595 -0
  42. data/ext/v8/upstream/3.1.8/src/arm/constants-arm.cc +152 -0
  43. data/ext/v8/upstream/3.1.8/src/arm/constants-arm.h +769 -0
  44. data/ext/v8/upstream/3.1.8/src/arm/cpu-arm.cc +147 -0
  45. data/ext/v8/upstream/3.1.8/src/arm/debug-arm.cc +315 -0
  46. data/ext/v8/upstream/3.1.8/src/arm/deoptimizer-arm.cc +700 -0
  47. data/ext/v8/upstream/3.1.8/src/arm/disasm-arm.cc +1439 -0
  48. data/ext/v8/upstream/3.1.8/src/arm/frames-arm.cc +45 -0
  49. data/ext/v8/upstream/3.1.8/src/arm/frames-arm.h +168 -0
  50. data/ext/v8/upstream/3.1.8/src/arm/full-codegen-arm.cc +4230 -0
  51. data/ext/v8/upstream/3.1.8/src/arm/ic-arm.cc +1799 -0
  52. data/ext/v8/upstream/3.1.8/src/arm/jump-target-arm.cc +174 -0
  53. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.cc +2041 -0
  54. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.h +2046 -0
  55. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.cc +3822 -0
  56. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.h +312 -0
  57. data/ext/v8/upstream/3.1.8/src/arm/lithium-gap-resolver-arm.cc +303 -0
  58. data/ext/v8/upstream/3.1.8/src/arm/lithium-gap-resolver-arm.h +84 -0
  59. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.cc +2701 -0
  60. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.h +1015 -0
  61. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.cc +1280 -0
  62. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.h +252 -0
  63. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/register-allocator-arm-inl.h +0 -0
  64. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/register-allocator-arm.cc +0 -0
  65. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/register-allocator-arm.h +0 -0
  66. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.cc +3165 -0
  67. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.h +402 -0
  68. data/ext/v8/upstream/3.1.8/src/arm/stub-cache-arm.cc +4077 -0
  69. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/virtual-frame-arm-inl.h +0 -0
  70. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.cc +843 -0
  71. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.h +520 -0
  72. data/ext/v8/upstream/3.1.8/src/array.js +1231 -0
  73. data/ext/v8/upstream/3.1.8/src/assembler.cc +973 -0
  74. data/ext/v8/upstream/3.1.8/src/assembler.h +787 -0
  75. data/ext/v8/upstream/3.1.8/src/ast-inl.h +107 -0
  76. data/ext/v8/upstream/3.1.8/src/ast.cc +1067 -0
  77. data/ext/v8/upstream/3.1.8/src/ast.h +2177 -0
  78. data/ext/v8/upstream/3.1.8/src/atomicops.h +165 -0
  79. data/ext/v8/upstream/3.1.8/src/atomicops_internals_arm_gcc.h +145 -0
  80. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_gcc.cc +126 -0
  81. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_gcc.h +287 -0
  82. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_macosx.h +301 -0
  83. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_msvc.h +203 -0
  84. data/ext/v8/upstream/3.1.8/src/bignum-dtoa.cc +655 -0
  85. data/ext/v8/upstream/3.1.8/src/bignum-dtoa.h +81 -0
  86. data/ext/v8/upstream/3.1.8/src/bignum.cc +768 -0
  87. data/ext/v8/upstream/3.1.8/src/bignum.h +140 -0
  88. data/ext/v8/upstream/3.1.8/src/bootstrapper.cc +1888 -0
  89. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/bootstrapper.h +0 -0
  90. data/ext/v8/upstream/3.1.8/src/builtins.cc +1586 -0
  91. data/ext/v8/upstream/3.1.8/src/builtins.h +339 -0
  92. data/ext/v8/upstream/3.1.8/src/bytecodes-irregexp.h +105 -0
  93. data/ext/v8/upstream/3.1.8/src/cached-powers.cc +177 -0
  94. data/ext/v8/upstream/3.1.8/src/cached-powers.h +65 -0
  95. data/ext/v8/upstream/3.1.8/src/char-predicates-inl.h +94 -0
  96. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/char-predicates.h +0 -0
  97. data/ext/v8/upstream/3.1.8/src/checks.cc +110 -0
  98. data/ext/v8/upstream/3.1.8/src/checks.h +292 -0
  99. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/circular-queue-inl.h +0 -0
  100. data/ext/v8/upstream/3.1.8/src/circular-queue.cc +122 -0
  101. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/circular-queue.h +0 -0
  102. data/ext/v8/upstream/3.1.8/src/code-stubs.cc +230 -0
  103. data/ext/v8/upstream/3.1.8/src/code-stubs.h +950 -0
  104. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/code.h +0 -0
  105. data/ext/v8/upstream/3.1.8/src/codegen-inl.h +64 -0
  106. data/ext/v8/upstream/3.1.8/src/codegen.cc +495 -0
  107. data/ext/v8/upstream/3.1.8/src/codegen.h +245 -0
  108. data/ext/v8/upstream/3.1.8/src/compilation-cache.cc +654 -0
  109. data/ext/v8/upstream/3.1.8/src/compilation-cache.h +112 -0
  110. data/ext/v8/upstream/3.1.8/src/compiler.cc +806 -0
  111. data/ext/v8/upstream/3.1.8/src/compiler.h +290 -0
  112. data/ext/v8/upstream/3.1.8/src/contexts.cc +320 -0
  113. data/ext/v8/upstream/3.1.8/src/contexts.h +376 -0
  114. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/conversions-inl.h +0 -0
  115. data/ext/v8/upstream/3.1.8/src/conversions.cc +1069 -0
  116. data/ext/v8/upstream/3.1.8/src/conversions.h +122 -0
  117. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/counters.cc +0 -0
  118. data/ext/v8/upstream/3.1.8/src/counters.h +242 -0
  119. data/ext/v8/upstream/3.1.8/src/cpu-profiler-inl.h +100 -0
  120. data/ext/v8/upstream/3.1.8/src/cpu-profiler.cc +554 -0
  121. data/ext/v8/upstream/3.1.8/src/cpu-profiler.h +291 -0
  122. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/cpu.h +0 -0
  123. data/ext/v8/upstream/3.1.8/src/d8-debug.cc +367 -0
  124. data/ext/v8/upstream/3.1.8/src/d8-debug.h +157 -0
  125. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/d8-posix.cc +0 -0
  126. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/d8-readline.cc +0 -0
  127. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/d8-windows.cc +0 -0
  128. data/ext/v8/upstream/3.1.8/src/d8.cc +792 -0
  129. data/ext/v8/upstream/3.1.8/src/d8.gyp +85 -0
  130. data/ext/v8/upstream/3.1.8/src/d8.h +231 -0
  131. data/ext/v8/upstream/3.1.8/src/d8.js +2798 -0
  132. data/ext/v8/upstream/3.1.8/src/data-flow.cc +545 -0
  133. data/ext/v8/upstream/3.1.8/src/data-flow.h +379 -0
  134. data/ext/v8/upstream/3.1.8/src/date.js +1103 -0
  135. data/ext/v8/upstream/3.1.8/src/dateparser-inl.h +125 -0
  136. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/dateparser.cc +0 -0
  137. data/ext/v8/upstream/3.1.8/src/dateparser.h +263 -0
  138. data/ext/v8/upstream/3.1.8/src/debug-agent.cc +446 -0
  139. data/ext/v8/upstream/3.1.8/src/debug-agent.h +131 -0
  140. data/ext/v8/upstream/3.1.8/src/debug-debugger.js +2569 -0
  141. data/ext/v8/upstream/3.1.8/src/debug.cc +3085 -0
  142. data/ext/v8/upstream/3.1.8/src/debug.h +1025 -0
  143. data/ext/v8/upstream/3.1.8/src/deoptimizer.cc +1185 -0
  144. data/ext/v8/upstream/3.1.8/src/deoptimizer.h +529 -0
  145. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/disasm.h +0 -0
  146. data/ext/v8/upstream/3.1.8/src/disassembler.cc +338 -0
  147. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/disassembler.h +0 -0
  148. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/diy-fp.cc +0 -0
  149. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/diy-fp.h +0 -0
  150. data/ext/v8/upstream/3.1.8/src/double.h +238 -0
  151. data/ext/v8/upstream/3.1.8/src/dtoa.cc +103 -0
  152. data/ext/v8/upstream/3.1.8/src/dtoa.h +85 -0
  153. data/ext/v8/upstream/3.1.8/src/execution.cc +735 -0
  154. data/ext/v8/upstream/3.1.8/src/execution.h +322 -0
  155. data/ext/v8/upstream/3.1.8/src/extensions/experimental/experimental.gyp +53 -0
  156. data/ext/v8/upstream/3.1.8/src/extensions/experimental/i18n-extension.cc +264 -0
  157. data/ext/v8/upstream/3.1.8/src/extensions/experimental/i18n-extension.h +64 -0
  158. data/ext/v8/upstream/3.1.8/src/extensions/externalize-string-extension.cc +141 -0
  159. data/ext/v8/upstream/3.1.8/src/extensions/externalize-string-extension.h +50 -0
  160. data/ext/v8/upstream/3.1.8/src/extensions/gc-extension.cc +58 -0
  161. data/ext/v8/upstream/3.1.8/src/extensions/gc-extension.h +49 -0
  162. data/ext/v8/upstream/3.1.8/src/factory.cc +1087 -0
  163. data/ext/v8/upstream/3.1.8/src/factory.h +432 -0
  164. data/ext/v8/upstream/3.1.8/src/fast-dtoa.cc +736 -0
  165. data/ext/v8/upstream/3.1.8/src/fast-dtoa.h +83 -0
  166. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/fixed-dtoa.cc +0 -0
  167. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/fixed-dtoa.h +0 -0
  168. data/ext/v8/upstream/3.1.8/src/flag-definitions.h +552 -0
  169. data/ext/v8/upstream/3.1.8/src/flags.cc +551 -0
  170. data/ext/v8/upstream/3.1.8/src/flags.h +79 -0
  171. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/frame-element.cc +0 -0
  172. data/ext/v8/upstream/3.1.8/src/frame-element.h +277 -0
  173. data/ext/v8/upstream/3.1.8/src/frames-inl.h +210 -0
  174. data/ext/v8/upstream/3.1.8/src/frames.cc +1232 -0
  175. data/ext/v8/upstream/3.1.8/src/frames.h +826 -0
  176. data/ext/v8/upstream/3.1.8/src/full-codegen.cc +1382 -0
  177. data/ext/v8/upstream/3.1.8/src/full-codegen.h +751 -0
  178. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.cc +90 -0
  179. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.h +111 -0
  180. data/ext/v8/upstream/3.1.8/src/gdb-jit.cc +1547 -0
  181. data/ext/v8/upstream/3.1.8/src/gdb-jit.h +138 -0
  182. data/ext/v8/upstream/3.1.8/src/global-handles.cc +534 -0
  183. data/ext/v8/upstream/3.1.8/src/global-handles.h +181 -0
  184. data/ext/v8/upstream/3.1.8/src/globals.h +325 -0
  185. data/ext/v8/upstream/3.1.8/src/handles-inl.h +80 -0
  186. data/ext/v8/upstream/3.1.8/src/handles.cc +910 -0
  187. data/ext/v8/upstream/3.1.8/src/handles.h +424 -0
  188. data/ext/v8/upstream/3.1.8/src/hashmap.cc +230 -0
  189. data/ext/v8/upstream/3.1.8/src/hashmap.h +121 -0
  190. data/ext/v8/upstream/3.1.8/src/heap-inl.h +587 -0
  191. data/ext/v8/upstream/3.1.8/src/heap-profiler.cc +1128 -0
  192. data/ext/v8/upstream/3.1.8/src/heap-profiler.h +381 -0
  193. data/ext/v8/upstream/3.1.8/src/heap.cc +5610 -0
  194. data/ext/v8/upstream/3.1.8/src/heap.h +2218 -0
  195. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.cc +1490 -0
  196. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.h +3493 -0
  197. data/ext/v8/upstream/3.1.8/src/hydrogen.cc +6056 -0
  198. data/ext/v8/upstream/3.1.8/src/hydrogen.h +1091 -0
  199. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32-inl.h +429 -0
  200. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.cc +2800 -0
  201. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.h +1093 -0
  202. data/ext/v8/upstream/3.1.8/src/ia32/builtins-ia32.cc +1590 -0
  203. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.cc +6624 -0
  204. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.h +536 -0
  205. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/codegen-ia32-inl.h +0 -0
  206. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.cc +10354 -0
  207. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.h +798 -0
  208. data/ext/v8/upstream/3.1.8/src/ia32/cpu-ia32.cc +87 -0
  209. data/ext/v8/upstream/3.1.8/src/ia32/debug-ia32.cc +309 -0
  210. data/ext/v8/upstream/3.1.8/src/ia32/deoptimizer-ia32.cc +664 -0
  211. data/ext/v8/upstream/3.1.8/src/ia32/disasm-ia32.cc +1597 -0
  212. data/ext/v8/upstream/3.1.8/src/ia32/frames-ia32.cc +45 -0
  213. data/ext/v8/upstream/3.1.8/src/ia32/frames-ia32.h +140 -0
  214. data/ext/v8/upstream/3.1.8/src/ia32/full-codegen-ia32.cc +4278 -0
  215. data/ext/v8/upstream/3.1.8/src/ia32/ic-ia32.cc +1786 -0
  216. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/jump-target-ia32.cc +0 -0
  217. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.cc +3880 -0
  218. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.h +309 -0
  219. data/ext/v8/upstream/3.1.8/src/ia32/lithium-gap-resolver-ia32.cc +460 -0
  220. data/ext/v8/upstream/3.1.8/src/ia32/lithium-gap-resolver-ia32.h +110 -0
  221. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.cc +2095 -0
  222. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.h +2127 -0
  223. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.cc +2031 -0
  224. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.h +798 -0
  225. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.cc +1253 -0
  226. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.h +215 -0
  227. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/register-allocator-ia32-inl.h +0 -0
  228. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/register-allocator-ia32.cc +0 -0
  229. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/register-allocator-ia32.h +0 -0
  230. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/simulator-ia32.cc +0 -0
  231. data/ext/v8/upstream/3.1.8/src/ia32/simulator-ia32.h +72 -0
  232. data/ext/v8/upstream/3.1.8/src/ia32/stub-cache-ia32.cc +3732 -0
  233. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.cc +1360 -0
  234. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.h +646 -0
  235. data/ext/v8/upstream/3.1.8/src/ic-inl.h +129 -0
  236. data/ext/v8/upstream/3.1.8/src/ic.cc +2333 -0
  237. data/ext/v8/upstream/3.1.8/src/ic.h +639 -0
  238. data/ext/v8/upstream/3.1.8/src/inspector.cc +63 -0
  239. data/ext/v8/upstream/3.1.8/src/inspector.h +62 -0
  240. data/ext/v8/upstream/3.1.8/src/interpreter-irregexp.cc +655 -0
  241. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/interpreter-irregexp.h +0 -0
  242. data/ext/v8/upstream/3.1.8/src/json.js +342 -0
  243. data/ext/v8/upstream/3.1.8/src/jsregexp.cc +5340 -0
  244. data/ext/v8/upstream/3.1.8/src/jsregexp.h +1484 -0
  245. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target-heavy-inl.h +0 -0
  246. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.cc +430 -0
  247. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.h +244 -0
  248. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target-inl.h +0 -0
  249. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target-light-inl.h +0 -0
  250. data/ext/v8/upstream/3.1.8/src/jump-target-light.cc +111 -0
  251. data/ext/v8/upstream/3.1.8/src/jump-target-light.h +193 -0
  252. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target.cc +0 -0
  253. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target.h +0 -0
  254. data/ext/v8/upstream/3.1.8/src/list-inl.h +206 -0
  255. data/ext/v8/upstream/3.1.8/src/list.h +164 -0
  256. data/ext/v8/upstream/3.1.8/src/lithium-allocator-inl.h +140 -0
  257. data/ext/v8/upstream/3.1.8/src/lithium-allocator.cc +2093 -0
  258. data/ext/v8/upstream/3.1.8/src/lithium-allocator.h +644 -0
  259. data/ext/v8/upstream/3.1.8/src/lithium.cc +168 -0
  260. data/ext/v8/upstream/3.1.8/src/lithium.h +592 -0
  261. data/ext/v8/upstream/3.1.8/src/liveedit-debugger.js +1082 -0
  262. data/ext/v8/upstream/3.1.8/src/liveedit.cc +1650 -0
  263. data/ext/v8/upstream/3.1.8/src/liveedit.h +174 -0
  264. data/ext/v8/upstream/3.1.8/src/liveobjectlist-inl.h +126 -0
  265. data/ext/v8/upstream/3.1.8/src/liveobjectlist.cc +2527 -0
  266. data/ext/v8/upstream/3.1.8/src/liveobjectlist.h +322 -0
  267. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/log-inl.h +0 -0
  268. data/ext/v8/upstream/3.1.8/src/log-utils.cc +336 -0
  269. data/ext/v8/upstream/3.1.8/src/log-utils.h +232 -0
  270. data/ext/v8/upstream/3.1.8/src/log.cc +1608 -0
  271. data/ext/v8/upstream/3.1.8/src/log.h +379 -0
  272. data/ext/v8/upstream/3.1.8/src/macro-assembler.h +120 -0
  273. data/ext/v8/upstream/3.1.8/src/macros.py +178 -0
  274. data/ext/v8/upstream/3.1.8/src/mark-compact.cc +2957 -0
  275. data/ext/v8/upstream/3.1.8/src/mark-compact.h +433 -0
  276. data/ext/v8/upstream/3.1.8/src/math.js +264 -0
  277. data/ext/v8/upstream/3.1.8/src/memory.h +82 -0
  278. data/ext/v8/upstream/3.1.8/src/messages.cc +164 -0
  279. data/ext/v8/upstream/3.1.8/src/messages.h +114 -0
  280. data/ext/v8/upstream/3.1.8/src/messages.js +1071 -0
  281. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/assembler-mips-inl.h +0 -0
  282. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/assembler-mips.cc +0 -0
  283. data/ext/v8/upstream/3.1.8/src/mips/assembler-mips.h +667 -0
  284. data/ext/v8/upstream/3.1.8/src/mips/builtins-mips.cc +205 -0
  285. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/codegen-mips-inl.h +0 -0
  286. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/codegen-mips.cc +0 -0
  287. data/ext/v8/upstream/3.1.8/src/mips/codegen-mips.h +431 -0
  288. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/constants-mips.cc +0 -0
  289. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/constants-mips.h +0 -0
  290. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/cpu-mips.cc +0 -0
  291. data/ext/v8/upstream/3.1.8/src/mips/debug-mips.cc +127 -0
  292. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/disasm-mips.cc +0 -0
  293. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/fast-codegen-mips.cc +0 -0
  294. data/ext/v8/upstream/3.1.8/src/mips/frames-mips.cc +96 -0
  295. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/frames-mips.h +0 -0
  296. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/full-codegen-mips.cc +0 -0
  297. data/ext/v8/upstream/3.1.8/src/mips/ic-mips.cc +208 -0
  298. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/jump-target-mips.cc +0 -0
  299. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/macro-assembler-mips.cc +0 -0
  300. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/macro-assembler-mips.h +0 -0
  301. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/register-allocator-mips-inl.h +0 -0
  302. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/register-allocator-mips.cc +0 -0
  303. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/register-allocator-mips.h +0 -0
  304. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.cc +1650 -0
  305. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.h +311 -0
  306. data/ext/v8/upstream/3.1.8/src/mips/stub-cache-mips.cc +418 -0
  307. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/virtual-frame-mips.cc +0 -0
  308. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/virtual-frame-mips.h +0 -0
  309. data/ext/v8/upstream/3.1.8/src/mirror-debugger.js +2380 -0
  310. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mksnapshot.cc +0 -0
  311. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/natives.h +0 -0
  312. data/ext/v8/upstream/3.1.8/src/objects-debug.cc +722 -0
  313. data/ext/v8/upstream/3.1.8/src/objects-inl.h +3946 -0
  314. data/ext/v8/upstream/3.1.8/src/objects-printer.cc +801 -0
  315. data/ext/v8/upstream/3.1.8/src/objects-visiting.cc +142 -0
  316. data/ext/v8/upstream/3.1.8/src/objects-visiting.h +401 -0
  317. data/ext/v8/upstream/3.1.8/src/objects.cc +10044 -0
  318. data/ext/v8/upstream/3.1.8/src/objects.h +6571 -0
  319. data/ext/v8/upstream/3.1.8/src/parser.cc +5165 -0
  320. data/ext/v8/upstream/3.1.8/src/parser.h +802 -0
  321. data/ext/v8/upstream/3.1.8/src/platform-cygwin.cc +745 -0
  322. data/ext/v8/upstream/3.1.8/src/platform-freebsd.cc +702 -0
  323. data/ext/v8/upstream/3.1.8/src/platform-linux.cc +981 -0
  324. data/ext/v8/upstream/3.1.8/src/platform-macos.cc +732 -0
  325. data/ext/v8/upstream/3.1.8/src/platform-nullos.cc +498 -0
  326. data/ext/v8/upstream/3.1.8/src/platform-openbsd.cc +657 -0
  327. data/ext/v8/upstream/3.1.8/src/platform-posix.cc +399 -0
  328. data/ext/v8/upstream/3.1.8/src/platform-solaris.cc +714 -0
  329. data/ext/v8/upstream/3.1.8/src/platform-win32.cc +1974 -0
  330. data/ext/v8/upstream/3.1.8/src/platform.h +636 -0
  331. data/ext/v8/upstream/3.1.8/src/preparse-data.cc +183 -0
  332. data/ext/v8/upstream/3.1.8/src/preparse-data.h +249 -0
  333. data/ext/v8/upstream/3.1.8/src/preparser-api.cc +213 -0
  334. data/ext/v8/upstream/3.1.8/src/preparser.cc +1205 -0
  335. data/ext/v8/upstream/3.1.8/src/preparser.h +278 -0
  336. data/ext/v8/upstream/3.1.8/src/prettyprinter.cc +1539 -0
  337. data/ext/v8/upstream/3.1.8/src/prettyprinter.h +223 -0
  338. data/ext/v8/upstream/3.1.8/src/profile-generator-inl.h +128 -0
  339. data/ext/v8/upstream/3.1.8/src/profile-generator.cc +2899 -0
  340. data/ext/v8/upstream/3.1.8/src/profile-generator.h +1151 -0
  341. data/ext/v8/upstream/3.1.8/src/property.cc +96 -0
  342. data/ext/v8/upstream/3.1.8/src/property.h +337 -0
  343. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/regexp-macro-assembler-irregexp-inl.h +0 -0
  344. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-irregexp.cc +470 -0
  345. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-irregexp.h +142 -0
  346. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-tracer.cc +373 -0
  347. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-tracer.h +104 -0
  348. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.cc +257 -0
  349. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.h +231 -0
  350. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/regexp-stack.cc +0 -0
  351. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/regexp-stack.h +0 -0
  352. data/ext/v8/upstream/3.1.8/src/regexp.js +483 -0
  353. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/register-allocator-inl.h +0 -0
  354. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/register-allocator.cc +0 -0
  355. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/register-allocator.h +0 -0
  356. data/ext/v8/upstream/3.1.8/src/rewriter.cc +1023 -0
  357. data/ext/v8/upstream/3.1.8/src/rewriter.h +59 -0
  358. data/ext/v8/upstream/3.1.8/src/runtime-profiler.cc +443 -0
  359. data/ext/v8/upstream/3.1.8/src/runtime-profiler.h +77 -0
  360. data/ext/v8/upstream/3.1.8/src/runtime.cc +11592 -0
  361. data/ext/v8/upstream/3.1.8/src/runtime.h +582 -0
  362. data/ext/v8/upstream/3.1.8/src/runtime.js +643 -0
  363. data/ext/v8/upstream/3.1.8/src/safepoint-table.cc +253 -0
  364. data/ext/v8/upstream/3.1.8/src/safepoint-table.h +263 -0
  365. data/ext/v8/upstream/3.1.8/src/scanner-base.cc +971 -0
  366. data/ext/v8/upstream/3.1.8/src/scanner-base.h +653 -0
  367. data/ext/v8/upstream/3.1.8/src/scanner.cc +586 -0
  368. data/ext/v8/upstream/3.1.8/src/scanner.h +194 -0
  369. data/ext/v8/upstream/3.1.8/src/scopeinfo.cc +636 -0
  370. data/ext/v8/upstream/3.1.8/src/scopeinfo.h +238 -0
  371. data/ext/v8/upstream/3.1.8/src/scopes.cc +1063 -0
  372. data/ext/v8/upstream/3.1.8/src/scopes.h +494 -0
  373. data/ext/v8/upstream/3.1.8/src/serialize.cc +1535 -0
  374. data/ext/v8/upstream/3.1.8/src/serialize.h +584 -0
  375. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/shell.h +0 -0
  376. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/simulator.h +0 -0
  377. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/smart-pointer.h +0 -0
  378. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/snapshot-common.cc +0 -0
  379. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/snapshot-empty.cc +0 -0
  380. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/snapshot.h +0 -0
  381. data/ext/v8/upstream/3.1.8/src/spaces-inl.h +524 -0
  382. data/ext/v8/upstream/3.1.8/src/spaces.cc +3254 -0
  383. data/ext/v8/upstream/3.1.8/src/spaces.h +2362 -0
  384. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/splay-tree-inl.h +0 -0
  385. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/splay-tree.h +0 -0
  386. data/ext/v8/upstream/3.1.8/src/string-search.cc +40 -0
  387. data/ext/v8/upstream/3.1.8/src/string-search.h +567 -0
  388. data/ext/v8/upstream/3.1.8/src/string-stream.cc +584 -0
  389. data/ext/v8/upstream/3.1.8/src/string-stream.h +191 -0
  390. data/ext/v8/upstream/3.1.8/src/string.js +915 -0
  391. data/ext/v8/upstream/3.1.8/src/strtod.cc +440 -0
  392. data/ext/v8/upstream/3.1.8/src/strtod.h +40 -0
  393. data/ext/v8/upstream/3.1.8/src/stub-cache.cc +1878 -0
  394. data/ext/v8/upstream/3.1.8/src/stub-cache.h +849 -0
  395. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/third_party/valgrind/valgrind.h +0 -0
  396. data/ext/v8/upstream/3.1.8/src/token.cc +63 -0
  397. data/ext/v8/upstream/3.1.8/src/token.h +288 -0
  398. data/ext/v8/upstream/3.1.8/src/top.cc +1152 -0
  399. data/ext/v8/upstream/3.1.8/src/top.h +608 -0
  400. data/ext/v8/upstream/3.1.8/src/type-info.cc +406 -0
  401. data/ext/v8/upstream/3.1.8/src/type-info.h +283 -0
  402. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/unbound-queue-inl.h +0 -0
  403. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/unbound-queue.h +0 -0
  404. data/ext/v8/upstream/3.1.8/src/unicode-inl.h +238 -0
  405. data/ext/v8/upstream/3.1.8/src/unicode.cc +1624 -0
  406. data/ext/v8/upstream/3.1.8/src/unicode.h +280 -0
  407. data/ext/v8/upstream/3.1.8/src/uri.js +402 -0
  408. data/ext/v8/upstream/3.1.8/src/utils.cc +371 -0
  409. data/ext/v8/upstream/3.1.8/src/utils.h +793 -0
  410. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/v8-counters.cc +0 -0
  411. data/ext/v8/upstream/3.1.8/src/v8-counters.h +290 -0
  412. data/ext/v8/upstream/3.1.8/src/v8.cc +270 -0
  413. data/ext/v8/upstream/3.1.8/src/v8.h +127 -0
  414. data/ext/v8/upstream/3.1.8/src/v8checks.h +64 -0
  415. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/v8dll-main.cc +0 -0
  416. data/ext/v8/upstream/3.1.8/src/v8globals.h +480 -0
  417. data/ext/v8/upstream/3.1.8/src/v8natives.js +1252 -0
  418. data/ext/v8/upstream/3.1.8/src/v8preparserdll-main.cc +39 -0
  419. data/ext/v8/upstream/3.1.8/src/v8threads.cc +440 -0
  420. data/ext/v8/upstream/3.1.8/src/v8threads.h +157 -0
  421. data/ext/v8/upstream/3.1.8/src/v8utils.h +354 -0
  422. data/ext/v8/upstream/3.1.8/src/variables.cc +132 -0
  423. data/ext/v8/upstream/3.1.8/src/variables.h +212 -0
  424. data/ext/v8/upstream/3.1.8/src/version.cc +95 -0
  425. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/version.h +0 -0
  426. data/ext/v8/upstream/3.1.8/src/virtual-frame-heavy-inl.h +190 -0
  427. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-heavy.cc +0 -0
  428. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-inl.h +0 -0
  429. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-light-inl.h +0 -0
  430. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-light.cc +0 -0
  431. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame.cc +0 -0
  432. data/ext/v8/upstream/3.1.8/src/virtual-frame.h +59 -0
  433. data/ext/v8/upstream/3.1.8/src/vm-state-inl.h +134 -0
  434. data/ext/v8/upstream/3.1.8/src/vm-state.h +68 -0
  435. data/ext/v8/upstream/3.1.8/src/win32-headers.h +95 -0
  436. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64-inl.h +455 -0
  437. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.cc +3162 -0
  438. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.h +1584 -0
  439. data/ext/v8/upstream/3.1.8/src/x64/builtins-x64.cc +1492 -0
  440. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.cc +5150 -0
  441. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.h +519 -0
  442. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64-inl.h +46 -0
  443. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.cc +8835 -0
  444. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.h +750 -0
  445. data/ext/v8/upstream/3.1.8/src/x64/cpu-x64.cc +86 -0
  446. data/ext/v8/upstream/3.1.8/src/x64/debug-x64.cc +316 -0
  447. data/ext/v8/upstream/3.1.8/src/x64/deoptimizer-x64.cc +781 -0
  448. data/ext/v8/upstream/3.1.8/src/x64/disasm-x64.cc +1737 -0
  449. data/ext/v8/upstream/3.1.8/src/x64/frames-x64.cc +45 -0
  450. data/ext/v8/upstream/3.1.8/src/x64/frames-x64.h +130 -0
  451. data/ext/v8/upstream/3.1.8/src/x64/full-codegen-x64.cc +3984 -0
  452. data/ext/v8/upstream/3.1.8/src/x64/ic-x64.cc +1761 -0
  453. data/ext/v8/upstream/3.1.8/src/x64/jump-target-x64.cc +437 -0
  454. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.cc +3639 -0
  455. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.h +305 -0
  456. data/ext/v8/upstream/3.1.8/src/x64/lithium-gap-resolver-x64.cc +320 -0
  457. data/ext/v8/upstream/3.1.8/src/x64/lithium-gap-resolver-x64.h +74 -0
  458. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.cc +2044 -0
  459. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.h +2052 -0
  460. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.cc +2660 -0
  461. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.h +1852 -0
  462. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.cc +1382 -0
  463. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.h +278 -0
  464. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/register-allocator-x64-inl.h +0 -0
  465. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/register-allocator-x64.cc +0 -0
  466. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/register-allocator-x64.h +0 -0
  467. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/simulator-x64.cc +0 -0
  468. data/ext/v8/upstream/3.1.8/src/x64/simulator-x64.h +71 -0
  469. data/ext/v8/upstream/3.1.8/src/x64/stub-cache-x64.cc +3509 -0
  470. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.cc +1292 -0
  471. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.h +593 -0
  472. data/ext/v8/upstream/3.1.8/src/zone-inl.h +83 -0
  473. data/ext/v8/upstream/3.1.8/src/zone.cc +195 -0
  474. data/ext/v8/upstream/3.1.8/src/zone.h +233 -0
  475. data/ext/v8/upstream/3.1.8/tools/codemap.js +265 -0
  476. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/consarray.js +0 -0
  477. data/ext/v8/upstream/3.1.8/tools/csvparser.js +78 -0
  478. data/ext/v8/upstream/3.1.8/tools/disasm.py +92 -0
  479. data/ext/v8/upstream/3.1.8/tools/gc-nvp-trace-processor.py +328 -0
  480. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/generate-ten-powers.scm +0 -0
  481. data/ext/v8/upstream/3.1.8/tools/grokdump.py +840 -0
  482. data/ext/v8/upstream/3.1.8/tools/gyp/v8.gyp +869 -0
  483. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/js2c.py +0 -0
  484. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/jsmin.py +0 -0
  485. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/linux-tick-processor +0 -0
  486. data/ext/v8/upstream/3.1.8/tools/ll_prof.py +919 -0
  487. data/ext/v8/upstream/3.1.8/tools/logreader.js +185 -0
  488. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/mac-nm +0 -0
  489. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/mac-tick-processor +0 -0
  490. data/ext/v8/upstream/3.1.8/tools/oom_dump/README +31 -0
  491. data/ext/v8/upstream/3.1.8/tools/oom_dump/SConstruct +42 -0
  492. data/ext/v8/upstream/3.1.8/tools/oom_dump/oom_dump.cc +288 -0
  493. data/ext/v8/upstream/3.1.8/tools/presubmit.py +305 -0
  494. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/process-heap-prof.py +0 -0
  495. data/ext/v8/upstream/3.1.8/tools/profile.js +751 -0
  496. data/ext/v8/upstream/3.1.8/tools/profile_view.js +219 -0
  497. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/run-valgrind.py +0 -0
  498. data/ext/v8/upstream/3.1.8/tools/splaytree.js +316 -0
  499. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/stats-viewer.py +0 -0
  500. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/tickprocessor-driver.js +0 -0
  501. data/ext/v8/upstream/3.1.8/tools/tickprocessor.js +863 -0
  502. data/ext/v8/upstream/3.1.8/tools/utils.py +96 -0
  503. data/ext/v8/upstream/3.1.8/tools/visual_studio/README.txt +70 -0
  504. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/arm.vsprops +0 -0
  505. data/ext/v8/upstream/3.1.8/tools/visual_studio/common.vsprops +34 -0
  506. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8.vcproj +0 -0
  507. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8_arm.vcproj +0 -0
  508. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8_x64.vcproj +0 -0
  509. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8js2c.cmd +0 -0
  510. data/ext/v8/upstream/3.1.8/tools/visual_studio/debug.vsprops +17 -0
  511. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/ia32.vsprops +0 -0
  512. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/js2c.cmd +0 -0
  513. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/release.vsprops +0 -0
  514. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8.sln +0 -0
  515. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8.vcproj +0 -0
  516. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_arm.sln +0 -0
  517. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_arm.vcproj +0 -0
  518. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base.vcproj +1296 -0
  519. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_arm.vcproj +1234 -0
  520. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_x64.vcproj +1296 -0
  521. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_cctest.vcproj +0 -0
  522. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_cctest_arm.vcproj +0 -0
  523. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_cctest_x64.vcproj +0 -0
  524. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_mksnapshot.vcproj +0 -0
  525. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_mksnapshot_x64.vcproj +0 -0
  526. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_process_sample.vcproj +0 -0
  527. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_process_sample_arm.vcproj +0 -0
  528. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_process_sample_x64.vcproj +0 -0
  529. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_shell_sample.vcproj +147 -0
  530. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_shell_sample_arm.vcproj +147 -0
  531. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_shell_sample_x64.vcproj +163 -0
  532. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot.vcproj +0 -0
  533. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot_cc.vcproj +0 -0
  534. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot_cc_x64.vcproj +0 -0
  535. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot_x64.vcproj +0 -0
  536. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_x64.sln +0 -0
  537. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_x64.vcproj +0 -0
  538. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/x64.vsprops +0 -0
  539. data/ext/v8/upstream/3.1.8/tools/windows-tick-processor.bat +30 -0
  540. data/ext/v8/upstream/Makefile +2 -1
  541. data/ext/v8/v8_template.cpp +2 -2
  542. data/lib/v8/version.rb +1 -1
  543. data/spec/redjs/jsapi_spec.rb +2 -2
  544. metadata +552 -490
  545. data/ext/v8/upstream/2.3.3/.gitignore +0 -26
  546. data/ext/v8/upstream/2.3.3/AUTHORS +0 -31
  547. data/ext/v8/upstream/2.3.3/ChangeLog +0 -1916
  548. data/ext/v8/upstream/2.3.3/LICENSE +0 -55
  549. data/ext/v8/upstream/2.3.3/SConstruct +0 -1154
  550. data/ext/v8/upstream/2.3.3/include/v8-debug.h +0 -381
  551. data/ext/v8/upstream/2.3.3/include/v8-profiler.h +0 -353
  552. data/ext/v8/upstream/2.3.3/include/v8.h +0 -3616
  553. data/ext/v8/upstream/2.3.3/src/SConscript +0 -330
  554. data/ext/v8/upstream/2.3.3/src/accessors.cc +0 -661
  555. data/ext/v8/upstream/2.3.3/src/accessors.h +0 -114
  556. data/ext/v8/upstream/2.3.3/src/allocation.cc +0 -198
  557. data/ext/v8/upstream/2.3.3/src/allocation.h +0 -169
  558. data/ext/v8/upstream/2.3.3/src/api.cc +0 -4795
  559. data/ext/v8/upstream/2.3.3/src/api.h +0 -485
  560. data/ext/v8/upstream/2.3.3/src/apiutils.h +0 -69
  561. data/ext/v8/upstream/2.3.3/src/arguments.h +0 -96
  562. data/ext/v8/upstream/2.3.3/src/arm/assembler-arm-inl.h +0 -305
  563. data/ext/v8/upstream/2.3.3/src/arm/assembler-arm.cc +0 -2580
  564. data/ext/v8/upstream/2.3.3/src/arm/assembler-arm.h +0 -1275
  565. data/ext/v8/upstream/2.3.3/src/arm/builtins-arm.cc +0 -1320
  566. data/ext/v8/upstream/2.3.3/src/arm/codegen-arm-inl.h +0 -48
  567. data/ext/v8/upstream/2.3.3/src/arm/codegen-arm.cc +0 -11398
  568. data/ext/v8/upstream/2.3.3/src/arm/codegen-arm.h +0 -1102
  569. data/ext/v8/upstream/2.3.3/src/arm/constants-arm.cc +0 -154
  570. data/ext/v8/upstream/2.3.3/src/arm/constants-arm.h +0 -388
  571. data/ext/v8/upstream/2.3.3/src/arm/cpu-arm.cc +0 -142
  572. data/ext/v8/upstream/2.3.3/src/arm/debug-arm.cc +0 -309
  573. data/ext/v8/upstream/2.3.3/src/arm/disasm-arm.cc +0 -1459
  574. data/ext/v8/upstream/2.3.3/src/arm/fast-codegen-arm.cc +0 -241
  575. data/ext/v8/upstream/2.3.3/src/arm/frames-arm.cc +0 -123
  576. data/ext/v8/upstream/2.3.3/src/arm/frames-arm.h +0 -162
  577. data/ext/v8/upstream/2.3.3/src/arm/full-codegen-arm.cc +0 -3178
  578. data/ext/v8/upstream/2.3.3/src/arm/ic-arm.cc +0 -2258
  579. data/ext/v8/upstream/2.3.3/src/arm/jump-target-arm.cc +0 -164
  580. data/ext/v8/upstream/2.3.3/src/arm/macro-assembler-arm.cc +0 -1892
  581. data/ext/v8/upstream/2.3.3/src/arm/macro-assembler-arm.h +0 -727
  582. data/ext/v8/upstream/2.3.3/src/arm/regexp-macro-assembler-arm.cc +0 -1261
  583. data/ext/v8/upstream/2.3.3/src/arm/regexp-macro-assembler-arm.h +0 -266
  584. data/ext/v8/upstream/2.3.3/src/arm/simulator-arm.cc +0 -2822
  585. data/ext/v8/upstream/2.3.3/src/arm/simulator-arm.h +0 -361
  586. data/ext/v8/upstream/2.3.3/src/arm/stub-cache-arm.cc +0 -2387
  587. data/ext/v8/upstream/2.3.3/src/arm/virtual-frame-arm.cc +0 -834
  588. data/ext/v8/upstream/2.3.3/src/arm/virtual-frame-arm.h +0 -519
  589. data/ext/v8/upstream/2.3.3/src/array.js +0 -1127
  590. data/ext/v8/upstream/2.3.3/src/assembler.cc +0 -801
  591. data/ext/v8/upstream/2.3.3/src/assembler.h +0 -573
  592. data/ext/v8/upstream/2.3.3/src/ast-inl.h +0 -81
  593. data/ext/v8/upstream/2.3.3/src/ast.cc +0 -1152
  594. data/ext/v8/upstream/2.3.3/src/ast.h +0 -2106
  595. data/ext/v8/upstream/2.3.3/src/bootstrapper.cc +0 -1819
  596. data/ext/v8/upstream/2.3.3/src/builtins.cc +0 -1529
  597. data/ext/v8/upstream/2.3.3/src/builtins.h +0 -263
  598. data/ext/v8/upstream/2.3.3/src/bytecodes-irregexp.h +0 -104
  599. data/ext/v8/upstream/2.3.3/src/cached-powers.h +0 -119
  600. data/ext/v8/upstream/2.3.3/src/char-predicates-inl.h +0 -86
  601. data/ext/v8/upstream/2.3.3/src/checks.cc +0 -100
  602. data/ext/v8/upstream/2.3.3/src/checks.h +0 -310
  603. data/ext/v8/upstream/2.3.3/src/circular-queue.cc +0 -121
  604. data/ext/v8/upstream/2.3.3/src/code-stubs.cc +0 -177
  605. data/ext/v8/upstream/2.3.3/src/code-stubs.h +0 -177
  606. data/ext/v8/upstream/2.3.3/src/codegen-inl.h +0 -60
  607. data/ext/v8/upstream/2.3.3/src/codegen.cc +0 -516
  608. data/ext/v8/upstream/2.3.3/src/codegen.h +0 -897
  609. data/ext/v8/upstream/2.3.3/src/compilation-cache.cc +0 -562
  610. data/ext/v8/upstream/2.3.3/src/compilation-cache.h +0 -102
  611. data/ext/v8/upstream/2.3.3/src/compiler.cc +0 -654
  612. data/ext/v8/upstream/2.3.3/src/compiler.h +0 -299
  613. data/ext/v8/upstream/2.3.3/src/contexts.cc +0 -256
  614. data/ext/v8/upstream/2.3.3/src/contexts.h +0 -342
  615. data/ext/v8/upstream/2.3.3/src/conversions.cc +0 -1119
  616. data/ext/v8/upstream/2.3.3/src/conversions.h +0 -123
  617. data/ext/v8/upstream/2.3.3/src/counters.h +0 -239
  618. data/ext/v8/upstream/2.3.3/src/cpu-profiler-inl.h +0 -100
  619. data/ext/v8/upstream/2.3.3/src/cpu-profiler.cc +0 -538
  620. data/ext/v8/upstream/2.3.3/src/cpu-profiler.h +0 -285
  621. data/ext/v8/upstream/2.3.3/src/d8-debug.cc +0 -356
  622. data/ext/v8/upstream/2.3.3/src/d8-debug.h +0 -155
  623. data/ext/v8/upstream/2.3.3/src/d8.cc +0 -783
  624. data/ext/v8/upstream/2.3.3/src/d8.h +0 -227
  625. data/ext/v8/upstream/2.3.3/src/d8.js +0 -1683
  626. data/ext/v8/upstream/2.3.3/src/data-flow.cc +0 -758
  627. data/ext/v8/upstream/2.3.3/src/data-flow.h +0 -278
  628. data/ext/v8/upstream/2.3.3/src/date.js +0 -1059
  629. data/ext/v8/upstream/2.3.3/src/dateparser-inl.h +0 -123
  630. data/ext/v8/upstream/2.3.3/src/dateparser.h +0 -244
  631. data/ext/v8/upstream/2.3.3/src/debug-agent.cc +0 -427
  632. data/ext/v8/upstream/2.3.3/src/debug-agent.h +0 -129
  633. data/ext/v8/upstream/2.3.3/src/debug-debugger.js +0 -2227
  634. data/ext/v8/upstream/2.3.3/src/debug.cc +0 -3005
  635. data/ext/v8/upstream/2.3.3/src/debug.h +0 -993
  636. data/ext/v8/upstream/2.3.3/src/disassembler.cc +0 -312
  637. data/ext/v8/upstream/2.3.3/src/double.h +0 -169
  638. data/ext/v8/upstream/2.3.3/src/dtoa-config.c +0 -92
  639. data/ext/v8/upstream/2.3.3/src/dtoa.cc +0 -77
  640. data/ext/v8/upstream/2.3.3/src/dtoa.h +0 -81
  641. data/ext/v8/upstream/2.3.3/src/execution.cc +0 -809
  642. data/ext/v8/upstream/2.3.3/src/execution.h +0 -336
  643. data/ext/v8/upstream/2.3.3/src/factory.cc +0 -1003
  644. data/ext/v8/upstream/2.3.3/src/factory.h +0 -410
  645. data/ext/v8/upstream/2.3.3/src/fast-codegen.cc +0 -746
  646. data/ext/v8/upstream/2.3.3/src/fast-codegen.h +0 -161
  647. data/ext/v8/upstream/2.3.3/src/fast-dtoa.cc +0 -505
  648. data/ext/v8/upstream/2.3.3/src/fast-dtoa.h +0 -58
  649. data/ext/v8/upstream/2.3.3/src/flag-definitions.h +0 -455
  650. data/ext/v8/upstream/2.3.3/src/flags.cc +0 -551
  651. data/ext/v8/upstream/2.3.3/src/flags.h +0 -81
  652. data/ext/v8/upstream/2.3.3/src/flow-graph.cc +0 -763
  653. data/ext/v8/upstream/2.3.3/src/flow-graph.h +0 -180
  654. data/ext/v8/upstream/2.3.3/src/frame-element.h +0 -273
  655. data/ext/v8/upstream/2.3.3/src/frames-inl.h +0 -217
  656. data/ext/v8/upstream/2.3.3/src/frames.cc +0 -826
  657. data/ext/v8/upstream/2.3.3/src/frames.h +0 -682
  658. data/ext/v8/upstream/2.3.3/src/full-codegen.cc +0 -1443
  659. data/ext/v8/upstream/2.3.3/src/full-codegen.h +0 -548
  660. data/ext/v8/upstream/2.3.3/src/func-name-inferrer.cc +0 -76
  661. data/ext/v8/upstream/2.3.3/src/func-name-inferrer.h +0 -135
  662. data/ext/v8/upstream/2.3.3/src/global-handles.cc +0 -520
  663. data/ext/v8/upstream/2.3.3/src/global-handles.h +0 -180
  664. data/ext/v8/upstream/2.3.3/src/globals.h +0 -669
  665. data/ext/v8/upstream/2.3.3/src/handles-inl.h +0 -76
  666. data/ext/v8/upstream/2.3.3/src/handles.cc +0 -825
  667. data/ext/v8/upstream/2.3.3/src/handles.h +0 -393
  668. data/ext/v8/upstream/2.3.3/src/hashmap.cc +0 -226
  669. data/ext/v8/upstream/2.3.3/src/hashmap.h +0 -120
  670. data/ext/v8/upstream/2.3.3/src/heap-inl.h +0 -493
  671. data/ext/v8/upstream/2.3.3/src/heap-profiler.cc +0 -779
  672. data/ext/v8/upstream/2.3.3/src/heap-profiler.h +0 -323
  673. data/ext/v8/upstream/2.3.3/src/heap.cc +0 -4994
  674. data/ext/v8/upstream/2.3.3/src/heap.h +0 -1984
  675. data/ext/v8/upstream/2.3.3/src/ia32/assembler-ia32-inl.h +0 -360
  676. data/ext/v8/upstream/2.3.3/src/ia32/assembler-ia32.cc +0 -2600
  677. data/ext/v8/upstream/2.3.3/src/ia32/assembler-ia32.h +0 -969
  678. data/ext/v8/upstream/2.3.3/src/ia32/builtins-ia32.cc +0 -1261
  679. data/ext/v8/upstream/2.3.3/src/ia32/codegen-ia32.cc +0 -13968
  680. data/ext/v8/upstream/2.3.3/src/ia32/codegen-ia32.h +0 -1097
  681. data/ext/v8/upstream/2.3.3/src/ia32/cpu-ia32.cc +0 -83
  682. data/ext/v8/upstream/2.3.3/src/ia32/debug-ia32.cc +0 -309
  683. data/ext/v8/upstream/2.3.3/src/ia32/disasm-ia32.cc +0 -1471
  684. data/ext/v8/upstream/2.3.3/src/ia32/fast-codegen-ia32.cc +0 -954
  685. data/ext/v8/upstream/2.3.3/src/ia32/fast-codegen-ia32.h +0 -155
  686. data/ext/v8/upstream/2.3.3/src/ia32/frames-ia32.cc +0 -115
  687. data/ext/v8/upstream/2.3.3/src/ia32/frames-ia32.h +0 -135
  688. data/ext/v8/upstream/2.3.3/src/ia32/full-codegen-ia32.cc +0 -3281
  689. data/ext/v8/upstream/2.3.3/src/ia32/ic-ia32.cc +0 -1966
  690. data/ext/v8/upstream/2.3.3/src/ia32/macro-assembler-ia32.cc +0 -1610
  691. data/ext/v8/upstream/2.3.3/src/ia32/macro-assembler-ia32.h +0 -610
  692. data/ext/v8/upstream/2.3.3/src/ia32/regexp-macro-assembler-ia32.cc +0 -1247
  693. data/ext/v8/upstream/2.3.3/src/ia32/regexp-macro-assembler-ia32.h +0 -214
  694. data/ext/v8/upstream/2.3.3/src/ia32/simulator-ia32.h +0 -62
  695. data/ext/v8/upstream/2.3.3/src/ia32/stub-cache-ia32.cc +0 -2750
  696. data/ext/v8/upstream/2.3.3/src/ia32/virtual-frame-ia32.cc +0 -1334
  697. data/ext/v8/upstream/2.3.3/src/ia32/virtual-frame-ia32.h +0 -627
  698. data/ext/v8/upstream/2.3.3/src/ic-inl.h +0 -120
  699. data/ext/v8/upstream/2.3.3/src/ic.cc +0 -1827
  700. data/ext/v8/upstream/2.3.3/src/ic.h +0 -515
  701. data/ext/v8/upstream/2.3.3/src/interpreter-irregexp.cc +0 -646
  702. data/ext/v8/upstream/2.3.3/src/json.js +0 -268
  703. data/ext/v8/upstream/2.3.3/src/jsregexp.cc +0 -5283
  704. data/ext/v8/upstream/2.3.3/src/jsregexp.h +0 -1463
  705. data/ext/v8/upstream/2.3.3/src/jump-target-heavy.cc +0 -429
  706. data/ext/v8/upstream/2.3.3/src/jump-target-heavy.h +0 -244
  707. data/ext/v8/upstream/2.3.3/src/jump-target-light.cc +0 -110
  708. data/ext/v8/upstream/2.3.3/src/jump-target-light.h +0 -192
  709. data/ext/v8/upstream/2.3.3/src/list-inl.h +0 -166
  710. data/ext/v8/upstream/2.3.3/src/list.h +0 -159
  711. data/ext/v8/upstream/2.3.3/src/liveedit-debugger.js +0 -1057
  712. data/ext/v8/upstream/2.3.3/src/liveedit.cc +0 -1480
  713. data/ext/v8/upstream/2.3.3/src/liveedit.h +0 -170
  714. data/ext/v8/upstream/2.3.3/src/log-utils.cc +0 -497
  715. data/ext/v8/upstream/2.3.3/src/log-utils.h +0 -289
  716. data/ext/v8/upstream/2.3.3/src/log.cc +0 -1561
  717. data/ext/v8/upstream/2.3.3/src/log.h +0 -384
  718. data/ext/v8/upstream/2.3.3/src/macro-assembler.h +0 -86
  719. data/ext/v8/upstream/2.3.3/src/macros.py +0 -177
  720. data/ext/v8/upstream/2.3.3/src/mark-compact.cc +0 -2330
  721. data/ext/v8/upstream/2.3.3/src/mark-compact.h +0 -451
  722. data/ext/v8/upstream/2.3.3/src/math.js +0 -264
  723. data/ext/v8/upstream/2.3.3/src/memory.h +0 -74
  724. data/ext/v8/upstream/2.3.3/src/messages.cc +0 -183
  725. data/ext/v8/upstream/2.3.3/src/messages.h +0 -113
  726. data/ext/v8/upstream/2.3.3/src/messages.js +0 -982
  727. data/ext/v8/upstream/2.3.3/src/mips/assembler-mips.h +0 -668
  728. data/ext/v8/upstream/2.3.3/src/mips/builtins-mips.cc +0 -205
  729. data/ext/v8/upstream/2.3.3/src/mips/codegen-mips.h +0 -434
  730. data/ext/v8/upstream/2.3.3/src/mips/debug-mips.cc +0 -131
  731. data/ext/v8/upstream/2.3.3/src/mips/frames-mips.cc +0 -102
  732. data/ext/v8/upstream/2.3.3/src/mips/ic-mips.cc +0 -220
  733. data/ext/v8/upstream/2.3.3/src/mips/simulator-mips.cc +0 -1651
  734. data/ext/v8/upstream/2.3.3/src/mips/simulator-mips.h +0 -311
  735. data/ext/v8/upstream/2.3.3/src/mips/stub-cache-mips.cc +0 -403
  736. data/ext/v8/upstream/2.3.3/src/mirror-debugger.js +0 -2380
  737. data/ext/v8/upstream/2.3.3/src/objects-debug.cc +0 -1366
  738. data/ext/v8/upstream/2.3.3/src/objects-inl.h +0 -3333
  739. data/ext/v8/upstream/2.3.3/src/objects.cc +0 -8820
  740. data/ext/v8/upstream/2.3.3/src/objects.h +0 -5373
  741. data/ext/v8/upstream/2.3.3/src/oprofile-agent.cc +0 -108
  742. data/ext/v8/upstream/2.3.3/src/oprofile-agent.h +0 -77
  743. data/ext/v8/upstream/2.3.3/src/parser.cc +0 -5207
  744. data/ext/v8/upstream/2.3.3/src/parser.h +0 -197
  745. data/ext/v8/upstream/2.3.3/src/platform-freebsd.cc +0 -667
  746. data/ext/v8/upstream/2.3.3/src/platform-linux.cc +0 -862
  747. data/ext/v8/upstream/2.3.3/src/platform-macos.cc +0 -665
  748. data/ext/v8/upstream/2.3.3/src/platform-nullos.cc +0 -454
  749. data/ext/v8/upstream/2.3.3/src/platform-openbsd.cc +0 -622
  750. data/ext/v8/upstream/2.3.3/src/platform-posix.cc +0 -362
  751. data/ext/v8/upstream/2.3.3/src/platform-solaris.cc +0 -653
  752. data/ext/v8/upstream/2.3.3/src/platform-win32.cc +0 -1911
  753. data/ext/v8/upstream/2.3.3/src/platform.h +0 -577
  754. data/ext/v8/upstream/2.3.3/src/powers-ten.h +0 -2461
  755. data/ext/v8/upstream/2.3.3/src/prettyprinter.cc +0 -1531
  756. data/ext/v8/upstream/2.3.3/src/prettyprinter.h +0 -221
  757. data/ext/v8/upstream/2.3.3/src/profile-generator-inl.h +0 -148
  758. data/ext/v8/upstream/2.3.3/src/profile-generator.cc +0 -1830
  759. data/ext/v8/upstream/2.3.3/src/profile-generator.h +0 -853
  760. data/ext/v8/upstream/2.3.3/src/property.cc +0 -96
  761. data/ext/v8/upstream/2.3.3/src/property.h +0 -315
  762. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-irregexp.cc +0 -464
  763. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-irregexp.h +0 -141
  764. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-tracer.cc +0 -356
  765. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-tracer.h +0 -103
  766. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler.cc +0 -261
  767. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler.h +0 -228
  768. data/ext/v8/upstream/2.3.3/src/regexp.js +0 -549
  769. data/ext/v8/upstream/2.3.3/src/rewriter.cc +0 -1038
  770. data/ext/v8/upstream/2.3.3/src/rewriter.h +0 -54
  771. data/ext/v8/upstream/2.3.3/src/runtime.cc +0 -10599
  772. data/ext/v8/upstream/2.3.3/src/runtime.h +0 -459
  773. data/ext/v8/upstream/2.3.3/src/runtime.js +0 -629
  774. data/ext/v8/upstream/2.3.3/src/scanner.cc +0 -1346
  775. data/ext/v8/upstream/2.3.3/src/scanner.h +0 -503
  776. data/ext/v8/upstream/2.3.3/src/scopeinfo.cc +0 -637
  777. data/ext/v8/upstream/2.3.3/src/scopeinfo.h +0 -233
  778. data/ext/v8/upstream/2.3.3/src/scopes.cc +0 -962
  779. data/ext/v8/upstream/2.3.3/src/scopes.h +0 -400
  780. data/ext/v8/upstream/2.3.3/src/serialize.cc +0 -1461
  781. data/ext/v8/upstream/2.3.3/src/serialize.h +0 -581
  782. data/ext/v8/upstream/2.3.3/src/spaces-inl.h +0 -483
  783. data/ext/v8/upstream/2.3.3/src/spaces.cc +0 -2901
  784. data/ext/v8/upstream/2.3.3/src/spaces.h +0 -2197
  785. data/ext/v8/upstream/2.3.3/src/string-stream.cc +0 -584
  786. data/ext/v8/upstream/2.3.3/src/string-stream.h +0 -189
  787. data/ext/v8/upstream/2.3.3/src/string.js +0 -1006
  788. data/ext/v8/upstream/2.3.3/src/stub-cache.cc +0 -1379
  789. data/ext/v8/upstream/2.3.3/src/stub-cache.h +0 -756
  790. data/ext/v8/upstream/2.3.3/src/third_party/dtoa/COPYING +0 -15
  791. data/ext/v8/upstream/2.3.3/src/third_party/dtoa/dtoa.c +0 -3334
  792. data/ext/v8/upstream/2.3.3/src/token.cc +0 -56
  793. data/ext/v8/upstream/2.3.3/src/token.h +0 -270
  794. data/ext/v8/upstream/2.3.3/src/top.cc +0 -1067
  795. data/ext/v8/upstream/2.3.3/src/top.h +0 -463
  796. data/ext/v8/upstream/2.3.3/src/type-info.cc +0 -53
  797. data/ext/v8/upstream/2.3.3/src/type-info.h +0 -244
  798. data/ext/v8/upstream/2.3.3/src/unicode-inl.h +0 -238
  799. data/ext/v8/upstream/2.3.3/src/unicode.cc +0 -749
  800. data/ext/v8/upstream/2.3.3/src/unicode.h +0 -279
  801. data/ext/v8/upstream/2.3.3/src/uri.js +0 -415
  802. data/ext/v8/upstream/2.3.3/src/utils.cc +0 -285
  803. data/ext/v8/upstream/2.3.3/src/utils.h +0 -745
  804. data/ext/v8/upstream/2.3.3/src/v8-counters.h +0 -250
  805. data/ext/v8/upstream/2.3.3/src/v8.cc +0 -228
  806. data/ext/v8/upstream/2.3.3/src/v8.h +0 -121
  807. data/ext/v8/upstream/2.3.3/src/v8natives.js +0 -1188
  808. data/ext/v8/upstream/2.3.3/src/v8threads.cc +0 -461
  809. data/ext/v8/upstream/2.3.3/src/v8threads.h +0 -159
  810. data/ext/v8/upstream/2.3.3/src/variables.cc +0 -119
  811. data/ext/v8/upstream/2.3.3/src/variables.h +0 -205
  812. data/ext/v8/upstream/2.3.3/src/version.cc +0 -88
  813. data/ext/v8/upstream/2.3.3/src/virtual-frame-heavy-inl.h +0 -192
  814. data/ext/v8/upstream/2.3.3/src/virtual-frame.h +0 -46
  815. data/ext/v8/upstream/2.3.3/src/vm-state-inl.h +0 -137
  816. data/ext/v8/upstream/2.3.3/src/vm-state.cc +0 -39
  817. data/ext/v8/upstream/2.3.3/src/vm-state.h +0 -77
  818. data/ext/v8/upstream/2.3.3/src/x64/assembler-x64-inl.h +0 -400
  819. data/ext/v8/upstream/2.3.3/src/x64/assembler-x64.cc +0 -2963
  820. data/ext/v8/upstream/2.3.3/src/x64/assembler-x64.h +0 -1438
  821. data/ext/v8/upstream/2.3.3/src/x64/builtins-x64.cc +0 -1296
  822. data/ext/v8/upstream/2.3.3/src/x64/codegen-x64-inl.h +0 -46
  823. data/ext/v8/upstream/2.3.3/src/x64/codegen-x64.cc +0 -12491
  824. data/ext/v8/upstream/2.3.3/src/x64/codegen-x64.h +0 -1090
  825. data/ext/v8/upstream/2.3.3/src/x64/cpu-x64.cc +0 -83
  826. data/ext/v8/upstream/2.3.3/src/x64/debug-x64.cc +0 -267
  827. data/ext/v8/upstream/2.3.3/src/x64/disasm-x64.cc +0 -1696
  828. data/ext/v8/upstream/2.3.3/src/x64/fast-codegen-x64.cc +0 -250
  829. data/ext/v8/upstream/2.3.3/src/x64/frames-x64.cc +0 -113
  830. data/ext/v8/upstream/2.3.3/src/x64/frames-x64.h +0 -125
  831. data/ext/v8/upstream/2.3.3/src/x64/full-codegen-x64.cc +0 -3270
  832. data/ext/v8/upstream/2.3.3/src/x64/ic-x64.cc +0 -1907
  833. data/ext/v8/upstream/2.3.3/src/x64/jump-target-x64.cc +0 -437
  834. data/ext/v8/upstream/2.3.3/src/x64/macro-assembler-x64.cc +0 -2793
  835. data/ext/v8/upstream/2.3.3/src/x64/macro-assembler-x64.h +0 -916
  836. data/ext/v8/upstream/2.3.3/src/x64/regexp-macro-assembler-x64.cc +0 -1374
  837. data/ext/v8/upstream/2.3.3/src/x64/regexp-macro-assembler-x64.h +0 -277
  838. data/ext/v8/upstream/2.3.3/src/x64/simulator-x64.h +0 -63
  839. data/ext/v8/upstream/2.3.3/src/x64/stub-cache-x64.cc +0 -2560
  840. data/ext/v8/upstream/2.3.3/src/x64/virtual-frame-x64.cc +0 -1264
  841. data/ext/v8/upstream/2.3.3/src/x64/virtual-frame-x64.h +0 -590
  842. data/ext/v8/upstream/2.3.3/src/zone-inl.h +0 -82
  843. data/ext/v8/upstream/2.3.3/src/zone.cc +0 -194
  844. data/ext/v8/upstream/2.3.3/src/zone.h +0 -221
  845. data/ext/v8/upstream/2.3.3/tools/codemap.js +0 -270
  846. data/ext/v8/upstream/2.3.3/tools/csvparser.js +0 -83
  847. data/ext/v8/upstream/2.3.3/tools/gc-nvp-trace-processor.py +0 -317
  848. data/ext/v8/upstream/2.3.3/tools/gyp/v8.gyp +0 -749
  849. data/ext/v8/upstream/2.3.3/tools/linux-tick-processor.py +0 -78
  850. data/ext/v8/upstream/2.3.3/tools/logreader.js +0 -338
  851. data/ext/v8/upstream/2.3.3/tools/oprofile/annotate +0 -7
  852. data/ext/v8/upstream/2.3.3/tools/oprofile/common +0 -19
  853. data/ext/v8/upstream/2.3.3/tools/oprofile/dump +0 -7
  854. data/ext/v8/upstream/2.3.3/tools/oprofile/report +0 -7
  855. data/ext/v8/upstream/2.3.3/tools/oprofile/reset +0 -7
  856. data/ext/v8/upstream/2.3.3/tools/oprofile/run +0 -14
  857. data/ext/v8/upstream/2.3.3/tools/oprofile/shutdown +0 -7
  858. data/ext/v8/upstream/2.3.3/tools/oprofile/start +0 -7
  859. data/ext/v8/upstream/2.3.3/tools/presubmit.py +0 -299
  860. data/ext/v8/upstream/2.3.3/tools/profile.js +0 -691
  861. data/ext/v8/upstream/2.3.3/tools/profile_view.js +0 -224
  862. data/ext/v8/upstream/2.3.3/tools/splaytree.js +0 -322
  863. data/ext/v8/upstream/2.3.3/tools/splaytree.py +0 -226
  864. data/ext/v8/upstream/2.3.3/tools/tickprocessor.js +0 -862
  865. data/ext/v8/upstream/2.3.3/tools/tickprocessor.py +0 -571
  866. data/ext/v8/upstream/2.3.3/tools/utils.py +0 -88
  867. data/ext/v8/upstream/2.3.3/tools/visual_studio/README.txt +0 -71
  868. data/ext/v8/upstream/2.3.3/tools/visual_studio/common.vsprops +0 -34
  869. data/ext/v8/upstream/2.3.3/tools/visual_studio/debug.vsprops +0 -17
  870. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_base.vcproj +0 -1143
  871. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_base_arm.vcproj +0 -1115
  872. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_base_x64.vcproj +0 -1096
  873. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_shell_sample.vcproj +0 -145
  874. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_shell_sample_arm.vcproj +0 -145
  875. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_shell_sample_x64.vcproj +0 -161
  876. data/ext/v8/upstream/2.3.3/tools/windows-tick-processor.bat +0 -29
  877. data/ext/v8/upstream/2.3.3/tools/windows-tick-processor.py +0 -137
@@ -0,0 +1,2660 @@
1
+ // Copyright 2011 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+ #include "v8.h"
29
+
30
+ #if defined(V8_TARGET_ARCH_X64)
31
+
32
+ #include "bootstrapper.h"
33
+ #include "codegen-inl.h"
34
+ #include "assembler-x64.h"
35
+ #include "macro-assembler-x64.h"
36
+ #include "serialize.h"
37
+ #include "debug.h"
38
+ #include "heap.h"
39
+
40
+ namespace v8 {
41
+ namespace internal {
42
+
43
+ MacroAssembler::MacroAssembler(void* buffer, int size)
44
+ : Assembler(buffer, size),
45
+ generating_stub_(false),
46
+ allow_stub_calls_(true),
47
+ code_object_(Heap::undefined_value()) {
48
+ }
49
+
50
+
51
+ void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
52
+ movq(destination, Operand(kRootRegister, index << kPointerSizeLog2));
53
+ }
54
+
55
+
56
+ void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
57
+ movq(Operand(kRootRegister, index << kPointerSizeLog2), source);
58
+ }
59
+
60
+
61
+ void MacroAssembler::PushRoot(Heap::RootListIndex index) {
62
+ push(Operand(kRootRegister, index << kPointerSizeLog2));
63
+ }
64
+
65
+
66
+ void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
67
+ cmpq(with, Operand(kRootRegister, index << kPointerSizeLog2));
68
+ }
69
+
70
+
71
+ void MacroAssembler::CompareRoot(const Operand& with,
72
+ Heap::RootListIndex index) {
73
+ ASSERT(!with.AddressUsesRegister(kScratchRegister));
74
+ LoadRoot(kScratchRegister, index);
75
+ cmpq(with, kScratchRegister);
76
+ }
77
+
78
+
79
+ void MacroAssembler::RecordWriteHelper(Register object,
80
+ Register addr,
81
+ Register scratch) {
82
+ if (FLAG_debug_code) {
83
+ // Check that the object is not in new space.
84
+ NearLabel not_in_new_space;
85
+ InNewSpace(object, scratch, not_equal, &not_in_new_space);
86
+ Abort("new-space object passed to RecordWriteHelper");
87
+ bind(&not_in_new_space);
88
+ }
89
+
90
+ // Compute the page start address from the heap object pointer, and reuse
91
+ // the 'object' register for it.
92
+ and_(object, Immediate(~Page::kPageAlignmentMask));
93
+
94
+ // Compute number of region covering addr. See Page::GetRegionNumberForAddress
95
+ // method for more details.
96
+ shrl(addr, Immediate(Page::kRegionSizeLog2));
97
+ andl(addr, Immediate(Page::kPageAlignmentMask >> Page::kRegionSizeLog2));
98
+
99
+ // Set dirty mark for region.
100
+ bts(Operand(object, Page::kDirtyFlagOffset), addr);
101
+ }
102
+
103
+
104
+ void MacroAssembler::RecordWrite(Register object,
105
+ int offset,
106
+ Register value,
107
+ Register index) {
108
+ // The compiled code assumes that record write doesn't change the
109
+ // context register, so we check that none of the clobbered
110
+ // registers are rsi.
111
+ ASSERT(!object.is(rsi) && !value.is(rsi) && !index.is(rsi));
112
+
113
+ // First, check if a write barrier is even needed. The tests below
114
+ // catch stores of Smis and stores into young gen.
115
+ Label done;
116
+ JumpIfSmi(value, &done);
117
+
118
+ RecordWriteNonSmi(object, offset, value, index);
119
+ bind(&done);
120
+
121
+ // Clobber all input registers when running with the debug-code flag
122
+ // turned on to provoke errors. This clobbering repeats the
123
+ // clobbering done inside RecordWriteNonSmi but it's necessary to
124
+ // avoid having the fast case for smis leave the registers
125
+ // unchanged.
126
+ if (FLAG_debug_code) {
127
+ movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
128
+ movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
129
+ movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
130
+ }
131
+ }
132
+
133
+
134
+ void MacroAssembler::RecordWrite(Register object,
135
+ Register address,
136
+ Register value) {
137
+ // The compiled code assumes that record write doesn't change the
138
+ // context register, so we check that none of the clobbered
139
+ // registers are rsi.
140
+ ASSERT(!object.is(rsi) && !value.is(rsi) && !address.is(rsi));
141
+
142
+ // First, check if a write barrier is even needed. The tests below
143
+ // catch stores of Smis and stores into young gen.
144
+ Label done;
145
+ JumpIfSmi(value, &done);
146
+
147
+ InNewSpace(object, value, equal, &done);
148
+
149
+ RecordWriteHelper(object, address, value);
150
+
151
+ bind(&done);
152
+
153
+ // Clobber all input registers when running with the debug-code flag
154
+ // turned on to provoke errors.
155
+ if (FLAG_debug_code) {
156
+ movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
157
+ movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
158
+ movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
159
+ }
160
+ }
161
+
162
+
163
+ void MacroAssembler::RecordWriteNonSmi(Register object,
164
+ int offset,
165
+ Register scratch,
166
+ Register index) {
167
+ Label done;
168
+
169
+ if (FLAG_debug_code) {
170
+ NearLabel okay;
171
+ JumpIfNotSmi(object, &okay);
172
+ Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis");
173
+ bind(&okay);
174
+
175
+ if (offset == 0) {
176
+ // index must be int32.
177
+ Register tmp = index.is(rax) ? rbx : rax;
178
+ push(tmp);
179
+ movl(tmp, index);
180
+ cmpq(tmp, index);
181
+ Check(equal, "Index register for RecordWrite must be untagged int32.");
182
+ pop(tmp);
183
+ }
184
+ }
185
+
186
+ // Test that the object address is not in the new space. We cannot
187
+ // update page dirty marks for new space pages.
188
+ InNewSpace(object, scratch, equal, &done);
189
+
190
+ // The offset is relative to a tagged or untagged HeapObject pointer,
191
+ // so either offset or offset + kHeapObjectTag must be a
192
+ // multiple of kPointerSize.
193
+ ASSERT(IsAligned(offset, kPointerSize) ||
194
+ IsAligned(offset + kHeapObjectTag, kPointerSize));
195
+
196
+ Register dst = index;
197
+ if (offset != 0) {
198
+ lea(dst, Operand(object, offset));
199
+ } else {
200
+ // array access: calculate the destination address in the same manner as
201
+ // KeyedStoreIC::GenerateGeneric.
202
+ lea(dst, FieldOperand(object,
203
+ index,
204
+ times_pointer_size,
205
+ FixedArray::kHeaderSize));
206
+ }
207
+ RecordWriteHelper(object, dst, scratch);
208
+
209
+ bind(&done);
210
+
211
+ // Clobber all input registers when running with the debug-code flag
212
+ // turned on to provoke errors.
213
+ if (FLAG_debug_code) {
214
+ movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
215
+ movq(scratch, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
216
+ movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
217
+ }
218
+ }
219
+
220
+ void MacroAssembler::Assert(Condition cc, const char* msg) {
221
+ if (FLAG_debug_code) Check(cc, msg);
222
+ }
223
+
224
+
225
+ void MacroAssembler::AssertFastElements(Register elements) {
226
+ if (FLAG_debug_code) {
227
+ NearLabel ok;
228
+ CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
229
+ Heap::kFixedArrayMapRootIndex);
230
+ j(equal, &ok);
231
+ CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
232
+ Heap::kFixedCOWArrayMapRootIndex);
233
+ j(equal, &ok);
234
+ Abort("JSObject with fast elements map has slow elements");
235
+ bind(&ok);
236
+ }
237
+ }
238
+
239
+
240
+ void MacroAssembler::Check(Condition cc, const char* msg) {
241
+ NearLabel L;
242
+ j(cc, &L);
243
+ Abort(msg);
244
+ // will not return here
245
+ bind(&L);
246
+ }
247
+
248
+
249
+ void MacroAssembler::CheckStackAlignment() {
250
+ int frame_alignment = OS::ActivationFrameAlignment();
251
+ int frame_alignment_mask = frame_alignment - 1;
252
+ if (frame_alignment > kPointerSize) {
253
+ ASSERT(IsPowerOf2(frame_alignment));
254
+ NearLabel alignment_as_expected;
255
+ testq(rsp, Immediate(frame_alignment_mask));
256
+ j(zero, &alignment_as_expected);
257
+ // Abort if stack is not aligned.
258
+ int3();
259
+ bind(&alignment_as_expected);
260
+ }
261
+ }
262
+
263
+
264
+ void MacroAssembler::NegativeZeroTest(Register result,
265
+ Register op,
266
+ Label* then_label) {
267
+ NearLabel ok;
268
+ testl(result, result);
269
+ j(not_zero, &ok);
270
+ testl(op, op);
271
+ j(sign, then_label);
272
+ bind(&ok);
273
+ }
274
+
275
+
276
+ void MacroAssembler::Abort(const char* msg) {
277
+ // We want to pass the msg string like a smi to avoid GC
278
+ // problems, however msg is not guaranteed to be aligned
279
+ // properly. Instead, we pass an aligned pointer that is
280
+ // a proper v8 smi, but also pass the alignment difference
281
+ // from the real pointer as a smi.
282
+ intptr_t p1 = reinterpret_cast<intptr_t>(msg);
283
+ intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
284
+ // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
285
+ ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
286
+ #ifdef DEBUG
287
+ if (msg != NULL) {
288
+ RecordComment("Abort message: ");
289
+ RecordComment(msg);
290
+ }
291
+ #endif
292
+ // Disable stub call restrictions to always allow calls to abort.
293
+ AllowStubCallsScope allow_scope(this, true);
294
+
295
+ push(rax);
296
+ movq(kScratchRegister, p0, RelocInfo::NONE);
297
+ push(kScratchRegister);
298
+ movq(kScratchRegister,
299
+ reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
300
+ RelocInfo::NONE);
301
+ push(kScratchRegister);
302
+ CallRuntime(Runtime::kAbort, 2);
303
+ // will not return here
304
+ int3();
305
+ }
306
+
307
+
308
+ void MacroAssembler::CallStub(CodeStub* stub) {
309
+ ASSERT(allow_stub_calls()); // calls are not allowed in some stubs
310
+ Call(stub->GetCode(), RelocInfo::CODE_TARGET);
311
+ }
312
+
313
+
314
+ MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
315
+ ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
316
+ MaybeObject* result = stub->TryGetCode();
317
+ if (!result->IsFailure()) {
318
+ call(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
319
+ RelocInfo::CODE_TARGET);
320
+ }
321
+ return result;
322
+ }
323
+
324
+
325
+ void MacroAssembler::TailCallStub(CodeStub* stub) {
326
+ ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
327
+ Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
328
+ }
329
+
330
+
331
+ MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) {
332
+ ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
333
+ MaybeObject* result = stub->TryGetCode();
334
+ if (!result->IsFailure()) {
335
+ jmp(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
336
+ RelocInfo::CODE_TARGET);
337
+ }
338
+ return result;
339
+ }
340
+
341
+
342
+ void MacroAssembler::StubReturn(int argc) {
343
+ ASSERT(argc >= 1 && generating_stub());
344
+ ret((argc - 1) * kPointerSize);
345
+ }
346
+
347
+
348
+ void MacroAssembler::IllegalOperation(int num_arguments) {
349
+ if (num_arguments > 0) {
350
+ addq(rsp, Immediate(num_arguments * kPointerSize));
351
+ }
352
+ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
353
+ }
354
+
355
+
356
+ void MacroAssembler::IndexFromHash(Register hash, Register index) {
357
+ // The assert checks that the constants for the maximum number of digits
358
+ // for an array index cached in the hash field and the number of bits
359
+ // reserved for it does not conflict.
360
+ ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
361
+ (1 << String::kArrayIndexValueBits));
362
+ // We want the smi-tagged index in key. Even if we subsequently go to
363
+ // the slow case, converting the key to a smi is always valid.
364
+ // key: string key
365
+ // hash: key's hash field, including its array index value.
366
+ and_(hash, Immediate(String::kArrayIndexValueMask));
367
+ shr(hash, Immediate(String::kHashShift));
368
+ // Here we actually clobber the key which will be used if calling into
369
+ // runtime later. However as the new key is the numeric value of a string key
370
+ // there is no difference in using either key.
371
+ Integer32ToSmi(index, hash);
372
+ }
373
+
374
+
375
+ void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
376
+ CallRuntime(Runtime::FunctionForId(id), num_arguments);
377
+ }
378
+
379
+
380
+ void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
381
+ Runtime::Function* function = Runtime::FunctionForId(id);
382
+ Set(rax, function->nargs);
383
+ movq(rbx, ExternalReference(function));
384
+ CEntryStub ces(1);
385
+ ces.SaveDoubles();
386
+ CallStub(&ces);
387
+ }
388
+
389
+
390
+ MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
391
+ int num_arguments) {
392
+ return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
393
+ }
394
+
395
+
396
+ void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
397
+ // If the expected number of arguments of the runtime function is
398
+ // constant, we check that the actual number of arguments match the
399
+ // expectation.
400
+ if (f->nargs >= 0 && f->nargs != num_arguments) {
401
+ IllegalOperation(num_arguments);
402
+ return;
403
+ }
404
+
405
+ // TODO(1236192): Most runtime routines don't need the number of
406
+ // arguments passed in because it is constant. At some point we
407
+ // should remove this need and make the runtime routine entry code
408
+ // smarter.
409
+ Set(rax, num_arguments);
410
+ movq(rbx, ExternalReference(f));
411
+ CEntryStub ces(f->result_size);
412
+ CallStub(&ces);
413
+ }
414
+
415
+
416
+ MaybeObject* MacroAssembler::TryCallRuntime(Runtime::Function* f,
417
+ int num_arguments) {
418
+ if (f->nargs >= 0 && f->nargs != num_arguments) {
419
+ IllegalOperation(num_arguments);
420
+ // Since we did not call the stub, there was no allocation failure.
421
+ // Return some non-failure object.
422
+ return Heap::undefined_value();
423
+ }
424
+
425
+ // TODO(1236192): Most runtime routines don't need the number of
426
+ // arguments passed in because it is constant. At some point we
427
+ // should remove this need and make the runtime routine entry code
428
+ // smarter.
429
+ Set(rax, num_arguments);
430
+ movq(rbx, ExternalReference(f));
431
+ CEntryStub ces(f->result_size);
432
+ return TryCallStub(&ces);
433
+ }
434
+
435
+
436
+ void MacroAssembler::CallExternalReference(const ExternalReference& ext,
437
+ int num_arguments) {
438
+ Set(rax, num_arguments);
439
+ movq(rbx, ext);
440
+
441
+ CEntryStub stub(1);
442
+ CallStub(&stub);
443
+ }
444
+
445
+
446
+ void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
447
+ int num_arguments,
448
+ int result_size) {
449
+ // ----------- S t a t e -------------
450
+ // -- rsp[0] : return address
451
+ // -- rsp[8] : argument num_arguments - 1
452
+ // ...
453
+ // -- rsp[8 * num_arguments] : argument 0 (receiver)
454
+ // -----------------------------------
455
+
456
+ // TODO(1236192): Most runtime routines don't need the number of
457
+ // arguments passed in because it is constant. At some point we
458
+ // should remove this need and make the runtime routine entry code
459
+ // smarter.
460
+ Set(rax, num_arguments);
461
+ JumpToExternalReference(ext, result_size);
462
+ }
463
+
464
+
465
+ MaybeObject* MacroAssembler::TryTailCallExternalReference(
466
+ const ExternalReference& ext, int num_arguments, int result_size) {
467
+ // ----------- S t a t e -------------
468
+ // -- rsp[0] : return address
469
+ // -- rsp[8] : argument num_arguments - 1
470
+ // ...
471
+ // -- rsp[8 * num_arguments] : argument 0 (receiver)
472
+ // -----------------------------------
473
+
474
+ // TODO(1236192): Most runtime routines don't need the number of
475
+ // arguments passed in because it is constant. At some point we
476
+ // should remove this need and make the runtime routine entry code
477
+ // smarter.
478
+ Set(rax, num_arguments);
479
+ return TryJumpToExternalReference(ext, result_size);
480
+ }
481
+
482
+
483
+ void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
484
+ int num_arguments,
485
+ int result_size) {
486
+ TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
487
+ }
488
+
489
+
490
+ MaybeObject* MacroAssembler::TryTailCallRuntime(Runtime::FunctionId fid,
491
+ int num_arguments,
492
+ int result_size) {
493
+ return TryTailCallExternalReference(ExternalReference(fid),
494
+ num_arguments,
495
+ result_size);
496
+ }
497
+
498
+
499
+ static int Offset(ExternalReference ref0, ExternalReference ref1) {
500
+ int64_t offset = (ref0.address() - ref1.address());
501
+ // Check that fits into int.
502
+ ASSERT(static_cast<int>(offset) == offset);
503
+ return static_cast<int>(offset);
504
+ }
505
+
506
+
507
+ void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) {
508
+ #ifdef _WIN64
509
+ // We need to prepare a slot for result handle on stack and put
510
+ // a pointer to it into 1st arg register.
511
+ EnterApiExitFrame(arg_stack_space + 1);
512
+
513
+ // rcx must be used to pass the pointer to the return value slot.
514
+ lea(rcx, StackSpaceOperand(arg_stack_space));
515
+ #else
516
+ EnterApiExitFrame(arg_stack_space);
517
+ #endif
518
+ }
519
+
520
+
521
+ MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(
522
+ ApiFunction* function, int stack_space) {
523
+ Label empty_result;
524
+ Label prologue;
525
+ Label promote_scheduled_exception;
526
+ Label delete_allocated_handles;
527
+ Label leave_exit_frame;
528
+ Label write_back;
529
+
530
+ ExternalReference next_address =
531
+ ExternalReference::handle_scope_next_address();
532
+ const int kNextOffset = 0;
533
+ const int kLimitOffset = Offset(
534
+ ExternalReference::handle_scope_limit_address(),
535
+ next_address);
536
+ const int kLevelOffset = Offset(
537
+ ExternalReference::handle_scope_level_address(),
538
+ next_address);
539
+ ExternalReference scheduled_exception_address =
540
+ ExternalReference::scheduled_exception_address();
541
+
542
+ // Allocate HandleScope in callee-save registers.
543
+ Register prev_next_address_reg = r14;
544
+ Register prev_limit_reg = rbx;
545
+ Register base_reg = r12;
546
+ movq(base_reg, next_address);
547
+ movq(prev_next_address_reg, Operand(base_reg, kNextOffset));
548
+ movq(prev_limit_reg, Operand(base_reg, kLimitOffset));
549
+ addl(Operand(base_reg, kLevelOffset), Immediate(1));
550
+ // Call the api function!
551
+ movq(rax,
552
+ reinterpret_cast<int64_t>(function->address()),
553
+ RelocInfo::RUNTIME_ENTRY);
554
+ call(rax);
555
+
556
+ #ifdef _WIN64
557
+ // rax keeps a pointer to v8::Handle, unpack it.
558
+ movq(rax, Operand(rax, 0));
559
+ #endif
560
+ // Check if the result handle holds 0.
561
+ testq(rax, rax);
562
+ j(zero, &empty_result);
563
+ // It was non-zero. Dereference to get the result value.
564
+ movq(rax, Operand(rax, 0));
565
+ bind(&prologue);
566
+
567
+ // No more valid handles (the result handle was the last one). Restore
568
+ // previous handle scope.
569
+ subl(Operand(base_reg, kLevelOffset), Immediate(1));
570
+ movq(Operand(base_reg, kNextOffset), prev_next_address_reg);
571
+ cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset));
572
+ j(not_equal, &delete_allocated_handles);
573
+ bind(&leave_exit_frame);
574
+
575
+ // Check if the function scheduled an exception.
576
+ movq(rsi, scheduled_exception_address);
577
+ Cmp(Operand(rsi, 0), Factory::the_hole_value());
578
+ j(not_equal, &promote_scheduled_exception);
579
+
580
+ LeaveApiExitFrame();
581
+ ret(stack_space * kPointerSize);
582
+
583
+ bind(&promote_scheduled_exception);
584
+ MaybeObject* result = TryTailCallRuntime(Runtime::kPromoteScheduledException,
585
+ 0, 1);
586
+ if (result->IsFailure()) {
587
+ return result;
588
+ }
589
+
590
+ bind(&empty_result);
591
+ // It was zero; the result is undefined.
592
+ Move(rax, Factory::undefined_value());
593
+ jmp(&prologue);
594
+
595
+ // HandleScope limit has changed. Delete allocated extensions.
596
+ bind(&delete_allocated_handles);
597
+ movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
598
+ movq(prev_limit_reg, rax);
599
+ movq(rax, ExternalReference::delete_handle_scope_extensions());
600
+ call(rax);
601
+ movq(rax, prev_limit_reg);
602
+ jmp(&leave_exit_frame);
603
+
604
+ return result;
605
+ }
606
+
607
+
608
+ void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
609
+ int result_size) {
610
+ // Set the entry point and jump to the C entry runtime stub.
611
+ movq(rbx, ext);
612
+ CEntryStub ces(result_size);
613
+ jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
614
+ }
615
+
616
+
617
+ MaybeObject* MacroAssembler::TryJumpToExternalReference(
618
+ const ExternalReference& ext, int result_size) {
619
+ // Set the entry point and jump to the C entry runtime stub.
620
+ movq(rbx, ext);
621
+ CEntryStub ces(result_size);
622
+ return TryTailCallStub(&ces);
623
+ }
624
+
625
+
626
+ void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
627
+ InvokeFlag flag,
628
+ PostCallGenerator* post_call_generator) {
629
+ // Calls are not allowed in some stubs.
630
+ ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());
631
+
632
+ // Rely on the assertion to check that the number of provided
633
+ // arguments match the expected number of arguments. Fake a
634
+ // parameter count to avoid emitting code to do the check.
635
+ ParameterCount expected(0);
636
+ GetBuiltinEntry(rdx, id);
637
+ InvokeCode(rdx, expected, expected, flag, post_call_generator);
638
+ }
639
+
640
+
641
+ void MacroAssembler::GetBuiltinFunction(Register target,
642
+ Builtins::JavaScript id) {
643
+ // Load the builtins object into target register.
644
+ movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
645
+ movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
646
+ movq(target, FieldOperand(target,
647
+ JSBuiltinsObject::OffsetOfFunctionWithId(id)));
648
+ }
649
+
650
+
651
+ void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
652
+ ASSERT(!target.is(rdi));
653
+ // Load the JavaScript builtin function from the builtins object.
654
+ GetBuiltinFunction(rdi, id);
655
+ movq(target, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
656
+ }
657
+
658
+
659
+ void MacroAssembler::Set(Register dst, int64_t x) {
660
+ if (x == 0) {
661
+ xorl(dst, dst);
662
+ } else if (is_int32(x)) {
663
+ movq(dst, Immediate(static_cast<int32_t>(x)));
664
+ } else if (is_uint32(x)) {
665
+ movl(dst, Immediate(static_cast<uint32_t>(x)));
666
+ } else {
667
+ movq(dst, x, RelocInfo::NONE);
668
+ }
669
+ }
670
+
671
+ void MacroAssembler::Set(const Operand& dst, int64_t x) {
672
+ if (is_int32(x)) {
673
+ movq(dst, Immediate(static_cast<int32_t>(x)));
674
+ } else {
675
+ movq(kScratchRegister, x, RelocInfo::NONE);
676
+ movq(dst, kScratchRegister);
677
+ }
678
+ }
679
+
680
+ // ----------------------------------------------------------------------------
681
+ // Smi tagging, untagging and tag detection.
682
+
683
+ Register MacroAssembler::GetSmiConstant(Smi* source) {
684
+ int value = source->value();
685
+ if (value == 0) {
686
+ xorl(kScratchRegister, kScratchRegister);
687
+ return kScratchRegister;
688
+ }
689
+ if (value == 1) {
690
+ return kSmiConstantRegister;
691
+ }
692
+ LoadSmiConstant(kScratchRegister, source);
693
+ return kScratchRegister;
694
+ }
695
+
696
+ void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
697
+ if (FLAG_debug_code) {
698
+ movq(dst,
699
+ reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
700
+ RelocInfo::NONE);
701
+ cmpq(dst, kSmiConstantRegister);
702
+ if (allow_stub_calls()) {
703
+ Assert(equal, "Uninitialized kSmiConstantRegister");
704
+ } else {
705
+ NearLabel ok;
706
+ j(equal, &ok);
707
+ int3();
708
+ bind(&ok);
709
+ }
710
+ }
711
+ if (source->value() == 0) {
712
+ xorl(dst, dst);
713
+ return;
714
+ }
715
+ int value = source->value();
716
+ bool negative = value < 0;
717
+ unsigned int uvalue = negative ? -value : value;
718
+
719
+ switch (uvalue) {
720
+ case 9:
721
+ lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_8, 0));
722
+ break;
723
+ case 8:
724
+ xorl(dst, dst);
725
+ lea(dst, Operand(dst, kSmiConstantRegister, times_8, 0));
726
+ break;
727
+ case 4:
728
+ xorl(dst, dst);
729
+ lea(dst, Operand(dst, kSmiConstantRegister, times_4, 0));
730
+ break;
731
+ case 5:
732
+ lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_4, 0));
733
+ break;
734
+ case 3:
735
+ lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_2, 0));
736
+ break;
737
+ case 2:
738
+ lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0));
739
+ break;
740
+ case 1:
741
+ movq(dst, kSmiConstantRegister);
742
+ break;
743
+ case 0:
744
+ UNREACHABLE();
745
+ return;
746
+ default:
747
+ movq(dst, reinterpret_cast<uint64_t>(source), RelocInfo::NONE);
748
+ return;
749
+ }
750
+ if (negative) {
751
+ neg(dst);
752
+ }
753
+ }
754
+
755
+
756
+ void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
757
+ ASSERT_EQ(0, kSmiTag);
758
+ if (!dst.is(src)) {
759
+ movl(dst, src);
760
+ }
761
+ shl(dst, Immediate(kSmiShift));
762
+ }
763
+
764
+
765
+ void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
766
+ if (FLAG_debug_code) {
767
+ testb(dst, Immediate(0x01));
768
+ NearLabel ok;
769
+ j(zero, &ok);
770
+ if (allow_stub_calls()) {
771
+ Abort("Integer32ToSmiField writing to non-smi location");
772
+ } else {
773
+ int3();
774
+ }
775
+ bind(&ok);
776
+ }
777
+ ASSERT(kSmiShift % kBitsPerByte == 0);
778
+ movl(Operand(dst, kSmiShift / kBitsPerByte), src);
779
+ }
780
+
781
+
782
+ void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
783
+ Register src,
784
+ int constant) {
785
+ if (dst.is(src)) {
786
+ addq(dst, Immediate(constant));
787
+ } else {
788
+ lea(dst, Operand(src, constant));
789
+ }
790
+ shl(dst, Immediate(kSmiShift));
791
+ }
792
+
793
+
794
+ void MacroAssembler::SmiToInteger32(Register dst, Register src) {
795
+ ASSERT_EQ(0, kSmiTag);
796
+ if (!dst.is(src)) {
797
+ movq(dst, src);
798
+ }
799
+ shr(dst, Immediate(kSmiShift));
800
+ }
801
+
802
+
803
+ void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
804
+ movl(dst, Operand(src, kSmiShift / kBitsPerByte));
805
+ }
806
+
807
+
808
+ void MacroAssembler::SmiToInteger64(Register dst, Register src) {
809
+ ASSERT_EQ(0, kSmiTag);
810
+ if (!dst.is(src)) {
811
+ movq(dst, src);
812
+ }
813
+ sar(dst, Immediate(kSmiShift));
814
+ }
815
+
816
+
817
+ void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
818
+ movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
819
+ }
820
+
821
+
822
+ void MacroAssembler::SmiTest(Register src) {
823
+ testq(src, src);
824
+ }
825
+
826
+
827
+ void MacroAssembler::SmiCompare(Register dst, Register src) {
828
+ cmpq(dst, src);
829
+ }
830
+
831
+
832
+ void MacroAssembler::SmiCompare(Register dst, Smi* src) {
833
+ ASSERT(!dst.is(kScratchRegister));
834
+ if (src->value() == 0) {
835
+ testq(dst, dst);
836
+ } else {
837
+ Register constant_reg = GetSmiConstant(src);
838
+ cmpq(dst, constant_reg);
839
+ }
840
+ }
841
+
842
+
843
+ void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
844
+ cmpq(dst, src);
845
+ }
846
+
847
+
848
+ void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
849
+ cmpq(dst, src);
850
+ }
851
+
852
+
853
+ void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
854
+ cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
855
+ }
856
+
857
+
858
+ void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
859
+ cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
860
+ }
861
+
862
+
863
+ void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
864
+ Register src,
865
+ int power) {
866
+ ASSERT(power >= 0);
867
+ ASSERT(power < 64);
868
+ if (power == 0) {
869
+ SmiToInteger64(dst, src);
870
+ return;
871
+ }
872
+ if (!dst.is(src)) {
873
+ movq(dst, src);
874
+ }
875
+ if (power < kSmiShift) {
876
+ sar(dst, Immediate(kSmiShift - power));
877
+ } else if (power > kSmiShift) {
878
+ shl(dst, Immediate(power - kSmiShift));
879
+ }
880
+ }
881
+
882
+
883
+ void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
884
+ Register src,
885
+ int power) {
886
+ ASSERT((0 <= power) && (power < 32));
887
+ if (dst.is(src)) {
888
+ shr(dst, Immediate(power + kSmiShift));
889
+ } else {
890
+ UNIMPLEMENTED(); // Not used.
891
+ }
892
+ }
893
+
894
+
895
+ Condition MacroAssembler::CheckSmi(Register src) {
896
+ ASSERT_EQ(0, kSmiTag);
897
+ testb(src, Immediate(kSmiTagMask));
898
+ return zero;
899
+ }
900
+
901
+
902
+ Condition MacroAssembler::CheckSmi(const Operand& src) {
903
+ ASSERT_EQ(0, kSmiTag);
904
+ testb(src, Immediate(kSmiTagMask));
905
+ return zero;
906
+ }
907
+
908
+
909
+ Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
910
+ ASSERT_EQ(0, kSmiTag);
911
+ // Make mask 0x8000000000000001 and test that both bits are zero.
912
+ movq(kScratchRegister, src);
913
+ rol(kScratchRegister, Immediate(1));
914
+ testb(kScratchRegister, Immediate(3));
915
+ return zero;
916
+ }
917
+
918
+
919
+ Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
920
+ if (first.is(second)) {
921
+ return CheckSmi(first);
922
+ }
923
+ ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
924
+ leal(kScratchRegister, Operand(first, second, times_1, 0));
925
+ testb(kScratchRegister, Immediate(0x03));
926
+ return zero;
927
+ }
928
+
929
+
930
+ Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
931
+ Register second) {
932
+ if (first.is(second)) {
933
+ return CheckNonNegativeSmi(first);
934
+ }
935
+ movq(kScratchRegister, first);
936
+ or_(kScratchRegister, second);
937
+ rol(kScratchRegister, Immediate(1));
938
+ testl(kScratchRegister, Immediate(3));
939
+ return zero;
940
+ }
941
+
942
+
943
+ Condition MacroAssembler::CheckEitherSmi(Register first,
944
+ Register second,
945
+ Register scratch) {
946
+ if (first.is(second)) {
947
+ return CheckSmi(first);
948
+ }
949
+ if (scratch.is(second)) {
950
+ andl(scratch, first);
951
+ } else {
952
+ if (!scratch.is(first)) {
953
+ movl(scratch, first);
954
+ }
955
+ andl(scratch, second);
956
+ }
957
+ testb(scratch, Immediate(kSmiTagMask));
958
+ return zero;
959
+ }
960
+
961
+
962
+ Condition MacroAssembler::CheckIsMinSmi(Register src) {
963
+ ASSERT(!src.is(kScratchRegister));
964
+ // If we overflow by subtracting one, it's the minimal smi value.
965
+ cmpq(src, kSmiConstantRegister);
966
+ return overflow;
967
+ }
968
+
969
+
970
+ Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
971
+ // A 32-bit integer value can always be converted to a smi.
972
+ return always;
973
+ }
974
+
975
+
976
+ Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
977
+ // An unsigned 32-bit integer value is valid as long as the high bit
978
+ // is not set.
979
+ testl(src, src);
980
+ return positive;
981
+ }
982
+
983
+
984
+ void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) {
985
+ if (dst.is(src)) {
986
+ andl(dst, Immediate(kSmiTagMask));
987
+ } else {
988
+ movl(dst, Immediate(kSmiTagMask));
989
+ andl(dst, src);
990
+ }
991
+ }
992
+
993
+
994
+ void MacroAssembler::CheckSmiToIndicator(Register dst, const Operand& src) {
995
+ if (!(src.AddressUsesRegister(dst))) {
996
+ movl(dst, Immediate(kSmiTagMask));
997
+ andl(dst, src);
998
+ } else {
999
+ movl(dst, src);
1000
+ andl(dst, Immediate(kSmiTagMask));
1001
+ }
1002
+ }
1003
+
1004
+
1005
+ void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
1006
+ if (constant->value() == 0) {
1007
+ if (!dst.is(src)) {
1008
+ movq(dst, src);
1009
+ }
1010
+ return;
1011
+ } else if (dst.is(src)) {
1012
+ ASSERT(!dst.is(kScratchRegister));
1013
+ switch (constant->value()) {
1014
+ case 1:
1015
+ addq(dst, kSmiConstantRegister);
1016
+ return;
1017
+ case 2:
1018
+ lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
1019
+ return;
1020
+ case 4:
1021
+ lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
1022
+ return;
1023
+ case 8:
1024
+ lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
1025
+ return;
1026
+ default:
1027
+ Register constant_reg = GetSmiConstant(constant);
1028
+ addq(dst, constant_reg);
1029
+ return;
1030
+ }
1031
+ } else {
1032
+ switch (constant->value()) {
1033
+ case 1:
1034
+ lea(dst, Operand(src, kSmiConstantRegister, times_1, 0));
1035
+ return;
1036
+ case 2:
1037
+ lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
1038
+ return;
1039
+ case 4:
1040
+ lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
1041
+ return;
1042
+ case 8:
1043
+ lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
1044
+ return;
1045
+ default:
1046
+ LoadSmiConstant(dst, constant);
1047
+ addq(dst, src);
1048
+ return;
1049
+ }
1050
+ }
1051
+ }
1052
+
1053
+
1054
+ void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
1055
+ if (constant->value() != 0) {
1056
+ addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value()));
1057
+ }
1058
+ }
1059
+
1060
+
1061
+ void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
1062
+ if (constant->value() == 0) {
1063
+ if (!dst.is(src)) {
1064
+ movq(dst, src);
1065
+ }
1066
+ } else if (dst.is(src)) {
1067
+ ASSERT(!dst.is(kScratchRegister));
1068
+ Register constant_reg = GetSmiConstant(constant);
1069
+ subq(dst, constant_reg);
1070
+ } else {
1071
+ if (constant->value() == Smi::kMinValue) {
1072
+ LoadSmiConstant(dst, constant);
1073
+ // Adding and subtracting the min-value gives the same result, it only
1074
+ // differs on the overflow bit, which we don't check here.
1075
+ addq(dst, src);
1076
+ } else {
1077
+ // Subtract by adding the negation.
1078
+ LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
1079
+ addq(dst, src);
1080
+ }
1081
+ }
1082
+ }
1083
+
1084
+
1085
+ void MacroAssembler::SmiAdd(Register dst,
1086
+ Register src1,
1087
+ Register src2) {
1088
+ // No overflow checking. Use only when it's known that
1089
+ // overflowing is impossible.
1090
+ ASSERT(!dst.is(src2));
1091
+ if (dst.is(src1)) {
1092
+ addq(dst, src2);
1093
+ } else {
1094
+ movq(dst, src1);
1095
+ addq(dst, src2);
1096
+ }
1097
+ Assert(no_overflow, "Smi addition overflow");
1098
+ }
1099
+
1100
+
1101
+ void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
1102
+ // No overflow checking. Use only when it's known that
1103
+ // overflowing is impossible (e.g., subtracting two positive smis).
1104
+ ASSERT(!dst.is(src2));
1105
+ if (dst.is(src1)) {
1106
+ subq(dst, src2);
1107
+ } else {
1108
+ movq(dst, src1);
1109
+ subq(dst, src2);
1110
+ }
1111
+ Assert(no_overflow, "Smi subtraction overflow");
1112
+ }
1113
+
1114
+
1115
+ void MacroAssembler::SmiSub(Register dst,
1116
+ Register src1,
1117
+ const Operand& src2) {
1118
+ // No overflow checking. Use only when it's known that
1119
+ // overflowing is impossible (e.g., subtracting two positive smis).
1120
+ if (dst.is(src1)) {
1121
+ subq(dst, src2);
1122
+ } else {
1123
+ movq(dst, src1);
1124
+ subq(dst, src2);
1125
+ }
1126
+ Assert(no_overflow, "Smi subtraction overflow");
1127
+ }
1128
+
1129
+
1130
+ void MacroAssembler::SmiNot(Register dst, Register src) {
1131
+ ASSERT(!dst.is(kScratchRegister));
1132
+ ASSERT(!src.is(kScratchRegister));
1133
+ // Set tag and padding bits before negating, so that they are zero afterwards.
1134
+ movl(kScratchRegister, Immediate(~0));
1135
+ if (dst.is(src)) {
1136
+ xor_(dst, kScratchRegister);
1137
+ } else {
1138
+ lea(dst, Operand(src, kScratchRegister, times_1, 0));
1139
+ }
1140
+ not_(dst);
1141
+ }
1142
+
1143
+
1144
+ void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
1145
+ ASSERT(!dst.is(src2));
1146
+ if (!dst.is(src1)) {
1147
+ movq(dst, src1);
1148
+ }
1149
+ and_(dst, src2);
1150
+ }
1151
+
1152
+
1153
+ void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
1154
+ if (constant->value() == 0) {
1155
+ Set(dst, 0);
1156
+ } else if (dst.is(src)) {
1157
+ ASSERT(!dst.is(kScratchRegister));
1158
+ Register constant_reg = GetSmiConstant(constant);
1159
+ and_(dst, constant_reg);
1160
+ } else {
1161
+ LoadSmiConstant(dst, constant);
1162
+ and_(dst, src);
1163
+ }
1164
+ }
1165
+
1166
+
1167
+ void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
1168
+ if (!dst.is(src1)) {
1169
+ movq(dst, src1);
1170
+ }
1171
+ or_(dst, src2);
1172
+ }
1173
+
1174
+
1175
+ void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
1176
+ if (dst.is(src)) {
1177
+ ASSERT(!dst.is(kScratchRegister));
1178
+ Register constant_reg = GetSmiConstant(constant);
1179
+ or_(dst, constant_reg);
1180
+ } else {
1181
+ LoadSmiConstant(dst, constant);
1182
+ or_(dst, src);
1183
+ }
1184
+ }
1185
+
1186
+
1187
+ void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
1188
+ if (!dst.is(src1)) {
1189
+ movq(dst, src1);
1190
+ }
1191
+ xor_(dst, src2);
1192
+ }
1193
+
1194
+
1195
+ void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
1196
+ if (dst.is(src)) {
1197
+ ASSERT(!dst.is(kScratchRegister));
1198
+ Register constant_reg = GetSmiConstant(constant);
1199
+ xor_(dst, constant_reg);
1200
+ } else {
1201
+ LoadSmiConstant(dst, constant);
1202
+ xor_(dst, src);
1203
+ }
1204
+ }
1205
+
1206
+
1207
// Arithmetic right shift of a smi by a compile-time constant; the result is
// re-tagged as a smi. Only the in-place (dst == src) case is implemented.
void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
                                                     Register src,
                                                     int shift_value) {
  ASSERT(is_uint5(shift_value));
  if (shift_value > 0) {
    if (dst.is(src)) {
      // Shift past the tag bits and by the requested amount in one sar,
      // then shift back up to restore the smi tag encoding.
      sar(dst, Immediate(shift_value + kSmiShift));
      shl(dst, Immediate(kSmiShift));
    } else {
      UNIMPLEMENTED();  // Not used.
    }
  }
  // shift_value == 0: dst already holds the (unchanged) smi iff dst == src.
}
1220
+
1221
+
1222
// Left shift of a smi by a compile-time constant. The value is shifted while
// still tagged (the tag bits are zero, so shifting left preserves tagging).
// No overflow check is performed here.
void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                          Register src,
                                          int shift_value) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift_value > 0) {
    shl(dst, Immediate(shift_value));
  }
}
1232
+
1233
+
1234
+ void MacroAssembler::SmiShiftLeft(Register dst,
1235
+ Register src1,
1236
+ Register src2) {
1237
+ ASSERT(!dst.is(rcx));
1238
+ NearLabel result_ok;
1239
+ // Untag shift amount.
1240
+ if (!dst.is(src1)) {
1241
+ movq(dst, src1);
1242
+ }
1243
+ SmiToInteger32(rcx, src2);
1244
+ // Shift amount specified by lower 5 bits, not six as the shl opcode.
1245
+ and_(rcx, Immediate(0x1f));
1246
+ shl_cl(dst);
1247
+ }
1248
+
1249
+
1250
// dst = src1 >> (untagged src2), arithmetic, result re-tagged as a smi.
// Uses rcx for the variable shift count; if rcx is one of the sources it is
// saved in kScratchRegister and restored afterwards.
void MacroAssembler::SmiShiftArithmeticRight(Register dst,
                                             Register src1,
                                             Register src2) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  // Preserve whichever source occupies rcx across the shift.
  if (src1.is(rcx)) {
    movq(kScratchRegister, src1);
  } else if (src2.is(rcx)) {
    movq(kScratchRegister, src2);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  // OR-ing in kSmiShift makes the single sar also discard the tag bits.
  orl(rcx, Immediate(kSmiShift));
  sar_cl(dst);  // Shift 32 + original rcx & 0x1f.
  shl(dst, Immediate(kSmiShift));  // Re-tag the result.
  // Restore the source that was living in rcx.
  if (src1.is(rcx)) {
    movq(src1, kScratchRegister);
  } else if (src2.is(rcx)) {
    movq(src2, kScratchRegister);
  }
}
1275
+
1276
+
1277
// Converts a smi in src into an index scaled by 2^shift, i.e.
// dst = untagged(src) << shift, folded into one shift relative to kSmiShift.
// Returns the register-and-scale pair usable in an addressing mode.
SmiIndex MacroAssembler::SmiToIndex(Register dst,
                                    Register src,
                                    int shift) {
  ASSERT(is_uint6(shift));
  // There is a possible optimization if shift is in the range 60-63, but that
  // will (and must) never happen.
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}
1293
+
1294
// Like SmiToIndex, but produces the negated index: dst = -(untagged(src)) << shift.
// The negation happens before the shift adjustment.
SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
                                            Register src,
                                            int shift) {
  // Register src holds a positive smi.
  ASSERT(is_uint6(shift));
  if (!dst.is(src)) {
    movq(dst, src);
  }
  neg(dst);
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}
1310
+
1311
+
1312
// Register-to-register move that emits nothing when dst and src coincide.
void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
}
1317
+
1318
+
1319
// Loads a handle's referent into dst: smis become immediates; heap objects
// are embedded with EMBEDDED_OBJECT relocation so the GC can update them.
void MacroAssembler::Move(Register dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
  }
}
1327
+
1328
+
1329
// Stores a handle's referent to a memory operand. Heap objects must pass
// through kScratchRegister since movq cannot store a relocated 64-bit
// immediate directly to memory.
void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    movq(dst, kScratchRegister);
  }
}
1338
+
1339
+
1340
// Compares dst against a handle's referent, setting flags for a following j().
// Smis compare as immediates; heap objects go via kScratchRegister.
void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
  if (source->IsSmi()) {
    SmiCompare(dst, Smi::cast(*source));
  } else {
    Move(kScratchRegister, source);
    cmpq(dst, kScratchRegister);
  }
}
1348
+
1349
+
1350
// Memory-operand variant of Cmp(Register, Handle<Object>).
void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
  if (source->IsSmi()) {
    SmiCompare(dst, Smi::cast(*source));
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    cmpq(dst, kScratchRegister);
  }
}
1359
+
1360
+
1361
// Pushes a handle's referent on the stack; heap objects are materialized in
// kScratchRegister first since there is no 64-bit push-immediate.
void MacroAssembler::Push(Handle<Object> source) {
  if (source->IsSmi()) {
    Push(Smi::cast(*source));
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    push(kScratchRegister);
  }
}
1370
+
1371
+
1372
// Pushes the tagged bit pattern of a smi. Small values fit a 32-bit
// sign-extended push immediate; larger ones need a register.
void MacroAssembler::Push(Smi* source) {
  intptr_t smi = reinterpret_cast<intptr_t>(source);
  if (is_int32(smi)) {
    push(Immediate(static_cast<int32_t>(smi)));
  } else {
    Register constant = GetSmiConstant(source);
    push(constant);
  }
}
1381
+
1382
+
1383
// Discards stack_elements pointer-sized slots from the top of the stack.
void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    addq(rsp, Immediate(stack_elements * kPointerSize));
  }
}
1388
+
1389
+
1390
// Tests a smi in memory against an untagged value: offsetting the operand by
// kIntSize addresses the upper 32 bits of the word, where the smi payload
// lives under V8's 64-bit smi encoding.
void MacroAssembler::Test(const Operand& src, Smi* source) {
  testl(Operand(src, kIntSize), Immediate(source->value()));
}
1393
+
1394
+
1395
// Indirect jump to an external (C++) address; clobbers kScratchRegister.
void MacroAssembler::Jump(ExternalReference ext) {
  movq(kScratchRegister, ext);
  jmp(kScratchRegister);
}
1399
+
1400
+
1401
// Indirect jump to an absolute address with the given relocation mode;
// clobbers kScratchRegister.
void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  jmp(kScratchRegister);
}
1405
+
1406
+
1407
// Jump to a code object's entry; delegates to the assembler's jmp overload.
void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
  // TODO(X64): Inline this
  jmp(code_object, rmode);
}
1411
+
1412
+
1413
// Indirect call to an external (C++) address; clobbers kScratchRegister.
void MacroAssembler::Call(ExternalReference ext) {
  movq(kScratchRegister, ext);
  call(kScratchRegister);
}
1417
+
1418
+
1419
// Indirect call to an absolute address with the given relocation mode;
// clobbers kScratchRegister.
void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  call(kScratchRegister);
}
1423
+
1424
+
1425
// Call into a code object; rmode must be a code-target relocation mode.
void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  call(code_object, rmode);
}
1429
+
1430
+
1431
// Pushes all safepoint-saved general registers (11 of them), then pads rsp
// down so the frame spans kNumSafepointRegisters slots. Skipped registers:
// rsp, rbp, r10 (scratch), r13 (root), r15 (smi constant).
void MacroAssembler::Pushad() {
  push(rax);
  push(rcx);
  push(rdx);
  push(rbx);
  // Not pushing rsp or rbp.
  push(rsi);
  push(rdi);
  push(r8);
  push(r9);
  // r10 is kScratchRegister.
  push(r11);
  push(r12);
  // r13 is kRootRegister.
  push(r14);
  // r15 is kSmiConstantRegister
  STATIC_ASSERT(11 == kNumSafepointSavedRegisters);
  // Use lea for symmetry with Popad.
  int sp_delta =
      (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  lea(rsp, Operand(rsp, -sp_delta));
}
1453
+
1454
+
1455
// Exact inverse of Pushad: removes the padding, then pops the saved
// registers in reverse push order.
void MacroAssembler::Popad() {
  // Popad must not change the flags, so use lea instead of addq.
  int sp_delta =
      (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  lea(rsp, Operand(rsp, sp_delta));
  pop(r14);
  pop(r12);
  pop(r11);
  pop(r9);
  pop(r8);
  pop(rdi);
  pop(rsi);
  pop(rbx);
  pop(rdx);
  pop(rcx);
  pop(rax);
}
1472
+
1473
+
1474
// Discards a whole Pushad frame without restoring any register values.
void MacroAssembler::Dropad() {
  addq(rsp, Immediate(kNumSafepointRegisters * kPointerSize));
}
1477
+
1478
+
1479
// Order general registers are pushed by Pushad:
// rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r12, r14.
// Maps a register code to its slot index within the Pushad frame;
// -1 marks registers Pushad does not save (see Pushad's comments).
int MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
    0,   // rax
    1,   // rcx
    2,   // rdx
    3,   // rbx
    -1,  // rsp — not pushed
    -1,  // rbp — not pushed
    4,   // rsi
    5,   // rdi
    6,   // r8
    7,   // r9
    -1,  // r10 — kScratchRegister
    8,   // r11
    9,   // r12
    -1,  // r13 — kRootRegister
    10,  // r14
    -1   // r15 — kSmiConstantRegister
};
1499
+
1500
+
1501
// Writes src into the Pushad-frame slot reserved for register dst.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  movq(SafepointRegisterSlot(dst), src);
}
1504
+
1505
+
1506
// Reads the Pushad-frame slot reserved for register src into dst.
void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  movq(dst, SafepointRegisterSlot(src));
}
1509
+
1510
+
1511
// Addressing-mode operand for reg's slot in the current Pushad frame.
// Emits no code.
Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(rsp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}
1514
+
1515
+
1516
// Pushes a new stack handler (state, frame pointer, previous handler) on the
// stack and links it into the thread-local handler chain at
// Top::k_handler_address. The matching unlink is PopTryHandler.
void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);

  // The pc (return address) is already on TOS. This code pushes state,
  // frame pointer and current handler. Check that they are expected
  // next on the stack, in that order.
  ASSERT_EQ(StackHandlerConstants::kStateOffset,
            StackHandlerConstants::kPCOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kFPOffset,
            StackHandlerConstants::kStateOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kNextOffset,
            StackHandlerConstants::kFPOffset - kPointerSize);

  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(rbp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for rbp. We expect the code throwing an exception to check rbp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler.
  movq(kScratchRegister, ExternalReference(Top::k_handler_address));
  push(Operand(kScratchRegister, 0));
  // Link this handler.
  movq(Operand(kScratchRegister, 0), rsp);
}
1552
+
1553
+
1554
// Unlinks the topmost stack handler (restoring the previous one into
// Top::k_handler_address) and drops the rest of the handler from the stack.
void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  // Unlink this handler.
  movq(kScratchRegister, ExternalReference(Top::k_handler_address));
  pop(Operand(kScratchRegister, 0));
  // Remove the remaining fields.
  addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}
1562
+
1563
+
1564
// Throws the value in `value` to the topmost stack handler: unwinds rsp to
// the handler, unlinks it, restores rbp/context and returns to the handler's
// saved pc. The thrown value is delivered in rax.
void MacroAssembler::Throw(Register value) {
  // Check that stack should contain next handler, frame pointer, state and
  // return address in that order.
  STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
                StackHandlerConstants::kStateOffset);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
                StackHandlerConstants::kPCOffset);
  // Keep thrown value in rax.
  if (!value.is(rax)) {
    movq(rax, value);
  }

  ExternalReference handler_address(Top::k_handler_address);
  movq(kScratchRegister, handler_address);
  movq(rsp, Operand(kScratchRegister, 0));
  // get next in chain
  pop(rcx);
  movq(Operand(kScratchRegister, 0), rcx);
  pop(rbp);  // pop frame pointer
  pop(rdx);  // remove state

  // Before returning we restore the context from the frame pointer if not NULL.
  // The frame pointer is NULL in the exception handler of a JS entry frame.
  Set(rsi, 0);  // Tentatively set context pointer to NULL
  NearLabel skip;
  cmpq(rbp, Immediate(0));
  j(equal, &skip);
  movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  bind(&skip);
  ret(0);
}
1595
+
1596
+
1597
// Throws an uncatchable exception: unwinds the handler chain all the way to
// the nearest ENTRY handler (skipping TRY_CATCH/TRY_FINALLY), optionally
// records an out-of-memory failure, and returns into the entry handler.
void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
                                      Register value) {
  // Keep thrown value in rax.
  if (!value.is(rax)) {
    movq(rax, value);
  }
  // Fetch top stack handler.
  ExternalReference handler_address(Top::k_handler_address);
  movq(kScratchRegister, handler_address);
  movq(rsp, Operand(kScratchRegister, 0));

  // Unwind the handlers until the ENTRY handler is found.
  NearLabel loop, done;
  bind(&loop);
  // Load the type of the current stack handler.
  const int kStateOffset = StackHandlerConstants::kStateOffset;
  cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY));
  j(equal, &done);
  // Fetch the next handler in the list.
  const int kNextOffset = StackHandlerConstants::kNextOffset;
  movq(rsp, Operand(rsp, kNextOffset));
  jmp(&loop);
  bind(&done);

  // Set the top handler address to next handler past the current ENTRY handler.
  movq(kScratchRegister, handler_address);
  pop(Operand(kScratchRegister, 0));

  if (type == OUT_OF_MEMORY) {
    // Set external caught exception to false.
    ExternalReference external_caught(Top::k_external_caught_exception_address);
    movq(rax, Immediate(false));
    store_rax(external_caught);

    // Set pending exception and rax to out of memory exception.
    ExternalReference pending_exception(Top::k_pending_exception_address);
    movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE);
    store_rax(pending_exception);
  }

  // Clear the context pointer.
  Set(rsi, 0);

  // Restore registers from handler.
  STATIC_ASSERT(StackHandlerConstants::kNextOffset + kPointerSize ==
                StackHandlerConstants::kFPOffset);
  pop(rbp);  // FP
  STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
                StackHandlerConstants::kStateOffset);
  pop(rdx);  // State

  STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
                StackHandlerConstants::kPCOffset);
  ret(0);
}
1652
+
1653
+
1654
// Plain return; pops nothing beyond the return address.
void MacroAssembler::Ret() {
  ret(0);
}
1657
+
1658
+
1659
// Return and drop bytes_dropped bytes of arguments. ret's immediate is only
// 16 bits wide, so larger drops relocate the return address past the
// arguments using the caller-provided scratch register.
void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    pop(scratch);
    addq(rsp, Immediate(bytes_dropped));
    push(scratch);
    ret(0);
  }
}
1669
+
1670
+
1671
// Compares the two top x87 stack values into EFLAGS and pops both
// (fucomip pops once; fstp(0) discards the remaining operand).
void MacroAssembler::FCmp() {
  fucomip();
  fstp(0);
}
1675
+
1676
+
1677
// Loads heap_object's map into `map` and compares its instance type against
// `type`; flags are set for a following conditional jump.
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
1683
+
1684
+
1685
// Compares the instance-type byte stored in a map object against `type`.
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       Immediate(static_cast<int8_t>(type)));
}
1689
+
1690
+
1691
// Jumps to `fail` unless obj is a heap object whose map equals `map`.
// When the caller already knows obj is a heap object, is_heap_object skips
// the smi check.
void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              bool is_heap_object) {
  if (!is_heap_object) {
    JumpIfSmi(obj, fail);
  }
  Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
  j(not_equal, fail);
}
1701
+
1702
+
1703
// Debug check: aborts unless object is a smi or a heap number.
void MacroAssembler::AbortIfNotNumber(Register object) {
  NearLabel ok;
  Condition is_smi = CheckSmi(object);
  j(is_smi, &ok);  // Smis are numbers; skip the map check.
  Cmp(FieldOperand(object, HeapObject::kMapOffset),
      Factory::heap_number_map());
  Assert(equal, "Operand not a number");
  bind(&ok);
}
1712
+
1713
+
1714
+ void MacroAssembler::AbortIfSmi(Register object) {
1715
+ NearLabel ok;
1716
+ Condition is_smi = CheckSmi(object);
1717
+ Assert(NegateCondition(is_smi), "Operand is a smi");
1718
+ }
1719
+
1720
+
1721
+ void MacroAssembler::AbortIfNotSmi(Register object) {
1722
+ NearLabel ok;
1723
+ Condition is_smi = CheckSmi(object);
1724
+ Assert(is_smi, "Operand is not a smi");
1725
+ }
1726
+
1727
+
1728
// Debug check: aborts unless object is a string (non-smi with instance type
// below FIRST_NONSTRING_TYPE). object is preserved via push/pop around the
// map load.
void MacroAssembler::AbortIfNotString(Register object) {
  testb(object, Immediate(kSmiTagMask));
  Assert(not_equal, "Operand is not a string");
  push(object);
  movq(object, FieldOperand(object, HeapObject::kMapOffset));
  CmpInstanceType(object, FIRST_NONSTRING_TYPE);
  pop(object);
  Assert(below, "Operand is not a string");
}
1737
+
1738
+
1739
// Debug check: aborts with `message` unless src equals the root-list value
// at root_value_index. Clobbers kScratchRegister.
void MacroAssembler::AbortIfNotRootValue(Register src,
                                         Heap::RootListIndex root_value_index,
                                         const char* message) {
  ASSERT(!src.is(kScratchRegister));
  LoadRoot(kScratchRegister, root_value_index);
  cmpq(src, kScratchRegister);
  Check(equal, message);
}
1747
+
1748
+
1749
+
1750
// Loads heap_object's map and instance type, tests the string bit and returns
// the condition (zero) that holds when the object is a string. heap_object
// must already be known to be a heap object (no smi check here).
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  ASSERT(kNotStringTag != 0);
  testb(instance_type, Immediate(kIsNotStringMask));
  return zero;
}
1759
+
1760
+
1761
// Loads `function`'s prototype into `result`, or jumps to `miss` when
// `function` is not a JSFunction or its prototype is not yet materialized
// (the hole). Handles both the initial-map and non-instance-prototype cases.
void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  testl(function, Immediate(kSmiTagMask));
  j(zero, miss);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss);

  // Make sure that the function has an instance prototype.
  NearLabel non_instance;
  testb(FieldOperand(result, Map::kBitFieldOffset),
        Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance);

  // Get the prototype or initial map from the function.
  movq(result,
       FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  CompareRoot(result, Heap::kTheHoleValueRootIndex);
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  NearLabel done;
  CmpObjectType(result, MAP_TYPE, kScratchRegister);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  movq(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  movq(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}
1805
+
1806
+
1807
// Stores `value` into a stats counter's cell, if native counters are enabled.
// Clobbers kScratchRegister.
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    movl(Operand(kScratchRegister, 0), Immediate(value));
  }
}
1813
+
1814
+
1815
// Adds `value` (> 0) to a stats counter's cell; uses incl for the common
// value == 1 case. Clobbers kScratchRegister.
void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    Operand operand(kScratchRegister, 0);
    if (value == 1) {
      incl(operand);
    } else {
      addl(operand, Immediate(value));
    }
  }
}
1827
+
1828
+
1829
// Subtracts `value` (> 0) from a stats counter's cell; uses decl for the
// common value == 1 case. Clobbers kScratchRegister.
void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    Operand operand(kScratchRegister, 0);
    if (value == 1) {
      decl(operand);
    } else {
      subl(operand, Immediate(value));
    }
  }
}
1841
+
1842
+
1843
#ifdef ENABLE_DEBUGGER_SUPPORT
// Emits a call into the Runtime::kDebugBreak builtin via a CEntryStub,
// passing zero arguments, with DEBUG_BREAK relocation.
void MacroAssembler::DebugBreak() {
  ASSERT(allow_stub_calls());
  Set(rax, 0);  // No arguments.
  movq(rbx, ExternalReference(Runtime::kDebugBreak));
  CEntryStub ces(1);
  Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif  // ENABLE_DEBUGGER_SUPPORT
1852
+
1853
+
1854
// Invokes code whose entry address is in `code`, after InvokePrologue has
// reconciled expected vs. actual argument counts (possibly via the arguments
// adaptor). flag selects call vs. tail-jump; post_call_generator, if given,
// emits code right after the call.
void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag,
                                PostCallGenerator* post_call_generator) {
  NearLabel done;
  InvokePrologue(expected,
                 actual,
                 Handle<Code>::null(),
                 code,
                 &done,
                 flag,
                 post_call_generator);
  if (flag == CALL_FUNCTION) {
    call(code);
    if (post_call_generator != NULL) post_call_generator->Generate();
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}
1876
+
1877
+
1878
// Handle<Code> variant of InvokeCode: the target is a code object invoked
// with the supplied relocation mode instead of a register entry point.
void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag,
                                PostCallGenerator* post_call_generator) {
  NearLabel done;
  // No register entry point in this variant; InvokePrologue still wants one.
  Register dummy = rax;
  InvokePrologue(expected,
                 actual,
                 code,
                 dummy,
                 &done,
                 flag,
                 post_call_generator);
  if (flag == CALL_FUNCTION) {
    Call(code, rmode);
    if (post_call_generator != NULL) post_call_generator->Generate();
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    Jump(code, rmode);
  }
  bind(&done);
}
1902
+
1903
+
1904
// Invokes the JSFunction held in `function` (which must be rdi, per the
// calling convention). Loads the context into rsi, the formal parameter
// count into rbx, and the code entry into rdx before delegating to InvokeCode.
void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    PostCallGenerator* post_call_generator) {
  ASSERT(function.is(rdi));
  movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
  movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
  movsxlq(rbx,
          FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
  // Advances rdx to the end of the Code object header, to the start of
  // the executable code.
  movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));

  ParameterCount expected(rbx);
  InvokeCode(rdx, expected, actual, flag, post_call_generator);
}
1920
+
1921
+
1922
// Invokes a compile-time-known JSFunction. Under Crankshaft the code entry
// is reloaded from the function object at runtime (recompilation may swap
// it); otherwise the currently compiled code object is baked in directly.
void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    PostCallGenerator* post_call_generator) {
  ASSERT(function->is_compiled());
  // Get the function and setup the context.
  Move(rdi, Handle<JSFunction>(function));
  movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  if (V8::UseCrankshaft()) {
    // Since Crankshaft can recompile a function, we need to load
    // the Code object every time we call the function.
    movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
    ParameterCount expected(function->shared()->formal_parameter_count());
    InvokeCode(rdx, expected, actual, flag, post_call_generator);
  } else {
    // Invoke the cached code.
    Handle<Code> code(function->code());
    ParameterCount expected(function->shared()->formal_parameter_count());
    InvokeCode(code,
               expected,
               actual,
               RelocInfo::CODE_TARGET,
               flag,
               post_call_generator);
  }
}
1949
+
1950
+
1951
// Builds a standard internal frame: saved rbp, context, frame-type marker
// and the code object. In debug builds, verifies the pushed code object has
// been patched away from the undefined placeholder.
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(rbp);
  movq(rbp, rsp);
  push(rsi);  // Context.
  Push(Smi::FromInt(type));
  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister);
  if (FLAG_debug_code) {
    movq(kScratchRegister,
         Factory::undefined_value(),
         RelocInfo::EMBEDDED_OBJECT);
    cmpq(Operand(rsp, 0), kScratchRegister);
    Check(not_equal, "code object not properly patched");
  }
}
1966
+
1967
+
1968
// Tears down a frame built by EnterFrame. In debug builds, checks the frame's
// type marker matches the expected type before unwinding.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    Move(kScratchRegister, Smi::FromInt(type));
    cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
    Check(equal, "stack frame types must match");
  }
  movq(rsp, rbp);
  pop(rbp);
}
1977
+
1978
+
1979
// First half of exit-frame construction: sets up rbp, reserves the saved-sp
// slot, pushes the code object, and records the frame pointer and context in
// the Top thread-local slots. When save_rax is set, rax is preserved in r14
// (it is clobbered below by the store_rax helpers).
void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
  // Setup the frame structure on the stack.
  // All constants are relative to the frame pointer of the exit frame.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(rbp);
  movq(rbp, rsp);

  // Reserve room for entry stack pointer and push the code object.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister);  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  if (save_rax) {
    movq(r14, rax);  // Backup rax before we use it.
  }

  movq(rax, rbp);
  store_rax(c_entry_fp_address);
  movq(rax, rsi);
  store_rax(context_address);
}
2006
+
2007
+
2008
// Second half of exit-frame construction: reserves C argument space (plus
// Win64 shadow space), optionally spills all allocatable XMM registers below
// the frame, aligns rsp to the OS frame alignment, and patches the saved
// entry sp slot reserved by the prologue.
void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
                                            bool save_doubles) {
#ifdef _WIN64
  const int kShadowSpace = 4;
  arg_stack_space += kShadowSpace;
#endif
  // Optionally save all XMM registers.
  if (save_doubles) {
    CpuFeatures::Scope scope(SSE2);
    int space = XMMRegister::kNumRegisters * kDoubleSize +
        arg_stack_space * kPointerSize;
    subq(rsp, Immediate(space));
    int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
      XMMRegister reg = XMMRegister::FromAllocationIndex(i);
      movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else if (arg_stack_space > 0) {
    subq(rsp, Immediate(arg_stack_space * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    // Align rsp down by masking off the low bits.
    movq(kScratchRegister, Immediate(-kFrameAlignment));
    and_(rsp, kScratchRegister);
  }

  // Patch the saved entry sp.
  movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
}
2040
+
2041
+
2042
// Full exit-frame entry for runtime calls. Expects the argument count in r14
// (saved rax from the prologue) and leaves a pointer to the arguments in r12.
void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles) {
  EnterExitFramePrologue(true);

  // Setup argv in callee-saved register r12. It is reused in LeaveExitFrame,
  // so it must be retained across the C-call.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(r12, Operand(rbp, r14, times_pointer_size, offset));

  EnterExitFrameEpilogue(arg_stack_space, save_doubles);
}
2052
+
2053
+
2054
// Exit-frame entry for API calls: no rax preservation, no double saves.
void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
  EnterExitFramePrologue(false);
  EnterExitFrameEpilogue(arg_stack_space, false);
}
2058
+
2059
+
2060
// Tears down a frame built by EnterExitFrame: optionally restores the spilled
// XMM registers, unwinds rbp, drops the caller's arguments and receiver
// (using the argv pointer kept in r12), and restores Top's thread-local state.
void MacroAssembler::LeaveExitFrame(bool save_doubles) {
  // Registers:
  // r12 : argv
  if (save_doubles) {
    int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
      XMMRegister reg = XMMRegister::FromAllocationIndex(i);
      movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize)));
    }
  }
  // Get the return address from the stack and restore the frame pointer.
  movq(rcx, Operand(rbp, 1 * kPointerSize));
  movq(rbp, Operand(rbp, 0 * kPointerSize));

  // Drop everything up to and including the arguments and the receiver
  // from the caller stack.
  lea(rsp, Operand(r12, 1 * kPointerSize));

  // Push the return address to get ready to return.
  push(rcx);

  LeaveExitFrameEpilogue();
}
2083
+
2084
+
2085
// Tears down a frame built by EnterApiExitFrame; the caller stack is left
// untouched apart from the frame itself.
void MacroAssembler::LeaveApiExitFrame() {
  movq(rsp, rbp);
  pop(rbp);

  LeaveExitFrameEpilogue();
}
2091
+
2092
+
2093
// Shared exit-frame teardown tail: restores rsi from Top's context slot
// (clearing the slot in debug builds) and clears Top's c_entry_fp slot.
void MacroAssembler::LeaveExitFrameEpilogue() {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  movq(kScratchRegister, context_address);
  movq(rsi, Operand(kScratchRegister, 0));
#ifdef DEBUG
  movq(Operand(kScratchRegister, 0), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  movq(kScratchRegister, c_entry_fp_address);
  movq(Operand(kScratchRegister, 0), Immediate(0));
}
2107
+
2108
+
2109
// Security check for accessing a global proxy: jumps to `miss` unless the
// current lexical context and holder's context are the same, or their
// security tokens match. Clobbers scratch and kScratchRegister.
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));
  ASSERT(!scratch.is(kScratchRegister));
  // Load current lexical context from the stack frame.
  movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmpq(scratch, Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  movq(scratch, FieldOperand(scratch, offset));
  movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
        Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
  }

  // Check if both contexts are the same.
  cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens.
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.

  // Check the context is a global context.
  if (FLAG_debug_code) {
    // Preserve original value of holder_reg.
    push(holder_reg);
    movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
    CompareRoot(holder_reg, Heap::kNullValueRootIndex);
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    // Read the first word and compare to global_context_map(),
    movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  movq(kScratchRegister,
       FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  int token_offset =
      Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
  movq(scratch, FieldOperand(scratch, token_offset));
  cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}
2170
+
2171
+
2172
// Loads the new-space allocation top into result. With RESULT_CONTAINS_TOP
// the caller already has it (debug builds verify); otherwise the top is
// fetched from the external reference, keeping its address in scratch (if
// valid) for a later UpdateAllocationTopHelper.
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(!scratch.is_valid());
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    movq(kScratchRegister, new_space_allocation_top);
    cmpq(result, Operand(kScratchRegister, 0));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available,
  // and keep address in scratch until call to UpdateAllocationTopHelper.
  if (scratch.is_valid()) {
    movq(scratch, new_space_allocation_top);
    movq(result, Operand(scratch, 0));
  } else if (result.is(rax)) {
    // rax has a dedicated load-from-absolute-address form.
    load_rax(new_space_allocation_top);
  } else {
    movq(kScratchRegister, new_space_allocation_top);
    movq(result, Operand(kScratchRegister, 0));
  }
}
2203
+
2204
+
2205
// Stores |result_end| as the new new-space allocation top.  If |scratch| is
// valid it is expected to already hold the address of the top variable
// (cached there by LoadAllocationTopHelper); otherwise the address is
// rematerialized in kScratchRegister.  Debug code verifies that the new top
// is object-aligned.
void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (FLAG_debug_code) {
    // New top must stay aligned to kObjectAlignment.
    testq(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Update new top.
  if (result_end.is(rax)) {
    // rax can be stored directly to a memory location.
    store_rax(new_space_allocation_top);
  } else {
    // Register required - use scratch provided if available.
    if (scratch.is_valid()) {
      movq(Operand(scratch, 0), result_end);
    } else {
      movq(kScratchRegister, new_space_allocation_top);
      movq(Operand(kScratchRegister, 0), result_end);
    }
  }
}
2229
+
2230
+
2231
// Allocates |object_size| bytes (a compile-time constant) in new space.
// On success |result| holds the address of the object, tagged as a heap
// object when TAG_OBJECT is set in |flags|.  On exhaustion (or when inline
// allocation is disabled) control jumps to |gc_required|.  |result_end| may
// be no_reg, in which case |result| itself is used to compute the new top
// and is adjusted back afterwards.
void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (FLAG_debug_code) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        movl(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();

  // If no result_end register was supplied, compute the new top in result.
  Register top_reg = result_end.is_valid() ? result_end : result;

  if (!top_reg.is(result)) {
    movq(top_reg, result);
  }
  addq(top_reg, Immediate(object_size));
  j(carry, gc_required);
  movq(kScratchRegister, new_space_allocation_limit);
  cmpq(top_reg, Operand(kScratchRegister, 0));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch);

  if (top_reg.is(result)) {
    // result currently holds the new top; subtract the object size to
    // recover the object's start address (folding in the heap-object tag
    // when requested).
    if ((flags & TAG_OBJECT) != 0) {
      subq(result, Immediate(object_size - kHeapObjectTag));
    } else {
      subq(result, Immediate(object_size));
    }
  } else if ((flags & TAG_OBJECT) != 0) {
    // Tag the result if requested.
    addq(result, Immediate(kHeapObjectTag));
  }
}
2285
+
2286
+
2287
// Allocates a variable-sized object of
// header_size + element_count * element_size bytes in new space.
// On success |result| holds the (optionally tagged) object address and
// |result_end| the new allocation top; on exhaustion control jumps to
// |gc_required|.  |element_count| is left unmodified.
void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (FLAG_debug_code) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      movl(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();

  // We assume that element_count*element_size + header_size does not
  // overflow.
  lea(result_end, Operand(element_count, element_size, header_size));
  addq(result_end, result);
  j(carry, gc_required);
  movq(kScratchRegister, new_space_allocation_limit);
  cmpq(result_end, Operand(kScratchRegister, 0));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag the result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    addq(result, Immediate(kHeapObjectTag));
  }
}
2334
+
2335
+
2336
// Allocates |object_size| bytes (a runtime value in a register) in new
// space.  On success |result| holds the (optionally tagged) object address
// and |result_end| the new top; on exhaustion control jumps to
// |gc_required|.  |object_size| itself is left unchanged unless it aliases
// |result_end|.
void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (FLAG_debug_code) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      movl(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  if (!object_size.is(result_end)) {
    movq(result_end, object_size);
  }
  addq(result_end, result);
  j(carry, gc_required);
  movq(kScratchRegister, new_space_allocation_limit);
  cmpq(result_end, Operand(kScratchRegister, 0));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag the result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    addq(result, Immediate(kHeapObjectTag));
  }
}
2380
+
2381
+
2382
// Reverts the most recent new-space allocation by resetting the allocation
// top to |object|'s (untagged) address.  Only valid if |object| was the
// last object allocated and no allocation has happened since.  |object| is
// clobbered (its tag bits are stripped).
void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(object, Immediate(~kHeapObjectTagMask));
  movq(kScratchRegister, new_space_allocation_top);
#ifdef DEBUG
  // The object must lie below the current top, i.e. inside the region
  // that was actually allocated.
  cmpq(object, Operand(kScratchRegister, 0));
  Check(below, "Undo allocation of non allocated memory");
#endif
  movq(Operand(kScratchRegister, 0), object);
}
2395
+
2396
+
2397
// Allocates a HeapNumber in new space and installs its map.  The value
// field is left uninitialized.  On failure control jumps to |gc_required|.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch,
                     no_reg,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}
2412
+
2413
+
2414
// Allocates a sequential two-byte string of |length| characters in new
// space and initializes its map, length (as a smi) and hash field.  The
// character payload is left uninitialized.  All three scratch registers
// are clobbered; on failure control jumps to |gc_required|.
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
                               kObjectAlignmentMask;
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  // (base + index with times_1 computes length + length = length * 2;
  // kHeaderAlignment is folded in before rounding and removed again below
  // so that adding the header size inside AllocateInNewSpace yields an
  // aligned total size.)
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
                kHeaderAlignment));
  and_(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subq(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movq(FieldOperand(result, String::kLengthOffset), scratch1);
  movq(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}
2451
+
2452
+
2453
// Allocates a sequential ASCII string of |length| characters in new space
// and initializes its map, length (as a smi) and hash field.  The
// character payload is left uninitialized.  All three scratch registers
// are clobbered; on failure control jumps to |gc_required|.
void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  const int kHeaderAlignment = SeqAsciiString::kHeaderSize &
                               kObjectAlignmentMask;
  movl(scratch1, length);
  ASSERT(kCharSize == 1);
  // Round length up to object alignment; kHeaderAlignment is folded in
  // before rounding and removed again below so that adding the header size
  // inside AllocateInNewSpace yields an aligned total size.
  addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
  and_(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subq(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movq(FieldOperand(result, String::kLengthOffset), scratch1);
  movq(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}
2489
+
2490
+
2491
// Allocates a (two-byte) cons string object in new space and installs its
// map.  The first/second/length/hash fields are left uninitialized.  On
// failure control jumps to |gc_required|.
void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate the fixed-size ConsString object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}
2507
+
2508
+
2509
// Allocates an ASCII cons string object in new space and installs its map.
// The first/second/length/hash fields are left uninitialized.  On failure
// control jumps to |gc_required|.
void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate the fixed-size ConsString object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}
2525
+
2526
+
2527
// Loads into |dst| the function context |context_chain_length| levels up
// from the current context (held in rsi).  A chain length of zero copies
// rsi itself so callers can safely store through |dst| without clobbering
// rsi via the write barrier.
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in rsi).
    movq(dst, rsi);
  }

  // We should not have found a 'with' context by walking the context chain
  // (i.e., the static scope chain and runtime context chain do not agree).
  // A variable occurring in such a scope should have slot type LOOKUP and
  // not CONTEXT.
  if (FLAG_debug_code) {
    cmpq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
    Check(equal, "Yo dawg, I heard you liked function contexts "
                 "so I put function contexts in all your contexts");
  }
}
2556
+
2557
+
2558
// Loads the global function at slot |index| of the current global context
// into |function|, navigating current context -> global object -> global
// context -> slot.
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  movq(function, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  movq(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  movq(function, Operand(function, Context::SlotOffset(index)));
}
2566
+
2567
+
2568
// Loads |function|'s initial map into |map|.  Global functions always have
// an initial map, which debug code verifies by checking that the loaded
// value is itself a map (its map is the meta map).
void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map. The global functions all have initial maps.
  movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (FLAG_debug_code) {
    Label ok, fail;
    CheckMap(map, Factory::meta_map(), &fail, false);
    jmp(&ok);
    bind(&fail);
    Abort("Global functions must have initial map");
    bind(&ok);
  }
}
2581
+
2582
+
2583
+ int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
2584
+ // On Windows 64 stack slots are reserved by the caller for all arguments
2585
+ // including the ones passed in registers, and space is always allocated for
2586
+ // the four register arguments even if the function takes fewer than four
2587
+ // arguments.
2588
+ // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
2589
+ // and the caller does not reserve stack slots for them.
2590
+ ASSERT(num_arguments >= 0);
2591
+ #ifdef _WIN64
2592
+ static const int kMinimumStackSlots = 4;
2593
+ if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
2594
+ return num_arguments;
2595
+ #else
2596
+ static const int kRegisterPassedArguments = 6;
2597
+ if (num_arguments < kRegisterPassedArguments) return 0;
2598
+ return num_arguments - kRegisterPassedArguments;
2599
+ #endif
2600
+ }
2601
+
2602
+
2603
// Aligns rsp to the platform's activation frame alignment and reserves
// stack space for |num_arguments| C-call arguments plus one extra slot in
// which the original rsp is saved (restored later by CallCFunction).
void MacroAssembler::PrepareCallCFunction(int num_arguments) {
  int frame_alignment = OS::ActivationFrameAlignment();
  ASSERT(frame_alignment != 0);
  ASSERT(num_arguments >= 0);
  // Make stack end at alignment and allocate space for arguments and old rsp.
  movq(kScratchRegister, rsp);
  ASSERT(IsPowerOf2(frame_alignment));
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  // +1 slot for the saved rsp; then round rsp down to the alignment.
  subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
  and_(rsp, Immediate(-frame_alignment));
  // Stash the original rsp just above the argument slots.
  movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
}
2616
+
2617
+
2618
// Calls the C function at the given external reference.  The target
// address is materialized in rax before delegating to the register
// overload; rax is therefore clobbered.
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  movq(rax, function);
  CallCFunction(rax, num_arguments);
}
2623
+
2624
+
2625
// Calls the C function whose address is in |function| and then restores
// rsp from the slot where PrepareCallCFunction saved it.  Must be paired
// with a preceding PrepareCallCFunction(num_arguments).
void MacroAssembler::CallCFunction(Register function, int num_arguments) {
  // Check stack alignment.
  if (FLAG_debug_code) {
    CheckStackAlignment();
  }

  call(function);
  ASSERT(OS::ActivationFrameAlignment() != 0);
  ASSERT(num_arguments >= 0);
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  // Reload the rsp value stashed above the argument slots.
  movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
}
2638
+
2639
+
2640
// Constructs a patcher that emits |size| bytes of instructions directly
// over existing code at |address|.
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap on order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
2647
+
2648
+
2649
// Flushes the instruction cache over the patched region and, in debug
// builds, verifies that exactly |size_| bytes were emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
2657
+
2658
+ } } // namespace v8::internal
2659
+
2660
+ #endif // V8_TARGET_ARCH_X64