therubyracer 0.8.1.pre2 → 0.8.1

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of therubyracer might be problematic; see the advisory details for more information.

Files changed (877)
  1. data/Changelog.md +2 -1
  2. data/README.md +6 -3
  3. data/ext/v8/upstream/3.1.8/.gitignore +31 -0
  4. data/ext/v8/upstream/3.1.8/AUTHORS +40 -0
  5. data/ext/v8/upstream/3.1.8/ChangeLog +2566 -0
  6. data/ext/v8/upstream/3.1.8/LICENSE +52 -0
  7. data/ext/v8/upstream/3.1.8/LICENSE.strongtalk +29 -0
  8. data/ext/v8/upstream/3.1.8/LICENSE.v8 +26 -0
  9. data/ext/v8/upstream/3.1.8/LICENSE.valgrind +45 -0
  10. data/ext/v8/upstream/3.1.8/SConstruct +1192 -0
  11. data/ext/v8/upstream/3.1.8/build/README.txt +25 -0
  12. data/ext/v8/upstream/3.1.8/build/all.gyp +18 -0
  13. data/ext/v8/upstream/3.1.8/build/armu.gypi +32 -0
  14. data/ext/v8/upstream/3.1.8/build/common.gypi +82 -0
  15. data/ext/v8/upstream/3.1.8/build/gyp_v8 +145 -0
  16. data/ext/v8/upstream/3.1.8/include/v8-debug.h +384 -0
  17. data/ext/v8/upstream/3.1.8/include/v8-preparser.h +116 -0
  18. data/ext/v8/upstream/3.1.8/include/v8-profiler.h +426 -0
  19. data/ext/v8/upstream/3.1.8/include/v8-testing.h +99 -0
  20. data/ext/v8/upstream/3.1.8/include/v8.h +3846 -0
  21. data/ext/v8/upstream/3.1.8/include/v8stdint.h +53 -0
  22. data/ext/v8/upstream/3.1.8/preparser/preparser-process.cc +206 -0
  23. data/ext/v8/upstream/3.1.8/src/SConscript +356 -0
  24. data/ext/v8/upstream/3.1.8/src/accessors.cc +907 -0
  25. data/ext/v8/upstream/3.1.8/src/accessors.h +121 -0
  26. data/ext/v8/upstream/3.1.8/src/allocation.cc +204 -0
  27. data/ext/v8/upstream/3.1.8/src/allocation.h +176 -0
  28. data/ext/v8/upstream/3.1.8/src/api.cc +5191 -0
  29. data/ext/v8/upstream/3.1.8/src/api.h +508 -0
  30. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/apinatives.js +0 -0
  31. data/ext/v8/upstream/3.1.8/src/apiutils.h +80 -0
  32. data/ext/v8/upstream/3.1.8/src/arguments.h +105 -0
  33. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm-inl.h +352 -0
  34. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.cc +2756 -0
  35. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.h +1294 -0
  36. data/ext/v8/upstream/3.1.8/src/arm/builtins-arm.cc +1628 -0
  37. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.cc +6783 -0
  38. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.h +657 -0
  39. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm-inl.h +48 -0
  40. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.cc +7403 -0
  41. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.h +595 -0
  42. data/ext/v8/upstream/3.1.8/src/arm/constants-arm.cc +152 -0
  43. data/ext/v8/upstream/3.1.8/src/arm/constants-arm.h +769 -0
  44. data/ext/v8/upstream/3.1.8/src/arm/cpu-arm.cc +147 -0
  45. data/ext/v8/upstream/3.1.8/src/arm/debug-arm.cc +315 -0
  46. data/ext/v8/upstream/3.1.8/src/arm/deoptimizer-arm.cc +700 -0
  47. data/ext/v8/upstream/3.1.8/src/arm/disasm-arm.cc +1439 -0
  48. data/ext/v8/upstream/3.1.8/src/arm/frames-arm.cc +45 -0
  49. data/ext/v8/upstream/3.1.8/src/arm/frames-arm.h +168 -0
  50. data/ext/v8/upstream/3.1.8/src/arm/full-codegen-arm.cc +4230 -0
  51. data/ext/v8/upstream/3.1.8/src/arm/ic-arm.cc +1799 -0
  52. data/ext/v8/upstream/3.1.8/src/arm/jump-target-arm.cc +174 -0
  53. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.cc +2041 -0
  54. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.h +2046 -0
  55. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.cc +3822 -0
  56. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.h +312 -0
  57. data/ext/v8/upstream/3.1.8/src/arm/lithium-gap-resolver-arm.cc +303 -0
  58. data/ext/v8/upstream/3.1.8/src/arm/lithium-gap-resolver-arm.h +84 -0
  59. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.cc +2701 -0
  60. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.h +1015 -0
  61. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.cc +1280 -0
  62. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.h +252 -0
  63. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/register-allocator-arm-inl.h +0 -0
  64. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/register-allocator-arm.cc +0 -0
  65. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/register-allocator-arm.h +0 -0
  66. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.cc +3165 -0
  67. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.h +402 -0
  68. data/ext/v8/upstream/3.1.8/src/arm/stub-cache-arm.cc +4077 -0
  69. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/virtual-frame-arm-inl.h +0 -0
  70. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.cc +843 -0
  71. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.h +520 -0
  72. data/ext/v8/upstream/3.1.8/src/array.js +1231 -0
  73. data/ext/v8/upstream/3.1.8/src/assembler.cc +973 -0
  74. data/ext/v8/upstream/3.1.8/src/assembler.h +787 -0
  75. data/ext/v8/upstream/3.1.8/src/ast-inl.h +107 -0
  76. data/ext/v8/upstream/3.1.8/src/ast.cc +1067 -0
  77. data/ext/v8/upstream/3.1.8/src/ast.h +2177 -0
  78. data/ext/v8/upstream/3.1.8/src/atomicops.h +165 -0
  79. data/ext/v8/upstream/3.1.8/src/atomicops_internals_arm_gcc.h +145 -0
  80. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_gcc.cc +126 -0
  81. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_gcc.h +287 -0
  82. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_macosx.h +301 -0
  83. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_msvc.h +203 -0
  84. data/ext/v8/upstream/3.1.8/src/bignum-dtoa.cc +655 -0
  85. data/ext/v8/upstream/3.1.8/src/bignum-dtoa.h +81 -0
  86. data/ext/v8/upstream/3.1.8/src/bignum.cc +768 -0
  87. data/ext/v8/upstream/3.1.8/src/bignum.h +140 -0
  88. data/ext/v8/upstream/3.1.8/src/bootstrapper.cc +1888 -0
  89. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/bootstrapper.h +0 -0
  90. data/ext/v8/upstream/3.1.8/src/builtins.cc +1586 -0
  91. data/ext/v8/upstream/3.1.8/src/builtins.h +339 -0
  92. data/ext/v8/upstream/3.1.8/src/bytecodes-irregexp.h +105 -0
  93. data/ext/v8/upstream/3.1.8/src/cached-powers.cc +177 -0
  94. data/ext/v8/upstream/3.1.8/src/cached-powers.h +65 -0
  95. data/ext/v8/upstream/3.1.8/src/char-predicates-inl.h +94 -0
  96. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/char-predicates.h +0 -0
  97. data/ext/v8/upstream/3.1.8/src/checks.cc +110 -0
  98. data/ext/v8/upstream/3.1.8/src/checks.h +292 -0
  99. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/circular-queue-inl.h +0 -0
  100. data/ext/v8/upstream/3.1.8/src/circular-queue.cc +122 -0
  101. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/circular-queue.h +0 -0
  102. data/ext/v8/upstream/3.1.8/src/code-stubs.cc +230 -0
  103. data/ext/v8/upstream/3.1.8/src/code-stubs.h +950 -0
  104. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/code.h +0 -0
  105. data/ext/v8/upstream/3.1.8/src/codegen-inl.h +64 -0
  106. data/ext/v8/upstream/3.1.8/src/codegen.cc +495 -0
  107. data/ext/v8/upstream/3.1.8/src/codegen.h +245 -0
  108. data/ext/v8/upstream/3.1.8/src/compilation-cache.cc +654 -0
  109. data/ext/v8/upstream/3.1.8/src/compilation-cache.h +112 -0
  110. data/ext/v8/upstream/3.1.8/src/compiler.cc +806 -0
  111. data/ext/v8/upstream/3.1.8/src/compiler.h +290 -0
  112. data/ext/v8/upstream/3.1.8/src/contexts.cc +320 -0
  113. data/ext/v8/upstream/3.1.8/src/contexts.h +376 -0
  114. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/conversions-inl.h +0 -0
  115. data/ext/v8/upstream/3.1.8/src/conversions.cc +1069 -0
  116. data/ext/v8/upstream/3.1.8/src/conversions.h +122 -0
  117. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/counters.cc +0 -0
  118. data/ext/v8/upstream/3.1.8/src/counters.h +242 -0
  119. data/ext/v8/upstream/3.1.8/src/cpu-profiler-inl.h +100 -0
  120. data/ext/v8/upstream/3.1.8/src/cpu-profiler.cc +554 -0
  121. data/ext/v8/upstream/3.1.8/src/cpu-profiler.h +291 -0
  122. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/cpu.h +0 -0
  123. data/ext/v8/upstream/3.1.8/src/d8-debug.cc +367 -0
  124. data/ext/v8/upstream/3.1.8/src/d8-debug.h +157 -0
  125. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/d8-posix.cc +0 -0
  126. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/d8-readline.cc +0 -0
  127. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/d8-windows.cc +0 -0
  128. data/ext/v8/upstream/3.1.8/src/d8.cc +792 -0
  129. data/ext/v8/upstream/3.1.8/src/d8.gyp +85 -0
  130. data/ext/v8/upstream/3.1.8/src/d8.h +231 -0
  131. data/ext/v8/upstream/3.1.8/src/d8.js +2798 -0
  132. data/ext/v8/upstream/3.1.8/src/data-flow.cc +545 -0
  133. data/ext/v8/upstream/3.1.8/src/data-flow.h +379 -0
  134. data/ext/v8/upstream/3.1.8/src/date.js +1103 -0
  135. data/ext/v8/upstream/3.1.8/src/dateparser-inl.h +125 -0
  136. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/dateparser.cc +0 -0
  137. data/ext/v8/upstream/3.1.8/src/dateparser.h +263 -0
  138. data/ext/v8/upstream/3.1.8/src/debug-agent.cc +446 -0
  139. data/ext/v8/upstream/3.1.8/src/debug-agent.h +131 -0
  140. data/ext/v8/upstream/3.1.8/src/debug-debugger.js +2569 -0
  141. data/ext/v8/upstream/3.1.8/src/debug.cc +3085 -0
  142. data/ext/v8/upstream/3.1.8/src/debug.h +1025 -0
  143. data/ext/v8/upstream/3.1.8/src/deoptimizer.cc +1185 -0
  144. data/ext/v8/upstream/3.1.8/src/deoptimizer.h +529 -0
  145. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/disasm.h +0 -0
  146. data/ext/v8/upstream/3.1.8/src/disassembler.cc +338 -0
  147. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/disassembler.h +0 -0
  148. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/diy-fp.cc +0 -0
  149. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/diy-fp.h +0 -0
  150. data/ext/v8/upstream/3.1.8/src/double.h +238 -0
  151. data/ext/v8/upstream/3.1.8/src/dtoa.cc +103 -0
  152. data/ext/v8/upstream/3.1.8/src/dtoa.h +85 -0
  153. data/ext/v8/upstream/3.1.8/src/execution.cc +735 -0
  154. data/ext/v8/upstream/3.1.8/src/execution.h +322 -0
  155. data/ext/v8/upstream/3.1.8/src/extensions/experimental/experimental.gyp +53 -0
  156. data/ext/v8/upstream/3.1.8/src/extensions/experimental/i18n-extension.cc +264 -0
  157. data/ext/v8/upstream/3.1.8/src/extensions/experimental/i18n-extension.h +64 -0
  158. data/ext/v8/upstream/3.1.8/src/extensions/externalize-string-extension.cc +141 -0
  159. data/ext/v8/upstream/3.1.8/src/extensions/externalize-string-extension.h +50 -0
  160. data/ext/v8/upstream/3.1.8/src/extensions/gc-extension.cc +58 -0
  161. data/ext/v8/upstream/3.1.8/src/extensions/gc-extension.h +49 -0
  162. data/ext/v8/upstream/3.1.8/src/factory.cc +1087 -0
  163. data/ext/v8/upstream/3.1.8/src/factory.h +432 -0
  164. data/ext/v8/upstream/3.1.8/src/fast-dtoa.cc +736 -0
  165. data/ext/v8/upstream/3.1.8/src/fast-dtoa.h +83 -0
  166. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/fixed-dtoa.cc +0 -0
  167. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/fixed-dtoa.h +0 -0
  168. data/ext/v8/upstream/3.1.8/src/flag-definitions.h +552 -0
  169. data/ext/v8/upstream/3.1.8/src/flags.cc +551 -0
  170. data/ext/v8/upstream/3.1.8/src/flags.h +79 -0
  171. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/frame-element.cc +0 -0
  172. data/ext/v8/upstream/3.1.8/src/frame-element.h +277 -0
  173. data/ext/v8/upstream/3.1.8/src/frames-inl.h +210 -0
  174. data/ext/v8/upstream/3.1.8/src/frames.cc +1232 -0
  175. data/ext/v8/upstream/3.1.8/src/frames.h +826 -0
  176. data/ext/v8/upstream/3.1.8/src/full-codegen.cc +1382 -0
  177. data/ext/v8/upstream/3.1.8/src/full-codegen.h +751 -0
  178. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.cc +90 -0
  179. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.h +111 -0
  180. data/ext/v8/upstream/3.1.8/src/gdb-jit.cc +1547 -0
  181. data/ext/v8/upstream/3.1.8/src/gdb-jit.h +138 -0
  182. data/ext/v8/upstream/3.1.8/src/global-handles.cc +534 -0
  183. data/ext/v8/upstream/3.1.8/src/global-handles.h +181 -0
  184. data/ext/v8/upstream/3.1.8/src/globals.h +325 -0
  185. data/ext/v8/upstream/3.1.8/src/handles-inl.h +80 -0
  186. data/ext/v8/upstream/3.1.8/src/handles.cc +910 -0
  187. data/ext/v8/upstream/3.1.8/src/handles.h +424 -0
  188. data/ext/v8/upstream/3.1.8/src/hashmap.cc +230 -0
  189. data/ext/v8/upstream/3.1.8/src/hashmap.h +121 -0
  190. data/ext/v8/upstream/3.1.8/src/heap-inl.h +587 -0
  191. data/ext/v8/upstream/3.1.8/src/heap-profiler.cc +1128 -0
  192. data/ext/v8/upstream/3.1.8/src/heap-profiler.h +381 -0
  193. data/ext/v8/upstream/3.1.8/src/heap.cc +5610 -0
  194. data/ext/v8/upstream/3.1.8/src/heap.h +2218 -0
  195. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.cc +1490 -0
  196. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.h +3493 -0
  197. data/ext/v8/upstream/3.1.8/src/hydrogen.cc +6056 -0
  198. data/ext/v8/upstream/3.1.8/src/hydrogen.h +1091 -0
  199. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32-inl.h +429 -0
  200. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.cc +2800 -0
  201. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.h +1093 -0
  202. data/ext/v8/upstream/3.1.8/src/ia32/builtins-ia32.cc +1590 -0
  203. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.cc +6624 -0
  204. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.h +536 -0
  205. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/codegen-ia32-inl.h +0 -0
  206. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.cc +10354 -0
  207. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.h +798 -0
  208. data/ext/v8/upstream/3.1.8/src/ia32/cpu-ia32.cc +87 -0
  209. data/ext/v8/upstream/3.1.8/src/ia32/debug-ia32.cc +309 -0
  210. data/ext/v8/upstream/3.1.8/src/ia32/deoptimizer-ia32.cc +664 -0
  211. data/ext/v8/upstream/3.1.8/src/ia32/disasm-ia32.cc +1597 -0
  212. data/ext/v8/upstream/3.1.8/src/ia32/frames-ia32.cc +45 -0
  213. data/ext/v8/upstream/3.1.8/src/ia32/frames-ia32.h +140 -0
  214. data/ext/v8/upstream/3.1.8/src/ia32/full-codegen-ia32.cc +4278 -0
  215. data/ext/v8/upstream/3.1.8/src/ia32/ic-ia32.cc +1786 -0
  216. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/jump-target-ia32.cc +0 -0
  217. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.cc +3880 -0
  218. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.h +309 -0
  219. data/ext/v8/upstream/3.1.8/src/ia32/lithium-gap-resolver-ia32.cc +460 -0
  220. data/ext/v8/upstream/3.1.8/src/ia32/lithium-gap-resolver-ia32.h +110 -0
  221. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.cc +2095 -0
  222. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.h +2127 -0
  223. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.cc +2031 -0
  224. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.h +798 -0
  225. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.cc +1253 -0
  226. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.h +215 -0
  227. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/register-allocator-ia32-inl.h +0 -0
  228. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/register-allocator-ia32.cc +0 -0
  229. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/register-allocator-ia32.h +0 -0
  230. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/simulator-ia32.cc +0 -0
  231. data/ext/v8/upstream/3.1.8/src/ia32/simulator-ia32.h +72 -0
  232. data/ext/v8/upstream/3.1.8/src/ia32/stub-cache-ia32.cc +3732 -0
  233. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.cc +1360 -0
  234. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.h +646 -0
  235. data/ext/v8/upstream/3.1.8/src/ic-inl.h +129 -0
  236. data/ext/v8/upstream/3.1.8/src/ic.cc +2333 -0
  237. data/ext/v8/upstream/3.1.8/src/ic.h +639 -0
  238. data/ext/v8/upstream/3.1.8/src/inspector.cc +63 -0
  239. data/ext/v8/upstream/3.1.8/src/inspector.h +62 -0
  240. data/ext/v8/upstream/3.1.8/src/interpreter-irregexp.cc +655 -0
  241. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/interpreter-irregexp.h +0 -0
  242. data/ext/v8/upstream/3.1.8/src/json.js +342 -0
  243. data/ext/v8/upstream/3.1.8/src/jsregexp.cc +5340 -0
  244. data/ext/v8/upstream/3.1.8/src/jsregexp.h +1484 -0
  245. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target-heavy-inl.h +0 -0
  246. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.cc +430 -0
  247. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.h +244 -0
  248. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target-inl.h +0 -0
  249. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target-light-inl.h +0 -0
  250. data/ext/v8/upstream/3.1.8/src/jump-target-light.cc +111 -0
  251. data/ext/v8/upstream/3.1.8/src/jump-target-light.h +193 -0
  252. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target.cc +0 -0
  253. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target.h +0 -0
  254. data/ext/v8/upstream/3.1.8/src/list-inl.h +206 -0
  255. data/ext/v8/upstream/3.1.8/src/list.h +164 -0
  256. data/ext/v8/upstream/3.1.8/src/lithium-allocator-inl.h +140 -0
  257. data/ext/v8/upstream/3.1.8/src/lithium-allocator.cc +2093 -0
  258. data/ext/v8/upstream/3.1.8/src/lithium-allocator.h +644 -0
  259. data/ext/v8/upstream/3.1.8/src/lithium.cc +168 -0
  260. data/ext/v8/upstream/3.1.8/src/lithium.h +592 -0
  261. data/ext/v8/upstream/3.1.8/src/liveedit-debugger.js +1082 -0
  262. data/ext/v8/upstream/3.1.8/src/liveedit.cc +1650 -0
  263. data/ext/v8/upstream/3.1.8/src/liveedit.h +174 -0
  264. data/ext/v8/upstream/3.1.8/src/liveobjectlist-inl.h +126 -0
  265. data/ext/v8/upstream/3.1.8/src/liveobjectlist.cc +2527 -0
  266. data/ext/v8/upstream/3.1.8/src/liveobjectlist.h +322 -0
  267. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/log-inl.h +0 -0
  268. data/ext/v8/upstream/3.1.8/src/log-utils.cc +336 -0
  269. data/ext/v8/upstream/3.1.8/src/log-utils.h +232 -0
  270. data/ext/v8/upstream/3.1.8/src/log.cc +1608 -0
  271. data/ext/v8/upstream/3.1.8/src/log.h +379 -0
  272. data/ext/v8/upstream/3.1.8/src/macro-assembler.h +120 -0
  273. data/ext/v8/upstream/3.1.8/src/macros.py +178 -0
  274. data/ext/v8/upstream/3.1.8/src/mark-compact.cc +2957 -0
  275. data/ext/v8/upstream/3.1.8/src/mark-compact.h +433 -0
  276. data/ext/v8/upstream/3.1.8/src/math.js +264 -0
  277. data/ext/v8/upstream/3.1.8/src/memory.h +82 -0
  278. data/ext/v8/upstream/3.1.8/src/messages.cc +164 -0
  279. data/ext/v8/upstream/3.1.8/src/messages.h +114 -0
  280. data/ext/v8/upstream/3.1.8/src/messages.js +1071 -0
  281. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/assembler-mips-inl.h +0 -0
  282. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/assembler-mips.cc +0 -0
  283. data/ext/v8/upstream/3.1.8/src/mips/assembler-mips.h +667 -0
  284. data/ext/v8/upstream/3.1.8/src/mips/builtins-mips.cc +205 -0
  285. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/codegen-mips-inl.h +0 -0
  286. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/codegen-mips.cc +0 -0
  287. data/ext/v8/upstream/3.1.8/src/mips/codegen-mips.h +431 -0
  288. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/constants-mips.cc +0 -0
  289. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/constants-mips.h +0 -0
  290. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/cpu-mips.cc +0 -0
  291. data/ext/v8/upstream/3.1.8/src/mips/debug-mips.cc +127 -0
  292. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/disasm-mips.cc +0 -0
  293. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/fast-codegen-mips.cc +0 -0
  294. data/ext/v8/upstream/3.1.8/src/mips/frames-mips.cc +96 -0
  295. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/frames-mips.h +0 -0
  296. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/full-codegen-mips.cc +0 -0
  297. data/ext/v8/upstream/3.1.8/src/mips/ic-mips.cc +208 -0
  298. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/jump-target-mips.cc +0 -0
  299. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/macro-assembler-mips.cc +0 -0
  300. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/macro-assembler-mips.h +0 -0
  301. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/register-allocator-mips-inl.h +0 -0
  302. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/register-allocator-mips.cc +0 -0
  303. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/register-allocator-mips.h +0 -0
  304. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.cc +1650 -0
  305. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.h +311 -0
  306. data/ext/v8/upstream/3.1.8/src/mips/stub-cache-mips.cc +418 -0
  307. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/virtual-frame-mips.cc +0 -0
  308. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/virtual-frame-mips.h +0 -0
  309. data/ext/v8/upstream/3.1.8/src/mirror-debugger.js +2380 -0
  310. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mksnapshot.cc +0 -0
  311. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/natives.h +0 -0
  312. data/ext/v8/upstream/3.1.8/src/objects-debug.cc +722 -0
  313. data/ext/v8/upstream/3.1.8/src/objects-inl.h +3946 -0
  314. data/ext/v8/upstream/3.1.8/src/objects-printer.cc +801 -0
  315. data/ext/v8/upstream/3.1.8/src/objects-visiting.cc +142 -0
  316. data/ext/v8/upstream/3.1.8/src/objects-visiting.h +401 -0
  317. data/ext/v8/upstream/3.1.8/src/objects.cc +10044 -0
  318. data/ext/v8/upstream/3.1.8/src/objects.h +6571 -0
  319. data/ext/v8/upstream/3.1.8/src/parser.cc +5165 -0
  320. data/ext/v8/upstream/3.1.8/src/parser.h +802 -0
  321. data/ext/v8/upstream/3.1.8/src/platform-cygwin.cc +745 -0
  322. data/ext/v8/upstream/3.1.8/src/platform-freebsd.cc +702 -0
  323. data/ext/v8/upstream/3.1.8/src/platform-linux.cc +981 -0
  324. data/ext/v8/upstream/3.1.8/src/platform-macos.cc +732 -0
  325. data/ext/v8/upstream/3.1.8/src/platform-nullos.cc +498 -0
  326. data/ext/v8/upstream/3.1.8/src/platform-openbsd.cc +657 -0
  327. data/ext/v8/upstream/3.1.8/src/platform-posix.cc +399 -0
  328. data/ext/v8/upstream/3.1.8/src/platform-solaris.cc +714 -0
  329. data/ext/v8/upstream/3.1.8/src/platform-win32.cc +1974 -0
  330. data/ext/v8/upstream/3.1.8/src/platform.h +636 -0
  331. data/ext/v8/upstream/3.1.8/src/preparse-data.cc +183 -0
  332. data/ext/v8/upstream/3.1.8/src/preparse-data.h +249 -0
  333. data/ext/v8/upstream/3.1.8/src/preparser-api.cc +213 -0
  334. data/ext/v8/upstream/3.1.8/src/preparser.cc +1205 -0
  335. data/ext/v8/upstream/3.1.8/src/preparser.h +278 -0
  336. data/ext/v8/upstream/3.1.8/src/prettyprinter.cc +1539 -0
  337. data/ext/v8/upstream/3.1.8/src/prettyprinter.h +223 -0
  338. data/ext/v8/upstream/3.1.8/src/profile-generator-inl.h +128 -0
  339. data/ext/v8/upstream/3.1.8/src/profile-generator.cc +2899 -0
  340. data/ext/v8/upstream/3.1.8/src/profile-generator.h +1151 -0
  341. data/ext/v8/upstream/3.1.8/src/property.cc +96 -0
  342. data/ext/v8/upstream/3.1.8/src/property.h +337 -0
  343. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/regexp-macro-assembler-irregexp-inl.h +0 -0
  344. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-irregexp.cc +470 -0
  345. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-irregexp.h +142 -0
  346. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-tracer.cc +373 -0
  347. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-tracer.h +104 -0
  348. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.cc +257 -0
  349. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.h +231 -0
  350. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/regexp-stack.cc +0 -0
  351. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/regexp-stack.h +0 -0
  352. data/ext/v8/upstream/3.1.8/src/regexp.js +483 -0
  353. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/register-allocator-inl.h +0 -0
  354. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/register-allocator.cc +0 -0
  355. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/register-allocator.h +0 -0
  356. data/ext/v8/upstream/3.1.8/src/rewriter.cc +1023 -0
  357. data/ext/v8/upstream/3.1.8/src/rewriter.h +59 -0
  358. data/ext/v8/upstream/3.1.8/src/runtime-profiler.cc +443 -0
  359. data/ext/v8/upstream/3.1.8/src/runtime-profiler.h +77 -0
  360. data/ext/v8/upstream/3.1.8/src/runtime.cc +11592 -0
  361. data/ext/v8/upstream/3.1.8/src/runtime.h +582 -0
  362. data/ext/v8/upstream/3.1.8/src/runtime.js +643 -0
  363. data/ext/v8/upstream/3.1.8/src/safepoint-table.cc +253 -0
  364. data/ext/v8/upstream/3.1.8/src/safepoint-table.h +263 -0
  365. data/ext/v8/upstream/3.1.8/src/scanner-base.cc +971 -0
  366. data/ext/v8/upstream/3.1.8/src/scanner-base.h +653 -0
  367. data/ext/v8/upstream/3.1.8/src/scanner.cc +586 -0
  368. data/ext/v8/upstream/3.1.8/src/scanner.h +194 -0
  369. data/ext/v8/upstream/3.1.8/src/scopeinfo.cc +636 -0
  370. data/ext/v8/upstream/3.1.8/src/scopeinfo.h +238 -0
  371. data/ext/v8/upstream/3.1.8/src/scopes.cc +1063 -0
  372. data/ext/v8/upstream/3.1.8/src/scopes.h +494 -0
  373. data/ext/v8/upstream/3.1.8/src/serialize.cc +1535 -0
  374. data/ext/v8/upstream/3.1.8/src/serialize.h +584 -0
  375. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/shell.h +0 -0
  376. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/simulator.h +0 -0
  377. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/smart-pointer.h +0 -0
  378. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/snapshot-common.cc +0 -0
  379. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/snapshot-empty.cc +0 -0
  380. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/snapshot.h +0 -0
  381. data/ext/v8/upstream/3.1.8/src/spaces-inl.h +524 -0
  382. data/ext/v8/upstream/3.1.8/src/spaces.cc +3254 -0
  383. data/ext/v8/upstream/3.1.8/src/spaces.h +2362 -0
  384. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/splay-tree-inl.h +0 -0
  385. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/splay-tree.h +0 -0
  386. data/ext/v8/upstream/3.1.8/src/string-search.cc +40 -0
  387. data/ext/v8/upstream/3.1.8/src/string-search.h +567 -0
  388. data/ext/v8/upstream/3.1.8/src/string-stream.cc +584 -0
  389. data/ext/v8/upstream/3.1.8/src/string-stream.h +191 -0
  390. data/ext/v8/upstream/3.1.8/src/string.js +915 -0
  391. data/ext/v8/upstream/3.1.8/src/strtod.cc +440 -0
  392. data/ext/v8/upstream/3.1.8/src/strtod.h +40 -0
  393. data/ext/v8/upstream/3.1.8/src/stub-cache.cc +1878 -0
  394. data/ext/v8/upstream/3.1.8/src/stub-cache.h +849 -0
  395. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/third_party/valgrind/valgrind.h +0 -0
  396. data/ext/v8/upstream/3.1.8/src/token.cc +63 -0
  397. data/ext/v8/upstream/3.1.8/src/token.h +288 -0
  398. data/ext/v8/upstream/3.1.8/src/top.cc +1152 -0
  399. data/ext/v8/upstream/3.1.8/src/top.h +608 -0
  400. data/ext/v8/upstream/3.1.8/src/type-info.cc +406 -0
  401. data/ext/v8/upstream/3.1.8/src/type-info.h +283 -0
  402. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/unbound-queue-inl.h +0 -0
  403. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/unbound-queue.h +0 -0
  404. data/ext/v8/upstream/3.1.8/src/unicode-inl.h +238 -0
  405. data/ext/v8/upstream/3.1.8/src/unicode.cc +1624 -0
  406. data/ext/v8/upstream/3.1.8/src/unicode.h +280 -0
  407. data/ext/v8/upstream/3.1.8/src/uri.js +402 -0
  408. data/ext/v8/upstream/3.1.8/src/utils.cc +371 -0
  409. data/ext/v8/upstream/3.1.8/src/utils.h +793 -0
  410. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/v8-counters.cc +0 -0
  411. data/ext/v8/upstream/3.1.8/src/v8-counters.h +290 -0
  412. data/ext/v8/upstream/3.1.8/src/v8.cc +270 -0
  413. data/ext/v8/upstream/3.1.8/src/v8.h +127 -0
  414. data/ext/v8/upstream/3.1.8/src/v8checks.h +64 -0
  415. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/v8dll-main.cc +0 -0
  416. data/ext/v8/upstream/3.1.8/src/v8globals.h +480 -0
  417. data/ext/v8/upstream/3.1.8/src/v8natives.js +1252 -0
  418. data/ext/v8/upstream/3.1.8/src/v8preparserdll-main.cc +39 -0
  419. data/ext/v8/upstream/3.1.8/src/v8threads.cc +440 -0
  420. data/ext/v8/upstream/3.1.8/src/v8threads.h +157 -0
  421. data/ext/v8/upstream/3.1.8/src/v8utils.h +354 -0
  422. data/ext/v8/upstream/3.1.8/src/variables.cc +132 -0
  423. data/ext/v8/upstream/3.1.8/src/variables.h +212 -0
  424. data/ext/v8/upstream/3.1.8/src/version.cc +95 -0
  425. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/version.h +0 -0
  426. data/ext/v8/upstream/3.1.8/src/virtual-frame-heavy-inl.h +190 -0
  427. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-heavy.cc +0 -0
  428. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-inl.h +0 -0
  429. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-light-inl.h +0 -0
  430. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-light.cc +0 -0
  431. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame.cc +0 -0
  432. data/ext/v8/upstream/3.1.8/src/virtual-frame.h +59 -0
  433. data/ext/v8/upstream/3.1.8/src/vm-state-inl.h +134 -0
  434. data/ext/v8/upstream/3.1.8/src/vm-state.h +68 -0
  435. data/ext/v8/upstream/3.1.8/src/win32-headers.h +95 -0
  436. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64-inl.h +455 -0
  437. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.cc +3162 -0
  438. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.h +1584 -0
  439. data/ext/v8/upstream/3.1.8/src/x64/builtins-x64.cc +1492 -0
  440. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.cc +5150 -0
  441. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.h +519 -0
  442. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64-inl.h +46 -0
  443. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.cc +8835 -0
  444. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.h +750 -0
  445. data/ext/v8/upstream/3.1.8/src/x64/cpu-x64.cc +86 -0
  446. data/ext/v8/upstream/3.1.8/src/x64/debug-x64.cc +316 -0
  447. data/ext/v8/upstream/3.1.8/src/x64/deoptimizer-x64.cc +781 -0
  448. data/ext/v8/upstream/3.1.8/src/x64/disasm-x64.cc +1737 -0
  449. data/ext/v8/upstream/3.1.8/src/x64/frames-x64.cc +45 -0
  450. data/ext/v8/upstream/3.1.8/src/x64/frames-x64.h +130 -0
  451. data/ext/v8/upstream/3.1.8/src/x64/full-codegen-x64.cc +3984 -0
  452. data/ext/v8/upstream/3.1.8/src/x64/ic-x64.cc +1761 -0
  453. data/ext/v8/upstream/3.1.8/src/x64/jump-target-x64.cc +437 -0
  454. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.cc +3639 -0
  455. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.h +305 -0
  456. data/ext/v8/upstream/3.1.8/src/x64/lithium-gap-resolver-x64.cc +320 -0
  457. data/ext/v8/upstream/3.1.8/src/x64/lithium-gap-resolver-x64.h +74 -0
  458. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.cc +2044 -0
  459. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.h +2052 -0
  460. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.cc +2660 -0
  461. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.h +1852 -0
  462. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.cc +1382 -0
  463. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.h +278 -0
  464. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/register-allocator-x64-inl.h +0 -0
  465. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/register-allocator-x64.cc +0 -0
  466. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/register-allocator-x64.h +0 -0
  467. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/simulator-x64.cc +0 -0
  468. data/ext/v8/upstream/3.1.8/src/x64/simulator-x64.h +71 -0
  469. data/ext/v8/upstream/3.1.8/src/x64/stub-cache-x64.cc +3509 -0
  470. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.cc +1292 -0
  471. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.h +593 -0
  472. data/ext/v8/upstream/3.1.8/src/zone-inl.h +83 -0
  473. data/ext/v8/upstream/3.1.8/src/zone.cc +195 -0
  474. data/ext/v8/upstream/3.1.8/src/zone.h +233 -0
  475. data/ext/v8/upstream/3.1.8/tools/codemap.js +265 -0
  476. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/consarray.js +0 -0
  477. data/ext/v8/upstream/3.1.8/tools/csvparser.js +78 -0
  478. data/ext/v8/upstream/3.1.8/tools/disasm.py +92 -0
  479. data/ext/v8/upstream/3.1.8/tools/gc-nvp-trace-processor.py +328 -0
  480. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/generate-ten-powers.scm +0 -0
  481. data/ext/v8/upstream/3.1.8/tools/grokdump.py +840 -0
  482. data/ext/v8/upstream/3.1.8/tools/gyp/v8.gyp +869 -0
  483. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/js2c.py +0 -0
  484. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/jsmin.py +0 -0
  485. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/linux-tick-processor +0 -0
  486. data/ext/v8/upstream/3.1.8/tools/ll_prof.py +919 -0
  487. data/ext/v8/upstream/3.1.8/tools/logreader.js +185 -0
  488. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/mac-nm +0 -0
  489. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/mac-tick-processor +0 -0
  490. data/ext/v8/upstream/3.1.8/tools/oom_dump/README +31 -0
  491. data/ext/v8/upstream/3.1.8/tools/oom_dump/SConstruct +42 -0
  492. data/ext/v8/upstream/3.1.8/tools/oom_dump/oom_dump.cc +288 -0
  493. data/ext/v8/upstream/3.1.8/tools/presubmit.py +305 -0
  494. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/process-heap-prof.py +0 -0
  495. data/ext/v8/upstream/3.1.8/tools/profile.js +751 -0
  496. data/ext/v8/upstream/3.1.8/tools/profile_view.js +219 -0
  497. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/run-valgrind.py +0 -0
  498. data/ext/v8/upstream/3.1.8/tools/splaytree.js +316 -0
  499. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/stats-viewer.py +0 -0
  500. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/tickprocessor-driver.js +0 -0
  501. data/ext/v8/upstream/3.1.8/tools/tickprocessor.js +863 -0
  502. data/ext/v8/upstream/3.1.8/tools/utils.py +96 -0
  503. data/ext/v8/upstream/3.1.8/tools/visual_studio/README.txt +70 -0
  504. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/arm.vsprops +0 -0
  505. data/ext/v8/upstream/3.1.8/tools/visual_studio/common.vsprops +34 -0
  506. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8.vcproj +0 -0
  507. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8_arm.vcproj +0 -0
  508. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8_x64.vcproj +0 -0
  509. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8js2c.cmd +0 -0
  510. data/ext/v8/upstream/3.1.8/tools/visual_studio/debug.vsprops +17 -0
  511. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/ia32.vsprops +0 -0
  512. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/js2c.cmd +0 -0
  513. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/release.vsprops +0 -0
  514. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8.sln +0 -0
  515. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8.vcproj +0 -0
  516. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_arm.sln +0 -0
  517. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_arm.vcproj +0 -0
  518. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base.vcproj +1296 -0
  519. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_arm.vcproj +1234 -0
  520. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_x64.vcproj +1296 -0
  521. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_cctest.vcproj +0 -0
  522. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_cctest_arm.vcproj +0 -0
  523. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_cctest_x64.vcproj +0 -0
  524. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_mksnapshot.vcproj +0 -0
  525. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_mksnapshot_x64.vcproj +0 -0
  526. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_process_sample.vcproj +0 -0
  527. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_process_sample_arm.vcproj +0 -0
  528. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_process_sample_x64.vcproj +0 -0
  529. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_shell_sample.vcproj +147 -0
  530. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_shell_sample_arm.vcproj +147 -0
  531. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_shell_sample_x64.vcproj +163 -0
  532. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot.vcproj +0 -0
  533. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot_cc.vcproj +0 -0
  534. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot_cc_x64.vcproj +0 -0
  535. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot_x64.vcproj +0 -0
  536. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_x64.sln +0 -0
  537. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_x64.vcproj +0 -0
  538. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/x64.vsprops +0 -0
  539. data/ext/v8/upstream/3.1.8/tools/windows-tick-processor.bat +30 -0
  540. data/ext/v8/upstream/Makefile +2 -1
  541. data/ext/v8/v8_template.cpp +2 -2
  542. data/lib/v8/version.rb +1 -1
  543. data/spec/redjs/jsapi_spec.rb +2 -2
  544. metadata +552 -490
  545. data/ext/v8/upstream/2.3.3/.gitignore +0 -26
  546. data/ext/v8/upstream/2.3.3/AUTHORS +0 -31
  547. data/ext/v8/upstream/2.3.3/ChangeLog +0 -1916
  548. data/ext/v8/upstream/2.3.3/LICENSE +0 -55
  549. data/ext/v8/upstream/2.3.3/SConstruct +0 -1154
  550. data/ext/v8/upstream/2.3.3/include/v8-debug.h +0 -381
  551. data/ext/v8/upstream/2.3.3/include/v8-profiler.h +0 -353
  552. data/ext/v8/upstream/2.3.3/include/v8.h +0 -3616
  553. data/ext/v8/upstream/2.3.3/src/SConscript +0 -330
  554. data/ext/v8/upstream/2.3.3/src/accessors.cc +0 -661
  555. data/ext/v8/upstream/2.3.3/src/accessors.h +0 -114
  556. data/ext/v8/upstream/2.3.3/src/allocation.cc +0 -198
  557. data/ext/v8/upstream/2.3.3/src/allocation.h +0 -169
  558. data/ext/v8/upstream/2.3.3/src/api.cc +0 -4795
  559. data/ext/v8/upstream/2.3.3/src/api.h +0 -485
  560. data/ext/v8/upstream/2.3.3/src/apiutils.h +0 -69
  561. data/ext/v8/upstream/2.3.3/src/arguments.h +0 -96
  562. data/ext/v8/upstream/2.3.3/src/arm/assembler-arm-inl.h +0 -305
  563. data/ext/v8/upstream/2.3.3/src/arm/assembler-arm.cc +0 -2580
  564. data/ext/v8/upstream/2.3.3/src/arm/assembler-arm.h +0 -1275
  565. data/ext/v8/upstream/2.3.3/src/arm/builtins-arm.cc +0 -1320
  566. data/ext/v8/upstream/2.3.3/src/arm/codegen-arm-inl.h +0 -48
  567. data/ext/v8/upstream/2.3.3/src/arm/codegen-arm.cc +0 -11398
  568. data/ext/v8/upstream/2.3.3/src/arm/codegen-arm.h +0 -1102
  569. data/ext/v8/upstream/2.3.3/src/arm/constants-arm.cc +0 -154
  570. data/ext/v8/upstream/2.3.3/src/arm/constants-arm.h +0 -388
  571. data/ext/v8/upstream/2.3.3/src/arm/cpu-arm.cc +0 -142
  572. data/ext/v8/upstream/2.3.3/src/arm/debug-arm.cc +0 -309
  573. data/ext/v8/upstream/2.3.3/src/arm/disasm-arm.cc +0 -1459
  574. data/ext/v8/upstream/2.3.3/src/arm/fast-codegen-arm.cc +0 -241
  575. data/ext/v8/upstream/2.3.3/src/arm/frames-arm.cc +0 -123
  576. data/ext/v8/upstream/2.3.3/src/arm/frames-arm.h +0 -162
  577. data/ext/v8/upstream/2.3.3/src/arm/full-codegen-arm.cc +0 -3178
  578. data/ext/v8/upstream/2.3.3/src/arm/ic-arm.cc +0 -2258
  579. data/ext/v8/upstream/2.3.3/src/arm/jump-target-arm.cc +0 -164
  580. data/ext/v8/upstream/2.3.3/src/arm/macro-assembler-arm.cc +0 -1892
  581. data/ext/v8/upstream/2.3.3/src/arm/macro-assembler-arm.h +0 -727
  582. data/ext/v8/upstream/2.3.3/src/arm/regexp-macro-assembler-arm.cc +0 -1261
  583. data/ext/v8/upstream/2.3.3/src/arm/regexp-macro-assembler-arm.h +0 -266
  584. data/ext/v8/upstream/2.3.3/src/arm/simulator-arm.cc +0 -2822
  585. data/ext/v8/upstream/2.3.3/src/arm/simulator-arm.h +0 -361
  586. data/ext/v8/upstream/2.3.3/src/arm/stub-cache-arm.cc +0 -2387
  587. data/ext/v8/upstream/2.3.3/src/arm/virtual-frame-arm.cc +0 -834
  588. data/ext/v8/upstream/2.3.3/src/arm/virtual-frame-arm.h +0 -519
  589. data/ext/v8/upstream/2.3.3/src/array.js +0 -1127
  590. data/ext/v8/upstream/2.3.3/src/assembler.cc +0 -801
  591. data/ext/v8/upstream/2.3.3/src/assembler.h +0 -573
  592. data/ext/v8/upstream/2.3.3/src/ast-inl.h +0 -81
  593. data/ext/v8/upstream/2.3.3/src/ast.cc +0 -1152
  594. data/ext/v8/upstream/2.3.3/src/ast.h +0 -2106
  595. data/ext/v8/upstream/2.3.3/src/bootstrapper.cc +0 -1819
  596. data/ext/v8/upstream/2.3.3/src/builtins.cc +0 -1529
  597. data/ext/v8/upstream/2.3.3/src/builtins.h +0 -263
  598. data/ext/v8/upstream/2.3.3/src/bytecodes-irregexp.h +0 -104
  599. data/ext/v8/upstream/2.3.3/src/cached-powers.h +0 -119
  600. data/ext/v8/upstream/2.3.3/src/char-predicates-inl.h +0 -86
  601. data/ext/v8/upstream/2.3.3/src/checks.cc +0 -100
  602. data/ext/v8/upstream/2.3.3/src/checks.h +0 -310
  603. data/ext/v8/upstream/2.3.3/src/circular-queue.cc +0 -121
  604. data/ext/v8/upstream/2.3.3/src/code-stubs.cc +0 -177
  605. data/ext/v8/upstream/2.3.3/src/code-stubs.h +0 -177
  606. data/ext/v8/upstream/2.3.3/src/codegen-inl.h +0 -60
  607. data/ext/v8/upstream/2.3.3/src/codegen.cc +0 -516
  608. data/ext/v8/upstream/2.3.3/src/codegen.h +0 -897
  609. data/ext/v8/upstream/2.3.3/src/compilation-cache.cc +0 -562
  610. data/ext/v8/upstream/2.3.3/src/compilation-cache.h +0 -102
  611. data/ext/v8/upstream/2.3.3/src/compiler.cc +0 -654
  612. data/ext/v8/upstream/2.3.3/src/compiler.h +0 -299
  613. data/ext/v8/upstream/2.3.3/src/contexts.cc +0 -256
  614. data/ext/v8/upstream/2.3.3/src/contexts.h +0 -342
  615. data/ext/v8/upstream/2.3.3/src/conversions.cc +0 -1119
  616. data/ext/v8/upstream/2.3.3/src/conversions.h +0 -123
  617. data/ext/v8/upstream/2.3.3/src/counters.h +0 -239
  618. data/ext/v8/upstream/2.3.3/src/cpu-profiler-inl.h +0 -100
  619. data/ext/v8/upstream/2.3.3/src/cpu-profiler.cc +0 -538
  620. data/ext/v8/upstream/2.3.3/src/cpu-profiler.h +0 -285
  621. data/ext/v8/upstream/2.3.3/src/d8-debug.cc +0 -356
  622. data/ext/v8/upstream/2.3.3/src/d8-debug.h +0 -155
  623. data/ext/v8/upstream/2.3.3/src/d8.cc +0 -783
  624. data/ext/v8/upstream/2.3.3/src/d8.h +0 -227
  625. data/ext/v8/upstream/2.3.3/src/d8.js +0 -1683
  626. data/ext/v8/upstream/2.3.3/src/data-flow.cc +0 -758
  627. data/ext/v8/upstream/2.3.3/src/data-flow.h +0 -278
  628. data/ext/v8/upstream/2.3.3/src/date.js +0 -1059
  629. data/ext/v8/upstream/2.3.3/src/dateparser-inl.h +0 -123
  630. data/ext/v8/upstream/2.3.3/src/dateparser.h +0 -244
  631. data/ext/v8/upstream/2.3.3/src/debug-agent.cc +0 -427
  632. data/ext/v8/upstream/2.3.3/src/debug-agent.h +0 -129
  633. data/ext/v8/upstream/2.3.3/src/debug-debugger.js +0 -2227
  634. data/ext/v8/upstream/2.3.3/src/debug.cc +0 -3005
  635. data/ext/v8/upstream/2.3.3/src/debug.h +0 -993
  636. data/ext/v8/upstream/2.3.3/src/disassembler.cc +0 -312
  637. data/ext/v8/upstream/2.3.3/src/double.h +0 -169
  638. data/ext/v8/upstream/2.3.3/src/dtoa-config.c +0 -92
  639. data/ext/v8/upstream/2.3.3/src/dtoa.cc +0 -77
  640. data/ext/v8/upstream/2.3.3/src/dtoa.h +0 -81
  641. data/ext/v8/upstream/2.3.3/src/execution.cc +0 -809
  642. data/ext/v8/upstream/2.3.3/src/execution.h +0 -336
  643. data/ext/v8/upstream/2.3.3/src/factory.cc +0 -1003
  644. data/ext/v8/upstream/2.3.3/src/factory.h +0 -410
  645. data/ext/v8/upstream/2.3.3/src/fast-codegen.cc +0 -746
  646. data/ext/v8/upstream/2.3.3/src/fast-codegen.h +0 -161
  647. data/ext/v8/upstream/2.3.3/src/fast-dtoa.cc +0 -505
  648. data/ext/v8/upstream/2.3.3/src/fast-dtoa.h +0 -58
  649. data/ext/v8/upstream/2.3.3/src/flag-definitions.h +0 -455
  650. data/ext/v8/upstream/2.3.3/src/flags.cc +0 -551
  651. data/ext/v8/upstream/2.3.3/src/flags.h +0 -81
  652. data/ext/v8/upstream/2.3.3/src/flow-graph.cc +0 -763
  653. data/ext/v8/upstream/2.3.3/src/flow-graph.h +0 -180
  654. data/ext/v8/upstream/2.3.3/src/frame-element.h +0 -273
  655. data/ext/v8/upstream/2.3.3/src/frames-inl.h +0 -217
  656. data/ext/v8/upstream/2.3.3/src/frames.cc +0 -826
  657. data/ext/v8/upstream/2.3.3/src/frames.h +0 -682
  658. data/ext/v8/upstream/2.3.3/src/full-codegen.cc +0 -1443
  659. data/ext/v8/upstream/2.3.3/src/full-codegen.h +0 -548
  660. data/ext/v8/upstream/2.3.3/src/func-name-inferrer.cc +0 -76
  661. data/ext/v8/upstream/2.3.3/src/func-name-inferrer.h +0 -135
  662. data/ext/v8/upstream/2.3.3/src/global-handles.cc +0 -520
  663. data/ext/v8/upstream/2.3.3/src/global-handles.h +0 -180
  664. data/ext/v8/upstream/2.3.3/src/globals.h +0 -669
  665. data/ext/v8/upstream/2.3.3/src/handles-inl.h +0 -76
  666. data/ext/v8/upstream/2.3.3/src/handles.cc +0 -825
  667. data/ext/v8/upstream/2.3.3/src/handles.h +0 -393
  668. data/ext/v8/upstream/2.3.3/src/hashmap.cc +0 -226
  669. data/ext/v8/upstream/2.3.3/src/hashmap.h +0 -120
  670. data/ext/v8/upstream/2.3.3/src/heap-inl.h +0 -493
  671. data/ext/v8/upstream/2.3.3/src/heap-profiler.cc +0 -779
  672. data/ext/v8/upstream/2.3.3/src/heap-profiler.h +0 -323
  673. data/ext/v8/upstream/2.3.3/src/heap.cc +0 -4994
  674. data/ext/v8/upstream/2.3.3/src/heap.h +0 -1984
  675. data/ext/v8/upstream/2.3.3/src/ia32/assembler-ia32-inl.h +0 -360
  676. data/ext/v8/upstream/2.3.3/src/ia32/assembler-ia32.cc +0 -2600
  677. data/ext/v8/upstream/2.3.3/src/ia32/assembler-ia32.h +0 -969
  678. data/ext/v8/upstream/2.3.3/src/ia32/builtins-ia32.cc +0 -1261
  679. data/ext/v8/upstream/2.3.3/src/ia32/codegen-ia32.cc +0 -13968
  680. data/ext/v8/upstream/2.3.3/src/ia32/codegen-ia32.h +0 -1097
  681. data/ext/v8/upstream/2.3.3/src/ia32/cpu-ia32.cc +0 -83
  682. data/ext/v8/upstream/2.3.3/src/ia32/debug-ia32.cc +0 -309
  683. data/ext/v8/upstream/2.3.3/src/ia32/disasm-ia32.cc +0 -1471
  684. data/ext/v8/upstream/2.3.3/src/ia32/fast-codegen-ia32.cc +0 -954
  685. data/ext/v8/upstream/2.3.3/src/ia32/fast-codegen-ia32.h +0 -155
  686. data/ext/v8/upstream/2.3.3/src/ia32/frames-ia32.cc +0 -115
  687. data/ext/v8/upstream/2.3.3/src/ia32/frames-ia32.h +0 -135
  688. data/ext/v8/upstream/2.3.3/src/ia32/full-codegen-ia32.cc +0 -3281
  689. data/ext/v8/upstream/2.3.3/src/ia32/ic-ia32.cc +0 -1966
  690. data/ext/v8/upstream/2.3.3/src/ia32/macro-assembler-ia32.cc +0 -1610
  691. data/ext/v8/upstream/2.3.3/src/ia32/macro-assembler-ia32.h +0 -610
  692. data/ext/v8/upstream/2.3.3/src/ia32/regexp-macro-assembler-ia32.cc +0 -1247
  693. data/ext/v8/upstream/2.3.3/src/ia32/regexp-macro-assembler-ia32.h +0 -214
  694. data/ext/v8/upstream/2.3.3/src/ia32/simulator-ia32.h +0 -62
  695. data/ext/v8/upstream/2.3.3/src/ia32/stub-cache-ia32.cc +0 -2750
  696. data/ext/v8/upstream/2.3.3/src/ia32/virtual-frame-ia32.cc +0 -1334
  697. data/ext/v8/upstream/2.3.3/src/ia32/virtual-frame-ia32.h +0 -627
  698. data/ext/v8/upstream/2.3.3/src/ic-inl.h +0 -120
  699. data/ext/v8/upstream/2.3.3/src/ic.cc +0 -1827
  700. data/ext/v8/upstream/2.3.3/src/ic.h +0 -515
  701. data/ext/v8/upstream/2.3.3/src/interpreter-irregexp.cc +0 -646
  702. data/ext/v8/upstream/2.3.3/src/json.js +0 -268
  703. data/ext/v8/upstream/2.3.3/src/jsregexp.cc +0 -5283
  704. data/ext/v8/upstream/2.3.3/src/jsregexp.h +0 -1463
  705. data/ext/v8/upstream/2.3.3/src/jump-target-heavy.cc +0 -429
  706. data/ext/v8/upstream/2.3.3/src/jump-target-heavy.h +0 -244
  707. data/ext/v8/upstream/2.3.3/src/jump-target-light.cc +0 -110
  708. data/ext/v8/upstream/2.3.3/src/jump-target-light.h +0 -192
  709. data/ext/v8/upstream/2.3.3/src/list-inl.h +0 -166
  710. data/ext/v8/upstream/2.3.3/src/list.h +0 -159
  711. data/ext/v8/upstream/2.3.3/src/liveedit-debugger.js +0 -1057
  712. data/ext/v8/upstream/2.3.3/src/liveedit.cc +0 -1480
  713. data/ext/v8/upstream/2.3.3/src/liveedit.h +0 -170
  714. data/ext/v8/upstream/2.3.3/src/log-utils.cc +0 -497
  715. data/ext/v8/upstream/2.3.3/src/log-utils.h +0 -289
  716. data/ext/v8/upstream/2.3.3/src/log.cc +0 -1561
  717. data/ext/v8/upstream/2.3.3/src/log.h +0 -384
  718. data/ext/v8/upstream/2.3.3/src/macro-assembler.h +0 -86
  719. data/ext/v8/upstream/2.3.3/src/macros.py +0 -177
  720. data/ext/v8/upstream/2.3.3/src/mark-compact.cc +0 -2330
  721. data/ext/v8/upstream/2.3.3/src/mark-compact.h +0 -451
  722. data/ext/v8/upstream/2.3.3/src/math.js +0 -264
  723. data/ext/v8/upstream/2.3.3/src/memory.h +0 -74
  724. data/ext/v8/upstream/2.3.3/src/messages.cc +0 -183
  725. data/ext/v8/upstream/2.3.3/src/messages.h +0 -113
  726. data/ext/v8/upstream/2.3.3/src/messages.js +0 -982
  727. data/ext/v8/upstream/2.3.3/src/mips/assembler-mips.h +0 -668
  728. data/ext/v8/upstream/2.3.3/src/mips/builtins-mips.cc +0 -205
  729. data/ext/v8/upstream/2.3.3/src/mips/codegen-mips.h +0 -434
  730. data/ext/v8/upstream/2.3.3/src/mips/debug-mips.cc +0 -131
  731. data/ext/v8/upstream/2.3.3/src/mips/frames-mips.cc +0 -102
  732. data/ext/v8/upstream/2.3.3/src/mips/ic-mips.cc +0 -220
  733. data/ext/v8/upstream/2.3.3/src/mips/simulator-mips.cc +0 -1651
  734. data/ext/v8/upstream/2.3.3/src/mips/simulator-mips.h +0 -311
  735. data/ext/v8/upstream/2.3.3/src/mips/stub-cache-mips.cc +0 -403
  736. data/ext/v8/upstream/2.3.3/src/mirror-debugger.js +0 -2380
  737. data/ext/v8/upstream/2.3.3/src/objects-debug.cc +0 -1366
  738. data/ext/v8/upstream/2.3.3/src/objects-inl.h +0 -3333
  739. data/ext/v8/upstream/2.3.3/src/objects.cc +0 -8820
  740. data/ext/v8/upstream/2.3.3/src/objects.h +0 -5373
  741. data/ext/v8/upstream/2.3.3/src/oprofile-agent.cc +0 -108
  742. data/ext/v8/upstream/2.3.3/src/oprofile-agent.h +0 -77
  743. data/ext/v8/upstream/2.3.3/src/parser.cc +0 -5207
  744. data/ext/v8/upstream/2.3.3/src/parser.h +0 -197
  745. data/ext/v8/upstream/2.3.3/src/platform-freebsd.cc +0 -667
  746. data/ext/v8/upstream/2.3.3/src/platform-linux.cc +0 -862
  747. data/ext/v8/upstream/2.3.3/src/platform-macos.cc +0 -665
  748. data/ext/v8/upstream/2.3.3/src/platform-nullos.cc +0 -454
  749. data/ext/v8/upstream/2.3.3/src/platform-openbsd.cc +0 -622
  750. data/ext/v8/upstream/2.3.3/src/platform-posix.cc +0 -362
  751. data/ext/v8/upstream/2.3.3/src/platform-solaris.cc +0 -653
  752. data/ext/v8/upstream/2.3.3/src/platform-win32.cc +0 -1911
  753. data/ext/v8/upstream/2.3.3/src/platform.h +0 -577
  754. data/ext/v8/upstream/2.3.3/src/powers-ten.h +0 -2461
  755. data/ext/v8/upstream/2.3.3/src/prettyprinter.cc +0 -1531
  756. data/ext/v8/upstream/2.3.3/src/prettyprinter.h +0 -221
  757. data/ext/v8/upstream/2.3.3/src/profile-generator-inl.h +0 -148
  758. data/ext/v8/upstream/2.3.3/src/profile-generator.cc +0 -1830
  759. data/ext/v8/upstream/2.3.3/src/profile-generator.h +0 -853
  760. data/ext/v8/upstream/2.3.3/src/property.cc +0 -96
  761. data/ext/v8/upstream/2.3.3/src/property.h +0 -315
  762. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-irregexp.cc +0 -464
  763. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-irregexp.h +0 -141
  764. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-tracer.cc +0 -356
  765. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-tracer.h +0 -103
  766. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler.cc +0 -261
  767. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler.h +0 -228
  768. data/ext/v8/upstream/2.3.3/src/regexp.js +0 -549
  769. data/ext/v8/upstream/2.3.3/src/rewriter.cc +0 -1038
  770. data/ext/v8/upstream/2.3.3/src/rewriter.h +0 -54
  771. data/ext/v8/upstream/2.3.3/src/runtime.cc +0 -10599
  772. data/ext/v8/upstream/2.3.3/src/runtime.h +0 -459
  773. data/ext/v8/upstream/2.3.3/src/runtime.js +0 -629
  774. data/ext/v8/upstream/2.3.3/src/scanner.cc +0 -1346
  775. data/ext/v8/upstream/2.3.3/src/scanner.h +0 -503
  776. data/ext/v8/upstream/2.3.3/src/scopeinfo.cc +0 -637
  777. data/ext/v8/upstream/2.3.3/src/scopeinfo.h +0 -233
  778. data/ext/v8/upstream/2.3.3/src/scopes.cc +0 -962
  779. data/ext/v8/upstream/2.3.3/src/scopes.h +0 -400
  780. data/ext/v8/upstream/2.3.3/src/serialize.cc +0 -1461
  781. data/ext/v8/upstream/2.3.3/src/serialize.h +0 -581
  782. data/ext/v8/upstream/2.3.3/src/spaces-inl.h +0 -483
  783. data/ext/v8/upstream/2.3.3/src/spaces.cc +0 -2901
  784. data/ext/v8/upstream/2.3.3/src/spaces.h +0 -2197
  785. data/ext/v8/upstream/2.3.3/src/string-stream.cc +0 -584
  786. data/ext/v8/upstream/2.3.3/src/string-stream.h +0 -189
  787. data/ext/v8/upstream/2.3.3/src/string.js +0 -1006
  788. data/ext/v8/upstream/2.3.3/src/stub-cache.cc +0 -1379
  789. data/ext/v8/upstream/2.3.3/src/stub-cache.h +0 -756
  790. data/ext/v8/upstream/2.3.3/src/third_party/dtoa/COPYING +0 -15
  791. data/ext/v8/upstream/2.3.3/src/third_party/dtoa/dtoa.c +0 -3334
  792. data/ext/v8/upstream/2.3.3/src/token.cc +0 -56
  793. data/ext/v8/upstream/2.3.3/src/token.h +0 -270
  794. data/ext/v8/upstream/2.3.3/src/top.cc +0 -1067
  795. data/ext/v8/upstream/2.3.3/src/top.h +0 -463
  796. data/ext/v8/upstream/2.3.3/src/type-info.cc +0 -53
  797. data/ext/v8/upstream/2.3.3/src/type-info.h +0 -244
  798. data/ext/v8/upstream/2.3.3/src/unicode-inl.h +0 -238
  799. data/ext/v8/upstream/2.3.3/src/unicode.cc +0 -749
  800. data/ext/v8/upstream/2.3.3/src/unicode.h +0 -279
  801. data/ext/v8/upstream/2.3.3/src/uri.js +0 -415
  802. data/ext/v8/upstream/2.3.3/src/utils.cc +0 -285
  803. data/ext/v8/upstream/2.3.3/src/utils.h +0 -745
  804. data/ext/v8/upstream/2.3.3/src/v8-counters.h +0 -250
  805. data/ext/v8/upstream/2.3.3/src/v8.cc +0 -228
  806. data/ext/v8/upstream/2.3.3/src/v8.h +0 -121
  807. data/ext/v8/upstream/2.3.3/src/v8natives.js +0 -1188
  808. data/ext/v8/upstream/2.3.3/src/v8threads.cc +0 -461
  809. data/ext/v8/upstream/2.3.3/src/v8threads.h +0 -159
  810. data/ext/v8/upstream/2.3.3/src/variables.cc +0 -119
  811. data/ext/v8/upstream/2.3.3/src/variables.h +0 -205
  812. data/ext/v8/upstream/2.3.3/src/version.cc +0 -88
  813. data/ext/v8/upstream/2.3.3/src/virtual-frame-heavy-inl.h +0 -192
  814. data/ext/v8/upstream/2.3.3/src/virtual-frame.h +0 -46
  815. data/ext/v8/upstream/2.3.3/src/vm-state-inl.h +0 -137
  816. data/ext/v8/upstream/2.3.3/src/vm-state.cc +0 -39
  817. data/ext/v8/upstream/2.3.3/src/vm-state.h +0 -77
  818. data/ext/v8/upstream/2.3.3/src/x64/assembler-x64-inl.h +0 -400
  819. data/ext/v8/upstream/2.3.3/src/x64/assembler-x64.cc +0 -2963
  820. data/ext/v8/upstream/2.3.3/src/x64/assembler-x64.h +0 -1438
  821. data/ext/v8/upstream/2.3.3/src/x64/builtins-x64.cc +0 -1296
  822. data/ext/v8/upstream/2.3.3/src/x64/codegen-x64-inl.h +0 -46
  823. data/ext/v8/upstream/2.3.3/src/x64/codegen-x64.cc +0 -12491
  824. data/ext/v8/upstream/2.3.3/src/x64/codegen-x64.h +0 -1090
  825. data/ext/v8/upstream/2.3.3/src/x64/cpu-x64.cc +0 -83
  826. data/ext/v8/upstream/2.3.3/src/x64/debug-x64.cc +0 -267
  827. data/ext/v8/upstream/2.3.3/src/x64/disasm-x64.cc +0 -1696
  828. data/ext/v8/upstream/2.3.3/src/x64/fast-codegen-x64.cc +0 -250
  829. data/ext/v8/upstream/2.3.3/src/x64/frames-x64.cc +0 -113
  830. data/ext/v8/upstream/2.3.3/src/x64/frames-x64.h +0 -125
  831. data/ext/v8/upstream/2.3.3/src/x64/full-codegen-x64.cc +0 -3270
  832. data/ext/v8/upstream/2.3.3/src/x64/ic-x64.cc +0 -1907
  833. data/ext/v8/upstream/2.3.3/src/x64/jump-target-x64.cc +0 -437
  834. data/ext/v8/upstream/2.3.3/src/x64/macro-assembler-x64.cc +0 -2793
  835. data/ext/v8/upstream/2.3.3/src/x64/macro-assembler-x64.h +0 -916
  836. data/ext/v8/upstream/2.3.3/src/x64/regexp-macro-assembler-x64.cc +0 -1374
  837. data/ext/v8/upstream/2.3.3/src/x64/regexp-macro-assembler-x64.h +0 -277
  838. data/ext/v8/upstream/2.3.3/src/x64/simulator-x64.h +0 -63
  839. data/ext/v8/upstream/2.3.3/src/x64/stub-cache-x64.cc +0 -2560
  840. data/ext/v8/upstream/2.3.3/src/x64/virtual-frame-x64.cc +0 -1264
  841. data/ext/v8/upstream/2.3.3/src/x64/virtual-frame-x64.h +0 -590
  842. data/ext/v8/upstream/2.3.3/src/zone-inl.h +0 -82
  843. data/ext/v8/upstream/2.3.3/src/zone.cc +0 -194
  844. data/ext/v8/upstream/2.3.3/src/zone.h +0 -221
  845. data/ext/v8/upstream/2.3.3/tools/codemap.js +0 -270
  846. data/ext/v8/upstream/2.3.3/tools/csvparser.js +0 -83
  847. data/ext/v8/upstream/2.3.3/tools/gc-nvp-trace-processor.py +0 -317
  848. data/ext/v8/upstream/2.3.3/tools/gyp/v8.gyp +0 -749
  849. data/ext/v8/upstream/2.3.3/tools/linux-tick-processor.py +0 -78
  850. data/ext/v8/upstream/2.3.3/tools/logreader.js +0 -338
  851. data/ext/v8/upstream/2.3.3/tools/oprofile/annotate +0 -7
  852. data/ext/v8/upstream/2.3.3/tools/oprofile/common +0 -19
  853. data/ext/v8/upstream/2.3.3/tools/oprofile/dump +0 -7
  854. data/ext/v8/upstream/2.3.3/tools/oprofile/report +0 -7
  855. data/ext/v8/upstream/2.3.3/tools/oprofile/reset +0 -7
  856. data/ext/v8/upstream/2.3.3/tools/oprofile/run +0 -14
  857. data/ext/v8/upstream/2.3.3/tools/oprofile/shutdown +0 -7
  858. data/ext/v8/upstream/2.3.3/tools/oprofile/start +0 -7
  859. data/ext/v8/upstream/2.3.3/tools/presubmit.py +0 -299
  860. data/ext/v8/upstream/2.3.3/tools/profile.js +0 -691
  861. data/ext/v8/upstream/2.3.3/tools/profile_view.js +0 -224
  862. data/ext/v8/upstream/2.3.3/tools/splaytree.js +0 -322
  863. data/ext/v8/upstream/2.3.3/tools/splaytree.py +0 -226
  864. data/ext/v8/upstream/2.3.3/tools/tickprocessor.js +0 -862
  865. data/ext/v8/upstream/2.3.3/tools/tickprocessor.py +0 -571
  866. data/ext/v8/upstream/2.3.3/tools/utils.py +0 -88
  867. data/ext/v8/upstream/2.3.3/tools/visual_studio/README.txt +0 -71
  868. data/ext/v8/upstream/2.3.3/tools/visual_studio/common.vsprops +0 -34
  869. data/ext/v8/upstream/2.3.3/tools/visual_studio/debug.vsprops +0 -17
  870. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_base.vcproj +0 -1143
  871. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_base_arm.vcproj +0 -1115
  872. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_base_x64.vcproj +0 -1096
  873. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_shell_sample.vcproj +0 -145
  874. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_shell_sample_arm.vcproj +0 -145
  875. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_shell_sample_x64.vcproj +0 -161
  876. data/ext/v8/upstream/2.3.3/tools/windows-tick-processor.bat +0 -29
  877. data/ext/v8/upstream/2.3.3/tools/windows-tick-processor.py +0 -137
@@ -1,437 +0,0 @@
1
- // Copyright 2009 the V8 project authors. All rights reserved.
2
- // Redistribution and use in source and binary forms, with or without
3
- // modification, are permitted provided that the following conditions are
4
- // met:
5
- //
6
- // * Redistributions of source code must retain the above copyright
7
- // notice, this list of conditions and the following disclaimer.
8
- // * Redistributions in binary form must reproduce the above
9
- // copyright notice, this list of conditions and the following
10
- // disclaimer in the documentation and/or other materials provided
11
- // with the distribution.
12
- // * Neither the name of Google Inc. nor the names of its
13
- // contributors may be used to endorse or promote products derived
14
- // from this software without specific prior written permission.
15
- //
16
- // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
- // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
- // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
- // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
- // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
- // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
- // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
- // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
- // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
- // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
- // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
-
28
- #include "v8.h"
29
-
30
- #if defined(V8_TARGET_ARCH_X64)
31
-
32
- #include "codegen-inl.h"
33
- #include "jump-target-inl.h"
34
- #include "register-allocator-inl.h"
35
- #include "virtual-frame-inl.h"
36
-
37
- namespace v8 {
38
- namespace internal {
39
-
40
- // -------------------------------------------------------------------------
41
- // JumpTarget implementation.
42
-
43
- #define __ ACCESS_MASM(cgen()->masm())
44
-
45
- void JumpTarget::DoJump() {
46
- ASSERT(cgen()->has_valid_frame());
47
- // Live non-frame registers are not allowed at unconditional jumps
48
- // because we have no way of invalidating the corresponding results
49
- // which are still live in the C++ code.
50
- ASSERT(cgen()->HasValidEntryRegisters());
51
-
52
- if (is_bound()) {
53
- // Backward jump. There is an expected frame to merge to.
54
- ASSERT(direction_ == BIDIRECTIONAL);
55
- cgen()->frame()->PrepareMergeTo(entry_frame_);
56
- cgen()->frame()->MergeTo(entry_frame_);
57
- cgen()->DeleteFrame();
58
- __ jmp(&entry_label_);
59
- } else if (entry_frame_ != NULL) {
60
- // Forward jump with a preconfigured entry frame. Assert the
61
- // current frame matches the expected one and jump to the block.
62
- ASSERT(cgen()->frame()->Equals(entry_frame_));
63
- cgen()->DeleteFrame();
64
- __ jmp(&entry_label_);
65
- } else {
66
- // Forward jump. Remember the current frame and emit a jump to
67
- // its merge code.
68
- AddReachingFrame(cgen()->frame());
69
- RegisterFile empty;
70
- cgen()->SetFrame(NULL, &empty);
71
- __ jmp(&merge_labels_.last());
72
- }
73
- }
74
-
75
-
76
- void JumpTarget::DoBranch(Condition cc, Hint b) {
77
- ASSERT(cgen() != NULL);
78
- ASSERT(cgen()->has_valid_frame());
79
-
80
- if (is_bound()) {
81
- ASSERT(direction_ == BIDIRECTIONAL);
82
- // Backward branch. We have an expected frame to merge to on the
83
- // backward edge.
84
-
85
- // Swap the current frame for a copy (we do the swapping to get
86
- // the off-frame registers off the fall through) to use for the
87
- // branch.
88
- VirtualFrame* fall_through_frame = cgen()->frame();
89
- VirtualFrame* branch_frame = new VirtualFrame(fall_through_frame);
90
- RegisterFile non_frame_registers;
91
- cgen()->SetFrame(branch_frame, &non_frame_registers);
92
-
93
- // Check if we can avoid merge code.
94
- cgen()->frame()->PrepareMergeTo(entry_frame_);
95
- if (cgen()->frame()->Equals(entry_frame_)) {
96
- // Branch right in to the block.
97
- cgen()->DeleteFrame();
98
- __ j(cc, &entry_label_);
99
- cgen()->SetFrame(fall_through_frame, &non_frame_registers);
100
- return;
101
- }
102
-
103
- // Check if we can reuse existing merge code.
104
- for (int i = 0; i < reaching_frames_.length(); i++) {
105
- if (reaching_frames_[i] != NULL &&
106
- cgen()->frame()->Equals(reaching_frames_[i])) {
107
- // Branch to the merge code.
108
- cgen()->DeleteFrame();
109
- __ j(cc, &merge_labels_[i]);
110
- cgen()->SetFrame(fall_through_frame, &non_frame_registers);
111
- return;
112
- }
113
- }
114
-
115
- // To emit the merge code here, we negate the condition and branch
116
- // around the merge code on the fall through path.
117
- Label original_fall_through;
118
- __ j(NegateCondition(cc), &original_fall_through);
119
- cgen()->frame()->MergeTo(entry_frame_);
120
- cgen()->DeleteFrame();
121
- __ jmp(&entry_label_);
122
- cgen()->SetFrame(fall_through_frame, &non_frame_registers);
123
- __ bind(&original_fall_through);
124
-
125
- } else if (entry_frame_ != NULL) {
126
- // Forward branch with a preconfigured entry frame. Assert the
127
- // current frame matches the expected one and branch to the block.
128
- ASSERT(cgen()->frame()->Equals(entry_frame_));
129
- // Explicitly use the macro assembler instead of __ as forward
130
- // branches are expected to be a fixed size (no inserted
131
- // coverage-checking instructions please). This is used in
132
- // Reference::GetValue.
133
- cgen()->masm()->j(cc, &entry_label_);
134
-
135
- } else {
136
- // Forward branch. A copy of the current frame is remembered and
137
- // a branch to the merge code is emitted. Explicitly use the
138
- // macro assembler instead of __ as forward branches are expected
139
- // to be a fixed size (no inserted coverage-checking instructions
140
- // please). This is used in Reference::GetValue.
141
- AddReachingFrame(new VirtualFrame(cgen()->frame()));
142
- cgen()->masm()->j(cc, &merge_labels_.last());
143
- }
144
- }
145
-
146
-
147
- void JumpTarget::Call() {
148
- // Call is used to push the address of the catch block on the stack as
149
- // a return address when compiling try/catch and try/finally. We
150
- // fully spill the frame before making the call. The expected frame
151
- // at the label (which should be the only one) is the spilled current
152
- // frame plus an in-memory return address. The "fall-through" frame
153
- // at the return site is the spilled current frame.
154
- ASSERT(cgen() != NULL);
155
- ASSERT(cgen()->has_valid_frame());
156
- // There are no non-frame references across the call.
157
- ASSERT(cgen()->HasValidEntryRegisters());
158
- ASSERT(!is_linked());
159
-
160
- cgen()->frame()->SpillAll();
161
- VirtualFrame* target_frame = new VirtualFrame(cgen()->frame());
162
- target_frame->Adjust(1);
163
- // We do not expect a call with a preconfigured entry frame.
164
- ASSERT(entry_frame_ == NULL);
165
- AddReachingFrame(target_frame);
166
- __ call(&merge_labels_.last());
167
- }
168
-
169
-
170
- void JumpTarget::DoBind() {
171
- ASSERT(cgen() != NULL);
172
- ASSERT(!is_bound());
173
-
174
- // Live non-frame registers are not allowed at the start of a basic
175
- // block.
176
- ASSERT(!cgen()->has_valid_frame() || cgen()->HasValidEntryRegisters());
177
-
178
- // Fast case: the jump target was manually configured with an entry
179
- // frame to use.
180
- if (entry_frame_ != NULL) {
181
- // Assert no reaching frames to deal with.
182
- ASSERT(reaching_frames_.is_empty());
183
- ASSERT(!cgen()->has_valid_frame());
184
-
185
- RegisterFile empty;
186
- if (direction_ == BIDIRECTIONAL) {
187
- // Copy the entry frame so the original can be used for a
188
- // possible backward jump.
189
- cgen()->SetFrame(new VirtualFrame(entry_frame_), &empty);
190
- } else {
191
- // Take ownership of the entry frame.
192
- cgen()->SetFrame(entry_frame_, &empty);
193
- entry_frame_ = NULL;
194
- }
195
- __ bind(&entry_label_);
196
- return;
197
- }
198
-
199
- if (!is_linked()) {
200
- ASSERT(cgen()->has_valid_frame());
201
- if (direction_ == FORWARD_ONLY) {
202
- // Fast case: no forward jumps and no possible backward jumps.
203
- // The stack pointer can be floating above the top of the
204
- // virtual frame before the bind. Afterward, it should not.
205
- VirtualFrame* frame = cgen()->frame();
206
- int difference = frame->stack_pointer_ - (frame->element_count() - 1);
207
- if (difference > 0) {
208
- frame->stack_pointer_ -= difference;
209
- __ addq(rsp, Immediate(difference * kPointerSize));
210
- }
211
- } else {
212
- ASSERT(direction_ == BIDIRECTIONAL);
213
- // Fast case: no forward jumps, possible backward ones. Remove
214
- // constants and copies above the watermark on the fall-through
215
- // frame and use it as the entry frame.
216
- cgen()->frame()->MakeMergable();
217
- entry_frame_ = new VirtualFrame(cgen()->frame());
218
- }
219
- __ bind(&entry_label_);
220
- return;
221
- }
222
-
223
- if (direction_ == FORWARD_ONLY &&
224
- !cgen()->has_valid_frame() &&
225
- reaching_frames_.length() == 1) {
226
- // Fast case: no fall-through, a single forward jump, and no
227
- // possible backward jumps. Pick up the only reaching frame, take
228
- // ownership of it, and use it for the block about to be emitted.
229
- VirtualFrame* frame = reaching_frames_[0];
230
- RegisterFile empty;
231
- cgen()->SetFrame(frame, &empty);
232
- reaching_frames_[0] = NULL;
233
- __ bind(&merge_labels_[0]);
234
-
235
- // The stack pointer can be floating above the top of the
236
- // virtual frame before the bind. Afterward, it should not.
237
- int difference = frame->stack_pointer_ - (frame->element_count() - 1);
238
- if (difference > 0) {
239
- frame->stack_pointer_ -= difference;
240
- __ addq(rsp, Immediate(difference * kPointerSize));
241
- }
242
-
243
- __ bind(&entry_label_);
244
- return;
245
- }
246
-
247
- // If there is a current frame, record it as the fall-through. It
248
- // is owned by the reaching frames for now.
249
- bool had_fall_through = false;
250
- if (cgen()->has_valid_frame()) {
251
- had_fall_through = true;
252
- AddReachingFrame(cgen()->frame()); // Return value ignored.
253
- RegisterFile empty;
254
- cgen()->SetFrame(NULL, &empty);
255
- }
256
-
257
- // Compute the frame to use for entry to the block.
258
- ComputeEntryFrame();
259
-
260
- // Some moves required to merge to an expected frame require purely
261
- // frame state changes, and do not require any code generation.
262
- // Perform those first to increase the possibility of finding equal
263
- // frames below.
264
- for (int i = 0; i < reaching_frames_.length(); i++) {
265
- if (reaching_frames_[i] != NULL) {
266
- reaching_frames_[i]->PrepareMergeTo(entry_frame_);
267
- }
268
- }
269
-
270
- if (is_linked()) {
271
- // There were forward jumps. Handle merging the reaching frames
272
- // to the entry frame.
273
-
274
- // Loop over the (non-null) reaching frames and process any that
275
- // need merge code. Iterate backwards through the list to handle
276
- // the fall-through frame first. Set frames that will be
277
- // processed after 'i' to NULL if we want to avoid processing
278
- // them.
279
- for (int i = reaching_frames_.length() - 1; i >= 0; i--) {
280
- VirtualFrame* frame = reaching_frames_[i];
281
-
282
- if (frame != NULL) {
283
- // Does the frame (probably) need merge code?
284
- if (!frame->Equals(entry_frame_)) {
285
- // We could have a valid frame as the fall through to the
286
- // binding site or as the fall through from a previous merge
287
- // code block. Jump around the code we are about to
288
- // generate.
289
- if (cgen()->has_valid_frame()) {
290
- cgen()->DeleteFrame();
291
- __ jmp(&entry_label_);
292
- }
293
- // Pick up the frame for this block. Assume ownership if
294
- // there cannot be backward jumps.
295
- RegisterFile empty;
296
- if (direction_ == BIDIRECTIONAL) {
297
- cgen()->SetFrame(new VirtualFrame(frame), &empty);
298
- } else {
299
- cgen()->SetFrame(frame, &empty);
300
- reaching_frames_[i] = NULL;
301
- }
302
- __ bind(&merge_labels_[i]);
303
-
304
- // Loop over the remaining (non-null) reaching frames,
305
- // looking for any that can share merge code with this one.
306
- for (int j = 0; j < i; j++) {
307
- VirtualFrame* other = reaching_frames_[j];
308
- if (other != NULL && other->Equals(cgen()->frame())) {
309
- // Set the reaching frame element to null to avoid
310
- // processing it later, and then bind its entry label.
311
- reaching_frames_[j] = NULL;
312
- __ bind(&merge_labels_[j]);
313
- }
314
- }
315
-
316
- // Emit the merge code.
317
- cgen()->frame()->MergeTo(entry_frame_);
318
- } else if (i == reaching_frames_.length() - 1 && had_fall_through) {
319
- // If this is the fall through frame, and it didn't need
320
- // merge code, we need to pick up the frame so we can jump
321
- // around subsequent merge blocks if necessary.
322
- RegisterFile empty;
323
- cgen()->SetFrame(frame, &empty);
324
- reaching_frames_[i] = NULL;
325
- }
326
- }
327
- }
328
-
329
- // The code generator may not have a current frame if there was no
330
- // fall through and none of the reaching frames needed merging.
331
- // In that case, clone the entry frame as the current frame.
332
- if (!cgen()->has_valid_frame()) {
333
- RegisterFile empty;
334
- cgen()->SetFrame(new VirtualFrame(entry_frame_), &empty);
335
- }
336
-
337
- // There may be unprocessed reaching frames that did not need
338
- // merge code. They will have unbound merge labels. Bind their
339
- // merge labels to be the same as the entry label and deallocate
340
- // them.
341
- for (int i = 0; i < reaching_frames_.length(); i++) {
342
- if (!merge_labels_[i].is_bound()) {
343
- reaching_frames_[i] = NULL;
344
- __ bind(&merge_labels_[i]);
345
- }
346
- }
347
-
348
- // There are non-NULL reaching frames with bound labels for each
349
- // merge block, but only on backward targets.
350
- } else {
351
- // There were no forward jumps. There must be a current frame and
352
- // this must be a bidirectional target.
353
- ASSERT(reaching_frames_.length() == 1);
354
- ASSERT(reaching_frames_[0] != NULL);
355
- ASSERT(direction_ == BIDIRECTIONAL);
356
-
357
- // Use a copy of the reaching frame so the original can be saved
358
- // for possible reuse as a backward merge block.
359
- RegisterFile empty;
360
- cgen()->SetFrame(new VirtualFrame(reaching_frames_[0]), &empty);
361
- __ bind(&merge_labels_[0]);
362
- cgen()->frame()->MergeTo(entry_frame_);
363
- }
364
-
365
- __ bind(&entry_label_);
366
- }
367
-
368
-
369
- void BreakTarget::Jump() {
370
- // Drop leftover statement state from the frame before merging, without
371
- // emitting code.
372
- ASSERT(cgen()->has_valid_frame());
373
- int count = cgen()->frame()->height() - expected_height_;
374
- cgen()->frame()->ForgetElements(count);
375
- DoJump();
376
- }
377
-
378
-
379
- void BreakTarget::Jump(Result* arg) {
380
- // Drop leftover statement state from the frame before merging, without
381
- // emitting code.
382
- ASSERT(cgen()->has_valid_frame());
383
- int count = cgen()->frame()->height() - expected_height_;
384
- cgen()->frame()->ForgetElements(count);
385
- cgen()->frame()->Push(arg);
386
- DoJump();
387
- }
388
-
389
-
390
- void BreakTarget::Bind() {
391
- #ifdef DEBUG
392
- // All the forward-reaching frames should have been adjusted at the
393
- // jumps to this target.
394
- for (int i = 0; i < reaching_frames_.length(); i++) {
395
- ASSERT(reaching_frames_[i] == NULL ||
396
- reaching_frames_[i]->height() == expected_height_);
397
- }
398
- #endif
399
- // Drop leftover statement state from the frame before merging, even on
400
- // the fall through. This is so we can bind the return target with state
401
- // on the frame.
402
- if (cgen()->has_valid_frame()) {
403
- int count = cgen()->frame()->height() - expected_height_;
404
- cgen()->frame()->ForgetElements(count);
405
- }
406
- DoBind();
407
- }
408
-
409
-
410
- void BreakTarget::Bind(Result* arg) {
411
- #ifdef DEBUG
412
- // All the forward-reaching frames should have been adjusted at the
413
- // jumps to this target.
414
- for (int i = 0; i < reaching_frames_.length(); i++) {
415
- ASSERT(reaching_frames_[i] == NULL ||
416
- reaching_frames_[i]->height() == expected_height_ + 1);
417
- }
418
- #endif
419
- // Drop leftover statement state from the frame before merging, even on
420
- // the fall through. This is so we can bind the return target with state
421
- // on the frame.
422
- if (cgen()->has_valid_frame()) {
423
- int count = cgen()->frame()->height() - expected_height_;
424
- cgen()->frame()->ForgetElements(count);
425
- cgen()->frame()->Push(arg);
426
- }
427
- DoBind();
428
- *arg = cgen()->frame()->Pop();
429
- }
430
-
431
-
432
- #undef __
433
-
434
-
435
- } } // namespace v8::internal
436
-
437
- #endif // V8_TARGET_ARCH_X64
@@ -1,2793 +0,0 @@
1
- // Copyright 2009 the V8 project authors. All rights reserved.
2
- // Redistribution and use in source and binary forms, with or without
3
- // modification, are permitted provided that the following conditions are
4
- // met:
5
- //
6
- // * Redistributions of source code must retain the above copyright
7
- // notice, this list of conditions and the following disclaimer.
8
- // * Redistributions in binary form must reproduce the above
9
- // copyright notice, this list of conditions and the following
10
- // disclaimer in the documentation and/or other materials provided
11
- // with the distribution.
12
- // * Neither the name of Google Inc. nor the names of its
13
- // contributors may be used to endorse or promote products derived
14
- // from this software without specific prior written permission.
15
- //
16
- // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
- // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
- // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
- // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
- // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
- // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
- // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
- // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
- // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
- // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
- // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
-
28
- #include "v8.h"
29
-
30
- #if defined(V8_TARGET_ARCH_X64)
31
-
32
- #include "bootstrapper.h"
33
- #include "codegen-inl.h"
34
- #include "assembler-x64.h"
35
- #include "macro-assembler-x64.h"
36
- #include "serialize.h"
37
- #include "debug.h"
38
- #include "heap.h"
39
-
40
- namespace v8 {
41
- namespace internal {
42
-
43
- MacroAssembler::MacroAssembler(void* buffer, int size)
44
- : Assembler(buffer, size),
45
- generating_stub_(false),
46
- allow_stub_calls_(true),
47
- code_object_(Heap::undefined_value()) {
48
- }
49
-
50
-
51
- void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
52
- movq(destination, Operand(kRootRegister, index << kPointerSizeLog2));
53
- }
54
-
55
-
56
- void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
57
- movq(Operand(kRootRegister, index << kPointerSizeLog2), source);
58
- }
59
-
60
-
61
- void MacroAssembler::PushRoot(Heap::RootListIndex index) {
62
- push(Operand(kRootRegister, index << kPointerSizeLog2));
63
- }
64
-
65
-
66
- void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
67
- cmpq(with, Operand(kRootRegister, index << kPointerSizeLog2));
68
- }
69
-
70
-
71
- void MacroAssembler::CompareRoot(Operand with, Heap::RootListIndex index) {
72
- LoadRoot(kScratchRegister, index);
73
- cmpq(with, kScratchRegister);
74
- }
75
-
76
-
77
- void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
78
- CompareRoot(rsp, Heap::kStackLimitRootIndex);
79
- j(below, on_stack_overflow);
80
- }
81
-
82
-
83
- void MacroAssembler::RecordWriteHelper(Register object,
84
- Register addr,
85
- Register scratch) {
86
- if (FLAG_debug_code) {
87
- // Check that the object is not in new space.
88
- Label not_in_new_space;
89
- InNewSpace(object, scratch, not_equal, &not_in_new_space);
90
- Abort("new-space object passed to RecordWriteHelper");
91
- bind(&not_in_new_space);
92
- }
93
-
94
- // Compute the page start address from the heap object pointer, and reuse
95
- // the 'object' register for it.
96
- and_(object, Immediate(~Page::kPageAlignmentMask));
97
-
98
- // Compute number of region covering addr. See Page::GetRegionNumberForAddress
99
- // method for more details.
100
- shrl(addr, Immediate(Page::kRegionSizeLog2));
101
- andl(addr, Immediate(Page::kPageAlignmentMask >> Page::kRegionSizeLog2));
102
-
103
- // Set dirty mark for region.
104
- bts(Operand(object, Page::kDirtyFlagOffset), addr);
105
- }
106
-
107
-
108
- void MacroAssembler::RecordWrite(Register object,
109
- int offset,
110
- Register value,
111
- Register index) {
112
- // The compiled code assumes that record write doesn't change the
113
- // context register, so we check that none of the clobbered
114
- // registers are rsi.
115
- ASSERT(!object.is(rsi) && !value.is(rsi) && !index.is(rsi));
116
-
117
- // First, check if a write barrier is even needed. The tests below
118
- // catch stores of Smis and stores into young gen.
119
- Label done;
120
- JumpIfSmi(value, &done);
121
-
122
- RecordWriteNonSmi(object, offset, value, index);
123
- bind(&done);
124
-
125
- // Clobber all input registers when running with the debug-code flag
126
- // turned on to provoke errors. This clobbering repeats the
127
- // clobbering done inside RecordWriteNonSmi but it's necessary to
128
- // avoid having the fast case for smis leave the registers
129
- // unchanged.
130
- if (FLAG_debug_code) {
131
- movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
132
- movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
133
- movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
134
- }
135
- }
136
-
137
-
138
- void MacroAssembler::RecordWrite(Register object,
139
- Register address,
140
- Register value) {
141
- // The compiled code assumes that record write doesn't change the
142
- // context register, so we check that none of the clobbered
143
- // registers are esi.
144
- ASSERT(!object.is(rsi) && !value.is(rsi) && !address.is(rsi));
145
-
146
- // First, check if a write barrier is even needed. The tests below
147
- // catch stores of Smis and stores into young gen.
148
- Label done;
149
- JumpIfSmi(value, &done);
150
-
151
- InNewSpace(object, value, equal, &done);
152
-
153
- RecordWriteHelper(object, address, value);
154
-
155
- bind(&done);
156
-
157
- // Clobber all input registers when running with the debug-code flag
158
- // turned on to provoke errors.
159
- if (FLAG_debug_code) {
160
- movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
161
- movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
162
- movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
163
- }
164
- }
165
-
166
-
167
- void MacroAssembler::RecordWriteNonSmi(Register object,
168
- int offset,
169
- Register scratch,
170
- Register index) {
171
- Label done;
172
-
173
- if (FLAG_debug_code) {
174
- Label okay;
175
- JumpIfNotSmi(object, &okay);
176
- Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis");
177
- bind(&okay);
178
-
179
- if (offset == 0) {
180
- // index must be int32.
181
- Register tmp = index.is(rax) ? rbx : rax;
182
- push(tmp);
183
- movl(tmp, index);
184
- cmpq(tmp, index);
185
- Check(equal, "Index register for RecordWrite must be untagged int32.");
186
- pop(tmp);
187
- }
188
- }
189
-
190
- // Test that the object address is not in the new space. We cannot
191
- // update page dirty marks for new space pages.
192
- InNewSpace(object, scratch, equal, &done);
193
-
194
- // The offset is relative to a tagged or untagged HeapObject pointer,
195
- // so either offset or offset + kHeapObjectTag must be a
196
- // multiple of kPointerSize.
197
- ASSERT(IsAligned(offset, kPointerSize) ||
198
- IsAligned(offset + kHeapObjectTag, kPointerSize));
199
-
200
- Register dst = index;
201
- if (offset != 0) {
202
- lea(dst, Operand(object, offset));
203
- } else {
204
- // array access: calculate the destination address in the same manner as
205
- // KeyedStoreIC::GenerateGeneric.
206
- lea(dst, FieldOperand(object,
207
- index,
208
- times_pointer_size,
209
- FixedArray::kHeaderSize));
210
- }
211
- RecordWriteHelper(object, dst, scratch);
212
-
213
- bind(&done);
214
-
215
- // Clobber all input registers when running with the debug-code flag
216
- // turned on to provoke errors.
217
- if (FLAG_debug_code) {
218
- movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
219
- movq(scratch, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
220
- movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
221
- }
222
- }
223
-
224
-
225
- void MacroAssembler::InNewSpace(Register object,
226
- Register scratch,
227
- Condition cc,
228
- Label* branch) {
229
- if (Serializer::enabled()) {
230
- // Can't do arithmetic on external references if it might get serialized.
231
- // The mask isn't really an address. We load it as an external reference in
232
- // case the size of the new space is different between the snapshot maker
233
- // and the running system.
234
- if (scratch.is(object)) {
235
- movq(kScratchRegister, ExternalReference::new_space_mask());
236
- and_(scratch, kScratchRegister);
237
- } else {
238
- movq(scratch, ExternalReference::new_space_mask());
239
- and_(scratch, object);
240
- }
241
- movq(kScratchRegister, ExternalReference::new_space_start());
242
- cmpq(scratch, kScratchRegister);
243
- j(cc, branch);
244
- } else {
245
- ASSERT(is_int32(static_cast<int64_t>(Heap::NewSpaceMask())));
246
- intptr_t new_space_start =
247
- reinterpret_cast<intptr_t>(Heap::NewSpaceStart());
248
- movq(kScratchRegister, -new_space_start, RelocInfo::NONE);
249
- if (scratch.is(object)) {
250
- addq(scratch, kScratchRegister);
251
- } else {
252
- lea(scratch, Operand(object, kScratchRegister, times_1, 0));
253
- }
254
- and_(scratch, Immediate(static_cast<int32_t>(Heap::NewSpaceMask())));
255
- j(cc, branch);
256
- }
257
- }
258
-
259
-
260
- void MacroAssembler::Assert(Condition cc, const char* msg) {
261
- if (FLAG_debug_code) Check(cc, msg);
262
- }
263
-
264
-
265
- void MacroAssembler::Check(Condition cc, const char* msg) {
266
- Label L;
267
- j(cc, &L);
268
- Abort(msg);
269
- // will not return here
270
- bind(&L);
271
- }
272
-
273
-
274
- void MacroAssembler::CheckStackAlignment() {
275
- int frame_alignment = OS::ActivationFrameAlignment();
276
- int frame_alignment_mask = frame_alignment - 1;
277
- if (frame_alignment > kPointerSize) {
278
- ASSERT(IsPowerOf2(frame_alignment));
279
- Label alignment_as_expected;
280
- testq(rsp, Immediate(frame_alignment_mask));
281
- j(zero, &alignment_as_expected);
282
- // Abort if stack is not aligned.
283
- int3();
284
- bind(&alignment_as_expected);
285
- }
286
- }
287
-
288
-
289
- void MacroAssembler::NegativeZeroTest(Register result,
290
- Register op,
291
- Label* then_label) {
292
- Label ok;
293
- testl(result, result);
294
- j(not_zero, &ok);
295
- testl(op, op);
296
- j(sign, then_label);
297
- bind(&ok);
298
- }
299
-
300
-
301
- void MacroAssembler::Abort(const char* msg) {
302
- // We want to pass the msg string like a smi to avoid GC
303
- // problems, however msg is not guaranteed to be aligned
304
- // properly. Instead, we pass an aligned pointer that is
305
- // a proper v8 smi, but also pass the alignment difference
306
- // from the real pointer as a smi.
307
- intptr_t p1 = reinterpret_cast<intptr_t>(msg);
308
- intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
309
- // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
310
- ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
311
- #ifdef DEBUG
312
- if (msg != NULL) {
313
- RecordComment("Abort message: ");
314
- RecordComment(msg);
315
- }
316
- #endif
317
- // Disable stub call restrictions to always allow calls to abort.
318
- set_allow_stub_calls(true);
319
-
320
- push(rax);
321
- movq(kScratchRegister, p0, RelocInfo::NONE);
322
- push(kScratchRegister);
323
- movq(kScratchRegister,
324
- reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
325
- RelocInfo::NONE);
326
- push(kScratchRegister);
327
- CallRuntime(Runtime::kAbort, 2);
328
- // will not return here
329
- int3();
330
- }
331
-
332
-
333
- void MacroAssembler::CallStub(CodeStub* stub) {
334
- ASSERT(allow_stub_calls()); // calls are not allowed in some stubs
335
- Call(stub->GetCode(), RelocInfo::CODE_TARGET);
336
- }
337
-
338
-
339
- void MacroAssembler::TailCallStub(CodeStub* stub) {
340
- ASSERT(allow_stub_calls()); // calls are not allowed in some stubs
341
- Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
342
- }
343
-
344
-
345
- void MacroAssembler::StubReturn(int argc) {
346
- ASSERT(argc >= 1 && generating_stub());
347
- ret((argc - 1) * kPointerSize);
348
- }
349
-
350
-
351
- void MacroAssembler::IllegalOperation(int num_arguments) {
352
- if (num_arguments > 0) {
353
- addq(rsp, Immediate(num_arguments * kPointerSize));
354
- }
355
- LoadRoot(rax, Heap::kUndefinedValueRootIndex);
356
- }
357
-
358
-
359
- void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
360
- CallRuntime(Runtime::FunctionForId(id), num_arguments);
361
- }
362
-
363
-
364
- void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
365
- // If the expected number of arguments of the runtime function is
366
- // constant, we check that the actual number of arguments match the
367
- // expectation.
368
- if (f->nargs >= 0 && f->nargs != num_arguments) {
369
- IllegalOperation(num_arguments);
370
- return;
371
- }
372
-
373
- // TODO(1236192): Most runtime routines don't need the number of
374
- // arguments passed in because it is constant. At some point we
375
- // should remove this need and make the runtime routine entry code
376
- // smarter.
377
- Set(rax, num_arguments);
378
- movq(rbx, ExternalReference(f));
379
- CEntryStub ces(f->result_size);
380
- CallStub(&ces);
381
- }
382
-
383
-
384
- void MacroAssembler::CallExternalReference(const ExternalReference& ext,
385
- int num_arguments) {
386
- Set(rax, num_arguments);
387
- movq(rbx, ext);
388
-
389
- CEntryStub stub(1);
390
- CallStub(&stub);
391
- }
392
-
393
-
394
- void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
395
- int num_arguments,
396
- int result_size) {
397
- // ----------- S t a t e -------------
398
- // -- rsp[0] : return address
399
- // -- rsp[8] : argument num_arguments - 1
400
- // ...
401
- // -- rsp[8 * num_arguments] : argument 0 (receiver)
402
- // -----------------------------------
403
-
404
- // TODO(1236192): Most runtime routines don't need the number of
405
- // arguments passed in because it is constant. At some point we
406
- // should remove this need and make the runtime routine entry code
407
- // smarter.
408
- Set(rax, num_arguments);
409
- JumpToExternalReference(ext, result_size);
410
- }
411
-
412
-
413
- void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
414
- int num_arguments,
415
- int result_size) {
416
- TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
417
- }
418
-
419
-
420
- void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
421
- int result_size) {
422
- // Set the entry point and jump to the C entry runtime stub.
423
- movq(rbx, ext);
424
- CEntryStub ces(result_size);
425
- jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
426
- }
427
-
428
-
429
- void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
430
- // Calls are not allowed in some stubs.
431
- ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());
432
-
433
- // Rely on the assertion to check that the number of provided
434
- // arguments match the expected number of arguments. Fake a
435
- // parameter count to avoid emitting code to do the check.
436
- ParameterCount expected(0);
437
- GetBuiltinEntry(rdx, id);
438
- InvokeCode(rdx, expected, expected, flag);
439
- }
440
-
441
-
442
- void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
443
- ASSERT(!target.is(rdi));
444
-
445
- // Load the builtins object into target register.
446
- movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
447
- movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
448
-
449
- // Load the JavaScript builtin function from the builtins object.
450
- movq(rdi, FieldOperand(target, JSBuiltinsObject::OffsetOfFunctionWithId(id)));
451
-
452
- // Load the code entry point from the builtins object.
453
- movq(target, FieldOperand(target, JSBuiltinsObject::OffsetOfCodeWithId(id)));
454
- if (FLAG_debug_code) {
455
- // Make sure the code objects in the builtins object and in the
456
- // builtin function are the same.
457
- push(target);
458
- movq(target, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
459
- movq(target, FieldOperand(target, SharedFunctionInfo::kCodeOffset));
460
- cmpq(target, Operand(rsp, 0));
461
- Assert(equal, "Builtin code object changed");
462
- pop(target);
463
- }
464
- lea(target, FieldOperand(target, Code::kHeaderSize));
465
- }
466
-
467
-
468
- void MacroAssembler::Set(Register dst, int64_t x) {
469
- if (x == 0) {
470
- xorl(dst, dst);
471
- } else if (is_int32(x)) {
472
- movq(dst, Immediate(static_cast<int32_t>(x)));
473
- } else if (is_uint32(x)) {
474
- movl(dst, Immediate(static_cast<uint32_t>(x)));
475
- } else {
476
- movq(dst, x, RelocInfo::NONE);
477
- }
478
- }
479
-
480
- void MacroAssembler::Set(const Operand& dst, int64_t x) {
481
- if (is_int32(x)) {
482
- movq(dst, Immediate(static_cast<int32_t>(x)));
483
- } else {
484
- movq(kScratchRegister, x, RelocInfo::NONE);
485
- movq(dst, kScratchRegister);
486
- }
487
- }
488
-
489
- // ----------------------------------------------------------------------------
490
- // Smi tagging, untagging and tag detection.
491
-
492
- static int kSmiShift = kSmiTagSize + kSmiShiftSize;
493
-
494
- Register MacroAssembler::GetSmiConstant(Smi* source) {
495
- int value = source->value();
496
- if (value == 0) {
497
- xorl(kScratchRegister, kScratchRegister);
498
- return kScratchRegister;
499
- }
500
- if (value == 1) {
501
- return kSmiConstantRegister;
502
- }
503
- LoadSmiConstant(kScratchRegister, source);
504
- return kScratchRegister;
505
- }
506
-
507
- void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
508
- if (FLAG_debug_code) {
509
- movq(dst,
510
- reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
511
- RelocInfo::NONE);
512
- cmpq(dst, kSmiConstantRegister);
513
- if (allow_stub_calls()) {
514
- Assert(equal, "Uninitialized kSmiConstantRegister");
515
- } else {
516
- Label ok;
517
- j(equal, &ok);
518
- int3();
519
- bind(&ok);
520
- }
521
- }
522
- if (source->value() == 0) {
523
- xorl(dst, dst);
524
- return;
525
- }
526
- int value = source->value();
527
- bool negative = value < 0;
528
- unsigned int uvalue = negative ? -value : value;
529
-
530
- switch (uvalue) {
531
- case 9:
532
- lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_8, 0));
533
- break;
534
- case 8:
535
- xorl(dst, dst);
536
- lea(dst, Operand(dst, kSmiConstantRegister, times_8, 0));
537
- break;
538
- case 4:
539
- xorl(dst, dst);
540
- lea(dst, Operand(dst, kSmiConstantRegister, times_4, 0));
541
- break;
542
- case 5:
543
- lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_4, 0));
544
- break;
545
- case 3:
546
- lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_2, 0));
547
- break;
548
- case 2:
549
- lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0));
550
- break;
551
- case 1:
552
- movq(dst, kSmiConstantRegister);
553
- break;
554
- case 0:
555
- UNREACHABLE();
556
- return;
557
- default:
558
- movq(dst, reinterpret_cast<uint64_t>(source), RelocInfo::NONE);
559
- return;
560
- }
561
- if (negative) {
562
- neg(dst);
563
- }
564
- }
565
-
566
- void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
567
- ASSERT_EQ(0, kSmiTag);
568
- if (!dst.is(src)) {
569
- movl(dst, src);
570
- }
571
- shl(dst, Immediate(kSmiShift));
572
- }
573
-
574
-
575
- void MacroAssembler::Integer32ToSmi(Register dst,
576
- Register src,
577
- Label* on_overflow) {
578
- ASSERT_EQ(0, kSmiTag);
579
- // 32-bit integer always fits in a long smi.
580
- if (!dst.is(src)) {
581
- movl(dst, src);
582
- }
583
- shl(dst, Immediate(kSmiShift));
584
- }
585
-
586
-
587
- void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
588
- if (FLAG_debug_code) {
589
- testb(dst, Immediate(0x01));
590
- Label ok;
591
- j(zero, &ok);
592
- if (allow_stub_calls()) {
593
- Abort("Integer32ToSmiField writing to non-smi location");
594
- } else {
595
- int3();
596
- }
597
- bind(&ok);
598
- }
599
- ASSERT(kSmiShift % kBitsPerByte == 0);
600
- movl(Operand(dst, kSmiShift / kBitsPerByte), src);
601
- }
602
-
603
-
604
- void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
605
- Register src,
606
- int constant) {
607
- if (dst.is(src)) {
608
- addq(dst, Immediate(constant));
609
- } else {
610
- lea(dst, Operand(src, constant));
611
- }
612
- shl(dst, Immediate(kSmiShift));
613
- }
614
-
615
-
616
- void MacroAssembler::SmiToInteger32(Register dst, Register src) {
617
- ASSERT_EQ(0, kSmiTag);
618
- if (!dst.is(src)) {
619
- movq(dst, src);
620
- }
621
- shr(dst, Immediate(kSmiShift));
622
- }
623
-
624
-
625
- void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
626
- movl(dst, Operand(src, kSmiShift / kBitsPerByte));
627
- }
628
-
629
-
630
- void MacroAssembler::SmiToInteger64(Register dst, Register src) {
631
- ASSERT_EQ(0, kSmiTag);
632
- if (!dst.is(src)) {
633
- movq(dst, src);
634
- }
635
- sar(dst, Immediate(kSmiShift));
636
- }
637
-
638
-
639
- void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
640
- movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
641
- }
642
-
643
-
644
- void MacroAssembler::SmiTest(Register src) {
645
- testq(src, src);
646
- }
647
-
648
-
649
- void MacroAssembler::SmiCompare(Register dst, Register src) {
650
- cmpq(dst, src);
651
- }
652
-
653
-
654
- void MacroAssembler::SmiCompare(Register dst, Smi* src) {
655
- ASSERT(!dst.is(kScratchRegister));
656
- if (src->value() == 0) {
657
- testq(dst, dst);
658
- } else {
659
- Move(kScratchRegister, src);
660
- cmpq(dst, kScratchRegister);
661
- }
662
- }
663
-
664
-
665
- void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
666
- cmpq(dst, src);
667
- }
668
-
669
-
670
- void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
671
- cmpq(dst, src);
672
- }
673
-
674
-
675
- void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
676
- cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
677
- }
678
-
679
-
680
- void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
681
- cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
682
- }
683
-
684
-
685
- void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
686
- Register src,
687
- int power) {
688
- ASSERT(power >= 0);
689
- ASSERT(power < 64);
690
- if (power == 0) {
691
- SmiToInteger64(dst, src);
692
- return;
693
- }
694
- if (!dst.is(src)) {
695
- movq(dst, src);
696
- }
697
- if (power < kSmiShift) {
698
- sar(dst, Immediate(kSmiShift - power));
699
- } else if (power > kSmiShift) {
700
- shl(dst, Immediate(power - kSmiShift));
701
- }
702
- }
703
-
704
-
705
- void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
706
- Register src,
707
- int power) {
708
- ASSERT((0 <= power) && (power < 32));
709
- if (dst.is(src)) {
710
- shr(dst, Immediate(power + kSmiShift));
711
- } else {
712
- UNIMPLEMENTED(); // Not used.
713
- }
714
- }
715
-
716
-
717
- Condition MacroAssembler::CheckSmi(Register src) {
718
- ASSERT_EQ(0, kSmiTag);
719
- testb(src, Immediate(kSmiTagMask));
720
- return zero;
721
- }
722
-
723
-
724
- Condition MacroAssembler::CheckPositiveSmi(Register src) {
725
- ASSERT_EQ(0, kSmiTag);
726
- // Make mask 0x8000000000000001 and test that both bits are zero.
727
- movq(kScratchRegister, src);
728
- rol(kScratchRegister, Immediate(1));
729
- testb(kScratchRegister, Immediate(3));
730
- return zero;
731
- }
732
-
733
-
734
- Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
735
- if (first.is(second)) {
736
- return CheckSmi(first);
737
- }
738
- ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
739
- leal(kScratchRegister, Operand(first, second, times_1, 0));
740
- testb(kScratchRegister, Immediate(0x03));
741
- return zero;
742
- }
743
-
744
-
745
- Condition MacroAssembler::CheckBothPositiveSmi(Register first,
746
- Register second) {
747
- if (first.is(second)) {
748
- return CheckPositiveSmi(first);
749
- }
750
- movq(kScratchRegister, first);
751
- or_(kScratchRegister, second);
752
- rol(kScratchRegister, Immediate(1));
753
- testl(kScratchRegister, Immediate(0x03));
754
- return zero;
755
- }
756
-
757
-
758
- Condition MacroAssembler::CheckEitherSmi(Register first, Register second) {
759
- if (first.is(second)) {
760
- return CheckSmi(first);
761
- }
762
- movl(kScratchRegister, first);
763
- andl(kScratchRegister, second);
764
- testb(kScratchRegister, Immediate(kSmiTagMask));
765
- return zero;
766
- }
767
-
768
-
769
- Condition MacroAssembler::CheckIsMinSmi(Register src) {
770
- ASSERT(!src.is(kScratchRegister));
771
- // If we overflow by subtracting one, it's the minimal smi value.
772
- cmpq(src, kSmiConstantRegister);
773
- return overflow;
774
- }
775
-
776
-
777
- Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
778
- // A 32-bit integer value can always be converted to a smi.
779
- return always;
780
- }
781
-
782
-
783
- Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
784
- // An unsigned 32-bit integer value is valid as long as the high bit
785
- // is not set.
786
- testl(src, src);
787
- return positive;
788
- }
789
-
790
-
791
- void MacroAssembler::SmiNeg(Register dst, Register src, Label* on_smi_result) {
792
- if (dst.is(src)) {
793
- ASSERT(!dst.is(kScratchRegister));
794
- movq(kScratchRegister, src);
795
- neg(dst); // Low 32 bits are retained as zero by negation.
796
- // Test if result is zero or Smi::kMinValue.
797
- cmpq(dst, kScratchRegister);
798
- j(not_equal, on_smi_result);
799
- movq(src, kScratchRegister);
800
- } else {
801
- movq(dst, src);
802
- neg(dst);
803
- cmpq(dst, src);
804
- // If the result is zero or Smi::kMinValue, negation failed to create a smi.
805
- j(not_equal, on_smi_result);
806
- }
807
- }
808
-
809
-
810
- void MacroAssembler::SmiAdd(Register dst,
811
- Register src1,
812
- Register src2,
813
- Label* on_not_smi_result) {
814
- ASSERT(!dst.is(src2));
815
- if (on_not_smi_result == NULL) {
816
- // No overflow checking. Use only when it's known that
817
- // overflowing is impossible.
818
- if (dst.is(src1)) {
819
- addq(dst, src2);
820
- } else {
821
- movq(dst, src1);
822
- addq(dst, src2);
823
- }
824
- Assert(no_overflow, "Smi addition overflow");
825
- } else if (dst.is(src1)) {
826
- movq(kScratchRegister, src1);
827
- addq(kScratchRegister, src2);
828
- j(overflow, on_not_smi_result);
829
- movq(dst, kScratchRegister);
830
- } else {
831
- movq(dst, src1);
832
- addq(dst, src2);
833
- j(overflow, on_not_smi_result);
834
- }
835
- }
836
-
837
-
838
- void MacroAssembler::SmiSub(Register dst,
839
- Register src1,
840
- Register src2,
841
- Label* on_not_smi_result) {
842
- ASSERT(!dst.is(src2));
843
- if (on_not_smi_result == NULL) {
844
- // No overflow checking. Use only when it's known that
845
- // overflowing is impossible (e.g., subtracting two positive smis).
846
- if (dst.is(src1)) {
847
- subq(dst, src2);
848
- } else {
849
- movq(dst, src1);
850
- subq(dst, src2);
851
- }
852
- Assert(no_overflow, "Smi subtraction overflow");
853
- } else if (dst.is(src1)) {
854
- cmpq(dst, src2);
855
- j(overflow, on_not_smi_result);
856
- subq(dst, src2);
857
- } else {
858
- movq(dst, src1);
859
- subq(dst, src2);
860
- j(overflow, on_not_smi_result);
861
- }
862
- }
863
-
864
-
865
- void MacroAssembler::SmiSub(Register dst,
866
- Register src1,
867
- const Operand& src2,
868
- Label* on_not_smi_result) {
869
- if (on_not_smi_result == NULL) {
870
- // No overflow checking. Use only when it's known that
871
- // overflowing is impossible (e.g., subtracting two positive smis).
872
- if (dst.is(src1)) {
873
- subq(dst, src2);
874
- } else {
875
- movq(dst, src1);
876
- subq(dst, src2);
877
- }
878
- Assert(no_overflow, "Smi subtraction overflow");
879
- } else if (dst.is(src1)) {
880
- movq(kScratchRegister, src2);
881
- cmpq(src1, kScratchRegister);
882
- j(overflow, on_not_smi_result);
883
- subq(src1, kScratchRegister);
884
- } else {
885
- movq(dst, src1);
886
- subq(dst, src2);
887
- j(overflow, on_not_smi_result);
888
- }
889
- }
890
-
891
- void MacroAssembler::SmiMul(Register dst,
892
- Register src1,
893
- Register src2,
894
- Label* on_not_smi_result) {
895
- ASSERT(!dst.is(src2));
896
- ASSERT(!dst.is(kScratchRegister));
897
- ASSERT(!src1.is(kScratchRegister));
898
- ASSERT(!src2.is(kScratchRegister));
899
-
900
- if (dst.is(src1)) {
901
- Label failure, zero_correct_result;
902
- movq(kScratchRegister, src1); // Create backup for later testing.
903
- SmiToInteger64(dst, src1);
904
- imul(dst, src2);
905
- j(overflow, &failure);
906
-
907
- // Check for negative zero result. If product is zero, and one
908
- // argument is negative, go to slow case.
909
- Label correct_result;
910
- testq(dst, dst);
911
- j(not_zero, &correct_result);
912
-
913
- movq(dst, kScratchRegister);
914
- xor_(dst, src2);
915
- j(positive, &zero_correct_result); // Result was positive zero.
916
-
917
- bind(&failure); // Reused failure exit, restores src1.
918
- movq(src1, kScratchRegister);
919
- jmp(on_not_smi_result);
920
-
921
- bind(&zero_correct_result);
922
- xor_(dst, dst);
923
-
924
- bind(&correct_result);
925
- } else {
926
- SmiToInteger64(dst, src1);
927
- imul(dst, src2);
928
- j(overflow, on_not_smi_result);
929
- // Check for negative zero result. If product is zero, and one
930
- // argument is negative, go to slow case.
931
- Label correct_result;
932
- testq(dst, dst);
933
- j(not_zero, &correct_result);
934
- // One of src1 and src2 is zero, the check whether the other is
935
- // negative.
936
- movq(kScratchRegister, src1);
937
- xor_(kScratchRegister, src2);
938
- j(negative, on_not_smi_result);
939
- bind(&correct_result);
940
- }
941
- }
942
-
943
-
944
- void MacroAssembler::SmiTryAddConstant(Register dst,
945
- Register src,
946
- Smi* constant,
947
- Label* on_not_smi_result) {
948
- // Does not assume that src is a smi.
949
- ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
950
- ASSERT_EQ(0, kSmiTag);
951
- ASSERT(!dst.is(kScratchRegister));
952
- ASSERT(!src.is(kScratchRegister));
953
-
954
- JumpIfNotSmi(src, on_not_smi_result);
955
- Register tmp = (dst.is(src) ? kScratchRegister : dst);
956
- LoadSmiConstant(tmp, constant);
957
- addq(tmp, src);
958
- j(overflow, on_not_smi_result);
959
- if (dst.is(src)) {
960
- movq(dst, tmp);
961
- }
962
- }
963
-
964
-
965
- void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
966
- if (constant->value() == 0) {
967
- if (!dst.is(src)) {
968
- movq(dst, src);
969
- }
970
- return;
971
- } else if (dst.is(src)) {
972
- ASSERT(!dst.is(kScratchRegister));
973
- switch (constant->value()) {
974
- case 1:
975
- addq(dst, kSmiConstantRegister);
976
- return;
977
- case 2:
978
- lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
979
- return;
980
- case 4:
981
- lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
982
- return;
983
- case 8:
984
- lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
985
- return;
986
- default:
987
- Register constant_reg = GetSmiConstant(constant);
988
- addq(dst, constant_reg);
989
- return;
990
- }
991
- } else {
992
- switch (constant->value()) {
993
- case 1:
994
- lea(dst, Operand(src, kSmiConstantRegister, times_1, 0));
995
- return;
996
- case 2:
997
- lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
998
- return;
999
- case 4:
1000
- lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
1001
- return;
1002
- case 8:
1003
- lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
1004
- return;
1005
- default:
1006
- LoadSmiConstant(dst, constant);
1007
- addq(dst, src);
1008
- return;
1009
- }
1010
- }
1011
- }
1012
-
1013
-
1014
- void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
1015
- if (constant->value() != 0) {
1016
- addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value()));
1017
- }
1018
- }
1019
-
1020
-
1021
- void MacroAssembler::SmiAddConstant(Register dst,
1022
- Register src,
1023
- Smi* constant,
1024
- Label* on_not_smi_result) {
1025
- if (constant->value() == 0) {
1026
- if (!dst.is(src)) {
1027
- movq(dst, src);
1028
- }
1029
- } else if (dst.is(src)) {
1030
- ASSERT(!dst.is(kScratchRegister));
1031
-
1032
- LoadSmiConstant(kScratchRegister, constant);
1033
- addq(kScratchRegister, src);
1034
- j(overflow, on_not_smi_result);
1035
- movq(dst, kScratchRegister);
1036
- } else {
1037
- LoadSmiConstant(dst, constant);
1038
- addq(dst, src);
1039
- j(overflow, on_not_smi_result);
1040
- }
1041
- }
1042
-
1043
-
1044
- void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
1045
- if (constant->value() == 0) {
1046
- if (!dst.is(src)) {
1047
- movq(dst, src);
1048
- }
1049
- } else if (dst.is(src)) {
1050
- ASSERT(!dst.is(kScratchRegister));
1051
- Register constant_reg = GetSmiConstant(constant);
1052
- subq(dst, constant_reg);
1053
- } else {
1054
- if (constant->value() == Smi::kMinValue) {
1055
- LoadSmiConstant(dst, constant);
1056
- // Adding and subtracting the min-value gives the same result, it only
1057
- // differs on the overflow bit, which we don't check here.
1058
- addq(dst, src);
1059
- } else {
1060
- // Subtract by adding the negation.
1061
- LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
1062
- addq(dst, src);
1063
- }
1064
- }
1065
- }
1066
-
1067
-
1068
- void MacroAssembler::SmiSubConstant(Register dst,
1069
- Register src,
1070
- Smi* constant,
1071
- Label* on_not_smi_result) {
1072
- if (constant->value() == 0) {
1073
- if (!dst.is(src)) {
1074
- movq(dst, src);
1075
- }
1076
- } else if (dst.is(src)) {
1077
- ASSERT(!dst.is(kScratchRegister));
1078
- if (constant->value() == Smi::kMinValue) {
1079
- // Subtracting min-value from any non-negative value will overflow.
1080
- // We test the non-negativeness before doing the subtraction.
1081
- testq(src, src);
1082
- j(not_sign, on_not_smi_result);
1083
- LoadSmiConstant(kScratchRegister, constant);
1084
- subq(dst, kScratchRegister);
1085
- } else {
1086
- // Subtract by adding the negation.
1087
- LoadSmiConstant(kScratchRegister, Smi::FromInt(-constant->value()));
1088
- addq(kScratchRegister, dst);
1089
- j(overflow, on_not_smi_result);
1090
- movq(dst, kScratchRegister);
1091
- }
1092
- } else {
1093
- if (constant->value() == Smi::kMinValue) {
1094
- // Subtracting min-value from any non-negative value will overflow.
1095
- // We test the non-negativeness before doing the subtraction.
1096
- testq(src, src);
1097
- j(not_sign, on_not_smi_result);
1098
- LoadSmiConstant(dst, constant);
1099
- // Adding and subtracting the min-value gives the same result, it only
1100
- // differs on the overflow bit, which we don't check here.
1101
- addq(dst, src);
1102
- } else {
1103
- // Subtract by adding the negation.
1104
- LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
1105
- addq(dst, src);
1106
- j(overflow, on_not_smi_result);
1107
- }
1108
- }
1109
- }
1110
-
1111
-
1112
- void MacroAssembler::SmiDiv(Register dst,
1113
- Register src1,
1114
- Register src2,
1115
- Label* on_not_smi_result) {
1116
- ASSERT(!src1.is(kScratchRegister));
1117
- ASSERT(!src2.is(kScratchRegister));
1118
- ASSERT(!dst.is(kScratchRegister));
1119
- ASSERT(!src2.is(rax));
1120
- ASSERT(!src2.is(rdx));
1121
- ASSERT(!src1.is(rdx));
1122
-
1123
- // Check for 0 divisor (result is +/-Infinity).
1124
- Label positive_divisor;
1125
- testq(src2, src2);
1126
- j(zero, on_not_smi_result);
1127
-
1128
- if (src1.is(rax)) {
1129
- movq(kScratchRegister, src1);
1130
- }
1131
- SmiToInteger32(rax, src1);
1132
- // We need to rule out dividing Smi::kMinValue by -1, since that would
1133
- // overflow in idiv and raise an exception.
1134
- // We combine this with negative zero test (negative zero only happens
1135
- // when dividing zero by a negative number).
1136
-
1137
- // We overshoot a little and go to slow case if we divide min-value
1138
- // by any negative value, not just -1.
1139
- Label safe_div;
1140
- testl(rax, Immediate(0x7fffffff));
1141
- j(not_zero, &safe_div);
1142
- testq(src2, src2);
1143
- if (src1.is(rax)) {
1144
- j(positive, &safe_div);
1145
- movq(src1, kScratchRegister);
1146
- jmp(on_not_smi_result);
1147
- } else {
1148
- j(negative, on_not_smi_result);
1149
- }
1150
- bind(&safe_div);
1151
-
1152
- SmiToInteger32(src2, src2);
1153
- // Sign extend src1 into edx:eax.
1154
- cdq();
1155
- idivl(src2);
1156
- Integer32ToSmi(src2, src2);
1157
- // Check that the remainder is zero.
1158
- testl(rdx, rdx);
1159
- if (src1.is(rax)) {
1160
- Label smi_result;
1161
- j(zero, &smi_result);
1162
- movq(src1, kScratchRegister);
1163
- jmp(on_not_smi_result);
1164
- bind(&smi_result);
1165
- } else {
1166
- j(not_zero, on_not_smi_result);
1167
- }
1168
- if (!dst.is(src1) && src1.is(rax)) {
1169
- movq(src1, kScratchRegister);
1170
- }
1171
- Integer32ToSmi(dst, rax);
1172
- }
1173
-
1174
-
1175
- void MacroAssembler::SmiMod(Register dst,
1176
- Register src1,
1177
- Register src2,
1178
- Label* on_not_smi_result) {
1179
- ASSERT(!dst.is(kScratchRegister));
1180
- ASSERT(!src1.is(kScratchRegister));
1181
- ASSERT(!src2.is(kScratchRegister));
1182
- ASSERT(!src2.is(rax));
1183
- ASSERT(!src2.is(rdx));
1184
- ASSERT(!src1.is(rdx));
1185
- ASSERT(!src1.is(src2));
1186
-
1187
- testq(src2, src2);
1188
- j(zero, on_not_smi_result);
1189
-
1190
- if (src1.is(rax)) {
1191
- movq(kScratchRegister, src1);
1192
- }
1193
- SmiToInteger32(rax, src1);
1194
- SmiToInteger32(src2, src2);
1195
-
1196
- // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
1197
- Label safe_div;
1198
- cmpl(rax, Immediate(Smi::kMinValue));
1199
- j(not_equal, &safe_div);
1200
- cmpl(src2, Immediate(-1));
1201
- j(not_equal, &safe_div);
1202
- // Retag inputs and go slow case.
1203
- Integer32ToSmi(src2, src2);
1204
- if (src1.is(rax)) {
1205
- movq(src1, kScratchRegister);
1206
- }
1207
- jmp(on_not_smi_result);
1208
- bind(&safe_div);
1209
-
1210
- // Sign extend eax into edx:eax.
1211
- cdq();
1212
- idivl(src2);
1213
- // Restore smi tags on inputs.
1214
- Integer32ToSmi(src2, src2);
1215
- if (src1.is(rax)) {
1216
- movq(src1, kScratchRegister);
1217
- }
1218
- // Check for a negative zero result. If the result is zero, and the
1219
- // dividend is negative, go slow to return a floating point negative zero.
1220
- Label smi_result;
1221
- testl(rdx, rdx);
1222
- j(not_zero, &smi_result);
1223
- testq(src1, src1);
1224
- j(negative, on_not_smi_result);
1225
- bind(&smi_result);
1226
- Integer32ToSmi(dst, rdx);
1227
- }
1228
-
1229
-
1230
- void MacroAssembler::SmiNot(Register dst, Register src) {
1231
- ASSERT(!dst.is(kScratchRegister));
1232
- ASSERT(!src.is(kScratchRegister));
1233
- // Set tag and padding bits before negating, so that they are zero afterwards.
1234
- movl(kScratchRegister, Immediate(~0));
1235
- if (dst.is(src)) {
1236
- xor_(dst, kScratchRegister);
1237
- } else {
1238
- lea(dst, Operand(src, kScratchRegister, times_1, 0));
1239
- }
1240
- not_(dst);
1241
- }
1242
-
1243
-
1244
- void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
1245
- ASSERT(!dst.is(src2));
1246
- if (!dst.is(src1)) {
1247
- movq(dst, src1);
1248
- }
1249
- and_(dst, src2);
1250
- }
1251
-
1252
-
1253
- void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
1254
- if (constant->value() == 0) {
1255
- xor_(dst, dst);
1256
- } else if (dst.is(src)) {
1257
- ASSERT(!dst.is(kScratchRegister));
1258
- Register constant_reg = GetSmiConstant(constant);
1259
- and_(dst, constant_reg);
1260
- } else {
1261
- LoadSmiConstant(dst, constant);
1262
- and_(dst, src);
1263
- }
1264
- }
1265
-
1266
-
1267
- void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
1268
- if (!dst.is(src1)) {
1269
- movq(dst, src1);
1270
- }
1271
- or_(dst, src2);
1272
- }
1273
-
1274
-
1275
- void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
1276
- if (dst.is(src)) {
1277
- ASSERT(!dst.is(kScratchRegister));
1278
- Register constant_reg = GetSmiConstant(constant);
1279
- or_(dst, constant_reg);
1280
- } else {
1281
- LoadSmiConstant(dst, constant);
1282
- or_(dst, src);
1283
- }
1284
- }
1285
-
1286
-
1287
- void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
1288
- if (!dst.is(src1)) {
1289
- movq(dst, src1);
1290
- }
1291
- xor_(dst, src2);
1292
- }
1293
-
1294
-
1295
- void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
1296
- if (dst.is(src)) {
1297
- ASSERT(!dst.is(kScratchRegister));
1298
- Register constant_reg = GetSmiConstant(constant);
1299
- xor_(dst, constant_reg);
1300
- } else {
1301
- LoadSmiConstant(dst, constant);
1302
- xor_(dst, src);
1303
- }
1304
- }
1305
-
1306
-
1307
- void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
1308
- Register src,
1309
- int shift_value) {
1310
- ASSERT(is_uint5(shift_value));
1311
- if (shift_value > 0) {
1312
- if (dst.is(src)) {
1313
- sar(dst, Immediate(shift_value + kSmiShift));
1314
- shl(dst, Immediate(kSmiShift));
1315
- } else {
1316
- UNIMPLEMENTED(); // Not used.
1317
- }
1318
- }
1319
- }
1320
-
1321
-
1322
- void MacroAssembler::SmiShiftLogicalRightConstant(Register dst,
1323
- Register src,
1324
- int shift_value,
1325
- Label* on_not_smi_result) {
1326
- // Logic right shift interprets its result as an *unsigned* number.
1327
- if (dst.is(src)) {
1328
- UNIMPLEMENTED(); // Not used.
1329
- } else {
1330
- movq(dst, src);
1331
- if (shift_value == 0) {
1332
- testq(dst, dst);
1333
- j(negative, on_not_smi_result);
1334
- }
1335
- shr(dst, Immediate(shift_value + kSmiShift));
1336
- shl(dst, Immediate(kSmiShift));
1337
- }
1338
- }
1339
-
1340
-
1341
- void MacroAssembler::SmiShiftLeftConstant(Register dst,
1342
- Register src,
1343
- int shift_value) {
1344
- if (!dst.is(src)) {
1345
- movq(dst, src);
1346
- }
1347
- if (shift_value > 0) {
1348
- shl(dst, Immediate(shift_value));
1349
- }
1350
- }
1351
-
1352
-
1353
- void MacroAssembler::SmiShiftLeft(Register dst,
1354
- Register src1,
1355
- Register src2) {
1356
- ASSERT(!dst.is(rcx));
1357
- Label result_ok;
1358
- // Untag shift amount.
1359
- if (!dst.is(src1)) {
1360
- movq(dst, src1);
1361
- }
1362
- SmiToInteger32(rcx, src2);
1363
- // Shift amount specified by lower 5 bits, not six as the shl opcode.
1364
- and_(rcx, Immediate(0x1f));
1365
- shl_cl(dst);
1366
- }
1367
-
1368
-
1369
- void MacroAssembler::SmiShiftLogicalRight(Register dst,
1370
- Register src1,
1371
- Register src2,
1372
- Label* on_not_smi_result) {
1373
- ASSERT(!dst.is(kScratchRegister));
1374
- ASSERT(!src1.is(kScratchRegister));
1375
- ASSERT(!src2.is(kScratchRegister));
1376
- ASSERT(!dst.is(rcx));
1377
- Label result_ok;
1378
- if (src1.is(rcx) || src2.is(rcx)) {
1379
- movq(kScratchRegister, rcx);
1380
- }
1381
- if (!dst.is(src1)) {
1382
- movq(dst, src1);
1383
- }
1384
- SmiToInteger32(rcx, src2);
1385
- orl(rcx, Immediate(kSmiShift));
1386
- shr_cl(dst); // Shift is rcx modulo 0x1f + 32.
1387
- shl(dst, Immediate(kSmiShift));
1388
- testq(dst, dst);
1389
- if (src1.is(rcx) || src2.is(rcx)) {
1390
- Label positive_result;
1391
- j(positive, &positive_result);
1392
- if (src1.is(rcx)) {
1393
- movq(src1, kScratchRegister);
1394
- } else {
1395
- movq(src2, kScratchRegister);
1396
- }
1397
- jmp(on_not_smi_result);
1398
- bind(&positive_result);
1399
- } else {
1400
- j(negative, on_not_smi_result); // src2 was zero and src1 negative.
1401
- }
1402
- }
1403
-
1404
-
1405
- void MacroAssembler::SmiShiftArithmeticRight(Register dst,
1406
- Register src1,
1407
- Register src2) {
1408
- ASSERT(!dst.is(kScratchRegister));
1409
- ASSERT(!src1.is(kScratchRegister));
1410
- ASSERT(!src2.is(kScratchRegister));
1411
- ASSERT(!dst.is(rcx));
1412
- if (src1.is(rcx)) {
1413
- movq(kScratchRegister, src1);
1414
- } else if (src2.is(rcx)) {
1415
- movq(kScratchRegister, src2);
1416
- }
1417
- if (!dst.is(src1)) {
1418
- movq(dst, src1);
1419
- }
1420
- SmiToInteger32(rcx, src2);
1421
- orl(rcx, Immediate(kSmiShift));
1422
- sar_cl(dst); // Shift 32 + original rcx & 0x1f.
1423
- shl(dst, Immediate(kSmiShift));
1424
- if (src1.is(rcx)) {
1425
- movq(src1, kScratchRegister);
1426
- } else if (src2.is(rcx)) {
1427
- movq(src2, kScratchRegister);
1428
- }
1429
- }
1430
-
1431
-
1432
- void MacroAssembler::SelectNonSmi(Register dst,
1433
- Register src1,
1434
- Register src2,
1435
- Label* on_not_smis) {
1436
- ASSERT(!dst.is(kScratchRegister));
1437
- ASSERT(!src1.is(kScratchRegister));
1438
- ASSERT(!src2.is(kScratchRegister));
1439
- ASSERT(!dst.is(src1));
1440
- ASSERT(!dst.is(src2));
1441
- // Both operands must not be smis.
1442
- #ifdef DEBUG
1443
- if (allow_stub_calls()) { // Check contains a stub call.
1444
- Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
1445
- Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
1446
- }
1447
- #endif
1448
- ASSERT_EQ(0, kSmiTag);
1449
- ASSERT_EQ(0, Smi::FromInt(0));
1450
- movl(kScratchRegister, Immediate(kSmiTagMask));
1451
- and_(kScratchRegister, src1);
1452
- testl(kScratchRegister, src2);
1453
- // If non-zero then both are smis.
1454
- j(not_zero, on_not_smis);
1455
-
1456
- // Exactly one operand is a smi.
1457
- ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
1458
- // kScratchRegister still holds src1 & kSmiTag, which is either zero or one.
1459
- subq(kScratchRegister, Immediate(1));
1460
- // If src1 is a smi, then scratch register all 1s, else it is all 0s.
1461
- movq(dst, src1);
1462
- xor_(dst, src2);
1463
- and_(dst, kScratchRegister);
1464
- // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
1465
- xor_(dst, src1);
1466
- // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
1467
- }
1468
-
1469
-
1470
- SmiIndex MacroAssembler::SmiToIndex(Register dst,
1471
- Register src,
1472
- int shift) {
1473
- ASSERT(is_uint6(shift));
1474
- // There is a possible optimization if shift is in the range 60-63, but that
1475
- // will (and must) never happen.
1476
- if (!dst.is(src)) {
1477
- movq(dst, src);
1478
- }
1479
- if (shift < kSmiShift) {
1480
- sar(dst, Immediate(kSmiShift - shift));
1481
- } else {
1482
- shl(dst, Immediate(shift - kSmiShift));
1483
- }
1484
- return SmiIndex(dst, times_1);
1485
- }
1486
-
1487
- SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
1488
- Register src,
1489
- int shift) {
1490
- // Register src holds a positive smi.
1491
- ASSERT(is_uint6(shift));
1492
- if (!dst.is(src)) {
1493
- movq(dst, src);
1494
- }
1495
- neg(dst);
1496
- if (shift < kSmiShift) {
1497
- sar(dst, Immediate(kSmiShift - shift));
1498
- } else {
1499
- shl(dst, Immediate(shift - kSmiShift));
1500
- }
1501
- return SmiIndex(dst, times_1);
1502
- }
1503
-
1504
-
1505
- void MacroAssembler::JumpIfSmi(Register src, Label* on_smi) {
1506
- ASSERT_EQ(0, kSmiTag);
1507
- Condition smi = CheckSmi(src);
1508
- j(smi, on_smi);
1509
- }
1510
-
1511
-
1512
- void MacroAssembler::JumpIfNotSmi(Register src, Label* on_not_smi) {
1513
- Condition smi = CheckSmi(src);
1514
- j(NegateCondition(smi), on_not_smi);
1515
- }
1516
-
1517
-
1518
- void MacroAssembler::JumpIfNotPositiveSmi(Register src,
1519
- Label* on_not_positive_smi) {
1520
- Condition positive_smi = CheckPositiveSmi(src);
1521
- j(NegateCondition(positive_smi), on_not_positive_smi);
1522
- }
1523
-
1524
-
1525
- void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
1526
- Smi* constant,
1527
- Label* on_equals) {
1528
- SmiCompare(src, constant);
1529
- j(equal, on_equals);
1530
- }
1531
-
1532
-
1533
- void MacroAssembler::JumpIfNotValidSmiValue(Register src, Label* on_invalid) {
1534
- Condition is_valid = CheckInteger32ValidSmiValue(src);
1535
- j(NegateCondition(is_valid), on_invalid);
1536
- }
1537
-
1538
-
1539
- void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
1540
- Label* on_invalid) {
1541
- Condition is_valid = CheckUInteger32ValidSmiValue(src);
1542
- j(NegateCondition(is_valid), on_invalid);
1543
- }
1544
-
1545
-
1546
- void MacroAssembler::JumpIfNotBothSmi(Register src1, Register src2,
1547
- Label* on_not_both_smi) {
1548
- Condition both_smi = CheckBothSmi(src1, src2);
1549
- j(NegateCondition(both_smi), on_not_both_smi);
1550
- }
1551
-
1552
-
1553
- void MacroAssembler::JumpIfNotBothPositiveSmi(Register src1, Register src2,
1554
- Label* on_not_both_smi) {
1555
- Condition both_smi = CheckBothPositiveSmi(src1, src2);
1556
- j(NegateCondition(both_smi), on_not_both_smi);
1557
- }
1558
-
1559
-
1560
-
1561
- void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first_object,
1562
- Register second_object,
1563
- Register scratch1,
1564
- Register scratch2,
1565
- Label* on_fail) {
1566
- // Check that both objects are not smis.
1567
- Condition either_smi = CheckEitherSmi(first_object, second_object);
1568
- j(either_smi, on_fail);
1569
-
1570
- // Load instance type for both strings.
1571
- movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
1572
- movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
1573
- movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
1574
- movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));
1575
-
1576
- // Check that both are flat ascii strings.
1577
- ASSERT(kNotStringTag != 0);
1578
- const int kFlatAsciiStringMask =
1579
- kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1580
- const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
1581
-
1582
- andl(scratch1, Immediate(kFlatAsciiStringMask));
1583
- andl(scratch2, Immediate(kFlatAsciiStringMask));
1584
- // Interleave the bits to check both scratch1 and scratch2 in one test.
1585
- ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
1586
- lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
1587
- cmpl(scratch1,
1588
- Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
1589
- j(not_equal, on_fail);
1590
- }
1591
-
1592
-
1593
- void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
1594
- Register instance_type,
1595
- Register scratch,
1596
- Label *failure) {
1597
- if (!scratch.is(instance_type)) {
1598
- movl(scratch, instance_type);
1599
- }
1600
-
1601
- const int kFlatAsciiStringMask =
1602
- kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1603
-
1604
- andl(scratch, Immediate(kFlatAsciiStringMask));
1605
- cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
1606
- j(not_equal, failure);
1607
- }
1608
-
1609
-
1610
- void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
1611
- Register first_object_instance_type,
1612
- Register second_object_instance_type,
1613
- Register scratch1,
1614
- Register scratch2,
1615
- Label* on_fail) {
1616
- // Load instance type for both strings.
1617
- movq(scratch1, first_object_instance_type);
1618
- movq(scratch2, second_object_instance_type);
1619
-
1620
- // Check that both are flat ascii strings.
1621
- ASSERT(kNotStringTag != 0);
1622
- const int kFlatAsciiStringMask =
1623
- kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1624
- const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
1625
-
1626
- andl(scratch1, Immediate(kFlatAsciiStringMask));
1627
- andl(scratch2, Immediate(kFlatAsciiStringMask));
1628
- // Interleave the bits to check both scratch1 and scratch2 in one test.
1629
- ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
1630
- lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
1631
- cmpl(scratch1,
1632
- Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
1633
- j(not_equal, on_fail);
1634
- }
1635
-
1636
-
1637
- void MacroAssembler::Move(Register dst, Handle<Object> source) {
1638
- ASSERT(!source->IsFailure());
1639
- if (source->IsSmi()) {
1640
- Move(dst, Smi::cast(*source));
1641
- } else {
1642
- movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
1643
- }
1644
- }
1645
-
1646
-
1647
- void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
1648
- ASSERT(!source->IsFailure());
1649
- if (source->IsSmi()) {
1650
- Move(dst, Smi::cast(*source));
1651
- } else {
1652
- movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1653
- movq(dst, kScratchRegister);
1654
- }
1655
- }
1656
-
1657
-
1658
- void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
1659
- if (source->IsSmi()) {
1660
- SmiCompare(dst, Smi::cast(*source));
1661
- } else {
1662
- Move(kScratchRegister, source);
1663
- cmpq(dst, kScratchRegister);
1664
- }
1665
- }
1666
-
1667
-
1668
- void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
1669
- if (source->IsSmi()) {
1670
- SmiCompare(dst, Smi::cast(*source));
1671
- } else {
1672
- ASSERT(source->IsHeapObject());
1673
- movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1674
- cmpq(dst, kScratchRegister);
1675
- }
1676
- }
1677
-
1678
-
1679
- void MacroAssembler::Push(Handle<Object> source) {
1680
- if (source->IsSmi()) {
1681
- Push(Smi::cast(*source));
1682
- } else {
1683
- ASSERT(source->IsHeapObject());
1684
- movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1685
- push(kScratchRegister);
1686
- }
1687
- }
1688
-
1689
-
1690
- void MacroAssembler::Push(Smi* source) {
1691
- intptr_t smi = reinterpret_cast<intptr_t>(source);
1692
- if (is_int32(smi)) {
1693
- push(Immediate(static_cast<int32_t>(smi)));
1694
- } else {
1695
- Register constant = GetSmiConstant(source);
1696
- push(constant);
1697
- }
1698
- }
1699
-
1700
-
1701
- void MacroAssembler::Drop(int stack_elements) {
1702
- if (stack_elements > 0) {
1703
- addq(rsp, Immediate(stack_elements * kPointerSize));
1704
- }
1705
- }
1706
-
1707
-
1708
- void MacroAssembler::Test(const Operand& src, Smi* source) {
1709
- testl(Operand(src, kIntSize), Immediate(source->value()));
1710
- }
1711
-
1712
-
1713
- void MacroAssembler::Jump(ExternalReference ext) {
1714
- movq(kScratchRegister, ext);
1715
- jmp(kScratchRegister);
1716
- }
1717
-
1718
-
1719
- void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
1720
- movq(kScratchRegister, destination, rmode);
1721
- jmp(kScratchRegister);
1722
- }
1723
-
1724
-
1725
- void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
1726
- // TODO(X64): Inline this
1727
- jmp(code_object, rmode);
1728
- }
1729
-
1730
-
1731
- void MacroAssembler::Call(ExternalReference ext) {
1732
- movq(kScratchRegister, ext);
1733
- call(kScratchRegister);
1734
- }
1735
-
1736
-
1737
- void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
1738
- movq(kScratchRegister, destination, rmode);
1739
- call(kScratchRegister);
1740
- }
1741
-
1742
-
1743
- void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
1744
- ASSERT(RelocInfo::IsCodeTarget(rmode));
1745
- WriteRecordedPositions();
1746
- call(code_object, rmode);
1747
- }
1748
-
1749
-
1750
- void MacroAssembler::PushTryHandler(CodeLocation try_location,
1751
- HandlerType type) {
1752
- // Adjust this code if not the case.
1753
- ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
1754
-
1755
- // The pc (return address) is already on TOS. This code pushes state,
1756
- // frame pointer and current handler. Check that they are expected
1757
- // next on the stack, in that order.
1758
- ASSERT_EQ(StackHandlerConstants::kStateOffset,
1759
- StackHandlerConstants::kPCOffset - kPointerSize);
1760
- ASSERT_EQ(StackHandlerConstants::kFPOffset,
1761
- StackHandlerConstants::kStateOffset - kPointerSize);
1762
- ASSERT_EQ(StackHandlerConstants::kNextOffset,
1763
- StackHandlerConstants::kFPOffset - kPointerSize);
1764
-
1765
- if (try_location == IN_JAVASCRIPT) {
1766
- if (type == TRY_CATCH_HANDLER) {
1767
- push(Immediate(StackHandler::TRY_CATCH));
1768
- } else {
1769
- push(Immediate(StackHandler::TRY_FINALLY));
1770
- }
1771
- push(rbp);
1772
- } else {
1773
- ASSERT(try_location == IN_JS_ENTRY);
1774
- // The frame pointer does not point to a JS frame so we save NULL
1775
- // for rbp. We expect the code throwing an exception to check rbp
1776
- // before dereferencing it to restore the context.
1777
- push(Immediate(StackHandler::ENTRY));
1778
- push(Immediate(0)); // NULL frame pointer.
1779
- }
1780
- // Save the current handler.
1781
- movq(kScratchRegister, ExternalReference(Top::k_handler_address));
1782
- push(Operand(kScratchRegister, 0));
1783
- // Link this handler.
1784
- movq(Operand(kScratchRegister, 0), rsp);
1785
- }
1786
-
1787
-
1788
- void MacroAssembler::PopTryHandler() {
1789
- ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
1790
- // Unlink this handler.
1791
- movq(kScratchRegister, ExternalReference(Top::k_handler_address));
1792
- pop(Operand(kScratchRegister, 0));
1793
- // Remove the remaining fields.
1794
- addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
1795
- }
1796
-
1797
-
1798
- void MacroAssembler::Ret() {
1799
- ret(0);
1800
- }
1801
-
1802
-
1803
- void MacroAssembler::FCmp() {
1804
- fucomip();
1805
- fstp(0);
1806
- }
1807
-
1808
-
1809
- void MacroAssembler::CmpObjectType(Register heap_object,
1810
- InstanceType type,
1811
- Register map) {
1812
- movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
1813
- CmpInstanceType(map, type);
1814
- }
1815
-
1816
-
1817
- void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
1818
- cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
1819
- Immediate(static_cast<int8_t>(type)));
1820
- }
1821
-
1822
-
1823
- void MacroAssembler::CheckMap(Register obj,
1824
- Handle<Map> map,
1825
- Label* fail,
1826
- bool is_heap_object) {
1827
- if (!is_heap_object) {
1828
- JumpIfSmi(obj, fail);
1829
- }
1830
- Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
1831
- j(not_equal, fail);
1832
- }
1833
-
1834
-
1835
- void MacroAssembler::AbortIfNotNumber(Register object) {
1836
- Label ok;
1837
- Condition is_smi = CheckSmi(object);
1838
- j(is_smi, &ok);
1839
- Cmp(FieldOperand(object, HeapObject::kMapOffset),
1840
- Factory::heap_number_map());
1841
- Assert(equal, "Operand not a number");
1842
- bind(&ok);
1843
- }
1844
-
1845
-
1846
- void MacroAssembler::AbortIfNotSmi(Register object) {
1847
- Label ok;
1848
- Condition is_smi = CheckSmi(object);
1849
- Assert(is_smi, "Operand not a smi");
1850
- }
1851
-
1852
-
1853
- void MacroAssembler::AbortIfNotRootValue(Register src,
1854
- Heap::RootListIndex root_value_index,
1855
- const char* message) {
1856
- ASSERT(!src.is(kScratchRegister));
1857
- LoadRoot(kScratchRegister, root_value_index);
1858
- cmpq(src, kScratchRegister);
1859
- Check(equal, message);
1860
- }
1861
-
1862
-
1863
-
1864
- Condition MacroAssembler::IsObjectStringType(Register heap_object,
1865
- Register map,
1866
- Register instance_type) {
1867
- movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
1868
- movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
1869
- ASSERT(kNotStringTag != 0);
1870
- testb(instance_type, Immediate(kIsNotStringMask));
1871
- return zero;
1872
- }
1873
-
1874
-
1875
- void MacroAssembler::TryGetFunctionPrototype(Register function,
1876
- Register result,
1877
- Label* miss) {
1878
- // Check that the receiver isn't a smi.
1879
- testl(function, Immediate(kSmiTagMask));
1880
- j(zero, miss);
1881
-
1882
- // Check that the function really is a function.
1883
- CmpObjectType(function, JS_FUNCTION_TYPE, result);
1884
- j(not_equal, miss);
1885
-
1886
- // Make sure that the function has an instance prototype.
1887
- Label non_instance;
1888
- testb(FieldOperand(result, Map::kBitFieldOffset),
1889
- Immediate(1 << Map::kHasNonInstancePrototype));
1890
- j(not_zero, &non_instance);
1891
-
1892
- // Get the prototype or initial map from the function.
1893
- movq(result,
1894
- FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
1895
-
1896
- // If the prototype or initial map is the hole, don't return it and
1897
- // simply miss the cache instead. This will allow us to allocate a
1898
- // prototype object on-demand in the runtime system.
1899
- CompareRoot(result, Heap::kTheHoleValueRootIndex);
1900
- j(equal, miss);
1901
-
1902
- // If the function does not have an initial map, we're done.
1903
- Label done;
1904
- CmpObjectType(result, MAP_TYPE, kScratchRegister);
1905
- j(not_equal, &done);
1906
-
1907
- // Get the prototype from the initial map.
1908
- movq(result, FieldOperand(result, Map::kPrototypeOffset));
1909
- jmp(&done);
1910
-
1911
- // Non-instance prototype: Fetch prototype from constructor field
1912
- // in initial map.
1913
- bind(&non_instance);
1914
- movq(result, FieldOperand(result, Map::kConstructorOffset));
1915
-
1916
- // All done.
1917
- bind(&done);
1918
- }
1919
-
1920
-
1921
- void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
1922
- if (FLAG_native_code_counters && counter->Enabled()) {
1923
- movq(kScratchRegister, ExternalReference(counter));
1924
- movl(Operand(kScratchRegister, 0), Immediate(value));
1925
- }
1926
- }
1927
-
1928
-
1929
- void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
1930
- ASSERT(value > 0);
1931
- if (FLAG_native_code_counters && counter->Enabled()) {
1932
- movq(kScratchRegister, ExternalReference(counter));
1933
- Operand operand(kScratchRegister, 0);
1934
- if (value == 1) {
1935
- incl(operand);
1936
- } else {
1937
- addl(operand, Immediate(value));
1938
- }
1939
- }
1940
- }
1941
-
1942
-
1943
- void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
1944
- ASSERT(value > 0);
1945
- if (FLAG_native_code_counters && counter->Enabled()) {
1946
- movq(kScratchRegister, ExternalReference(counter));
1947
- Operand operand(kScratchRegister, 0);
1948
- if (value == 1) {
1949
- decl(operand);
1950
- } else {
1951
- subl(operand, Immediate(value));
1952
- }
1953
- }
1954
- }
1955
-
1956
- #ifdef ENABLE_DEBUGGER_SUPPORT
1957
-
1958
- void MacroAssembler::PushRegistersFromMemory(RegList regs) {
1959
- ASSERT((regs & ~kJSCallerSaved) == 0);
1960
- // Push the content of the memory location to the stack.
1961
- for (int i = 0; i < kNumJSCallerSaved; i++) {
1962
- int r = JSCallerSavedCode(i);
1963
- if ((regs & (1 << r)) != 0) {
1964
- ExternalReference reg_addr =
1965
- ExternalReference(Debug_Address::Register(i));
1966
- movq(kScratchRegister, reg_addr);
1967
- push(Operand(kScratchRegister, 0));
1968
- }
1969
- }
1970
- }
1971
-
1972
-
1973
- void MacroAssembler::SaveRegistersToMemory(RegList regs) {
1974
- ASSERT((regs & ~kJSCallerSaved) == 0);
1975
- // Copy the content of registers to memory location.
1976
- for (int i = 0; i < kNumJSCallerSaved; i++) {
1977
- int r = JSCallerSavedCode(i);
1978
- if ((regs & (1 << r)) != 0) {
1979
- Register reg = { r };
1980
- ExternalReference reg_addr =
1981
- ExternalReference(Debug_Address::Register(i));
1982
- movq(kScratchRegister, reg_addr);
1983
- movq(Operand(kScratchRegister, 0), reg);
1984
- }
1985
- }
1986
- }
1987
-
1988
-
1989
- void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
1990
- ASSERT((regs & ~kJSCallerSaved) == 0);
1991
- // Copy the content of memory location to registers.
1992
- for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
1993
- int r = JSCallerSavedCode(i);
1994
- if ((regs & (1 << r)) != 0) {
1995
- Register reg = { r };
1996
- ExternalReference reg_addr =
1997
- ExternalReference(Debug_Address::Register(i));
1998
- movq(kScratchRegister, reg_addr);
1999
- movq(reg, Operand(kScratchRegister, 0));
2000
- }
2001
- }
2002
- }
2003
-
2004
-
2005
- void MacroAssembler::PopRegistersToMemory(RegList regs) {
2006
- ASSERT((regs & ~kJSCallerSaved) == 0);
2007
- // Pop the content from the stack to the memory location.
2008
- for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
2009
- int r = JSCallerSavedCode(i);
2010
- if ((regs & (1 << r)) != 0) {
2011
- ExternalReference reg_addr =
2012
- ExternalReference(Debug_Address::Register(i));
2013
- movq(kScratchRegister, reg_addr);
2014
- pop(Operand(kScratchRegister, 0));
2015
- }
2016
- }
2017
- }
2018
-
2019
-
2020
- void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
2021
- Register scratch,
2022
- RegList regs) {
2023
- ASSERT(!scratch.is(kScratchRegister));
2024
- ASSERT(!base.is(kScratchRegister));
2025
- ASSERT(!base.is(scratch));
2026
- ASSERT((regs & ~kJSCallerSaved) == 0);
2027
- // Copy the content of the stack to the memory location and adjust base.
2028
- for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
2029
- int r = JSCallerSavedCode(i);
2030
- if ((regs & (1 << r)) != 0) {
2031
- movq(scratch, Operand(base, 0));
2032
- ExternalReference reg_addr =
2033
- ExternalReference(Debug_Address::Register(i));
2034
- movq(kScratchRegister, reg_addr);
2035
- movq(Operand(kScratchRegister, 0), scratch);
2036
- lea(base, Operand(base, kPointerSize));
2037
- }
2038
- }
2039
- }
2040
-
2041
- void MacroAssembler::DebugBreak() {
2042
- ASSERT(allow_stub_calls());
2043
- xor_(rax, rax); // no arguments
2044
- movq(rbx, ExternalReference(Runtime::kDebugBreak));
2045
- CEntryStub ces(1);
2046
- Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
2047
- }
2048
- #endif // ENABLE_DEBUGGER_SUPPORT
2049
-
2050
-
2051
- void MacroAssembler::InvokePrologue(const ParameterCount& expected,
2052
- const ParameterCount& actual,
2053
- Handle<Code> code_constant,
2054
- Register code_register,
2055
- Label* done,
2056
- InvokeFlag flag) {
2057
- bool definitely_matches = false;
2058
- Label invoke;
2059
- if (expected.is_immediate()) {
2060
- ASSERT(actual.is_immediate());
2061
- if (expected.immediate() == actual.immediate()) {
2062
- definitely_matches = true;
2063
- } else {
2064
- Set(rax, actual.immediate());
2065
- if (expected.immediate() ==
2066
- SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
2067
- // Don't worry about adapting arguments for built-ins that
2068
- // don't want that done. Skip adaption code by making it look
2069
- // like we have a match between expected and actual number of
2070
- // arguments.
2071
- definitely_matches = true;
2072
- } else {
2073
- Set(rbx, expected.immediate());
2074
- }
2075
- }
2076
- } else {
2077
- if (actual.is_immediate()) {
2078
- // Expected is in register, actual is immediate. This is the
2079
- // case when we invoke function values without going through the
2080
- // IC mechanism.
2081
- cmpq(expected.reg(), Immediate(actual.immediate()));
2082
- j(equal, &invoke);
2083
- ASSERT(expected.reg().is(rbx));
2084
- Set(rax, actual.immediate());
2085
- } else if (!expected.reg().is(actual.reg())) {
2086
- // Both expected and actual are in (different) registers. This
2087
- // is the case when we invoke functions using call and apply.
2088
- cmpq(expected.reg(), actual.reg());
2089
- j(equal, &invoke);
2090
- ASSERT(actual.reg().is(rax));
2091
- ASSERT(expected.reg().is(rbx));
2092
- }
2093
- }
2094
-
2095
- if (!definitely_matches) {
2096
- Handle<Code> adaptor =
2097
- Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
2098
- if (!code_constant.is_null()) {
2099
- movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
2100
- addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
2101
- } else if (!code_register.is(rdx)) {
2102
- movq(rdx, code_register);
2103
- }
2104
-
2105
- if (flag == CALL_FUNCTION) {
2106
- Call(adaptor, RelocInfo::CODE_TARGET);
2107
- jmp(done);
2108
- } else {
2109
- Jump(adaptor, RelocInfo::CODE_TARGET);
2110
- }
2111
- bind(&invoke);
2112
- }
2113
- }
2114
-
2115
-
2116
- void MacroAssembler::InvokeCode(Register code,
2117
- const ParameterCount& expected,
2118
- const ParameterCount& actual,
2119
- InvokeFlag flag) {
2120
- Label done;
2121
- InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
2122
- if (flag == CALL_FUNCTION) {
2123
- call(code);
2124
- } else {
2125
- ASSERT(flag == JUMP_FUNCTION);
2126
- jmp(code);
2127
- }
2128
- bind(&done);
2129
- }
2130
-
2131
-
2132
- void MacroAssembler::InvokeCode(Handle<Code> code,
2133
- const ParameterCount& expected,
2134
- const ParameterCount& actual,
2135
- RelocInfo::Mode rmode,
2136
- InvokeFlag flag) {
2137
- Label done;
2138
- Register dummy = rax;
2139
- InvokePrologue(expected, actual, code, dummy, &done, flag);
2140
- if (flag == CALL_FUNCTION) {
2141
- Call(code, rmode);
2142
- } else {
2143
- ASSERT(flag == JUMP_FUNCTION);
2144
- Jump(code, rmode);
2145
- }
2146
- bind(&done);
2147
- }
2148
-
2149
-
2150
- void MacroAssembler::InvokeFunction(Register function,
2151
- const ParameterCount& actual,
2152
- InvokeFlag flag) {
2153
- ASSERT(function.is(rdi));
2154
- movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
2155
- movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
2156
- movsxlq(rbx,
2157
- FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
2158
- movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
2159
- // Advances rdx to the end of the Code object header, to the start of
2160
- // the executable code.
2161
- lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
2162
-
2163
- ParameterCount expected(rbx);
2164
- InvokeCode(rdx, expected, actual, flag);
2165
- }
2166
-
2167
-
2168
- void MacroAssembler::InvokeFunction(JSFunction* function,
2169
- const ParameterCount& actual,
2170
- InvokeFlag flag) {
2171
- ASSERT(function->is_compiled());
2172
- // Get the function and setup the context.
2173
- Move(rdi, Handle<JSFunction>(function));
2174
- movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
2175
-
2176
- // Invoke the cached code.
2177
- Handle<Code> code(function->code());
2178
- ParameterCount expected(function->shared()->formal_parameter_count());
2179
- InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
2180
- }
2181
-
2182
-
2183
- void MacroAssembler::EnterFrame(StackFrame::Type type) {
2184
- push(rbp);
2185
- movq(rbp, rsp);
2186
- push(rsi); // Context.
2187
- Push(Smi::FromInt(type));
2188
- movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
2189
- push(kScratchRegister);
2190
- if (FLAG_debug_code) {
2191
- movq(kScratchRegister,
2192
- Factory::undefined_value(),
2193
- RelocInfo::EMBEDDED_OBJECT);
2194
- cmpq(Operand(rsp, 0), kScratchRegister);
2195
- Check(not_equal, "code object not properly patched");
2196
- }
2197
- }
2198
-
2199
-
2200
- void MacroAssembler::LeaveFrame(StackFrame::Type type) {
2201
- if (FLAG_debug_code) {
2202
- Move(kScratchRegister, Smi::FromInt(type));
2203
- cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
2204
- Check(equal, "stack frame types must match");
2205
- }
2206
- movq(rsp, rbp);
2207
- pop(rbp);
2208
- }
2209
-
2210
-
2211
- void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode, int result_size) {
2212
- // Setup the frame structure on the stack.
2213
- // All constants are relative to the frame pointer of the exit frame.
2214
- ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
2215
- ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
2216
- ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
2217
- push(rbp);
2218
- movq(rbp, rsp);
2219
-
2220
- // Reserve room for entry stack pointer and push the debug marker.
2221
- ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
2222
- push(Immediate(0)); // Saved entry sp, patched before call.
2223
- movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
2224
- push(kScratchRegister); // Accessed from EditFrame::code_slot.
2225
-
2226
- // Save the frame pointer and the context in top.
2227
- ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
2228
- ExternalReference context_address(Top::k_context_address);
2229
- movq(r14, rax); // Backup rax before we use it.
2230
-
2231
- movq(rax, rbp);
2232
- store_rax(c_entry_fp_address);
2233
- movq(rax, rsi);
2234
- store_rax(context_address);
2235
-
2236
- // Setup argv in callee-saved register r12. It is reused in LeaveExitFrame,
2237
- // so it must be retained across the C-call.
2238
- int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
2239
- lea(r12, Operand(rbp, r14, times_pointer_size, offset));
2240
-
2241
- #ifdef ENABLE_DEBUGGER_SUPPORT
2242
- // Save the state of all registers to the stack from the memory
2243
- // location. This is needed to allow nested break points.
2244
- if (mode == ExitFrame::MODE_DEBUG) {
2245
- // TODO(1243899): This should be symmetric to
2246
- // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
2247
- // correct here, but computed for the other call. Very error
2248
- // prone! FIX THIS. Actually there are deeper problems with
2249
- // register saving than this asymmetry (see the bug report
2250
- // associated with this issue).
2251
- PushRegistersFromMemory(kJSCallerSaved);
2252
- }
2253
- #endif
2254
-
2255
- #ifdef _WIN64
2256
- // Reserve space on stack for result and argument structures, if necessary.
2257
- int result_stack_space = (result_size < 2) ? 0 : result_size * kPointerSize;
2258
- // Reserve space for the Arguments object. The Windows 64-bit ABI
2259
- // requires us to pass this structure as a pointer to its location on
2260
- // the stack. The structure contains 2 values.
2261
- int argument_stack_space = 2 * kPointerSize;
2262
- // We also need backing space for 4 parameters, even though
2263
- // we only pass one or two parameter, and it is in a register.
2264
- int argument_mirror_space = 4 * kPointerSize;
2265
- int total_stack_space =
2266
- argument_mirror_space + argument_stack_space + result_stack_space;
2267
- subq(rsp, Immediate(total_stack_space));
2268
- #endif
2269
-
2270
- // Get the required frame alignment for the OS.
2271
- static const int kFrameAlignment = OS::ActivationFrameAlignment();
2272
- if (kFrameAlignment > 0) {
2273
- ASSERT(IsPowerOf2(kFrameAlignment));
2274
- movq(kScratchRegister, Immediate(-kFrameAlignment));
2275
- and_(rsp, kScratchRegister);
2276
- }
2277
-
2278
- // Patch the saved entry sp.
2279
- movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
2280
- }
2281
-
2282
-
2283
- void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode, int result_size) {
2284
- // Registers:
2285
- // r12 : argv
2286
- #ifdef ENABLE_DEBUGGER_SUPPORT
2287
- // Restore the memory copy of the registers by digging them out from
2288
- // the stack. This is needed to allow nested break points.
2289
- if (mode == ExitFrame::MODE_DEBUG) {
2290
- // It's okay to clobber register rbx below because we don't need
2291
- // the function pointer after this.
2292
- const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
2293
- int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize;
2294
- lea(rbx, Operand(rbp, kOffset));
2295
- CopyRegistersFromStackToMemory(rbx, rcx, kJSCallerSaved);
2296
- }
2297
- #endif
2298
-
2299
- // Get the return address from the stack and restore the frame pointer.
2300
- movq(rcx, Operand(rbp, 1 * kPointerSize));
2301
- movq(rbp, Operand(rbp, 0 * kPointerSize));
2302
-
2303
- // Pop everything up to and including the arguments and the receiver
2304
- // from the caller stack.
2305
- lea(rsp, Operand(r12, 1 * kPointerSize));
2306
-
2307
- // Restore current context from top and clear it in debug mode.
2308
- ExternalReference context_address(Top::k_context_address);
2309
- movq(kScratchRegister, context_address);
2310
- movq(rsi, Operand(kScratchRegister, 0));
2311
- #ifdef DEBUG
2312
- movq(Operand(kScratchRegister, 0), Immediate(0));
2313
- #endif
2314
-
2315
- // Push the return address to get ready to return.
2316
- push(rcx);
2317
-
2318
- // Clear the top frame.
2319
- ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
2320
- movq(kScratchRegister, c_entry_fp_address);
2321
- movq(Operand(kScratchRegister, 0), Immediate(0));
2322
- }
2323
-
2324
-
2325
- void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
2326
- Register scratch,
2327
- Label* miss) {
2328
- Label same_contexts;
2329
-
2330
- ASSERT(!holder_reg.is(scratch));
2331
- ASSERT(!scratch.is(kScratchRegister));
2332
- // Load current lexical context from the stack frame.
2333
- movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));
2334
-
2335
- // When generating debug code, make sure the lexical context is set.
2336
- if (FLAG_debug_code) {
2337
- cmpq(scratch, Immediate(0));
2338
- Check(not_equal, "we should not have an empty lexical context");
2339
- }
2340
- // Load the global context of the current context.
2341
- int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
2342
- movq(scratch, FieldOperand(scratch, offset));
2343
- movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
2344
-
2345
- // Check the context is a global context.
2346
- if (FLAG_debug_code) {
2347
- Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
2348
- Factory::global_context_map());
2349
- Check(equal, "JSGlobalObject::global_context should be a global context.");
2350
- }
2351
-
2352
- // Check if both contexts are the same.
2353
- cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2354
- j(equal, &same_contexts);
2355
-
2356
- // Compare security tokens.
2357
- // Check that the security token in the calling global object is
2358
- // compatible with the security token in the receiving global
2359
- // object.
2360
-
2361
- // Check the context is a global context.
2362
- if (FLAG_debug_code) {
2363
- // Preserve original value of holder_reg.
2364
- push(holder_reg);
2365
- movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2366
- CompareRoot(holder_reg, Heap::kNullValueRootIndex);
2367
- Check(not_equal, "JSGlobalProxy::context() should not be null.");
2368
-
2369
- // Read the first word and compare to global_context_map(),
2370
- movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
2371
- CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
2372
- Check(equal, "JSGlobalObject::global_context should be a global context.");
2373
- pop(holder_reg);
2374
- }
2375
-
2376
- movq(kScratchRegister,
2377
- FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2378
- int token_offset =
2379
- Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
2380
- movq(scratch, FieldOperand(scratch, token_offset));
2381
- cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
2382
- j(not_equal, miss);
2383
-
2384
- bind(&same_contexts);
2385
- }
2386
-
2387
-
2388
- void MacroAssembler::LoadAllocationTopHelper(Register result,
2389
- Register result_end,
2390
- Register scratch,
2391
- AllocationFlags flags) {
2392
- ExternalReference new_space_allocation_top =
2393
- ExternalReference::new_space_allocation_top_address();
2394
-
2395
- // Just return if allocation top is already known.
2396
- if ((flags & RESULT_CONTAINS_TOP) != 0) {
2397
- // No use of scratch if allocation top is provided.
2398
- ASSERT(!scratch.is_valid());
2399
- #ifdef DEBUG
2400
- // Assert that result actually contains top on entry.
2401
- movq(kScratchRegister, new_space_allocation_top);
2402
- cmpq(result, Operand(kScratchRegister, 0));
2403
- Check(equal, "Unexpected allocation top");
2404
- #endif
2405
- return;
2406
- }
2407
-
2408
- // Move address of new object to result. Use scratch register if available,
2409
- // and keep address in scratch until call to UpdateAllocationTopHelper.
2410
- if (scratch.is_valid()) {
2411
- ASSERT(!scratch.is(result_end));
2412
- movq(scratch, new_space_allocation_top);
2413
- movq(result, Operand(scratch, 0));
2414
- } else if (result.is(rax)) {
2415
- load_rax(new_space_allocation_top);
2416
- } else {
2417
- movq(kScratchRegister, new_space_allocation_top);
2418
- movq(result, Operand(kScratchRegister, 0));
2419
- }
2420
- }
2421
-
2422
-
2423
- void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
2424
- Register scratch) {
2425
- if (FLAG_debug_code) {
2426
- testq(result_end, Immediate(kObjectAlignmentMask));
2427
- Check(zero, "Unaligned allocation in new space");
2428
- }
2429
-
2430
- ExternalReference new_space_allocation_top =
2431
- ExternalReference::new_space_allocation_top_address();
2432
-
2433
- // Update new top.
2434
- if (result_end.is(rax)) {
2435
- // rax can be stored directly to a memory location.
2436
- store_rax(new_space_allocation_top);
2437
- } else {
2438
- // Register required - use scratch provided if available.
2439
- if (scratch.is_valid()) {
2440
- movq(Operand(scratch, 0), result_end);
2441
- } else {
2442
- movq(kScratchRegister, new_space_allocation_top);
2443
- movq(Operand(kScratchRegister, 0), result_end);
2444
- }
2445
- }
2446
- }
2447
-
2448
-
2449
- void MacroAssembler::AllocateInNewSpace(int object_size,
2450
- Register result,
2451
- Register result_end,
2452
- Register scratch,
2453
- Label* gc_required,
2454
- AllocationFlags flags) {
2455
- ASSERT(!result.is(result_end));
2456
-
2457
- // Load address of new object into result.
2458
- LoadAllocationTopHelper(result, result_end, scratch, flags);
2459
-
2460
- // Calculate new top and bail out if new space is exhausted.
2461
- ExternalReference new_space_allocation_limit =
2462
- ExternalReference::new_space_allocation_limit_address();
2463
-
2464
- Register top_reg = result_end.is_valid() ? result_end : result;
2465
-
2466
- if (top_reg.is(result)) {
2467
- addq(top_reg, Immediate(object_size));
2468
- } else {
2469
- lea(top_reg, Operand(result, object_size));
2470
- }
2471
- movq(kScratchRegister, new_space_allocation_limit);
2472
- cmpq(top_reg, Operand(kScratchRegister, 0));
2473
- j(above, gc_required);
2474
-
2475
- // Update allocation top.
2476
- UpdateAllocationTopHelper(top_reg, scratch);
2477
-
2478
- if (top_reg.is(result)) {
2479
- if ((flags & TAG_OBJECT) != 0) {
2480
- subq(result, Immediate(object_size - kHeapObjectTag));
2481
- } else {
2482
- subq(result, Immediate(object_size));
2483
- }
2484
- } else if ((flags & TAG_OBJECT) != 0) {
2485
- // Tag the result if requested.
2486
- addq(result, Immediate(kHeapObjectTag));
2487
- }
2488
- }
2489
-
2490
-
2491
- void MacroAssembler::AllocateInNewSpace(int header_size,
2492
- ScaleFactor element_size,
2493
- Register element_count,
2494
- Register result,
2495
- Register result_end,
2496
- Register scratch,
2497
- Label* gc_required,
2498
- AllocationFlags flags) {
2499
- ASSERT(!result.is(result_end));
2500
-
2501
- // Load address of new object into result.
2502
- LoadAllocationTopHelper(result, result_end, scratch, flags);
2503
-
2504
- // Calculate new top and bail out if new space is exhausted.
2505
- ExternalReference new_space_allocation_limit =
2506
- ExternalReference::new_space_allocation_limit_address();
2507
- lea(result_end, Operand(result, element_count, element_size, header_size));
2508
- movq(kScratchRegister, new_space_allocation_limit);
2509
- cmpq(result_end, Operand(kScratchRegister, 0));
2510
- j(above, gc_required);
2511
-
2512
- // Update allocation top.
2513
- UpdateAllocationTopHelper(result_end, scratch);
2514
-
2515
- // Tag the result if requested.
2516
- if ((flags & TAG_OBJECT) != 0) {
2517
- addq(result, Immediate(kHeapObjectTag));
2518
- }
2519
- }
2520
-
2521
-
2522
- void MacroAssembler::AllocateInNewSpace(Register object_size,
2523
- Register result,
2524
- Register result_end,
2525
- Register scratch,
2526
- Label* gc_required,
2527
- AllocationFlags flags) {
2528
- // Load address of new object into result.
2529
- LoadAllocationTopHelper(result, result_end, scratch, flags);
2530
-
2531
- // Calculate new top and bail out if new space is exhausted.
2532
- ExternalReference new_space_allocation_limit =
2533
- ExternalReference::new_space_allocation_limit_address();
2534
- if (!object_size.is(result_end)) {
2535
- movq(result_end, object_size);
2536
- }
2537
- addq(result_end, result);
2538
- movq(kScratchRegister, new_space_allocation_limit);
2539
- cmpq(result_end, Operand(kScratchRegister, 0));
2540
- j(above, gc_required);
2541
-
2542
- // Update allocation top.
2543
- UpdateAllocationTopHelper(result_end, scratch);
2544
-
2545
- // Tag the result if requested.
2546
- if ((flags & TAG_OBJECT) != 0) {
2547
- addq(result, Immediate(kHeapObjectTag));
2548
- }
2549
- }
2550
-
2551
-
2552
- void MacroAssembler::UndoAllocationInNewSpace(Register object) {
2553
- ExternalReference new_space_allocation_top =
2554
- ExternalReference::new_space_allocation_top_address();
2555
-
2556
- // Make sure the object has no tag before resetting top.
2557
- and_(object, Immediate(~kHeapObjectTagMask));
2558
- movq(kScratchRegister, new_space_allocation_top);
2559
- #ifdef DEBUG
2560
- cmpq(object, Operand(kScratchRegister, 0));
2561
- Check(below, "Undo allocation of non allocated memory");
2562
- #endif
2563
- movq(Operand(kScratchRegister, 0), object);
2564
- }
2565
-
2566
-
2567
- void MacroAssembler::AllocateHeapNumber(Register result,
2568
- Register scratch,
2569
- Label* gc_required) {
2570
- // Allocate heap number in new space.
2571
- AllocateInNewSpace(HeapNumber::kSize,
2572
- result,
2573
- scratch,
2574
- no_reg,
2575
- gc_required,
2576
- TAG_OBJECT);
2577
-
2578
- // Set the map.
2579
- LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
2580
- movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2581
- }
2582
-
2583
-
2584
- void MacroAssembler::AllocateTwoByteString(Register result,
2585
- Register length,
2586
- Register scratch1,
2587
- Register scratch2,
2588
- Register scratch3,
2589
- Label* gc_required) {
2590
- // Calculate the number of bytes needed for the characters in the string while
2591
- // observing object alignment.
2592
- const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
2593
- kObjectAlignmentMask;
2594
- ASSERT(kShortSize == 2);
2595
- // scratch1 = length * 2 + kObjectAlignmentMask.
2596
- lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
2597
- kHeaderAlignment));
2598
- and_(scratch1, Immediate(~kObjectAlignmentMask));
2599
- if (kHeaderAlignment > 0) {
2600
- subq(scratch1, Immediate(kHeaderAlignment));
2601
- }
2602
-
2603
- // Allocate two byte string in new space.
2604
- AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
2605
- times_1,
2606
- scratch1,
2607
- result,
2608
- scratch2,
2609
- scratch3,
2610
- gc_required,
2611
- TAG_OBJECT);
2612
-
2613
- // Set the map, length and hash field.
2614
- LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
2615
- movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2616
- Integer32ToSmi(scratch1, length);
2617
- movq(FieldOperand(result, String::kLengthOffset), scratch1);
2618
- movq(FieldOperand(result, String::kHashFieldOffset),
2619
- Immediate(String::kEmptyHashField));
2620
- }
2621
-
2622
-
2623
- void MacroAssembler::AllocateAsciiString(Register result,
2624
- Register length,
2625
- Register scratch1,
2626
- Register scratch2,
2627
- Register scratch3,
2628
- Label* gc_required) {
2629
- // Calculate the number of bytes needed for the characters in the string while
2630
- // observing object alignment.
2631
- const int kHeaderAlignment = SeqAsciiString::kHeaderSize &
2632
- kObjectAlignmentMask;
2633
- movl(scratch1, length);
2634
- ASSERT(kCharSize == 1);
2635
- addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
2636
- and_(scratch1, Immediate(~kObjectAlignmentMask));
2637
- if (kHeaderAlignment > 0) {
2638
- subq(scratch1, Immediate(kHeaderAlignment));
2639
- }
2640
-
2641
- // Allocate ascii string in new space.
2642
- AllocateInNewSpace(SeqAsciiString::kHeaderSize,
2643
- times_1,
2644
- scratch1,
2645
- result,
2646
- scratch2,
2647
- scratch3,
2648
- gc_required,
2649
- TAG_OBJECT);
2650
-
2651
- // Set the map, length and hash field.
2652
- LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
2653
- movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2654
- Integer32ToSmi(scratch1, length);
2655
- movq(FieldOperand(result, String::kLengthOffset), scratch1);
2656
- movq(FieldOperand(result, String::kHashFieldOffset),
2657
- Immediate(String::kEmptyHashField));
2658
- }
2659
-
2660
-
2661
- void MacroAssembler::AllocateConsString(Register result,
2662
- Register scratch1,
2663
- Register scratch2,
2664
- Label* gc_required) {
2665
- // Allocate heap number in new space.
2666
- AllocateInNewSpace(ConsString::kSize,
2667
- result,
2668
- scratch1,
2669
- scratch2,
2670
- gc_required,
2671
- TAG_OBJECT);
2672
-
2673
- // Set the map. The other fields are left uninitialized.
2674
- LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
2675
- movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2676
- }
2677
-
2678
-
2679
- void MacroAssembler::AllocateAsciiConsString(Register result,
2680
- Register scratch1,
2681
- Register scratch2,
2682
- Label* gc_required) {
2683
- // Allocate heap number in new space.
2684
- AllocateInNewSpace(ConsString::kSize,
2685
- result,
2686
- scratch1,
2687
- scratch2,
2688
- gc_required,
2689
- TAG_OBJECT);
2690
-
2691
- // Set the map. The other fields are left uninitialized.
2692
- LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
2693
- movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2694
- }
2695
-
2696
-
2697
- void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
2698
- if (context_chain_length > 0) {
2699
- // Move up the chain of contexts to the context containing the slot.
2700
- movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
2701
- // Load the function context (which is the incoming, outer context).
2702
- movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
2703
- for (int i = 1; i < context_chain_length; i++) {
2704
- movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
2705
- movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
2706
- }
2707
- // The context may be an intermediate context, not a function context.
2708
- movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2709
- } else { // context is the current function context.
2710
- // The context may be an intermediate context, not a function context.
2711
- movq(dst, Operand(rsi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2712
- }
2713
- }
2714
-
2715
-
2716
- int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
2717
- // On Windows 64 stack slots are reserved by the caller for all arguments
2718
- // including the ones passed in registers, and space is always allocated for
2719
- // the four register arguments even if the function takes fewer than four
2720
- // arguments.
2721
- // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
2722
- // and the caller does not reserve stack slots for them.
2723
- ASSERT(num_arguments >= 0);
2724
- #ifdef _WIN64
2725
- static const int kMinimumStackSlots = 4;
2726
- if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
2727
- return num_arguments;
2728
- #else
2729
- static const int kRegisterPassedArguments = 6;
2730
- if (num_arguments < kRegisterPassedArguments) return 0;
2731
- return num_arguments - kRegisterPassedArguments;
2732
- #endif
2733
- }
2734
-
2735
-
2736
- void MacroAssembler::PrepareCallCFunction(int num_arguments) {
2737
- int frame_alignment = OS::ActivationFrameAlignment();
2738
- ASSERT(frame_alignment != 0);
2739
- ASSERT(num_arguments >= 0);
2740
- // Make stack end at alignment and allocate space for arguments and old rsp.
2741
- movq(kScratchRegister, rsp);
2742
- ASSERT(IsPowerOf2(frame_alignment));
2743
- int argument_slots_on_stack =
2744
- ArgumentStackSlotsForCFunctionCall(num_arguments);
2745
- subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
2746
- and_(rsp, Immediate(-frame_alignment));
2747
- movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
2748
- }
2749
-
2750
-
2751
- void MacroAssembler::CallCFunction(ExternalReference function,
2752
- int num_arguments) {
2753
- movq(rax, function);
2754
- CallCFunction(rax, num_arguments);
2755
- }
2756
-
2757
-
2758
- void MacroAssembler::CallCFunction(Register function, int num_arguments) {
2759
- // Check stack alignment.
2760
- if (FLAG_debug_code) {
2761
- CheckStackAlignment();
2762
- }
2763
-
2764
- call(function);
2765
- ASSERT(OS::ActivationFrameAlignment() != 0);
2766
- ASSERT(num_arguments >= 0);
2767
- int argument_slots_on_stack =
2768
- ArgumentStackSlotsForCFunctionCall(num_arguments);
2769
- movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
2770
- }
2771
-
2772
-
2773
- CodePatcher::CodePatcher(byte* address, int size)
2774
- : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
2775
- // Create a new macro assembler pointing to the address of the code to patch.
2776
- // The size is adjusted with kGap on order for the assembler to generate size
2777
- // bytes of instructions without failing with buffer size constraints.
2778
- ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2779
- }
2780
-
2781
-
2782
- CodePatcher::~CodePatcher() {
2783
- // Indicate that code has changed.
2784
- CPU::FlushICache(address_, size_);
2785
-
2786
- // Check that the code was patched as expected.
2787
- ASSERT(masm_.pc_ == address_ + size_);
2788
- ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2789
- }
2790
-
2791
- } } // namespace v8::internal
2792
-
2793
- #endif // V8_TARGET_ARCH_X64