therubyracer 0.8.1.pre2 → 0.8.1

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of therubyracer might be problematic. Click here for more details.

Files changed (877)
  1. data/Changelog.md +2 -1
  2. data/README.md +6 -3
  3. data/ext/v8/upstream/3.1.8/.gitignore +31 -0
  4. data/ext/v8/upstream/3.1.8/AUTHORS +40 -0
  5. data/ext/v8/upstream/3.1.8/ChangeLog +2566 -0
  6. data/ext/v8/upstream/3.1.8/LICENSE +52 -0
  7. data/ext/v8/upstream/3.1.8/LICENSE.strongtalk +29 -0
  8. data/ext/v8/upstream/3.1.8/LICENSE.v8 +26 -0
  9. data/ext/v8/upstream/3.1.8/LICENSE.valgrind +45 -0
  10. data/ext/v8/upstream/3.1.8/SConstruct +1192 -0
  11. data/ext/v8/upstream/3.1.8/build/README.txt +25 -0
  12. data/ext/v8/upstream/3.1.8/build/all.gyp +18 -0
  13. data/ext/v8/upstream/3.1.8/build/armu.gypi +32 -0
  14. data/ext/v8/upstream/3.1.8/build/common.gypi +82 -0
  15. data/ext/v8/upstream/3.1.8/build/gyp_v8 +145 -0
  16. data/ext/v8/upstream/3.1.8/include/v8-debug.h +384 -0
  17. data/ext/v8/upstream/3.1.8/include/v8-preparser.h +116 -0
  18. data/ext/v8/upstream/3.1.8/include/v8-profiler.h +426 -0
  19. data/ext/v8/upstream/3.1.8/include/v8-testing.h +99 -0
  20. data/ext/v8/upstream/3.1.8/include/v8.h +3846 -0
  21. data/ext/v8/upstream/3.1.8/include/v8stdint.h +53 -0
  22. data/ext/v8/upstream/3.1.8/preparser/preparser-process.cc +206 -0
  23. data/ext/v8/upstream/3.1.8/src/SConscript +356 -0
  24. data/ext/v8/upstream/3.1.8/src/accessors.cc +907 -0
  25. data/ext/v8/upstream/3.1.8/src/accessors.h +121 -0
  26. data/ext/v8/upstream/3.1.8/src/allocation.cc +204 -0
  27. data/ext/v8/upstream/3.1.8/src/allocation.h +176 -0
  28. data/ext/v8/upstream/3.1.8/src/api.cc +5191 -0
  29. data/ext/v8/upstream/3.1.8/src/api.h +508 -0
  30. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/apinatives.js +0 -0
  31. data/ext/v8/upstream/3.1.8/src/apiutils.h +80 -0
  32. data/ext/v8/upstream/3.1.8/src/arguments.h +105 -0
  33. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm-inl.h +352 -0
  34. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.cc +2756 -0
  35. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.h +1294 -0
  36. data/ext/v8/upstream/3.1.8/src/arm/builtins-arm.cc +1628 -0
  37. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.cc +6783 -0
  38. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.h +657 -0
  39. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm-inl.h +48 -0
  40. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.cc +7403 -0
  41. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.h +595 -0
  42. data/ext/v8/upstream/3.1.8/src/arm/constants-arm.cc +152 -0
  43. data/ext/v8/upstream/3.1.8/src/arm/constants-arm.h +769 -0
  44. data/ext/v8/upstream/3.1.8/src/arm/cpu-arm.cc +147 -0
  45. data/ext/v8/upstream/3.1.8/src/arm/debug-arm.cc +315 -0
  46. data/ext/v8/upstream/3.1.8/src/arm/deoptimizer-arm.cc +700 -0
  47. data/ext/v8/upstream/3.1.8/src/arm/disasm-arm.cc +1439 -0
  48. data/ext/v8/upstream/3.1.8/src/arm/frames-arm.cc +45 -0
  49. data/ext/v8/upstream/3.1.8/src/arm/frames-arm.h +168 -0
  50. data/ext/v8/upstream/3.1.8/src/arm/full-codegen-arm.cc +4230 -0
  51. data/ext/v8/upstream/3.1.8/src/arm/ic-arm.cc +1799 -0
  52. data/ext/v8/upstream/3.1.8/src/arm/jump-target-arm.cc +174 -0
  53. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.cc +2041 -0
  54. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.h +2046 -0
  55. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.cc +3822 -0
  56. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.h +312 -0
  57. data/ext/v8/upstream/3.1.8/src/arm/lithium-gap-resolver-arm.cc +303 -0
  58. data/ext/v8/upstream/3.1.8/src/arm/lithium-gap-resolver-arm.h +84 -0
  59. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.cc +2701 -0
  60. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.h +1015 -0
  61. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.cc +1280 -0
  62. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.h +252 -0
  63. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/register-allocator-arm-inl.h +0 -0
  64. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/register-allocator-arm.cc +0 -0
  65. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/register-allocator-arm.h +0 -0
  66. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.cc +3165 -0
  67. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.h +402 -0
  68. data/ext/v8/upstream/3.1.8/src/arm/stub-cache-arm.cc +4077 -0
  69. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/virtual-frame-arm-inl.h +0 -0
  70. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.cc +843 -0
  71. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.h +520 -0
  72. data/ext/v8/upstream/3.1.8/src/array.js +1231 -0
  73. data/ext/v8/upstream/3.1.8/src/assembler.cc +973 -0
  74. data/ext/v8/upstream/3.1.8/src/assembler.h +787 -0
  75. data/ext/v8/upstream/3.1.8/src/ast-inl.h +107 -0
  76. data/ext/v8/upstream/3.1.8/src/ast.cc +1067 -0
  77. data/ext/v8/upstream/3.1.8/src/ast.h +2177 -0
  78. data/ext/v8/upstream/3.1.8/src/atomicops.h +165 -0
  79. data/ext/v8/upstream/3.1.8/src/atomicops_internals_arm_gcc.h +145 -0
  80. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_gcc.cc +126 -0
  81. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_gcc.h +287 -0
  82. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_macosx.h +301 -0
  83. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_msvc.h +203 -0
  84. data/ext/v8/upstream/3.1.8/src/bignum-dtoa.cc +655 -0
  85. data/ext/v8/upstream/3.1.8/src/bignum-dtoa.h +81 -0
  86. data/ext/v8/upstream/3.1.8/src/bignum.cc +768 -0
  87. data/ext/v8/upstream/3.1.8/src/bignum.h +140 -0
  88. data/ext/v8/upstream/3.1.8/src/bootstrapper.cc +1888 -0
  89. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/bootstrapper.h +0 -0
  90. data/ext/v8/upstream/3.1.8/src/builtins.cc +1586 -0
  91. data/ext/v8/upstream/3.1.8/src/builtins.h +339 -0
  92. data/ext/v8/upstream/3.1.8/src/bytecodes-irregexp.h +105 -0
  93. data/ext/v8/upstream/3.1.8/src/cached-powers.cc +177 -0
  94. data/ext/v8/upstream/3.1.8/src/cached-powers.h +65 -0
  95. data/ext/v8/upstream/3.1.8/src/char-predicates-inl.h +94 -0
  96. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/char-predicates.h +0 -0
  97. data/ext/v8/upstream/3.1.8/src/checks.cc +110 -0
  98. data/ext/v8/upstream/3.1.8/src/checks.h +292 -0
  99. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/circular-queue-inl.h +0 -0
  100. data/ext/v8/upstream/3.1.8/src/circular-queue.cc +122 -0
  101. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/circular-queue.h +0 -0
  102. data/ext/v8/upstream/3.1.8/src/code-stubs.cc +230 -0
  103. data/ext/v8/upstream/3.1.8/src/code-stubs.h +950 -0
  104. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/code.h +0 -0
  105. data/ext/v8/upstream/3.1.8/src/codegen-inl.h +64 -0
  106. data/ext/v8/upstream/3.1.8/src/codegen.cc +495 -0
  107. data/ext/v8/upstream/3.1.8/src/codegen.h +245 -0
  108. data/ext/v8/upstream/3.1.8/src/compilation-cache.cc +654 -0
  109. data/ext/v8/upstream/3.1.8/src/compilation-cache.h +112 -0
  110. data/ext/v8/upstream/3.1.8/src/compiler.cc +806 -0
  111. data/ext/v8/upstream/3.1.8/src/compiler.h +290 -0
  112. data/ext/v8/upstream/3.1.8/src/contexts.cc +320 -0
  113. data/ext/v8/upstream/3.1.8/src/contexts.h +376 -0
  114. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/conversions-inl.h +0 -0
  115. data/ext/v8/upstream/3.1.8/src/conversions.cc +1069 -0
  116. data/ext/v8/upstream/3.1.8/src/conversions.h +122 -0
  117. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/counters.cc +0 -0
  118. data/ext/v8/upstream/3.1.8/src/counters.h +242 -0
  119. data/ext/v8/upstream/3.1.8/src/cpu-profiler-inl.h +100 -0
  120. data/ext/v8/upstream/3.1.8/src/cpu-profiler.cc +554 -0
  121. data/ext/v8/upstream/3.1.8/src/cpu-profiler.h +291 -0
  122. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/cpu.h +0 -0
  123. data/ext/v8/upstream/3.1.8/src/d8-debug.cc +367 -0
  124. data/ext/v8/upstream/3.1.8/src/d8-debug.h +157 -0
  125. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/d8-posix.cc +0 -0
  126. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/d8-readline.cc +0 -0
  127. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/d8-windows.cc +0 -0
  128. data/ext/v8/upstream/3.1.8/src/d8.cc +792 -0
  129. data/ext/v8/upstream/3.1.8/src/d8.gyp +85 -0
  130. data/ext/v8/upstream/3.1.8/src/d8.h +231 -0
  131. data/ext/v8/upstream/3.1.8/src/d8.js +2798 -0
  132. data/ext/v8/upstream/3.1.8/src/data-flow.cc +545 -0
  133. data/ext/v8/upstream/3.1.8/src/data-flow.h +379 -0
  134. data/ext/v8/upstream/3.1.8/src/date.js +1103 -0
  135. data/ext/v8/upstream/3.1.8/src/dateparser-inl.h +125 -0
  136. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/dateparser.cc +0 -0
  137. data/ext/v8/upstream/3.1.8/src/dateparser.h +263 -0
  138. data/ext/v8/upstream/3.1.8/src/debug-agent.cc +446 -0
  139. data/ext/v8/upstream/3.1.8/src/debug-agent.h +131 -0
  140. data/ext/v8/upstream/3.1.8/src/debug-debugger.js +2569 -0
  141. data/ext/v8/upstream/3.1.8/src/debug.cc +3085 -0
  142. data/ext/v8/upstream/3.1.8/src/debug.h +1025 -0
  143. data/ext/v8/upstream/3.1.8/src/deoptimizer.cc +1185 -0
  144. data/ext/v8/upstream/3.1.8/src/deoptimizer.h +529 -0
  145. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/disasm.h +0 -0
  146. data/ext/v8/upstream/3.1.8/src/disassembler.cc +338 -0
  147. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/disassembler.h +0 -0
  148. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/diy-fp.cc +0 -0
  149. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/diy-fp.h +0 -0
  150. data/ext/v8/upstream/3.1.8/src/double.h +238 -0
  151. data/ext/v8/upstream/3.1.8/src/dtoa.cc +103 -0
  152. data/ext/v8/upstream/3.1.8/src/dtoa.h +85 -0
  153. data/ext/v8/upstream/3.1.8/src/execution.cc +735 -0
  154. data/ext/v8/upstream/3.1.8/src/execution.h +322 -0
  155. data/ext/v8/upstream/3.1.8/src/extensions/experimental/experimental.gyp +53 -0
  156. data/ext/v8/upstream/3.1.8/src/extensions/experimental/i18n-extension.cc +264 -0
  157. data/ext/v8/upstream/3.1.8/src/extensions/experimental/i18n-extension.h +64 -0
  158. data/ext/v8/upstream/3.1.8/src/extensions/externalize-string-extension.cc +141 -0
  159. data/ext/v8/upstream/3.1.8/src/extensions/externalize-string-extension.h +50 -0
  160. data/ext/v8/upstream/3.1.8/src/extensions/gc-extension.cc +58 -0
  161. data/ext/v8/upstream/3.1.8/src/extensions/gc-extension.h +49 -0
  162. data/ext/v8/upstream/3.1.8/src/factory.cc +1087 -0
  163. data/ext/v8/upstream/3.1.8/src/factory.h +432 -0
  164. data/ext/v8/upstream/3.1.8/src/fast-dtoa.cc +736 -0
  165. data/ext/v8/upstream/3.1.8/src/fast-dtoa.h +83 -0
  166. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/fixed-dtoa.cc +0 -0
  167. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/fixed-dtoa.h +0 -0
  168. data/ext/v8/upstream/3.1.8/src/flag-definitions.h +552 -0
  169. data/ext/v8/upstream/3.1.8/src/flags.cc +551 -0
  170. data/ext/v8/upstream/3.1.8/src/flags.h +79 -0
  171. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/frame-element.cc +0 -0
  172. data/ext/v8/upstream/3.1.8/src/frame-element.h +277 -0
  173. data/ext/v8/upstream/3.1.8/src/frames-inl.h +210 -0
  174. data/ext/v8/upstream/3.1.8/src/frames.cc +1232 -0
  175. data/ext/v8/upstream/3.1.8/src/frames.h +826 -0
  176. data/ext/v8/upstream/3.1.8/src/full-codegen.cc +1382 -0
  177. data/ext/v8/upstream/3.1.8/src/full-codegen.h +751 -0
  178. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.cc +90 -0
  179. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.h +111 -0
  180. data/ext/v8/upstream/3.1.8/src/gdb-jit.cc +1547 -0
  181. data/ext/v8/upstream/3.1.8/src/gdb-jit.h +138 -0
  182. data/ext/v8/upstream/3.1.8/src/global-handles.cc +534 -0
  183. data/ext/v8/upstream/3.1.8/src/global-handles.h +181 -0
  184. data/ext/v8/upstream/3.1.8/src/globals.h +325 -0
  185. data/ext/v8/upstream/3.1.8/src/handles-inl.h +80 -0
  186. data/ext/v8/upstream/3.1.8/src/handles.cc +910 -0
  187. data/ext/v8/upstream/3.1.8/src/handles.h +424 -0
  188. data/ext/v8/upstream/3.1.8/src/hashmap.cc +230 -0
  189. data/ext/v8/upstream/3.1.8/src/hashmap.h +121 -0
  190. data/ext/v8/upstream/3.1.8/src/heap-inl.h +587 -0
  191. data/ext/v8/upstream/3.1.8/src/heap-profiler.cc +1128 -0
  192. data/ext/v8/upstream/3.1.8/src/heap-profiler.h +381 -0
  193. data/ext/v8/upstream/3.1.8/src/heap.cc +5610 -0
  194. data/ext/v8/upstream/3.1.8/src/heap.h +2218 -0
  195. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.cc +1490 -0
  196. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.h +3493 -0
  197. data/ext/v8/upstream/3.1.8/src/hydrogen.cc +6056 -0
  198. data/ext/v8/upstream/3.1.8/src/hydrogen.h +1091 -0
  199. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32-inl.h +429 -0
  200. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.cc +2800 -0
  201. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.h +1093 -0
  202. data/ext/v8/upstream/3.1.8/src/ia32/builtins-ia32.cc +1590 -0
  203. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.cc +6624 -0
  204. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.h +536 -0
  205. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/codegen-ia32-inl.h +0 -0
  206. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.cc +10354 -0
  207. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.h +798 -0
  208. data/ext/v8/upstream/3.1.8/src/ia32/cpu-ia32.cc +87 -0
  209. data/ext/v8/upstream/3.1.8/src/ia32/debug-ia32.cc +309 -0
  210. data/ext/v8/upstream/3.1.8/src/ia32/deoptimizer-ia32.cc +664 -0
  211. data/ext/v8/upstream/3.1.8/src/ia32/disasm-ia32.cc +1597 -0
  212. data/ext/v8/upstream/3.1.8/src/ia32/frames-ia32.cc +45 -0
  213. data/ext/v8/upstream/3.1.8/src/ia32/frames-ia32.h +140 -0
  214. data/ext/v8/upstream/3.1.8/src/ia32/full-codegen-ia32.cc +4278 -0
  215. data/ext/v8/upstream/3.1.8/src/ia32/ic-ia32.cc +1786 -0
  216. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/jump-target-ia32.cc +0 -0
  217. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.cc +3880 -0
  218. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.h +309 -0
  219. data/ext/v8/upstream/3.1.8/src/ia32/lithium-gap-resolver-ia32.cc +460 -0
  220. data/ext/v8/upstream/3.1.8/src/ia32/lithium-gap-resolver-ia32.h +110 -0
  221. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.cc +2095 -0
  222. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.h +2127 -0
  223. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.cc +2031 -0
  224. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.h +798 -0
  225. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.cc +1253 -0
  226. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.h +215 -0
  227. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/register-allocator-ia32-inl.h +0 -0
  228. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/register-allocator-ia32.cc +0 -0
  229. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/register-allocator-ia32.h +0 -0
  230. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/simulator-ia32.cc +0 -0
  231. data/ext/v8/upstream/3.1.8/src/ia32/simulator-ia32.h +72 -0
  232. data/ext/v8/upstream/3.1.8/src/ia32/stub-cache-ia32.cc +3732 -0
  233. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.cc +1360 -0
  234. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.h +646 -0
  235. data/ext/v8/upstream/3.1.8/src/ic-inl.h +129 -0
  236. data/ext/v8/upstream/3.1.8/src/ic.cc +2333 -0
  237. data/ext/v8/upstream/3.1.8/src/ic.h +639 -0
  238. data/ext/v8/upstream/3.1.8/src/inspector.cc +63 -0
  239. data/ext/v8/upstream/3.1.8/src/inspector.h +62 -0
  240. data/ext/v8/upstream/3.1.8/src/interpreter-irregexp.cc +655 -0
  241. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/interpreter-irregexp.h +0 -0
  242. data/ext/v8/upstream/3.1.8/src/json.js +342 -0
  243. data/ext/v8/upstream/3.1.8/src/jsregexp.cc +5340 -0
  244. data/ext/v8/upstream/3.1.8/src/jsregexp.h +1484 -0
  245. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target-heavy-inl.h +0 -0
  246. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.cc +430 -0
  247. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.h +244 -0
  248. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target-inl.h +0 -0
  249. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target-light-inl.h +0 -0
  250. data/ext/v8/upstream/3.1.8/src/jump-target-light.cc +111 -0
  251. data/ext/v8/upstream/3.1.8/src/jump-target-light.h +193 -0
  252. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target.cc +0 -0
  253. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target.h +0 -0
  254. data/ext/v8/upstream/3.1.8/src/list-inl.h +206 -0
  255. data/ext/v8/upstream/3.1.8/src/list.h +164 -0
  256. data/ext/v8/upstream/3.1.8/src/lithium-allocator-inl.h +140 -0
  257. data/ext/v8/upstream/3.1.8/src/lithium-allocator.cc +2093 -0
  258. data/ext/v8/upstream/3.1.8/src/lithium-allocator.h +644 -0
  259. data/ext/v8/upstream/3.1.8/src/lithium.cc +168 -0
  260. data/ext/v8/upstream/3.1.8/src/lithium.h +592 -0
  261. data/ext/v8/upstream/3.1.8/src/liveedit-debugger.js +1082 -0
  262. data/ext/v8/upstream/3.1.8/src/liveedit.cc +1650 -0
  263. data/ext/v8/upstream/3.1.8/src/liveedit.h +174 -0
  264. data/ext/v8/upstream/3.1.8/src/liveobjectlist-inl.h +126 -0
  265. data/ext/v8/upstream/3.1.8/src/liveobjectlist.cc +2527 -0
  266. data/ext/v8/upstream/3.1.8/src/liveobjectlist.h +322 -0
  267. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/log-inl.h +0 -0
  268. data/ext/v8/upstream/3.1.8/src/log-utils.cc +336 -0
  269. data/ext/v8/upstream/3.1.8/src/log-utils.h +232 -0
  270. data/ext/v8/upstream/3.1.8/src/log.cc +1608 -0
  271. data/ext/v8/upstream/3.1.8/src/log.h +379 -0
  272. data/ext/v8/upstream/3.1.8/src/macro-assembler.h +120 -0
  273. data/ext/v8/upstream/3.1.8/src/macros.py +178 -0
  274. data/ext/v8/upstream/3.1.8/src/mark-compact.cc +2957 -0
  275. data/ext/v8/upstream/3.1.8/src/mark-compact.h +433 -0
  276. data/ext/v8/upstream/3.1.8/src/math.js +264 -0
  277. data/ext/v8/upstream/3.1.8/src/memory.h +82 -0
  278. data/ext/v8/upstream/3.1.8/src/messages.cc +164 -0
  279. data/ext/v8/upstream/3.1.8/src/messages.h +114 -0
  280. data/ext/v8/upstream/3.1.8/src/messages.js +1071 -0
  281. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/assembler-mips-inl.h +0 -0
  282. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/assembler-mips.cc +0 -0
  283. data/ext/v8/upstream/3.1.8/src/mips/assembler-mips.h +667 -0
  284. data/ext/v8/upstream/3.1.8/src/mips/builtins-mips.cc +205 -0
  285. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/codegen-mips-inl.h +0 -0
  286. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/codegen-mips.cc +0 -0
  287. data/ext/v8/upstream/3.1.8/src/mips/codegen-mips.h +431 -0
  288. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/constants-mips.cc +0 -0
  289. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/constants-mips.h +0 -0
  290. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/cpu-mips.cc +0 -0
  291. data/ext/v8/upstream/3.1.8/src/mips/debug-mips.cc +127 -0
  292. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/disasm-mips.cc +0 -0
  293. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/fast-codegen-mips.cc +0 -0
  294. data/ext/v8/upstream/3.1.8/src/mips/frames-mips.cc +96 -0
  295. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/frames-mips.h +0 -0
  296. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/full-codegen-mips.cc +0 -0
  297. data/ext/v8/upstream/3.1.8/src/mips/ic-mips.cc +208 -0
  298. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/jump-target-mips.cc +0 -0
  299. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/macro-assembler-mips.cc +0 -0
  300. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/macro-assembler-mips.h +0 -0
  301. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/register-allocator-mips-inl.h +0 -0
  302. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/register-allocator-mips.cc +0 -0
  303. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/register-allocator-mips.h +0 -0
  304. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.cc +1650 -0
  305. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.h +311 -0
  306. data/ext/v8/upstream/3.1.8/src/mips/stub-cache-mips.cc +418 -0
  307. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/virtual-frame-mips.cc +0 -0
  308. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/virtual-frame-mips.h +0 -0
  309. data/ext/v8/upstream/3.1.8/src/mirror-debugger.js +2380 -0
  310. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mksnapshot.cc +0 -0
  311. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/natives.h +0 -0
  312. data/ext/v8/upstream/3.1.8/src/objects-debug.cc +722 -0
  313. data/ext/v8/upstream/3.1.8/src/objects-inl.h +3946 -0
  314. data/ext/v8/upstream/3.1.8/src/objects-printer.cc +801 -0
  315. data/ext/v8/upstream/3.1.8/src/objects-visiting.cc +142 -0
  316. data/ext/v8/upstream/3.1.8/src/objects-visiting.h +401 -0
  317. data/ext/v8/upstream/3.1.8/src/objects.cc +10044 -0
  318. data/ext/v8/upstream/3.1.8/src/objects.h +6571 -0
  319. data/ext/v8/upstream/3.1.8/src/parser.cc +5165 -0
  320. data/ext/v8/upstream/3.1.8/src/parser.h +802 -0
  321. data/ext/v8/upstream/3.1.8/src/platform-cygwin.cc +745 -0
  322. data/ext/v8/upstream/3.1.8/src/platform-freebsd.cc +702 -0
  323. data/ext/v8/upstream/3.1.8/src/platform-linux.cc +981 -0
  324. data/ext/v8/upstream/3.1.8/src/platform-macos.cc +732 -0
  325. data/ext/v8/upstream/3.1.8/src/platform-nullos.cc +498 -0
  326. data/ext/v8/upstream/3.1.8/src/platform-openbsd.cc +657 -0
  327. data/ext/v8/upstream/3.1.8/src/platform-posix.cc +399 -0
  328. data/ext/v8/upstream/3.1.8/src/platform-solaris.cc +714 -0
  329. data/ext/v8/upstream/3.1.8/src/platform-win32.cc +1974 -0
  330. data/ext/v8/upstream/3.1.8/src/platform.h +636 -0
  331. data/ext/v8/upstream/3.1.8/src/preparse-data.cc +183 -0
  332. data/ext/v8/upstream/3.1.8/src/preparse-data.h +249 -0
  333. data/ext/v8/upstream/3.1.8/src/preparser-api.cc +213 -0
  334. data/ext/v8/upstream/3.1.8/src/preparser.cc +1205 -0
  335. data/ext/v8/upstream/3.1.8/src/preparser.h +278 -0
  336. data/ext/v8/upstream/3.1.8/src/prettyprinter.cc +1539 -0
  337. data/ext/v8/upstream/3.1.8/src/prettyprinter.h +223 -0
  338. data/ext/v8/upstream/3.1.8/src/profile-generator-inl.h +128 -0
  339. data/ext/v8/upstream/3.1.8/src/profile-generator.cc +2899 -0
  340. data/ext/v8/upstream/3.1.8/src/profile-generator.h +1151 -0
  341. data/ext/v8/upstream/3.1.8/src/property.cc +96 -0
  342. data/ext/v8/upstream/3.1.8/src/property.h +337 -0
  343. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/regexp-macro-assembler-irregexp-inl.h +0 -0
  344. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-irregexp.cc +470 -0
  345. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-irregexp.h +142 -0
  346. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-tracer.cc +373 -0
  347. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-tracer.h +104 -0
  348. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.cc +257 -0
  349. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.h +231 -0
  350. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/regexp-stack.cc +0 -0
  351. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/regexp-stack.h +0 -0
  352. data/ext/v8/upstream/3.1.8/src/regexp.js +483 -0
  353. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/register-allocator-inl.h +0 -0
  354. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/register-allocator.cc +0 -0
  355. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/register-allocator.h +0 -0
  356. data/ext/v8/upstream/3.1.8/src/rewriter.cc +1023 -0
  357. data/ext/v8/upstream/3.1.8/src/rewriter.h +59 -0
  358. data/ext/v8/upstream/3.1.8/src/runtime-profiler.cc +443 -0
  359. data/ext/v8/upstream/3.1.8/src/runtime-profiler.h +77 -0
  360. data/ext/v8/upstream/3.1.8/src/runtime.cc +11592 -0
  361. data/ext/v8/upstream/3.1.8/src/runtime.h +582 -0
  362. data/ext/v8/upstream/3.1.8/src/runtime.js +643 -0
  363. data/ext/v8/upstream/3.1.8/src/safepoint-table.cc +253 -0
  364. data/ext/v8/upstream/3.1.8/src/safepoint-table.h +263 -0
  365. data/ext/v8/upstream/3.1.8/src/scanner-base.cc +971 -0
  366. data/ext/v8/upstream/3.1.8/src/scanner-base.h +653 -0
  367. data/ext/v8/upstream/3.1.8/src/scanner.cc +586 -0
  368. data/ext/v8/upstream/3.1.8/src/scanner.h +194 -0
  369. data/ext/v8/upstream/3.1.8/src/scopeinfo.cc +636 -0
  370. data/ext/v8/upstream/3.1.8/src/scopeinfo.h +238 -0
  371. data/ext/v8/upstream/3.1.8/src/scopes.cc +1063 -0
  372. data/ext/v8/upstream/3.1.8/src/scopes.h +494 -0
  373. data/ext/v8/upstream/3.1.8/src/serialize.cc +1535 -0
  374. data/ext/v8/upstream/3.1.8/src/serialize.h +584 -0
  375. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/shell.h +0 -0
  376. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/simulator.h +0 -0
  377. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/smart-pointer.h +0 -0
  378. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/snapshot-common.cc +0 -0
  379. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/snapshot-empty.cc +0 -0
  380. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/snapshot.h +0 -0
  381. data/ext/v8/upstream/3.1.8/src/spaces-inl.h +524 -0
  382. data/ext/v8/upstream/3.1.8/src/spaces.cc +3254 -0
  383. data/ext/v8/upstream/3.1.8/src/spaces.h +2362 -0
  384. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/splay-tree-inl.h +0 -0
  385. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/splay-tree.h +0 -0
  386. data/ext/v8/upstream/3.1.8/src/string-search.cc +40 -0
  387. data/ext/v8/upstream/3.1.8/src/string-search.h +567 -0
  388. data/ext/v8/upstream/3.1.8/src/string-stream.cc +584 -0
  389. data/ext/v8/upstream/3.1.8/src/string-stream.h +191 -0
  390. data/ext/v8/upstream/3.1.8/src/string.js +915 -0
  391. data/ext/v8/upstream/3.1.8/src/strtod.cc +440 -0
  392. data/ext/v8/upstream/3.1.8/src/strtod.h +40 -0
  393. data/ext/v8/upstream/3.1.8/src/stub-cache.cc +1878 -0
  394. data/ext/v8/upstream/3.1.8/src/stub-cache.h +849 -0
  395. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/third_party/valgrind/valgrind.h +0 -0
  396. data/ext/v8/upstream/3.1.8/src/token.cc +63 -0
  397. data/ext/v8/upstream/3.1.8/src/token.h +288 -0
  398. data/ext/v8/upstream/3.1.8/src/top.cc +1152 -0
  399. data/ext/v8/upstream/3.1.8/src/top.h +608 -0
  400. data/ext/v8/upstream/3.1.8/src/type-info.cc +406 -0
  401. data/ext/v8/upstream/3.1.8/src/type-info.h +283 -0
  402. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/unbound-queue-inl.h +0 -0
  403. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/unbound-queue.h +0 -0
  404. data/ext/v8/upstream/3.1.8/src/unicode-inl.h +238 -0
  405. data/ext/v8/upstream/3.1.8/src/unicode.cc +1624 -0
  406. data/ext/v8/upstream/3.1.8/src/unicode.h +280 -0
  407. data/ext/v8/upstream/3.1.8/src/uri.js +402 -0
  408. data/ext/v8/upstream/3.1.8/src/utils.cc +371 -0
  409. data/ext/v8/upstream/3.1.8/src/utils.h +793 -0
  410. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/v8-counters.cc +0 -0
  411. data/ext/v8/upstream/3.1.8/src/v8-counters.h +290 -0
  412. data/ext/v8/upstream/3.1.8/src/v8.cc +270 -0
  413. data/ext/v8/upstream/3.1.8/src/v8.h +127 -0
  414. data/ext/v8/upstream/3.1.8/src/v8checks.h +64 -0
  415. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/v8dll-main.cc +0 -0
  416. data/ext/v8/upstream/3.1.8/src/v8globals.h +480 -0
  417. data/ext/v8/upstream/3.1.8/src/v8natives.js +1252 -0
  418. data/ext/v8/upstream/3.1.8/src/v8preparserdll-main.cc +39 -0
  419. data/ext/v8/upstream/3.1.8/src/v8threads.cc +440 -0
  420. data/ext/v8/upstream/3.1.8/src/v8threads.h +157 -0
  421. data/ext/v8/upstream/3.1.8/src/v8utils.h +354 -0
  422. data/ext/v8/upstream/3.1.8/src/variables.cc +132 -0
  423. data/ext/v8/upstream/3.1.8/src/variables.h +212 -0
  424. data/ext/v8/upstream/3.1.8/src/version.cc +95 -0
  425. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/version.h +0 -0
  426. data/ext/v8/upstream/3.1.8/src/virtual-frame-heavy-inl.h +190 -0
  427. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-heavy.cc +0 -0
  428. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-inl.h +0 -0
  429. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-light-inl.h +0 -0
  430. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-light.cc +0 -0
  431. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame.cc +0 -0
  432. data/ext/v8/upstream/3.1.8/src/virtual-frame.h +59 -0
  433. data/ext/v8/upstream/3.1.8/src/vm-state-inl.h +134 -0
  434. data/ext/v8/upstream/3.1.8/src/vm-state.h +68 -0
  435. data/ext/v8/upstream/3.1.8/src/win32-headers.h +95 -0
  436. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64-inl.h +455 -0
  437. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.cc +3162 -0
  438. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.h +1584 -0
  439. data/ext/v8/upstream/3.1.8/src/x64/builtins-x64.cc +1492 -0
  440. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.cc +5150 -0
  441. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.h +519 -0
  442. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64-inl.h +46 -0
  443. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.cc +8835 -0
  444. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.h +750 -0
  445. data/ext/v8/upstream/3.1.8/src/x64/cpu-x64.cc +86 -0
  446. data/ext/v8/upstream/3.1.8/src/x64/debug-x64.cc +316 -0
  447. data/ext/v8/upstream/3.1.8/src/x64/deoptimizer-x64.cc +781 -0
  448. data/ext/v8/upstream/3.1.8/src/x64/disasm-x64.cc +1737 -0
  449. data/ext/v8/upstream/3.1.8/src/x64/frames-x64.cc +45 -0
  450. data/ext/v8/upstream/3.1.8/src/x64/frames-x64.h +130 -0
  451. data/ext/v8/upstream/3.1.8/src/x64/full-codegen-x64.cc +3984 -0
  452. data/ext/v8/upstream/3.1.8/src/x64/ic-x64.cc +1761 -0
  453. data/ext/v8/upstream/3.1.8/src/x64/jump-target-x64.cc +437 -0
  454. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.cc +3639 -0
  455. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.h +305 -0
  456. data/ext/v8/upstream/3.1.8/src/x64/lithium-gap-resolver-x64.cc +320 -0
  457. data/ext/v8/upstream/3.1.8/src/x64/lithium-gap-resolver-x64.h +74 -0
  458. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.cc +2044 -0
  459. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.h +2052 -0
  460. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.cc +2660 -0
  461. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.h +1852 -0
  462. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.cc +1382 -0
  463. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.h +278 -0
  464. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/register-allocator-x64-inl.h +0 -0
  465. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/register-allocator-x64.cc +0 -0
  466. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/register-allocator-x64.h +0 -0
  467. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/simulator-x64.cc +0 -0
  468. data/ext/v8/upstream/3.1.8/src/x64/simulator-x64.h +71 -0
  469. data/ext/v8/upstream/3.1.8/src/x64/stub-cache-x64.cc +3509 -0
  470. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.cc +1292 -0
  471. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.h +593 -0
  472. data/ext/v8/upstream/3.1.8/src/zone-inl.h +83 -0
  473. data/ext/v8/upstream/3.1.8/src/zone.cc +195 -0
  474. data/ext/v8/upstream/3.1.8/src/zone.h +233 -0
  475. data/ext/v8/upstream/3.1.8/tools/codemap.js +265 -0
  476. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/consarray.js +0 -0
  477. data/ext/v8/upstream/3.1.8/tools/csvparser.js +78 -0
  478. data/ext/v8/upstream/3.1.8/tools/disasm.py +92 -0
  479. data/ext/v8/upstream/3.1.8/tools/gc-nvp-trace-processor.py +328 -0
  480. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/generate-ten-powers.scm +0 -0
  481. data/ext/v8/upstream/3.1.8/tools/grokdump.py +840 -0
  482. data/ext/v8/upstream/3.1.8/tools/gyp/v8.gyp +869 -0
  483. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/js2c.py +0 -0
  484. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/jsmin.py +0 -0
  485. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/linux-tick-processor +0 -0
  486. data/ext/v8/upstream/3.1.8/tools/ll_prof.py +919 -0
  487. data/ext/v8/upstream/3.1.8/tools/logreader.js +185 -0
  488. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/mac-nm +0 -0
  489. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/mac-tick-processor +0 -0
  490. data/ext/v8/upstream/3.1.8/tools/oom_dump/README +31 -0
  491. data/ext/v8/upstream/3.1.8/tools/oom_dump/SConstruct +42 -0
  492. data/ext/v8/upstream/3.1.8/tools/oom_dump/oom_dump.cc +288 -0
  493. data/ext/v8/upstream/3.1.8/tools/presubmit.py +305 -0
  494. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/process-heap-prof.py +0 -0
  495. data/ext/v8/upstream/3.1.8/tools/profile.js +751 -0
  496. data/ext/v8/upstream/3.1.8/tools/profile_view.js +219 -0
  497. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/run-valgrind.py +0 -0
  498. data/ext/v8/upstream/3.1.8/tools/splaytree.js +316 -0
  499. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/stats-viewer.py +0 -0
  500. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/tickprocessor-driver.js +0 -0
  501. data/ext/v8/upstream/3.1.8/tools/tickprocessor.js +863 -0
  502. data/ext/v8/upstream/3.1.8/tools/utils.py +96 -0
  503. data/ext/v8/upstream/3.1.8/tools/visual_studio/README.txt +70 -0
  504. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/arm.vsprops +0 -0
  505. data/ext/v8/upstream/3.1.8/tools/visual_studio/common.vsprops +34 -0
  506. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8.vcproj +0 -0
  507. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8_arm.vcproj +0 -0
  508. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8_x64.vcproj +0 -0
  509. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8js2c.cmd +0 -0
  510. data/ext/v8/upstream/3.1.8/tools/visual_studio/debug.vsprops +17 -0
  511. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/ia32.vsprops +0 -0
  512. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/js2c.cmd +0 -0
  513. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/release.vsprops +0 -0
  514. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8.sln +0 -0
  515. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8.vcproj +0 -0
  516. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_arm.sln +0 -0
  517. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_arm.vcproj +0 -0
  518. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base.vcproj +1296 -0
  519. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_arm.vcproj +1234 -0
  520. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_x64.vcproj +1296 -0
  521. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_cctest.vcproj +0 -0
  522. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_cctest_arm.vcproj +0 -0
  523. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_cctest_x64.vcproj +0 -0
  524. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_mksnapshot.vcproj +0 -0
  525. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_mksnapshot_x64.vcproj +0 -0
  526. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_process_sample.vcproj +0 -0
  527. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_process_sample_arm.vcproj +0 -0
  528. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_process_sample_x64.vcproj +0 -0
  529. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_shell_sample.vcproj +147 -0
  530. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_shell_sample_arm.vcproj +147 -0
  531. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_shell_sample_x64.vcproj +163 -0
  532. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot.vcproj +0 -0
  533. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot_cc.vcproj +0 -0
  534. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot_cc_x64.vcproj +0 -0
  535. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot_x64.vcproj +0 -0
  536. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_x64.sln +0 -0
  537. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_x64.vcproj +0 -0
  538. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/x64.vsprops +0 -0
  539. data/ext/v8/upstream/3.1.8/tools/windows-tick-processor.bat +30 -0
  540. data/ext/v8/upstream/Makefile +2 -1
  541. data/ext/v8/v8_template.cpp +2 -2
  542. data/lib/v8/version.rb +1 -1
  543. data/spec/redjs/jsapi_spec.rb +2 -2
  544. metadata +552 -490
  545. data/ext/v8/upstream/2.3.3/.gitignore +0 -26
  546. data/ext/v8/upstream/2.3.3/AUTHORS +0 -31
  547. data/ext/v8/upstream/2.3.3/ChangeLog +0 -1916
  548. data/ext/v8/upstream/2.3.3/LICENSE +0 -55
  549. data/ext/v8/upstream/2.3.3/SConstruct +0 -1154
  550. data/ext/v8/upstream/2.3.3/include/v8-debug.h +0 -381
  551. data/ext/v8/upstream/2.3.3/include/v8-profiler.h +0 -353
  552. data/ext/v8/upstream/2.3.3/include/v8.h +0 -3616
  553. data/ext/v8/upstream/2.3.3/src/SConscript +0 -330
  554. data/ext/v8/upstream/2.3.3/src/accessors.cc +0 -661
  555. data/ext/v8/upstream/2.3.3/src/accessors.h +0 -114
  556. data/ext/v8/upstream/2.3.3/src/allocation.cc +0 -198
  557. data/ext/v8/upstream/2.3.3/src/allocation.h +0 -169
  558. data/ext/v8/upstream/2.3.3/src/api.cc +0 -4795
  559. data/ext/v8/upstream/2.3.3/src/api.h +0 -485
  560. data/ext/v8/upstream/2.3.3/src/apiutils.h +0 -69
  561. data/ext/v8/upstream/2.3.3/src/arguments.h +0 -96
  562. data/ext/v8/upstream/2.3.3/src/arm/assembler-arm-inl.h +0 -305
  563. data/ext/v8/upstream/2.3.3/src/arm/assembler-arm.cc +0 -2580
  564. data/ext/v8/upstream/2.3.3/src/arm/assembler-arm.h +0 -1275
  565. data/ext/v8/upstream/2.3.3/src/arm/builtins-arm.cc +0 -1320
  566. data/ext/v8/upstream/2.3.3/src/arm/codegen-arm-inl.h +0 -48
  567. data/ext/v8/upstream/2.3.3/src/arm/codegen-arm.cc +0 -11398
  568. data/ext/v8/upstream/2.3.3/src/arm/codegen-arm.h +0 -1102
  569. data/ext/v8/upstream/2.3.3/src/arm/constants-arm.cc +0 -154
  570. data/ext/v8/upstream/2.3.3/src/arm/constants-arm.h +0 -388
  571. data/ext/v8/upstream/2.3.3/src/arm/cpu-arm.cc +0 -142
  572. data/ext/v8/upstream/2.3.3/src/arm/debug-arm.cc +0 -309
  573. data/ext/v8/upstream/2.3.3/src/arm/disasm-arm.cc +0 -1459
  574. data/ext/v8/upstream/2.3.3/src/arm/fast-codegen-arm.cc +0 -241
  575. data/ext/v8/upstream/2.3.3/src/arm/frames-arm.cc +0 -123
  576. data/ext/v8/upstream/2.3.3/src/arm/frames-arm.h +0 -162
  577. data/ext/v8/upstream/2.3.3/src/arm/full-codegen-arm.cc +0 -3178
  578. data/ext/v8/upstream/2.3.3/src/arm/ic-arm.cc +0 -2258
  579. data/ext/v8/upstream/2.3.3/src/arm/jump-target-arm.cc +0 -164
  580. data/ext/v8/upstream/2.3.3/src/arm/macro-assembler-arm.cc +0 -1892
  581. data/ext/v8/upstream/2.3.3/src/arm/macro-assembler-arm.h +0 -727
  582. data/ext/v8/upstream/2.3.3/src/arm/regexp-macro-assembler-arm.cc +0 -1261
  583. data/ext/v8/upstream/2.3.3/src/arm/regexp-macro-assembler-arm.h +0 -266
  584. data/ext/v8/upstream/2.3.3/src/arm/simulator-arm.cc +0 -2822
  585. data/ext/v8/upstream/2.3.3/src/arm/simulator-arm.h +0 -361
  586. data/ext/v8/upstream/2.3.3/src/arm/stub-cache-arm.cc +0 -2387
  587. data/ext/v8/upstream/2.3.3/src/arm/virtual-frame-arm.cc +0 -834
  588. data/ext/v8/upstream/2.3.3/src/arm/virtual-frame-arm.h +0 -519
  589. data/ext/v8/upstream/2.3.3/src/array.js +0 -1127
  590. data/ext/v8/upstream/2.3.3/src/assembler.cc +0 -801
  591. data/ext/v8/upstream/2.3.3/src/assembler.h +0 -573
  592. data/ext/v8/upstream/2.3.3/src/ast-inl.h +0 -81
  593. data/ext/v8/upstream/2.3.3/src/ast.cc +0 -1152
  594. data/ext/v8/upstream/2.3.3/src/ast.h +0 -2106
  595. data/ext/v8/upstream/2.3.3/src/bootstrapper.cc +0 -1819
  596. data/ext/v8/upstream/2.3.3/src/builtins.cc +0 -1529
  597. data/ext/v8/upstream/2.3.3/src/builtins.h +0 -263
  598. data/ext/v8/upstream/2.3.3/src/bytecodes-irregexp.h +0 -104
  599. data/ext/v8/upstream/2.3.3/src/cached-powers.h +0 -119
  600. data/ext/v8/upstream/2.3.3/src/char-predicates-inl.h +0 -86
  601. data/ext/v8/upstream/2.3.3/src/checks.cc +0 -100
  602. data/ext/v8/upstream/2.3.3/src/checks.h +0 -310
  603. data/ext/v8/upstream/2.3.3/src/circular-queue.cc +0 -121
  604. data/ext/v8/upstream/2.3.3/src/code-stubs.cc +0 -177
  605. data/ext/v8/upstream/2.3.3/src/code-stubs.h +0 -177
  606. data/ext/v8/upstream/2.3.3/src/codegen-inl.h +0 -60
  607. data/ext/v8/upstream/2.3.3/src/codegen.cc +0 -516
  608. data/ext/v8/upstream/2.3.3/src/codegen.h +0 -897
  609. data/ext/v8/upstream/2.3.3/src/compilation-cache.cc +0 -562
  610. data/ext/v8/upstream/2.3.3/src/compilation-cache.h +0 -102
  611. data/ext/v8/upstream/2.3.3/src/compiler.cc +0 -654
  612. data/ext/v8/upstream/2.3.3/src/compiler.h +0 -299
  613. data/ext/v8/upstream/2.3.3/src/contexts.cc +0 -256
  614. data/ext/v8/upstream/2.3.3/src/contexts.h +0 -342
  615. data/ext/v8/upstream/2.3.3/src/conversions.cc +0 -1119
  616. data/ext/v8/upstream/2.3.3/src/conversions.h +0 -123
  617. data/ext/v8/upstream/2.3.3/src/counters.h +0 -239
  618. data/ext/v8/upstream/2.3.3/src/cpu-profiler-inl.h +0 -100
  619. data/ext/v8/upstream/2.3.3/src/cpu-profiler.cc +0 -538
  620. data/ext/v8/upstream/2.3.3/src/cpu-profiler.h +0 -285
  621. data/ext/v8/upstream/2.3.3/src/d8-debug.cc +0 -356
  622. data/ext/v8/upstream/2.3.3/src/d8-debug.h +0 -155
  623. data/ext/v8/upstream/2.3.3/src/d8.cc +0 -783
  624. data/ext/v8/upstream/2.3.3/src/d8.h +0 -227
  625. data/ext/v8/upstream/2.3.3/src/d8.js +0 -1683
  626. data/ext/v8/upstream/2.3.3/src/data-flow.cc +0 -758
  627. data/ext/v8/upstream/2.3.3/src/data-flow.h +0 -278
  628. data/ext/v8/upstream/2.3.3/src/date.js +0 -1059
  629. data/ext/v8/upstream/2.3.3/src/dateparser-inl.h +0 -123
  630. data/ext/v8/upstream/2.3.3/src/dateparser.h +0 -244
  631. data/ext/v8/upstream/2.3.3/src/debug-agent.cc +0 -427
  632. data/ext/v8/upstream/2.3.3/src/debug-agent.h +0 -129
  633. data/ext/v8/upstream/2.3.3/src/debug-debugger.js +0 -2227
  634. data/ext/v8/upstream/2.3.3/src/debug.cc +0 -3005
  635. data/ext/v8/upstream/2.3.3/src/debug.h +0 -993
  636. data/ext/v8/upstream/2.3.3/src/disassembler.cc +0 -312
  637. data/ext/v8/upstream/2.3.3/src/double.h +0 -169
  638. data/ext/v8/upstream/2.3.3/src/dtoa-config.c +0 -92
  639. data/ext/v8/upstream/2.3.3/src/dtoa.cc +0 -77
  640. data/ext/v8/upstream/2.3.3/src/dtoa.h +0 -81
  641. data/ext/v8/upstream/2.3.3/src/execution.cc +0 -809
  642. data/ext/v8/upstream/2.3.3/src/execution.h +0 -336
  643. data/ext/v8/upstream/2.3.3/src/factory.cc +0 -1003
  644. data/ext/v8/upstream/2.3.3/src/factory.h +0 -410
  645. data/ext/v8/upstream/2.3.3/src/fast-codegen.cc +0 -746
  646. data/ext/v8/upstream/2.3.3/src/fast-codegen.h +0 -161
  647. data/ext/v8/upstream/2.3.3/src/fast-dtoa.cc +0 -505
  648. data/ext/v8/upstream/2.3.3/src/fast-dtoa.h +0 -58
  649. data/ext/v8/upstream/2.3.3/src/flag-definitions.h +0 -455
  650. data/ext/v8/upstream/2.3.3/src/flags.cc +0 -551
  651. data/ext/v8/upstream/2.3.3/src/flags.h +0 -81
  652. data/ext/v8/upstream/2.3.3/src/flow-graph.cc +0 -763
  653. data/ext/v8/upstream/2.3.3/src/flow-graph.h +0 -180
  654. data/ext/v8/upstream/2.3.3/src/frame-element.h +0 -273
  655. data/ext/v8/upstream/2.3.3/src/frames-inl.h +0 -217
  656. data/ext/v8/upstream/2.3.3/src/frames.cc +0 -826
  657. data/ext/v8/upstream/2.3.3/src/frames.h +0 -682
  658. data/ext/v8/upstream/2.3.3/src/full-codegen.cc +0 -1443
  659. data/ext/v8/upstream/2.3.3/src/full-codegen.h +0 -548
  660. data/ext/v8/upstream/2.3.3/src/func-name-inferrer.cc +0 -76
  661. data/ext/v8/upstream/2.3.3/src/func-name-inferrer.h +0 -135
  662. data/ext/v8/upstream/2.3.3/src/global-handles.cc +0 -520
  663. data/ext/v8/upstream/2.3.3/src/global-handles.h +0 -180
  664. data/ext/v8/upstream/2.3.3/src/globals.h +0 -669
  665. data/ext/v8/upstream/2.3.3/src/handles-inl.h +0 -76
  666. data/ext/v8/upstream/2.3.3/src/handles.cc +0 -825
  667. data/ext/v8/upstream/2.3.3/src/handles.h +0 -393
  668. data/ext/v8/upstream/2.3.3/src/hashmap.cc +0 -226
  669. data/ext/v8/upstream/2.3.3/src/hashmap.h +0 -120
  670. data/ext/v8/upstream/2.3.3/src/heap-inl.h +0 -493
  671. data/ext/v8/upstream/2.3.3/src/heap-profiler.cc +0 -779
  672. data/ext/v8/upstream/2.3.3/src/heap-profiler.h +0 -323
  673. data/ext/v8/upstream/2.3.3/src/heap.cc +0 -4994
  674. data/ext/v8/upstream/2.3.3/src/heap.h +0 -1984
  675. data/ext/v8/upstream/2.3.3/src/ia32/assembler-ia32-inl.h +0 -360
  676. data/ext/v8/upstream/2.3.3/src/ia32/assembler-ia32.cc +0 -2600
  677. data/ext/v8/upstream/2.3.3/src/ia32/assembler-ia32.h +0 -969
  678. data/ext/v8/upstream/2.3.3/src/ia32/builtins-ia32.cc +0 -1261
  679. data/ext/v8/upstream/2.3.3/src/ia32/codegen-ia32.cc +0 -13968
  680. data/ext/v8/upstream/2.3.3/src/ia32/codegen-ia32.h +0 -1097
  681. data/ext/v8/upstream/2.3.3/src/ia32/cpu-ia32.cc +0 -83
  682. data/ext/v8/upstream/2.3.3/src/ia32/debug-ia32.cc +0 -309
  683. data/ext/v8/upstream/2.3.3/src/ia32/disasm-ia32.cc +0 -1471
  684. data/ext/v8/upstream/2.3.3/src/ia32/fast-codegen-ia32.cc +0 -954
  685. data/ext/v8/upstream/2.3.3/src/ia32/fast-codegen-ia32.h +0 -155
  686. data/ext/v8/upstream/2.3.3/src/ia32/frames-ia32.cc +0 -115
  687. data/ext/v8/upstream/2.3.3/src/ia32/frames-ia32.h +0 -135
  688. data/ext/v8/upstream/2.3.3/src/ia32/full-codegen-ia32.cc +0 -3281
  689. data/ext/v8/upstream/2.3.3/src/ia32/ic-ia32.cc +0 -1966
  690. data/ext/v8/upstream/2.3.3/src/ia32/macro-assembler-ia32.cc +0 -1610
  691. data/ext/v8/upstream/2.3.3/src/ia32/macro-assembler-ia32.h +0 -610
  692. data/ext/v8/upstream/2.3.3/src/ia32/regexp-macro-assembler-ia32.cc +0 -1247
  693. data/ext/v8/upstream/2.3.3/src/ia32/regexp-macro-assembler-ia32.h +0 -214
  694. data/ext/v8/upstream/2.3.3/src/ia32/simulator-ia32.h +0 -62
  695. data/ext/v8/upstream/2.3.3/src/ia32/stub-cache-ia32.cc +0 -2750
  696. data/ext/v8/upstream/2.3.3/src/ia32/virtual-frame-ia32.cc +0 -1334
  697. data/ext/v8/upstream/2.3.3/src/ia32/virtual-frame-ia32.h +0 -627
  698. data/ext/v8/upstream/2.3.3/src/ic-inl.h +0 -120
  699. data/ext/v8/upstream/2.3.3/src/ic.cc +0 -1827
  700. data/ext/v8/upstream/2.3.3/src/ic.h +0 -515
  701. data/ext/v8/upstream/2.3.3/src/interpreter-irregexp.cc +0 -646
  702. data/ext/v8/upstream/2.3.3/src/json.js +0 -268
  703. data/ext/v8/upstream/2.3.3/src/jsregexp.cc +0 -5283
  704. data/ext/v8/upstream/2.3.3/src/jsregexp.h +0 -1463
  705. data/ext/v8/upstream/2.3.3/src/jump-target-heavy.cc +0 -429
  706. data/ext/v8/upstream/2.3.3/src/jump-target-heavy.h +0 -244
  707. data/ext/v8/upstream/2.3.3/src/jump-target-light.cc +0 -110
  708. data/ext/v8/upstream/2.3.3/src/jump-target-light.h +0 -192
  709. data/ext/v8/upstream/2.3.3/src/list-inl.h +0 -166
  710. data/ext/v8/upstream/2.3.3/src/list.h +0 -159
  711. data/ext/v8/upstream/2.3.3/src/liveedit-debugger.js +0 -1057
  712. data/ext/v8/upstream/2.3.3/src/liveedit.cc +0 -1480
  713. data/ext/v8/upstream/2.3.3/src/liveedit.h +0 -170
  714. data/ext/v8/upstream/2.3.3/src/log-utils.cc +0 -497
  715. data/ext/v8/upstream/2.3.3/src/log-utils.h +0 -289
  716. data/ext/v8/upstream/2.3.3/src/log.cc +0 -1561
  717. data/ext/v8/upstream/2.3.3/src/log.h +0 -384
  718. data/ext/v8/upstream/2.3.3/src/macro-assembler.h +0 -86
  719. data/ext/v8/upstream/2.3.3/src/macros.py +0 -177
  720. data/ext/v8/upstream/2.3.3/src/mark-compact.cc +0 -2330
  721. data/ext/v8/upstream/2.3.3/src/mark-compact.h +0 -451
  722. data/ext/v8/upstream/2.3.3/src/math.js +0 -264
  723. data/ext/v8/upstream/2.3.3/src/memory.h +0 -74
  724. data/ext/v8/upstream/2.3.3/src/messages.cc +0 -183
  725. data/ext/v8/upstream/2.3.3/src/messages.h +0 -113
  726. data/ext/v8/upstream/2.3.3/src/messages.js +0 -982
  727. data/ext/v8/upstream/2.3.3/src/mips/assembler-mips.h +0 -668
  728. data/ext/v8/upstream/2.3.3/src/mips/builtins-mips.cc +0 -205
  729. data/ext/v8/upstream/2.3.3/src/mips/codegen-mips.h +0 -434
  730. data/ext/v8/upstream/2.3.3/src/mips/debug-mips.cc +0 -131
  731. data/ext/v8/upstream/2.3.3/src/mips/frames-mips.cc +0 -102
  732. data/ext/v8/upstream/2.3.3/src/mips/ic-mips.cc +0 -220
  733. data/ext/v8/upstream/2.3.3/src/mips/simulator-mips.cc +0 -1651
  734. data/ext/v8/upstream/2.3.3/src/mips/simulator-mips.h +0 -311
  735. data/ext/v8/upstream/2.3.3/src/mips/stub-cache-mips.cc +0 -403
  736. data/ext/v8/upstream/2.3.3/src/mirror-debugger.js +0 -2380
  737. data/ext/v8/upstream/2.3.3/src/objects-debug.cc +0 -1366
  738. data/ext/v8/upstream/2.3.3/src/objects-inl.h +0 -3333
  739. data/ext/v8/upstream/2.3.3/src/objects.cc +0 -8820
  740. data/ext/v8/upstream/2.3.3/src/objects.h +0 -5373
  741. data/ext/v8/upstream/2.3.3/src/oprofile-agent.cc +0 -108
  742. data/ext/v8/upstream/2.3.3/src/oprofile-agent.h +0 -77
  743. data/ext/v8/upstream/2.3.3/src/parser.cc +0 -5207
  744. data/ext/v8/upstream/2.3.3/src/parser.h +0 -197
  745. data/ext/v8/upstream/2.3.3/src/platform-freebsd.cc +0 -667
  746. data/ext/v8/upstream/2.3.3/src/platform-linux.cc +0 -862
  747. data/ext/v8/upstream/2.3.3/src/platform-macos.cc +0 -665
  748. data/ext/v8/upstream/2.3.3/src/platform-nullos.cc +0 -454
  749. data/ext/v8/upstream/2.3.3/src/platform-openbsd.cc +0 -622
  750. data/ext/v8/upstream/2.3.3/src/platform-posix.cc +0 -362
  751. data/ext/v8/upstream/2.3.3/src/platform-solaris.cc +0 -653
  752. data/ext/v8/upstream/2.3.3/src/platform-win32.cc +0 -1911
  753. data/ext/v8/upstream/2.3.3/src/platform.h +0 -577
  754. data/ext/v8/upstream/2.3.3/src/powers-ten.h +0 -2461
  755. data/ext/v8/upstream/2.3.3/src/prettyprinter.cc +0 -1531
  756. data/ext/v8/upstream/2.3.3/src/prettyprinter.h +0 -221
  757. data/ext/v8/upstream/2.3.3/src/profile-generator-inl.h +0 -148
  758. data/ext/v8/upstream/2.3.3/src/profile-generator.cc +0 -1830
  759. data/ext/v8/upstream/2.3.3/src/profile-generator.h +0 -853
  760. data/ext/v8/upstream/2.3.3/src/property.cc +0 -96
  761. data/ext/v8/upstream/2.3.3/src/property.h +0 -315
  762. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-irregexp.cc +0 -464
  763. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-irregexp.h +0 -141
  764. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-tracer.cc +0 -356
  765. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-tracer.h +0 -103
  766. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler.cc +0 -261
  767. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler.h +0 -228
  768. data/ext/v8/upstream/2.3.3/src/regexp.js +0 -549
  769. data/ext/v8/upstream/2.3.3/src/rewriter.cc +0 -1038
  770. data/ext/v8/upstream/2.3.3/src/rewriter.h +0 -54
  771. data/ext/v8/upstream/2.3.3/src/runtime.cc +0 -10599
  772. data/ext/v8/upstream/2.3.3/src/runtime.h +0 -459
  773. data/ext/v8/upstream/2.3.3/src/runtime.js +0 -629
  774. data/ext/v8/upstream/2.3.3/src/scanner.cc +0 -1346
  775. data/ext/v8/upstream/2.3.3/src/scanner.h +0 -503
  776. data/ext/v8/upstream/2.3.3/src/scopeinfo.cc +0 -637
  777. data/ext/v8/upstream/2.3.3/src/scopeinfo.h +0 -233
  778. data/ext/v8/upstream/2.3.3/src/scopes.cc +0 -962
  779. data/ext/v8/upstream/2.3.3/src/scopes.h +0 -400
  780. data/ext/v8/upstream/2.3.3/src/serialize.cc +0 -1461
  781. data/ext/v8/upstream/2.3.3/src/serialize.h +0 -581
  782. data/ext/v8/upstream/2.3.3/src/spaces-inl.h +0 -483
  783. data/ext/v8/upstream/2.3.3/src/spaces.cc +0 -2901
  784. data/ext/v8/upstream/2.3.3/src/spaces.h +0 -2197
  785. data/ext/v8/upstream/2.3.3/src/string-stream.cc +0 -584
  786. data/ext/v8/upstream/2.3.3/src/string-stream.h +0 -189
  787. data/ext/v8/upstream/2.3.3/src/string.js +0 -1006
  788. data/ext/v8/upstream/2.3.3/src/stub-cache.cc +0 -1379
  789. data/ext/v8/upstream/2.3.3/src/stub-cache.h +0 -756
  790. data/ext/v8/upstream/2.3.3/src/third_party/dtoa/COPYING +0 -15
  791. data/ext/v8/upstream/2.3.3/src/third_party/dtoa/dtoa.c +0 -3334
  792. data/ext/v8/upstream/2.3.3/src/token.cc +0 -56
  793. data/ext/v8/upstream/2.3.3/src/token.h +0 -270
  794. data/ext/v8/upstream/2.3.3/src/top.cc +0 -1067
  795. data/ext/v8/upstream/2.3.3/src/top.h +0 -463
  796. data/ext/v8/upstream/2.3.3/src/type-info.cc +0 -53
  797. data/ext/v8/upstream/2.3.3/src/type-info.h +0 -244
  798. data/ext/v8/upstream/2.3.3/src/unicode-inl.h +0 -238
  799. data/ext/v8/upstream/2.3.3/src/unicode.cc +0 -749
  800. data/ext/v8/upstream/2.3.3/src/unicode.h +0 -279
  801. data/ext/v8/upstream/2.3.3/src/uri.js +0 -415
  802. data/ext/v8/upstream/2.3.3/src/utils.cc +0 -285
  803. data/ext/v8/upstream/2.3.3/src/utils.h +0 -745
  804. data/ext/v8/upstream/2.3.3/src/v8-counters.h +0 -250
  805. data/ext/v8/upstream/2.3.3/src/v8.cc +0 -228
  806. data/ext/v8/upstream/2.3.3/src/v8.h +0 -121
  807. data/ext/v8/upstream/2.3.3/src/v8natives.js +0 -1188
  808. data/ext/v8/upstream/2.3.3/src/v8threads.cc +0 -461
  809. data/ext/v8/upstream/2.3.3/src/v8threads.h +0 -159
  810. data/ext/v8/upstream/2.3.3/src/variables.cc +0 -119
  811. data/ext/v8/upstream/2.3.3/src/variables.h +0 -205
  812. data/ext/v8/upstream/2.3.3/src/version.cc +0 -88
  813. data/ext/v8/upstream/2.3.3/src/virtual-frame-heavy-inl.h +0 -192
  814. data/ext/v8/upstream/2.3.3/src/virtual-frame.h +0 -46
  815. data/ext/v8/upstream/2.3.3/src/vm-state-inl.h +0 -137
  816. data/ext/v8/upstream/2.3.3/src/vm-state.cc +0 -39
  817. data/ext/v8/upstream/2.3.3/src/vm-state.h +0 -77
  818. data/ext/v8/upstream/2.3.3/src/x64/assembler-x64-inl.h +0 -400
  819. data/ext/v8/upstream/2.3.3/src/x64/assembler-x64.cc +0 -2963
  820. data/ext/v8/upstream/2.3.3/src/x64/assembler-x64.h +0 -1438
  821. data/ext/v8/upstream/2.3.3/src/x64/builtins-x64.cc +0 -1296
  822. data/ext/v8/upstream/2.3.3/src/x64/codegen-x64-inl.h +0 -46
  823. data/ext/v8/upstream/2.3.3/src/x64/codegen-x64.cc +0 -12491
  824. data/ext/v8/upstream/2.3.3/src/x64/codegen-x64.h +0 -1090
  825. data/ext/v8/upstream/2.3.3/src/x64/cpu-x64.cc +0 -83
  826. data/ext/v8/upstream/2.3.3/src/x64/debug-x64.cc +0 -267
  827. data/ext/v8/upstream/2.3.3/src/x64/disasm-x64.cc +0 -1696
  828. data/ext/v8/upstream/2.3.3/src/x64/fast-codegen-x64.cc +0 -250
  829. data/ext/v8/upstream/2.3.3/src/x64/frames-x64.cc +0 -113
  830. data/ext/v8/upstream/2.3.3/src/x64/frames-x64.h +0 -125
  831. data/ext/v8/upstream/2.3.3/src/x64/full-codegen-x64.cc +0 -3270
  832. data/ext/v8/upstream/2.3.3/src/x64/ic-x64.cc +0 -1907
  833. data/ext/v8/upstream/2.3.3/src/x64/jump-target-x64.cc +0 -437
  834. data/ext/v8/upstream/2.3.3/src/x64/macro-assembler-x64.cc +0 -2793
  835. data/ext/v8/upstream/2.3.3/src/x64/macro-assembler-x64.h +0 -916
  836. data/ext/v8/upstream/2.3.3/src/x64/regexp-macro-assembler-x64.cc +0 -1374
  837. data/ext/v8/upstream/2.3.3/src/x64/regexp-macro-assembler-x64.h +0 -277
  838. data/ext/v8/upstream/2.3.3/src/x64/simulator-x64.h +0 -63
  839. data/ext/v8/upstream/2.3.3/src/x64/stub-cache-x64.cc +0 -2560
  840. data/ext/v8/upstream/2.3.3/src/x64/virtual-frame-x64.cc +0 -1264
  841. data/ext/v8/upstream/2.3.3/src/x64/virtual-frame-x64.h +0 -590
  842. data/ext/v8/upstream/2.3.3/src/zone-inl.h +0 -82
  843. data/ext/v8/upstream/2.3.3/src/zone.cc +0 -194
  844. data/ext/v8/upstream/2.3.3/src/zone.h +0 -221
  845. data/ext/v8/upstream/2.3.3/tools/codemap.js +0 -270
  846. data/ext/v8/upstream/2.3.3/tools/csvparser.js +0 -83
  847. data/ext/v8/upstream/2.3.3/tools/gc-nvp-trace-processor.py +0 -317
  848. data/ext/v8/upstream/2.3.3/tools/gyp/v8.gyp +0 -749
  849. data/ext/v8/upstream/2.3.3/tools/linux-tick-processor.py +0 -78
  850. data/ext/v8/upstream/2.3.3/tools/logreader.js +0 -338
  851. data/ext/v8/upstream/2.3.3/tools/oprofile/annotate +0 -7
  852. data/ext/v8/upstream/2.3.3/tools/oprofile/common +0 -19
  853. data/ext/v8/upstream/2.3.3/tools/oprofile/dump +0 -7
  854. data/ext/v8/upstream/2.3.3/tools/oprofile/report +0 -7
  855. data/ext/v8/upstream/2.3.3/tools/oprofile/reset +0 -7
  856. data/ext/v8/upstream/2.3.3/tools/oprofile/run +0 -14
  857. data/ext/v8/upstream/2.3.3/tools/oprofile/shutdown +0 -7
  858. data/ext/v8/upstream/2.3.3/tools/oprofile/start +0 -7
  859. data/ext/v8/upstream/2.3.3/tools/presubmit.py +0 -299
  860. data/ext/v8/upstream/2.3.3/tools/profile.js +0 -691
  861. data/ext/v8/upstream/2.3.3/tools/profile_view.js +0 -224
  862. data/ext/v8/upstream/2.3.3/tools/splaytree.js +0 -322
  863. data/ext/v8/upstream/2.3.3/tools/splaytree.py +0 -226
  864. data/ext/v8/upstream/2.3.3/tools/tickprocessor.js +0 -862
  865. data/ext/v8/upstream/2.3.3/tools/tickprocessor.py +0 -571
  866. data/ext/v8/upstream/2.3.3/tools/utils.py +0 -88
  867. data/ext/v8/upstream/2.3.3/tools/visual_studio/README.txt +0 -71
  868. data/ext/v8/upstream/2.3.3/tools/visual_studio/common.vsprops +0 -34
  869. data/ext/v8/upstream/2.3.3/tools/visual_studio/debug.vsprops +0 -17
  870. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_base.vcproj +0 -1143
  871. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_base_arm.vcproj +0 -1115
  872. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_base_x64.vcproj +0 -1096
  873. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_shell_sample.vcproj +0 -145
  874. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_shell_sample_arm.vcproj +0 -145
  875. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_shell_sample_x64.vcproj +0 -161
  876. data/ext/v8/upstream/2.3.3/tools/windows-tick-processor.bat +0 -29
  877. data/ext/v8/upstream/2.3.3/tools/windows-tick-processor.py +0 -137
@@ -0,0 +1,45 @@
1
+ // Copyright 2006-2008 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+ #include "v8.h"
29
+
30
+ #if defined(V8_TARGET_ARCH_IA32)
31
+
32
+ #include "frames-inl.h"
33
+
34
+ namespace v8 {
35
+ namespace internal {
36
+
37
+
38
+ Address ExitFrame::ComputeStackPointer(Address fp) {
39
+ return Memory::Address_at(fp + ExitFrameConstants::kSPOffset);
40
+ }
41
+
42
+
43
+ } } // namespace v8::internal
44
+
45
+ #endif // V8_TARGET_ARCH_IA32
@@ -0,0 +1,140 @@
1
+ // Copyright 2006-2008 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+ #ifndef V8_IA32_FRAMES_IA32_H_
29
+ #define V8_IA32_FRAMES_IA32_H_
30
+
31
+ namespace v8 {
32
+ namespace internal {
33
+
34
+
35
+ // Register lists
36
+ // Note that the bit values must match those used in actual instruction encoding
37
+ static const int kNumRegs = 8;
38
+
39
+
40
+ // Caller-saved registers
41
+ static const RegList kJSCallerSaved =
42
+ 1 << 0 | // eax
43
+ 1 << 1 | // ecx
44
+ 1 << 2 | // edx
45
+ 1 << 3 | // ebx - used as a caller-saved register in JavaScript code
46
+ 1 << 7; // edi - callee function
47
+
48
+ static const int kNumJSCallerSaved = 5;
49
+
50
+ typedef Object* JSCallerSavedBuffer[kNumJSCallerSaved];
51
+
52
+
53
+ // Number of registers for which space is reserved in safepoints.
54
+ static const int kNumSafepointRegisters = 8;
55
+
56
+ // ----------------------------------------------------
57
+
58
+
59
+ class StackHandlerConstants : public AllStatic {
60
+ public:
61
+ static const int kNextOffset = 0 * kPointerSize;
62
+ static const int kFPOffset = 1 * kPointerSize;
63
+ static const int kStateOffset = 2 * kPointerSize;
64
+ static const int kPCOffset = 3 * kPointerSize;
65
+
66
+ static const int kSize = kPCOffset + kPointerSize;
67
+ };
68
+
69
+
70
+ class EntryFrameConstants : public AllStatic {
71
+ public:
72
+ static const int kCallerFPOffset = -6 * kPointerSize;
73
+
74
+ static const int kFunctionArgOffset = +3 * kPointerSize;
75
+ static const int kReceiverArgOffset = +4 * kPointerSize;
76
+ static const int kArgcOffset = +5 * kPointerSize;
77
+ static const int kArgvOffset = +6 * kPointerSize;
78
+ };
79
+
80
+
81
+ class ExitFrameConstants : public AllStatic {
82
+ public:
83
+ static const int kCodeOffset = -2 * kPointerSize;
84
+ static const int kSPOffset = -1 * kPointerSize;
85
+
86
+ static const int kCallerFPOffset = 0 * kPointerSize;
87
+ static const int kCallerPCOffset = +1 * kPointerSize;
88
+
89
+ // FP-relative displacement of the caller's SP. It points just
90
+ // below the saved PC.
91
+ static const int kCallerSPDisplacement = +2 * kPointerSize;
92
+ };
93
+
94
+
95
+ class StandardFrameConstants : public AllStatic {
96
+ public:
97
+ static const int kFixedFrameSize = 4;
98
+ static const int kExpressionsOffset = -3 * kPointerSize;
99
+ static const int kMarkerOffset = -2 * kPointerSize;
100
+ static const int kContextOffset = -1 * kPointerSize;
101
+ static const int kCallerFPOffset = 0 * kPointerSize;
102
+ static const int kCallerPCOffset = +1 * kPointerSize;
103
+ static const int kCallerSPOffset = +2 * kPointerSize;
104
+ };
105
+
106
+
107
+ class JavaScriptFrameConstants : public AllStatic {
108
+ public:
109
+ // FP-relative.
110
+ static const int kLocal0Offset = StandardFrameConstants::kExpressionsOffset;
111
+ static const int kSavedRegistersOffset = +2 * kPointerSize;
112
+ static const int kFunctionOffset = StandardFrameConstants::kMarkerOffset;
113
+
114
+ // Caller SP-relative.
115
+ static const int kParam0Offset = -2 * kPointerSize;
116
+ static const int kReceiverOffset = -1 * kPointerSize;
117
+ };
118
+
119
+
120
+ class ArgumentsAdaptorFrameConstants : public AllStatic {
121
+ public:
122
+ static const int kLengthOffset = StandardFrameConstants::kExpressionsOffset;
123
+ };
124
+
125
+
126
+ class InternalFrameConstants : public AllStatic {
127
+ public:
128
+ static const int kCodeOffset = StandardFrameConstants::kExpressionsOffset;
129
+ };
130
+
131
+
132
+ inline Object* JavaScriptFrame::function_slot_object() const {
133
+ const int offset = JavaScriptFrameConstants::kFunctionOffset;
134
+ return Memory::Object_at(fp() + offset);
135
+ }
136
+
137
+
138
+ } } // namespace v8::internal
139
+
140
+ #endif // V8_IA32_FRAMES_IA32_H_
@@ -0,0 +1,4278 @@
1
+ // Copyright 2011 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+ #include "v8.h"
29
+
30
+ #if defined(V8_TARGET_ARCH_IA32)
31
+
32
+ #include "code-stubs.h"
33
+ #include "codegen-inl.h"
34
+ #include "compiler.h"
35
+ #include "debug.h"
36
+ #include "full-codegen.h"
37
+ #include "parser.h"
38
+ #include "scopes.h"
39
+ #include "stub-cache.h"
40
+
41
+ namespace v8 {
42
+ namespace internal {
43
+
44
+
45
+ #define __ ACCESS_MASM(masm_)
46
+
47
+
48
// Records the location of a patchable smi-check jump (jc/jnc) so that
// it can later be rewritten in place (to jz/jnz — see EmitJump).  In
// debug builds it verifies that patch info is emitted exactly when a
// jump site was bound.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    // A bound patch site must have had its patch info emitted, and a
    // site without a jump must not have.
    ASSERT(patch_site_.is_bound() == info_emitted_);
  }

  // test clears the carry flag, so before patching this jump is
  // unconditional.
  void EmitJumpIfNotSmi(Register reg, NearLabel* target) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target);  // Always taken before patched.
  }

  // Symmetric case: with carry clear this jump is never taken until
  // the site is patched.
  void EmitJumpIfSmi(Register reg, NearLabel* target) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target);  // Never taken before patched.
  }

  // Emits a test instruction whose immediate encodes the (int8)
  // distance back to the patch site, presumably so the patcher can
  // locate the jump from here — TODO confirm against the IC patching
  // code.
  void EmitPatchInfo() {
    int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
    ASSERT(is_int8(delta_to_patch_site));
    __ test(eax, Immediate(delta_to_patch_site));
#ifdef DEBUG
    info_emitted_ = true;
#endif
  }

  bool is_bound() const { return patch_site_.is_bound(); }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, NearLabel* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    ASSERT(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target);
  }

  MacroAssembler* masm_;
  Label patch_site_;   // Position of the patchable jump.
#ifdef DEBUG
  bool info_emitted_;  // Set once EmitPatchInfo has run.
#endif
};
96
+
97
+
98
+ // Generate code for a JS function. On entry to the function the receiver
99
+ // and arguments have been pushed on the stack left to right, with the
100
+ // return address on top of them. The actual argument count matches the
101
+ // formal parameter count expected by the function.
102
+ //
103
+ // The live registers are:
104
+ // o edi: the JS function object being called (ie, ourselves)
105
+ // o esi: our context
106
+ // o ebp: our caller's frame pointer
107
+ // o esp: stack pointer (pointing to return address)
108
+ //
109
+ // The function builds a JS frame. Please see JavaScriptFrameConstants in
110
+ // frames-ia32.h for its layout.
111
// Generates the full (unoptimized) code for a JS function: builds the
// frame, allocates locals / local context / arguments object, then
// visits declarations and the body.  See the comment block above for
// the register and stack layout on entry.
void FullCodeGenerator::Generate(CompilationInfo* info) {
  ASSERT(info_ == NULL);
  info_ = info;
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

#ifdef DEBUG
  // Break into the debugger when entering the function named by
  // --stop_at.
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  // Build the standard JS frame (see JavaScriptFrameConstants).
  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS Function.

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = scope()->num_stack_slots();
    if (locals_count == 1) {
      __ push(Immediate(Factory::undefined_value()));
    } else if (locals_count > 1) {
      // Load undefined once and push it repeatedly.
      __ mov(eax, Immediate(Factory::undefined_value()));
      for (int i = 0; i < locals_count; i++) {
        __ push(eax);
      }
    }
  }

  // Tracks whether edi still holds the function (stub/runtime calls
  // below clobber it).
  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate local context");
    // Argument to NewContext is the function, which is still in edi.
    __ push(edi);
    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewContext, 1);
    }
    function_in_register = false;
    // Context is returned in both eax and esi.  It replaces the context
    // passed to us.  It's saved in the stack and kept live in esi.
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);

    // Copy parameters into context if necessary.
    int num_parameters = scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Slot* slot = scope()->parameter(i)->AsSlot();
      if (slot != NULL && slot->type() == Slot::CONTEXT) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ mov(eax, Operand(ebp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(slot->index());
        __ mov(Operand(esi, context_offset), eax);
        // Update the write barrier.  This clobbers all involved
        // registers, so we have use a third register to avoid
        // clobbering esi.
        __ mov(ecx, esi);
        __ RecordWrite(ecx, context_offset, eax, ebx);
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (function_in_register) {
      __ push(edi);
    } else {
      // edi was clobbered above; reload the function from the frame.
      __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    // Receiver is just before the parameters on the caller's stack.
    int offset = scope()->num_parameters() * kPointerSize;
    __ lea(edx,
           Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
    __ push(edx);
    __ push(Immediate(Smi::FromInt(scope()->num_parameters())));
    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
    __ CallStub(&stub);
    __ mov(ecx, eax);  // Duplicate result.
    // Store the arguments object into both its named slot and the
    // shadow ".arguments" slot.
    Move(arguments->AsSlot(), eax, ebx, edx);
    Slot* dot_arguments_slot = scope()->arguments_shadow()->AsSlot();
    Move(dot_arguments_slot, ecx, ebx, edx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        EmitDeclaration(scope()->function(), Variable::CONST, NULL);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailout(info->function(), NO_REGISTERS);
      NearLabel ok;
      ExternalReference stack_limit =
          ExternalReference::address_of_stack_limit();
      __ cmp(esp, Operand::StaticVariable(stack_limit));
      __ j(above_equal, &ok, taken);
      StackCheckStub stub;
      __ CallStub(&stub);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      ASSERT(loop_depth() == 0);
      VisitStatements(function()->body());
      ASSERT(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ mov(eax, Factory::undefined_value());
    EmitReturnSequence();
  }
}
254
+
255
+
256
// Resets the accumulator (eax) to a known harmless value, Smi 0.
void FullCodeGenerator::ClearAccumulator() {
  __ Set(eax, Immediate(Smi::FromInt(0)));
}
259
+
260
+
261
// Emits an interrupt/stack-overflow check at the back edge of a loop.
// The check doubles as the on-stack-replacement (OSR) entry point for
// the optimizing compiler.
void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
  Comment cmnt(masm_, "[ Stack check");
  NearLabel ok;
  ExternalReference stack_limit = ExternalReference::address_of_stack_limit();
  __ cmp(esp, Operand::StaticVariable(stack_limit));
  __ j(above_equal, &ok, taken);
  StackCheckStub stub;
  __ CallStub(&stub);
  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordStackCheck(stmt->OsrEntryId());

  // Loop stack checks can be patched to perform on-stack replacement.  In
  // order to decide whether or not to perform OSR we embed the loop depth
  // in a test instruction after the call so we can extract it from the OSR
  // builtin.
  ASSERT(loop_depth() > 0);
  __ test(eax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
288
+
289
+
290
// Emits (or jumps to) the single shared return sequence.  The exact
// instruction layout matters: the debugger patches this sequence, so a
// debug-build check verifies it is long enough.
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    // Sequence already emitted; just jump to it.
    __ jmp(&return_label_);
  } else {
    // Common return label
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ push(eax);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    SetSourcePosition(function()->end_position() - 1);
    __ RecordJSReturn();
    // Do not use the leave instruction here because it is too short to
    // patch with the code required by the debugger.
    __ mov(esp, ebp);
    __ pop(ebp);

    // Pop receiver + parameters off the caller's stack on return.
    int arguments_bytes = (scope()->num_parameters() + 1) * kPointerSize;
    __ Ret(arguments_bytes, ecx);
#ifdef ENABLE_DEBUGGER_SUPPORT
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    ASSERT(Assembler::kJSReturnSequenceLength <=
           masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
#endif
  }
}
323
+
324
+
325
// Effect context: the slot's value is not needed, so nothing to do.
void FullCodeGenerator::EffectContext::Plug(Slot* slot) const {
}
327
+
328
+
329
// Accumulator context: load the slot's value into the result register.
void FullCodeGenerator::AccumulatorValueContext::Plug(Slot* slot) const {
  MemOperand slot_operand = codegen()->EmitSlotSearch(slot, result_register());
  __ mov(result_register(), slot_operand);
}
333
+
334
+
335
// Stack context: push the slot's value onto the expression stack.
void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const {
  MemOperand slot_operand = codegen()->EmitSlotSearch(slot, result_register());
  // Memory operands can be pushed directly.
  __ push(slot_operand);
}
340
+
341
+
342
// Test context: load the slot's value and branch on its truthiness.
void FullCodeGenerator::TestContext::Plug(Slot* slot) const {
  // For simplicity we always test the accumulator register.
  codegen()->Move(result_register(), slot);
  codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
  codegen()->DoTest(true_label_, false_label_, fall_through_);
}
348
+
349
+
350
// Root-list plugging is only used on platforms with a root register.
void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}
353
+
354
+
355
// Root-list plugging is only used on platforms with a root register.
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}
359
+
360
+
361
// Root-list plugging is only used on platforms with a root register.
void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}
365
+
366
+
367
// Root-list plugging is only used on platforms with a root register.
void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}
370
+
371
+
372
// Effect context: a literal has no side effects, so nothing to do.
void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}
374
+
375
+
376
// Accumulator context: materialize the literal in the result register.
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ Set(result_register(), Immediate(lit));
}
380
+
381
+
382
// Stack context: push the literal onto the expression stack.
void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates can be pushed directly.
  __ push(Immediate(lit));
}
386
+
387
+
388
// Test context: the literal's truthiness is known at compile time for
// most cases, so emit a direct jump instead of a runtime test.
void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    // Always false.
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    // Always true.
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    // Only the empty string is false.
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    // Only zero is false.
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // Truthiness not statically known (e.g. heap numbers).
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), lit);
    codegen()->DoTest(true_label_, false_label_, fall_through_);
  }
}
416
+
417
+
418
// Effect context: discard |count| stack slots; the value in |reg| is
// not needed.
void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}
423
+
424
+
425
// Accumulator context: discard |count| stack slots and move |reg| into
// the result register.
void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}
432
+
433
+
434
// Stack context: replace the top |count| stack slots with |reg| by
// dropping count-1 slots and overwriting the remaining top slot.
void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ mov(Operand(esp, 0), reg);
}
440
+
441
+
442
// Test context: discard |count| stack slots, then branch on the
// truthiness of |reg|.
void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
  codegen()->DoTest(true_label_, false_label_, fall_through_);
}
451
+
452
+
453
// Effect context: no value to materialize; both labels must coincide
// and serve only as a bind point.
void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);
}
458
+
459
+
460
// Accumulator context: materialize true/false into the result register
// depending on which label is reached.
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  NearLabel done;
  __ bind(materialize_true);
  __ mov(result_register(), Factory::true_value());
  __ jmp(&done);
  __ bind(materialize_false);
  __ mov(result_register(), Factory::false_value());
  __ bind(&done);
}
471
+
472
+
473
// Stack context: push true/false depending on which label is reached.
void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  NearLabel done;
  __ bind(materialize_true);
  __ push(Immediate(Factory::true_value()));
  __ jmp(&done);
  __ bind(materialize_false);
  __ push(Immediate(Factory::false_value()));
  __ bind(&done);
}
484
+
485
+
486
// Test context: control already flows to the right labels; nothing to
// materialize.  Just check they are the context's own labels.
void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}
491
+
492
+
493
// Effect context: a boolean constant has no effect, so nothing to do.
void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}
495
+
496
+
497
+ void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
498
+ Handle<Object> value =
499
+ flag ? Factory::true_value() : Factory::false_value();
500
+ __ mov(result_register(), value);
501
+ }
502
+
503
+
504
+ void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
505
+ Handle<Object> value =
506
+ flag ? Factory::true_value() : Factory::false_value();
507
+ __ push(Immediate(value));
508
+ }
509
+
510
+
511
// Test context: the outcome is known at compile time, so jump straight
// to the appropriate label (unless it is the fall-through).
void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  }
}
522
+
523
+
524
// Branches on the truthiness of the value in the result register:
// fast-path checks for the common constants and smis inline, then
// falls back to the ToBoolean stub for everything else.
void FullCodeGenerator::DoTest(Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  // Emit the inlined tests assumed by the stub.
  __ cmp(result_register(), Factory::undefined_value());
  __ j(equal, if_false);
  __ cmp(result_register(), Factory::true_value());
  __ j(equal, if_true);
  __ cmp(result_register(), Factory::false_value());
  __ j(equal, if_false);
  STATIC_ASSERT(kSmiTag == 0);
  // Smi zero is false; any other smi is true.
  __ test(result_register(), Operand(result_register()));
  __ j(zero, if_false);
  __ test(result_register(), Immediate(kSmiTagMask));
  __ j(zero, if_true);

  // Call the ToBoolean stub for all other cases.
  ToBooleanStub stub;
  __ push(result_register());
  __ CallStub(&stub);
  __ test(eax, Operand(eax));

  // The stub returns nonzero for true.
  Split(not_zero, if_true, if_false, fall_through);
}
549
+
550
+
551
// Emits a conditional branch on |cc| to if_true/if_false, omitting the
// jump to whichever label is the fall-through (at most one jump plus
// one unconditional jump are emitted).
void FullCodeGenerator::Split(Condition cc,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ j(cc, if_true);
  } else if (if_true == fall_through) {
    __ j(NegateCondition(cc), if_false);
  } else {
    __ j(cc, if_true);
    __ jmp(if_false);
  }
}
564
+
565
+
566
// Resolves a slot to a memory operand.  Parameters and locals are
// fp-relative; context slots require walking the context chain, using
// |scratch| to hold the target context.  LOOKUP slots cannot be
// resolved statically.
MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) {
  switch (slot->type()) {
    case Slot::PARAMETER:
    case Slot::LOCAL:
      return Operand(ebp, SlotOffset(slot));
    case Slot::CONTEXT: {
      int context_chain_length =
          scope()->ContextChainLength(slot->var()->scope());
      __ LoadContext(scratch, context_chain_length);
      return ContextOperand(scratch, slot->index());
    }
    case Slot::LOOKUP:
      UNREACHABLE();
  }
  UNREACHABLE();
  // Dead code: dummy operand to satisfy the compiler.
  return Operand(eax, 0);
}
583
+
584
+
585
+ void FullCodeGenerator::Move(Register destination, Slot* source) {
586
+ MemOperand location = EmitSlotSearch(source, destination);
587
+ __ mov(destination, location);
588
+ }
589
+
590
+
591
// Stores |src| into slot |dst|, emitting a write barrier when the slot
// lives in a heap-allocated context.  |scratch1|/|scratch2| must be
// distinct from |src| and are clobbered.
void FullCodeGenerator::Move(Slot* dst,
                             Register src,
                             Register scratch1,
                             Register scratch2) {
  ASSERT(dst->type() != Slot::LOOKUP);  // Not yet implemented.
  ASSERT(!scratch1.is(src) && !scratch2.is(src));
  MemOperand location = EmitSlotSearch(dst, scratch1);
  __ mov(location, src);
  // Emit the write barrier code if the location is in the heap.
  if (dst->type() == Slot::CONTEXT) {
    int offset = Context::SlotOffset(dst->index());
    // RecordWrite clobbers its registers, so esi must not be among them.
    ASSERT(!scratch1.is(esi) && !src.is(esi) && !scratch2.is(esi));
    __ RecordWrite(scratch1, offset, src, scratch2);
  }
}
606
+
607
+
608
// Records bailout points for the pending forward bailouts before a
// test splits control flow.  When |should_normalize| is set, the
// deopt path re-derives the branch by comparing eax against true.
void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context.  Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  // Normal execution skips the normalization code below.
  NearLabel skip;
  if (should_normalize) __ jmp(&skip);

  // Record a bailout point for every expression on the forward
  // bailout stack.
  ForwardBailoutStack* current = forward_bailout_stack_;
  while (current != NULL) {
    PrepareForBailout(current->expr(), state);
    current = current->parent();
  }

  if (should_normalize) {
    __ cmp(eax, Factory::true_value());
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}
632
+
633
+
634
// Emits code for a variable/function declaration.  The storage target
// depends on how |variable| resolved: a stack slot (parameter/local),
// a context slot, a runtime LOOKUP slot, or — for some rewrites — a
// property set via a keyed store IC.  CONST declarations are
// initialized with the hole; function declarations with the evaluated
// function literal.
void FullCodeGenerator::EmitDeclaration(Variable* variable,
                                        Variable::Mode mode,
                                        FunctionLiteral* function) {
  Comment cmnt(masm_, "[ Declaration");
  ASSERT(variable != NULL);  // Must have been resolved.
  Slot* slot = variable->AsSlot();
  Property* prop = variable->AsProperty();

  if (slot != NULL) {
    switch (slot->type()) {
      case Slot::PARAMETER:
      case Slot::LOCAL:
        if (mode == Variable::CONST) {
          // CONST starts out holding the hole.
          __ mov(Operand(ebp, SlotOffset(slot)),
                 Immediate(Factory::the_hole_value()));
        } else if (function != NULL) {
          VisitForAccumulatorValue(function);
          __ mov(Operand(ebp, SlotOffset(slot)), result_register());
        }
        break;

      case Slot::CONTEXT:
        // We bypass the general EmitSlotSearch because we know more about
        // this specific context.

        // The variable in the decl always resides in the current function
        // context.
        ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
        if (FLAG_debug_code) {
          // Check that we're not inside a 'with'.
          __ mov(ebx, ContextOperand(esi, Context::FCONTEXT_INDEX));
          __ cmp(ebx, Operand(esi));
          __ Check(equal, "Unexpected declaration in current context.");
        }
        if (mode == Variable::CONST) {
          __ mov(ContextOperand(esi, slot->index()),
                 Immediate(Factory::the_hole_value()));
          // No write barrier since the hole value is in old space.
        } else if (function != NULL) {
          VisitForAccumulatorValue(function);
          __ mov(ContextOperand(esi, slot->index()), result_register());
          int offset = Context::SlotOffset(slot->index());
          // RecordWrite clobbers its registers; copy esi first.
          __ mov(ebx, esi);
          __ RecordWrite(ebx, offset, result_register(), ecx);
        }
        break;

      case Slot::LOOKUP: {
        // Defer to the runtime: push context, name, attributes, and
        // the initial value (if any).
        __ push(esi);
        __ push(Immediate(variable->name()));
        // Declaration nodes are always introduced in one of two modes.
        ASSERT(mode == Variable::VAR || mode == Variable::CONST);
        PropertyAttributes attr = (mode == Variable::VAR) ? NONE : READ_ONLY;
        __ push(Immediate(Smi::FromInt(attr)));
        // Push initial value, if any.
        // Note: For variables we must not push an initial value (such as
        // 'undefined') because we may have a (legal) redeclaration and we
        // must not destroy the current value.
        if (mode == Variable::CONST) {
          __ push(Immediate(Factory::the_hole_value()));
        } else if (function != NULL) {
          VisitForStackValue(function);
        } else {
          __ push(Immediate(Smi::FromInt(0)));  // No initial value!
        }
        __ CallRuntime(Runtime::kDeclareContextSlot, 4);
        break;
      }
    }

  } else if (prop != NULL) {
    if (function != NULL || mode == Variable::CONST) {
      // We are declaring a function or constant that rewrites to a
      // property.  Use (keyed) IC to set the initial value.  We cannot
      // visit the rewrite because it's shared and we risk recording
      // duplicate AST IDs for bailouts from optimized code.
      ASSERT(prop->obj()->AsVariableProxy() != NULL);
      { AccumulatorValueContext for_object(this);
        EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
      }

      if (function != NULL) {
        // Save the receiver while the function literal is evaluated.
        __ push(eax);
        VisitForAccumulatorValue(function);
        __ pop(edx);
      } else {
        __ mov(edx, eax);
        __ mov(eax, Factory::the_hole_value());
      }
      ASSERT(prop->key()->AsLiteral() != NULL &&
             prop->key()->AsLiteral()->handle()->IsSmi());
      __ Set(ecx, Immediate(prop->key()->AsLiteral()->handle()));

      // Keyed store IC expects: eax = value, ecx = key, edx = receiver.
      Handle<Code> ic(Builtins::builtin(is_strict()
          ? Builtins::KeyedStoreIC_Initialize_Strict
          : Builtins::KeyedStoreIC_Initialize));
      EmitCallIC(ic, RelocInfo::CODE_TARGET);
    }
  }
}
734
+
735
+
736
// AST visitor entry point: unpack the declaration node and delegate.
void FullCodeGenerator::VisitDeclaration(Declaration* decl) {
  EmitDeclaration(decl->proxy()->var(), decl->mode(), decl->fun());
}
739
+
740
+
741
// Declares all global variables/functions in |pairs| via the runtime.
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ push(esi);  // The context is the first argument.
  __ push(Immediate(pairs));
  __ push(Immediate(Smi::FromInt(is_eval() ? 1 : 0)));
  __ push(Immediate(Smi::FromInt(strict_mode_flag())));
  __ CallRuntime(Runtime::kDeclareGlobals, 4);
  // Return value is ignored.
}
750
+
751
+
752
// Generates a switch statement: the tag is kept on the stack while
// each clause label is compared against it (with an inline smi fast
// path and a patched CompareIC for the general case); all case bodies
// are emitted afterwards, entered through their per-clause labels.
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->entry_label()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ mov(edx, Operand(esp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      // Fast path: both operands are smis — compare directly.
      NearLabel slow_case;
      __ mov(ecx, edx);
      __ or_(ecx, Operand(eax));
      patch_site.EmitJumpIfNotSmi(ecx, &slow_case);

      __ cmp(edx, Operand(eax));
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target()->entry_label());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
    EmitCallIC(ic, &patch_site);
    // The IC leaves a zero result in eax on equality.
    __ test(eax, Operand(eax));
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target()->entry_label());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_target());
  } else {
    __ jmp(default_clause->body_target()->entry_label());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target()->entry_label());
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_target());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
831
+
832
+
833
+ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
834
+ Comment cmnt(masm_, "[ ForInStatement");
835
+ SetStatementPosition(stmt);
836
+
837
+ Label loop, exit;
838
+ ForIn loop_statement(this, stmt);
839
+ increment_loop_depth();
840
+
841
+ // Get the object to enumerate over. Both SpiderMonkey and JSC
842
+ // ignore null and undefined in contrast to the specification; see
843
+ // ECMA-262 section 12.6.4.
844
+ VisitForAccumulatorValue(stmt->enumerable());
845
+ __ cmp(eax, Factory::undefined_value());
846
+ __ j(equal, &exit);
847
+ __ cmp(eax, Factory::null_value());
848
+ __ j(equal, &exit);
849
+
850
+ // Convert the object to a JS object.
851
+ NearLabel convert, done_convert;
852
+ __ test(eax, Immediate(kSmiTagMask));
853
+ __ j(zero, &convert);
854
+ __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
855
+ __ j(above_equal, &done_convert);
856
+ __ bind(&convert);
857
+ __ push(eax);
858
+ __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
859
+ __ bind(&done_convert);
860
+ __ push(eax);
861
+
862
+ // Check cache validity in generated code. This is a fast case for
863
+ // the JSObject::IsSimpleEnum cache validity checks. If we cannot
864
+ // guarantee cache validity, call the runtime system to check cache
865
+ // validity or get the property names in a fixed array.
866
+ Label next, call_runtime;
867
+ __ mov(ecx, eax);
868
+ __ bind(&next);
869
+
870
+ // Check that there are no elements. Register ecx contains the
871
+ // current JS object we've reached through the prototype chain.
872
+ __ cmp(FieldOperand(ecx, JSObject::kElementsOffset),
873
+ Factory::empty_fixed_array());
874
+ __ j(not_equal, &call_runtime);
875
+
876
+ // Check that instance descriptors are not empty so that we can
877
+ // check for an enum cache. Leave the map in ebx for the subsequent
878
+ // prototype load.
879
+ __ mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
880
+ __ mov(edx, FieldOperand(ebx, Map::kInstanceDescriptorsOffset));
881
+ __ cmp(edx, Factory::empty_descriptor_array());
882
+ __ j(equal, &call_runtime);
883
+
884
+ // Check that there is an enum cache in the non-empty instance
885
+ // descriptors (edx). This is the case if the next enumeration
886
+ // index field does not contain a smi.
887
+ __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumerationIndexOffset));
888
+ __ test(edx, Immediate(kSmiTagMask));
889
+ __ j(zero, &call_runtime);
890
+
891
+ // For all objects but the receiver, check that the cache is empty.
892
+ NearLabel check_prototype;
893
+ __ cmp(ecx, Operand(eax));
894
+ __ j(equal, &check_prototype);
895
+ __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset));
896
+ __ cmp(edx, Factory::empty_fixed_array());
897
+ __ j(not_equal, &call_runtime);
898
+
899
+ // Load the prototype from the map and loop if non-null.
900
+ __ bind(&check_prototype);
901
+ __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
902
+ __ cmp(ecx, Factory::null_value());
903
+ __ j(not_equal, &next);
904
+
905
+ // The enum cache is valid. Load the map of the object being
906
+ // iterated over and use the cache for the iteration.
907
+ NearLabel use_cache;
908
+ __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
909
+ __ jmp(&use_cache);
910
+
911
+ // Get the set of properties to enumerate.
912
+ __ bind(&call_runtime);
913
+ __ push(eax); // Duplicate the enumerable object on the stack.
914
+ __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
915
+
916
+ // If we got a map from the runtime call, we can do a fast
917
+ // modification check. Otherwise, we got a fixed array, and we have
918
+ // to do a slow check.
919
+ NearLabel fixed_array;
920
+ __ cmp(FieldOperand(eax, HeapObject::kMapOffset), Factory::meta_map());
921
+ __ j(not_equal, &fixed_array);
922
+
923
+ // We got a map in register eax. Get the enumeration cache from it.
924
+ __ bind(&use_cache);
925
+ __ mov(ecx, FieldOperand(eax, Map::kInstanceDescriptorsOffset));
926
+ __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumerationIndexOffset));
927
+ __ mov(edx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));
928
+
929
+ // Setup the four remaining stack slots.
930
+ __ push(eax); // Map.
931
+ __ push(edx); // Enumeration cache.
932
+ __ mov(eax, FieldOperand(edx, FixedArray::kLengthOffset));
933
+ __ push(eax); // Enumeration cache length (as smi).
934
+ __ push(Immediate(Smi::FromInt(0))); // Initial index.
935
+ __ jmp(&loop);
936
+
937
+ // We got a fixed array in register eax. Iterate through that.
938
+ __ bind(&fixed_array);
939
+ __ push(Immediate(Smi::FromInt(0))); // Map (0) - force slow check.
940
+ __ push(eax);
941
+ __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
942
+ __ push(eax); // Fixed array length (as smi).
943
+ __ push(Immediate(Smi::FromInt(0))); // Initial index.
944
+
945
+ // Generate code for doing the condition check.
946
+ __ bind(&loop);
947
+ __ mov(eax, Operand(esp, 0 * kPointerSize)); // Get the current index.
948
+ __ cmp(eax, Operand(esp, 1 * kPointerSize)); // Compare to the array length.
949
+ __ j(above_equal, loop_statement.break_target());
950
+
951
+ // Get the current entry of the array into register ebx.
952
+ __ mov(ebx, Operand(esp, 2 * kPointerSize));
953
+ __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));
954
+
955
+ // Get the expected map from the stack or a zero map in the
956
+ // permanent slow case into register edx.
957
+ __ mov(edx, Operand(esp, 3 * kPointerSize));
958
+
959
+ // Check if the expected map still matches that of the enumerable.
960
+ // If not, we have to filter the key.
961
+ NearLabel update_each;
962
+ __ mov(ecx, Operand(esp, 4 * kPointerSize));
963
+ __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
964
+ __ j(equal, &update_each);
965
+
966
+ // Convert the entry to a string or null if it isn't a property
967
+ // anymore. If the property has been removed while iterating, we
968
+ // just skip it.
969
+ __ push(ecx); // Enumerable.
970
+ __ push(ebx); // Current entry.
971
+ __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
972
+ __ test(eax, Operand(eax));
973
+ __ j(equal, loop_statement.continue_target());
974
+ __ mov(ebx, Operand(eax));
975
+
976
+ // Update the 'each' property or variable from the possibly filtered
977
+ // entry in register ebx.
978
+ __ bind(&update_each);
979
+ __ mov(result_register(), ebx);
980
+ // Perform the assignment as if via '='.
981
+ { EffectContext context(this);
982
+ EmitAssignment(stmt->each(), stmt->AssignmentId());
983
+ }
984
+
985
+ // Generate code for the body of the loop.
986
+ Visit(stmt->body());
987
+
988
+ // Generate code for going to the next element by incrementing the
989
+ // index (smi) stored on top of the stack.
990
+ __ bind(loop_statement.continue_target());
991
+ __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));
992
+
993
+ EmitStackCheck(stmt);
994
+ __ jmp(&loop);
995
+
996
+ // Remove the pointers stored on the stack.
997
+ __ bind(loop_statement.break_target());
998
+ __ add(Operand(esp), Immediate(5 * kPointerSize));
999
+
1000
+ // Exit and decrement the loop depth.
1001
+ __ bind(&exit);
1002
+ decrement_loop_depth();
1003
+ }
1004
+
1005
+
1006
// Emits code that creates a closure for the function described by `info`
// and plugs the resulting value (in eax) into the current expression
// context. `pretenure` requests old-space allocation via the runtime path.
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      scope()->is_function_scope() &&
      info->num_literals() == 0 &&
      !pretenure) {
    FastNewClosureStub stub;
    __ push(Immediate(info));
    __ CallStub(&stub);
  } else {
    // Slow path: Runtime::kNewClosure(context, info, pretenure-flag).
    __ push(esi);
    __ push(Immediate(info));
    __ push(Immediate(pretenure
                      ? Factory::true_value()
                      : Factory::false_value()));
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(eax);
}
1032
+
1033
+
1034
// Visits a variable reference by delegating to EmitVariableLoad, which
// leaves the loaded value in the current expression context.
void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr->var());
}
1038
+
1039
+
1040
// Emits a fast-path global load for a LOOKUP slot. First walks the scope
// chain verifying that no context carries a (non-NULL) extension object
// that could shadow the global; jumps to `slow` if any check fails.
// On the fast path, loads the global via the LoadIC, leaving the result
// in eax.
void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
    Slot* slot,
    TypeofState typeof_state,
    Label* slow) {
  Register context = esi;
  Register temp = edx;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        __ j(not_equal, slow);
      }
      // Load next context in chain.
      __ mov(temp, ContextOperand(context, Context::CLOSURE_INDEX));
      __ mov(temp, FieldOperand(temp, JSFunction::kContextOffset));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions. If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Loop up the context chain. There is no frame effect so it is
    // safe to use raw labels here.
    NearLabel next, fast;
    if (!context.is(temp)) {
      __ mov(temp, context);
    }
    __ bind(&next);
    // Terminate at global context.
    __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
           Immediate(Factory::global_context_map()));
    __ j(equal, &fast);
    // Check that extension is NULL.
    __ cmp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
    __ j(not_equal, slow);
    // Load next context in chain.
    __ mov(temp, ContextOperand(temp, Context::CLOSURE_INDEX));
    __ mov(temp, FieldOperand(temp, JSFunction::kContextOffset));
    __ jmp(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a global
  // load IC call. Receiver goes in eax, name in ecx, as the IC expects.
  __ mov(eax, GlobalObjectOperand());
  __ mov(ecx, slot->var()->name());
  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
  // Inside typeof, a plain CODE_TARGET reloc avoids the "contextual"
  // treatment so undeclared variables do not throw.
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
      ? RelocInfo::CODE_TARGET
      : RelocInfo::CODE_TARGET_CONTEXT;
  EmitCallIC(ic, mode);
}
1101
+
1102
+
1103
// Returns a MemOperand addressing a CONTEXT slot, after emitting code
// that checks every intervening context between the current scope and
// the slot's scope for a (non-NULL) extension object. Jumps to `slow`
// on any non-NULL extension. Load-only: the returned operand may be
// esi-based, which a write barrier must never clobber.
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
    Slot* slot,
    Label* slow) {
  ASSERT(slot->type() == Slot::CONTEXT);
  Register context = esi;
  Register temp = ebx;

  for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        __ j(not_equal, slow);
      }
      // Load next context in chain.
      __ mov(temp, ContextOperand(context, Context::CLOSURE_INDEX));
      __ mov(temp, FieldOperand(temp, JSFunction::kContextOffset));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
  }
  // Check that last extension is NULL.
  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  __ j(not_equal, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an esi-based operand (the write barrier cannot be allowed to
  // destroy the esi register).
  return ContextOperand(context, slot->index());
}
1133
+
1134
+
1135
// Emits fast-case code for loading a dynamically-scoped variable (one
// that might be shadowed by an eval-introduced binding). On success the
// value is in eax and control jumps to `done`; any failed extension
// check jumps to `slow`, where the caller emits the runtime lookup.
void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
    Slot* slot,
    TypeofState typeof_state,
    Label* slow,
    Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
    EmitLoadGlobalSlotCheckExtensions(slot, typeof_state, slow);
    __ jmp(done);
  } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
    Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot();
    Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
    if (potential_slot != NULL) {
      // Generate fast case for locals that rewrite to slots.
      __ mov(eax,
             ContextSlotOperandCheckExtensions(potential_slot, slow));
      if (potential_slot->var()->mode() == Variable::CONST) {
        // Uninitialized consts hold the hole; read them as undefined.
        __ cmp(eax, Factory::the_hole_value());
        __ j(not_equal, done);
        __ mov(eax, Factory::undefined_value());
      }
      __ jmp(done);
    } else if (rewrite != NULL) {
      // Generate fast case for calls of an argument function.
      Property* property = rewrite->AsProperty();
      if (property != NULL) {
        VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
        Literal* key_literal = property->key()->AsLiteral();
        if (obj_proxy != NULL &&
            key_literal != NULL &&
            obj_proxy->IsArguments() &&
            key_literal->handle()->IsSmi()) {
          // Load arguments object if there are no eval-introduced
          // variables. Then load the argument from the arguments
          // object using keyed load.
          __ mov(edx,
                 ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
                                                   slow));
          __ mov(eax, Immediate(key_literal->handle()));
          Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
          EmitCallIC(ic, RelocInfo::CODE_TARGET);
          __ jmp(done);
        }
      }
    }
  }
  // NOTE: falling through (no jmp to done/slow emitted) only happens when
  // no fast case applies; the caller's &slow code follows directly.
}
1186
+
1187
+
1188
// Loads the value of `var` and plugs it into the current expression
// context. Dispatches over the four representations a variable can
// have in this codegen: global, LOOKUP slot, other slot, or a rewritten
// parameter (arguments-object access).
void FullCodeGenerator::EmitVariableLoad(Variable* var) {
  // Four cases: non-this global variables, lookup slots, all other
  // types of slots, and parameters that rewrite to explicit property
  // accesses on the arguments object.
  Slot* slot = var->AsSlot();
  Property* property = var->AsProperty();

  if (var->is_global() && !var->is_this()) {
    Comment cmnt(masm_, "Global variable");
    // Use inline caching. Variable name is passed in ecx and the global
    // object on the stack.
    __ mov(eax, GlobalObjectOperand());
    __ mov(ecx, var->name());
    Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
    EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
    context()->Plug(eax);

  } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLoadFromSlotFastCase(slot, NOT_INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    Comment cmnt(masm_, "Lookup slot");
    __ push(esi);  // Context.
    __ push(Immediate(var->name()));
    __ CallRuntime(Runtime::kLoadContextSlot, 2);
    __ bind(&done);

    context()->Plug(eax);

  } else if (slot != NULL) {
    Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
                            ? "Context slot"
                            : "Stack slot");
    if (var->mode() == Variable::CONST) {
      // Constants may be the hole value if they have not been initialized.
      // Unhole them.
      NearLabel done;
      MemOperand slot_operand = EmitSlotSearch(slot, eax);
      __ mov(eax, slot_operand);
      __ cmp(eax, Factory::the_hole_value());
      __ j(not_equal, &done);
      __ mov(eax, Factory::undefined_value());
      __ bind(&done);
      context()->Plug(eax);
    } else {
      context()->Plug(slot);
    }

  } else {
    Comment cmnt(masm_, "Rewritten parameter");
    ASSERT_NOT_NULL(property);
    // Rewritten parameter accesses are of the form "slot[literal]".

    // Assert that the object is in a slot.
    Variable* object_var = property->obj()->AsVariableProxy()->AsVariable();
    ASSERT_NOT_NULL(object_var);
    Slot* object_slot = object_var->AsSlot();
    ASSERT_NOT_NULL(object_slot);

    // Load the object.
    MemOperand object_loc = EmitSlotSearch(object_slot, eax);
    __ mov(edx, object_loc);

    // Assert that the key is a smi.
    Literal* key_literal = property->key()->AsLiteral();
    ASSERT_NOT_NULL(key_literal);
    ASSERT(key_literal->handle()->IsSmi());

    // Load the key.
    __ mov(eax, Immediate(key_literal->handle()));

    // Do a keyed property load (receiver in edx, key in eax).
    Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
    EmitCallIC(ic, RelocInfo::CODE_TARGET);

    // Drop key and object left on the stack by IC.
    context()->Plug(eax);
  }
}
1271
+
1272
+
1273
// Emits code that materializes (or reuses) the boilerplate JSRegExp for
// a regexp literal and then clones it, leaving the clone in eax.
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  NearLabel materialized;
  // Registers will be used as follows:
  // edi = JS function.
  // ecx = literals array.
  // ebx = regexp literal.
  // eax = regexp literal clone.
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ mov(ebx, FieldOperand(ecx, literal_offset));
  // An undefined literal slot means the boilerplate has not been
  // materialized yet.
  __ cmp(ebx, Factory::undefined_value());
  __ j(not_equal, &materialized);

  // Create regexp literal using runtime function
  // Result will be in eax.
  __ push(ecx);
  __ push(Immediate(Smi::FromInt(expr->literal_index())));
  __ push(Immediate(expr->pattern()));
  __ push(Immediate(expr->flags()));
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(ebx, eax);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ push(ebx);
  __ push(Immediate(Smi::FromInt(size)));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(ebx);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ mov(edx, FieldOperand(ebx, i));
    __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
    __ mov(FieldOperand(eax, i), edx);
    __ mov(FieldOperand(eax, i + kPointerSize), ecx);
  }
  // Copy the trailing word when the object size is an odd number of
  // pointers.
  if ((size % (2 * kPointerSize)) != 0) {
    __ mov(edx, FieldOperand(ebx, size - kPointerSize));
    __ mov(FieldOperand(eax, size - kPointerSize), edx);
  }
  context()->Plug(eax);
}
1325
+
1326
+
1327
// Emits code that creates an object literal and initializes its
// non-compile-time-constant properties, plugging the result into the
// current expression context.
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
  __ push(Immediate(Smi::FromInt(expr->literal_index())));
  __ push(Immediate(expr->constant_properties()));
  __ push(Immediate(Smi::FromInt(expr->fast_elements() ? 1 : 0)));
  // Deeply nested literals cannot use the shallow-clone fast path.
  if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
  }

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in eax.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore();

  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(eax);  // Save result on the stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->handle()->IsSymbol()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(ecx, Immediate(key->handle()));
            __ mov(edx, Operand(esp, 0));
            Handle<Code> ic(Builtins::builtin(
                is_strict() ? Builtins::StoreIC_Initialize_Strict
                            : Builtins::StoreIC_Initialize));
            EmitCallIC(ic, RelocInfo::CODE_TARGET);
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
          } else {
            // Shadowed key: evaluate the value for side effects only.
            VisitForEffect(value);
          }
          break;
        }
        // Fall through.
      case ObjectLiteral::Property::PROTOTYPE:
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ push(Immediate(Smi::FromInt(NONE)));  // PropertyAttributes
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::SETTER:
      case ObjectLiteral::Property::GETTER:
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        // Flag selects setter (1) vs getter (0) for kDefineAccessor.
        __ push(Immediate(property->kind() == ObjectLiteral::Property::SETTER ?
                              Smi::FromInt(1) :
                              Smi::FromInt(0)));
        VisitForStackValue(value);
        __ CallRuntime(Runtime::kDefineAccessor, 4);
        break;
      default: UNREACHABLE();
    }
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}
1411
+
1412
+
1413
// Emits code that creates an array literal and stores its non-constant
// element subexpressions, plugging the result into the current context.
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
  __ push(Immediate(Smi::FromInt(expr->literal_index())));
  __ push(Immediate(expr->constant_elements()));
  if (expr->constant_elements()->map() == Heap::fixed_cow_array_map()) {
    // Copy-on-write elements: clone can share the backing store.
    ASSERT(expr->depth() == 1);
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
    __ CallStub(&stub);
    __ IncrementCounter(&Counters::cow_arrays_created_stub, 1);
  } else if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
  } else {
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
    __ CallStub(&stub);
  }

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (subexpr->AsLiteral() != NULL ||
        CompileTimeValue::IsCompileTimeValue(subexpr)) {
      continue;
    }

    if (!result_saved) {
      __ push(eax);  // Save the array so it survives element evaluation.
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    // Store the subexpression value in the array's elements.
    __ mov(ebx, Operand(esp, 0));  // Copy of array literal.
    __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
    int offset = FixedArray::kHeaderSize + (i * kPointerSize);
    __ mov(FieldOperand(ebx, offset), result_register());

    // Update the write barrier for the array store.
    __ RecordWrite(ebx, offset, result_register(), ecx);

    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}
1476
+
1477
+
1478
// Emits code for an assignment expression: evaluates the left-hand side
// (variable, named property, or keyed property), handles compound
// assignments (e.g. +=) by loading the old value and applying the binary
// op, then stores the result and plugs it into the current context.
void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  Comment cmnt(masm_, "[ Assignment");
  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
  // on the left-hand side.
  if (!expr->target()->IsValidLeftHandSide()) {
    VisitForEffect(expr->target());
    return;
  }

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the accumulator.
        VisitForAccumulatorValue(property->obj());
        __ push(result_register());
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_PROPERTY: {
      if (expr->is_compound()) {
        if (property->is_arguments_access()) {
          // Arguments access: load the slot directly instead of
          // evaluating the object/key subexpressions.
          VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
          MemOperand slot_operand =
              EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx);
          __ push(slot_operand);
          __ mov(eax, Immediate(property->key()->AsLiteral()->handle()));
        } else {
          VisitForStackValue(property->obj());
          VisitForAccumulatorValue(property->key());
        }
        __ mov(edx, Operand(esp, 0));
        __ push(eax);
      } else {
        if (property->is_arguments_access()) {
          VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
          MemOperand slot_operand =
              EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx);
          __ push(slot_operand);
          __ push(Immediate(property->key()->AsLiteral()->handle()));
        } else {
          VisitForStackValue(property->obj());
          VisitForStackValue(property->key());
        }
      }
      break;
    }
  }

  if (expr->is_compound()) {
    // Load the current value of the LHS into the accumulator.
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy()->var());
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          break;
      }
    }

    // For property compound assignments we need another deoptimization
    // point after the property load.
    if (property != NULL) {
      PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
    }

    Token::Value op = expr->binary_op();
    __ push(eax);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
    SetSourcePosition(expr->position() + 1);
    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr,
                            op,
                            mode,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(op, mode);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  // Record source position before possible IC call.
  SetSourcePosition(expr->position());

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(eax);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}
1607
+
1608
+
1609
// Emits a named-property load through the LoadIC. Moves the property
// name into ecx as the IC expects; the receiver is set up by the caller
// (NOTE(review): callers appear to leave it in eax — confirm at call
// sites). Result is left in eax by the IC.
void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  __ mov(ecx, Immediate(key->handle()));
  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
  EmitCallIC(ic, RelocInfo::CODE_TARGET);
}
1616
+
1617
+
1618
// Emits a keyed-property load through the KeyedLoadIC. Receiver and key
// registers are set up by the caller; the IC leaves its result in eax.
void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
  EmitCallIC(ic, RelocInfo::CODE_TARGET);
}
1623
+
1624
+
1625
// Emits an inlined smi fast path for a binary operation, falling back to
// the TypeRecordingBinaryOpStub (via a patchable jump site) when either
// operand is not a smi or the smi result would overflow. Left operand is
// on the stack, right operand in eax; result ends up in eax.
void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Expression* left,
                                              Expression* right) {
  // Do combined smi check of the operands. Left operand is on the
  // stack. Right operand is in eax.
  NearLabel done, smi_case, stub_call;
  __ pop(edx);       // Left operand.
  __ mov(ecx, eax);  // Keep the right operand in ecx.
  __ or_(eax, Operand(edx));  // Combined tag check of both operands.
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(eax, &smi_case);

  __ bind(&stub_call);
  __ mov(eax, ecx);  // Restore right operand for the stub.
  TypeRecordingBinaryOpStub stub(op, mode);
  EmitCallIC(stub.GetCode(), &patch_site);
  __ jmp(&done);

  // Smi case.
  __ bind(&smi_case);
  __ mov(eax, edx);  // Copy left operand in case of a stub call.

  switch (op) {
    case Token::SAR:
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ sar_cl(eax);  // No checks of result necessary
      __ SmiTag(eax);
      break;
    case Token::SHL: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shl_cl(eax);
      // Check that the *signed* result fits in a smi.
      __ cmp(eax, 0xc0000000);
      __ j(positive, &result_ok);
      __ SmiTag(ecx);  // Re-tag the shift count before the stub call.
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::SHR: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shr_cl(eax);
      // The unsigned result must fit in the 30-bit smi payload.
      __ test(eax, Immediate(0xc0000000));
      __ j(zero, &result_ok);
      __ SmiTag(ecx);  // Re-tag the shift count before the stub call.
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::ADD:
      __ add(eax, Operand(ecx));
      __ j(overflow, &stub_call);
      break;
    case Token::SUB:
      __ sub(eax, Operand(ecx));
      __ j(overflow, &stub_call);
      break;
    case Token::MUL: {
      __ SmiUntag(eax);
      __ imul(eax, Operand(ecx));
      __ j(overflow, &stub_call);
      __ test(eax, Operand(eax));
      __ j(not_zero, &done, taken);
      // Result is zero: if either operand was negative the true result
      // is -0, which is not representable as a smi — go to the stub.
      __ mov(ebx, edx);
      __ or_(ebx, Operand(ecx));
      __ j(negative, &stub_call);
      break;
    }
    case Token::BIT_OR:
      __ or_(eax, Operand(ecx));
      break;
    case Token::BIT_AND:
      __ and_(eax, Operand(ecx));
      break;
    case Token::BIT_XOR:
      __ xor_(eax, Operand(ecx));
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(eax);
}
1718
+
1719
+
1720
// Emits a generic (non-inlined) binary operation via the
// TypeRecordingBinaryOpStub. Left operand is popped from the stack into
// edx; right operand is in eax; result is plugged from eax.
void FullCodeGenerator::EmitBinaryOp(Token::Value op,
                                     OverwriteMode mode) {
  __ pop(edx);
  TypeRecordingBinaryOpStub stub(op, mode);
  EmitCallIC(stub.GetCode(), NULL);  // NULL signals no inlined smi code.
  context()->Plug(eax);
}
1727
+
1728
+
1729
// Emits an assignment of the value in eax to the target `expr` as if via
// '=' (used e.g. by for-in to assign the 'each' variable), then records a
// bailout point for `bailout_ast_id` and plugs eax.
void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
  // Invalid left-hand sides are rewritten to have a 'throw
  // ReferenceError' on the left-hand side.
  if (!expr->IsValidLeftHandSide()) {
    VisitForEffect(expr);
    return;
  }

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
  if (prop != NULL) {
    assign_type = (prop->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
      break;
    }
    case NAMED_PROPERTY: {
      __ push(eax);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ mov(edx, eax);  // Receiver for the StoreIC.
      __ pop(eax);  // Restore value.
      __ mov(ecx, prop->key()->AsLiteral()->handle());
      Handle<Code> ic(Builtins::builtin(
          is_strict() ? Builtins::StoreIC_Initialize_Strict
                      : Builtins::StoreIC_Initialize));
      EmitCallIC(ic, RelocInfo::CODE_TARGET);
      break;
    }
    case KEYED_PROPERTY: {
      __ push(eax);  // Preserve value.
      if (prop->is_synthetic()) {
        // Synthetic property (rewritten arguments access): load the
        // receiver from its slot and use the literal smi key directly.
        ASSERT(prop->obj()->AsVariableProxy() != NULL);
        ASSERT(prop->key()->AsLiteral() != NULL);
        { AccumulatorValueContext for_object(this);
          EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
        }
        __ mov(edx, eax);
        __ Set(ecx, Immediate(prop->key()->AsLiteral()->handle()));
      } else {
        VisitForStackValue(prop->obj());
        VisitForAccumulatorValue(prop->key());
        __ mov(ecx, eax);  // Key.
        __ pop(edx);       // Receiver.
      }
      __ pop(eax);  // Restore value.
      Handle<Code> ic(Builtins::builtin(
          is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict
                      : Builtins::KeyedStoreIC_Initialize));
      EmitCallIC(ic, RelocInfo::CODE_TARGET);
      break;
    }
  }
  PrepareForBailoutForId(bailout_ast_id, TOS_REG);
  context()->Plug(eax);
}
1794
+
1795
+
1796
+ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
1797
+ Token::Value op) {
1798
+ // Left-hand sides that rewrite to explicit property accesses do not reach
1799
+ // here.
1800
+ ASSERT(var != NULL);
1801
+ ASSERT(var->is_global() || var->AsSlot() != NULL);
1802
+
1803
+ if (var->is_global()) {
1804
+ ASSERT(!var->is_this());
1805
+ // Assignment to a global variable. Use inline caching for the
1806
+ // assignment. Right-hand-side value is passed in eax, variable name in
1807
+ // ecx, and the global object on the stack.
1808
+ __ mov(ecx, var->name());
1809
+ __ mov(edx, GlobalObjectOperand());
1810
+ Handle<Code> ic(Builtins::builtin(
1811
+ is_strict() ? Builtins::StoreIC_Initialize_Strict
1812
+ : Builtins::StoreIC_Initialize));
1813
+ EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
1814
+
1815
+ } else if (op == Token::INIT_CONST) {
1816
+ // Like var declarations, const declarations are hoisted to function
1817
+ // scope. However, unlike var initializers, const initializers are able
1818
+ // to drill a hole to that function context, even from inside a 'with'
1819
+ // context. We thus bypass the normal static scope lookup.
1820
+ Slot* slot = var->AsSlot();
1821
+ Label skip;
1822
+ switch (slot->type()) {
1823
+ case Slot::PARAMETER:
1824
+ // No const parameters.
1825
+ UNREACHABLE();
1826
+ break;
1827
+ case Slot::LOCAL:
1828
+ __ mov(edx, Operand(ebp, SlotOffset(slot)));
1829
+ __ cmp(edx, Factory::the_hole_value());
1830
+ __ j(not_equal, &skip);
1831
+ __ mov(Operand(ebp, SlotOffset(slot)), eax);
1832
+ break;
1833
+ case Slot::CONTEXT: {
1834
+ __ mov(ecx, ContextOperand(esi, Context::FCONTEXT_INDEX));
1835
+ __ mov(edx, ContextOperand(ecx, slot->index()));
1836
+ __ cmp(edx, Factory::the_hole_value());
1837
+ __ j(not_equal, &skip);
1838
+ __ mov(ContextOperand(ecx, slot->index()), eax);
1839
+ int offset = Context::SlotOffset(slot->index());
1840
+ __ mov(edx, eax); // Preserve the stored value in eax.
1841
+ __ RecordWrite(ecx, offset, edx, ebx);
1842
+ break;
1843
+ }
1844
+ case Slot::LOOKUP:
1845
+ __ push(eax);
1846
+ __ push(esi);
1847
+ __ push(Immediate(var->name()));
1848
+ __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
1849
+ break;
1850
+ }
1851
+ __ bind(&skip);
1852
+
1853
+ } else if (var->mode() != Variable::CONST) {
1854
+ // Perform the assignment for non-const variables. Const assignments
1855
+ // are simply skipped.
1856
+ Slot* slot = var->AsSlot();
1857
+ switch (slot->type()) {
1858
+ case Slot::PARAMETER:
1859
+ case Slot::LOCAL:
1860
+ // Perform the assignment.
1861
+ __ mov(Operand(ebp, SlotOffset(slot)), eax);
1862
+ break;
1863
+
1864
+ case Slot::CONTEXT: {
1865
+ MemOperand target = EmitSlotSearch(slot, ecx);
1866
+ // Perform the assignment and issue the write barrier.
1867
+ __ mov(target, eax);
1868
+ // The value of the assignment is in eax. RecordWrite clobbers its
1869
+ // register arguments.
1870
+ __ mov(edx, eax);
1871
+ int offset = Context::SlotOffset(slot->index());
1872
+ __ RecordWrite(ecx, offset, edx, ebx);
1873
+ break;
1874
+ }
1875
+
1876
+ case Slot::LOOKUP:
1877
+ // Call the runtime for the assignment.
1878
+ __ push(eax); // Value.
1879
+ __ push(esi); // Context.
1880
+ __ push(Immediate(var->name()));
1881
+ __ push(Immediate(Smi::FromInt(strict_mode_flag())));
1882
+ __ CallRuntime(Runtime::kStoreContextSlot, 4);
1883
+ break;
1884
+ }
1885
+ }
1886
+ }
1887
+
1888
+
1889
+ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
1890
+ // Assignment to a property, using a named store IC.
1891
+ Property* prop = expr->target()->AsProperty();
1892
+ ASSERT(prop != NULL);
1893
+ ASSERT(prop->key()->AsLiteral() != NULL);
1894
+
1895
+ // If the assignment starts a block of assignments to the same object,
1896
+ // change to slow case to avoid the quadratic behavior of repeatedly
1897
+ // adding fast properties.
1898
+ if (expr->starts_initialization_block()) {
1899
+ __ push(result_register());
1900
+ __ push(Operand(esp, kPointerSize)); // Receiver is now under value.
1901
+ __ CallRuntime(Runtime::kToSlowProperties, 1);
1902
+ __ pop(result_register());
1903
+ }
1904
+
1905
+ // Record source code position before IC call.
1906
+ SetSourcePosition(expr->position());
1907
+ __ mov(ecx, prop->key()->AsLiteral()->handle());
1908
+ if (expr->ends_initialization_block()) {
1909
+ __ mov(edx, Operand(esp, 0));
1910
+ } else {
1911
+ __ pop(edx);
1912
+ }
1913
+ Handle<Code> ic(Builtins::builtin(
1914
+ is_strict() ? Builtins::StoreIC_Initialize_Strict
1915
+ : Builtins::StoreIC_Initialize));
1916
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
1917
+
1918
+ // If the assignment ends an initialization block, revert to fast case.
1919
+ if (expr->ends_initialization_block()) {
1920
+ __ push(eax); // Result of assignment, saved even if not needed.
1921
+ __ push(Operand(esp, kPointerSize)); // Receiver is under value.
1922
+ __ CallRuntime(Runtime::kToFastProperties, 1);
1923
+ __ pop(eax);
1924
+ __ Drop(1);
1925
+ }
1926
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1927
+ context()->Plug(eax);
1928
+ }
1929
+
1930
+
1931
+ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
1932
+ // Assignment to a property, using a keyed store IC.
1933
+
1934
+ // If the assignment starts a block of assignments to the same object,
1935
+ // change to slow case to avoid the quadratic behavior of repeatedly
1936
+ // adding fast properties.
1937
+ if (expr->starts_initialization_block()) {
1938
+ __ push(result_register());
1939
+ // Receiver is now under the key and value.
1940
+ __ push(Operand(esp, 2 * kPointerSize));
1941
+ __ CallRuntime(Runtime::kToSlowProperties, 1);
1942
+ __ pop(result_register());
1943
+ }
1944
+
1945
+ __ pop(ecx);
1946
+ if (expr->ends_initialization_block()) {
1947
+ __ mov(edx, Operand(esp, 0)); // Leave receiver on the stack for later.
1948
+ } else {
1949
+ __ pop(edx);
1950
+ }
1951
+ // Record source code position before IC call.
1952
+ SetSourcePosition(expr->position());
1953
+ Handle<Code> ic(Builtins::builtin(
1954
+ is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict
1955
+ : Builtins::KeyedStoreIC_Initialize));
1956
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
1957
+
1958
+ // If the assignment ends an initialization block, revert to fast case.
1959
+ if (expr->ends_initialization_block()) {
1960
+ __ pop(edx);
1961
+ __ push(eax); // Result of assignment, saved even if not needed.
1962
+ __ push(edx);
1963
+ __ CallRuntime(Runtime::kToFastProperties, 1);
1964
+ __ pop(eax);
1965
+ }
1966
+
1967
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1968
+ context()->Plug(eax);
1969
+ }
1970
+
1971
+
1972
+ void FullCodeGenerator::VisitProperty(Property* expr) {
1973
+ Comment cmnt(masm_, "[ Property");
1974
+ Expression* key = expr->key();
1975
+
1976
+ if (key->IsPropertyName()) {
1977
+ VisitForAccumulatorValue(expr->obj());
1978
+ EmitNamedPropertyLoad(expr);
1979
+ context()->Plug(eax);
1980
+ } else {
1981
+ VisitForStackValue(expr->obj());
1982
+ VisitForAccumulatorValue(expr->key());
1983
+ __ pop(edx);
1984
+ EmitKeyedPropertyLoad(expr);
1985
+ context()->Plug(eax);
1986
+ }
1987
+ }
1988
+
1989
+
1990
+ void FullCodeGenerator::EmitCallWithIC(Call* expr,
1991
+ Handle<Object> name,
1992
+ RelocInfo::Mode mode) {
1993
+ // Code common for calls using the IC.
1994
+ ZoneList<Expression*>* args = expr->arguments();
1995
+ int arg_count = args->length();
1996
+ { PreservePositionScope scope(masm()->positions_recorder());
1997
+ for (int i = 0; i < arg_count; i++) {
1998
+ VisitForStackValue(args->at(i));
1999
+ }
2000
+ __ Set(ecx, Immediate(name));
2001
+ }
2002
+ // Record source position of the IC call.
2003
+ SetSourcePosition(expr->position());
2004
+ InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2005
+ Handle<Code> ic = StubCache::ComputeCallInitialize(arg_count, in_loop);
2006
+ EmitCallIC(ic, mode);
2007
+ RecordJSReturnSite(expr);
2008
+ // Restore context register.
2009
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2010
+ context()->Plug(eax);
2011
+ }
2012
+
2013
+
2014
+ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2015
+ Expression* key,
2016
+ RelocInfo::Mode mode) {
2017
+ // Load the key.
2018
+ VisitForAccumulatorValue(key);
2019
+
2020
+ // Swap the name of the function and the receiver on the stack to follow
2021
+ // the calling convention for call ICs.
2022
+ __ pop(ecx);
2023
+ __ push(eax);
2024
+ __ push(ecx);
2025
+
2026
+ // Load the arguments.
2027
+ ZoneList<Expression*>* args = expr->arguments();
2028
+ int arg_count = args->length();
2029
+ { PreservePositionScope scope(masm()->positions_recorder());
2030
+ for (int i = 0; i < arg_count; i++) {
2031
+ VisitForStackValue(args->at(i));
2032
+ }
2033
+ }
2034
+ // Record source position of the IC call.
2035
+ SetSourcePosition(expr->position());
2036
+ InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2037
+ Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arg_count, in_loop);
2038
+ __ mov(ecx, Operand(esp, (arg_count + 1) * kPointerSize)); // Key.
2039
+ EmitCallIC(ic, mode);
2040
+ RecordJSReturnSite(expr);
2041
+ // Restore context register.
2042
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2043
+ context()->DropAndPlug(1, eax); // Drop the key still on the stack.
2044
+ }
2045
+
2046
+
2047
+ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
2048
+ // Code common for calls using the call stub.
2049
+ ZoneList<Expression*>* args = expr->arguments();
2050
+ int arg_count = args->length();
2051
+ { PreservePositionScope scope(masm()->positions_recorder());
2052
+ for (int i = 0; i < arg_count; i++) {
2053
+ VisitForStackValue(args->at(i));
2054
+ }
2055
+ }
2056
+ // Record source position for debugger.
2057
+ SetSourcePosition(expr->position());
2058
+ InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2059
+ CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
2060
+ __ CallStub(&stub);
2061
+ RecordJSReturnSite(expr);
2062
+ // Restore context register.
2063
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2064
+ context()->DropAndPlug(1, eax);
2065
+ }
2066
+
2067
+
2068
+ void FullCodeGenerator::EmitResolvePossiblyDirectEval(ResolveEvalFlag flag,
2069
+ int arg_count) {
2070
+ // Push copy of the first argument or undefined if it doesn't exist.
2071
+ if (arg_count > 0) {
2072
+ __ push(Operand(esp, arg_count * kPointerSize));
2073
+ } else {
2074
+ __ push(Immediate(Factory::undefined_value()));
2075
+ }
2076
+
2077
+ // Push the receiver of the enclosing function.
2078
+ __ push(Operand(ebp, (2 + scope()->num_parameters()) * kPointerSize));
2079
+
2080
+ // Push the strict mode flag.
2081
+ __ push(Immediate(Smi::FromInt(strict_mode_flag())));
2082
+
2083
+ __ CallRuntime(flag == SKIP_CONTEXT_LOOKUP
2084
+ ? Runtime::kResolvePossiblyDirectEvalNoLookup
2085
+ : Runtime::kResolvePossiblyDirectEval, 4);
2086
+ }
2087
+
2088
+
2089
+ void FullCodeGenerator::VisitCall(Call* expr) {
2090
+ #ifdef DEBUG
2091
+ // We want to verify that RecordJSReturnSite gets called on all paths
2092
+ // through this function. Avoid early returns.
2093
+ expr->return_is_recorded_ = false;
2094
+ #endif
2095
+
2096
+ Comment cmnt(masm_, "[ Call");
2097
+ Expression* fun = expr->expression();
2098
+ Variable* var = fun->AsVariableProxy()->AsVariable();
2099
+
2100
+ if (var != NULL && var->is_possibly_eval()) {
2101
+ // In a call to eval, we first call %ResolvePossiblyDirectEval to
2102
+ // resolve the function we need to call and the receiver of the
2103
+ // call. Then we call the resolved function using the given
2104
+ // arguments.
2105
+ ZoneList<Expression*>* args = expr->arguments();
2106
+ int arg_count = args->length();
2107
+ { PreservePositionScope pos_scope(masm()->positions_recorder());
2108
+ VisitForStackValue(fun);
2109
+ // Reserved receiver slot.
2110
+ __ push(Immediate(Factory::undefined_value()));
2111
+
2112
+ // Push the arguments.
2113
+ for (int i = 0; i < arg_count; i++) {
2114
+ VisitForStackValue(args->at(i));
2115
+ }
2116
+
2117
+ // If we know that eval can only be shadowed by eval-introduced
2118
+ // variables we attempt to load the global eval function directly
2119
+ // in generated code. If we succeed, there is no need to perform a
2120
+ // context lookup in the runtime system.
2121
+ Label done;
2122
+ if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
2123
+ Label slow;
2124
+ EmitLoadGlobalSlotCheckExtensions(var->AsSlot(),
2125
+ NOT_INSIDE_TYPEOF,
2126
+ &slow);
2127
+ // Push the function and resolve eval.
2128
+ __ push(eax);
2129
+ EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count);
2130
+ __ jmp(&done);
2131
+ __ bind(&slow);
2132
+ }
2133
+
2134
+ // Push copy of the function (found below the arguments) and
2135
+ // resolve eval.
2136
+ __ push(Operand(esp, (arg_count + 1) * kPointerSize));
2137
+ EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count);
2138
+ if (done.is_linked()) {
2139
+ __ bind(&done);
2140
+ }
2141
+
2142
+ // The runtime call returns a pair of values in eax (function) and
2143
+ // edx (receiver). Touch up the stack with the right values.
2144
+ __ mov(Operand(esp, (arg_count + 0) * kPointerSize), edx);
2145
+ __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
2146
+ }
2147
+ // Record source position for debugger.
2148
+ SetSourcePosition(expr->position());
2149
+ InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2150
+ CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
2151
+ __ CallStub(&stub);
2152
+ RecordJSReturnSite(expr);
2153
+ // Restore context register.
2154
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2155
+ context()->DropAndPlug(1, eax);
2156
+ } else if (var != NULL && !var->is_this() && var->is_global()) {
2157
+ // Push global object as receiver for the call IC.
2158
+ __ push(GlobalObjectOperand());
2159
+ EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);
2160
+ } else if (var != NULL && var->AsSlot() != NULL &&
2161
+ var->AsSlot()->type() == Slot::LOOKUP) {
2162
+ // Call to a lookup slot (dynamically introduced variable).
2163
+ Label slow, done;
2164
+
2165
+ { PreservePositionScope scope(masm()->positions_recorder());
2166
+ // Generate code for loading from variables potentially shadowed
2167
+ // by eval-introduced variables.
2168
+ EmitDynamicLoadFromSlotFastCase(var->AsSlot(),
2169
+ NOT_INSIDE_TYPEOF,
2170
+ &slow,
2171
+ &done);
2172
+ }
2173
+
2174
+ __ bind(&slow);
2175
+ // Call the runtime to find the function to call (returned in eax)
2176
+ // and the object holding it (returned in edx).
2177
+ __ push(context_register());
2178
+ __ push(Immediate(var->name()));
2179
+ __ CallRuntime(Runtime::kLoadContextSlot, 2);
2180
+ __ push(eax); // Function.
2181
+ __ push(edx); // Receiver.
2182
+
2183
+ // If fast case code has been generated, emit code to push the
2184
+ // function and receiver and have the slow path jump around this
2185
+ // code.
2186
+ if (done.is_linked()) {
2187
+ Label call;
2188
+ __ jmp(&call);
2189
+ __ bind(&done);
2190
+ // Push function.
2191
+ __ push(eax);
2192
+ // Push global receiver.
2193
+ __ mov(ebx, GlobalObjectOperand());
2194
+ __ push(FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset));
2195
+ __ bind(&call);
2196
+ }
2197
+
2198
+ EmitCallWithStub(expr);
2199
+ } else if (fun->AsProperty() != NULL) {
2200
+ // Call to an object property.
2201
+ Property* prop = fun->AsProperty();
2202
+ Literal* key = prop->key()->AsLiteral();
2203
+ if (key != NULL && key->handle()->IsSymbol()) {
2204
+ // Call to a named property, use call IC.
2205
+ { PreservePositionScope scope(masm()->positions_recorder());
2206
+ VisitForStackValue(prop->obj());
2207
+ }
2208
+ EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
2209
+ } else {
2210
+ // Call to a keyed property.
2211
+ // For a synthetic property use keyed load IC followed by function call,
2212
+ // for a regular property use keyed EmitCallIC.
2213
+ if (prop->is_synthetic()) {
2214
+ // Do not visit the object and key subexpressions (they are shared
2215
+ // by all occurrences of the same rewritten parameter).
2216
+ ASSERT(prop->obj()->AsVariableProxy() != NULL);
2217
+ ASSERT(prop->obj()->AsVariableProxy()->var()->AsSlot() != NULL);
2218
+ Slot* slot = prop->obj()->AsVariableProxy()->var()->AsSlot();
2219
+ MemOperand operand = EmitSlotSearch(slot, edx);
2220
+ __ mov(edx, operand);
2221
+
2222
+ ASSERT(prop->key()->AsLiteral() != NULL);
2223
+ ASSERT(prop->key()->AsLiteral()->handle()->IsSmi());
2224
+ __ mov(eax, prop->key()->AsLiteral()->handle());
2225
+
2226
+ // Record source code position for IC call.
2227
+ SetSourcePosition(prop->position());
2228
+
2229
+ Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
2230
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
2231
+ // Push result (function).
2232
+ __ push(eax);
2233
+ // Push Global receiver.
2234
+ __ mov(ecx, GlobalObjectOperand());
2235
+ __ push(FieldOperand(ecx, GlobalObject::kGlobalReceiverOffset));
2236
+ EmitCallWithStub(expr);
2237
+ } else {
2238
+ { PreservePositionScope scope(masm()->positions_recorder());
2239
+ VisitForStackValue(prop->obj());
2240
+ }
2241
+ EmitKeyedCallWithIC(expr, prop->key(), RelocInfo::CODE_TARGET);
2242
+ }
2243
+ }
2244
+ } else {
2245
+ // Call to some other expression. If the expression is an anonymous
2246
+ // function literal not called in a loop, mark it as one that should
2247
+ // also use the full code generator.
2248
+ FunctionLiteral* lit = fun->AsFunctionLiteral();
2249
+ if (lit != NULL &&
2250
+ lit->name()->Equals(Heap::empty_string()) &&
2251
+ loop_depth() == 0) {
2252
+ lit->set_try_full_codegen(true);
2253
+ }
2254
+ { PreservePositionScope scope(masm()->positions_recorder());
2255
+ VisitForStackValue(fun);
2256
+ }
2257
+ // Load global receiver object.
2258
+ __ mov(ebx, GlobalObjectOperand());
2259
+ __ push(FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset));
2260
+ // Emit function call.
2261
+ EmitCallWithStub(expr);
2262
+ }
2263
+
2264
+ #ifdef DEBUG
2265
+ // RecordJSReturnSite should have been called.
2266
+ ASSERT(expr->return_is_recorded_);
2267
+ #endif
2268
+ }
2269
+
2270
+
2271
+ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2272
+ Comment cmnt(masm_, "[ CallNew");
2273
+ // According to ECMA-262, section 11.2.2, page 44, the function
2274
+ // expression in new calls must be evaluated before the
2275
+ // arguments.
2276
+
2277
+ // Push constructor on the stack. If it's not a function it's used as
2278
+ // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2279
+ // ignored.
2280
+ VisitForStackValue(expr->expression());
2281
+
2282
+ // Push the arguments ("left-to-right") on the stack.
2283
+ ZoneList<Expression*>* args = expr->arguments();
2284
+ int arg_count = args->length();
2285
+ for (int i = 0; i < arg_count; i++) {
2286
+ VisitForStackValue(args->at(i));
2287
+ }
2288
+
2289
+ // Call the construct call builtin that handles allocation and
2290
+ // constructor invocation.
2291
+ SetSourcePosition(expr->position());
2292
+
2293
+ // Load function and argument count into edi and eax.
2294
+ __ Set(eax, Immediate(arg_count));
2295
+ __ mov(edi, Operand(esp, arg_count * kPointerSize));
2296
+
2297
+ Handle<Code> construct_builtin(Builtins::builtin(Builtins::JSConstructCall));
2298
+ __ call(construct_builtin, RelocInfo::CONSTRUCT_CALL);
2299
+ context()->Plug(eax);
2300
+ }
2301
+
2302
+
2303
+ void FullCodeGenerator::EmitIsSmi(ZoneList<Expression*>* args) {
2304
+ ASSERT(args->length() == 1);
2305
+
2306
+ VisitForAccumulatorValue(args->at(0));
2307
+
2308
+ Label materialize_true, materialize_false;
2309
+ Label* if_true = NULL;
2310
+ Label* if_false = NULL;
2311
+ Label* fall_through = NULL;
2312
+ context()->PrepareTest(&materialize_true, &materialize_false,
2313
+ &if_true, &if_false, &fall_through);
2314
+
2315
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2316
+ __ test(eax, Immediate(kSmiTagMask));
2317
+ Split(zero, if_true, if_false, fall_through);
2318
+
2319
+ context()->Plug(if_true, if_false);
2320
+ }
2321
+
2322
+
2323
+ void FullCodeGenerator::EmitIsNonNegativeSmi(ZoneList<Expression*>* args) {
2324
+ ASSERT(args->length() == 1);
2325
+
2326
+ VisitForAccumulatorValue(args->at(0));
2327
+
2328
+ Label materialize_true, materialize_false;
2329
+ Label* if_true = NULL;
2330
+ Label* if_false = NULL;
2331
+ Label* fall_through = NULL;
2332
+ context()->PrepareTest(&materialize_true, &materialize_false,
2333
+ &if_true, &if_false, &fall_through);
2334
+
2335
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2336
+ __ test(eax, Immediate(kSmiTagMask | 0x80000000));
2337
+ Split(zero, if_true, if_false, fall_through);
2338
+
2339
+ context()->Plug(if_true, if_false);
2340
+ }
2341
+
2342
+
2343
+ void FullCodeGenerator::EmitIsObject(ZoneList<Expression*>* args) {
2344
+ ASSERT(args->length() == 1);
2345
+
2346
+ VisitForAccumulatorValue(args->at(0));
2347
+
2348
+ Label materialize_true, materialize_false;
2349
+ Label* if_true = NULL;
2350
+ Label* if_false = NULL;
2351
+ Label* fall_through = NULL;
2352
+ context()->PrepareTest(&materialize_true, &materialize_false,
2353
+ &if_true, &if_false, &fall_through);
2354
+
2355
+ __ test(eax, Immediate(kSmiTagMask));
2356
+ __ j(zero, if_false);
2357
+ __ cmp(eax, Factory::null_value());
2358
+ __ j(equal, if_true);
2359
+ __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2360
+ // Undetectable objects behave like undefined when tested with typeof.
2361
+ __ movzx_b(ecx, FieldOperand(ebx, Map::kBitFieldOffset));
2362
+ __ test(ecx, Immediate(1 << Map::kIsUndetectable));
2363
+ __ j(not_zero, if_false);
2364
+ __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceTypeOffset));
2365
+ __ cmp(ecx, FIRST_JS_OBJECT_TYPE);
2366
+ __ j(below, if_false);
2367
+ __ cmp(ecx, LAST_JS_OBJECT_TYPE);
2368
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2369
+ Split(below_equal, if_true, if_false, fall_through);
2370
+
2371
+ context()->Plug(if_true, if_false);
2372
+ }
2373
+
2374
+
2375
+ void FullCodeGenerator::EmitIsSpecObject(ZoneList<Expression*>* args) {
2376
+ ASSERT(args->length() == 1);
2377
+
2378
+ VisitForAccumulatorValue(args->at(0));
2379
+
2380
+ Label materialize_true, materialize_false;
2381
+ Label* if_true = NULL;
2382
+ Label* if_false = NULL;
2383
+ Label* fall_through = NULL;
2384
+ context()->PrepareTest(&materialize_true, &materialize_false,
2385
+ &if_true, &if_false, &fall_through);
2386
+
2387
+ __ test(eax, Immediate(kSmiTagMask));
2388
+ __ j(equal, if_false);
2389
+ __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ebx);
2390
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2391
+ Split(above_equal, if_true, if_false, fall_through);
2392
+
2393
+ context()->Plug(if_true, if_false);
2394
+ }
2395
+
2396
+
2397
+ void FullCodeGenerator::EmitIsUndetectableObject(ZoneList<Expression*>* args) {
2398
+ ASSERT(args->length() == 1);
2399
+
2400
+ VisitForAccumulatorValue(args->at(0));
2401
+
2402
+ Label materialize_true, materialize_false;
2403
+ Label* if_true = NULL;
2404
+ Label* if_false = NULL;
2405
+ Label* fall_through = NULL;
2406
+ context()->PrepareTest(&materialize_true, &materialize_false,
2407
+ &if_true, &if_false, &fall_through);
2408
+
2409
+ __ test(eax, Immediate(kSmiTagMask));
2410
+ __ j(zero, if_false);
2411
+ __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2412
+ __ movzx_b(ebx, FieldOperand(ebx, Map::kBitFieldOffset));
2413
+ __ test(ebx, Immediate(1 << Map::kIsUndetectable));
2414
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2415
+ Split(not_zero, if_true, if_false, fall_through);
2416
+
2417
+ context()->Plug(if_true, if_false);
2418
+ }
2419
+
2420
+
2421
+ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2422
+ ZoneList<Expression*>* args) {
2423
+ ASSERT(args->length() == 1);
2424
+
2425
+ VisitForAccumulatorValue(args->at(0));
2426
+
2427
+ Label materialize_true, materialize_false;
2428
+ Label* if_true = NULL;
2429
+ Label* if_false = NULL;
2430
+ Label* fall_through = NULL;
2431
+ context()->PrepareTest(&materialize_true, &materialize_false,
2432
+ &if_true, &if_false, &fall_through);
2433
+
2434
+ // TODO(3110205): Implement this.
2435
+ // Currently unimplemented. Emit false, a safe choice.
2436
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2437
+ __ jmp(if_false);
2438
+ context()->Plug(if_true, if_false);
2439
+ }
2440
+
2441
+
2442
+ void FullCodeGenerator::EmitIsFunction(ZoneList<Expression*>* args) {
2443
+ ASSERT(args->length() == 1);
2444
+
2445
+ VisitForAccumulatorValue(args->at(0));
2446
+
2447
+ Label materialize_true, materialize_false;
2448
+ Label* if_true = NULL;
2449
+ Label* if_false = NULL;
2450
+ Label* fall_through = NULL;
2451
+ context()->PrepareTest(&materialize_true, &materialize_false,
2452
+ &if_true, &if_false, &fall_through);
2453
+
2454
+ __ test(eax, Immediate(kSmiTagMask));
2455
+ __ j(zero, if_false);
2456
+ __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
2457
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2458
+ Split(equal, if_true, if_false, fall_through);
2459
+
2460
+ context()->Plug(if_true, if_false);
2461
+ }
2462
+
2463
+
2464
+ void FullCodeGenerator::EmitIsArray(ZoneList<Expression*>* args) {
2465
+ ASSERT(args->length() == 1);
2466
+
2467
+ VisitForAccumulatorValue(args->at(0));
2468
+
2469
+ Label materialize_true, materialize_false;
2470
+ Label* if_true = NULL;
2471
+ Label* if_false = NULL;
2472
+ Label* fall_through = NULL;
2473
+ context()->PrepareTest(&materialize_true, &materialize_false,
2474
+ &if_true, &if_false, &fall_through);
2475
+
2476
+ __ test(eax, Immediate(kSmiTagMask));
2477
+ __ j(equal, if_false);
2478
+ __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
2479
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2480
+ Split(equal, if_true, if_false, fall_through);
2481
+
2482
+ context()->Plug(if_true, if_false);
2483
+ }
2484
+
2485
+
2486
+ void FullCodeGenerator::EmitIsRegExp(ZoneList<Expression*>* args) {
2487
+ ASSERT(args->length() == 1);
2488
+
2489
+ VisitForAccumulatorValue(args->at(0));
2490
+
2491
+ Label materialize_true, materialize_false;
2492
+ Label* if_true = NULL;
2493
+ Label* if_false = NULL;
2494
+ Label* fall_through = NULL;
2495
+ context()->PrepareTest(&materialize_true, &materialize_false,
2496
+ &if_true, &if_false, &fall_through);
2497
+
2498
+ __ test(eax, Immediate(kSmiTagMask));
2499
+ __ j(equal, if_false);
2500
+ __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
2501
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2502
+ Split(equal, if_true, if_false, fall_through);
2503
+
2504
+ context()->Plug(if_true, if_false);
2505
+ }
2506
+
2507
+
2508
+
2509
+ void FullCodeGenerator::EmitIsConstructCall(ZoneList<Expression*>* args) {
2510
+ ASSERT(args->length() == 0);
2511
+
2512
+ Label materialize_true, materialize_false;
2513
+ Label* if_true = NULL;
2514
+ Label* if_false = NULL;
2515
+ Label* fall_through = NULL;
2516
+ context()->PrepareTest(&materialize_true, &materialize_false,
2517
+ &if_true, &if_false, &fall_through);
2518
+
2519
+ // Get the frame pointer for the calling frame.
2520
+ __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2521
+
2522
+ // Skip the arguments adaptor frame if it exists.
2523
+ Label check_frame_marker;
2524
+ __ cmp(Operand(eax, StandardFrameConstants::kContextOffset),
2525
+ Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2526
+ __ j(not_equal, &check_frame_marker);
2527
+ __ mov(eax, Operand(eax, StandardFrameConstants::kCallerFPOffset));
2528
+
2529
+ // Check the marker in the calling frame.
2530
+ __ bind(&check_frame_marker);
2531
+ __ cmp(Operand(eax, StandardFrameConstants::kMarkerOffset),
2532
+ Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
2533
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2534
+ Split(equal, if_true, if_false, fall_through);
2535
+
2536
+ context()->Plug(if_true, if_false);
2537
+ }
2538
+
2539
+
2540
+ void FullCodeGenerator::EmitObjectEquals(ZoneList<Expression*>* args) {
2541
+ ASSERT(args->length() == 2);
2542
+
2543
+ // Load the two objects into registers and perform the comparison.
2544
+ VisitForStackValue(args->at(0));
2545
+ VisitForAccumulatorValue(args->at(1));
2546
+
2547
+ Label materialize_true, materialize_false;
2548
+ Label* if_true = NULL;
2549
+ Label* if_false = NULL;
2550
+ Label* fall_through = NULL;
2551
+ context()->PrepareTest(&materialize_true, &materialize_false,
2552
+ &if_true, &if_false, &fall_through);
2553
+
2554
+ __ pop(ebx);
2555
+ __ cmp(eax, Operand(ebx));
2556
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2557
+ Split(equal, if_true, if_false, fall_through);
2558
+
2559
+ context()->Plug(if_true, if_false);
2560
+ }
2561
+
2562
+
2563
+ void FullCodeGenerator::EmitArguments(ZoneList<Expression*>* args) {
2564
+ ASSERT(args->length() == 1);
2565
+
2566
+ // ArgumentsAccessStub expects the key in edx and the formal
2567
+ // parameter count in eax.
2568
+ VisitForAccumulatorValue(args->at(0));
2569
+ __ mov(edx, eax);
2570
+ __ mov(eax, Immediate(Smi::FromInt(scope()->num_parameters())));
2571
+ ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
2572
+ __ CallStub(&stub);
2573
+ context()->Plug(eax);
2574
+ }
2575
+
2576
+
2577
+ void FullCodeGenerator::EmitArgumentsLength(ZoneList<Expression*>* args) {
2578
+ ASSERT(args->length() == 0);
2579
+
2580
+ Label exit;
2581
+ // Get the number of formal parameters.
2582
+ __ Set(eax, Immediate(Smi::FromInt(scope()->num_parameters())));
2583
+
2584
+ // Check if the calling frame is an arguments adaptor frame.
2585
+ __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2586
+ __ cmp(Operand(ebx, StandardFrameConstants::kContextOffset),
2587
+ Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2588
+ __ j(not_equal, &exit);
2589
+
2590
+ // Arguments adaptor case: Read the arguments length from the
2591
+ // adaptor frame.
2592
+ __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2593
+
2594
+ __ bind(&exit);
2595
+ if (FLAG_debug_code) __ AbortIfNotSmi(eax);
2596
+ context()->Plug(eax);
2597
+ }
2598
+
2599
+
2600
// Emits ia32 code computing the "class" of the single argument and leaves
// the result in eax: null for smis and non-JS-objects, the symbol 'Function'
// for JS functions, the constructor's instance class name for objects with a
// function constructor, and 'Object' otherwise.
void FullCodeGenerator::EmitClassOf(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is a smi, we return null.
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &null);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, eax);  // Map is now in eax.
  __ j(below, &null);

  // As long as JS_FUNCTION_TYPE is the last instance type and it is
  // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
  // LAST_JS_OBJECT_TYPE.
  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
  ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
  __ CmpInstanceType(eax, JS_FUNCTION_TYPE);
  __ j(equal, &function);

  // Check if the constructor in the map is a function.
  __ mov(eax, FieldOperand(eax, Map::kConstructorOffset));
  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
  __ j(not_equal, &non_function_constructor);

  // eax now contains the constructor function. Grab the
  // instance class name from there.
  __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
  __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
  __ jmp(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ mov(eax, Factory::function_class_symbol());
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ mov(eax, Factory::Object_symbol());
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ mov(eax, Factory::null_value());

  // All done.
  __ bind(&done);

  context()->Plug(eax);
}
2653
+
2654
+
2655
// Emits a conditional call to Runtime::kLog when logging/profiling support is
// compiled in and the log type (arg 0) is enabled; always leaves undefined in
// eax as the expression's value.
void FullCodeGenerator::EmitLog(ZoneList<Expression*>* args) {
  // Conditionally generate a log call.
  // Args:
  //   0 (literal string): The type of logging (corresponds to the flags).
  //     This is used to determine whether or not to generate the log call.
  //   1 (string): Format string.  Access the string at argument index 2
  //     with '%2s' (see Logger::LogRuntime for all the formats).
  //   2 (array): Arguments to the format string.
  ASSERT_EQ(args->length(), 3);
#ifdef ENABLE_LOGGING_AND_PROFILING
  // The type check happens at compile time, so no code is emitted when the
  // log flag is off.
  if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
    VisitForStackValue(args->at(1));
    VisitForStackValue(args->at(2));
    __ CallRuntime(Runtime::kLog, 2);
  }
#endif
  // Finally, we're expected to leave a value on the top of the stack.
  __ mov(eax, Factory::undefined_value());
  context()->Plug(eax);
}
2675
+
2676
+
2677
// Emits code that allocates a heap number holding a fresh random value in
// [0, 1), produced from 32 random bits via bit tricks (SSE2 path when
// available, x87 FPU path otherwise). Result is left in eax.
void FullCodeGenerator::EmitRandomHeapNumber(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 0);

  Label slow_allocate_heapnumber;
  Label heapnumber_allocated;

  // Fast path: inline allocation; falls through to the runtime on failure.
  __ AllocateHeapNumber(edi, ebx, ecx, &slow_allocate_heapnumber);
  __ jmp(&heapnumber_allocated);

  __ bind(&slow_allocate_heapnumber);
  // Allocate a heap number.
  __ CallRuntime(Runtime::kNumberAlloc, 0);
  __ mov(edi, eax);

  __ bind(&heapnumber_allocated);

  // Fetch 32 random bits into eax from the C random source.
  __ PrepareCallCFunction(0, ebx);
  __ CallCFunction(ExternalReference::random_uint32_function(), 0);

  // Convert 32 random bits in eax to 0.(32 random bits) in a double
  // by computing:
  // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
  // This is implemented on both SSE2 and FPU.
  if (CpuFeatures::IsSupported(SSE2)) {
    CpuFeatures::Scope fscope(SSE2);
    __ mov(ebx, Immediate(0x49800000));  // 1.0 x 2^20 as single.
    __ movd(xmm1, Operand(ebx));
    __ movd(xmm0, Operand(eax));
    __ cvtss2sd(xmm1, xmm1);
    // OR the random mantissa bits into 1.0*2^20, then subtract 1.0*2^20
    // leaving just the fractional random part as a normalized double.
    __ pxor(xmm0, xmm1);
    __ subsd(xmm0, xmm1);
    __ movdbl(FieldOperand(edi, HeapNumber::kValueOffset), xmm0);
  } else {
    // 0x4130000000000000 is 1.0 x 2^20 as a double.
    __ mov(FieldOperand(edi, HeapNumber::kExponentOffset),
           Immediate(0x41300000));
    __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), eax);
    __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
    __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), Immediate(0));
    __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
    __ fsubp(1);
    __ fstp_d(FieldOperand(edi, HeapNumber::kValueOffset));
  }
  __ mov(eax, edi);
  context()->Plug(eax);
}
2723
+
2724
+
2725
// Emits a call to SubStringStub with (string, from, to) pushed on the stack;
// the stub's result is left in eax.
void FullCodeGenerator::EmitSubString(ZoneList<Expression*>* args) {
  // Load the arguments on the stack and call the stub.
  SubStringStub stub;
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(eax);
}
2735
+
2736
+
2737
// Emits a call to RegExpExecStub with four stack arguments; the stub's
// result is left in eax.
void FullCodeGenerator::EmitRegExpExec(ZoneList<Expression*>* args) {
  // Load the arguments on the stack and call the stub.
  RegExpExecStub stub;
  ASSERT(args->length() == 4);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  __ CallStub(&stub);
  context()->Plug(eax);
}
2748
+
2749
+
2750
// Emits code that unwraps a JSValue wrapper: for a JS_VALUE_TYPE object the
// wrapped value is loaded into eax; smis and non-value objects are returned
// unchanged.
void FullCodeGenerator::EmitValueOf(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  NearLabel done;
  // If the object is a smi return the object.
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &done);
  // If the object is not a value type, return the object.
  __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
  __ j(not_equal, &done);
  // Unwrap: load the boxed value out of the JSValue.
  __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(eax);
}
2767
+
2768
+
2769
// Emits a call to MathPowStub with base and exponent pushed on the stack;
// the stub's result is left in eax.
void FullCodeGenerator::EmitMathPow(ZoneList<Expression*>* args) {
  // Load the arguments on the stack and call the runtime function.
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  MathPowStub stub;
  __ CallStub(&stub);
  context()->Plug(eax);
}
2779
+
2780
+
2781
// Emits code that stores a value into a JSValue wrapper (arg 0 = object,
// arg 1 = value) with a write barrier, and yields the value in eax. If the
// object is a smi or not a JSValue, the value is returned without storing.
void FullCodeGenerator::EmitSetValueOf(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));  // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ pop(ebx);  // eax = value. ebx = object.

  NearLabel done;
  // If the object is a smi, return the value.
  __ test(ebx, Immediate(kSmiTagMask));
  __ j(zero, &done);

  // If the object is not a value type, return the value.
  __ CmpObjectType(ebx, JS_VALUE_TYPE, ecx);
  __ j(not_equal, &done);

  // Store the value.
  __ mov(FieldOperand(ebx, JSValue::kValueOffset), eax);
  // Update the write barrier.  Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ mov(edx, eax);
  __ RecordWrite(ebx, JSValue::kValueOffset, edx, ecx);

  __ bind(&done);
  context()->Plug(eax);
}
2807
+
2808
+
2809
// Emits a call to NumberToStringStub with the number pushed on the stack;
// the resulting string is left in eax.
void FullCodeGenerator::EmitNumberToString(ZoneList<Expression*>* args) {
  ASSERT_EQ(args->length(), 1);

  // Load the argument on the stack and call the stub.
  VisitForStackValue(args->at(0));

  NumberToStringStub stub;
  __ CallStub(&stub);
  context()->Plug(eax);
}
2819
+
2820
+
2821
// Emits code converting a char code (in eax) to a one-character string via
// StringCharFromCodeGenerator; fast path inline, slow path through the
// generator's runtime fallback. The result register is ebx.
void FullCodeGenerator::EmitStringCharFromCode(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(eax, ebx);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  // Slow-path code is emitted out of line after the jump.
  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(ebx);
}
2837
+
2838
+
2839
// Emits code for String.charCodeAt-style access: arg 0 is the string
// (pushed), arg 1 the index (in eax). Uses StringCharCodeAtGenerator; out of
// range yields NaN, a non-smi index yields undefined to trigger conversion
// by the caller. The result register is edx.
void FullCodeGenerator::EmitStringCharCodeAt(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register scratch = ecx;
  Register result = edx;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      scratch,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ Set(result, Immediate(Factory::nan_value()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move the undefined value into the result register, which will
  // trigger conversion.
  __ Set(result, Immediate(Factory::undefined_value()));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
2884
+
2885
+
2886
// Emits code for String.charAt-style access: arg 0 is the string (pushed),
// arg 1 the index (in eax). Uses StringCharAtGenerator; out of range yields
// the empty string, a non-smi index yields smi zero to trigger conversion.
// The result register is eax.
void FullCodeGenerator::EmitStringCharAt(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register scratch1 = ecx;
  Register scratch2 = edx;
  Register result = eax;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch1,
                                  scratch2,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ Set(result, Immediate(Factory::empty_string()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ Set(result, Immediate(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
2933
+
2934
+
2935
// Emits a call to StringAddStub (no special flags) on two stack arguments;
// the concatenated string is left in eax.
void FullCodeGenerator::EmitStringAdd(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringAddStub stub(NO_STRING_ADD_FLAGS);
  __ CallStub(&stub);
  context()->Plug(eax);
}
2945
+
2946
+
2947
// Emits a call to StringCompareStub on two stack arguments; the stub's
// result is left in eax.
void FullCodeGenerator::EmitStringCompare(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringCompareStub stub;
  __ CallStub(&stub);
  context()->Plug(eax);
}
2957
+
2958
+
2959
// Emits a call to the transcendental-cache sin stub (tagged-argument
// variant) on one stack argument; result in eax.
void FullCodeGenerator::EmitMathSin(ZoneList<Expression*>* args) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::SIN,
                               TranscendentalCacheStub::TAGGED);
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallStub(&stub);
  context()->Plug(eax);
}
2968
+
2969
+
2970
// Emits a call to the transcendental-cache cos stub (tagged-argument
// variant) on one stack argument; result in eax.
void FullCodeGenerator::EmitMathCos(ZoneList<Expression*>* args) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::COS,
                               TranscendentalCacheStub::TAGGED);
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallStub(&stub);
  context()->Plug(eax);
}
2979
+
2980
+
2981
// Emits a call to the transcendental-cache log stub (tagged-argument
// variant) on one stack argument; result in eax.
void FullCodeGenerator::EmitMathLog(ZoneList<Expression*>* args) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::LOG,
                               TranscendentalCacheStub::TAGGED);
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallStub(&stub);
  context()->Plug(eax);
}
2990
+
2991
+
2992
// Emits a call to Runtime::kMath_sqrt on one stack argument; result in eax.
// Unlike sin/cos/log above, sqrt has no stub here and always goes through
// the runtime.
void FullCodeGenerator::EmitMathSqrt(ZoneList<Expression*>* args) {
  // Load the argument on the stack and call the runtime function.
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallRuntime(Runtime::kMath_sqrt, 1);
  context()->Plug(eax);
}
2999
+
3000
+
3001
// Emits an inline function call: args are [receiver, arg1..argN, function].
// Receiver and arguments are pushed, the function ends up in the accumulator
// and is invoked via InvokeFunction; the context register is restored
// afterwards and the call result (eax) becomes the expression value.
void FullCodeGenerator::EmitCallFunction(ZoneList<Expression*>* args) {
  ASSERT(args->length() >= 2);

  int arg_count = args->length() - 2;  // For receiver and function.
  VisitForStackValue(args->at(0));  // Receiver.
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i + 1));
  }
  VisitForAccumulatorValue(args->at(arg_count + 1));  // Function.

  // InvokeFunction requires function in edi. Move it in there.
  if (!result_register().is(edi)) __ mov(edi, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(edi, count, CALL_FUNCTION);
  // Restore the context register, which the callee may have changed.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->Plug(eax);
}
3018
+
3019
+
3020
// Emits a call to RegExpConstructResultStub with three stack arguments; the
// stub's result is left in eax.
void FullCodeGenerator::EmitRegExpConstructResult(ZoneList<Expression*>* args) {
  // Load the arguments on the stack and call the stub.
  RegExpConstructResultStub stub;
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(eax);
}
3030
+
3031
+
3032
// Emits code swapping two elements of a fast-mode JS array in place
// (args: object, index1, index2). Falls back to Runtime::kSwapElements when
// the object/elements/indices fail the fast-case checks. Record-write
// barriers are emitted unless the elements array is in new space. Leaves
// undefined in eax.
void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  Label done;
  Label slow_case;
  Register object = eax;
  Register index_1 = ebx;
  Register index_2 = ecx;
  Register elements = edi;
  Register temp = edx;
  __ mov(object, Operand(esp, 2 * kPointerSize));
  // Fetch the map and check if array is in fast case.
  // Check that object doesn't require security checks and
  // has no indexed interceptor.
  __ CmpObjectType(object, FIRST_JS_OBJECT_TYPE, temp);
  __ j(below, &slow_case);
  __ test_b(FieldOperand(temp, Map::kBitFieldOffset),
            KeyedLoadIC::kSlowCaseBitFieldMask);
  __ j(not_zero, &slow_case);

  // Check the object's elements are in fast case and writable.
  __ mov(elements, FieldOperand(object, JSObject::kElementsOffset));
  __ cmp(FieldOperand(elements, HeapObject::kMapOffset),
         Immediate(Factory::fixed_array_map()));
  __ j(not_equal, &slow_case);

  // Check that both indices are smis.
  __ mov(index_1, Operand(esp, 1 * kPointerSize));
  __ mov(index_2, Operand(esp, 0));
  __ mov(temp, index_1);
  // OR-ing the indices lets a single tag test cover both.
  __ or_(temp, Operand(index_2));
  __ test(temp, Immediate(kSmiTagMask));
  __ j(not_zero, &slow_case);

  // Check that both indices are valid.
  __ mov(temp, FieldOperand(object, JSArray::kLengthOffset));
  __ cmp(temp, Operand(index_1));
  __ j(below_equal, &slow_case);
  __ cmp(temp, Operand(index_2));
  __ j(below_equal, &slow_case);

  // Bring addresses into index1 and index2.
  __ lea(index_1, CodeGenerator::FixedArrayElementOperand(elements, index_1));
  __ lea(index_2, CodeGenerator::FixedArrayElementOperand(elements, index_2));

  // Swap elements.  Use object and temp as scratch registers.
  __ mov(object, Operand(index_1, 0));
  __ mov(temp, Operand(index_2, 0));
  __ mov(Operand(index_2, 0), object);
  __ mov(Operand(index_1, 0), temp);

  // New-space objects need no write barrier.
  Label new_space;
  __ InNewSpace(elements, temp, equal, &new_space);

  __ mov(object, elements);
  __ RecordWriteHelper(object, index_1, temp);
  __ RecordWriteHelper(elements, index_2, temp);

  __ bind(&new_space);
  // We are done. Drop elements from the stack, and return undefined.
  __ add(Operand(esp), Immediate(3 * kPointerSize));
  __ mov(eax, Factory::undefined_value());
  __ jmp(&done);

  __ bind(&slow_case);
  __ CallRuntime(Runtime::kSwapElements, 3);

  __ bind(&done);
  context()->Plug(eax);
}
3104
+
3105
+
3106
// Emits a lookup in a JSFunction result cache identified by a literal cache
// id (arg 0), keyed by arg 1. The fast path only checks the entry at the
// cache's finger; any miss falls through to Runtime::kGetFromCache. Result
// in eax.
void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  // Cache id must be a compile-time smi literal.
  ASSERT_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      Top::global_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    // Unknown cache id: emit an abort and yield undefined.
    __ Abort("Attempt to use undefined cache.");
    __ mov(eax, Factory::undefined_value());
    context()->Plug(eax);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = eax;
  Register cache = ebx;
  Register tmp = ecx;
  // Walk global object -> global context -> caches array -> this cache.
  __ mov(cache, ContextOperand(esi, Context::GLOBAL_INDEX));
  __ mov(cache,
         FieldOperand(cache, GlobalObject::kGlobalContextOffset));
  __ mov(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ mov(cache,
         FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));

  Label done, not_found;
  // tmp now holds finger offset as a smi.
  ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ mov(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
  __ cmp(key, CodeGenerator::FixedArrayElementOperand(cache, tmp));
  __ j(not_equal, &not_found);

  // Hit: the value is stored right after the key at the finger.
  __ mov(eax, CodeGenerator::FixedArrayElementOperand(cache, tmp, 1));
  __ jmp(&done);

  __ bind(&not_found);
  // Call runtime to perform the lookup.
  __ push(cache);
  __ push(key);
  __ CallRuntime(Runtime::kGetFromCache, 2);

  __ bind(&done);
  context()->Plug(eax);
}
3152
+
3153
+
3154
// Emits code testing whether two values are "equivalent" regexps: the same
// object, or two JSRegExp objects with the same map and the same data array.
// Leaves the boolean result (true_value/false_value) in eax.
void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  Register right = eax;
  Register left = ebx;
  Register tmp = ecx;

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ pop(left);

  Label done, fail, ok;
  // Identical objects are trivially equivalent.
  __ cmp(left, Operand(right));
  __ j(equal, &ok);
  // Fail if either is a non-HeapObject.
  // AND-ing the two values lets one smi-tag test cover both.
  __ mov(tmp, left);
  __ and_(Operand(tmp), right);
  __ test(Operand(tmp), Immediate(kSmiTagMask));
  __ j(zero, &fail);
  __ CmpObjectType(left, JS_REGEXP_TYPE, tmp);
  __ j(not_equal, &fail);
  // Both must share the same map (tmp holds left's map from CmpObjectType).
  __ cmp(tmp, FieldOperand(right, HeapObject::kMapOffset));
  __ j(not_equal, &fail);
  // Equivalent when both regexps share the same data array.
  __ mov(tmp, FieldOperand(left, JSRegExp::kDataOffset));
  __ cmp(tmp, FieldOperand(right, JSRegExp::kDataOffset));
  __ j(equal, &ok);
  __ bind(&fail);
  __ mov(eax, Immediate(Factory::false_value()));
  __ jmp(&done);
  __ bind(&ok);
  __ mov(eax, Immediate(Factory::true_value()));
  __ bind(&done);

  context()->Plug(eax);
}
3189
+
3190
+
3191
// Emits a test of the string argument's hash field for a cached array index,
// plugging the boolean outcome into the current test context (true when the
// "contains cached array index" mask bits are clear).
void FullCodeGenerator::EmitHasCachedArrayIndex(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  if (FLAG_debug_code) {
    __ AbortIfNotString(eax);
  }

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ test(FieldOperand(eax, String::kHashFieldOffset),
          Immediate(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3214
+
3215
+
3216
// Emits code extracting the cached array index from a string's hash field
// (via IndexFromHash) into eax. Debug builds abort if the argument is not a
// string.
void FullCodeGenerator::EmitGetCachedArrayIndex(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  if (FLAG_debug_code) {
    __ AbortIfNotString(eax);
  }

  __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
  __ IndexFromHash(eax, eax);

  context()->Plug(eax);
}
3229
+
3230
+
3231
// Emits a fast inline Array.join for arrays of sequential ASCII strings
// (args: array, separator). Validates the array, elements, and separator;
// sums string lengths; allocates one flat ASCII result and copies element
// and separator bytes into it with three specialized loops (empty,
// one-character, and long separator). Any check failure bails out and yields
// undefined in eax; note the bailout path does NOT call the runtime — the
// caller is expected to handle the undefined result.
void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;

  ASSERT(args->length() == 2);
  // We will leave the separator on the stack until the end of the function.
  VisitForStackValue(args->at(1));
  // Load this to eax (= array)
  VisitForAccumulatorValue(args->at(0));
  // All aliases of the same register have disjoint lifetimes.
  Register array = eax;
  Register elements = no_reg;  // Will be eax.

  Register index = edx;

  Register string_length = ecx;

  Register string = esi;

  Register scratch = ebx;

  Register array_length = edi;
  Register result_pos = no_reg;  // Will be edi.

  // Separator operand is already pushed.
  Operand separator_operand = Operand(esp, 2 * kPointerSize);
  Operand result_operand = Operand(esp, 1 * kPointerSize);
  Operand array_length_operand = Operand(esp, 0);
  // Reserve two stack slots for the result and the untagged array length.
  __ sub(Operand(esp), Immediate(2 * kPointerSize));
  __ cld();
  // Check that the array is a JSArray
  __ test(array, Immediate(kSmiTagMask));
  __ j(zero, &bailout);
  __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, &bailout);

  // Check that the array has fast elements.
  __ test_b(FieldOperand(scratch, Map::kBitField2Offset),
            1 << Map::kHasFastElements);
  __ j(zero, &bailout);

  // If the array has length zero, return the empty string.
  __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length);
  __ j(not_zero, &non_trivial_array);
  __ mov(result_operand, Factory::empty_string());
  __ jmp(&done);

  // Save the array length.
  __ bind(&non_trivial_array);
  __ mov(array_length_operand, array_length);

  // Save the FixedArray containing array's elements.
  // End of array's live range.
  elements = array;
  __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
  array = no_reg;


  // Check that all array elements are sequential ASCII strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ Set(index, Immediate(0));
  __ Set(string_length, Immediate(0));
  // Loop condition: while (index < length).
  // Live loop registers: index, array_length, string,
  // scratch, string_length, elements.
  if (FLAG_debug_code) {
    __ cmp(index, Operand(array_length));
    __ Assert(less, "No empty arrays here in EmitFastAsciiArrayJoin");
  }
  __ bind(&loop);
  __ mov(string, FieldOperand(elements,
                              index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ test(string, Immediate(kSmiTagMask));
  __ j(zero, &bailout);
  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ and_(scratch, Immediate(
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
  __ cmp(scratch, kStringTag | kAsciiStringTag | kSeqStringTag);
  __ j(not_equal, &bailout);
  // Accumulate the smi-encoded lengths; overflow means the joined string
  // would be too long for the fast path.
  __ add(string_length,
         FieldOperand(string, SeqAsciiString::kLengthOffset));
  __ j(overflow, &bailout);
  __ add(Operand(index), Immediate(1));
  __ cmp(index, Operand(array_length));
  __ j(less, &loop);

  // If array_length is 1, return elements[0], a string.
  __ cmp(array_length, 1);
  __ j(not_equal, &not_size_one_array);
  __ mov(scratch, FieldOperand(elements, FixedArray::kHeaderSize));
  __ mov(result_operand, scratch);
  __ jmp(&done);

  __ bind(&not_size_one_array);

  // End of array_length live range.
  result_pos = array_length;
  array_length = no_reg;

  // Live registers:
  // string_length: Sum of string lengths, as a smi.
  // elements: FixedArray of strings.

  // Check that the separator is a flat ASCII string.
  __ mov(string, separator_operand);
  __ test(string, Immediate(kSmiTagMask));
  __ j(zero, &bailout);
  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ and_(scratch, Immediate(
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
  __ cmp(scratch, ASCII_STRING_TYPE);
  __ j(not_equal, &bailout);

  // Add (separator length times array_length) - separator length
  // to string_length.
  __ mov(scratch, separator_operand);
  __ mov(scratch, FieldOperand(scratch, SeqAsciiString::kLengthOffset));
  __ sub(string_length, Operand(scratch));  // May be negative, temporarily.
  __ imul(scratch, array_length_operand);
  __ j(overflow, &bailout);
  __ add(string_length, Operand(scratch));
  __ j(overflow, &bailout);

  // Untag the total length (smi -> int).
  __ shr(string_length, 1);
  // Live registers and stack values:
  //   string_length
  //   elements
  __ AllocateAsciiString(result_pos, string_length, scratch,
                         index, string, &bailout);
  __ mov(result_operand, result_pos);
  // result_pos now points at the first character slot of the result.
  __ lea(result_pos, FieldOperand(result_pos, SeqAsciiString::kHeaderSize));


  // Dispatch on separator length: 1 char, longer, or (fall through) empty.
  __ mov(string, separator_operand);
  __ cmp(FieldOperand(string, SeqAsciiString::kLengthOffset),
         Immediate(Smi::FromInt(1)));
  __ j(equal, &one_char_separator);
  __ j(greater, &long_separator);


  // Empty separator case
  __ mov(index, Immediate(0));
  __ jmp(&loop_1_condition);
  // Loop condition: while (index < length).
  __ bind(&loop_1);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.
  //   elements: the FixedArray of strings we are joining.

  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);  // Untag the smi length.
  __ lea(string,
         FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(Operand(index), Immediate(1));
  __ bind(&loop_1_condition);
  __ cmp(index, array_length_operand);
  __ j(less, &loop_1);  // End while (index < length).
  __ jmp(&done);



  // One-character separator case
  __ bind(&one_char_separator);
  // Replace separator with its ascii character value.
  __ mov_b(scratch, FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ mov_b(separator_operand, scratch);

  __ Set(index, Immediate(0));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator
  __ jmp(&loop_2_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_2);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.

  // Copy the separator character to the result.
  __ mov_b(scratch, separator_operand);
  __ mov_b(Operand(result_pos, 0), scratch);
  __ inc(result_pos);

  __ bind(&loop_2_entry);
  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);  // Untag the smi length.
  __ lea(string,
         FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(Operand(index), Immediate(1));

  __ cmp(index, array_length_operand);
  __ j(less, &loop_2);  // End while (index < length).
  __ jmp(&done);


  // Long separator case (separator is more than one character).
  __ bind(&long_separator);

  __ Set(index, Immediate(0));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator
  __ jmp(&loop_3_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_3);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.

  // Copy the separator to the result.
  __ mov(string, separator_operand);
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);  // Untag the smi length.
  __ lea(string,
         FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);

  __ bind(&loop_3_entry);
  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);  // Untag the smi length.
  __ lea(string,
         FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(Operand(index), Immediate(1));

  __ cmp(index, array_length_operand);
  __ j(less, &loop_3);  // End while (index < length).
  __ jmp(&done);


  __ bind(&bailout);
  __ mov(result_operand, Factory::undefined_value());
  __ bind(&done);
  __ mov(eax, result_operand);
  // Drop temp values from the stack, and restore context register.
  __ add(Operand(esp), Immediate(3 * kPointerSize));

  // string aliased esi (the context register) above, so restore it.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->Plug(eax);
}
3497
+
3498
+
3499
// Emits code for a runtime call expression. Names starting with '_' are
// inline intrinsics dispatched to EmitInlineRuntimeCall; otherwise the
// arguments are pushed left-to-right and either a JS builtin is invoked via
// a call IC or the C runtime function is called directly. Result in eax.
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  Handle<String> name = expr->name();
  if (name->length() > 0 && name->Get(0) == '_') {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();

  if (expr->is_jsruntime()) {
    // Prepare for calling JS runtime function: push the builtins object
    // as the receiver.
    __ mov(eax, GlobalObjectOperand());
    __ push(FieldOperand(eax, GlobalObject::kBuiltinsOffset));
  }

  // Push the arguments ("left-to-right").
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  if (expr->is_jsruntime()) {
    // Call the JS runtime function via a call IC.
    __ Set(ecx, Immediate(expr->name()));
    InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
    Handle<Code> ic = StubCache::ComputeCallInitialize(arg_count, in_loop);
    EmitCallIC(ic, RelocInfo::CODE_TARGET);
    // Restore context register.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  } else {
    // Call the C runtime function.
    __ CallRuntime(expr->function(), arg_count);
  }
  context()->Plug(eax);
}
3536
+
3537
+
3538
+ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3539
+ switch (expr->op()) {
3540
+ case Token::DELETE: {
3541
+ Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3542
+ Property* prop = expr->expression()->AsProperty();
3543
+ Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
3544
+
3545
+ if (prop != NULL) {
3546
+ if (prop->is_synthetic()) {
3547
+ // Result of deleting parameters is false, even when they rewrite
3548
+ // to accesses on the arguments object.
3549
+ context()->Plug(false);
3550
+ } else {
3551
+ VisitForStackValue(prop->obj());
3552
+ VisitForStackValue(prop->key());
3553
+ __ push(Immediate(Smi::FromInt(strict_mode_flag())));
3554
+ __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3555
+ context()->Plug(eax);
3556
+ }
3557
+ } else if (var != NULL) {
3558
+ // Delete of an unqualified identifier is disallowed in strict mode
3559
+ // but "delete this" is.
3560
+ ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this());
3561
+ if (var->is_global()) {
3562
+ __ push(GlobalObjectOperand());
3563
+ __ push(Immediate(var->name()));
3564
+ __ push(Immediate(Smi::FromInt(kNonStrictMode)));
3565
+ __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3566
+ context()->Plug(eax);
3567
+ } else if (var->AsSlot() != NULL &&
3568
+ var->AsSlot()->type() != Slot::LOOKUP) {
3569
+ // Result of deleting non-global, non-dynamic variables is false.
3570
+ // The subexpression does not have side effects.
3571
+ context()->Plug(false);
3572
+ } else {
3573
+ // Non-global variable. Call the runtime to try to delete from the
3574
+ // context where the variable was introduced.
3575
+ __ push(context_register());
3576
+ __ push(Immediate(var->name()));
3577
+ __ CallRuntime(Runtime::kDeleteContextSlot, 2);
3578
+ context()->Plug(eax);
3579
+ }
3580
+ } else {
3581
+ // Result of deleting non-property, non-variable reference is true.
3582
+ // The subexpression may have side effects.
3583
+ VisitForEffect(expr->expression());
3584
+ context()->Plug(true);
3585
+ }
3586
+ break;
3587
+ }
3588
+
3589
+ case Token::VOID: {
3590
+ Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3591
+ VisitForEffect(expr->expression());
3592
+ context()->Plug(Factory::undefined_value());
3593
+ break;
3594
+ }
3595
+
3596
+ case Token::NOT: {
3597
+ Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3598
+ if (context()->IsEffect()) {
3599
+ // Unary NOT has no side effects so it's only necessary to visit the
3600
+ // subexpression. Match the optimizing compiler by not branching.
3601
+ VisitForEffect(expr->expression());
3602
+ } else {
3603
+ Label materialize_true, materialize_false;
3604
+ Label* if_true = NULL;
3605
+ Label* if_false = NULL;
3606
+ Label* fall_through = NULL;
3607
+
3608
+ // Notice that the labels are swapped.
3609
+ context()->PrepareTest(&materialize_true, &materialize_false,
3610
+ &if_false, &if_true, &fall_through);
3611
+ if (context()->IsTest()) ForwardBailoutToChild(expr);
3612
+ VisitForControl(expr->expression(), if_true, if_false, fall_through);
3613
+ context()->Plug(if_false, if_true); // Labels swapped.
3614
+ }
3615
+ break;
3616
+ }
3617
+
3618
+ case Token::TYPEOF: {
3619
+ Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3620
+ { StackValueContext context(this);
3621
+ VisitForTypeofValue(expr->expression());
3622
+ }
3623
+ __ CallRuntime(Runtime::kTypeof, 1);
3624
+ context()->Plug(eax);
3625
+ break;
3626
+ }
3627
+
3628
+ case Token::ADD: {
3629
+ Comment cmt(masm_, "[ UnaryOperation (ADD)");
3630
+ VisitForAccumulatorValue(expr->expression());
3631
+ Label no_conversion;
3632
+ __ test(result_register(), Immediate(kSmiTagMask));
3633
+ __ j(zero, &no_conversion);
3634
+ ToNumberStub convert_stub;
3635
+ __ CallStub(&convert_stub);
3636
+ __ bind(&no_conversion);
3637
+ context()->Plug(result_register());
3638
+ break;
3639
+ }
3640
+
3641
+ case Token::SUB: {
3642
+ Comment cmt(masm_, "[ UnaryOperation (SUB)");
3643
+ bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
3644
+ UnaryOverwriteMode overwrite =
3645
+ can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
3646
+ GenericUnaryOpStub stub(Token::SUB, overwrite, NO_UNARY_FLAGS);
3647
+ // GenericUnaryOpStub expects the argument to be in the
3648
+ // accumulator register eax.
3649
+ VisitForAccumulatorValue(expr->expression());
3650
+ __ CallStub(&stub);
3651
+ context()->Plug(eax);
3652
+ break;
3653
+ }
3654
+
3655
+ case Token::BIT_NOT: {
3656
+ Comment cmt(masm_, "[ UnaryOperation (BIT_NOT)");
3657
+ // The generic unary operation stub expects the argument to be
3658
+ // in the accumulator register eax.
3659
+ VisitForAccumulatorValue(expr->expression());
3660
+ Label done;
3661
+ bool inline_smi_case = ShouldInlineSmiCase(expr->op());
3662
+ if (inline_smi_case) {
3663
+ NearLabel call_stub;
3664
+ __ test(eax, Immediate(kSmiTagMask));
3665
+ __ j(not_zero, &call_stub);
3666
+ __ lea(eax, Operand(eax, kSmiTagMask));
3667
+ __ not_(eax);
3668
+ __ jmp(&done);
3669
+ __ bind(&call_stub);
3670
+ }
3671
+ bool overwrite = expr->expression()->ResultOverwriteAllowed();
3672
+ UnaryOverwriteMode mode =
3673
+ overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
3674
+ UnaryOpFlags flags = inline_smi_case
3675
+ ? NO_UNARY_SMI_CODE_IN_STUB
3676
+ : NO_UNARY_FLAGS;
3677
+ GenericUnaryOpStub stub(Token::BIT_NOT, mode, flags);
3678
+ __ CallStub(&stub);
3679
+ __ bind(&done);
3680
+ context()->Plug(eax);
3681
+ break;
3682
+ }
3683
+
3684
+ default:
3685
+ UNREACHABLE();
3686
+ }
3687
+ }
3688
+
3689
+
3690
+ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3691
+ Comment cmnt(masm_, "[ CountOperation");
3692
+ SetSourcePosition(expr->position());
3693
+
3694
+ // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
3695
+ // as the left-hand side.
3696
+ if (!expr->expression()->IsValidLeftHandSide()) {
3697
+ VisitForEffect(expr->expression());
3698
+ return;
3699
+ }
3700
+
3701
+ // Expression can only be a property, a global or a (parameter or local)
3702
+ // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
3703
+ enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
3704
+ LhsKind assign_type = VARIABLE;
3705
+ Property* prop = expr->expression()->AsProperty();
3706
+ // In case of a property we use the uninitialized expression context
3707
+ // of the key to detect a named property.
3708
+ if (prop != NULL) {
3709
+ assign_type =
3710
+ (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
3711
+ }
3712
+
3713
+ // Evaluate expression and get value.
3714
+ if (assign_type == VARIABLE) {
3715
+ ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
3716
+ AccumulatorValueContext context(this);
3717
+ EmitVariableLoad(expr->expression()->AsVariableProxy()->var());
3718
+ } else {
3719
+ // Reserve space for result of postfix operation.
3720
+ if (expr->is_postfix() && !context()->IsEffect()) {
3721
+ __ push(Immediate(Smi::FromInt(0)));
3722
+ }
3723
+ if (assign_type == NAMED_PROPERTY) {
3724
+ // Put the object both on the stack and in the accumulator.
3725
+ VisitForAccumulatorValue(prop->obj());
3726
+ __ push(eax);
3727
+ EmitNamedPropertyLoad(prop);
3728
+ } else {
3729
+ if (prop->is_arguments_access()) {
3730
+ VariableProxy* obj_proxy = prop->obj()->AsVariableProxy();
3731
+ MemOperand slot_operand =
3732
+ EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx);
3733
+ __ push(slot_operand);
3734
+ __ mov(eax, Immediate(prop->key()->AsLiteral()->handle()));
3735
+ } else {
3736
+ VisitForStackValue(prop->obj());
3737
+ VisitForAccumulatorValue(prop->key());
3738
+ }
3739
+ __ mov(edx, Operand(esp, 0));
3740
+ __ push(eax);
3741
+ EmitKeyedPropertyLoad(prop);
3742
+ }
3743
+ }
3744
+
3745
+ // We need a second deoptimization point after loading the value
3746
+ // in case evaluating the property load my have a side effect.
3747
+ PrepareForBailout(expr->increment(), TOS_REG);
3748
+
3749
+ // Call ToNumber only if operand is not a smi.
3750
+ NearLabel no_conversion;
3751
+ if (ShouldInlineSmiCase(expr->op())) {
3752
+ __ test(eax, Immediate(kSmiTagMask));
3753
+ __ j(zero, &no_conversion);
3754
+ }
3755
+ ToNumberStub convert_stub;
3756
+ __ CallStub(&convert_stub);
3757
+ __ bind(&no_conversion);
3758
+
3759
+ // Save result for postfix expressions.
3760
+ if (expr->is_postfix()) {
3761
+ if (!context()->IsEffect()) {
3762
+ // Save the result on the stack. If we have a named or keyed property
3763
+ // we store the result under the receiver that is currently on top
3764
+ // of the stack.
3765
+ switch (assign_type) {
3766
+ case VARIABLE:
3767
+ __ push(eax);
3768
+ break;
3769
+ case NAMED_PROPERTY:
3770
+ __ mov(Operand(esp, kPointerSize), eax);
3771
+ break;
3772
+ case KEYED_PROPERTY:
3773
+ __ mov(Operand(esp, 2 * kPointerSize), eax);
3774
+ break;
3775
+ }
3776
+ }
3777
+ }
3778
+
3779
+ // Inline smi case if we are in a loop.
3780
+ NearLabel stub_call, done;
3781
+ JumpPatchSite patch_site(masm_);
3782
+
3783
+ if (ShouldInlineSmiCase(expr->op())) {
3784
+ if (expr->op() == Token::INC) {
3785
+ __ add(Operand(eax), Immediate(Smi::FromInt(1)));
3786
+ } else {
3787
+ __ sub(Operand(eax), Immediate(Smi::FromInt(1)));
3788
+ }
3789
+ __ j(overflow, &stub_call);
3790
+ // We could eliminate this smi check if we split the code at
3791
+ // the first smi check before calling ToNumber.
3792
+ patch_site.EmitJumpIfSmi(eax, &done);
3793
+
3794
+ __ bind(&stub_call);
3795
+ // Call stub. Undo operation first.
3796
+ if (expr->op() == Token::INC) {
3797
+ __ sub(Operand(eax), Immediate(Smi::FromInt(1)));
3798
+ } else {
3799
+ __ add(Operand(eax), Immediate(Smi::FromInt(1)));
3800
+ }
3801
+ }
3802
+
3803
+ // Record position before stub call.
3804
+ SetSourcePosition(expr->position());
3805
+
3806
+ // Call stub for +1/-1.
3807
+ __ mov(edx, eax);
3808
+ __ mov(eax, Immediate(Smi::FromInt(1)));
3809
+ TypeRecordingBinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
3810
+ EmitCallIC(stub.GetCode(), &patch_site);
3811
+ __ bind(&done);
3812
+
3813
+ // Store the value returned in eax.
3814
+ switch (assign_type) {
3815
+ case VARIABLE:
3816
+ if (expr->is_postfix()) {
3817
+ // Perform the assignment as if via '='.
3818
+ { EffectContext context(this);
3819
+ EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3820
+ Token::ASSIGN);
3821
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3822
+ context.Plug(eax);
3823
+ }
3824
+ // For all contexts except EffectContext We have the result on
3825
+ // top of the stack.
3826
+ if (!context()->IsEffect()) {
3827
+ context()->PlugTOS();
3828
+ }
3829
+ } else {
3830
+ // Perform the assignment as if via '='.
3831
+ EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3832
+ Token::ASSIGN);
3833
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3834
+ context()->Plug(eax);
3835
+ }
3836
+ break;
3837
+ case NAMED_PROPERTY: {
3838
+ __ mov(ecx, prop->key()->AsLiteral()->handle());
3839
+ __ pop(edx);
3840
+ Handle<Code> ic(Builtins::builtin(
3841
+ is_strict() ? Builtins::StoreIC_Initialize_Strict
3842
+ : Builtins::StoreIC_Initialize));
3843
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
3844
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3845
+ if (expr->is_postfix()) {
3846
+ if (!context()->IsEffect()) {
3847
+ context()->PlugTOS();
3848
+ }
3849
+ } else {
3850
+ context()->Plug(eax);
3851
+ }
3852
+ break;
3853
+ }
3854
+ case KEYED_PROPERTY: {
3855
+ __ pop(ecx);
3856
+ __ pop(edx);
3857
+ Handle<Code> ic(Builtins::builtin(
3858
+ is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict
3859
+ : Builtins::KeyedStoreIC_Initialize));
3860
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
3861
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3862
+ if (expr->is_postfix()) {
3863
+ // Result is on the stack
3864
+ if (!context()->IsEffect()) {
3865
+ context()->PlugTOS();
3866
+ }
3867
+ } else {
3868
+ context()->Plug(eax);
3869
+ }
3870
+ break;
3871
+ }
3872
+ }
3873
+ }
3874
+
3875
+
3876
+ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
3877
+ VariableProxy* proxy = expr->AsVariableProxy();
3878
+ ASSERT(!context()->IsEffect());
3879
+ ASSERT(!context()->IsTest());
3880
+
3881
+ if (proxy != NULL && !proxy->var()->is_this() && proxy->var()->is_global()) {
3882
+ Comment cmnt(masm_, "Global variable");
3883
+ __ mov(eax, GlobalObjectOperand());
3884
+ __ mov(ecx, Immediate(proxy->name()));
3885
+ Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
3886
+ // Use a regular load, not a contextual load, to avoid a reference
3887
+ // error.
3888
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
3889
+ PrepareForBailout(expr, TOS_REG);
3890
+ context()->Plug(eax);
3891
+ } else if (proxy != NULL &&
3892
+ proxy->var()->AsSlot() != NULL &&
3893
+ proxy->var()->AsSlot()->type() == Slot::LOOKUP) {
3894
+ Label done, slow;
3895
+
3896
+ // Generate code for loading from variables potentially shadowed
3897
+ // by eval-introduced variables.
3898
+ Slot* slot = proxy->var()->AsSlot();
3899
+ EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done);
3900
+
3901
+ __ bind(&slow);
3902
+ __ push(esi);
3903
+ __ push(Immediate(proxy->name()));
3904
+ __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
3905
+ PrepareForBailout(expr, TOS_REG);
3906
+ __ bind(&done);
3907
+
3908
+ context()->Plug(eax);
3909
+ } else {
3910
+ // This expression cannot throw a reference error at the top level.
3911
+ context()->HandleExpression(expr);
3912
+ }
3913
+ }
3914
+
3915
+
3916
+ bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
3917
+ Expression* left,
3918
+ Expression* right,
3919
+ Label* if_true,
3920
+ Label* if_false,
3921
+ Label* fall_through) {
3922
+ if (op != Token::EQ && op != Token::EQ_STRICT) return false;
3923
+
3924
+ // Check for the pattern: typeof <expression> == <string literal>.
3925
+ Literal* right_literal = right->AsLiteral();
3926
+ if (right_literal == NULL) return false;
3927
+ Handle<Object> right_literal_value = right_literal->handle();
3928
+ if (!right_literal_value->IsString()) return false;
3929
+ UnaryOperation* left_unary = left->AsUnaryOperation();
3930
+ if (left_unary == NULL || left_unary->op() != Token::TYPEOF) return false;
3931
+ Handle<String> check = Handle<String>::cast(right_literal_value);
3932
+
3933
+ { AccumulatorValueContext context(this);
3934
+ VisitForTypeofValue(left_unary->expression());
3935
+ }
3936
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
3937
+
3938
+ if (check->Equals(Heap::number_symbol())) {
3939
+ __ test(eax, Immediate(kSmiTagMask));
3940
+ __ j(zero, if_true);
3941
+ __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
3942
+ Factory::heap_number_map());
3943
+ Split(equal, if_true, if_false, fall_through);
3944
+ } else if (check->Equals(Heap::string_symbol())) {
3945
+ __ test(eax, Immediate(kSmiTagMask));
3946
+ __ j(zero, if_false);
3947
+ // Check for undetectable objects => false.
3948
+ __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
3949
+ __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
3950
+ __ test(ecx, Immediate(1 << Map::kIsUndetectable));
3951
+ __ j(not_zero, if_false);
3952
+ __ CmpInstanceType(edx, FIRST_NONSTRING_TYPE);
3953
+ Split(below, if_true, if_false, fall_through);
3954
+ } else if (check->Equals(Heap::boolean_symbol())) {
3955
+ __ cmp(eax, Factory::true_value());
3956
+ __ j(equal, if_true);
3957
+ __ cmp(eax, Factory::false_value());
3958
+ Split(equal, if_true, if_false, fall_through);
3959
+ } else if (check->Equals(Heap::undefined_symbol())) {
3960
+ __ cmp(eax, Factory::undefined_value());
3961
+ __ j(equal, if_true);
3962
+ __ test(eax, Immediate(kSmiTagMask));
3963
+ __ j(zero, if_false);
3964
+ // Check for undetectable objects => true.
3965
+ __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
3966
+ __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
3967
+ __ test(ecx, Immediate(1 << Map::kIsUndetectable));
3968
+ Split(not_zero, if_true, if_false, fall_through);
3969
+ } else if (check->Equals(Heap::function_symbol())) {
3970
+ __ test(eax, Immediate(kSmiTagMask));
3971
+ __ j(zero, if_false);
3972
+ __ CmpObjectType(eax, JS_FUNCTION_TYPE, edx);
3973
+ __ j(equal, if_true);
3974
+ // Regular expressions => 'function' (they are callable).
3975
+ __ CmpInstanceType(edx, JS_REGEXP_TYPE);
3976
+ Split(equal, if_true, if_false, fall_through);
3977
+ } else if (check->Equals(Heap::object_symbol())) {
3978
+ __ test(eax, Immediate(kSmiTagMask));
3979
+ __ j(zero, if_false);
3980
+ __ cmp(eax, Factory::null_value());
3981
+ __ j(equal, if_true);
3982
+ // Regular expressions => 'function', not 'object'.
3983
+ __ CmpObjectType(eax, JS_REGEXP_TYPE, edx);
3984
+ __ j(equal, if_false);
3985
+ // Check for undetectable objects => false.
3986
+ __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
3987
+ __ test(ecx, Immediate(1 << Map::kIsUndetectable));
3988
+ __ j(not_zero, if_false);
3989
+ // Check for JS objects => true.
3990
+ __ movzx_b(ecx, FieldOperand(edx, Map::kInstanceTypeOffset));
3991
+ __ cmp(ecx, FIRST_JS_OBJECT_TYPE);
3992
+ __ j(less, if_false);
3993
+ __ cmp(ecx, LAST_JS_OBJECT_TYPE);
3994
+ Split(less_equal, if_true, if_false, fall_through);
3995
+ } else {
3996
+ if (if_false != fall_through) __ jmp(if_false);
3997
+ }
3998
+
3999
+ return true;
4000
+ }
4001
+
4002
+
4003
+ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4004
+ Comment cmnt(masm_, "[ CompareOperation");
4005
+ SetSourcePosition(expr->position());
4006
+
4007
+ // Always perform the comparison for its control flow. Pack the result
4008
+ // into the expression's context after the comparison is performed.
4009
+
4010
+ Label materialize_true, materialize_false;
4011
+ Label* if_true = NULL;
4012
+ Label* if_false = NULL;
4013
+ Label* fall_through = NULL;
4014
+ context()->PrepareTest(&materialize_true, &materialize_false,
4015
+ &if_true, &if_false, &fall_through);
4016
+
4017
+ // First we try a fast inlined version of the compare when one of
4018
+ // the operands is a literal.
4019
+ Token::Value op = expr->op();
4020
+ Expression* left = expr->left();
4021
+ Expression* right = expr->right();
4022
+ if (TryLiteralCompare(op, left, right, if_true, if_false, fall_through)) {
4023
+ context()->Plug(if_true, if_false);
4024
+ return;
4025
+ }
4026
+
4027
+ VisitForStackValue(expr->left());
4028
+ switch (expr->op()) {
4029
+ case Token::IN:
4030
+ VisitForStackValue(expr->right());
4031
+ __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4032
+ PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
4033
+ __ cmp(eax, Factory::true_value());
4034
+ Split(equal, if_true, if_false, fall_through);
4035
+ break;
4036
+
4037
+ case Token::INSTANCEOF: {
4038
+ VisitForStackValue(expr->right());
4039
+ InstanceofStub stub(InstanceofStub::kNoFlags);
4040
+ __ CallStub(&stub);
4041
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
4042
+ __ test(eax, Operand(eax));
4043
+ // The stub returns 0 for true.
4044
+ Split(zero, if_true, if_false, fall_through);
4045
+ break;
4046
+ }
4047
+
4048
+ default: {
4049
+ VisitForAccumulatorValue(expr->right());
4050
+ Condition cc = no_condition;
4051
+ bool strict = false;
4052
+ switch (op) {
4053
+ case Token::EQ_STRICT:
4054
+ strict = true;
4055
+ // Fall through
4056
+ case Token::EQ:
4057
+ cc = equal;
4058
+ __ pop(edx);
4059
+ break;
4060
+ case Token::LT:
4061
+ cc = less;
4062
+ __ pop(edx);
4063
+ break;
4064
+ case Token::GT:
4065
+ // Reverse left and right sizes to obtain ECMA-262 conversion order.
4066
+ cc = less;
4067
+ __ mov(edx, result_register());
4068
+ __ pop(eax);
4069
+ break;
4070
+ case Token::LTE:
4071
+ // Reverse left and right sizes to obtain ECMA-262 conversion order.
4072
+ cc = greater_equal;
4073
+ __ mov(edx, result_register());
4074
+ __ pop(eax);
4075
+ break;
4076
+ case Token::GTE:
4077
+ cc = greater_equal;
4078
+ __ pop(edx);
4079
+ break;
4080
+ case Token::IN:
4081
+ case Token::INSTANCEOF:
4082
+ default:
4083
+ UNREACHABLE();
4084
+ }
4085
+
4086
+ bool inline_smi_code = ShouldInlineSmiCase(op);
4087
+ JumpPatchSite patch_site(masm_);
4088
+ if (inline_smi_code) {
4089
+ NearLabel slow_case;
4090
+ __ mov(ecx, Operand(edx));
4091
+ __ or_(ecx, Operand(eax));
4092
+ patch_site.EmitJumpIfNotSmi(ecx, &slow_case);
4093
+ __ cmp(edx, Operand(eax));
4094
+ Split(cc, if_true, if_false, NULL);
4095
+ __ bind(&slow_case);
4096
+ }
4097
+
4098
+ // Record position and call the compare IC.
4099
+ SetSourcePosition(expr->position());
4100
+ Handle<Code> ic = CompareIC::GetUninitialized(op);
4101
+ EmitCallIC(ic, &patch_site);
4102
+
4103
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
4104
+ __ test(eax, Operand(eax));
4105
+ Split(cc, if_true, if_false, fall_through);
4106
+ }
4107
+ }
4108
+
4109
+ // Convert the result of the comparison into one expected for this
4110
+ // expression's context.
4111
+ context()->Plug(if_true, if_false);
4112
+ }
4113
+
4114
+
4115
+ void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) {
4116
+ Label materialize_true, materialize_false;
4117
+ Label* if_true = NULL;
4118
+ Label* if_false = NULL;
4119
+ Label* fall_through = NULL;
4120
+ context()->PrepareTest(&materialize_true, &materialize_false,
4121
+ &if_true, &if_false, &fall_through);
4122
+
4123
+ VisitForAccumulatorValue(expr->expression());
4124
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
4125
+
4126
+ __ cmp(eax, Factory::null_value());
4127
+ if (expr->is_strict()) {
4128
+ Split(equal, if_true, if_false, fall_through);
4129
+ } else {
4130
+ __ j(equal, if_true);
4131
+ __ cmp(eax, Factory::undefined_value());
4132
+ __ j(equal, if_true);
4133
+ __ test(eax, Immediate(kSmiTagMask));
4134
+ __ j(zero, if_false);
4135
+ // It can be an undetectable object.
4136
+ __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
4137
+ __ movzx_b(edx, FieldOperand(edx, Map::kBitFieldOffset));
4138
+ __ test(edx, Immediate(1 << Map::kIsUndetectable));
4139
+ Split(not_zero, if_true, if_false, fall_through);
4140
+ }
4141
+ context()->Plug(if_true, if_false);
4142
+ }
4143
+
4144
+
4145
+ void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4146
+ __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
4147
+ context()->Plug(eax);
4148
+ }
4149
+
4150
+
4151
+ Register FullCodeGenerator::result_register() {
4152
+ return eax;
4153
+ }
4154
+
4155
+
4156
+ Register FullCodeGenerator::context_register() {
4157
+ return esi;
4158
+ }
4159
+
4160
+
4161
+ void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
4162
+ ASSERT(mode == RelocInfo::CODE_TARGET ||
4163
+ mode == RelocInfo::CODE_TARGET_CONTEXT);
4164
+ switch (ic->kind()) {
4165
+ case Code::LOAD_IC:
4166
+ __ IncrementCounter(&Counters::named_load_full, 1);
4167
+ break;
4168
+ case Code::KEYED_LOAD_IC:
4169
+ __ IncrementCounter(&Counters::keyed_load_full, 1);
4170
+ break;
4171
+ case Code::STORE_IC:
4172
+ __ IncrementCounter(&Counters::named_store_full, 1);
4173
+ break;
4174
+ case Code::KEYED_STORE_IC:
4175
+ __ IncrementCounter(&Counters::keyed_store_full, 1);
4176
+ default:
4177
+ break;
4178
+ }
4179
+
4180
+ __ call(ic, mode);
4181
+
4182
+ // Crankshaft doesn't need patching of inlined loads and stores.
4183
+ // When compiling the snapshot we need to produce code that works
4184
+ // with and without Crankshaft.
4185
+ if (V8::UseCrankshaft() && !Serializer::enabled()) {
4186
+ return;
4187
+ }
4188
+
4189
+ // If we're calling a (keyed) load or store stub, we have to mark
4190
+ // the call as containing no inlined code so we will not attempt to
4191
+ // patch it.
4192
+ switch (ic->kind()) {
4193
+ case Code::LOAD_IC:
4194
+ case Code::KEYED_LOAD_IC:
4195
+ case Code::STORE_IC:
4196
+ case Code::KEYED_STORE_IC:
4197
+ __ nop(); // Signals no inlined code.
4198
+ break;
4199
+ default:
4200
+ // Do nothing.
4201
+ break;
4202
+ }
4203
+ }
4204
+
4205
+
4206
+ void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
4207
+ switch (ic->kind()) {
4208
+ case Code::LOAD_IC:
4209
+ __ IncrementCounter(&Counters::named_load_full, 1);
4210
+ break;
4211
+ case Code::KEYED_LOAD_IC:
4212
+ __ IncrementCounter(&Counters::keyed_load_full, 1);
4213
+ break;
4214
+ case Code::STORE_IC:
4215
+ __ IncrementCounter(&Counters::named_store_full, 1);
4216
+ break;
4217
+ case Code::KEYED_STORE_IC:
4218
+ __ IncrementCounter(&Counters::keyed_store_full, 1);
4219
+ default:
4220
+ break;
4221
+ }
4222
+
4223
+ __ call(ic, RelocInfo::CODE_TARGET);
4224
+ if (patch_site != NULL && patch_site->is_bound()) {
4225
+ patch_site->EmitPatchInfo();
4226
+ } else {
4227
+ __ nop(); // Signals no inlined code.
4228
+ }
4229
+ }
4230
+
4231
+
4232
+ void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4233
+ ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4234
+ __ mov(Operand(ebp, frame_offset), value);
4235
+ }
4236
+
4237
+
4238
+ void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4239
+ __ mov(dst, ContextOperand(esi, context_index));
4240
+ }
4241
+
4242
+
4243
+ // ----------------------------------------------------------------------------
4244
+ // Non-local control flow support.
4245
+
4246
+ void FullCodeGenerator::EnterFinallyBlock() {
4247
+ // Cook return address on top of stack (smi encoded Code* delta)
4248
+ ASSERT(!result_register().is(edx));
4249
+ __ mov(edx, Operand(esp, 0));
4250
+ __ sub(Operand(edx), Immediate(masm_->CodeObject()));
4251
+ ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
4252
+ ASSERT_EQ(0, kSmiTag);
4253
+ __ add(edx, Operand(edx)); // Convert to smi.
4254
+ __ mov(Operand(esp, 0), edx);
4255
+ // Store result register while executing finally block.
4256
+ __ push(result_register());
4257
+ }
4258
+
4259
+
4260
+ void FullCodeGenerator::ExitFinallyBlock() {
4261
+ ASSERT(!result_register().is(edx));
4262
+ // Restore result register from stack.
4263
+ __ pop(result_register());
4264
+ // Uncook return address.
4265
+ __ mov(edx, Operand(esp, 0));
4266
+ __ sar(edx, 1); // Convert smi to int.
4267
+ __ add(Operand(edx), Immediate(masm_->CodeObject()));
4268
+ __ mov(Operand(esp, 0), edx);
4269
+ // And return.
4270
+ __ ret(0);
4271
+ }
4272
+
4273
+
4274
+ #undef __
4275
+
4276
+ } } // namespace v8::internal
4277
+
4278
+ #endif // V8_TARGET_ARCH_IA32