therubyracer 0.7.4 → 0.7.5

Potentially problematic release.

Files changed (482)
  1. data/History.txt +11 -0
  2. data/Rakefile +1 -1
  3. data/ext/v8/extconf.rb +0 -18
  4. data/ext/v8/rr.cpp +2 -2
  5. data/ext/v8/upstream/{2.1.10 → 2.3.3}/AUTHORS +1 -0
  6. data/ext/v8/upstream/{2.1.10 → 2.3.3}/ChangeLog +239 -0
  7. data/ext/v8/upstream/{2.1.10 → 2.3.3}/LICENSE +0 -0
  8. data/ext/v8/upstream/{2.1.10 → 2.3.3}/SConstruct +29 -17
  9. data/ext/v8/upstream/{2.1.10 → 2.3.3}/include/v8-debug.h +61 -3
  10. data/ext/v8/upstream/{2.1.10 → 2.3.3}/include/v8-profiler.h +182 -5
  11. data/ext/v8/upstream/{2.1.10 → 2.3.3}/include/v8.h +458 -257
  12. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/SConscript +2 -5
  13. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/accessors.cc +2 -2
  14. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/accessors.h +0 -0
  15. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/allocation.cc +0 -0
  16. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/allocation.h +0 -0
  17. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/api.cc +574 -30
  18. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/api.h +12 -10
  19. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/apinatives.js +0 -0
  20. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/apiutils.h +0 -0
  21. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arguments.h +0 -0
  22. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/assembler-arm-inl.h +38 -15
  23. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/assembler-arm.cc +646 -101
  24. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/assembler-arm.h +174 -15
  25. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/builtins-arm.cc +56 -47
  26. data/ext/v8/upstream/2.3.3/src/arm/codegen-arm-inl.h +48 -0
  27. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/codegen-arm.cc +2957 -1448
  28. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/codegen-arm.h +230 -74
  29. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/constants-arm.cc +25 -1
  30. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/constants-arm.h +16 -1
  31. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/cpu-arm.cc +4 -0
  32. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/debug-arm.cc +76 -6
  33. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/disasm-arm.cc +168 -20
  34. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/fast-codegen-arm.cc +5 -2
  35. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/frames-arm.cc +4 -4
  36. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/frames-arm.h +0 -0
  37. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/full-codegen-arm.cc +1558 -248
  38. data/ext/v8/upstream/2.3.3/src/arm/ic-arm.cc +2258 -0
  39. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/jump-target-arm.cc +55 -103
  40. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/macro-assembler-arm.cc +358 -185
  41. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/macro-assembler-arm.h +136 -41
  42. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/regexp-macro-assembler-arm.cc +26 -5
  43. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/regexp-macro-assembler-arm.h +0 -0
  44. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/register-allocator-arm-inl.h +0 -0
  45. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/register-allocator-arm.cc +4 -0
  46. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/register-allocator-arm.h +0 -0
  47. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/simulator-arm.cc +203 -22
  48. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/simulator-arm.h +7 -0
  49. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/stub-cache-arm.cc +531 -324
  50. data/ext/v8/upstream/2.3.3/src/arm/virtual-frame-arm-inl.h +59 -0
  51. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/virtual-frame-arm.cc +247 -81
  52. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/virtual-frame-arm.h +99 -83
  53. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/array.js +2 -2
  54. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/assembler.cc +6 -13
  55. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/assembler.h +36 -10
  56. data/ext/v8/upstream/2.3.3/src/ast-inl.h +81 -0
  57. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ast.cc +14 -0
  58. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ast.h +20 -35
  59. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/bootstrapper.cc +32 -1
  60. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/bootstrapper.h +0 -4
  61. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/builtins.cc +50 -33
  62. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/builtins.h +2 -0
  63. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/bytecodes-irregexp.h +0 -0
  64. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/cached-powers.h +0 -0
  65. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/char-predicates-inl.h +0 -0
  66. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/char-predicates.h +0 -0
  67. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/checks.cc +0 -0
  68. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/checks.h +8 -6
  69. data/ext/v8/upstream/2.3.3/src/circular-queue-inl.h +53 -0
  70. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/circular-queue.cc +0 -0
  71. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/circular-queue.h +0 -26
  72. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/code-stubs.cc +2 -4
  73. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/code-stubs.h +1 -0
  74. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/code.h +0 -0
  75. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/codegen-inl.h +0 -0
  76. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/codegen.cc +44 -13
  77. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/codegen.h +310 -31
  78. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/compilation-cache.cc +28 -0
  79. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/compilation-cache.h +3 -0
  80. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/compiler.cc +45 -14
  81. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/compiler.h +0 -0
  82. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/contexts.cc +11 -11
  83. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/contexts.h +0 -0
  84. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/conversions-inl.h +0 -0
  85. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/conversions.cc +25 -11
  86. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/conversions.h +0 -0
  87. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/counters.cc +0 -0
  88. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/counters.h +0 -0
  89. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/cpu-profiler-inl.h +2 -1
  90. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/cpu-profiler.cc +68 -24
  91. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/cpu-profiler.h +19 -11
  92. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/cpu.h +0 -0
  93. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/d8-debug.cc +0 -0
  94. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/d8-debug.h +0 -0
  95. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/d8-posix.cc +0 -0
  96. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/d8-readline.cc +0 -0
  97. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/d8-windows.cc +0 -0
  98. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/d8.cc +3 -0
  99. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/d8.h +0 -0
  100. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/d8.js +55 -2
  101. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/data-flow.cc +3 -0
  102. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/data-flow.h +0 -0
  103. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/date.js +68 -137
  104. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/dateparser-inl.h +0 -0
  105. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/dateparser.cc +2 -8
  106. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/dateparser.h +0 -0
  107. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/debug-agent.cc +3 -3
  108. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/debug-agent.h +0 -0
  109. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/debug-debugger.js +81 -23
  110. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/debug.cc +275 -81
  111. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/debug.h +85 -6
  112. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/disasm.h +0 -0
  113. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/disassembler.cc +1 -1
  114. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/disassembler.h +0 -0
  115. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/diy-fp.cc +0 -0
  116. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/diy-fp.h +0 -0
  117. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/double.h +0 -0
  118. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/dtoa-config.c +0 -0
  119. data/ext/v8/upstream/2.3.3/src/dtoa.cc +77 -0
  120. data/ext/v8/upstream/2.3.3/src/dtoa.h +81 -0
  121. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/execution.cc +111 -3
  122. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/execution.h +12 -1
  123. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/factory.cc +25 -3
  124. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/factory.h +16 -9
  125. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/fast-codegen.cc +0 -0
  126. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/fast-codegen.h +0 -0
  127. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/fast-dtoa.cc +2 -9
  128. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/fast-dtoa.h +1 -2
  129. data/ext/v8/upstream/2.3.3/src/fixed-dtoa.cc +405 -0
  130. data/ext/v8/upstream/{2.1.10/src/jump-target-light.cc → 2.3.3/src/fixed-dtoa.h} +22 -53
  131. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/flag-definitions.h +14 -6
  132. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/flags.cc +5 -9
  133. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/flags.h +0 -0
  134. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/flow-graph.cc +0 -0
  135. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/flow-graph.h +0 -0
  136. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/frame-element.cc +0 -0
  137. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/frame-element.h +0 -0
  138. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/frames-inl.h +0 -0
  139. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/frames.cc +5 -2
  140. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/frames.h +1 -0
  141. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/full-codegen.cc +387 -20
  142. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/full-codegen.h +102 -5
  143. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/func-name-inferrer.cc +0 -0
  144. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/func-name-inferrer.h +0 -0
  145. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/global-handles.cc +8 -4
  146. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/global-handles.h +0 -0
  147. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/globals.h +44 -7
  148. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/handles-inl.h +0 -0
  149. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/handles.cc +19 -0
  150. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/handles.h +8 -0
  151. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/hashmap.cc +0 -0
  152. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/hashmap.h +0 -0
  153. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/heap-inl.h +56 -14
  154. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/heap-profiler.cc +85 -1
  155. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/heap-profiler.h +45 -1
  156. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/heap.cc +994 -396
  157. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/heap.h +220 -65
  158. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/assembler-ia32-inl.h +41 -12
  159. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/assembler-ia32.cc +94 -24
  160. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/assembler-ia32.h +32 -4
  161. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/builtins-ia32.cc +42 -30
  162. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/codegen-ia32-inl.h +0 -0
  163. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/codegen-ia32.cc +1758 -916
  164. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/codegen-ia32.h +67 -74
  165. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/cpu-ia32.cc +4 -0
  166. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/debug-ia32.cc +46 -0
  167. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/disasm-ia32.cc +37 -6
  168. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/fast-codegen-ia32.cc +4 -0
  169. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/fast-codegen-ia32.h +0 -0
  170. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/frames-ia32.cc +4 -0
  171. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/frames-ia32.h +0 -0
  172. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/full-codegen-ia32.cc +1465 -198
  173. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/ic-ia32.cc +688 -367
  174. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/jump-target-ia32.cc +4 -0
  175. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/macro-assembler-ia32.cc +82 -180
  176. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/macro-assembler-ia32.h +41 -25
  177. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/regexp-macro-assembler-ia32.cc +68 -24
  178. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/regexp-macro-assembler-ia32.h +1 -2
  179. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/register-allocator-ia32-inl.h +0 -0
  180. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/register-allocator-ia32.cc +4 -0
  181. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/register-allocator-ia32.h +0 -0
  182. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/simulator-ia32.cc +0 -0
  183. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/simulator-ia32.h +0 -0
  184. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/stub-cache-ia32.cc +649 -302
  185. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/virtual-frame-ia32.cc +23 -1
  186. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/virtual-frame-ia32.h +18 -27
  187. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ic-inl.h +30 -3
  188. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ic.cc +384 -66
  189. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ic.h +65 -24
  190. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/interpreter-irregexp.cc +0 -0
  191. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/interpreter-irregexp.h +0 -0
  192. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/json.js +3 -3
  193. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/jsregexp.cc +20 -4
  194. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/jsregexp.h +0 -0
  195. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/jump-target-heavy-inl.h +0 -0
  196. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/jump-target-heavy.cc +79 -13
  197. data/ext/v8/upstream/{2.1.10/src/jump-target.h → 2.3.3/src/jump-target-heavy.h} +5 -47
  198. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/jump-target-inl.h +0 -0
  199. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/jump-target-light-inl.h +16 -2
  200. data/ext/v8/upstream/2.3.3/src/jump-target-light.cc +110 -0
  201. data/ext/v8/upstream/2.3.3/src/jump-target-light.h +192 -0
  202. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/jump-target.cc +0 -64
  203. data/ext/v8/upstream/2.3.3/src/jump-target.h +90 -0
  204. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/list-inl.h +0 -0
  205. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/list.h +0 -0
  206. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/liveedit-debugger.js +141 -28
  207. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/liveedit.cc +19 -7
  208. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/liveedit.h +0 -0
  209. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/log-inl.h +0 -0
  210. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/log-utils.cc +0 -0
  211. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/log-utils.h +0 -0
  212. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/log.cc +12 -11
  213. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/log.h +12 -0
  214. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/macro-assembler.h +0 -16
  215. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/macros.py +21 -0
  216. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mark-compact.cc +120 -109
  217. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mark-compact.h +25 -37
  218. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/math.js +0 -0
  219. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/memory.h +0 -0
  220. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/messages.cc +8 -3
  221. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/messages.h +2 -1
  222. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/messages.js +15 -7
  223. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/assembler-mips-inl.h +0 -0
  224. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/assembler-mips.cc +12 -1
  225. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/assembler-mips.h +4 -1
  226. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/builtins-mips.cc +3 -0
  227. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/codegen-mips-inl.h +0 -0
  228. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/codegen-mips.cc +9 -0
  229. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/codegen-mips.h +1 -0
  230. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/constants-mips.cc +5 -0
  231. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/constants-mips.h +0 -0
  232. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/cpu-mips.cc +4 -0
  233. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/debug-mips.cc +3 -0
  234. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/disasm-mips.cc +3 -0
  235. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/fast-codegen-mips.cc +3 -0
  236. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/frames-mips.cc +3 -0
  237. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/frames-mips.h +0 -0
  238. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/full-codegen-mips.cc +5 -1
  239. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/ic-mips.cc +3 -0
  240. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/jump-target-mips.cc +3 -0
  241. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/macro-assembler-mips.cc +3 -0
  242. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/macro-assembler-mips.h +0 -0
  243. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/register-allocator-mips-inl.h +0 -0
  244. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/register-allocator-mips.cc +3 -0
  245. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/register-allocator-mips.h +0 -0
  246. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/simulator-mips.cc +3 -0
  247. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/simulator-mips.h +0 -0
  248. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/stub-cache-mips.cc +3 -0
  249. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/virtual-frame-mips.cc +3 -0
  250. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/virtual-frame-mips.h +0 -0
  251. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mirror-debugger.js +46 -4
  252. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mksnapshot.cc +0 -0
  253. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/natives.h +0 -0
  254. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/objects-debug.cc +8 -1
  255. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/objects-inl.h +235 -62
  256. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/objects.cc +497 -231
  257. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/objects.h +355 -149
  258. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/oprofile-agent.cc +0 -0
  259. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/oprofile-agent.h +0 -0
  260. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/parser.cc +31 -6
  261. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/parser.h +1 -1
  262. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/platform-freebsd.cc +9 -6
  263. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/platform-linux.cc +26 -6
  264. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/platform-macos.cc +11 -6
  265. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/platform-nullos.cc +0 -0
  266. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/platform-openbsd.cc +6 -0
  267. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/platform-posix.cc +0 -0
  268. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/platform-solaris.cc +69 -23
  269. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/platform-win32.cc +15 -11
  270. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/platform.h +10 -6
  271. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/powers-ten.h +0 -0
  272. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/prettyprinter.cc +0 -0
  273. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/prettyprinter.h +0 -0
  274. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/profile-generator-inl.h +26 -2
  275. data/ext/v8/upstream/2.3.3/src/profile-generator.cc +1830 -0
  276. data/ext/v8/upstream/2.3.3/src/profile-generator.h +853 -0
  277. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/property.cc +0 -0
  278. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/property.h +0 -0
  279. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp-macro-assembler-irregexp-inl.h +0 -0
  280. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp-macro-assembler-irregexp.cc +0 -0
  281. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp-macro-assembler-irregexp.h +0 -0
  282. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp-macro-assembler-tracer.cc +0 -0
  283. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp-macro-assembler-tracer.h +0 -0
  284. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp-macro-assembler.cc +1 -3
  285. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp-macro-assembler.h +0 -0
  286. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp-stack.cc +0 -0
  287. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp-stack.h +0 -0
  288. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp.js +25 -4
  289. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/register-allocator-inl.h +0 -0
  290. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/register-allocator.cc +4 -3
  291. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/register-allocator.h +0 -0
  292. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/rewriter.cc +85 -8
  293. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/rewriter.h +0 -0
  294. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/runtime.cc +547 -221
  295. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/runtime.h +5 -1
  296. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/runtime.js +23 -31
  297. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/scanner.cc +12 -6
  298. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/scanner.h +60 -53
  299. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/scopeinfo.cc +156 -168
  300. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/scopeinfo.h +58 -62
  301. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/scopes.cc +0 -0
  302. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/scopes.h +0 -0
  303. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/serialize.cc +320 -242
  304. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/serialize.h +81 -48
  305. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/shell.h +0 -0
  306. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/simulator.h +0 -0
  307. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/smart-pointer.h +0 -0
  308. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/snapshot-common.cc +0 -0
  309. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/snapshot-empty.cc +0 -0
  310. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/snapshot.h +0 -0
  311. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/spaces-inl.h +177 -74
  312. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/spaces.cc +138 -315
  313. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/spaces.h +155 -124
  314. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/splay-tree-inl.h +0 -0
  315. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/splay-tree.h +0 -0
  316. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/string-stream.cc +0 -0
  317. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/string-stream.h +0 -0
  318. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/string.js +113 -119
  319. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/stub-cache.cc +242 -97
  320. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/stub-cache.h +118 -55
  321. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/third_party/dtoa/COPYING +0 -0
  322. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/third_party/dtoa/dtoa.c +4 -0
  323. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/third_party/valgrind/valgrind.h +0 -0
  324. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/token.cc +0 -0
  325. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/token.h +0 -0
  326. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/top.cc +107 -26
  327. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/top.h +9 -4
  328. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/type-info.cc +0 -0
  329. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/type-info.h +2 -2
  330. data/ext/v8/upstream/2.3.3/src/unbound-queue-inl.h +95 -0
  331. data/ext/v8/upstream/2.3.3/src/unbound-queue.h +67 -0
  332. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/unicode-inl.h +0 -0
  333. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/unicode.cc +0 -0
  334. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/unicode.h +0 -0
  335. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/uri.js +0 -0
  336. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/utils.cc +0 -0
  337. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/utils.h +83 -1
  338. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/v8-counters.cc +0 -0
  339. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/v8-counters.h +20 -0
  340. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/v8.cc +5 -1
  341. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/v8.h +0 -0
  342. data/ext/v8/upstream/2.3.3/src/v8dll-main.cc +39 -0
  343. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/v8natives.js +210 -33
  344. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/v8threads.cc +1 -1
  345. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/v8threads.h +1 -1
  346. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/variables.cc +0 -0
  347. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/variables.h +0 -0
  348. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/version.cc +3 -3
  349. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/version.h +0 -0
  350. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/virtual-frame-heavy-inl.h +40 -0
  351. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/virtual-frame-heavy.cc +0 -0
  352. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/virtual-frame-inl.h +0 -0
  353. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/virtual-frame-light-inl.h +106 -5
  354. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/virtual-frame-light.cc +4 -1
  355. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/virtual-frame.cc +0 -0
  356. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/virtual-frame.h +0 -0
  357. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/vm-state-inl.h +6 -3
  358. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/vm-state.cc +1 -1
  359. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/vm-state.h +6 -4
  360. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/assembler-x64-inl.h +42 -5
  361. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/assembler-x64.cc +285 -53
  362. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/assembler-x64.h +54 -18
  363. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/builtins-x64.cc +31 -33
  364. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/codegen-x64-inl.h +0 -0
  365. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/codegen-x64.cc +9787 -8722
  366. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/codegen-x64.h +82 -47
  367. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/cpu-x64.cc +4 -0
  368. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/debug-x64.cc +55 -6
  369. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/disasm-x64.cc +42 -19
  370. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/fast-codegen-x64.cc +4 -0
  371. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/frames-x64.cc +4 -0
  372. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/frames-x64.h +4 -0
  373. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/full-codegen-x64.cc +1487 -210
  374. data/ext/v8/upstream/2.3.3/src/x64/ic-x64.cc +1907 -0
  375. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/jump-target-x64.cc +4 -0
  376. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/macro-assembler-x64.cc +366 -338
  377. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/macro-assembler-x64.h +83 -38
  378. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/regexp-macro-assembler-x64.cc +82 -23
  379. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/regexp-macro-assembler-x64.h +1 -2
  380. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/register-allocator-x64-inl.h +6 -5
  381. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/register-allocator-x64.cc +4 -0
  382. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/register-allocator-x64.h +1 -1
  383. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/simulator-x64.cc +0 -0
  384. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/simulator-x64.h +0 -0
  385. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/stub-cache-x64.cc +556 -377
  386. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/virtual-frame-x64.cc +197 -98
  387. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/virtual-frame-x64.h +37 -28
  388. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/zone-inl.h +0 -0
  389. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/zone.cc +0 -0
  390. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/zone.h +0 -0
  391. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/codemap.js +0 -0
  392. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/consarray.js +0 -0
  393. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/csvparser.js +0 -0
  394. data/ext/v8/upstream/2.3.3/tools/gc-nvp-trace-processor.py +317 -0
  395. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/generate-ten-powers.scm +0 -0
  396. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/gyp/v8.gyp +87 -20
  397. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/js2c.py +19 -15
  398. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/jsmin.py +0 -0
  399. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/linux-tick-processor +0 -0
  400. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/linux-tick-processor.py +0 -0
  401. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/logreader.js +0 -0
  402. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/mac-nm +0 -0
  403. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/mac-tick-processor +0 -0
  404. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/oprofile/annotate +0 -0
  405. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/oprofile/common +0 -0
  406. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/oprofile/dump +0 -0
  407. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/oprofile/report +0 -0
  408. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/oprofile/reset +0 -0
  409. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/oprofile/run +0 -0
  410. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/oprofile/shutdown +0 -0
  411. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/oprofile/start +0 -0
  412. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/presubmit.py +0 -0
  413. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/process-heap-prof.py +0 -0
  414. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/profile.js +0 -0
  415. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/profile_view.js +0 -0
  416. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/run-valgrind.py +0 -0
  417. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/splaytree.js +0 -0
  418. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/splaytree.py +0 -0
  419. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/stats-viewer.py +25 -13
  420. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/test.py +0 -0
  421. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/tickprocessor-driver.js +0 -0
  422. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/tickprocessor.js +0 -0
  423. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/tickprocessor.py +0 -0
  424. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/utils.py +0 -0
  425. data/ext/v8/upstream/2.3.3/tools/v8.xcodeproj/project.pbxproj +1855 -0
  426. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/README.txt +0 -0
  427. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/arm.vsprops +0 -0
  428. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/common.vsprops +0 -0
  429. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/d8.vcproj +0 -0
  430. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/d8_arm.vcproj +0 -0
  431. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/d8_x64.vcproj +0 -0
  432. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/d8js2c.cmd +0 -0
  433. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/debug.vsprops +0 -0
  434. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/ia32.vsprops +0 -0
  435. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/js2c.cmd +0 -0
  436. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/release.vsprops +0 -0
  437. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8.sln +0 -0
  438. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8.vcproj +0 -0
  439. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_arm.sln +0 -0
  440. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_arm.vcproj +0 -0
  441. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_base.vcproj +40 -0
  442. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_base_arm.vcproj +20 -0
  443. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_base_x64.vcproj +16 -0
  444. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_cctest.vcproj +4 -0
  445. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_cctest_arm.vcproj +0 -0
  446. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_cctest_x64.vcproj +0 -0
  447. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_mksnapshot.vcproj +0 -0
  448. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_mksnapshot_x64.vcproj +0 -0
  449. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_process_sample.vcproj +0 -0
  450. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_process_sample_arm.vcproj +0 -0
  451. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_process_sample_x64.vcproj +0 -0
  452. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_shell_sample.vcproj +0 -0
  453. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_shell_sample_arm.vcproj +0 -0
  454. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_shell_sample_x64.vcproj +0 -0
  455. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_snapshot.vcproj +0 -0
  456. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_snapshot_cc.vcproj +0 -0
  457. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_snapshot_cc_x64.vcproj +0 -0
  458. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_snapshot_x64.vcproj +0 -0
  459. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_x64.sln +0 -0
  460. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_x64.vcproj +0 -0
  461. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/x64.vsprops +0 -0
  462. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/windows-tick-processor.bat +0 -0
  463. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/windows-tick-processor.py +0 -0
  464. data/ext/v8/upstream/Makefile +1 -1
  465. data/ext/v8/v8_template.cpp +94 -2
  466. data/ext/v8/v8_try_catch.cpp +2 -2
  467. data/lib/v8.rb +1 -1
  468. data/lib/v8/access.rb +93 -40
  469. data/lib/v8/cli.rb +1 -1
  470. data/lib/v8/function.rb +14 -2
  471. data/spec/redjs/jsapi_spec.rb +231 -42
  472. data/therubyracer.gemspec +3 -3
  473. metadata +463 -453
  474. data/ext/v8/upstream/2.1.10/src/arm/assembler-thumb2-inl.h +0 -263
  475. data/ext/v8/upstream/2.1.10/src/arm/assembler-thumb2.cc +0 -1878
  476. data/ext/v8/upstream/2.1.10/src/arm/assembler-thumb2.h +0 -1036
  477. data/ext/v8/upstream/2.1.10/src/arm/codegen-arm-inl.h +0 -72
  478. data/ext/v8/upstream/2.1.10/src/arm/ic-arm.cc +0 -1833
  479. data/ext/v8/upstream/2.1.10/src/circular-queue-inl.h +0 -101
  480. data/ext/v8/upstream/2.1.10/src/profile-generator.cc +0 -583
  481. data/ext/v8/upstream/2.1.10/src/profile-generator.h +0 -364
  482. data/ext/v8/upstream/2.1.10/src/x64/ic-x64.cc +0 -1621
data/ext/v8/upstream/2.3.3/src/x64/ic-x64.cc (new file)
@@ -0,0 +1,1907 @@
+ // Copyright 2010 the V8 project authors. All rights reserved.
+ // Redistribution and use in source and binary forms, with or without
+ // modification, are permitted provided that the following conditions are
+ // met:
+ //
+ // * Redistributions of source code must retain the above copyright
+ // notice, this list of conditions and the following disclaimer.
+ // * Redistributions in binary form must reproduce the above
+ // copyright notice, this list of conditions and the following
+ // disclaimer in the documentation and/or other materials provided
+ // with the distribution.
+ // * Neither the name of Google Inc. nor the names of its
+ // contributors may be used to endorse or promote products derived
+ // from this software without specific prior written permission.
+ //
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ #include "v8.h"
+
+ #if defined(V8_TARGET_ARCH_X64)
+
+ #include "codegen-inl.h"
+ #include "ic-inl.h"
+ #include "runtime.h"
+ #include "stub-cache.h"
+ #include "utils.h"
+
+ namespace v8 {
+ namespace internal {
+
+ // ----------------------------------------------------------------------------
+ // Static IC stub generators.
+ //
+
+ #define __ ACCESS_MASM(masm)
+
+
+ static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
+ Register type,
+ Label* global_object) {
+ // Register usage:
+ // type: holds the receiver instance type on entry.
+ __ cmpb(type, Immediate(JS_GLOBAL_OBJECT_TYPE));
+ __ j(equal, global_object);
+ __ cmpb(type, Immediate(JS_BUILTINS_OBJECT_TYPE));
+ __ j(equal, global_object);
+ __ cmpb(type, Immediate(JS_GLOBAL_PROXY_TYPE));
+ __ j(equal, global_object);
+ }
+
+
+ // Generated code falls through if the receiver is a regular non-global
+ // JS object with slow properties and no interceptors.
+ static void GenerateStringDictionaryReceiverCheck(MacroAssembler* masm,
+ Register receiver,
+ Register r0,
+ Register r1,
+ Label* miss) {
+ // Register usage:
+ // receiver: holds the receiver on entry and is unchanged.
+ // r0: used to hold receiver instance type.
+ // Holds the property dictionary on fall through.
+ // r1: used to hold receivers map.
+
+ __ JumpIfSmi(receiver, miss);
+
+ // Check that the receiver is a valid JS object.
+ __ movq(r1, FieldOperand(receiver, HeapObject::kMapOffset));
+ __ movb(r0, FieldOperand(r1, Map::kInstanceTypeOffset));
+ __ cmpb(r0, Immediate(FIRST_JS_OBJECT_TYPE));
+ __ j(below, miss);
+
+ // If this assert fails, we have to check upper bound too.
+ ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
+
+ GenerateGlobalInstanceTypeCheck(masm, r0, miss);
+
+ // Check for non-global object that requires access check.
+ __ testb(FieldOperand(r1, Map::kBitFieldOffset),
+ Immediate((1 << Map::kIsAccessCheckNeeded) |
+ (1 << Map::kHasNamedInterceptor)));
+ __ j(not_zero, miss);
+
+ __ movq(r0, FieldOperand(receiver, JSObject::kPropertiesOffset));
+ __ CompareRoot(FieldOperand(r0, HeapObject::kMapOffset),
+ Heap::kHashTableMapRootIndex);
+ __ j(not_equal, miss);
+ }
+
+
+ // Probe the string dictionary in the |elements| register. Jump to the
+ // |done| label if a property with the given name is found leaving the
+ // index into the dictionary in |r1|. Jump to the |miss| label
+ // otherwise.
+ static void GenerateStringDictionaryProbes(MacroAssembler* masm,
+ Label* miss,
+ Label* done,
+ Register elements,
+ Register name,
+ Register r0,
+ Register r1) {
+ // Compute the capacity mask.
+ const int kCapacityOffset =
+ StringDictionary::kHeaderSize +
+ StringDictionary::kCapacityIndex * kPointerSize;
+ __ SmiToInteger32(r0, FieldOperand(elements, kCapacityOffset));
+ __ decl(r0);
+
+ // Generate an unrolled loop that performs a few probes before
+ // giving up. Measurements done on Gmail indicate that 2 probes
+ // cover ~93% of loads from dictionaries.
+ static const int kProbes = 4;
+ const int kElementsStartOffset =
+ StringDictionary::kHeaderSize +
+ StringDictionary::kElementsStartIndex * kPointerSize;
+ for (int i = 0; i < kProbes; i++) {
+ // Compute the masked index: (hash + i + i * i) & mask.
+ __ movl(r1, FieldOperand(name, String::kHashFieldOffset));
+ __ shrl(r1, Immediate(String::kHashShift));
+ if (i > 0) {
+ __ addl(r1, Immediate(StringDictionary::GetProbeOffset(i)));
+ }
+ __ and_(r1, r0);
+
+ // Scale the index by multiplying by the entry size.
+ ASSERT(StringDictionary::kEntrySize == 3);
+ __ lea(r1, Operand(r1, r1, times_2, 0)); // r1 = r1 * 3
+
+ // Check if the key is identical to the name.
+ __ cmpq(name, Operand(elements, r1, times_pointer_size,
+ kElementsStartOffset - kHeapObjectTag));
+ if (i != kProbes - 1) {
+ __ j(equal, done);
+ } else {
+ __ j(not_equal, miss);
+ }
+ }
+ }
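The comments above spell out the strategy: the probe loop is unrolled because a couple of probes already cover roughly 93% of dictionary loads, each probe visits slot (hash + i + i*i) & mask, and the index is then scaled by the three-pointer entry size. The following C++ sketch models that lookup on a flat array of entries; the names (FindEntry, ProbeOffset, kEntrySize) and the probe-offset formula are taken from the comments above and are illustrative only, not code from the upstream sources.

#include <cstdint>

// Entries are [key, value, details] triples, hence the entry size of 3
// asserted in the stub above.
static const int kEntrySize = 3;
static const int kProbes = 4;  // the stub gives up after four probes

// Probe offset i + i*i, per the "(hash + i + i * i) & mask" comment.
static inline uint32_t ProbeOffset(int i) { return i + i * i; }

// Returns the entry index whose key slot matches |name|, or -1 after
// kProbes misses (the stub's |miss| label).
int FindEntry(const intptr_t* entries, uint32_t capacity,
              uint32_t hash, intptr_t name) {
  uint32_t mask = capacity - 1;  // capacity is a power of two; cf. decl(r0)
  for (int i = 0; i < kProbes; i++) {
    uint32_t index = (hash + ProbeOffset(i)) & mask;
    if (entries[index * kEntrySize] == name) return static_cast<int>(index);
  }
  return -1;
}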
+
+
+ // Helper function used to load a property from a dictionary backing storage.
+ // This function may return false negatives, so miss_label
+ // must always call a backup property load that is complete.
+ // This function is safe to call if name is not a symbol, and will jump to
+ // the miss_label in that case.
+ // The generated code assumes that the receiver has slow properties,
+ // is not a global object and does not have interceptors.
+ static void GenerateDictionaryLoad(MacroAssembler* masm,
+ Label* miss_label,
+ Register elements,
+ Register name,
+ Register r0,
+ Register r1,
+ Register result) {
+ // Register use:
+ //
+ // elements - holds the property dictionary on entry and is unchanged.
+ //
+ // name - holds the name of the property on entry and is unchanged.
+ //
+ // r0 - used to hold the capacity of the property dictionary.
+ //
+ // r1 - used to hold the index into the property dictionary.
+ //
+ // result - holds the result on exit if the load succeeded.
+
+ Label done;
+
+ // Probe the dictionary.
+ GenerateStringDictionaryProbes(masm,
+ miss_label,
+ &done,
+ elements,
+ name,
+ r0,
+ r1);
+
+ // If probing finds an entry in the dictionary, r0 contains the
+ // index into the dictionary. Check that the value is a normal
+ // property.
+ __ bind(&done);
+ const int kElementsStartOffset =
+ StringDictionary::kHeaderSize +
+ StringDictionary::kElementsStartIndex * kPointerSize;
+ const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
+ __ Test(Operand(elements, r1, times_pointer_size,
+ kDetailsOffset - kHeapObjectTag),
+ Smi::FromInt(PropertyDetails::TypeField::mask()));
+ __ j(not_zero, miss_label);
+
+ // Get the value at the masked, scaled index.
+ const int kValueOffset = kElementsStartOffset + kPointerSize;
+ __ movq(result,
+ Operand(elements, r1, times_pointer_size,
+ kValueOffset - kHeapObjectTag));
+ }
+
+
+ // Helper function used to store a property to a dictionary backing
+ // storage. This function may fail to store a property even though it
+ // is in the dictionary, so code at miss_label must always call a
+ // backup property store that is complete. This function is safe to
+ // call if name is not a symbol, and will jump to the miss_label in
+ // that case. The generated code assumes that the receiver has slow
+ // properties, is not a global object and does not have interceptors.
+ static void GenerateDictionaryStore(MacroAssembler* masm,
+ Label* miss_label,
+ Register elements,
+ Register name,
+ Register value,
+ Register scratch0,
+ Register scratch1) {
+ // Register use:
+ //
+ // elements - holds the property dictionary on entry and is clobbered.
+ //
+ // name - holds the name of the property on entry and is unchanged.
+ //
+ // value - holds the value to store and is unchanged.
+ //
+ // scratch0 - used for index into the property dictionary and is clobbered.
+ //
+ // scratch1 - used to hold the capacity of the property dictionary and is
+ // clobbered.
+ Label done;
+
+ // Probe the dictionary.
+ GenerateStringDictionaryProbes(masm,
+ miss_label,
+ &done,
+ elements,
+ name,
+ scratch0,
+ scratch1);
+
+ // If probing finds an entry in the dictionary, scratch0 contains the
+ // index into the dictionary. Check that the value is a normal
+ // property that is not read only.
+ __ bind(&done);
+ const int kElementsStartOffset =
+ StringDictionary::kHeaderSize +
+ StringDictionary::kElementsStartIndex * kPointerSize;
+ const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
+ const int kTypeAndReadOnlyMask
+ = (PropertyDetails::TypeField::mask() |
+ PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
+ __ Test(Operand(elements,
+ scratch1,
+ times_pointer_size,
+ kDetailsOffset - kHeapObjectTag),
+ Smi::FromInt(kTypeAndReadOnlyMask));
+ __ j(not_zero, miss_label);
+
+ // Store the value at the masked, scaled index.
+ const int kValueOffset = kElementsStartOffset + kPointerSize;
+ __ lea(scratch1, Operand(elements,
+ scratch1,
+ times_pointer_size,
+ kValueOffset - kHeapObjectTag));
+ __ movq(Operand(scratch1, 0), value);
+
+ // Update write barrier. Make sure not to clobber the value.
+ __ movq(scratch0, value);
+ __ RecordWrite(elements, scratch1, scratch0);
+ }
+
+
+ static void GenerateNumberDictionaryLoad(MacroAssembler* masm,
+ Label* miss,
+ Register elements,
+ Register key,
+ Register r0,
+ Register r1,
+ Register r2,
+ Register result) {
+ // Register use:
+ //
+ // elements - holds the slow-case elements of the receiver on entry.
+ // Unchanged unless 'result' is the same register.
+ //
+ // key - holds the smi key on entry.
+ // Unchanged unless 'result' is the same register.
+ //
+ // Scratch registers:
+ //
+ // r0 - holds the untagged key on entry and holds the hash once computed.
+ //
+ // r1 - used to hold the capacity mask of the dictionary
+ //
+ // r2 - used for the index into the dictionary.
+ //
+ // result - holds the result on exit if the load succeeded.
+ // Allowed to be the same as 'key' or 'result'.
+ // Unchanged on bailout so 'key' or 'result' can be used
+ // in further computation.
+
+ Label done;
+
+ // Compute the hash code from the untagged key. This must be kept in sync
+ // with ComputeIntegerHash in utils.h.
+ //
+ // hash = ~hash + (hash << 15);
+ __ movl(r1, r0);
+ __ notl(r0);
+ __ shll(r1, Immediate(15));
+ __ addl(r0, r1);
+ // hash = hash ^ (hash >> 12);
+ __ movl(r1, r0);
+ __ shrl(r1, Immediate(12));
+ __ xorl(r0, r1);
+ // hash = hash + (hash << 2);
+ __ leal(r0, Operand(r0, r0, times_4, 0));
+ // hash = hash ^ (hash >> 4);
+ __ movl(r1, r0);
+ __ shrl(r1, Immediate(4));
+ __ xorl(r0, r1);
+ // hash = hash * 2057;
+ __ imull(r0, r0, Immediate(2057));
+ // hash = hash ^ (hash >> 16);
+ __ movl(r1, r0);
+ __ shrl(r1, Immediate(16));
+ __ xorl(r0, r1);
+
+ // Compute capacity mask.
+ __ SmiToInteger32(r1,
+ FieldOperand(elements, NumberDictionary::kCapacityOffset));
+ __ decl(r1);
+
+ // Generate an unrolled loop that performs a few probes before giving up.
+ const int kProbes = 4;
+ for (int i = 0; i < kProbes; i++) {
+ // Use r2 for index calculations and keep the hash intact in r0.
+ __ movq(r2, r0);
+ // Compute the masked index: (hash + i + i * i) & mask.
+ if (i > 0) {
+ __ addl(r2, Immediate(NumberDictionary::GetProbeOffset(i)));
+ }
+ __ and_(r2, r1);
+
+ // Scale the index by multiplying by the entry size.
+ ASSERT(NumberDictionary::kEntrySize == 3);
+ __ lea(r2, Operand(r2, r2, times_2, 0)); // r2 = r2 * 3
+
+ // Check if the key matches.
+ __ cmpq(key, FieldOperand(elements,
+ r2,
+ times_pointer_size,
+ NumberDictionary::kElementsStartOffset));
+ if (i != (kProbes - 1)) {
+ __ j(equal, &done);
+ } else {
+ __ j(not_equal, miss);
+ }
+ }
+
+ __ bind(&done);
+ // Check that the value is a normal propety.
+ const int kDetailsOffset =
+ NumberDictionary::kElementsStartOffset + 2 * kPointerSize;
+ ASSERT_EQ(NORMAL, 0);
+ __ Test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
+ Smi::FromInt(PropertyDetails::TypeField::mask()));
+ __ j(not_zero, miss);
+
+ // Get the value at the masked, scaled index.
+ const int kValueOffset =
+ NumberDictionary::kElementsStartOffset + kPointerSize;
+ __ movq(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
+ }
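The block above inlines the integer hash that the comments say must stay in sync with ComputeIntegerHash in utils.h. Collapsed into ordinary C++ (a sketch assembled from those inline comments, not copied from utils.h), the computation is:

#include <cstdint>

// Same shift-and-xor sequence the stub emits into r0/r1 above.
uint32_t IntegerHash(uint32_t hash) {
  hash = ~hash + (hash << 15);
  hash = hash ^ (hash >> 12);
  hash = hash + (hash << 2);  // emitted as leal(r0, Operand(r0, r0, times_4, 0))
  hash = hash ^ (hash >> 4);
  hash = hash * 2057;
  hash = hash ^ (hash >> 16);
  return hash;
}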
+
+
+ // One byte opcode for test rax,0xXXXXXXXX.
+ static const byte kTestEaxByte = 0xA9;
+
+
+ static bool PatchInlinedMapCheck(Address address, Object* map) {
+ // Arguments are address of start of call sequence that called
+ // the IC,
+ Address test_instruction_address =
+ address + Assembler::kCallTargetAddressOffset;
+ // The keyed load has a fast inlined case if the IC call instruction
+ // is immediately followed by a test instruction.
+ if (*test_instruction_address != kTestEaxByte) return false;
+
+ // Fetch the offset from the test instruction to the map compare
+ // instructions (starting with the 64-bit immediate mov of the map
+ // address). This offset is stored in the last 4 bytes of the 5
+ // byte test instruction.
+ Address delta_address = test_instruction_address + 1;
+ int delta = *reinterpret_cast<int*>(delta_address);
+ // Compute the map address. The map address is in the last 8 bytes
+ // of the 10-byte immediate mov instruction (incl. REX prefix), so we add 2
+ // to the offset to get the map address.
+ Address map_address = test_instruction_address + delta + 2;
+ // Patch the map check.
+ *(reinterpret_cast<Object**>(map_address)) = map;
+ return true;
+ }
+
+
+ bool KeyedLoadIC::PatchInlinedLoad(Address address, Object* map) {
+ return PatchInlinedMapCheck(address, map);
+ }
+
+
+ bool KeyedStoreIC::PatchInlinedStore(Address address, Object* map) {
+ return PatchInlinedMapCheck(address, map);
+ }
+
+
+ void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- rax : key
+ // -- rdx : receiver
+ // -- rsp[0] : return address
+ // -----------------------------------
+
+ __ IncrementCounter(&Counters::keyed_load_miss, 1);
+
+ __ pop(rbx);
+ __ push(rdx); // receiver
+ __ push(rax); // name
+ __ push(rbx); // return address
+
+ // Perform tail call to the entry.
+ ExternalReference ref = ExternalReference(IC_Utility(kKeyedLoadIC_Miss));
+ __ TailCallExternalReference(ref, 2, 1);
+ }
+
+
+ void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- rax : key
+ // -- rdx : receiver
+ // -- rsp[0] : return address
+ // -----------------------------------
+
+ __ pop(rbx);
+ __ push(rdx); // receiver
+ __ push(rax); // name
+ __ push(rbx); // return address
+
+ // Perform tail call to the entry.
+ __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
+ }
+
+
+ // Checks the receiver for special cases (value type, slow case bits).
+ // Falls through for regular JS object.
+ static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
+ Register receiver,
+ Register map,
+ int interceptor_bit,
+ Label* slow) {
+ // Register use:
+ // receiver - holds the receiver and is unchanged.
+ // Scratch registers:
+ // map - used to hold the map of the receiver.
+
+ // Check that the object isn't a smi.
+ __ JumpIfSmi(receiver, slow);
+
+ // Check that the object is some kind of JS object EXCEPT JS Value type.
+ // In the case that the object is a value-wrapper object,
+ // we enter the runtime system to make sure that indexing
+ // into string objects work as intended.
+ ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
+ __ CmpObjectType(receiver, JS_OBJECT_TYPE, map);
+ __ j(below, slow);
+
+ // Check bit field.
+ __ testb(FieldOperand(map, Map::kBitFieldOffset),
+ Immediate((1 << Map::kIsAccessCheckNeeded) |
+ (1 << interceptor_bit)));
+ __ j(not_zero, slow);
+ }
+
+
+ // Loads an indexed element from a fast case array.
+ static void GenerateFastArrayLoad(MacroAssembler* masm,
+ Register receiver,
+ Register key,
+ Register elements,
+ Register scratch,
+ Register result,
+ Label* not_fast_array,
+ Label* out_of_range) {
+ // Register use:
+ //
+ // receiver - holds the receiver on entry.
+ // Unchanged unless 'result' is the same register.
+ //
+ // key - holds the smi key on entry.
+ // Unchanged unless 'result' is the same register.
+ //
+ // elements - holds the elements of the receiver on exit.
+ //
+ // result - holds the result on exit if the load succeeded.
+ // Allowed to be the the same as 'receiver' or 'key'.
+ // Unchanged on bailout so 'receiver' and 'key' can be safely
+ // used by further computation.
+ //
+ // Scratch registers:
+ //
+ // scratch - used to hold elements of the receiver and the loaded value.
+
+ __ movq(elements, FieldOperand(receiver, JSObject::kElementsOffset));
+ // Check that the object is in fast mode (not dictionary).
+ __ CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
+ Heap::kFixedArrayMapRootIndex);
+ __ j(not_equal, not_fast_array);
+ // Check that the key (index) is within bounds.
+ __ SmiCompare(key, FieldOperand(elements, FixedArray::kLengthOffset));
+ // Unsigned comparison rejects negative indices.
+ __ j(above_equal, out_of_range);
+ // Fast case: Do the load.
+ SmiIndex index = masm->SmiToIndex(scratch, key, kPointerSizeLog2);
+ __ movq(scratch, FieldOperand(elements,
+ index.reg,
+ index.scale,
+ FixedArray::kHeaderSize));
+ __ CompareRoot(scratch, Heap::kTheHoleValueRootIndex);
+ // In case the loaded value is the_hole we have to consult GetProperty
+ // to ensure the prototype chain is searched.
+ __ j(equal, out_of_range);
+ if (!result.is(scratch)) {
+ __ movq(result, scratch);
+ }
+ }
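Condensed into C++, the fast path above is: reject dictionary-mode elements, bounds-check the smi key with a single unsigned comparison (which also rejects negative indices), load the slot, and bail out if it holds the hole so that GetProperty can search the prototype chain. A simplified sketch under those assumptions; the untagged array, length, and kTheHole sentinel are stand-ins for the tagged values the stub really compares.

#include <cstdint>

static const intptr_t kTheHole = -1;  // stand-in for Heap::kTheHoleValueRootIndex

// Mirrors GenerateFastArrayLoad's checks on an already-verified fast-mode
// backing store; returns false where the stub would jump to a slow label.
bool FastArrayLoad(const intptr_t* elements, uint32_t length,
                   int64_t key, intptr_t* result) {
  // Unsigned comparison: negative keys wrap around and fail the bound.
  if (static_cast<uint64_t>(key) >= length) return false;  // out_of_range
  intptr_t value = elements[key];
  if (value == kTheHole) return false;  // defer to GetProperty / prototypes
  *result = value;
  return true;
}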
+
541
+
542
+ // Checks whether a key is an array index string or a symbol string.
543
+ // Falls through if the key is a symbol.
544
+ static void GenerateKeyStringCheck(MacroAssembler* masm,
545
+ Register key,
546
+ Register map,
547
+ Register hash,
548
+ Label* index_string,
549
+ Label* not_symbol) {
550
+ // Register use:
551
+ // key - holds the key and is unchanged. Assumed to be non-smi.
552
+ // Scratch registers:
553
+ // map - used to hold the map of the key.
554
+ // hash - used to hold the hash of the key.
555
+ __ CmpObjectType(key, FIRST_NONSTRING_TYPE, map);
556
+ __ j(above_equal, not_symbol);
557
+ // Is the string an array index, with cached numeric value?
558
+ __ movl(hash, FieldOperand(key, String::kHashFieldOffset));
559
+ __ testl(hash, Immediate(String::kContainsCachedArrayIndexMask));
560
+ __ j(zero, index_string); // The value in hash is used at jump target.
561
+
562
+ // Is the string a symbol?
563
+ ASSERT(kSymbolTag != 0);
564
+ __ testb(FieldOperand(map, Map::kInstanceTypeOffset),
565
+ Immediate(kIsSymbolMask));
566
+ __ j(zero, not_symbol);
567
+ }
568
+
569
+
570
+ // Picks out an array index from the hash field.
571
+ static void GenerateIndexFromHash(MacroAssembler* masm,
572
+ Register key,
573
+ Register hash) {
574
+ // Register use:
575
+ // key - holds the overwritten key on exit.
576
+ // hash - holds the key's hash. Clobbered.
577
+
578
+ // The assert checks that the constants for the maximum number of digits
579
+ // for an array index cached in the hash field and the number of bits
580
+ // reserved for it does not conflict.
581
+ ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
582
+ (1 << String::kArrayIndexValueBits));
583
+ // We want the smi-tagged index in key. Even if we subsequently go to
584
+ // the slow case, converting the key to a smi is always valid.
585
+ // key: string key
586
+ // hash: key's hash field, including its array index value.
587
+ __ and_(hash, Immediate(String::kArrayIndexValueMask));
588
+ __ shr(hash, Immediate(String::kHashShift));
589
+ // Here we actually clobber the key which will be used if calling into
590
+ // runtime later. However as the new key is the numeric value of a string key
591
+ // there is no difference in using either key.
592
+ __ Integer32ToSmi(key, hash);
593
+ }
+
+
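A compact way to read the two helpers above: a string key is usable as an index when its hash field caches an array index, and the index is recovered with a mask and a shift. The sketch below uses invented constant values; only the shape of the computation is taken from the code.

#include <cstdint>
#include <optional>

constexpr uint32_t kContainsCachedArrayIndexMask = 1u << 1;           // assumed
constexpr uint32_t kHashShift = 2;                                    // assumed
constexpr uint32_t kArrayIndexValueMask = 0x3FFFFFFFu << kHashShift;  // assumed

// GenerateKeyStringCheck jumps to index_string only when the "contains cached
// array index" bits are zero; GenerateIndexFromHash then masks and shifts the
// cached value out of the hash field.
std::optional<uint32_t> CachedArrayIndex(uint32_t hash_field) {
  if ((hash_field & kContainsCachedArrayIndexMask) != 0) return std::nullopt;
  return (hash_field & kArrayIndexValueMask) >> kHashShift;
}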
+ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
597
+ // ----------- S t a t e -------------
598
+ // -- rax : key
599
+ // -- rdx : receiver
600
+ // -- rsp[0] : return address
601
+ // -----------------------------------
602
+ Label slow, check_string, index_smi, index_string;
603
+ Label check_pixel_array, probe_dictionary, check_number_dictionary;
604
+
605
+ // Check that the key is a smi.
606
+ __ JumpIfNotSmi(rax, &check_string);
607
+ __ bind(&index_smi);
608
+ // Now the key is known to be a smi. This place is also jumped to from below
609
+ // where a numeric string is converted to a smi.
610
+
611
+ GenerateKeyedLoadReceiverCheck(
612
+ masm, rdx, rcx, Map::kHasIndexedInterceptor, &slow);
613
+
614
+ GenerateFastArrayLoad(masm,
615
+ rdx,
616
+ rax,
617
+ rcx,
618
+ rbx,
619
+ rax,
620
+ &check_pixel_array,
621
+ &slow);
622
+ __ IncrementCounter(&Counters::keyed_load_generic_smi, 1);
623
+ __ ret(0);
624
+
625
+ __ bind(&check_pixel_array);
626
+ // Check whether the elements object is a pixel array.
627
+ // rdx: receiver
628
+ // rax: key
629
+ // rcx: elements array
630
+ __ SmiToInteger32(rbx, rax); // Used on both directions of next branch.
631
+ __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
632
+ Heap::kPixelArrayMapRootIndex);
633
+ __ j(not_equal, &check_number_dictionary);
634
+ __ cmpl(rbx, FieldOperand(rcx, PixelArray::kLengthOffset));
635
+ __ j(above_equal, &slow);
636
+ __ movq(rax, FieldOperand(rcx, PixelArray::kExternalPointerOffset));
637
+ __ movzxbq(rax, Operand(rax, rbx, times_1, 0));
638
+ __ Integer32ToSmi(rax, rax);
639
+ __ ret(0);
640
+
641
+ __ bind(&check_number_dictionary);
642
+ // Check whether the elements is a number dictionary.
643
+ // rdx: receiver
644
+ // rax: key
645
+ // rbx: key as untagged int32
646
+ // rcx: elements
647
+ __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
648
+ Heap::kHashTableMapRootIndex);
649
+ __ j(not_equal, &slow);
650
+ GenerateNumberDictionaryLoad(masm, &slow, rcx, rax, rbx, r9, rdi, rax);
651
+ __ ret(0);
652
+
653
+ __ bind(&slow);
654
+ // Slow case: Jump to runtime.
655
+ // rdx: receiver
656
+ // rax: key
657
+ __ IncrementCounter(&Counters::keyed_load_generic_slow, 1);
658
+ GenerateRuntimeGetProperty(masm);
659
+
660
+ __ bind(&check_string);
661
+ GenerateKeyStringCheck(masm, rax, rcx, rbx, &index_string, &slow);
662
+
663
+ GenerateKeyedLoadReceiverCheck(
664
+ masm, rdx, rcx, Map::kHasNamedInterceptor, &slow);
665
+
666
+ // If the receiver is a fast-case object, check the keyed lookup
667
+ // cache. Otherwise probe the dictionary leaving result in rcx.
668
+ __ movq(rbx, FieldOperand(rdx, JSObject::kPropertiesOffset));
669
+ __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
670
+ Heap::kHashTableMapRootIndex);
671
+ __ j(equal, &probe_dictionary);
672
+
673
+ // Load the map of the receiver, compute the keyed lookup cache hash
674
+ // based on 32 bits of the map pointer and the string hash.
675
+ __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
676
+ __ movl(rcx, rbx);
677
+ __ shr(rcx, Immediate(KeyedLookupCache::kMapHashShift));
678
+ __ movl(rdi, FieldOperand(rax, String::kHashFieldOffset));
679
+ __ shr(rdi, Immediate(String::kHashShift));
680
+ __ xor_(rcx, rdi);
681
+ __ and_(rcx, Immediate(KeyedLookupCache::kCapacityMask));
682
+
683
+ // Load the key (consisting of map and symbol) from the cache and
684
+ // check for match.
685
+ ExternalReference cache_keys
686
+ = ExternalReference::keyed_lookup_cache_keys();
687
+ __ movq(rdi, rcx);
688
+ __ shl(rdi, Immediate(kPointerSizeLog2 + 1));
689
+ __ movq(kScratchRegister, cache_keys);
690
+ __ cmpq(rbx, Operand(kScratchRegister, rdi, times_1, 0));
691
+ __ j(not_equal, &slow);
692
+ __ cmpq(rax, Operand(kScratchRegister, rdi, times_1, kPointerSize));
693
+ __ j(not_equal, &slow);
694
+
695
+ // Get field offset which is a 32-bit integer and check that it is
696
+ // an in-object property.
697
+ ExternalReference cache_field_offsets
698
+ = ExternalReference::keyed_lookup_cache_field_offsets();
699
+ __ movq(kScratchRegister, cache_field_offsets);
700
+ __ movl(rdi, Operand(kScratchRegister, rcx, times_4, 0));
701
+ __ movzxbq(rcx, FieldOperand(rbx, Map::kInObjectPropertiesOffset));
702
+ __ subq(rdi, rcx);
703
+ __ j(above_equal, &slow);
704
+
705
+ // Load in-object property.
706
+ __ movzxbq(rcx, FieldOperand(rbx, Map::kInstanceSizeOffset));
707
+ __ addq(rcx, rdi);
708
+ __ movq(rax, FieldOperand(rdx, rcx, times_pointer_size, 0));
709
+ __ IncrementCounter(&Counters::keyed_load_generic_lookup_cache, 1);
710
+ __ ret(0);
711
+
712
+ // Do a quick inline probe of the receiver's dictionary, if it
713
+ // exists.
714
+ __ bind(&probe_dictionary);
715
+ // rdx: receiver
716
+ // rax: key
717
+ // rbx: elements
718
+
719
+ __ movq(rcx, FieldOperand(rdx, JSObject::kMapOffset));
720
+ __ movb(rcx, FieldOperand(rcx, Map::kInstanceTypeOffset));
721
+ GenerateGlobalInstanceTypeCheck(masm, rcx, &slow);
722
+
723
+ GenerateDictionaryLoad(masm, &slow, rbx, rax, rcx, rdi, rax);
724
+ __ IncrementCounter(&Counters::keyed_load_generic_symbol, 1);
725
+ __ ret(0);
726
+
727
+ __ bind(&index_string);
728
+ GenerateIndexFromHash(masm, rax, rbx);
729
+ __ jmp(&index_smi);
730
+ }
+
+
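The lookup-cache probe in the generic keyed load above boils down to hashing the receiver map together with the symbol hash, masking down to a slot, and comparing the stored (map, name) pair. A sketch under assumed shift and capacity values:

#include <cstddef>
#include <cstdint>

constexpr int kMapHashShift = 2;           // assumed
constexpr int kStringHashShift = 2;        // assumed
constexpr std::size_t kCapacityMask = 63;  // assumed cache capacity - 1

struct KeyedLookupCacheEntry { const void* map; const void* name; };  // stand-in

// Returns the matching slot, or kCapacityMask + 1 to signal "fall through to
// the slow path".
std::size_t ProbeKeyedLookupCache(const KeyedLookupCacheEntry* keys,
                                  const void* receiver_map,
                                  const void* name,
                                  uint32_t name_hash) {
  uint32_t map_bits = static_cast<uint32_t>(
      reinterpret_cast<uintptr_t>(receiver_map));  // low 32 bits of the map
  std::size_t slot =
      ((map_bits >> kMapHashShift) ^ (name_hash >> kStringHashShift)) &
      kCapacityMask;
  if (keys[slot].map == receiver_map && keys[slot].name == name) return slot;
  return kCapacityMask + 1;
}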
+ void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
734
+ // ----------- S t a t e -------------
735
+ // -- rax : key
736
+ // -- rdx : receiver
737
+ // -- rsp[0] : return address
738
+ // -----------------------------------
739
+ Label miss;
740
+ Label index_out_of_range;
741
+
742
+ Register receiver = rdx;
743
+ Register index = rax;
744
+ Register scratch1 = rbx;
745
+ Register scratch2 = rcx;
746
+ Register result = rax;
747
+
748
+ StringCharAtGenerator char_at_generator(receiver,
749
+ index,
750
+ scratch1,
751
+ scratch2,
752
+ result,
753
+ &miss, // When not a string.
754
+ &miss, // When not a number.
755
+ &index_out_of_range,
756
+ STRING_INDEX_IS_ARRAY_INDEX);
757
+ char_at_generator.GenerateFast(masm);
758
+ __ ret(0);
759
+
760
+ ICRuntimeCallHelper call_helper;
761
+ char_at_generator.GenerateSlow(masm, call_helper);
762
+
763
+ __ bind(&index_out_of_range);
764
+ __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
765
+ __ ret(0);
766
+
767
+ __ bind(&miss);
768
+ GenerateMiss(masm);
769
+ }
+
+
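For completeness, the observable behaviour this stub implements for string receivers, sketched with std::string and std::optional standing in for V8 handles: the one-character string at a valid index, or undefined (here an empty optional) when the index is out of range.

#include <cstddef>
#include <optional>
#include <string>

std::optional<std::string> KeyedLoadOnString(const std::string& receiver,
                                             std::size_t index) {
  if (index >= receiver.size()) return std::nullopt;  // index_out_of_range
  return std::string(1, receiver[index]);             // fast char-at path
}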
+ void KeyedLoadIC::GenerateExternalArray(MacroAssembler* masm,
773
+ ExternalArrayType array_type) {
774
+ // ----------- S t a t e -------------
775
+ // -- rax : key
776
+ // -- rdx : receiver
777
+ // -- rsp[0] : return address
778
+ // -----------------------------------
779
+ Label slow;
780
+
781
+ // Check that the object isn't a smi.
782
+ __ JumpIfSmi(rdx, &slow);
783
+
784
+ // Check that the key is a smi.
785
+ __ JumpIfNotSmi(rax, &slow);
786
+
787
+ // Check that the object is a JS object.
788
+ __ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx);
789
+ __ j(not_equal, &slow);
790
+ // Check that the receiver does not require access checks. We need
791
+ // to check this explicitly since this generic stub does not perform
792
+ // map checks. The map is already in rdx.
793
+ __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
794
+ Immediate(1 << Map::kIsAccessCheckNeeded));
795
+ __ j(not_zero, &slow);
796
+
797
+ // Check that the elements array is the appropriate type of
798
+ // ExternalArray.
799
+ // rax: index (as a smi)
800
+ // rdx: JSObject
801
+ __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
802
+ __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
803
+ Heap::RootIndexForExternalArrayType(array_type));
804
+ __ j(not_equal, &slow);
805
+
806
+ // Check that the index is in range.
807
+ __ SmiToInteger32(rcx, rax);
808
+ __ cmpl(rcx, FieldOperand(rbx, ExternalArray::kLengthOffset));
809
+ // Unsigned comparison catches both negative and too-large values.
810
+ __ j(above_equal, &slow);
811
+
812
+ // rax: index (as a smi)
813
+ // rdx: receiver (JSObject)
814
+ // rcx: untagged index
815
+ // rbx: elements array
816
+ __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset));
817
+ // rbx: base pointer of external storage
818
+ switch (array_type) {
819
+ case kExternalByteArray:
820
+ __ movsxbq(rcx, Operand(rbx, rcx, times_1, 0));
821
+ break;
822
+ case kExternalUnsignedByteArray:
823
+ __ movzxbq(rcx, Operand(rbx, rcx, times_1, 0));
824
+ break;
825
+ case kExternalShortArray:
826
+ __ movsxwq(rcx, Operand(rbx, rcx, times_2, 0));
827
+ break;
828
+ case kExternalUnsignedShortArray:
829
+ __ movzxwq(rcx, Operand(rbx, rcx, times_2, 0));
830
+ break;
831
+ case kExternalIntArray:
832
+ __ movsxlq(rcx, Operand(rbx, rcx, times_4, 0));
833
+ break;
834
+ case kExternalUnsignedIntArray:
835
+ __ movl(rcx, Operand(rbx, rcx, times_4, 0));
836
+ break;
837
+ case kExternalFloatArray:
838
+ __ cvtss2sd(xmm0, Operand(rbx, rcx, times_4, 0));
839
+ break;
840
+ default:
841
+ UNREACHABLE();
842
+ break;
843
+ }
844
+
845
+ // rax: index
846
+ // rdx: receiver
847
+ // For integer array types:
848
+ // rcx: value
849
+ // For floating-point array type:
850
+ // xmm0: value as double.
851
+
852
+ ASSERT(kSmiValueSize == 32);
853
+ if (array_type == kExternalUnsignedIntArray) {
854
+ // For the UnsignedInt array type, we need to see whether
855
+ // the value can be represented in a Smi. If not, we need to convert
856
+ // it to a HeapNumber.
857
+ Label box_int;
858
+
859
+ __ JumpIfUIntNotValidSmiValue(rcx, &box_int);
860
+
861
+ __ Integer32ToSmi(rax, rcx);
862
+ __ ret(0);
863
+
864
+ __ bind(&box_int);
865
+
866
+ // Allocate a HeapNumber for the int and perform int-to-double
867
+ // conversion.
868
+ // The value is zero-extended since we loaded the value from memory
869
+ // with movl.
870
+ __ cvtqsi2sd(xmm0, rcx);
871
+
872
+ __ AllocateHeapNumber(rcx, rbx, &slow);
873
+ // Set the value.
874
+ __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0);
875
+ __ movq(rax, rcx);
876
+ __ ret(0);
877
+ } else if (array_type == kExternalFloatArray) {
878
+ // For the floating-point array type, we need to always allocate a
879
+ // HeapNumber.
880
+ __ AllocateHeapNumber(rcx, rbx, &slow);
881
+ // Set the value.
882
+ __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0);
883
+ __ movq(rax, rcx);
884
+ __ ret(0);
885
+ } else {
886
+ __ Integer32ToSmi(rax, rcx);
887
+ __ ret(0);
888
+ }
889
+
890
+ // Slow case: Jump to runtime.
891
+ __ bind(&slow);
892
+ __ IncrementCounter(&Counters::keyed_load_external_array_slow, 1);
893
+ GenerateRuntimeGetProperty(masm);
894
+ }
+
+
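The boxing decision above can be summarised as: with the 32-bit smi payload asserted by kSmiValueSize == 32, any uint32 whose top bit is set cannot be a smi and has to be boxed. A sketch, with a double standing in for the allocated HeapNumber:

#include <cstdint>
#include <variant>

using LoadedValue = std::variant<int32_t, double>;  // smi-like vs. boxed stand-in

LoadedValue BoxUint32(uint32_t raw) {
  if (raw <= static_cast<uint32_t>(INT32_MAX)) {
    return static_cast<int32_t>(raw);  // fits in the smi range
  }
  return static_cast<double>(raw);     // models the AllocateHeapNumber path
}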
+ void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
898
+ // ----------- S t a t e -------------
899
+ // -- rax : key
900
+ // -- rdx : receiver
901
+ // -- rsp[0] : return address
902
+ // -----------------------------------
903
+ Label slow;
904
+
905
+ // Check that the receiver isn't a smi.
906
+ __ JumpIfSmi(rdx, &slow);
907
+
908
+ // Check that the key is a smi.
909
+ __ JumpIfNotSmi(rax, &slow);
910
+
911
+ // Get the map of the receiver.
912
+ __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
913
+
914
+ // Check that it has indexed interceptor and access checks
915
+ // are not enabled for this object.
916
+ __ movb(rcx, FieldOperand(rcx, Map::kBitFieldOffset));
917
+ __ andb(rcx, Immediate(kSlowCaseBitFieldMask));
918
+ __ cmpb(rcx, Immediate(1 << Map::kHasIndexedInterceptor));
919
+ __ j(not_zero, &slow);
920
+
921
+ // Everything is fine, call runtime.
922
+ __ pop(rcx);
923
+ __ push(rdx); // receiver
924
+ __ push(rax); // key
925
+ __ push(rcx); // return address
926
+
927
+ // Perform tail call to the entry.
928
+ __ TailCallExternalReference(ExternalReference(
929
+ IC_Utility(kKeyedLoadPropertyWithInterceptor)), 2, 1);
930
+
931
+ __ bind(&slow);
932
+ GenerateMiss(masm);
933
+ }
+
+
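The combined bit-field test above accepts a receiver only if it has an indexed interceptor and does not require access checks. A sketch with assumed bit positions (the real ones live in Map's bit-field definitions):

#include <cstdint>

constexpr uint8_t kIsAccessCheckNeededBit = 1u << 3;    // assumed position
constexpr uint8_t kHasIndexedInterceptorBit = 1u << 4;  // assumed position
constexpr uint8_t kSlowCaseBitFieldMask =
    kIsAccessCheckNeededBit | kHasIndexedInterceptorBit;

bool UseIndexedInterceptorFastPath(uint8_t map_bit_field) {
  // Masked bits must equal exactly "has indexed interceptor, no access check".
  return (map_bit_field & kSlowCaseBitFieldMask) == kHasIndexedInterceptorBit;
}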
+ void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
937
+ // ----------- S t a t e -------------
938
+ // -- rax : value
939
+ // -- rcx : key
940
+ // -- rdx : receiver
941
+ // -- rsp[0] : return address
942
+ // -----------------------------------
943
+
944
+ __ pop(rbx);
945
+ __ push(rdx); // receiver
946
+ __ push(rcx); // key
947
+ __ push(rax); // value
948
+ __ push(rbx); // return address
949
+
950
+ // Do tail-call to runtime routine.
951
+ ExternalReference ref = ExternalReference(IC_Utility(kKeyedStoreIC_Miss));
952
+ __ TailCallExternalReference(ref, 3, 1);
953
+ }
954
+
955
+
956
+ void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm) {
957
+ // ----------- S t a t e -------------
958
+ // -- rax : value
959
+ // -- rcx : key
960
+ // -- rdx : receiver
961
+ // -- rsp[0] : return address
962
+ // -----------------------------------
963
+
964
+ __ pop(rbx);
965
+ __ push(rdx); // receiver
966
+ __ push(rcx); // key
967
+ __ push(rax); // value
968
+ __ push(rbx); // return address
969
+
970
+ // Do tail-call to runtime routine.
971
+ __ TailCallRuntime(Runtime::kSetProperty, 3, 1);
972
+ }
973
+
974
+
975
+ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
976
+ // ----------- S t a t e -------------
977
+ // -- rax : value
978
+ // -- rcx : key
979
+ // -- rdx : receiver
980
+ // -- rsp[0] : return address
981
+ // -----------------------------------
982
+ Label slow, slow_with_tagged_index, fast, array, extra, check_pixel_array;
983
+
984
+ // Check that the object isn't a smi.
985
+ __ JumpIfSmi(rdx, &slow_with_tagged_index);
986
+ // Get the map from the receiver.
987
+ __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
988
+ // Check that the receiver does not require access checks. We need
989
+ // to do this because this generic stub does not perform map checks.
990
+ __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
991
+ Immediate(1 << Map::kIsAccessCheckNeeded));
992
+ __ j(not_zero, &slow_with_tagged_index);
993
+ // Check that the key is a smi.
994
+ __ JumpIfNotSmi(rcx, &slow_with_tagged_index);
995
+ __ SmiToInteger32(rcx, rcx);
996
+
997
+ __ CmpInstanceType(rbx, JS_ARRAY_TYPE);
998
+ __ j(equal, &array);
999
+ // Check that the object is some kind of JS object.
1000
+ __ CmpInstanceType(rbx, FIRST_JS_OBJECT_TYPE);
1001
+ __ j(below, &slow);
1002
+
1003
+ // Object case: Check key against length in the elements array.
1004
+ // rax: value
1005
+ // rdx: JSObject
1006
+ // rcx: index
1007
+ __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
1008
+ // Check that the object is in fast mode (not dictionary).
1009
+ __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
1010
+ Heap::kFixedArrayMapRootIndex);
1011
+ __ j(not_equal, &check_pixel_array);
1012
+ __ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), rcx);
1013
+ // rax: value
1014
+ // rbx: FixedArray
1015
+ // rcx: index
1016
+ __ j(above, &fast);
1017
+
1018
+ // Slow case: call runtime.
1019
+ __ bind(&slow);
1020
+ __ Integer32ToSmi(rcx, rcx);
1021
+ __ bind(&slow_with_tagged_index);
1022
+ GenerateRuntimeSetProperty(masm);
1023
+ // Never returns here.
1024
+
1025
+ // Check whether the elements is a pixel array.
1026
+ // rax: value
1027
+ // rdx: receiver
1028
+ // rbx: receiver's elements array
1029
+ // rcx: index, zero-extended.
1030
+ __ bind(&check_pixel_array);
1031
+ __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
1032
+ Heap::kPixelArrayMapRootIndex);
1033
+ __ j(not_equal, &slow);
1034
+ // Check that the value is a smi. If a conversion is needed call into the
1035
+ // runtime to convert and clamp.
1036
+ __ JumpIfNotSmi(rax, &slow);
1037
+ __ cmpl(rcx, FieldOperand(rbx, PixelArray::kLengthOffset));
1038
+ __ j(above_equal, &slow);
1039
+ // No more bailouts to slow case on this path, so key not needed.
1040
+ __ SmiToInteger32(rdi, rax);
1041
+ { // Clamp the value to [0..255].
1042
+ Label done;
1043
+ __ testl(rdi, Immediate(0xFFFFFF00));
1044
+ __ j(zero, &done);
1045
+ __ setcc(negative, rdi); // 1 if negative, 0 if positive.
1046
+ __ decb(rdi); // 0 if negative, 255 if positive.
1047
+ __ bind(&done);
1048
+ }
1049
+ __ movq(rbx, FieldOperand(rbx, PixelArray::kExternalPointerOffset));
1050
+ __ movb(Operand(rbx, rcx, times_1, 0), rdi);
1051
+ __ ret(0);
1052
+
1053
+ // Extra capacity case: Check if there is extra capacity to
1054
+ // perform the store and update the length. Used for adding one
1055
+ // element to the array by writing to array[array.length].
1056
+ __ bind(&extra);
1057
+ // rax: value
1058
+ // rdx: receiver (a JSArray)
1059
+ // rbx: receiver's elements array (a FixedArray)
1060
+ // rcx: index
1061
+ // flags: smicompare (rdx.length(), rcx)
+ __ j(not_equal, &slow); // do not leave holes in the array
1063
+ __ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), rcx);
1064
+ __ j(below_equal, &slow);
1065
+ // Increment index to get new length.
1066
+ __ leal(rdi, Operand(rcx, 1));
1067
+ __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rdi);
1068
+ __ jmp(&fast);
1069
+
1070
+ // Array case: Get the length and the elements array from the JS
1071
+ // array. Check that the array is in fast mode; if it is the
1072
+ // length is always a smi.
1073
+ __ bind(&array);
1074
+ // rax: value
1075
+ // rdx: receiver (a JSArray)
1076
+ // rcx: index
1077
+ __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
1078
+ __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
1079
+ Heap::kFixedArrayMapRootIndex);
1080
+ __ j(not_equal, &slow);
1081
+
1082
+ // Check the key against the length in the array, compute the
1083
+ // address to store into and fall through to fast case.
1084
+ __ SmiCompareInteger32(FieldOperand(rdx, JSArray::kLengthOffset), rcx);
1085
+ __ j(below_equal, &extra);
1086
+
1087
+ // Fast case: Do the store.
1088
+ __ bind(&fast);
1089
+ // rax: value
1090
+ // rbx: receiver's elements array (a FixedArray)
1091
+ // rcx: index
1092
+ Label non_smi_value;
1093
+ __ movq(FieldOperand(rbx, rcx, times_pointer_size, FixedArray::kHeaderSize),
1094
+ rax);
1095
+ __ JumpIfNotSmi(rax, &non_smi_value);
1096
+ __ ret(0);
1097
+ __ bind(&non_smi_value);
1098
+ // Slow case that needs to retain rcx for use by RecordWrite.
1099
+ // Update write barrier for the elements array address.
1100
+ __ movq(rdx, rax);
1101
+ __ RecordWriteNonSmi(rbx, 0, rdx, rcx);
1102
+ __ ret(0);
1103
+ }
+
+
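The clamp used for pixel-array stores above is branch-light: setcc produces 1 for a negative value and 0 otherwise, and the following decrement turns that into 0 or 255. The same trick in plain C++:

#include <cstdint>

uint8_t ClampToByte(int32_t value) {
  if ((value & ~0xFF) == 0) return static_cast<uint8_t>(value);  // already 0..255
  uint8_t is_negative = static_cast<uint8_t>(value < 0);  // setcc(negative, rdi)
  return static_cast<uint8_t>(is_negative - 1);            // decb: 0 or 255
}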
+ void KeyedStoreIC::GenerateExternalArray(MacroAssembler* masm,
1107
+ ExternalArrayType array_type) {
1108
+ // ----------- S t a t e -------------
1109
+ // -- rax : value
1110
+ // -- rcx : key
1111
+ // -- rdx : receiver
1112
+ // -- rsp[0] : return address
1113
+ // -----------------------------------
1114
+ Label slow, check_heap_number;
1115
+
1116
+ // Check that the object isn't a smi.
1117
+ __ JumpIfSmi(rdx, &slow);
1118
+ // Get the map from the receiver.
1119
+ __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
1120
+ // Check that the receiver does not require access checks. We need
1121
+ // to do this because this generic stub does not perform map checks.
1122
+ __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
1123
+ Immediate(1 << Map::kIsAccessCheckNeeded));
1124
+ __ j(not_zero, &slow);
1125
+ // Check that the key is a smi.
1126
+ __ JumpIfNotSmi(rcx, &slow);
1127
+
1128
+ // Check that the object is a JS object.
1129
+ __ CmpInstanceType(rbx, JS_OBJECT_TYPE);
1130
+ __ j(not_equal, &slow);
1131
+
1132
+ // Check that the elements array is the appropriate type of
1133
+ // ExternalArray.
1134
+ // rax: value
1135
+ // rcx: key (a smi)
1136
+ // rdx: receiver (a JSObject)
1137
+ __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
1138
+ __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
1139
+ Heap::RootIndexForExternalArrayType(array_type));
1140
+ __ j(not_equal, &slow);
1141
+
1142
+ // Check that the index is in range.
1143
+ __ SmiToInteger32(rdi, rcx); // Untag the index.
1144
+ __ cmpl(rdi, FieldOperand(rbx, ExternalArray::kLengthOffset));
1145
+ // Unsigned comparison catches both negative and too-large values.
1146
+ __ j(above_equal, &slow);
1147
+
1148
+ // Handle both smis and HeapNumbers in the fast path. Go to the
1149
+ // runtime for all other kinds of values.
1150
+ // rax: value
1151
+ // rcx: key (a smi)
1152
+ // rdx: receiver (a JSObject)
1153
+ // rbx: elements array
1154
+ // rdi: untagged key
1155
+ __ JumpIfNotSmi(rax, &check_heap_number);
1156
+ // No more branches to slow case on this path. Key and receiver not needed.
1157
+ __ SmiToInteger32(rdx, rax);
1158
+ __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset));
1159
+ // rbx: base pointer of external storage
1160
+ switch (array_type) {
1161
+ case kExternalByteArray:
1162
+ case kExternalUnsignedByteArray:
1163
+ __ movb(Operand(rbx, rdi, times_1, 0), rdx);
1164
+ break;
1165
+ case kExternalShortArray:
1166
+ case kExternalUnsignedShortArray:
1167
+ __ movw(Operand(rbx, rdi, times_2, 0), rdx);
1168
+ break;
1169
+ case kExternalIntArray:
1170
+ case kExternalUnsignedIntArray:
1171
+ __ movl(Operand(rbx, rdi, times_4, 0), rdx);
1172
+ break;
1173
+ case kExternalFloatArray:
1174
+ // Need to perform int-to-float conversion.
1175
+ __ cvtlsi2ss(xmm0, rdx);
1176
+ __ movss(Operand(rbx, rdi, times_4, 0), xmm0);
1177
+ break;
1178
+ default:
1179
+ UNREACHABLE();
1180
+ break;
1181
+ }
1182
+ __ ret(0);
1183
+
1184
+ __ bind(&check_heap_number);
1185
+ // rax: value
1186
+ // rcx: key (a smi)
1187
+ // rdx: receiver (a JSObject)
1188
+ // rbx: elements array
1189
+ // rdi: untagged key
1190
+ __ CmpObjectType(rax, HEAP_NUMBER_TYPE, kScratchRegister);
1191
+ __ j(not_equal, &slow);
1192
+ // No more branches to slow case on this path.
1193
+
1194
+ // The WebGL specification leaves the behavior of storing NaN and
1195
+ // +/-Infinity into integer arrays basically undefined. For more
1196
+ // reproducible behavior, convert these to zero.
1197
+ __ movsd(xmm0, FieldOperand(rax, HeapNumber::kValueOffset));
1198
+ __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset));
1199
+ // rdi: untagged index
1200
+ // rbx: base pointer of external storage
1201
+ // xmm0: value as double
+ if (array_type == kExternalFloatArray) {
1203
+ __ cvtsd2ss(xmm0, xmm0);
1204
+ __ movss(Operand(rbx, rdi, times_4, 0), xmm0);
1205
+ __ ret(0);
1206
+ } else {
1207
+ // Need to perform float-to-int conversion.
1208
+ // Test the value for NaN.
1209
+
1210
+ // Convert to int32 and store the low byte/word.
1211
+ // If the value is NaN or +/-infinity, the result is 0x80000000,
1212
+ // which is automatically zero when taken mod 2^n, n < 32.
1213
+ // rdx: value (converted to an untagged integer)
1214
+ // rdi: untagged index
1215
+ // rbx: base pointer of external storage
1216
+ switch (array_type) {
1217
+ case kExternalByteArray:
1218
+ case kExternalUnsignedByteArray:
1219
+ __ cvtsd2si(rdx, xmm0);
1220
+ __ movb(Operand(rbx, rdi, times_1, 0), rdx);
1221
+ break;
1222
+ case kExternalShortArray:
1223
+ case kExternalUnsignedShortArray:
1224
+ __ cvtsd2si(rdx, xmm0);
1225
+ __ movw(Operand(rbx, rdi, times_2, 0), rdx);
1226
+ break;
1227
+ case kExternalIntArray:
1228
+ case kExternalUnsignedIntArray: {
1229
+ // Convert to int64, so that NaN and infinities become
1230
+ // 0x8000000000000000, which is zero mod 2^32.
1231
+ __ cvtsd2siq(rdx, xmm0);
1232
+ __ movl(Operand(rbx, rdi, times_4, 0), rdx);
1233
+ break;
1234
+ }
1235
+ default:
1236
+ UNREACHABLE();
1237
+ break;
1238
+ }
1239
+ __ ret(0);
1240
+ }
1241
+
1242
+ // Slow case: call runtime.
1243
+ __ bind(&slow);
1244
+ GenerateRuntimeSetProperty(masm);
1245
+ }
+
+
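The "convert these to zero" behaviour above relies on cvtsd2siq producing the integer-indefinite value 0x8000000000000000 for NaN, infinities and out-of-range doubles, which becomes 0 once only the low 32 bits are kept. A sketch that models the conversion for the int/uint external array case:

#include <cmath>
#include <cstdint>

uint32_t DoubleToExternalIntSlot(double value) {
  int64_t wide;
  if (std::isnan(value) || value >= 9223372036854775808.0 ||
      value < -9223372036854775808.0) {
    wide = INT64_MIN;                  // the "integer indefinite" result
  } else {
    wide = std::llrint(value);         // cvtsd2siq uses the current rounding mode
  }
  return static_cast<uint32_t>(wide);  // only the low 32 bits get stored
}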
+ // Defined in ic.cc.
1249
+ Object* CallIC_Miss(Arguments args);
1250
+
1251
+
1252
+ static void GenerateCallMiss(MacroAssembler* masm, int argc, IC::UtilityId id) {
1253
+ // ----------- S t a t e -------------
1254
+ // rcx : function name
1255
+ // rsp[0] : return address
1256
+ // rsp[8] : argument argc
1257
+ // rsp[16] : argument argc - 1
1258
+ // ...
1259
+ // rsp[argc * 8] : argument 1
1260
+ // rsp[(argc + 1) * 8] : argument 0 = receiver
1261
+ // -----------------------------------
1262
+
1263
+ if (id == IC::kCallIC_Miss) {
1264
+ __ IncrementCounter(&Counters::call_miss, 1);
1265
+ } else {
1266
+ __ IncrementCounter(&Counters::keyed_call_miss, 1);
1267
+ }
1268
+
1269
+ // Get the receiver of the function from the stack; 1 ~ return address.
1270
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1271
+
1272
+ // Enter an internal frame.
1273
+ __ EnterInternalFrame();
1274
+
1275
+ // Push the receiver and the name of the function.
1276
+ __ push(rdx);
1277
+ __ push(rcx);
1278
+
1279
+ // Call the entry.
1280
+ CEntryStub stub(1);
1281
+ __ movq(rax, Immediate(2));
1282
+ __ movq(rbx, ExternalReference(IC_Utility(id)));
1283
+ __ CallStub(&stub);
1284
+
1285
+ // Move result to rdi and exit the internal frame.
1286
+ __ movq(rdi, rax);
1287
+ __ LeaveInternalFrame();
1288
+
1289
+ // Check if the receiver is a global object of some sort.
1290
+ // This can happen only for regular CallIC but not KeyedCallIC.
1291
+ if (id == IC::kCallIC_Miss) {
1292
+ Label invoke, global;
1293
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); // receiver
1294
+ __ JumpIfSmi(rdx, &invoke);
1295
+ __ CmpObjectType(rdx, JS_GLOBAL_OBJECT_TYPE, rcx);
1296
+ __ j(equal, &global);
1297
+ __ CmpInstanceType(rcx, JS_BUILTINS_OBJECT_TYPE);
1298
+ __ j(not_equal, &invoke);
1299
+
1300
+ // Patch the receiver on the stack.
1301
+ __ bind(&global);
1302
+ __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
1303
+ __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
1304
+ __ bind(&invoke);
1305
+ }
1306
+
1307
+ // Invoke the function.
1308
+ ParameterCount actual(argc);
1309
+ __ InvokeFunction(rdi, actual, JUMP_FUNCTION);
1310
+ }
1311
+
1312
+
1313
+ // The generated code does not accept smi keys.
1314
+ // The generated code falls through if both probes miss.
1315
+ static void GenerateMonomorphicCacheProbe(MacroAssembler* masm,
1316
+ int argc,
1317
+ Code::Kind kind) {
1318
+ // ----------- S t a t e -------------
1319
+ // rcx : function name
1320
+ // rdx : receiver
1321
+ // -----------------------------------
1322
+ Label number, non_number, non_string, boolean, probe, miss;
1323
+
1324
+ // Probe the stub cache.
1325
+ Code::Flags flags =
1326
+ Code::ComputeFlags(kind, NOT_IN_LOOP, MONOMORPHIC, NORMAL, argc);
1327
+ StubCache::GenerateProbe(masm, flags, rdx, rcx, rbx, rax);
1328
+
1329
+ // If the stub cache probing failed, the receiver might be a value.
1330
+ // For value objects, we use the map of the prototype objects for
1331
+ // the corresponding JSValue for the cache and that is what we need
1332
+ // to probe.
1333
+ //
1334
+ // Check for number.
1335
+ __ JumpIfSmi(rdx, &number);
1336
+ __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rbx);
1337
+ __ j(not_equal, &non_number);
1338
+ __ bind(&number);
1339
+ StubCompiler::GenerateLoadGlobalFunctionPrototype(
1340
+ masm, Context::NUMBER_FUNCTION_INDEX, rdx);
1341
+ __ jmp(&probe);
1342
+
1343
+ // Check for string.
1344
+ __ bind(&non_number);
1345
+ __ CmpInstanceType(rbx, FIRST_NONSTRING_TYPE);
1346
+ __ j(above_equal, &non_string);
1347
+ StubCompiler::GenerateLoadGlobalFunctionPrototype(
1348
+ masm, Context::STRING_FUNCTION_INDEX, rdx);
1349
+ __ jmp(&probe);
1350
+
1351
+ // Check for boolean.
1352
+ __ bind(&non_string);
1353
+ __ CompareRoot(rdx, Heap::kTrueValueRootIndex);
1354
+ __ j(equal, &boolean);
1355
+ __ CompareRoot(rdx, Heap::kFalseValueRootIndex);
1356
+ __ j(not_equal, &miss);
1357
+ __ bind(&boolean);
1358
+ StubCompiler::GenerateLoadGlobalFunctionPrototype(
1359
+ masm, Context::BOOLEAN_FUNCTION_INDEX, rdx);
1360
+
1361
+ // Probe the stub cache for the value object.
1362
+ __ bind(&probe);
1363
+ StubCache::GenerateProbe(masm, flags, rdx, rcx, rbx, no_reg);
1364
+
1365
+ __ bind(&miss);
1366
+ }
+
+
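Roughly, the re-probe above maps each primitive receiver kind to the prototype of its wrapper before probing the stub cache a second time; everything else falls through to the miss label. A stand-in sketch, not V8 types:

enum class ReceiverKind { kNumber, kString, kBoolean, kOtherObject };  // stand-in
enum class ProbeTarget {                                               // stand-in
  kNumberPrototype, kStringPrototype, kBooleanPrototype, kFallThroughToMiss
};

ProbeTarget SecondProbeTargetFor(ReceiverKind kind) {
  switch (kind) {
    case ReceiverKind::kNumber:  return ProbeTarget::kNumberPrototype;
    case ReceiverKind::kString:  return ProbeTarget::kStringPrototype;
    case ReceiverKind::kBoolean: return ProbeTarget::kBooleanPrototype;
    default:                     return ProbeTarget::kFallThroughToMiss;
  }
}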
+ static void GenerateFunctionTailCall(MacroAssembler* masm,
1370
+ int argc,
1371
+ Label* miss) {
1372
+ // ----------- S t a t e -------------
1373
+ // rcx : function name
1374
+ // rdi : function
1375
+ // rsp[0] : return address
1376
+ // rsp[8] : argument argc
1377
+ // rsp[16] : argument argc - 1
1378
+ // ...
1379
+ // rsp[argc * 8] : argument 1
1380
+ // rsp[(argc + 1) * 8] : argument 0 = receiver
1381
+ // -----------------------------------
1382
+ __ JumpIfSmi(rdi, miss);
1383
+ // Check that the value is a JavaScript function.
1384
+ __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rdx);
1385
+ __ j(not_equal, miss);
1386
+
1387
+ // Invoke the function.
1388
+ ParameterCount actual(argc);
1389
+ __ InvokeFunction(rdi, actual, JUMP_FUNCTION);
1390
+ }
1391
+
1392
+
1393
+ // The generated code falls through if the call should be handled by runtime.
1394
+ static void GenerateCallNormal(MacroAssembler* masm, int argc) {
1395
+ // ----------- S t a t e -------------
1396
+ // rcx : function name
1397
+ // rsp[0] : return address
1398
+ // rsp[8] : argument argc
1399
+ // rsp[16] : argument argc - 1
1400
+ // ...
1401
+ // rsp[argc * 8] : argument 1
1402
+ // rsp[(argc + 1) * 8] : argument 0 = receiver
1403
+ // -----------------------------------
1404
+ Label miss;
1405
+
1406
+ // Get the receiver of the function from the stack.
1407
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1408
+
1409
+ GenerateStringDictionaryReceiverCheck(masm, rdx, rax, rbx, &miss);
1410
+
1411
+ // rax: elements
1412
+ // Search the dictionary placing the result in rdi.
1413
+ GenerateDictionaryLoad(masm, &miss, rax, rcx, rbx, rdi, rdi);
1414
+
1415
+ GenerateFunctionTailCall(masm, argc, &miss);
1416
+
1417
+ __ bind(&miss);
1418
+ }
1419
+
1420
+
1421
+ void CallIC::GenerateMiss(MacroAssembler* masm, int argc) {
1422
+ // ----------- S t a t e -------------
1423
+ // rcx : function name
1424
+ // rsp[0] : return address
1425
+ // rsp[8] : argument argc
1426
+ // rsp[16] : argument argc - 1
1427
+ // ...
1428
+ // rsp[argc * 8] : argument 1
1429
+ // rsp[(argc + 1) * 8] : argument 0 = receiver
1430
+ // -----------------------------------
1431
+ GenerateCallMiss(masm, argc, IC::kCallIC_Miss);
1432
+ }
1433
+
1434
+
1435
+ void CallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
1436
+ // ----------- S t a t e -------------
1437
+ // rcx : function name
1438
+ // rsp[0] : return address
1439
+ // rsp[8] : argument argc
1440
+ // rsp[16] : argument argc - 1
1441
+ // ...
1442
+ // rsp[argc * 8] : argument 1
1443
+ // rsp[(argc + 1) * 8] : argument 0 = receiver
1444
+ // -----------------------------------
1445
+
1446
+ // Get the receiver of the function from the stack; 1 ~ return address.
1447
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1448
+ GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC);
1449
+ GenerateMiss(masm, argc);
1450
+ }
1451
+
1452
+
1453
+ void CallIC::GenerateNormal(MacroAssembler* masm, int argc) {
1454
+ // ----------- S t a t e -------------
1455
+ // rcx : function name
1456
+ // rsp[0] : return address
1457
+ // rsp[8] : argument argc
1458
+ // rsp[16] : argument argc - 1
1459
+ // ...
1460
+ // rsp[argc * 8] : argument 1
1461
+ // rsp[(argc + 1) * 8] : argument 0 = receiver
1462
+ // -----------------------------------
1463
+
1464
+ GenerateCallNormal(masm, argc);
1465
+ GenerateMiss(masm, argc);
1466
+ }
1467
+
1468
+
1469
+ void KeyedCallIC::GenerateMiss(MacroAssembler* masm, int argc) {
1470
+ // ----------- S t a t e -------------
1471
+ // rcx : function name
1472
+ // rsp[0] : return address
1473
+ // rsp[8] : argument argc
1474
+ // rsp[16] : argument argc - 1
1475
+ // ...
1476
+ // rsp[argc * 8] : argument 1
1477
+ // rsp[(argc + 1) * 8] : argument 0 = receiver
1478
+ // -----------------------------------
1479
+
1480
+ GenerateCallMiss(masm, argc, IC::kKeyedCallIC_Miss);
1481
+ }
1482
+
1483
+
1484
+ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
1485
+ // ----------- S t a t e -------------
1486
+ // rcx : function name
1487
+ // rsp[0] : return address
1488
+ // rsp[8] : argument argc
1489
+ // rsp[16] : argument argc - 1
1490
+ // ...
1491
+ // rsp[argc * 8] : argument 1
1492
+ // rsp[(argc + 1) * 8] : argument 0 = receiver
1493
+ // -----------------------------------
1494
+
1495
+ // Get the receiver of the function from the stack; 1 ~ return address.
1496
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1497
+
1498
+ Label do_call, slow_call, slow_load, slow_reload_receiver;
1499
+ Label check_number_dictionary, check_string, lookup_monomorphic_cache;
1500
+ Label index_smi, index_string;
1501
+
1502
+ // Check that the key is a smi.
1503
+ __ JumpIfNotSmi(rcx, &check_string);
1504
+
1505
+ __ bind(&index_smi);
1506
+ // Now the key is known to be a smi. This place is also jumped to from below
1507
+ // where a numeric string is converted to a smi.
1508
+
1509
+ GenerateKeyedLoadReceiverCheck(
1510
+ masm, rdx, rax, Map::kHasIndexedInterceptor, &slow_call);
1511
+
1512
+ GenerateFastArrayLoad(
1513
+ masm, rdx, rcx, rax, rbx, rdi, &check_number_dictionary, &slow_load);
1514
+ __ IncrementCounter(&Counters::keyed_call_generic_smi_fast, 1);
1515
+
1516
+ __ bind(&do_call);
1517
+ // receiver in rdx is not used after this point.
1518
+ // rcx: key
1519
+ // rdi: function
1520
+ GenerateFunctionTailCall(masm, argc, &slow_call);
1521
+
1522
+ __ bind(&check_number_dictionary);
1523
+ // rax: elements
1524
+ // rcx: smi key
1525
+ // Check whether the elements is a number dictionary.
1526
+ __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
1527
+ Heap::kHashTableMapRootIndex);
1528
+ __ j(not_equal, &slow_load);
1529
+ __ SmiToInteger32(rbx, rcx);
1530
+ // rbx: untagged index
+ GenerateNumberDictionaryLoad(masm, &slow_load, rax, rcx, rbx, r9, rdi, rdi);
1532
+ __ IncrementCounter(&Counters::keyed_call_generic_smi_dict, 1);
1533
+ __ jmp(&do_call);
1534
+
1535
+ __ bind(&slow_load);
1536
+ // This branch is taken when calling KeyedCallIC_Miss is neither required
1537
+ // nor beneficial.
1538
+ __ IncrementCounter(&Counters::keyed_call_generic_slow_load, 1);
1539
+ __ EnterInternalFrame();
1540
+ __ push(rcx); // save the key
1541
+ __ push(rdx); // pass the receiver
1542
+ __ push(rcx); // pass the key
1543
+ __ CallRuntime(Runtime::kKeyedGetProperty, 2);
1544
+ __ pop(rcx); // restore the key
1545
+ __ LeaveInternalFrame();
1546
+ __ movq(rdi, rax);
1547
+ __ jmp(&do_call);
1548
+
1549
+ __ bind(&check_string);
1550
+ GenerateKeyStringCheck(masm, rcx, rax, rbx, &index_string, &slow_call);
1551
+
1552
+ // The key is known to be a symbol.
1553
+ // If the receiver is a regular JS object with slow properties then do
1554
+ // a quick inline probe of the receiver's dictionary.
1555
+ // Otherwise do the monomorphic cache probe.
1556
+ GenerateKeyedLoadReceiverCheck(
1557
+ masm, rdx, rax, Map::kHasNamedInterceptor, &lookup_monomorphic_cache);
1558
+
1559
+ __ movq(rbx, FieldOperand(rdx, JSObject::kPropertiesOffset));
1560
+ __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
1561
+ Heap::kHashTableMapRootIndex);
1562
+ __ j(not_equal, &lookup_monomorphic_cache);
1563
+
1564
+ GenerateDictionaryLoad(masm, &slow_load, rbx, rcx, rax, rdi, rdi);
1565
+ __ IncrementCounter(&Counters::keyed_call_generic_lookup_dict, 1);
1566
+ __ jmp(&do_call);
1567
+
1568
+ __ bind(&lookup_monomorphic_cache);
1569
+ __ IncrementCounter(&Counters::keyed_call_generic_lookup_cache, 1);
1570
+ GenerateMonomorphicCacheProbe(masm, argc, Code::KEYED_CALL_IC);
1571
+ // Fall through on miss.
1572
+
1573
+ __ bind(&slow_call);
1574
+ // This branch is taken if:
1575
+ // - the receiver requires boxing or access check,
1576
+ // - the key is neither smi nor symbol,
1577
+ // - the value loaded is not a function,
1578
+ // - there is hope that the runtime will create a monomorphic call stub
1579
+ // that will get fetched next time.
1580
+ __ IncrementCounter(&Counters::keyed_call_generic_slow, 1);
1581
+ GenerateMiss(masm, argc);
1582
+
1583
+ __ bind(&index_string);
1584
+ GenerateIndexFromHash(masm, rcx, rbx);
1585
+ // Now jump to the place where smi keys are handled.
1586
+ __ jmp(&index_smi);
1587
+ }
+
+
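Stripped of the register details, the control flow of KeyedCallIC::GenerateMegamorphic above dispatches as sketched below. Each boolean compresses one or more of the checks the stub emits, so treat this as a map of the branches rather than a reimplementation.

enum class KeyedCallPath {
  kFastElements,        // GenerateFastArrayLoad succeeded
  kNumberDictionary,    // elements are a number dictionary
  kSlowLoad,            // Runtime::kKeyedGetProperty, then call the result
  kIndexFromHash,       // string key with a cached array index: retry as smi
  kPropertyDictionary,  // symbol key, receiver with dictionary properties
  kMonomorphicCache,    // symbol key, probe the stub cache
  kSlowCall             // everything else: KeyedCallIC miss
};

KeyedCallPath DispatchKeyedCall(bool key_is_smi, bool key_has_cached_index,
                                bool key_is_symbol, bool elements_are_fast,
                                bool elements_are_number_dictionary,
                                bool receiver_has_dictionary_properties) {
  if (key_is_smi) {
    if (elements_are_fast) return KeyedCallPath::kFastElements;
    if (elements_are_number_dictionary) return KeyedCallPath::kNumberDictionary;
    return KeyedCallPath::kSlowLoad;
  }
  if (key_has_cached_index) return KeyedCallPath::kIndexFromHash;
  if (!key_is_symbol) return KeyedCallPath::kSlowCall;
  if (receiver_has_dictionary_properties) return KeyedCallPath::kPropertyDictionary;
  return KeyedCallPath::kMonomorphicCache;
}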
+ void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
1591
+ // ----------- S t a t e -------------
1592
+ // rcx : function name
1593
+ // rsp[0] : return address
1594
+ // rsp[8] : argument argc
1595
+ // rsp[16] : argument argc - 1
1596
+ // ...
1597
+ // rsp[argc * 8] : argument 1
1598
+ // rsp[(argc + 1) * 8] : argument 0 = receiver
1599
+ // -----------------------------------
1600
+
1601
+ GenerateCallNormal(masm, argc);
1602
+ GenerateMiss(masm, argc);
1603
+ }
1604
+
1605
+
1606
+ // The offset from the inlined patch site to the start of the inlined
1607
+ // load instruction.
1608
+ const int LoadIC::kOffsetToLoadInstruction = 20;
1609
+
1610
+
1611
+ void LoadIC::GenerateMiss(MacroAssembler* masm) {
1612
+ // ----------- S t a t e -------------
1613
+ // -- rax : receiver
1614
+ // -- rcx : name
1615
+ // -- rsp[0] : return address
1616
+ // -----------------------------------
1617
+
1618
+ __ IncrementCounter(&Counters::load_miss, 1);
1619
+
1620
+ __ pop(rbx);
1621
+ __ push(rax); // receiver
1622
+ __ push(rcx); // name
1623
+ __ push(rbx); // return address
1624
+
1625
+ // Perform tail call to the entry.
1626
+ ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss));
1627
+ __ TailCallExternalReference(ref, 2, 1);
1628
+ }
1629
+
1630
+
1631
+ void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
1632
+ // ----------- S t a t e -------------
1633
+ // -- rax : receiver
1634
+ // -- rcx : name
1635
+ // -- rsp[0] : return address
1636
+ // -----------------------------------
1637
+ Label miss;
1638
+
1639
+ StubCompiler::GenerateLoadArrayLength(masm, rax, rdx, &miss);
1640
+ __ bind(&miss);
1641
+ StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
1642
+ }
1643
+
1644
+
1645
+ void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
1646
+ // ----------- S t a t e -------------
1647
+ // -- rax : receiver
1648
+ // -- rcx : name
1649
+ // -- rsp[0] : return address
1650
+ // -----------------------------------
1651
+ Label miss;
1652
+
1653
+ StubCompiler::GenerateLoadFunctionPrototype(masm, rax, rdx, rbx, &miss);
1654
+ __ bind(&miss);
1655
+ StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
1656
+ }
1657
+
1658
+
1659
+ void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
1660
+ // ----------- S t a t e -------------
1661
+ // -- rax : receiver
1662
+ // -- rcx : name
1663
+ // -- rsp[0] : return address
1664
+ // -----------------------------------
1665
+
1666
+ // Probe the stub cache.
1667
+ Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC,
1668
+ NOT_IN_LOOP,
1669
+ MONOMORPHIC);
1670
+ StubCache::GenerateProbe(masm, flags, rax, rcx, rbx, rdx);
1671
+
1672
+ // Cache miss: Jump to runtime.
1673
+ StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
1674
+ }
1675
+
1676
+
1677
+ void LoadIC::GenerateNormal(MacroAssembler* masm) {
1678
+ // ----------- S t a t e -------------
1679
+ // -- rax : receiver
1680
+ // -- rcx : name
1681
+ // -- rsp[0] : return address
1682
+ // -----------------------------------
1683
+ Label miss;
1684
+
1685
+ GenerateStringDictionaryReceiverCheck(masm, rax, rdx, rbx, &miss);
1686
+
1687
+ // rdx: elements
1688
+ // Search the dictionary placing the result in rax.
1689
+ GenerateDictionaryLoad(masm, &miss, rdx, rcx, rbx, rdi, rax);
1690
+ __ ret(0);
1691
+
1692
+ // Cache miss: Jump to runtime.
1693
+ __ bind(&miss);
1694
+ GenerateMiss(masm);
1695
+ }
1696
+
1697
+
1698
+ void LoadIC::GenerateStringLength(MacroAssembler* masm) {
1699
+ // ----------- S t a t e -------------
1700
+ // -- rax : receiver
1701
+ // -- rcx : name
1702
+ // -- rsp[0] : return address
1703
+ // -----------------------------------
1704
+ Label miss;
1705
+
1706
+ StubCompiler::GenerateLoadStringLength(masm, rax, rdx, rbx, &miss);
1707
+ __ bind(&miss);
1708
+ StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
1709
+ }
1710
+
1711
+
1712
+ bool LoadIC::PatchInlinedLoad(Address address, Object* map, int offset) {
1713
+ // The address of the instruction following the call.
1714
+ Address test_instruction_address =
1715
+ address + Assembler::kCallTargetAddressOffset;
1716
+ // If the instruction following the call is not a test rax, nothing
1717
+ // was inlined.
1718
+ if (*test_instruction_address != kTestEaxByte) return false;
1719
+
1720
+ Address delta_address = test_instruction_address + 1;
1721
+ // The delta to the start of the map check instruction.
1722
+ int delta = *reinterpret_cast<int*>(delta_address);
1723
+
1724
+ // The map address is the last 8 bytes of the 10-byte
1725
+ // immediate move instruction, so we add 2 to get the
1726
+ // offset to the last 8 bytes.
1727
+ Address map_address = test_instruction_address + delta + 2;
1728
+ *(reinterpret_cast<Object**>(map_address)) = map;
1729
+
1730
+ // The offset is in the 32-bit displacement of a seven byte
1731
+ // memory-to-register move instruction (REX.W 0x88 ModR/M disp32),
1732
+ // so we add 3 to get the offset of the displacement.
1733
+ Address offset_address =
1734
+ test_instruction_address + delta + kOffsetToLoadInstruction + 3;
1735
+ *reinterpret_cast<int*>(offset_address) = offset - kHeapObjectTag;
1736
+ return true;
1737
+ }
+
+
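The pointer arithmetic in PatchInlinedLoad above is easier to follow on a plain byte buffer. The sketch assumes a value for kTestEaxByte (it is defined elsewhere in ic.cc); the +2 and +3 skips come straight from the instruction-length comments in the code, and kOffsetToLoadInstruction is the constant defined for LoadIC above.

#include <cstdint>
#include <cstring>

constexpr uint8_t kTestEaxByte = 0xA9;        // assumed 'test eax, imm32' encoding
constexpr int kOffsetToLoadInstruction = 20;  // as defined for LoadIC above

// Computes the offsets, relative to the byte after the call, of the 8-byte map
// slot and the 4-byte displacement slot; returns false if nothing was inlined.
bool FindLoadPatchSlots(const uint8_t* after_call, int* map_slot_offset,
                        int* displacement_offset) {
  if (after_call[0] != kTestEaxByte) return false;  // no inlined load
  int32_t delta_to_map_check;
  std::memcpy(&delta_to_map_check, after_call + 1, sizeof(delta_to_map_check));
  *map_slot_offset = delta_to_map_check + 2;  // skip REX.W + opcode of movq imm64
  *displacement_offset =
      delta_to_map_check + kOffsetToLoadInstruction + 3;  // skip to the disp32
  return true;
}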
+ // The offset from the inlined patch site to the start of the inlined
1741
+ // store instruction.
1742
+ const int StoreIC::kOffsetToStoreInstruction = 20;
1743
+
1744
+
1745
+ bool StoreIC::PatchInlinedStore(Address address, Object* map, int offset) {
1746
+ // The address of the instruction following the call.
1747
+ Address test_instruction_address =
1748
+ address + Assembler::kCallTargetAddressOffset;
1749
+
1750
+ // If the instruction following the call is not a test rax, nothing
1751
+ // was inlined.
1752
+ if (*test_instruction_address != kTestEaxByte) return false;
1753
+
1754
+ // Extract the encoded deltas from the test rax instruction.
1755
+ Address encoded_offsets_address = test_instruction_address + 1;
1756
+ int encoded_offsets = *reinterpret_cast<int*>(encoded_offsets_address);
1757
+ int delta_to_map_check = -(encoded_offsets & 0xFFFF);
1758
+ int delta_to_record_write = encoded_offsets >> 16;
1759
+
1760
+ // Patch the map to check. The map address is the last 8 bytes of
1761
+ // the 10-byte immediate move instruction.
1762
+ Address map_check_address = test_instruction_address + delta_to_map_check;
1763
+ Address map_address = map_check_address + 2;
1764
+ *(reinterpret_cast<Object**>(map_address)) = map;
1765
+
1766
+ // Patch the offset in the store instruction. The offset is in the
1767
+ // last 4 bytes of a 7 byte register-to-memory move instruction.
1768
+ Address offset_address =
1769
+ map_check_address + StoreIC::kOffsetToStoreInstruction + 3;
1770
+ // The offset should have initial value (kMaxInt - 1), cleared value
1771
+ // (-1) or we should be clearing the inlined version.
1772
+ ASSERT(*reinterpret_cast<int*>(offset_address) == kMaxInt - 1 ||
1773
+ *reinterpret_cast<int*>(offset_address) == -1 ||
1774
+ (offset == 0 && map == Heap::null_value()));
1775
+ *reinterpret_cast<int*>(offset_address) = offset - kHeapObjectTag;
1776
+
1777
+ // Patch the offset in the write-barrier code. The offset is the
1778
+ // last 4 bytes of a 7 byte lea instruction.
1779
+ offset_address = map_check_address + delta_to_record_write + 3;
1780
+ // The offset should have initial value (kMaxInt), cleared value
1781
+ // (-1) or we should be clearing the inlined version.
1782
+ ASSERT(*reinterpret_cast<int*>(offset_address) == kMaxInt ||
1783
+ *reinterpret_cast<int*>(offset_address) == -1 ||
1784
+ (offset == 0 && map == Heap::null_value()));
1785
+ *reinterpret_cast<int*>(offset_address) = offset - kHeapObjectTag;
1786
+
1787
+ return true;
1788
+ }
+
+
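The two deltas in PatchInlinedStore above are packed into one 32-bit immediate: the low 16 bits hold the (to-be-negated) distance back to the map check, the high 16 bits the distance forward to the write-barrier code. A sketch of just the unpacking:

#include <cstdint>

void UnpackStorePatchOffsets(int32_t encoded_offsets, int* delta_to_map_check,
                             int* delta_to_record_write) {
  *delta_to_map_check = -(encoded_offsets & 0xFFFF);
  *delta_to_record_write = encoded_offsets >> 16;  // arithmetic shift, as in the code
}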
+ void StoreIC::GenerateMiss(MacroAssembler* masm) {
1792
+ // ----------- S t a t e -------------
1793
+ // -- rax : value
1794
+ // -- rcx : name
1795
+ // -- rdx : receiver
1796
+ // -- rsp[0] : return address
1797
+ // -----------------------------------
1798
+
1799
+ __ pop(rbx);
1800
+ __ push(rdx); // receiver
1801
+ __ push(rcx); // name
1802
+ __ push(rax); // value
1803
+ __ push(rbx); // return address
1804
+
1805
+ // Perform tail call to the entry.
1806
+ ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_Miss));
1807
+ __ TailCallExternalReference(ref, 3, 1);
1808
+ }
1809
+
1810
+
1811
+ void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
1812
+ // ----------- S t a t e -------------
1813
+ // -- rax : value
1814
+ // -- rcx : name
1815
+ // -- rdx : receiver
1816
+ // -- rsp[0] : return address
1817
+ // -----------------------------------
1818
+
1819
+ // Get the receiver from the stack and probe the stub cache.
1820
+ Code::Flags flags = Code::ComputeFlags(Code::STORE_IC,
1821
+ NOT_IN_LOOP,
1822
+ MONOMORPHIC);
1823
+ StubCache::GenerateProbe(masm, flags, rdx, rcx, rbx, no_reg);
1824
+
1825
+ // Cache miss: Jump to runtime.
1826
+ GenerateMiss(masm);
1827
+ }
1828
+
1829
+
1830
+ void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
1831
+ // ----------- S t a t e -------------
1832
+ // -- rax : value
1833
+ // -- rcx : name
1834
+ // -- rdx : receiver
1835
+ // -- rsp[0] : return address
1836
+ // -----------------------------------
1837
+ //
1838
+ // This accepts as a receiver anything JSObject::SetElementsLength accepts
+ // (currently anything except external and pixel arrays, i.e. anything with
+ // elements of FixedArray type), but it is currently restricted to JSArray.
+ // The value must be a number; only smis are accepted, as they are the most
+ // common case.
1843
+
1844
+ Label miss;
1845
+
1846
+ Register receiver = rdx;
1847
+ Register value = rax;
1848
+ Register scratch = rbx;
1849
+
1850
+ // Check that the receiver isn't a smi.
1851
+ __ JumpIfSmi(receiver, &miss);
1852
+
1853
+ // Check that the object is a JS array.
1854
+ __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
1855
+ __ j(not_equal, &miss);
1856
+
1857
+ // Check that elements are FixedArray.
1858
+ __ movq(scratch, FieldOperand(receiver, JSArray::kElementsOffset));
1859
+ __ CmpObjectType(scratch, FIXED_ARRAY_TYPE, scratch);
1860
+ __ j(not_equal, &miss);
1861
+
1862
+ // Check that value is a smi.
1863
+ __ JumpIfNotSmi(value, &miss);
1864
+
1865
+ // Prepare tail call to StoreIC_ArrayLength.
1866
+ __ pop(scratch);
1867
+ __ push(receiver);
1868
+ __ push(value);
1869
+ __ push(scratch); // return address
1870
+
1871
+ ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_ArrayLength));
1872
+ __ TailCallExternalReference(ref, 2, 1);
1873
+
1874
+ __ bind(&miss);
1875
+
1876
+ GenerateMiss(masm);
1877
+ }
1878
+
1879
+
1880
+ void StoreIC::GenerateNormal(MacroAssembler* masm) {
1881
+ // ----------- S t a t e -------------
1882
+ // -- rax : value
1883
+ // -- rcx : name
1884
+ // -- rdx : receiver
1885
+ // -- rsp[0] : return address
1886
+ // -----------------------------------
1887
+
1888
+ Label miss, restore_miss;
1889
+
1890
+ GenerateStringDictionaryReceiverCheck(masm, rdx, rbx, rdi, &miss);
1891
+
1892
+ GenerateDictionaryStore(masm, &miss, rbx, rcx, rax, r8, r9);
1893
+ __ IncrementCounter(&Counters::store_normal_hit, 1);
1894
+ __ ret(0);
1895
+
1896
+ __ bind(&miss);
1897
+ __ IncrementCounter(&Counters::store_normal_miss, 1);
1898
+ GenerateMiss(masm);
1899
+ }
1900
+
1901
+
1902
+ #undef __
1903
+
1904
+
1905
+ } } // namespace v8::internal
1906
+
1907
+ #endif // V8_TARGET_ARCH_X64