therubyracer 0.7.4 → 0.7.5

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of therubyracer might be problematic; see the release advisory for more details.

Files changed (482) hide show
  1. data/History.txt +11 -0
  2. data/Rakefile +1 -1
  3. data/ext/v8/extconf.rb +0 -18
  4. data/ext/v8/rr.cpp +2 -2
  5. data/ext/v8/upstream/{2.1.10 → 2.3.3}/AUTHORS +1 -0
  6. data/ext/v8/upstream/{2.1.10 → 2.3.3}/ChangeLog +239 -0
  7. data/ext/v8/upstream/{2.1.10 → 2.3.3}/LICENSE +0 -0
  8. data/ext/v8/upstream/{2.1.10 → 2.3.3}/SConstruct +29 -17
  9. data/ext/v8/upstream/{2.1.10 → 2.3.3}/include/v8-debug.h +61 -3
  10. data/ext/v8/upstream/{2.1.10 → 2.3.3}/include/v8-profiler.h +182 -5
  11. data/ext/v8/upstream/{2.1.10 → 2.3.3}/include/v8.h +458 -257
  12. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/SConscript +2 -5
  13. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/accessors.cc +2 -2
  14. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/accessors.h +0 -0
  15. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/allocation.cc +0 -0
  16. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/allocation.h +0 -0
  17. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/api.cc +574 -30
  18. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/api.h +12 -10
  19. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/apinatives.js +0 -0
  20. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/apiutils.h +0 -0
  21. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arguments.h +0 -0
  22. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/assembler-arm-inl.h +38 -15
  23. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/assembler-arm.cc +646 -101
  24. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/assembler-arm.h +174 -15
  25. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/builtins-arm.cc +56 -47
  26. data/ext/v8/upstream/2.3.3/src/arm/codegen-arm-inl.h +48 -0
  27. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/codegen-arm.cc +2957 -1448
  28. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/codegen-arm.h +230 -74
  29. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/constants-arm.cc +25 -1
  30. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/constants-arm.h +16 -1
  31. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/cpu-arm.cc +4 -0
  32. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/debug-arm.cc +76 -6
  33. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/disasm-arm.cc +168 -20
  34. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/fast-codegen-arm.cc +5 -2
  35. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/frames-arm.cc +4 -4
  36. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/frames-arm.h +0 -0
  37. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/full-codegen-arm.cc +1558 -248
  38. data/ext/v8/upstream/2.3.3/src/arm/ic-arm.cc +2258 -0
  39. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/jump-target-arm.cc +55 -103
  40. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/macro-assembler-arm.cc +358 -185
  41. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/macro-assembler-arm.h +136 -41
  42. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/regexp-macro-assembler-arm.cc +26 -5
  43. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/regexp-macro-assembler-arm.h +0 -0
  44. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/register-allocator-arm-inl.h +0 -0
  45. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/register-allocator-arm.cc +4 -0
  46. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/register-allocator-arm.h +0 -0
  47. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/simulator-arm.cc +203 -22
  48. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/simulator-arm.h +7 -0
  49. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/stub-cache-arm.cc +531 -324
  50. data/ext/v8/upstream/2.3.3/src/arm/virtual-frame-arm-inl.h +59 -0
  51. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/virtual-frame-arm.cc +247 -81
  52. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/virtual-frame-arm.h +99 -83
  53. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/array.js +2 -2
  54. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/assembler.cc +6 -13
  55. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/assembler.h +36 -10
  56. data/ext/v8/upstream/2.3.3/src/ast-inl.h +81 -0
  57. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ast.cc +14 -0
  58. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ast.h +20 -35
  59. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/bootstrapper.cc +32 -1
  60. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/bootstrapper.h +0 -4
  61. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/builtins.cc +50 -33
  62. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/builtins.h +2 -0
  63. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/bytecodes-irregexp.h +0 -0
  64. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/cached-powers.h +0 -0
  65. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/char-predicates-inl.h +0 -0
  66. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/char-predicates.h +0 -0
  67. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/checks.cc +0 -0
  68. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/checks.h +8 -6
  69. data/ext/v8/upstream/2.3.3/src/circular-queue-inl.h +53 -0
  70. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/circular-queue.cc +0 -0
  71. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/circular-queue.h +0 -26
  72. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/code-stubs.cc +2 -4
  73. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/code-stubs.h +1 -0
  74. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/code.h +0 -0
  75. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/codegen-inl.h +0 -0
  76. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/codegen.cc +44 -13
  77. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/codegen.h +310 -31
  78. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/compilation-cache.cc +28 -0
  79. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/compilation-cache.h +3 -0
  80. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/compiler.cc +45 -14
  81. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/compiler.h +0 -0
  82. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/contexts.cc +11 -11
  83. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/contexts.h +0 -0
  84. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/conversions-inl.h +0 -0
  85. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/conversions.cc +25 -11
  86. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/conversions.h +0 -0
  87. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/counters.cc +0 -0
  88. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/counters.h +0 -0
  89. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/cpu-profiler-inl.h +2 -1
  90. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/cpu-profiler.cc +68 -24
  91. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/cpu-profiler.h +19 -11
  92. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/cpu.h +0 -0
  93. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/d8-debug.cc +0 -0
  94. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/d8-debug.h +0 -0
  95. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/d8-posix.cc +0 -0
  96. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/d8-readline.cc +0 -0
  97. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/d8-windows.cc +0 -0
  98. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/d8.cc +3 -0
  99. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/d8.h +0 -0
  100. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/d8.js +55 -2
  101. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/data-flow.cc +3 -0
  102. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/data-flow.h +0 -0
  103. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/date.js +68 -137
  104. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/dateparser-inl.h +0 -0
  105. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/dateparser.cc +2 -8
  106. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/dateparser.h +0 -0
  107. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/debug-agent.cc +3 -3
  108. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/debug-agent.h +0 -0
  109. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/debug-debugger.js +81 -23
  110. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/debug.cc +275 -81
  111. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/debug.h +85 -6
  112. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/disasm.h +0 -0
  113. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/disassembler.cc +1 -1
  114. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/disassembler.h +0 -0
  115. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/diy-fp.cc +0 -0
  116. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/diy-fp.h +0 -0
  117. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/double.h +0 -0
  118. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/dtoa-config.c +0 -0
  119. data/ext/v8/upstream/2.3.3/src/dtoa.cc +77 -0
  120. data/ext/v8/upstream/2.3.3/src/dtoa.h +81 -0
  121. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/execution.cc +111 -3
  122. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/execution.h +12 -1
  123. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/factory.cc +25 -3
  124. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/factory.h +16 -9
  125. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/fast-codegen.cc +0 -0
  126. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/fast-codegen.h +0 -0
  127. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/fast-dtoa.cc +2 -9
  128. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/fast-dtoa.h +1 -2
  129. data/ext/v8/upstream/2.3.3/src/fixed-dtoa.cc +405 -0
  130. data/ext/v8/upstream/{2.1.10/src/jump-target-light.cc → 2.3.3/src/fixed-dtoa.h} +22 -53
  131. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/flag-definitions.h +14 -6
  132. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/flags.cc +5 -9
  133. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/flags.h +0 -0
  134. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/flow-graph.cc +0 -0
  135. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/flow-graph.h +0 -0
  136. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/frame-element.cc +0 -0
  137. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/frame-element.h +0 -0
  138. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/frames-inl.h +0 -0
  139. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/frames.cc +5 -2
  140. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/frames.h +1 -0
  141. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/full-codegen.cc +387 -20
  142. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/full-codegen.h +102 -5
  143. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/func-name-inferrer.cc +0 -0
  144. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/func-name-inferrer.h +0 -0
  145. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/global-handles.cc +8 -4
  146. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/global-handles.h +0 -0
  147. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/globals.h +44 -7
  148. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/handles-inl.h +0 -0
  149. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/handles.cc +19 -0
  150. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/handles.h +8 -0
  151. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/hashmap.cc +0 -0
  152. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/hashmap.h +0 -0
  153. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/heap-inl.h +56 -14
  154. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/heap-profiler.cc +85 -1
  155. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/heap-profiler.h +45 -1
  156. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/heap.cc +994 -396
  157. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/heap.h +220 -65
  158. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/assembler-ia32-inl.h +41 -12
  159. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/assembler-ia32.cc +94 -24
  160. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/assembler-ia32.h +32 -4
  161. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/builtins-ia32.cc +42 -30
  162. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/codegen-ia32-inl.h +0 -0
  163. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/codegen-ia32.cc +1758 -916
  164. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/codegen-ia32.h +67 -74
  165. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/cpu-ia32.cc +4 -0
  166. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/debug-ia32.cc +46 -0
  167. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/disasm-ia32.cc +37 -6
  168. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/fast-codegen-ia32.cc +4 -0
  169. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/fast-codegen-ia32.h +0 -0
  170. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/frames-ia32.cc +4 -0
  171. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/frames-ia32.h +0 -0
  172. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/full-codegen-ia32.cc +1465 -198
  173. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/ic-ia32.cc +688 -367
  174. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/jump-target-ia32.cc +4 -0
  175. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/macro-assembler-ia32.cc +82 -180
  176. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/macro-assembler-ia32.h +41 -25
  177. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/regexp-macro-assembler-ia32.cc +68 -24
  178. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/regexp-macro-assembler-ia32.h +1 -2
  179. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/register-allocator-ia32-inl.h +0 -0
  180. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/register-allocator-ia32.cc +4 -0
  181. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/register-allocator-ia32.h +0 -0
  182. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/simulator-ia32.cc +0 -0
  183. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/simulator-ia32.h +0 -0
  184. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/stub-cache-ia32.cc +649 -302
  185. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/virtual-frame-ia32.cc +23 -1
  186. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/virtual-frame-ia32.h +18 -27
  187. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ic-inl.h +30 -3
  188. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ic.cc +384 -66
  189. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ic.h +65 -24
  190. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/interpreter-irregexp.cc +0 -0
  191. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/interpreter-irregexp.h +0 -0
  192. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/json.js +3 -3
  193. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/jsregexp.cc +20 -4
  194. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/jsregexp.h +0 -0
  195. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/jump-target-heavy-inl.h +0 -0
  196. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/jump-target-heavy.cc +79 -13
  197. data/ext/v8/upstream/{2.1.10/src/jump-target.h → 2.3.3/src/jump-target-heavy.h} +5 -47
  198. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/jump-target-inl.h +0 -0
  199. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/jump-target-light-inl.h +16 -2
  200. data/ext/v8/upstream/2.3.3/src/jump-target-light.cc +110 -0
  201. data/ext/v8/upstream/2.3.3/src/jump-target-light.h +192 -0
  202. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/jump-target.cc +0 -64
  203. data/ext/v8/upstream/2.3.3/src/jump-target.h +90 -0
  204. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/list-inl.h +0 -0
  205. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/list.h +0 -0
  206. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/liveedit-debugger.js +141 -28
  207. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/liveedit.cc +19 -7
  208. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/liveedit.h +0 -0
  209. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/log-inl.h +0 -0
  210. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/log-utils.cc +0 -0
  211. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/log-utils.h +0 -0
  212. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/log.cc +12 -11
  213. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/log.h +12 -0
  214. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/macro-assembler.h +0 -16
  215. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/macros.py +21 -0
  216. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mark-compact.cc +120 -109
  217. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mark-compact.h +25 -37
  218. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/math.js +0 -0
  219. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/memory.h +0 -0
  220. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/messages.cc +8 -3
  221. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/messages.h +2 -1
  222. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/messages.js +15 -7
  223. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/assembler-mips-inl.h +0 -0
  224. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/assembler-mips.cc +12 -1
  225. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/assembler-mips.h +4 -1
  226. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/builtins-mips.cc +3 -0
  227. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/codegen-mips-inl.h +0 -0
  228. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/codegen-mips.cc +9 -0
  229. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/codegen-mips.h +1 -0
  230. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/constants-mips.cc +5 -0
  231. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/constants-mips.h +0 -0
  232. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/cpu-mips.cc +4 -0
  233. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/debug-mips.cc +3 -0
  234. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/disasm-mips.cc +3 -0
  235. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/fast-codegen-mips.cc +3 -0
  236. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/frames-mips.cc +3 -0
  237. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/frames-mips.h +0 -0
  238. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/full-codegen-mips.cc +5 -1
  239. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/ic-mips.cc +3 -0
  240. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/jump-target-mips.cc +3 -0
  241. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/macro-assembler-mips.cc +3 -0
  242. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/macro-assembler-mips.h +0 -0
  243. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/register-allocator-mips-inl.h +0 -0
  244. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/register-allocator-mips.cc +3 -0
  245. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/register-allocator-mips.h +0 -0
  246. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/simulator-mips.cc +3 -0
  247. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/simulator-mips.h +0 -0
  248. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/stub-cache-mips.cc +3 -0
  249. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/virtual-frame-mips.cc +3 -0
  250. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/virtual-frame-mips.h +0 -0
  251. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mirror-debugger.js +46 -4
  252. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mksnapshot.cc +0 -0
  253. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/natives.h +0 -0
  254. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/objects-debug.cc +8 -1
  255. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/objects-inl.h +235 -62
  256. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/objects.cc +497 -231
  257. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/objects.h +355 -149
  258. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/oprofile-agent.cc +0 -0
  259. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/oprofile-agent.h +0 -0
  260. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/parser.cc +31 -6
  261. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/parser.h +1 -1
  262. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/platform-freebsd.cc +9 -6
  263. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/platform-linux.cc +26 -6
  264. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/platform-macos.cc +11 -6
  265. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/platform-nullos.cc +0 -0
  266. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/platform-openbsd.cc +6 -0
  267. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/platform-posix.cc +0 -0
  268. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/platform-solaris.cc +69 -23
  269. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/platform-win32.cc +15 -11
  270. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/platform.h +10 -6
  271. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/powers-ten.h +0 -0
  272. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/prettyprinter.cc +0 -0
  273. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/prettyprinter.h +0 -0
  274. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/profile-generator-inl.h +26 -2
  275. data/ext/v8/upstream/2.3.3/src/profile-generator.cc +1830 -0
  276. data/ext/v8/upstream/2.3.3/src/profile-generator.h +853 -0
  277. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/property.cc +0 -0
  278. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/property.h +0 -0
  279. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp-macro-assembler-irregexp-inl.h +0 -0
  280. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp-macro-assembler-irregexp.cc +0 -0
  281. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp-macro-assembler-irregexp.h +0 -0
  282. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp-macro-assembler-tracer.cc +0 -0
  283. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp-macro-assembler-tracer.h +0 -0
  284. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp-macro-assembler.cc +1 -3
  285. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp-macro-assembler.h +0 -0
  286. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp-stack.cc +0 -0
  287. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp-stack.h +0 -0
  288. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp.js +25 -4
  289. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/register-allocator-inl.h +0 -0
  290. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/register-allocator.cc +4 -3
  291. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/register-allocator.h +0 -0
  292. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/rewriter.cc +85 -8
  293. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/rewriter.h +0 -0
  294. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/runtime.cc +547 -221
  295. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/runtime.h +5 -1
  296. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/runtime.js +23 -31
  297. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/scanner.cc +12 -6
  298. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/scanner.h +60 -53
  299. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/scopeinfo.cc +156 -168
  300. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/scopeinfo.h +58 -62
  301. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/scopes.cc +0 -0
  302. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/scopes.h +0 -0
  303. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/serialize.cc +320 -242
  304. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/serialize.h +81 -48
  305. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/shell.h +0 -0
  306. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/simulator.h +0 -0
  307. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/smart-pointer.h +0 -0
  308. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/snapshot-common.cc +0 -0
  309. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/snapshot-empty.cc +0 -0
  310. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/snapshot.h +0 -0
  311. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/spaces-inl.h +177 -74
  312. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/spaces.cc +138 -315
  313. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/spaces.h +155 -124
  314. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/splay-tree-inl.h +0 -0
  315. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/splay-tree.h +0 -0
  316. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/string-stream.cc +0 -0
  317. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/string-stream.h +0 -0
  318. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/string.js +113 -119
  319. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/stub-cache.cc +242 -97
  320. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/stub-cache.h +118 -55
  321. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/third_party/dtoa/COPYING +0 -0
  322. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/third_party/dtoa/dtoa.c +4 -0
  323. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/third_party/valgrind/valgrind.h +0 -0
  324. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/token.cc +0 -0
  325. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/token.h +0 -0
  326. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/top.cc +107 -26
  327. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/top.h +9 -4
  328. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/type-info.cc +0 -0
  329. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/type-info.h +2 -2
  330. data/ext/v8/upstream/2.3.3/src/unbound-queue-inl.h +95 -0
  331. data/ext/v8/upstream/2.3.3/src/unbound-queue.h +67 -0
  332. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/unicode-inl.h +0 -0
  333. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/unicode.cc +0 -0
  334. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/unicode.h +0 -0
  335. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/uri.js +0 -0
  336. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/utils.cc +0 -0
  337. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/utils.h +83 -1
  338. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/v8-counters.cc +0 -0
  339. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/v8-counters.h +20 -0
  340. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/v8.cc +5 -1
  341. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/v8.h +0 -0
  342. data/ext/v8/upstream/2.3.3/src/v8dll-main.cc +39 -0
  343. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/v8natives.js +210 -33
  344. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/v8threads.cc +1 -1
  345. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/v8threads.h +1 -1
  346. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/variables.cc +0 -0
  347. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/variables.h +0 -0
  348. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/version.cc +3 -3
  349. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/version.h +0 -0
  350. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/virtual-frame-heavy-inl.h +40 -0
  351. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/virtual-frame-heavy.cc +0 -0
  352. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/virtual-frame-inl.h +0 -0
  353. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/virtual-frame-light-inl.h +106 -5
  354. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/virtual-frame-light.cc +4 -1
  355. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/virtual-frame.cc +0 -0
  356. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/virtual-frame.h +0 -0
  357. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/vm-state-inl.h +6 -3
  358. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/vm-state.cc +1 -1
  359. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/vm-state.h +6 -4
  360. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/assembler-x64-inl.h +42 -5
  361. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/assembler-x64.cc +285 -53
  362. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/assembler-x64.h +54 -18
  363. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/builtins-x64.cc +31 -33
  364. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/codegen-x64-inl.h +0 -0
  365. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/codegen-x64.cc +9787 -8722
  366. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/codegen-x64.h +82 -47
  367. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/cpu-x64.cc +4 -0
  368. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/debug-x64.cc +55 -6
  369. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/disasm-x64.cc +42 -19
  370. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/fast-codegen-x64.cc +4 -0
  371. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/frames-x64.cc +4 -0
  372. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/frames-x64.h +4 -0
  373. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/full-codegen-x64.cc +1487 -210
  374. data/ext/v8/upstream/2.3.3/src/x64/ic-x64.cc +1907 -0
  375. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/jump-target-x64.cc +4 -0
  376. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/macro-assembler-x64.cc +366 -338
  377. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/macro-assembler-x64.h +83 -38
  378. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/regexp-macro-assembler-x64.cc +82 -23
  379. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/regexp-macro-assembler-x64.h +1 -2
  380. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/register-allocator-x64-inl.h +6 -5
  381. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/register-allocator-x64.cc +4 -0
  382. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/register-allocator-x64.h +1 -1
  383. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/simulator-x64.cc +0 -0
  384. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/simulator-x64.h +0 -0
  385. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/stub-cache-x64.cc +556 -377
  386. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/virtual-frame-x64.cc +197 -98
  387. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/virtual-frame-x64.h +37 -28
  388. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/zone-inl.h +0 -0
  389. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/zone.cc +0 -0
  390. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/zone.h +0 -0
  391. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/codemap.js +0 -0
  392. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/consarray.js +0 -0
  393. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/csvparser.js +0 -0
  394. data/ext/v8/upstream/2.3.3/tools/gc-nvp-trace-processor.py +317 -0
  395. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/generate-ten-powers.scm +0 -0
  396. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/gyp/v8.gyp +87 -20
  397. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/js2c.py +19 -15
  398. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/jsmin.py +0 -0
  399. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/linux-tick-processor +0 -0
  400. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/linux-tick-processor.py +0 -0
  401. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/logreader.js +0 -0
  402. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/mac-nm +0 -0
  403. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/mac-tick-processor +0 -0
  404. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/oprofile/annotate +0 -0
  405. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/oprofile/common +0 -0
  406. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/oprofile/dump +0 -0
  407. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/oprofile/report +0 -0
  408. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/oprofile/reset +0 -0
  409. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/oprofile/run +0 -0
  410. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/oprofile/shutdown +0 -0
  411. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/oprofile/start +0 -0
  412. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/presubmit.py +0 -0
  413. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/process-heap-prof.py +0 -0
  414. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/profile.js +0 -0
  415. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/profile_view.js +0 -0
  416. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/run-valgrind.py +0 -0
  417. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/splaytree.js +0 -0
  418. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/splaytree.py +0 -0
  419. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/stats-viewer.py +25 -13
  420. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/test.py +0 -0
  421. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/tickprocessor-driver.js +0 -0
  422. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/tickprocessor.js +0 -0
  423. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/tickprocessor.py +0 -0
  424. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/utils.py +0 -0
  425. data/ext/v8/upstream/2.3.3/tools/v8.xcodeproj/project.pbxproj +1855 -0
  426. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/README.txt +0 -0
  427. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/arm.vsprops +0 -0
  428. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/common.vsprops +0 -0
  429. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/d8.vcproj +0 -0
  430. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/d8_arm.vcproj +0 -0
  431. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/d8_x64.vcproj +0 -0
  432. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/d8js2c.cmd +0 -0
  433. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/debug.vsprops +0 -0
  434. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/ia32.vsprops +0 -0
  435. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/js2c.cmd +0 -0
  436. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/release.vsprops +0 -0
  437. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8.sln +0 -0
  438. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8.vcproj +0 -0
  439. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_arm.sln +0 -0
  440. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_arm.vcproj +0 -0
  441. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_base.vcproj +40 -0
  442. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_base_arm.vcproj +20 -0
  443. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_base_x64.vcproj +16 -0
  444. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_cctest.vcproj +4 -0
  445. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_cctest_arm.vcproj +0 -0
  446. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_cctest_x64.vcproj +0 -0
  447. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_mksnapshot.vcproj +0 -0
  448. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_mksnapshot_x64.vcproj +0 -0
  449. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_process_sample.vcproj +0 -0
  450. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_process_sample_arm.vcproj +0 -0
  451. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_process_sample_x64.vcproj +0 -0
  452. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_shell_sample.vcproj +0 -0
  453. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_shell_sample_arm.vcproj +0 -0
  454. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_shell_sample_x64.vcproj +0 -0
  455. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_snapshot.vcproj +0 -0
  456. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_snapshot_cc.vcproj +0 -0
  457. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_snapshot_cc_x64.vcproj +0 -0
  458. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_snapshot_x64.vcproj +0 -0
  459. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_x64.sln +0 -0
  460. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_x64.vcproj +0 -0
  461. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/x64.vsprops +0 -0
  462. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/windows-tick-processor.bat +0 -0
  463. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/windows-tick-processor.py +0 -0
  464. data/ext/v8/upstream/Makefile +1 -1
  465. data/ext/v8/v8_template.cpp +94 -2
  466. data/ext/v8/v8_try_catch.cpp +2 -2
  467. data/lib/v8.rb +1 -1
  468. data/lib/v8/access.rb +93 -40
  469. data/lib/v8/cli.rb +1 -1
  470. data/lib/v8/function.rb +14 -2
  471. data/spec/redjs/jsapi_spec.rb +231 -42
  472. data/therubyracer.gemspec +3 -3
  473. metadata +463 -453
  474. data/ext/v8/upstream/2.1.10/src/arm/assembler-thumb2-inl.h +0 -263
  475. data/ext/v8/upstream/2.1.10/src/arm/assembler-thumb2.cc +0 -1878
  476. data/ext/v8/upstream/2.1.10/src/arm/assembler-thumb2.h +0 -1036
  477. data/ext/v8/upstream/2.1.10/src/arm/codegen-arm-inl.h +0 -72
  478. data/ext/v8/upstream/2.1.10/src/arm/ic-arm.cc +0 -1833
  479. data/ext/v8/upstream/2.1.10/src/circular-queue-inl.h +0 -101
  480. data/ext/v8/upstream/2.1.10/src/profile-generator.cc +0 -583
  481. data/ext/v8/upstream/2.1.10/src/profile-generator.h +0 -364
  482. data/ext/v8/upstream/2.1.10/src/x64/ic-x64.cc +0 -1621
@@ -0,0 +1,2258 @@
1
+ // Copyright 2006-2008 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+ #include "v8.h"
29
+
30
+ #if defined(V8_TARGET_ARCH_ARM)
31
+
32
+ #include "assembler-arm.h"
33
+ #include "codegen.h"
34
+ #include "codegen-inl.h"
35
+ #include "disasm.h"
36
+ #include "ic-inl.h"
37
+ #include "runtime.h"
38
+ #include "stub-cache.h"
39
+
40
+ namespace v8 {
41
+ namespace internal {
42
+
43
+
44
+ // ----------------------------------------------------------------------------
45
+ // Static IC stub generators.
46
+ //
47
+
48
+ #define __ ACCESS_MASM(masm)
49
+
50
+
51
+ static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
52
+ Register type,
53
+ Label* global_object) {
54
+ // Register usage:
55
+ // type: holds the receiver instance type on entry.
56
+ __ cmp(type, Operand(JS_GLOBAL_OBJECT_TYPE));
57
+ __ b(eq, global_object);
58
+ __ cmp(type, Operand(JS_BUILTINS_OBJECT_TYPE));
59
+ __ b(eq, global_object);
60
+ __ cmp(type, Operand(JS_GLOBAL_PROXY_TYPE));
61
+ __ b(eq, global_object);
62
+ }
63
+
64
+
65
+ // Generated code falls through if the receiver is a regular non-global
66
+ // JS object with slow properties and no interceptors.
67
+ static void GenerateStringDictionaryReceiverCheck(MacroAssembler* masm,
68
+ Register receiver,
69
+ Register elements,
70
+ Register t0,
71
+ Register t1,
72
+ Label* miss) {
73
+ // Register usage:
74
+ // receiver: holds the receiver on entry and is unchanged.
75
+ // elements: holds the property dictionary on fall through.
76
+ // Scratch registers:
77
+ // t0: used to holds the receiver map.
78
+ // t1: used to holds the receiver instance type, receiver bit mask and
79
+ // elements map.
80
+
81
+ // Check that the receiver isn't a smi.
82
+ __ tst(receiver, Operand(kSmiTagMask));
83
+ __ b(eq, miss);
84
+
85
+ // Check that the receiver is a valid JS object.
86
+ __ CompareObjectType(receiver, t0, t1, FIRST_JS_OBJECT_TYPE);
87
+ __ b(lt, miss);
88
+
89
+ // If this assert fails, we have to check upper bound too.
90
+ ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
91
+
92
+ GenerateGlobalInstanceTypeCheck(masm, t1, miss);
93
+
94
+ // Check that the global object does not require access checks.
95
+ __ ldrb(t1, FieldMemOperand(t0, Map::kBitFieldOffset));
96
+ __ tst(t1, Operand((1 << Map::kIsAccessCheckNeeded) |
97
+ (1 << Map::kHasNamedInterceptor)));
98
+ __ b(nz, miss);
99
+
100
+ __ ldr(elements, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
101
+ __ ldr(t1, FieldMemOperand(elements, HeapObject::kMapOffset));
102
+ __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
103
+ __ cmp(t1, ip);
104
+ __ b(nz, miss);
105
+ }
106
+
107
+
108
+ // Probe the string dictionary in the |elements| register. Jump to the
109
+ // |done| label if a property with the given name is found. Jump to
110
+ // the |miss| label otherwise.
111
+ static void GenerateStringDictionaryProbes(MacroAssembler* masm,
112
+ Label* miss,
113
+ Label* done,
114
+ Register elements,
115
+ Register name,
116
+ Register scratch1,
117
+ Register scratch2) {
118
+ // Compute the capacity mask.
119
+ const int kCapacityOffset = StringDictionary::kHeaderSize +
120
+ StringDictionary::kCapacityIndex * kPointerSize;
121
+ __ ldr(scratch1, FieldMemOperand(elements, kCapacityOffset));
122
+ __ mov(scratch1, Operand(scratch1, ASR, kSmiTagSize)); // convert smi to int
123
+ __ sub(scratch1, scratch1, Operand(1));
124
+
125
+ const int kElementsStartOffset = StringDictionary::kHeaderSize +
126
+ StringDictionary::kElementsStartIndex * kPointerSize;
127
+
128
+ // Generate an unrolled loop that performs a few probes before
129
+ // giving up. Measurements done on Gmail indicate that 2 probes
130
+ // cover ~93% of loads from dictionaries.
131
+ static const int kProbes = 4;
132
+ for (int i = 0; i < kProbes; i++) {
133
+ // Compute the masked index: (hash + i + i * i) & mask.
134
+ __ ldr(scratch2, FieldMemOperand(name, String::kHashFieldOffset));
135
+ if (i > 0) {
136
+ // Add the probe offset (i + i * i) left shifted to avoid right shifting
137
+ // the hash in a separate instruction. The value hash + i + i * i is right
138
+ // shifted in the following and instruction.
139
+ ASSERT(StringDictionary::GetProbeOffset(i) <
140
+ 1 << (32 - String::kHashFieldOffset));
141
+ __ add(scratch2, scratch2, Operand(
142
+ StringDictionary::GetProbeOffset(i) << String::kHashShift));
143
+ }
144
+ __ and_(scratch2, scratch1, Operand(scratch2, LSR, String::kHashShift));
145
+
146
+ // Scale the index by multiplying by the element size.
147
+ ASSERT(StringDictionary::kEntrySize == 3);
148
+ // scratch2 = scratch2 * 3.
149
+ __ add(scratch2, scratch2, Operand(scratch2, LSL, 1));
150
+
151
+ // Check if the key is identical to the name.
152
+ __ add(scratch2, elements, Operand(scratch2, LSL, 2));
153
+ __ ldr(ip, FieldMemOperand(scratch2, kElementsStartOffset));
154
+ __ cmp(name, Operand(ip));
155
+ if (i != kProbes - 1) {
156
+ __ b(eq, done);
157
+ } else {
158
+ __ b(ne, miss);
159
+ }
160
+ }
161
+ }
162
+
163
+
164
+ // Helper function used from LoadIC/CallIC GenerateNormal.
165
+ //
166
+ // elements: Property dictionary. It is not clobbered if a jump to the miss
167
+ // label is done.
168
+ // name: Property name. It is not clobbered if a jump to the miss label is
169
+ // done
170
+ // result: Register for the result. It is only updated if a jump to the miss
171
+ // label is not done. Can be the same as elements or name clobbering
172
+ // one of these in the case of not jumping to the miss label.
173
+ // The two scratch registers need to be different from elements, name and
174
+ // result.
175
+ // The generated code assumes that the receiver has slow properties,
176
+ // is not a global object and does not have interceptors.
177
+ static void GenerateDictionaryLoad(MacroAssembler* masm,
178
+ Label* miss,
179
+ Register elements,
180
+ Register name,
181
+ Register result,
182
+ Register scratch1,
183
+ Register scratch2) {
184
+ // Main use of the scratch registers.
185
+ // scratch1: Used as temporary and to hold the capacity of the property
186
+ // dictionary.
187
+ // scratch2: Used as temporary.
188
+ Label done;
189
+
190
+ // Probe the dictionary.
191
+ GenerateStringDictionaryProbes(masm,
192
+ miss,
193
+ &done,
194
+ elements,
195
+ name,
196
+ scratch1,
197
+ scratch2);
198
+
199
+ // If probing finds an entry check that the value is a normal
200
+ // property.
201
+ __ bind(&done); // scratch2 == elements + 4 * index
202
+ const int kElementsStartOffset = StringDictionary::kHeaderSize +
203
+ StringDictionary::kElementsStartIndex * kPointerSize;
204
+ const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
205
+ __ ldr(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
206
+ __ tst(scratch1, Operand(PropertyDetails::TypeField::mask() << kSmiTagSize));
207
+ __ b(ne, miss);
208
+
209
+ // Get the value at the masked, scaled index and return.
210
+ __ ldr(result,
211
+ FieldMemOperand(scratch2, kElementsStartOffset + 1 * kPointerSize));
212
+ }
213
+
214
+
215
+ // Helper function used from StoreIC::GenerateNormal.
216
+ //
217
+ // elements: Property dictionary. It is not clobbered if a jump to the miss
218
+ // label is done.
219
+ // name: Property name. It is not clobbered if a jump to the miss label is
220
+ // done
221
+ // value: The value to store.
222
+ // The two scratch registers need to be different from elements, name and
223
+ // result.
224
+ // The generated code assumes that the receiver has slow properties,
225
+ // is not a global object and does not have interceptors.
226
+ static void GenerateDictionaryStore(MacroAssembler* masm,
227
+ Label* miss,
228
+ Register elements,
229
+ Register name,
230
+ Register value,
231
+ Register scratch1,
232
+ Register scratch2) {
233
+ // Main use of the scratch registers.
234
+ // scratch1: Used as temporary and to hold the capacity of the property
235
+ // dictionary.
236
+ // scratch2: Used as temporary.
237
+ Label done;
238
+
239
+ // Probe the dictionary.
240
+ GenerateStringDictionaryProbes(masm,
241
+ miss,
242
+ &done,
243
+ elements,
244
+ name,
245
+ scratch1,
246
+ scratch2);
247
+
248
+ // If probing finds an entry in the dictionary check that the value
249
+ // is a normal property that is not read only.
250
+ __ bind(&done); // scratch2 == elements + 4 * index
251
+ const int kElementsStartOffset = StringDictionary::kHeaderSize +
252
+ StringDictionary::kElementsStartIndex * kPointerSize;
253
+ const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
254
+ const int kTypeAndReadOnlyMask
255
+ = (PropertyDetails::TypeField::mask() |
256
+ PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
257
+ __ ldr(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
258
+ __ tst(scratch1, Operand(kTypeAndReadOnlyMask));
259
+ __ b(ne, miss);
260
+
261
+ // Store the value at the masked, scaled index and return.
262
+ const int kValueOffset = kElementsStartOffset + kPointerSize;
263
+ __ add(scratch2, scratch2, Operand(kValueOffset - kHeapObjectTag));
264
+ __ str(value, MemOperand(scratch2));
265
+
266
+ // Update the write barrier. Make sure not to clobber the value.
267
+ __ mov(scratch1, value);
268
+ __ RecordWrite(elements, scratch2, scratch1);
269
+ }
270
+
271
+
272
+ static void GenerateNumberDictionaryLoad(MacroAssembler* masm,
273
+ Label* miss,
274
+ Register elements,
275
+ Register key,
276
+ Register result,
277
+ Register t0,
278
+ Register t1,
279
+ Register t2) {
280
+ // Register use:
281
+ //
282
+ // elements - holds the slow-case elements of the receiver on entry.
283
+ // Unchanged unless 'result' is the same register.
284
+ //
285
+ // key - holds the smi key on entry.
286
+ // Unchanged unless 'result' is the same register.
287
+ //
288
+ // result - holds the result on exit if the load succeeded.
289
+ // Allowed to be the same as 'key' or 'result'.
290
+ // Unchanged on bailout so 'key' or 'result' can be used
291
+ // in further computation.
292
+ //
293
+ // Scratch registers:
294
+ //
295
+ // t0 - holds the untagged key on entry and holds the hash once computed.
296
+ //
297
+ // t1 - used to hold the capacity mask of the dictionary
298
+ //
299
+ // t2 - used for the index into the dictionary.
300
+ Label done;
301
+
302
+ // Compute the hash code from the untagged key. This must be kept in sync
303
+ // with ComputeIntegerHash in utils.h.
304
+ //
305
+ // hash = ~hash + (hash << 15);
306
+ __ mvn(t1, Operand(t0));
307
+ __ add(t0, t1, Operand(t0, LSL, 15));
308
+ // hash = hash ^ (hash >> 12);
309
+ __ eor(t0, t0, Operand(t0, LSR, 12));
310
+ // hash = hash + (hash << 2);
311
+ __ add(t0, t0, Operand(t0, LSL, 2));
312
+ // hash = hash ^ (hash >> 4);
313
+ __ eor(t0, t0, Operand(t0, LSR, 4));
314
+ // hash = hash * 2057;
315
+ __ mov(t1, Operand(2057));
316
+ __ mul(t0, t0, t1);
317
+ // hash = hash ^ (hash >> 16);
318
+ __ eor(t0, t0, Operand(t0, LSR, 16));
319
+
320
+ // Compute the capacity mask.
321
+ __ ldr(t1, FieldMemOperand(elements, NumberDictionary::kCapacityOffset));
322
+ __ mov(t1, Operand(t1, ASR, kSmiTagSize)); // convert smi to int
323
+ __ sub(t1, t1, Operand(1));
324
+
325
+ // Generate an unrolled loop that performs a few probes before giving up.
326
+ static const int kProbes = 4;
327
+ for (int i = 0; i < kProbes; i++) {
328
+ // Use t2 for index calculations and keep the hash intact in t0.
329
+ __ mov(t2, t0);
330
+ // Compute the masked index: (hash + i + i * i) & mask.
331
+ if (i > 0) {
332
+ __ add(t2, t2, Operand(NumberDictionary::GetProbeOffset(i)));
333
+ }
334
+ __ and_(t2, t2, Operand(t1));
335
+
336
+ // Scale the index by multiplying by the element size.
337
+ ASSERT(NumberDictionary::kEntrySize == 3);
338
+ __ add(t2, t2, Operand(t2, LSL, 1)); // t2 = t2 * 3
339
+
340
+ // Check if the key is identical to the name.
341
+ __ add(t2, elements, Operand(t2, LSL, kPointerSizeLog2));
342
+ __ ldr(ip, FieldMemOperand(t2, NumberDictionary::kElementsStartOffset));
343
+ __ cmp(key, Operand(ip));
344
+ if (i != kProbes - 1) {
345
+ __ b(eq, &done);
346
+ } else {
347
+ __ b(ne, miss);
348
+ }
349
+ }
350
+
351
+ __ bind(&done);
352
+ // Check that the value is a normal property.
353
+ // t2: elements + (index * kPointerSize)
354
+ const int kDetailsOffset =
355
+ NumberDictionary::kElementsStartOffset + 2 * kPointerSize;
356
+ __ ldr(t1, FieldMemOperand(t2, kDetailsOffset));
357
+ __ tst(t1, Operand(Smi::FromInt(PropertyDetails::TypeField::mask())));
358
+ __ b(ne, miss);
359
+
360
+ // Get the value at the masked, scaled index and return.
361
+ const int kValueOffset =
362
+ NumberDictionary::kElementsStartOffset + kPointerSize;
363
+ __ ldr(result, FieldMemOperand(t2, kValueOffset));
364
+ }
365
+
366
+
367
+ void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
368
+ // ----------- S t a t e -------------
369
+ // -- r2 : name
370
+ // -- lr : return address
371
+ // -- r0 : receiver
372
+ // -- sp[0] : receiver
373
+ // -----------------------------------
374
+ Label miss;
375
+
376
+ StubCompiler::GenerateLoadArrayLength(masm, r0, r3, &miss);
377
+ __ bind(&miss);
378
+ StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
379
+ }
380
+
381
+
382
+ void LoadIC::GenerateStringLength(MacroAssembler* masm) {
383
+ // ----------- S t a t e -------------
384
+ // -- r2 : name
385
+ // -- lr : return address
386
+ // -- r0 : receiver
387
+ // -- sp[0] : receiver
388
+ // -----------------------------------
389
+ Label miss;
390
+
391
+ StubCompiler::GenerateLoadStringLength(masm, r0, r1, r3, &miss);
392
+ // Cache miss: Jump to runtime.
393
+ __ bind(&miss);
394
+ StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
395
+ }
396
+
397
+
398
+ void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
399
+ // ----------- S t a t e -------------
400
+ // -- r2 : name
401
+ // -- lr : return address
402
+ // -- r0 : receiver
403
+ // -- sp[0] : receiver
404
+ // -----------------------------------
405
+ Label miss;
406
+
407
+ StubCompiler::GenerateLoadFunctionPrototype(masm, r0, r1, r3, &miss);
408
+ __ bind(&miss);
409
+ StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
410
+ }
411
+
412
+
413
+ // Checks the receiver for special cases (value type, slow case bits).
414
+ // Falls through for regular JS object.
415
+ static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
416
+ Register receiver,
417
+ Register scratch1,
418
+ Register scratch2,
419
+ int interceptor_bit,
420
+ Label* slow) {
421
+ // Check that the object isn't a smi.
422
+ __ BranchOnSmi(receiver, slow);
423
+ // Get the map of the receiver.
424
+ __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
425
+ // Check bit field.
426
+ __ ldrb(scratch2, FieldMemOperand(scratch1, Map::kBitFieldOffset));
427
+ __ tst(scratch2,
428
+ Operand((1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit)));
429
+ __ b(nz, slow);
430
+ // Check that the object is some kind of JS object EXCEPT JS Value type.
431
+ // In the case that the object is a value-wrapper object,
432
+ // we enter the runtime system to make sure that indexing into string
433
+ // objects work as intended.
434
+ ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
435
+ __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
436
+ __ cmp(scratch1, Operand(JS_OBJECT_TYPE));
437
+ __ b(lt, slow);
438
+ }
439
+
440
+
441
+ // Loads an indexed element from a fast case array.
442
+ static void GenerateFastArrayLoad(MacroAssembler* masm,
443
+ Register receiver,
444
+ Register key,
445
+ Register elements,
446
+ Register scratch1,
447
+ Register scratch2,
448
+ Register result,
449
+ Label* not_fast_array,
450
+ Label* out_of_range) {
451
+ // Register use:
452
+ //
453
+ // receiver - holds the receiver on entry.
454
+ // Unchanged unless 'result' is the same register.
455
+ //
456
+ // key - holds the smi key on entry.
457
+ // Unchanged unless 'result' is the same register.
458
+ //
459
+ // elements - holds the elements of the receiver on exit.
460
+ //
461
+ // result - holds the result on exit if the load succeeded.
462
+ // Allowed to be the the same as 'receiver' or 'key'.
463
+ // Unchanged on bailout so 'receiver' and 'key' can be safely
464
+ // used by further computation.
465
+ //
466
+ // Scratch registers:
467
+ //
468
+ // scratch1 - used to hold elements map and elements length.
469
+ // Holds the elements map if not_fast_array branch is taken.
470
+ //
471
+ // scratch2 - used to hold the loaded value.
472
+
473
+ __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
474
+ // Check that the object is in fast mode (not dictionary).
475
+ __ ldr(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
476
+ __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
477
+ __ cmp(scratch1, ip);
478
+ __ b(ne, not_fast_array);
479
+ // Check that the key (index) is within bounds.
480
+ __ ldr(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset));
481
+ __ cmp(key, Operand(scratch1));
482
+ __ b(hs, out_of_range);
483
+ // Fast case: Do the load.
484
+ __ add(scratch1, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
485
+ // The key is a smi.
486
+ ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
487
+ __ ldr(scratch2,
488
+ MemOperand(scratch1, key, LSL, kPointerSizeLog2 - kSmiTagSize));
489
+ __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
490
+ __ cmp(scratch2, ip);
491
+ // In case the loaded value is the_hole we have to consult GetProperty
492
+ // to ensure the prototype chain is searched.
493
+ __ b(eq, out_of_range);
494
+ __ mov(result, scratch2);
495
+ }
496
+
497
+
498
+ // Checks whether a key is an array index string or a symbol string.
499
+ // Falls through if a key is a symbol.
500
+ static void GenerateKeyStringCheck(MacroAssembler* masm,
501
+ Register key,
502
+ Register map,
503
+ Register hash,
504
+ Label* index_string,
505
+ Label* not_symbol) {
506
+ // The key is not a smi.
507
+ // Is it a string?
508
+ __ CompareObjectType(key, map, hash, FIRST_NONSTRING_TYPE);
509
+ __ b(ge, not_symbol);
510
+
511
+ // Is the string an array index, with cached numeric value?
512
+ __ ldr(hash, FieldMemOperand(key, String::kHashFieldOffset));
513
+ __ tst(hash, Operand(String::kContainsCachedArrayIndexMask));
514
+ __ b(eq, index_string);
515
+
516
+ // Is the string a symbol?
517
+ // map: key map
518
+ __ ldrb(hash, FieldMemOperand(map, Map::kInstanceTypeOffset));
519
+ ASSERT(kSymbolTag != 0);
520
+ __ tst(hash, Operand(kIsSymbolMask));
521
+ __ b(eq, not_symbol);
522
+ }
523
+
524
+
525
+ // Picks out an array index from the hash field.
526
+ static void GenerateIndexFromHash(MacroAssembler* masm,
527
+ Register key,
528
+ Register hash) {
529
+ // Register use:
530
+ // key - holds the overwritten key on exit.
531
+ // hash - holds the key's hash. Clobbered.
532
+
533
+ // If the hash field contains an array index pick it out. The assert checks
534
+ // that the constants for the maximum number of digits for an array index
535
+ // cached in the hash field and the number of bits reserved for it does not
536
+ // conflict.
537
+ ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
538
+ (1 << String::kArrayIndexValueBits));
539
+ // We want the smi-tagged index in key. kArrayIndexValueMask has zeros in
540
+ // the low kHashShift bits.
541
+ ASSERT(String::kHashShift >= kSmiTagSize);
542
+ // Here we actually clobber the key which will be used if calling into
543
+ // runtime later. However as the new key is the numeric value of a string key
544
+ // there is no difference in using either key.
545
+ ASSERT(String::kHashShift >= kSmiTagSize);
546
+ __ Ubfx(hash, hash, String::kHashShift, String::kArrayIndexValueBits);
547
+ __ mov(key, Operand(hash, LSL, kSmiTagSize));
548
+ }
549
+
550
+
551
+ // Defined in ic.cc.
552
+ Object* CallIC_Miss(Arguments args);
553
+
554
+ // The generated code does not accept smi keys.
555
+ // The generated code falls through if both probes miss.
556
+ static void GenerateMonomorphicCacheProbe(MacroAssembler* masm,
557
+ int argc,
558
+ Code::Kind kind) {
559
+ // ----------- S t a t e -------------
560
+ // -- r1 : receiver
561
+ // -- r2 : name
562
+ // -----------------------------------
563
+ Label number, non_number, non_string, boolean, probe, miss;
564
+
565
+ // Probe the stub cache.
566
+ Code::Flags flags =
567
+ Code::ComputeFlags(kind, NOT_IN_LOOP, MONOMORPHIC, NORMAL, argc);
568
+ StubCache::GenerateProbe(masm, flags, r1, r2, r3, no_reg);
569
+
570
+ // If the stub cache probing failed, the receiver might be a value.
571
+ // For value objects, we use the map of the prototype objects for
572
+ // the corresponding JSValue for the cache and that is what we need
573
+ // to probe.
574
+ //
575
+ // Check for number.
576
+ __ tst(r1, Operand(kSmiTagMask));
577
+ __ b(eq, &number);
578
+ __ CompareObjectType(r1, r3, r3, HEAP_NUMBER_TYPE);
579
+ __ b(ne, &non_number);
580
+ __ bind(&number);
581
+ StubCompiler::GenerateLoadGlobalFunctionPrototype(
582
+ masm, Context::NUMBER_FUNCTION_INDEX, r1);
583
+ __ b(&probe);
584
+
585
+ // Check for string.
586
+ __ bind(&non_number);
587
+ __ cmp(r3, Operand(FIRST_NONSTRING_TYPE));
588
+ __ b(hs, &non_string);
589
+ StubCompiler::GenerateLoadGlobalFunctionPrototype(
590
+ masm, Context::STRING_FUNCTION_INDEX, r1);
591
+ __ b(&probe);
592
+
593
+ // Check for boolean.
594
+ __ bind(&non_string);
595
+ __ LoadRoot(ip, Heap::kTrueValueRootIndex);
596
+ __ cmp(r1, ip);
597
+ __ b(eq, &boolean);
598
+ __ LoadRoot(ip, Heap::kFalseValueRootIndex);
599
+ __ cmp(r1, ip);
600
+ __ b(ne, &miss);
601
+ __ bind(&boolean);
602
+ StubCompiler::GenerateLoadGlobalFunctionPrototype(
603
+ masm, Context::BOOLEAN_FUNCTION_INDEX, r1);
604
+
605
+ // Probe the stub cache for the value object.
606
+ __ bind(&probe);
607
+ StubCache::GenerateProbe(masm, flags, r1, r2, r3, no_reg);
608
+
609
+ __ bind(&miss);
610
+ }
611
+
612
+
613
+ static void GenerateFunctionTailCall(MacroAssembler* masm,
614
+ int argc,
615
+ Label* miss,
616
+ Register scratch) {
617
+ // r1: function
618
+
619
+ // Check that the value isn't a smi.
620
+ __ tst(r1, Operand(kSmiTagMask));
621
+ __ b(eq, miss);
622
+
623
+ // Check that the value is a JSFunction.
624
+ __ CompareObjectType(r1, scratch, scratch, JS_FUNCTION_TYPE);
625
+ __ b(ne, miss);
626
+
627
+ // Invoke the function.
628
+ ParameterCount actual(argc);
629
+ __ InvokeFunction(r1, actual, JUMP_FUNCTION);
630
+ }
631
+
632
+
633
+ static void GenerateCallNormal(MacroAssembler* masm, int argc) {
634
+ // ----------- S t a t e -------------
635
+ // -- r2 : name
636
+ // -- lr : return address
637
+ // -----------------------------------
638
+ Label miss;
639
+
640
+ // Get the receiver of the function from the stack into r1.
641
+ __ ldr(r1, MemOperand(sp, argc * kPointerSize));
642
+
643
+ GenerateStringDictionaryReceiverCheck(masm, r1, r0, r3, r4, &miss);
644
+
645
+ // r0: elements
646
+ // Search the dictionary - put result in register r1.
647
+ GenerateDictionaryLoad(masm, &miss, r0, r2, r1, r3, r4);
648
+
649
+ GenerateFunctionTailCall(masm, argc, &miss, r4);
650
+
651
+ __ bind(&miss);
652
+ }
653
+
654
+
655
+ static void GenerateCallMiss(MacroAssembler* masm, int argc, IC::UtilityId id) {
656
+ // ----------- S t a t e -------------
657
+ // -- r2 : name
658
+ // -- lr : return address
659
+ // -----------------------------------
660
+
661
+ if (id == IC::kCallIC_Miss) {
662
+ __ IncrementCounter(&Counters::call_miss, 1, r3, r4);
663
+ } else {
664
+ __ IncrementCounter(&Counters::keyed_call_miss, 1, r3, r4);
665
+ }
666
+
667
+ // Get the receiver of the function from the stack.
668
+ __ ldr(r3, MemOperand(sp, argc * kPointerSize));
669
+
670
+ __ EnterInternalFrame();
671
+
672
+ // Push the receiver and the name of the function.
673
+ __ Push(r3, r2);
674
+
675
+ // Call the entry.
676
+ __ mov(r0, Operand(2));
677
+ __ mov(r1, Operand(ExternalReference(IC_Utility(id))));
678
+
679
+ CEntryStub stub(1);
680
+ __ CallStub(&stub);
681
+
682
+ // Move result to r1 and leave the internal frame.
683
+ __ mov(r1, Operand(r0));
684
+ __ LeaveInternalFrame();
685
+
686
+ // Check if the receiver is a global object of some sort.
687
+ // This can happen only for regular CallIC but not KeyedCallIC.
688
+ if (id == IC::kCallIC_Miss) {
689
+ Label invoke, global;
690
+ __ ldr(r2, MemOperand(sp, argc * kPointerSize)); // receiver
691
+ __ tst(r2, Operand(kSmiTagMask));
692
+ __ b(eq, &invoke);
693
+ __ CompareObjectType(r2, r3, r3, JS_GLOBAL_OBJECT_TYPE);
694
+ __ b(eq, &global);
695
+ __ cmp(r3, Operand(JS_BUILTINS_OBJECT_TYPE));
696
+ __ b(ne, &invoke);
697
+
698
+ // Patch the receiver on the stack.
699
+ __ bind(&global);
700
+ __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));
701
+ __ str(r2, MemOperand(sp, argc * kPointerSize));
702
+ __ bind(&invoke);
703
+ }
704
+
705
+ // Invoke the function.
706
+ ParameterCount actual(argc);
707
+ __ InvokeFunction(r1, actual, JUMP_FUNCTION);
708
+ }
709
+
710
+
711
+ void CallIC::GenerateMiss(MacroAssembler* masm, int argc) {
712
+ // ----------- S t a t e -------------
713
+ // -- r2 : name
714
+ // -- lr : return address
715
+ // -----------------------------------
716
+
717
+ GenerateCallMiss(masm, argc, IC::kCallIC_Miss);
718
+ }
719
+
720
+
721
+ void CallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
722
+ // ----------- S t a t e -------------
723
+ // -- r2 : name
724
+ // -- lr : return address
725
+ // -----------------------------------
726
+
727
+ // Get the receiver of the function from the stack into r1.
728
+ __ ldr(r1, MemOperand(sp, argc * kPointerSize));
729
+ GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC);
730
+ GenerateMiss(masm, argc);
731
+ }
732
+
733
+
734
+ void CallIC::GenerateNormal(MacroAssembler* masm, int argc) {
735
+ // ----------- S t a t e -------------
736
+ // -- r2 : name
737
+ // -- lr : return address
738
+ // -----------------------------------
739
+
740
+ GenerateCallNormal(masm, argc);
741
+ GenerateMiss(masm, argc);
742
+ }
743
+
744
+
745
+ void KeyedCallIC::GenerateMiss(MacroAssembler* masm, int argc) {
746
+ // ----------- S t a t e -------------
747
+ // -- r2 : name
748
+ // -- lr : return address
749
+ // -----------------------------------
750
+
751
+ GenerateCallMiss(masm, argc, IC::kKeyedCallIC_Miss);
752
+ }
753
+
754
+
755
+ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
756
+ // ----------- S t a t e -------------
757
+ // -- r2 : name
758
+ // -- lr : return address
759
+ // -----------------------------------
760
+
761
+ // Get the receiver of the function from the stack into r1.
762
+ __ ldr(r1, MemOperand(sp, argc * kPointerSize));
763
+
764
+ Label do_call, slow_call, slow_load, slow_reload_receiver;
765
+ Label check_number_dictionary, check_string, lookup_monomorphic_cache;
766
+ Label index_smi, index_string;
767
+
768
+ // Check that the key is a smi.
769
+ __ BranchOnNotSmi(r2, &check_string);
770
+ __ bind(&index_smi);
771
+ // Now the key is known to be a smi. This place is also jumped to from below
772
+ // where a numeric string is converted to a smi.
773
+
774
+ GenerateKeyedLoadReceiverCheck(
775
+ masm, r1, r0, r3, Map::kHasIndexedInterceptor, &slow_call);
776
+
777
+ GenerateFastArrayLoad(
778
+ masm, r1, r2, r4, r3, r0, r1, &check_number_dictionary, &slow_load);
779
+ __ IncrementCounter(&Counters::keyed_call_generic_smi_fast, 1, r0, r3);
780
+
781
+ __ bind(&do_call);
782
+ // receiver in r1 is not used after this point.
783
+ // r2: key
784
+ // r1: function
785
+ GenerateFunctionTailCall(masm, argc, &slow_call, r0);
786
+
787
+ __ bind(&check_number_dictionary);
788
+ // r2: key
789
+ // r3: elements map
790
+ // r4: elements
791
+ // Check whether the elements is a number dictionary.
792
+ __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
793
+ __ cmp(r3, ip);
794
+ __ b(ne, &slow_load);
795
+ __ mov(r0, Operand(r2, ASR, kSmiTagSize));
796
+ // r0: untagged index
797
+ GenerateNumberDictionaryLoad(masm, &slow_load, r4, r2, r1, r0, r3, r5);
798
+ __ IncrementCounter(&Counters::keyed_call_generic_smi_dict, 1, r0, r3);
799
+ __ jmp(&do_call);
800
+
801
+ __ bind(&slow_load);
802
+ // This branch is taken when calling KeyedCallIC_Miss is neither required
803
+ // nor beneficial.
804
+ __ IncrementCounter(&Counters::keyed_call_generic_slow_load, 1, r0, r3);
805
+ __ EnterInternalFrame();
806
+ __ push(r2); // save the key
807
+ __ Push(r1, r2); // pass the receiver and the key
808
+ __ CallRuntime(Runtime::kKeyedGetProperty, 2);
809
+ __ pop(r2); // restore the key
810
+ __ LeaveInternalFrame();
811
+ __ mov(r1, r0);
812
+ __ jmp(&do_call);
813
+
814
+ __ bind(&check_string);
815
+ GenerateKeyStringCheck(masm, r2, r0, r3, &index_string, &slow_call);
816
+
817
+ // The key is known to be a symbol.
818
+ // If the receiver is a regular JS object with slow properties then do
819
+ // a quick inline probe of the receiver's dictionary.
820
+ // Otherwise do the monomorphic cache probe.
821
+ GenerateKeyedLoadReceiverCheck(
822
+ masm, r1, r0, r3, Map::kHasNamedInterceptor, &lookup_monomorphic_cache);
823
+
824
+ __ ldr(r0, FieldMemOperand(r1, JSObject::kPropertiesOffset));
825
+ __ ldr(r3, FieldMemOperand(r0, HeapObject::kMapOffset));
826
+ __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
827
+ __ cmp(r3, ip);
828
+ __ b(ne, &lookup_monomorphic_cache);
829
+
830
+ GenerateDictionaryLoad(masm, &slow_load, r0, r2, r1, r3, r4);
831
+ __ IncrementCounter(&Counters::keyed_call_generic_lookup_dict, 1, r0, r3);
832
+ __ jmp(&do_call);
833
+
834
+ __ bind(&lookup_monomorphic_cache);
835
+ __ IncrementCounter(&Counters::keyed_call_generic_lookup_cache, 1, r0, r3);
836
+ GenerateMonomorphicCacheProbe(masm, argc, Code::KEYED_CALL_IC);
837
+ // Fall through on miss.
838
+
839
+ __ bind(&slow_call);
840
+ // This branch is taken if:
841
+ // - the receiver requires boxing or access check,
842
+ // - the key is neither smi nor symbol,
843
+ // - the value loaded is not a function,
844
+ // - there is hope that the runtime will create a monomorphic call stub
845
+ // that will get fetched next time.
846
+ __ IncrementCounter(&Counters::keyed_call_generic_slow, 1, r0, r3);
847
+ GenerateMiss(masm, argc);
848
+
849
+ __ bind(&index_string);
850
+ GenerateIndexFromHash(masm, r2, r3);
851
+ // Now jump to the place where smi keys are handled.
852
+ __ jmp(&index_smi);
853
+ }
854
+
855
+
856
+ void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
857
+ // ----------- S t a t e -------------
858
+ // -- r2 : name
859
+ // -- lr : return address
860
+ // -----------------------------------
861
+
862
+ GenerateCallNormal(masm, argc);
863
+ GenerateMiss(masm, argc);
864
+ }
865
+
866
+
867
+ // Defined in ic.cc.
868
+ Object* LoadIC_Miss(Arguments args);
869
+
870
+ void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
871
+ // ----------- S t a t e -------------
872
+ // -- r2 : name
873
+ // -- lr : return address
874
+ // -- r0 : receiver
875
+ // -- sp[0] : receiver
876
+ // -----------------------------------
877
+
878
+ // Probe the stub cache.
879
+ Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC,
880
+ NOT_IN_LOOP,
881
+ MONOMORPHIC);
882
+ StubCache::GenerateProbe(masm, flags, r0, r2, r3, no_reg);
883
+
884
+ // Cache miss: Jump to runtime.
885
+ GenerateMiss(masm);
886
+ }
887
+
888
+
889
+ void LoadIC::GenerateNormal(MacroAssembler* masm) {
890
+ // ----------- S t a t e -------------
891
+ // -- r2 : name
892
+ // -- lr : return address
893
+ // -- r0 : receiver
894
+ // -- sp[0] : receiver
895
+ // -----------------------------------
896
+ Label miss;
897
+
898
+ GenerateStringDictionaryReceiverCheck(masm, r0, r1, r3, r4, &miss);
899
+
900
+ // r1: elements
901
+ GenerateDictionaryLoad(masm, &miss, r1, r2, r0, r3, r4);
902
+ __ Ret();
903
+
904
+ // Cache miss: Jump to runtime.
905
+ __ bind(&miss);
906
+ GenerateMiss(masm);
907
+ }
908
+
909
+
910
+ void LoadIC::GenerateMiss(MacroAssembler* masm) {
911
+ // ----------- S t a t e -------------
912
+ // -- r2 : name
913
+ // -- lr : return address
914
+ // -- r0 : receiver
915
+ // -- sp[0] : receiver
916
+ // -----------------------------------
917
+
918
+ __ IncrementCounter(&Counters::load_miss, 1, r3, r4);
919
+
920
+ __ mov(r3, r0);
921
+ __ Push(r3, r2);
922
+
923
+ // Perform tail call to the entry.
924
+ ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss));
925
+ __ TailCallExternalReference(ref, 2, 1);
926
+ }
927
+
928
+
929
+ static inline bool IsInlinedICSite(Address address,
930
+ Address* inline_end_address) {
931
+ // If the instruction after the call site is not the pseudo instruction nop1
932
+ // then this is not related to an inlined in-object property load. The nop1
933
+ // instruction is located just after the call to the IC in the deferred code
934
+ // handling the miss in the inlined code. After the nop1 instruction there is
935
+ // a branch instruction for jumping back from the deferred code.
936
+ Address address_after_call = address + Assembler::kCallTargetAddressOffset;
937
+ Instr instr_after_call = Assembler::instr_at(address_after_call);
938
+ if (!Assembler::IsNop(instr_after_call, PROPERTY_ACCESS_INLINED)) {
939
+ return false;
940
+ }
941
+ Address address_after_nop = address_after_call + Assembler::kInstrSize;
942
+ Instr instr_after_nop = Assembler::instr_at(address_after_nop);
943
+ // There may be some reg-reg move and frame merging code to skip over before
944
+ // the branch back from the DeferredReferenceGetKeyedValue code to the inlined
945
+ // code.
946
+ while (!Assembler::IsBranch(instr_after_nop)) {
947
+ address_after_nop += Assembler::kInstrSize;
948
+ instr_after_nop = Assembler::instr_at(address_after_nop);
949
+ }
950
+
951
+ // Find the end of the inlined code for handling the load.
952
+ int b_offset =
953
+ Assembler::GetBranchOffset(instr_after_nop) + Assembler::kPcLoadDelta;
954
+ ASSERT(b_offset < 0); // Jumping back from deferred code.
955
+ *inline_end_address = address_after_nop + b_offset;
956
+
957
+ return true;
958
+ }
959
+
960
+
961
+ bool LoadIC::PatchInlinedLoad(Address address, Object* map, int offset) {
962
+ // Find the end of the inlined code for handling the load if this is an
963
+ // inlined IC call site.
964
+ Address inline_end_address;
965
+ if (!IsInlinedICSite(address, &inline_end_address)) return false;
966
+
967
+ // Patch the offset of the property load instruction (ldr r0, [r1, #+XXX]).
968
+ // The immediate must be representable in 12 bits.
969
+ ASSERT((JSObject::kMaxInstanceSize - JSObject::kHeaderSize) < (1 << 12));
970
+ Address ldr_property_instr_address =
971
+ inline_end_address - Assembler::kInstrSize;
972
+ ASSERT(Assembler::IsLdrRegisterImmediate(
973
+ Assembler::instr_at(ldr_property_instr_address)));
974
+ Instr ldr_property_instr = Assembler::instr_at(ldr_property_instr_address);
975
+ ldr_property_instr = Assembler::SetLdrRegisterImmediateOffset(
976
+ ldr_property_instr, offset - kHeapObjectTag);
977
+ Assembler::instr_at_put(ldr_property_instr_address, ldr_property_instr);
978
+
979
+ // Indicate that code has changed.
980
+ CPU::FlushICache(ldr_property_instr_address, 1 * Assembler::kInstrSize);
981
+
982
+ // Patch the map check.
983
+ Address ldr_map_instr_address =
984
+ inline_end_address - 4 * Assembler::kInstrSize;
985
+ Assembler::set_target_address_at(ldr_map_instr_address,
986
+ reinterpret_cast<Address>(map));
987
+ return true;
988
+ }
989
+
990
+
991
+ bool StoreIC::PatchInlinedStore(Address address, Object* map, int offset) {
992
+ // Find the end of the inlined code for the store if there is an
993
+ // inlined version of the store.
994
+ Address inline_end_address;
995
+ if (!IsInlinedICSite(address, &inline_end_address)) return false;
996
+
997
+ // Compute the address of the map load instruction.
998
+ Address ldr_map_instr_address =
999
+ inline_end_address -
1000
+ (CodeGenerator::GetInlinedNamedStoreInstructionsAfterPatch() *
1001
+ Assembler::kInstrSize);
1002
+
1003
+ // Update the offsets if initializing the inlined store. No reason
1004
+ // to update the offsets when clearing the inlined version because
1005
+ // it will bail out in the map check.
1006
+ if (map != Heap::null_value()) {
1007
+ // Patch the offset in the actual store instruction.
1008
+ Address str_property_instr_address =
1009
+ ldr_map_instr_address + 3 * Assembler::kInstrSize;
1010
+ Instr str_property_instr = Assembler::instr_at(str_property_instr_address);
1011
+ ASSERT(Assembler::IsStrRegisterImmediate(str_property_instr));
1012
+ str_property_instr = Assembler::SetStrRegisterImmediateOffset(
1013
+ str_property_instr, offset - kHeapObjectTag);
1014
+ Assembler::instr_at_put(str_property_instr_address, str_property_instr);
1015
+
1016
+ // Patch the offset in the add instruction that is part of the
1017
+ // write barrier.
1018
+ Address add_offset_instr_address =
1019
+ str_property_instr_address + Assembler::kInstrSize;
1020
+ Instr add_offset_instr = Assembler::instr_at(add_offset_instr_address);
1021
+ ASSERT(Assembler::IsAddRegisterImmediate(add_offset_instr));
1022
+ add_offset_instr = Assembler::SetAddRegisterImmediateOffset(
1023
+ add_offset_instr, offset - kHeapObjectTag);
1024
+ Assembler::instr_at_put(add_offset_instr_address, add_offset_instr);
1025
+
1026
+ // Indicate that code has changed.
1027
+ CPU::FlushICache(str_property_instr_address, 2 * Assembler::kInstrSize);
1028
+ }
1029
+
1030
+ // Patch the map check.
1031
+ Assembler::set_target_address_at(ldr_map_instr_address,
1032
+ reinterpret_cast<Address>(map));
1033
+
1034
+ return true;
1035
+ }
1036
+
1037
+
1038
+ bool KeyedLoadIC::PatchInlinedLoad(Address address, Object* map) {
1039
+ Address inline_end_address;
1040
+ if (!IsInlinedICSite(address, &inline_end_address)) return false;
1041
+
1042
+ // Patch the map check.
1043
+ Address ldr_map_instr_address =
1044
+ inline_end_address -
1045
+ (CodeGenerator::GetInlinedKeyedLoadInstructionsAfterPatch() *
1046
+ Assembler::kInstrSize);
1047
+ Assembler::set_target_address_at(ldr_map_instr_address,
1048
+ reinterpret_cast<Address>(map));
1049
+ return true;
1050
+ }
1051
+
1052
+
1053
+ bool KeyedStoreIC::PatchInlinedStore(Address address, Object* map) {
1054
+ // Find the end of the inlined code for handling the store if this is an
1055
+ // inlined IC call site.
1056
+ Address inline_end_address;
1057
+ if (!IsInlinedICSite(address, &inline_end_address)) return false;
1058
+
1059
+ // Patch the map check.
1060
+ Address ldr_map_instr_address =
1061
+ inline_end_address -
1062
+ (CodeGenerator::kInlinedKeyedStoreInstructionsAfterPatch *
1063
+ Assembler::kInstrSize);
1064
+ Assembler::set_target_address_at(ldr_map_instr_address,
1065
+ reinterpret_cast<Address>(map));
1066
+ return true;
1067
+ }
1068
+
1069
+
1070
+ Object* KeyedLoadIC_Miss(Arguments args);
1071
+
1072
+
1073
+ void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
1074
+ // ---------- S t a t e --------------
1075
+ // -- lr : return address
1076
+ // -- r0 : key
1077
+ // -- r1 : receiver
1078
+ // -----------------------------------
1079
+
1080
+ __ IncrementCounter(&Counters::keyed_load_miss, 1, r3, r4);
1081
+
1082
+ __ Push(r1, r0);
1083
+
1084
+ ExternalReference ref = ExternalReference(IC_Utility(kKeyedLoadIC_Miss));
1085
+ __ TailCallExternalReference(ref, 2, 1);
1086
+ }
1087
+
1088
+
1089
+ void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
1090
+ // ---------- S t a t e --------------
1091
+ // -- lr : return address
1092
+ // -- r0 : key
1093
+ // -- r1 : receiver
1094
+ // -----------------------------------
1095
+
1096
+ __ Push(r1, r0);
1097
+
1098
+ __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
1099
+ }
1100
+
1101
+
1102
+ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
1103
+ // ---------- S t a t e --------------
1104
+ // -- lr : return address
1105
+ // -- r0 : key
1106
+ // -- r1 : receiver
1107
+ // -----------------------------------
1108
+ Label slow, check_string, index_smi, index_string;
1109
+ Label check_pixel_array, probe_dictionary, check_number_dictionary;
1110
+
1111
+ Register key = r0;
1112
+ Register receiver = r1;
1113
+
1114
+ // Check that the key is a smi.
1115
+ __ BranchOnNotSmi(key, &check_string);
1116
+ __ bind(&index_smi);
1117
+ // Now the key is known to be a smi. This place is also jumped to from below
1118
+ // where a numeric string is converted to a smi.
1119
+
1120
+ GenerateKeyedLoadReceiverCheck(
1121
+ masm, receiver, r2, r3, Map::kHasIndexedInterceptor, &slow);
1122
+
1123
+ GenerateFastArrayLoad(
1124
+ masm, receiver, key, r4, r3, r2, r0, &check_pixel_array, &slow);
1125
+ __ IncrementCounter(&Counters::keyed_load_generic_smi, 1, r2, r3);
1126
+ __ Ret();
1127
+
1128
+ // Check whether the elements is a pixel array.
1129
+ // r0: key
1130
+ // r3: elements map
1131
+ // r4: elements
1132
+ __ bind(&check_pixel_array);
1133
+ __ LoadRoot(ip, Heap::kPixelArrayMapRootIndex);
1134
+ __ cmp(r3, ip);
1135
+ __ b(ne, &check_number_dictionary);
1136
+ __ ldr(ip, FieldMemOperand(r4, PixelArray::kLengthOffset));
1137
+ __ mov(r2, Operand(key, ASR, kSmiTagSize));
1138
+ __ cmp(r2, ip);
1139
+ __ b(hs, &slow);
1140
+ __ ldr(ip, FieldMemOperand(r4, PixelArray::kExternalPointerOffset));
1141
+ __ ldrb(r2, MemOperand(ip, r2));
1142
+ __ mov(r0, Operand(r2, LSL, kSmiTagSize)); // Tag result as smi.
1143
+ __ Ret();
1144
+
1145
+ __ bind(&check_number_dictionary);
1146
+ // Check whether the elements is a number dictionary.
1147
+ // r0: key
1148
+ // r3: elements map
1149
+ // r4: elements
1150
+ __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
1151
+ __ cmp(r3, ip);
1152
+ __ b(ne, &slow);
1153
+ __ mov(r2, Operand(r0, ASR, kSmiTagSize));
1154
+ GenerateNumberDictionaryLoad(masm, &slow, r4, r0, r0, r2, r3, r5);
1155
+ __ Ret();
1156
+
1157
+ // Slow case, key and receiver still in r0 and r1.
1158
+ __ bind(&slow);
1159
+ __ IncrementCounter(&Counters::keyed_load_generic_slow, 1, r2, r3);
1160
+ GenerateRuntimeGetProperty(masm);
1161
+
1162
+ __ bind(&check_string);
1163
+ GenerateKeyStringCheck(masm, key, r2, r3, &index_string, &slow);
1164
+
1165
+ GenerateKeyedLoadReceiverCheck(
1166
+ masm, receiver, r2, r3, Map::kHasNamedInterceptor, &slow);
1167
+
1168
+ // If the receiver is a fast-case object, check the keyed lookup
1169
+ // cache. Otherwise probe the dictionary.
1170
+ __ ldr(r3, FieldMemOperand(r1, JSObject::kPropertiesOffset));
1171
+ __ ldr(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
1172
+ __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
1173
+ __ cmp(r4, ip);
1174
+ __ b(eq, &probe_dictionary);
1175
+
1176
+ // Load the map of the receiver, compute the keyed lookup cache hash
1177
+ // based on 32 bits of the map pointer and the string hash.
1178
+ __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
1179
+ __ mov(r3, Operand(r2, ASR, KeyedLookupCache::kMapHashShift));
1180
+ __ ldr(r4, FieldMemOperand(r0, String::kHashFieldOffset));
1181
+ __ eor(r3, r3, Operand(r4, ASR, String::kHashShift));
1182
+ __ And(r3, r3, Operand(KeyedLookupCache::kCapacityMask));
1183
+
1184
+ // Load the key (consisting of map and symbol) from the cache and
1185
+ // check for match.
1186
+ ExternalReference cache_keys = ExternalReference::keyed_lookup_cache_keys();
1187
+ __ mov(r4, Operand(cache_keys));
1188
+ __ add(r4, r4, Operand(r3, LSL, kPointerSizeLog2 + 1));
1189
+ __ ldr(r5, MemOperand(r4, kPointerSize, PostIndex)); // Move r4 to symbol.
1190
+ __ cmp(r2, r5);
1191
+ __ b(ne, &slow);
1192
+ __ ldr(r5, MemOperand(r4));
1193
+ __ cmp(r0, r5);
1194
+ __ b(ne, &slow);
1195
+
1196
+ // Get field offset and check that it is an in-object property.
1197
+ // r0 : key
1198
+ // r1 : receiver
1199
+ // r2 : receiver's map
1200
+ // r3 : lookup cache index
1201
+ ExternalReference cache_field_offsets
1202
+ = ExternalReference::keyed_lookup_cache_field_offsets();
1203
+ __ mov(r4, Operand(cache_field_offsets));
1204
+ __ ldr(r5, MemOperand(r4, r3, LSL, kPointerSizeLog2));
1205
+ __ ldrb(r6, FieldMemOperand(r2, Map::kInObjectPropertiesOffset));
1206
+ __ cmp(r5, r6);
1207
+ __ b(ge, &slow);
1208
+
1209
+ // Load in-object property.
1210
+ __ sub(r5, r5, r6); // Index from end of object.
1211
+ __ ldrb(r6, FieldMemOperand(r2, Map::kInstanceSizeOffset));
1212
+ __ add(r6, r6, r5); // Index from start of object.
1213
+ __ sub(r1, r1, Operand(kHeapObjectTag)); // Remove the heap tag.
1214
+ __ ldr(r0, MemOperand(r1, r6, LSL, kPointerSizeLog2));
1215
+ __ IncrementCounter(&Counters::keyed_load_generic_lookup_cache, 1, r2, r3);
1216
+ __ Ret();
1217
+
1218
+ // Do a quick inline probe of the receiver's dictionary, if it
1219
+ // exists.
1220
+ __ bind(&probe_dictionary);
1221
+ // r1: receiver
1222
+ // r0: key
1223
+ // r3: elements
1224
+ __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
1225
+ __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
1226
+ GenerateGlobalInstanceTypeCheck(masm, r2, &slow);
1227
+ // Load the property to r0.
1228
+ GenerateDictionaryLoad(masm, &slow, r3, r0, r0, r2, r4);
1229
+ __ IncrementCounter(&Counters::keyed_load_generic_symbol, 1, r2, r3);
1230
+ __ Ret();
1231
+
1232
+ __ bind(&index_string);
1233
+ GenerateIndexFromHash(masm, key, r3);
1234
+ // Now jump to the place where smi keys are handled.
1235
+ __ jmp(&index_smi);
1236
+ }
1237
+
1238
+
1239
+ void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
1240
+ // ---------- S t a t e --------------
1241
+ // -- lr : return address
1242
+ // -- r0 : key (index)
1243
+ // -- r1 : receiver
1244
+ // -----------------------------------
1245
+ Label miss;
1246
+ Label index_out_of_range;
1247
+
1248
+ Register receiver = r1;
1249
+ Register index = r0;
1250
+ Register scratch1 = r2;
1251
+ Register scratch2 = r3;
1252
+ Register result = r0;
1253
+
1254
+ StringCharAtGenerator char_at_generator(receiver,
1255
+ index,
1256
+ scratch1,
1257
+ scratch2,
1258
+ result,
1259
+ &miss, // When not a string.
1260
+ &miss, // When not a number.
1261
+ &index_out_of_range,
1262
+ STRING_INDEX_IS_ARRAY_INDEX);
1263
+ char_at_generator.GenerateFast(masm);
1264
+ __ Ret();
1265
+
1266
+ ICRuntimeCallHelper call_helper;
1267
+ char_at_generator.GenerateSlow(masm, call_helper);
1268
+
1269
+ __ bind(&index_out_of_range);
1270
+ __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
1271
+ __ Ret();
1272
+
1273
+ __ bind(&miss);
1274
+ GenerateMiss(masm);
1275
+ }
1276
+
1277
+
1278
+ // Convert unsigned integer with specified number of leading zeroes in binary
1279
+ // representation to IEEE 754 double.
1280
+ // Integer to convert is passed in register hiword.
1281
+ // Resulting double is returned in registers hiword:loword.
1282
+ // This functions does not work correctly for 0.
1283
+ static void GenerateUInt2Double(MacroAssembler* masm,
1284
+ Register hiword,
1285
+ Register loword,
1286
+ Register scratch,
1287
+ int leading_zeroes) {
1288
+ const int meaningful_bits = kBitsPerInt - leading_zeroes - 1;
1289
+ const int biased_exponent = HeapNumber::kExponentBias + meaningful_bits;
1290
+
1291
+ const int mantissa_shift_for_hi_word =
1292
+ meaningful_bits - HeapNumber::kMantissaBitsInTopWord;
1293
+
1294
+ const int mantissa_shift_for_lo_word =
1295
+ kBitsPerInt - mantissa_shift_for_hi_word;
1296
+
1297
+ __ mov(scratch, Operand(biased_exponent << HeapNumber::kExponentShift));
1298
+ if (mantissa_shift_for_hi_word > 0) {
1299
+ __ mov(loword, Operand(hiword, LSL, mantissa_shift_for_lo_word));
1300
+ __ orr(hiword, scratch, Operand(hiword, LSR, mantissa_shift_for_hi_word));
1301
+ } else {
1302
+ __ mov(loword, Operand(0));
1303
+ __ orr(hiword, scratch, Operand(hiword, LSL, mantissa_shift_for_hi_word));
1304
+ }
1305
+
1306
+ // If least significant bit of biased exponent was not 1 it was corrupted
1307
+ // by most significant bit of mantissa so we should fix that.
1308
+ if (!(biased_exponent & 1)) {
1309
+ __ bic(hiword, hiword, Operand(1 << HeapNumber::kExponentShift));
1310
+ }
1311
+ }
1312
+
1313
+
1314
+ void KeyedLoadIC::GenerateExternalArray(MacroAssembler* masm,
1315
+ ExternalArrayType array_type) {
1316
+ // ---------- S t a t e --------------
1317
+ // -- lr : return address
1318
+ // -- r0 : key
1319
+ // -- r1 : receiver
1320
+ // -----------------------------------
1321
+ Label slow, failed_allocation;
1322
+
1323
+ Register key = r0;
1324
+ Register receiver = r1;
1325
+
1326
+ // Check that the object isn't a smi
1327
+ __ BranchOnSmi(receiver, &slow);
1328
+
1329
+ // Check that the key is a smi.
1330
+ __ BranchOnNotSmi(key, &slow);
1331
+
1332
+ // Check that the object is a JS object. Load map into r2.
1333
+ __ CompareObjectType(receiver, r2, r3, FIRST_JS_OBJECT_TYPE);
1334
+ __ b(lt, &slow);
1335
+
1336
+ // Check that the receiver does not require access checks. We need
1337
+ // to check this explicitly since this generic stub does not perform
1338
+ // map checks.
1339
+ __ ldrb(r3, FieldMemOperand(r2, Map::kBitFieldOffset));
1340
+ __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded));
1341
+ __ b(ne, &slow);
1342
+
1343
+ // Check that the elements array is the appropriate type of
1344
+ // ExternalArray.
1345
+ __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
1346
+ __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
1347
+ __ LoadRoot(ip, Heap::RootIndexForExternalArrayType(array_type));
1348
+ __ cmp(r2, ip);
1349
+ __ b(ne, &slow);
1350
+
1351
+ // Check that the index is in range.
1352
+ __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset));
1353
+ __ cmp(ip, Operand(key, ASR, kSmiTagSize));
1354
+ // Unsigned comparison catches both negative and too-large values.
1355
+ __ b(lo, &slow);
1356
+
1357
+ // r3: elements array
1358
+ __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
1359
+ // r3: base pointer of external storage
1360
+
1361
+ // We are not untagging smi key and instead work with it
1362
+ // as if it was premultiplied by 2.
1363
+ ASSERT((kSmiTag == 0) && (kSmiTagSize == 1));
1364
+
1365
+ Register value = r2;
1366
+ switch (array_type) {
1367
+ case kExternalByteArray:
1368
+ __ ldrsb(value, MemOperand(r3, key, LSR, 1));
1369
+ break;
1370
+ case kExternalUnsignedByteArray:
1371
+ __ ldrb(value, MemOperand(r3, key, LSR, 1));
1372
+ break;
1373
+ case kExternalShortArray:
1374
+ __ ldrsh(value, MemOperand(r3, key, LSL, 0));
1375
+ break;
1376
+ case kExternalUnsignedShortArray:
1377
+ __ ldrh(value, MemOperand(r3, key, LSL, 0));
1378
+ break;
1379
+ case kExternalIntArray:
1380
+ case kExternalUnsignedIntArray:
1381
+ __ ldr(value, MemOperand(r3, key, LSL, 1));
1382
+ break;
1383
+ case kExternalFloatArray:
1384
+ if (CpuFeatures::IsSupported(VFP3)) {
1385
+ CpuFeatures::Scope scope(VFP3);
1386
+ __ add(r2, r3, Operand(key, LSL, 1));
1387
+ __ vldr(s0, r2, 0);
1388
+ } else {
1389
+ __ ldr(value, MemOperand(r3, key, LSL, 1));
1390
+ }
1391
+ break;
1392
+ default:
1393
+ UNREACHABLE();
1394
+ break;
1395
+ }
1396
+
1397
+ // For integer array types:
1398
+ // r2: value
1399
+ // For floating-point array type
1400
+ // s0: value (if VFP3 is supported)
1401
+ // r2: value (if VFP3 is not supported)
1402
+
1403
+ if (array_type == kExternalIntArray) {
1404
+ // For the Int and UnsignedInt array types, we need to see whether
1405
+ // the value can be represented in a Smi. If not, we need to convert
1406
+ // it to a HeapNumber.
1407
+ Label box_int;
1408
+ __ cmp(value, Operand(0xC0000000));
1409
+ __ b(mi, &box_int);
1410
+ // Tag integer as smi and return it.
1411
+ __ mov(r0, Operand(value, LSL, kSmiTagSize));
1412
+ __ Ret();
1413
+
1414
+ __ bind(&box_int);
1415
+ // Allocate a HeapNumber for the result and perform int-to-double
1416
+ // conversion. Use r0 for result as key is not needed any more.
1417
+ __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
1418
+ __ AllocateHeapNumber(r0, r3, r4, r6, &slow);
1419
+
1420
+ if (CpuFeatures::IsSupported(VFP3)) {
1421
+ CpuFeatures::Scope scope(VFP3);
1422
+ __ vmov(s0, value);
1423
+ __ vcvt_f64_s32(d0, s0);
1424
+ __ sub(r3, r0, Operand(kHeapObjectTag));
1425
+ __ vstr(d0, r3, HeapNumber::kValueOffset);
1426
+ __ Ret();
1427
+ } else {
1428
+ WriteInt32ToHeapNumberStub stub(value, r0, r3);
1429
+ __ TailCallStub(&stub);
1430
+ }
1431
+ } else if (array_type == kExternalUnsignedIntArray) {
1432
+ // The test is different for unsigned int values. Since we need
1433
+ // the value to be in the range of a positive smi, we can't
1434
+ // handle either of the top two bits being set in the value.
1435
+ if (CpuFeatures::IsSupported(VFP3)) {
1436
+ CpuFeatures::Scope scope(VFP3);
1437
+ Label box_int, done;
1438
+ __ tst(value, Operand(0xC0000000));
1439
+ __ b(ne, &box_int);
1440
+ // Tag integer as smi and return it.
1441
+ __ mov(r0, Operand(value, LSL, kSmiTagSize));
1442
+ __ Ret();
1443
+
1444
+ __ bind(&box_int);
1445
+ __ vmov(s0, value);
1446
+ // Allocate a HeapNumber for the result and perform int-to-double
1447
+ // conversion. Don't use r0 and r1 as AllocateHeapNumber clobbers all
1448
+ // registers - also when jumping due to exhausted young space.
1449
+ __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
1450
+ __ AllocateHeapNumber(r2, r3, r4, r6, &slow);
1451
+
1452
+ __ vcvt_f64_u32(d0, s0);
1453
+ __ sub(r1, r2, Operand(kHeapObjectTag));
1454
+ __ vstr(d0, r1, HeapNumber::kValueOffset);
1455
+
1456
+ __ mov(r0, r2);
1457
+ __ Ret();
1458
+ } else {
1459
+ // Check whether unsigned integer fits into smi.
1460
+ Label box_int_0, box_int_1, done;
1461
+ __ tst(value, Operand(0x80000000));
1462
+ __ b(ne, &box_int_0);
1463
+ __ tst(value, Operand(0x40000000));
1464
+ __ b(ne, &box_int_1);
1465
+ // Tag integer as smi and return it.
1466
+ __ mov(r0, Operand(value, LSL, kSmiTagSize));
1467
+ __ Ret();
1468
+
1469
+ Register hiword = value; // r2.
1470
+ Register loword = r3;
1471
+
1472
+ __ bind(&box_int_0);
1473
+ // Integer does not have leading zeros.
1474
+ GenerateUInt2Double(masm, hiword, loword, r4, 0);
1475
+ __ b(&done);
1476
+
1477
+ __ bind(&box_int_1);
1478
+ // Integer has one leading zero.
1479
+ GenerateUInt2Double(masm, hiword, loword, r4, 1);
1480
+
1481
+
1482
+ __ bind(&done);
1483
+ // Integer was converted to double in registers hiword:loword.
1484
+ // Wrap it into a HeapNumber. Don't use r0 and r1 as AllocateHeapNumber
1485
+ // clobbers all registers - also when jumping due to exhausted young
1486
+ // space.
1487
+ __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
1488
+ __ AllocateHeapNumber(r4, r5, r7, r6, &slow);
1489
+
1490
+ __ str(hiword, FieldMemOperand(r4, HeapNumber::kExponentOffset));
1491
+ __ str(loword, FieldMemOperand(r4, HeapNumber::kMantissaOffset));
1492
+
1493
+ __ mov(r0, r4);
1494
+ __ Ret();
1495
+ }
1496
+ } else if (array_type == kExternalFloatArray) {
1497
+ // For the floating-point array type, we need to always allocate a
1498
+ // HeapNumber.
1499
+ if (CpuFeatures::IsSupported(VFP3)) {
1500
+ CpuFeatures::Scope scope(VFP3);
1501
+ // Allocate a HeapNumber for the result. Don't use r0 and r1 as
1502
+ // AllocateHeapNumber clobbers all registers - also when jumping due to
1503
+ // exhausted young space.
1504
+ __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
1505
+ __ AllocateHeapNumber(r2, r3, r4, r6, &slow);
1506
+ __ vcvt_f64_f32(d0, s0);
1507
+ __ sub(r1, r2, Operand(kHeapObjectTag));
1508
+ __ vstr(d0, r1, HeapNumber::kValueOffset);
1509
+
1510
+ __ mov(r0, r2);
1511
+ __ Ret();
1512
+ } else {
1513
+ // Allocate a HeapNumber for the result. Don't use r0 and r1 as
1514
+ // AllocateHeapNumber clobbers all registers - also when jumping due to
1515
+ // exhausted young space.
1516
+ __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
1517
+ __ AllocateHeapNumber(r3, r4, r5, r6, &slow);
1518
+ // VFP is not available, do manual single to double conversion.
1519
+
1520
+ // r2: floating point value (binary32)
1521
+ // r3: heap number for result
1522
+
1523
+ // Extract mantissa to r0. OK to clobber r0 now as there are no jumps to
1524
+ // the slow case from here.
1525
+ __ and_(r0, value, Operand(kBinary32MantissaMask));
1526
+
1527
+ // Extract exponent to r1. OK to clobber r1 now as there are no jumps to
1528
+ // the slow case from here.
1529
+ __ mov(r1, Operand(value, LSR, kBinary32MantissaBits));
1530
+ __ and_(r1, r1, Operand(kBinary32ExponentMask >> kBinary32MantissaBits));
1531
+
1532
+ Label exponent_rebiased;
1533
+ __ teq(r1, Operand(0x00));
1534
+ __ b(eq, &exponent_rebiased);
1535
+
1536
+ __ teq(r1, Operand(0xff));
1537
+ __ mov(r1, Operand(0x7ff), LeaveCC, eq);
1538
+ __ b(eq, &exponent_rebiased);
1539
+
1540
+ // Rebias exponent.
1541
+ __ add(r1,
1542
+ r1,
1543
+ Operand(-kBinary32ExponentBias + HeapNumber::kExponentBias));
1544
+
1545
+ __ bind(&exponent_rebiased);
1546
+ __ and_(r2, value, Operand(kBinary32SignMask));
1547
+ value = no_reg;
1548
+ __ orr(r2, r2, Operand(r1, LSL, HeapNumber::kMantissaBitsInTopWord));
1549
+
1550
+ // Shift mantissa.
1551
+ static const int kMantissaShiftForHiWord =
1552
+ kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
1553
+
1554
+ static const int kMantissaShiftForLoWord =
1555
+ kBitsPerInt - kMantissaShiftForHiWord;
1556
+
1557
+ __ orr(r2, r2, Operand(r0, LSR, kMantissaShiftForHiWord));
1558
+ __ mov(r0, Operand(r0, LSL, kMantissaShiftForLoWord));
1559
+
1560
+ __ str(r2, FieldMemOperand(r3, HeapNumber::kExponentOffset));
1561
+ __ str(r0, FieldMemOperand(r3, HeapNumber::kMantissaOffset));
1562
+
1563
+ __ mov(r0, r3);
1564
+ __ Ret();
1565
+ }
1566
+
1567
+ } else {
1568
+ // Tag integer as smi and return it.
1569
+ __ mov(r0, Operand(value, LSL, kSmiTagSize));
1570
+ __ Ret();
1571
+ }
1572
+
1573
+ // Slow case, key and receiver still in r0 and r1.
1574
+ __ bind(&slow);
1575
+ __ IncrementCounter(&Counters::keyed_load_external_array_slow, 1, r2, r3);
1576
+ GenerateRuntimeGetProperty(masm);
1577
+ }
1578
+
1579
+
1580
+ void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
1581
+ // ---------- S t a t e --------------
1582
+ // -- lr : return address
1583
+ // -- r0 : key
1584
+ // -- r1 : receiver
1585
+ // -----------------------------------
1586
+ Label slow;
1587
+
1588
+ // Check that the receiver isn't a smi.
1589
+ __ BranchOnSmi(r1, &slow);
1590
+
1591
+ // Check that the key is a smi.
1592
+ __ BranchOnNotSmi(r0, &slow);
1593
+
1594
+ // Get the map of the receiver.
1595
+ __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
1596
+
1597
+ // Check that it has indexed interceptor and access checks
1598
+ // are not enabled for this object.
1599
+ __ ldrb(r3, FieldMemOperand(r2, Map::kBitFieldOffset));
1600
+ __ and_(r3, r3, Operand(kSlowCaseBitFieldMask));
1601
+ __ cmp(r3, Operand(1 << Map::kHasIndexedInterceptor));
1602
+ __ b(ne, &slow);
1603
+
1604
+ // Everything is fine, call runtime.
1605
+ __ Push(r1, r0); // Receiver, key.
1606
+
1607
+ // Perform tail call to the entry.
1608
+ __ TailCallExternalReference(ExternalReference(
1609
+ IC_Utility(kKeyedLoadPropertyWithInterceptor)), 2, 1);
1610
+
1611
+ __ bind(&slow);
1612
+ GenerateMiss(masm);
1613
+ }
1614
+
1615
+
1616
+ void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
1617
+ // ---------- S t a t e --------------
1618
+ // -- r0 : value
1619
+ // -- r1 : key
1620
+ // -- r2 : receiver
1621
+ // -- lr : return address
1622
+ // -----------------------------------
1623
+
1624
+ // Push receiver, key and value for runtime call.
1625
+ __ Push(r2, r1, r0);
1626
+
1627
+ ExternalReference ref = ExternalReference(IC_Utility(kKeyedStoreIC_Miss));
1628
+ __ TailCallExternalReference(ref, 3, 1);
1629
+ }
1630
+
1631
+
1632
+ void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm) {
1633
+ // ---------- S t a t e --------------
1634
+ // -- r0 : value
1635
+ // -- r1 : key
1636
+ // -- r2 : receiver
1637
+ // -- lr : return address
1638
+ // -----------------------------------
1639
+
1640
+ // Push receiver, key and value for runtime call.
1641
+ __ Push(r2, r1, r0);
1642
+
1643
+ __ TailCallRuntime(Runtime::kSetProperty, 3, 1);
1644
+ }
1645
+
1646
+
1647
+ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
1648
+ // ---------- S t a t e --------------
1649
+ // -- r0 : value
1650
+ // -- r1 : key
1651
+ // -- r2 : receiver
1652
+ // -- lr : return address
1653
+ // -----------------------------------
1654
+ Label slow, fast, array, extra, check_pixel_array;
1655
+
1656
+ // Register usage.
1657
+ Register value = r0;
1658
+ Register key = r1;
1659
+ Register receiver = r2;
1660
+ Register elements = r3; // Elements array of the receiver.
1661
+ // r4 and r5 are used as general scratch registers.
1662
+
1663
+ // Check that the key is a smi.
1664
+ __ tst(key, Operand(kSmiTagMask));
1665
+ __ b(ne, &slow);
1666
+ // Check that the object isn't a smi.
1667
+ __ tst(receiver, Operand(kSmiTagMask));
1668
+ __ b(eq, &slow);
1669
+ // Get the map of the object.
1670
+ __ ldr(r4, FieldMemOperand(receiver, HeapObject::kMapOffset));
1671
+ // Check that the receiver does not require access checks. We need
1672
+ // to do this because this generic stub does not perform map checks.
1673
+ __ ldrb(ip, FieldMemOperand(r4, Map::kBitFieldOffset));
1674
+ __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded));
1675
+ __ b(ne, &slow);
1676
+ // Check if the object is a JS array or not.
1677
+ __ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
1678
+ __ cmp(r4, Operand(JS_ARRAY_TYPE));
1679
+ __ b(eq, &array);
1680
+ // Check that the object is some kind of JS object.
1681
+ __ cmp(r4, Operand(FIRST_JS_OBJECT_TYPE));
1682
+ __ b(lt, &slow);
1683
+
1684
+ // Object case: Check key against length in the elements array.
1685
+ __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
1686
+ // Check that the object is in fast mode (not dictionary).
1687
+ __ ldr(r4, FieldMemOperand(elements, HeapObject::kMapOffset));
1688
+ __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
1689
+ __ cmp(r4, ip);
1690
+ __ b(ne, &check_pixel_array);
1691
+ // Check array bounds. Both the key and the length of FixedArray are smis.
1692
+ __ ldr(ip, FieldMemOperand(elements, FixedArray::kLengthOffset));
1693
+ __ cmp(key, Operand(ip));
1694
+ __ b(lo, &fast);
1695
+
1696
+ // Slow case, handle jump to runtime.
1697
+ __ bind(&slow);
1698
+ // Entry registers are intact.
1699
+ // r0: value.
1700
+ // r1: key.
1701
+ // r2: receiver.
1702
+ GenerateRuntimeSetProperty(masm);
1703
+
1704
+ // Check whether the elements is a pixel array.
1705
+ // r4: elements map.
1706
+ __ bind(&check_pixel_array);
1707
+ __ LoadRoot(ip, Heap::kPixelArrayMapRootIndex);
1708
+ __ cmp(r4, ip);
1709
+ __ b(ne, &slow);
1710
+ // Check that the value is a smi. If a conversion is needed call into the
1711
+ // runtime to convert and clamp.
1712
+ __ BranchOnNotSmi(value, &slow);
1713
+ __ mov(r4, Operand(key, ASR, kSmiTagSize)); // Untag the key.
1714
+ __ ldr(ip, FieldMemOperand(elements, PixelArray::kLengthOffset));
1715
+ __ cmp(r4, Operand(ip));
1716
+ __ b(hs, &slow);
1717
+ __ mov(r5, Operand(value, ASR, kSmiTagSize)); // Untag the value.
1718
+ __ Usat(r5, 8, Operand(r5)); // Clamp the value to [0..255].
1719
+
1720
+ // Get the pointer to the external array. This clobbers elements.
1721
+ __ ldr(elements,
1722
+ FieldMemOperand(elements, PixelArray::kExternalPointerOffset));
1723
+ __ strb(r5, MemOperand(elements, r4)); // Elements is now external array.
1724
+ __ Ret();
1725
+
1726
+ // Extra capacity case: Check if there is extra capacity to
1727
+ // perform the store and update the length. Used for adding one
1728
+ // element to the array by writing to array[array.length].
1729
+ __ bind(&extra);
1730
+ // Condition code from comparing key and array length is still available.
1731
+ __ b(ne, &slow); // Only support writing to writing to array[array.length].
1732
+ // Check for room in the elements backing store.
1733
+ // Both the key and the length of FixedArray are smis.
1734
+ __ ldr(ip, FieldMemOperand(elements, FixedArray::kLengthOffset));
1735
+ __ cmp(key, Operand(ip));
1736
+ __ b(hs, &slow);
1737
+ // Calculate key + 1 as smi.
1738
+ ASSERT_EQ(0, kSmiTag);
1739
+ __ add(r4, key, Operand(Smi::FromInt(1)));
1740
+ __ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
1741
+ __ b(&fast);
1742
+
1743
+ // Array case: Get the length and the elements array from the JS
1744
+ // array. Check that the array is in fast mode; if it is the
1745
+ // length is always a smi.
1746
+ __ bind(&array);
1747
+ __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
1748
+ __ ldr(r4, FieldMemOperand(elements, HeapObject::kMapOffset));
1749
+ __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
1750
+ __ cmp(r4, ip);
1751
+ __ b(ne, &slow);
1752
+
1753
+ // Check the key against the length in the array.
1754
+ __ ldr(ip, FieldMemOperand(receiver, JSArray::kLengthOffset));
1755
+ __ cmp(key, Operand(ip));
1756
+ __ b(hs, &extra);
1757
+ // Fall through to fast case.
1758
+
1759
+ __ bind(&fast);
1760
+ // Fast case, store the value to the elements backing store.
1761
+ __ add(r5, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1762
+ __ add(r5, r5, Operand(key, LSL, kPointerSizeLog2 - kSmiTagSize));
1763
+ __ str(value, MemOperand(r5));
1764
+ // Skip write barrier if the written value is a smi.
1765
+ __ tst(value, Operand(kSmiTagMask));
1766
+ __ Ret(eq);
1767
+ // Update write barrier for the elements array address.
1768
+ __ sub(r4, r5, Operand(elements));
1769
+ __ RecordWrite(elements, Operand(r4), r5, r6);
1770
+
1771
+ __ Ret();
1772
+ }
1773
+
1774
+
1775
// Convert the int in register ival to an IEEE 754 single precision
// (binary32) floating point value and leave its bit pattern in register fval.
// If VFP3 is available the hardware conversion is used; otherwise the
// binary32 word is assembled manually from sign, exponent and mantissa.
static void ConvertIntToFloat(MacroAssembler* masm,
                              Register ival,
                              Register fval,
                              Register scratch1,
                              Register scratch2) {
  if (CpuFeatures::IsSupported(VFP3)) {
    CpuFeatures::Scope scope(VFP3);
    __ vmov(s0, ival);
    __ vcvt_f32_s32(s0, s0);
    __ vmov(fval, s0);
  } else {
    Label not_special, done;
    // Move sign bit from source to destination.  This works because the sign
    // bit in the exponent word of the double has the same position and polarity
    // as the 2's complement sign bit in a Smi.
    ASSERT(kBinary32SignMask == 0x80000000u);

    __ and_(fval, ival, Operand(kBinary32SignMask), SetCC);
    // Negate value if it is negative (the ne condition comes from the
    // SetCC on the and_ above, i.e. the sign bit was set).
    __ rsb(ival, ival, Operand(0), LeaveCC, ne);

    // We have -1, 0 or 1, which we treat specially. Register ival contains
    // absolute value: it is either equal to 1 (special case of -1 and 1),
    // greater than 1 (not a special case) or less than 1 (special case of 0).
    __ cmp(ival, Operand(1));
    __ b(gt, &not_special);

    // For 1 or -1 we need to or in the 0 exponent (biased).  For 0 the
    // condition below is false and fval keeps just the (zero) sign bit,
    // which is exactly binary32 +0.0.
    static const uint32_t exponent_word_for_1 =
        kBinary32ExponentBias << kBinary32ExponentShift;

    __ orr(fval, fval, Operand(exponent_word_for_1), LeaveCC, eq);
    __ b(&done);

    __ bind(&not_special);
    // Count leading zeros.
    // Gets the wrong answer for 0, but we already checked for that case above.
    Register zeros = scratch2;
    __ CountLeadingZeros(zeros, ival, scratch1);

    // Compute the biased exponent ((kBitsPerInt - 1) - zeros is the position
    // of the highest set bit) and or it into the exponent field.
    __ rsb(scratch1,
           zeros,
           Operand((kBitsPerInt - 1) + kBinary32ExponentBias));

    __ orr(fval,
           fval,
           Operand(scratch1, LSL, kBinary32ExponentShift));

    // Shift up the source chopping the top bit off (the top bit is the
    // implicit leading 1 of the mantissa and is not stored).
    __ add(zeros, zeros, Operand(1));
    // This wouldn't work for 1 and -1 as the shift would be 32 which means 0.
    __ mov(ival, Operand(ival, LSL, zeros));
    // Or in the mantissa (the top kBinary32MantissaBits bits of ival).
    __ orr(fval,
           fval,
           Operand(ival, LSR, kBitsPerInt - kBinary32MantissaBits));

    __ bind(&done);
  }
}
1839
+
1840
+
1841
+ static bool IsElementTypeSigned(ExternalArrayType array_type) {
1842
+ switch (array_type) {
1843
+ case kExternalByteArray:
1844
+ case kExternalShortArray:
1845
+ case kExternalIntArray:
1846
+ return true;
1847
+
1848
+ case kExternalUnsignedByteArray:
1849
+ case kExternalUnsignedShortArray:
1850
+ case kExternalUnsignedIntArray:
1851
+ return false;
1852
+
1853
+ default:
1854
+ UNREACHABLE();
1855
+ return false;
1856
+ }
1857
+ }
1858
+
1859
+
1860
// Generic keyed store stub for external (typed) arrays: checks the receiver,
// key and elements kind, then stores smi or HeapNumber values directly into
// the external backing store, converting to the element type as needed.
// Anything it cannot handle falls through to the runtime.
void KeyedStoreIC::GenerateExternalArray(MacroAssembler* masm,
                                         ExternalArrayType array_type) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------
  Label slow, check_heap_number;

  // Register usage.
  Register value = r0;
  Register key = r1;
  Register receiver = r2;
  // r3 mostly holds the elements array or the destination external array.

  // Check that the object isn't a smi.
  __ BranchOnSmi(receiver, &slow);

  // Check that the object is a JS object. Load map into r3.
  __ CompareObjectType(receiver, r3, r4, FIRST_JS_OBJECT_TYPE);
  __ b(le, &slow);

  // Check that the receiver does not require access checks. We need
  // to do this because this generic stub does not perform map checks.
  __ ldrb(ip, FieldMemOperand(r3, Map::kBitFieldOffset));
  __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &slow);

  // Check that the key is a smi.
  __ BranchOnNotSmi(key, &slow);

  // Check that the elements array is the appropriate type of ExternalArray.
  __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ ldr(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::RootIndexForExternalArrayType(array_type));
  __ cmp(r4, ip);
  __ b(ne, &slow);

  // Check that the index is in range.
  __ mov(r4, Operand(key, ASR, kSmiTagSize));  // Untag the index.
  __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset));
  __ cmp(r4, ip);
  // Unsigned comparison catches both negative and too-large values.
  __ b(hs, &slow);

  // Handle both smis and HeapNumbers in the fast path. Go to the
  // runtime for all other kinds of values.
  // r3: external array.
  // r4: key (integer).
  __ BranchOnNotSmi(value, &check_heap_number);
  __ mov(r5, Operand(value, ASR, kSmiTagSize));  // Untag the value.
  __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));

  // r3: base pointer of external storage.
  // r4: key (integer).
  // r5: value (integer).
  switch (array_type) {
    case kExternalByteArray:
    case kExternalUnsignedByteArray:
      __ strb(r5, MemOperand(r3, r4, LSL, 0));
      break;
    case kExternalShortArray:
    case kExternalUnsignedShortArray:
      __ strh(r5, MemOperand(r3, r4, LSL, 1));
      break;
    case kExternalIntArray:
    case kExternalUnsignedIntArray:
      __ str(r5, MemOperand(r3, r4, LSL, 2));
      break;
    case kExternalFloatArray:
      // Need to perform int-to-float conversion.
      ConvertIntToFloat(masm, r5, r6, r7, r9);
      __ str(r6, MemOperand(r3, r4, LSL, 2));
      break;
    default:
      UNREACHABLE();
      break;
  }

  // Entry registers are intact, r0 holds the value which is the return value.
  __ Ret();


  // r3: external array.
  // r4: index (integer).
  __ bind(&check_heap_number);
  // Loads value's map into r5 as a side effect.
  __ CompareObjectType(value, r5, r6, HEAP_NUMBER_TYPE);
  __ b(ne, &slow);

  __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));

  // r3: base pointer of external storage.
  // r4: key (integer).

  // The WebGL specification leaves the behavior of storing NaN and
  // +/-Infinity into integer arrays basically undefined. For more
  // reproducible behavior, convert these to zero.
  if (CpuFeatures::IsSupported(VFP3)) {
    CpuFeatures::Scope scope(VFP3);


    if (array_type == kExternalFloatArray) {
      // vldr requires offset to be a multiple of 4 so we can not
      // include -kHeapObjectTag into it.
      __ sub(r5, r0, Operand(kHeapObjectTag));
      __ vldr(d0, r5, HeapNumber::kValueOffset);
      __ vcvt_f32_f64(s0, d0);
      __ vmov(r5, s0);
      __ str(r5, MemOperand(r3, r4, LSL, 2));
    } else {
      // Need to perform float-to-int conversion.
      // Test for NaN or infinity (both give zero).
      // NOTE(review): at this point r5 holds value's *map* (loaded by
      // CompareObjectType above), so this loads kExponentOffset relative to
      // the map rather than the HeapNumber itself — looks suspicious;
      // confirm against upstream V8 (the non-VFP path below loads the
      // exponent from `value`).
      __ ldr(r6, FieldMemOperand(r5, HeapNumber::kExponentOffset));

      // Hoisted load. vldr requires offset to be a multiple of 4 so we can not
      // include -kHeapObjectTag into it.
      __ sub(r5, r0, Operand(kHeapObjectTag));
      __ vldr(d0, r5, HeapNumber::kValueOffset);

      __ Sbfx(r6, r6, HeapNumber::kExponentShift, HeapNumber::kExponentBits);
      // NaNs and Infinities have all-one exponents so they sign extend to -1.
      __ cmp(r6, Operand(-1));
      __ mov(r5, Operand(Smi::FromInt(0)), LeaveCC, eq);

      // Not infinity or NaN simply convert to int (the ne condition carries
      // over from the cmp above, skipping the conversion for NaN/Inf).
      if (IsElementTypeSigned(array_type)) {
        __ vcvt_s32_f64(s0, d0, ne);
      } else {
        __ vcvt_u32_f64(s0, d0, ne);
      }
      __ vmov(r5, s0, ne);

      switch (array_type) {
        case kExternalByteArray:
        case kExternalUnsignedByteArray:
          __ strb(r5, MemOperand(r3, r4, LSL, 0));
          break;
        case kExternalShortArray:
        case kExternalUnsignedShortArray:
          __ strh(r5, MemOperand(r3, r4, LSL, 1));
          break;
        case kExternalIntArray:
        case kExternalUnsignedIntArray:
          __ str(r5, MemOperand(r3, r4, LSL, 2));
          break;
        default:
          UNREACHABLE();
          break;
      }
    }

    // Entry registers are intact, r0 holds the value which is the return value.
    __ Ret();
  } else {
    // VFP3 is not available do manual conversions.
    __ ldr(r5, FieldMemOperand(value, HeapNumber::kExponentOffset));
    __ ldr(r6, FieldMemOperand(value, HeapNumber::kMantissaOffset));

    if (array_type == kExternalFloatArray) {
      // Manually convert the double in (r5: exponent word, r6: mantissa word)
      // to a binary32 word and store it.
      Label done, nan_or_infinity_or_zero;
      static const int kMantissaInHiWordShift =
          kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;

      static const int kMantissaInLoWordShift =
          kBitsPerInt - kMantissaInHiWordShift;

      // Test for all special exponent values: zeros, subnormal numbers, NaNs
      // and infinities. All these should be converted to 0.
      __ mov(r7, Operand(HeapNumber::kExponentMask));
      __ and_(r9, r5, Operand(r7), SetCC);
      __ b(eq, &nan_or_infinity_or_zero);

      __ teq(r9, Operand(r7));
      __ mov(r9, Operand(kBinary32ExponentMask), LeaveCC, eq);
      __ b(eq, &nan_or_infinity_or_zero);

      // Rebias exponent from double bias to single-precision bias.
      __ mov(r9, Operand(r9, LSR, HeapNumber::kExponentShift));
      __ add(r9,
             r9,
             Operand(kBinary32ExponentBias - HeapNumber::kExponentBias));

      // Overflows the binary32 exponent range: produce signed infinity.
      __ cmp(r9, Operand(kBinary32MaxExponent));
      __ and_(r5, r5, Operand(HeapNumber::kSignMask), LeaveCC, gt);
      __ orr(r5, r5, Operand(kBinary32ExponentMask), LeaveCC, gt);
      __ b(gt, &done);

      // Underflows the binary32 exponent range: produce signed zero.
      __ cmp(r9, Operand(kBinary32MinExponent));
      __ and_(r5, r5, Operand(HeapNumber::kSignMask), LeaveCC, lt);
      __ b(lt, &done);

      // Assemble sign, truncated mantissa (high word plus the top bits of
      // the low word) and rebased exponent into the binary32 word.
      __ and_(r7, r5, Operand(HeapNumber::kSignMask));
      __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
      __ orr(r7, r7, Operand(r5, LSL, kMantissaInHiWordShift));
      __ orr(r7, r7, Operand(r6, LSR, kMantissaInLoWordShift));
      __ orr(r5, r7, Operand(r9, LSL, kBinary32ExponentShift));

      __ bind(&done);
      __ str(r5, MemOperand(r3, r4, LSL, 2));
      // Entry registers are intact, r0 holds the value which is the return
      // value.
      __ Ret();

      // Zero/subnormal maps to signed zero; NaN/Infinity keep their mantissa
      // bits with an all-ones binary32 exponent (set in r9 above).
      __ bind(&nan_or_infinity_or_zero);
      __ and_(r7, r5, Operand(HeapNumber::kSignMask));
      __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
      __ orr(r9, r9, r7);
      __ orr(r9, r9, Operand(r5, LSL, kMantissaInHiWordShift));
      __ orr(r5, r9, Operand(r6, LSR, kMantissaInLoWordShift));
      __ b(&done);
    } else {
      // Manual double-to-integer conversion for the integer element kinds.
      bool is_signed_type = IsElementTypeSigned(array_type);
      int meaningfull_bits = is_signed_type ? (kBitsPerInt - 1) : kBitsPerInt;
      int32_t min_value = is_signed_type ? 0x80000000 : 0x00000000;

      Label done, sign;

      // Test for all special exponent values: zeros, subnormal numbers, NaNs
      // and infinities. All these should be converted to 0.
      __ mov(r7, Operand(HeapNumber::kExponentMask));
      __ and_(r9, r5, Operand(r7), SetCC);
      __ mov(r5, Operand(0), LeaveCC, eq);
      __ b(eq, &done);

      __ teq(r9, Operand(r7));
      __ mov(r5, Operand(0), LeaveCC, eq);
      __ b(eq, &done);

      // Unbias exponent.
      __ mov(r9, Operand(r9, LSR, HeapNumber::kExponentShift));
      __ sub(r9, r9, Operand(HeapNumber::kExponentBias), SetCC);
      // If exponent is negative than result is 0.
      __ mov(r5, Operand(0), LeaveCC, mi);
      __ b(mi, &done);

      // If exponent is too big than result is minimal value.
      __ cmp(r9, Operand(meaningfull_bits - 1));
      __ mov(r5, Operand(min_value), LeaveCC, ge);
      __ b(ge, &done);

      // Extract sign (r7) and mantissa (r5), restoring the implicit
      // leading 1 of the mantissa.
      __ and_(r7, r5, Operand(HeapNumber::kSignMask), SetCC);
      __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
      __ orr(r5, r5, Operand(1u << HeapNumber::kMantissaBitsInTopWord));

      // Shift the mantissa into place.  If the shift amount is non-negative
      // (pl) a right shift of the high word suffices; otherwise bits from
      // the low mantissa word must be merged in below.
      __ rsb(r9, r9, Operand(HeapNumber::kMantissaBitsInTopWord), SetCC);
      __ mov(r5, Operand(r5, LSR, r9), LeaveCC, pl);
      __ b(pl, &sign);

      __ rsb(r9, r9, Operand(0));
      __ mov(r5, Operand(r5, LSL, r9));
      __ rsb(r9, r9, Operand(meaningfull_bits));
      __ orr(r5, r5, Operand(r6, LSR, r9));

      // Negate the result if the sign bit was set.
      __ bind(&sign);
      __ teq(r7, Operand(0));
      __ rsb(r5, r5, Operand(0), LeaveCC, ne);

      __ bind(&done);
      switch (array_type) {
        case kExternalByteArray:
        case kExternalUnsignedByteArray:
          __ strb(r5, MemOperand(r3, r4, LSL, 0));
          break;
        case kExternalShortArray:
        case kExternalUnsignedShortArray:
          __ strh(r5, MemOperand(r3, r4, LSL, 1));
          break;
        case kExternalIntArray:
        case kExternalUnsignedIntArray:
          __ str(r5, MemOperand(r3, r4, LSL, 2));
          break;
        default:
          UNREACHABLE();
          break;
      }
    }
  }

  // Slow case: call runtime.
  __ bind(&slow);

  // Entry registers are intact.
  // r0: value
  // r1: key
  // r2: receiver
  GenerateRuntimeSetProperty(masm);
}
2148
+
2149
+
2150
+ void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
2151
+ // ----------- S t a t e -------------
2152
+ // -- r0 : value
2153
+ // -- r1 : receiver
2154
+ // -- r2 : name
2155
+ // -- lr : return address
2156
+ // -----------------------------------
2157
+
2158
+ // Get the receiver from the stack and probe the stub cache.
2159
+ Code::Flags flags = Code::ComputeFlags(Code::STORE_IC,
2160
+ NOT_IN_LOOP,
2161
+ MONOMORPHIC);
2162
+ StubCache::GenerateProbe(masm, flags, r1, r2, r3, no_reg);
2163
+
2164
+ // Cache miss: Jump to runtime.
2165
+ GenerateMiss(masm);
2166
+ }
2167
+
2168
+
2169
+ void StoreIC::GenerateMiss(MacroAssembler* masm) {
2170
+ // ----------- S t a t e -------------
2171
+ // -- r0 : value
2172
+ // -- r1 : receiver
2173
+ // -- r2 : name
2174
+ // -- lr : return address
2175
+ // -----------------------------------
2176
+
2177
+ __ Push(r1, r2, r0);
2178
+
2179
+ // Perform tail call to the entry.
2180
+ ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_Miss));
2181
+ __ TailCallExternalReference(ref, 3, 1);
2182
+ }
2183
+
2184
+
2185
+ void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
2186
+ // ----------- S t a t e -------------
2187
+ // -- r0 : value
2188
+ // -- r1 : receiver
2189
+ // -- r2 : name
2190
+ // -- lr : return address
2191
+ // -----------------------------------
2192
+ //
2193
+ // This accepts as a receiver anything JSObject::SetElementsLength accepts
2194
+ // (currently anything except for external and pixel arrays which means
2195
+ // anything with elements of FixedArray type.), but currently is restricted
2196
+ // to JSArray.
2197
+ // Value must be a number, but only smis are accepted as the most common case.
2198
+
2199
+ Label miss;
2200
+
2201
+ Register receiver = r1;
2202
+ Register value = r0;
2203
+ Register scratch = r3;
2204
+
2205
+ // Check that the receiver isn't a smi.
2206
+ __ BranchOnSmi(receiver, &miss);
2207
+
2208
+ // Check that the object is a JS array.
2209
+ __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
2210
+ __ b(ne, &miss);
2211
+
2212
+ // Check that elements are FixedArray.
2213
+ __ ldr(scratch, FieldMemOperand(receiver, JSArray::kElementsOffset));
2214
+ __ CompareObjectType(scratch, scratch, scratch, FIXED_ARRAY_TYPE);
2215
+ __ b(ne, &miss);
2216
+
2217
+ // Check that value is a smi.
2218
+ __ BranchOnNotSmi(value, &miss);
2219
+
2220
+ // Prepare tail call to StoreIC_ArrayLength.
2221
+ __ Push(receiver, value);
2222
+
2223
+ ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_ArrayLength));
2224
+ __ TailCallExternalReference(ref, 2, 1);
2225
+
2226
+ __ bind(&miss);
2227
+
2228
+ GenerateMiss(masm);
2229
+ }
2230
+
2231
+
2232
+ void StoreIC::GenerateNormal(MacroAssembler* masm) {
2233
+ // ----------- S t a t e -------------
2234
+ // -- r0 : value
2235
+ // -- r1 : receiver
2236
+ // -- r2 : name
2237
+ // -- lr : return address
2238
+ // -----------------------------------
2239
+ Label miss;
2240
+
2241
+ GenerateStringDictionaryReceiverCheck(masm, r1, r3, r4, r5, &miss);
2242
+
2243
+ GenerateDictionaryStore(masm, &miss, r3, r2, r0, r4, r5);
2244
+ __ IncrementCounter(&Counters::store_normal_hit, 1, r4, r5);
2245
+ __ Ret();
2246
+
2247
+ __ bind(&miss);
2248
+ __ IncrementCounter(&Counters::store_normal_miss, 1, r4, r5);
2249
+ GenerateMiss(masm);
2250
+ }
2251
+
2252
+
2253
+ #undef __
2254
+
2255
+
2256
+ } } // namespace v8::internal
2257
+
2258
+ #endif // V8_TARGET_ARCH_ARM