therubyracer 0.6.3 → 0.7.0.pre

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of therubyracer might be problematic. Click here for more details.

Files changed (520):
  1. data/Rakefile +10 -9
  2. data/ext/v8/callbacks.cpp +15 -8
  3. data/ext/v8/converters.cpp +7 -8
  4. data/ext/v8/converters.h +0 -2
  5. data/ext/v8/extconf.rb +2 -0
  6. data/ext/v8/rr.cpp +169 -0
  7. data/ext/v8/rr.h +34 -0
  8. data/ext/v8/upstream/{2.0.6 → 2.1.10}/AUTHORS +8 -1
  9. data/ext/v8/upstream/{2.0.6 → 2.1.10}/ChangeLog +198 -0
  10. data/ext/v8/upstream/{2.0.6 → 2.1.10}/LICENSE +0 -0
  11. data/ext/v8/upstream/{2.0.6 → 2.1.10}/SConstruct +152 -38
  12. data/ext/v8/upstream/{2.0.6 → 2.1.10}/include/v8-debug.h +52 -4
  13. data/ext/v8/upstream/2.1.10/include/v8-profiler.h +176 -0
  14. data/ext/v8/upstream/{2.0.6 → 2.1.10}/include/v8.h +227 -48
  15. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/SConscript +60 -10
  16. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/accessors.cc +5 -39
  17. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/accessors.h +0 -0
  18. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/allocation.cc +0 -0
  19. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/allocation.h +0 -0
  20. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/api.cc +477 -57
  21. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/api.h +8 -4
  22. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/apinatives.js +1 -1
  23. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/apiutils.h +0 -0
  24. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/arguments.h +5 -6
  25. data/ext/v8/upstream/{2.0.6/src/arm/assembler-thumb2-inl.h → 2.1.10/src/arm/assembler-arm-inl.h} +42 -27
  26. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/arm/assembler-arm.cc +430 -216
  27. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/arm/assembler-arm.h +199 -110
  28. data/ext/v8/upstream/{2.0.6/src/arm/assembler-arm-inl.h → 2.1.10/src/arm/assembler-thumb2-inl.h} +4 -18
  29. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/arm/assembler-thumb2.cc +142 -85
  30. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/arm/assembler-thumb2.h +18 -9
  31. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/arm/builtins-arm.cc +127 -87
  32. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/arm/codegen-arm-inl.h +3 -5
  33. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/arm/codegen-arm.cc +4634 -1427
  34. data/ext/v8/upstream/2.1.10/src/arm/codegen-arm.h +946 -0
  35. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/arm/constants-arm.cc +21 -3
  36. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/arm/constants-arm.h +39 -13
  37. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/arm/cpu-arm.cc +9 -3
  38. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/arm/debug-arm.cc +34 -8
  39. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/arm/disasm-arm.cc +262 -117
  40. data/ext/v8/upstream/2.1.10/src/arm/fast-codegen-arm.cc +238 -0
  41. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/arm/frames-arm.cc +0 -0
  42. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/arm/frames-arm.h +0 -0
  43. data/ext/v8/upstream/{2.0.6/src/arm/fast-codegen-arm.cc → 2.1.10/src/arm/full-codegen-arm.cc} +453 -283
  44. data/ext/v8/upstream/2.1.10/src/arm/ic-arm.cc +1833 -0
  45. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/arm/jump-target-arm.cc +3 -29
  46. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/arm/macro-assembler-arm.cc +564 -104
  47. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/arm/macro-assembler-arm.h +255 -46
  48. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/arm/regexp-macro-assembler-arm.cc +78 -104
  49. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/arm/regexp-macro-assembler-arm.h +6 -22
  50. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/arm/register-allocator-arm-inl.h +0 -3
  51. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/arm/register-allocator-arm.cc +0 -0
  52. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/arm/register-allocator-arm.h +2 -1
  53. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/arm/simulator-arm.cc +557 -180
  54. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/arm/simulator-arm.h +52 -4
  55. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/arm/stub-cache-arm.cc +904 -240
  56. data/ext/v8/upstream/2.1.10/src/arm/virtual-frame-arm.cc +668 -0
  57. data/ext/v8/upstream/2.1.10/src/arm/virtual-frame-arm.h +503 -0
  58. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/array.js +82 -109
  59. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/assembler.cc +49 -13
  60. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/assembler.h +27 -5
  61. data/ext/v8/upstream/2.1.10/src/ast.cc +1138 -0
  62. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/ast.h +354 -53
  63. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/bootstrapper.cc +609 -501
  64. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/bootstrapper.h +27 -8
  65. data/ext/v8/upstream/2.1.10/src/builtins.cc +1512 -0
  66. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/builtins.h +41 -25
  67. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/bytecodes-irregexp.h +0 -0
  68. data/ext/v8/upstream/2.1.10/src/cached-powers.h +119 -0
  69. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/char-predicates-inl.h +0 -0
  70. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/char-predicates.h +0 -0
  71. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/checks.cc +0 -0
  72. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/checks.h +25 -1
  73. data/ext/v8/upstream/2.1.10/src/circular-queue-inl.h +101 -0
  74. data/ext/v8/upstream/2.1.10/src/circular-queue.cc +121 -0
  75. data/ext/v8/upstream/2.1.10/src/circular-queue.h +129 -0
  76. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/code-stubs.cc +23 -8
  77. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/code-stubs.h +16 -4
  78. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/code.h +0 -0
  79. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/codegen-inl.h +6 -34
  80. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/codegen.cc +73 -92
  81. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/codegen.h +164 -68
  82. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/compilation-cache.cc +117 -73
  83. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/compilation-cache.h +18 -17
  84. data/ext/v8/upstream/2.1.10/src/compiler.cc +623 -0
  85. data/ext/v8/upstream/2.1.10/src/compiler.h +299 -0
  86. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/contexts.cc +0 -0
  87. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/contexts.h +8 -11
  88. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/conversions-inl.h +28 -13
  89. data/ext/v8/upstream/2.1.10/src/conversions.cc +1105 -0
  90. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/conversions.h +9 -4
  91. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/counters.cc +0 -0
  92. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/counters.h +1 -1
  93. data/ext/v8/upstream/2.1.10/src/cpu-profiler-inl.h +99 -0
  94. data/ext/v8/upstream/2.1.10/src/cpu-profiler.cc +494 -0
  95. data/ext/v8/upstream/2.1.10/src/cpu-profiler.h +277 -0
  96. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/cpu.h +0 -0
  97. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/d8-debug.cc +15 -4
  98. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/d8-debug.h +0 -0
  99. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/d8-posix.cc +18 -0
  100. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/d8-readline.cc +2 -2
  101. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/d8-windows.cc +0 -0
  102. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/d8.cc +10 -6
  103. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/d8.h +2 -0
  104. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/d8.js +43 -38
  105. data/ext/v8/upstream/2.1.10/src/data-flow.cc +755 -0
  106. data/ext/v8/upstream/2.1.10/src/data-flow.h +278 -0
  107. data/ext/v8/upstream/{2.0.6/src/date-delay.js → 2.1.10/src/date.js} +100 -110
  108. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/dateparser-inl.h +11 -2
  109. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/dateparser.cc +24 -26
  110. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/dateparser.h +8 -4
  111. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/debug-agent.cc +3 -1
  112. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/debug-agent.h +0 -0
  113. data/ext/v8/upstream/{2.0.6/src/debug-delay.js → 2.1.10/src/debug-debugger.js} +111 -15
  114. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/debug.cc +156 -96
  115. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/debug.h +53 -5
  116. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/disasm.h +0 -0
  117. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/disassembler.cc +2 -8
  118. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/disassembler.h +0 -0
  119. data/ext/v8/upstream/2.1.10/src/diy-fp.cc +58 -0
  120. data/ext/v8/upstream/2.1.10/src/diy-fp.h +117 -0
  121. data/ext/v8/upstream/2.1.10/src/double.h +169 -0
  122. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/dtoa-config.c +4 -3
  123. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/execution.cc +22 -22
  124. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/execution.h +17 -4
  125. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/factory.cc +70 -46
  126. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/factory.h +27 -17
  127. data/ext/v8/upstream/2.1.10/src/fast-codegen.cc +746 -0
  128. data/ext/v8/upstream/2.1.10/src/fast-codegen.h +161 -0
  129. data/ext/v8/upstream/2.1.10/src/fast-dtoa.cc +512 -0
  130. data/ext/v8/upstream/2.1.10/src/fast-dtoa.h +59 -0
  131. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/flag-definitions.h +32 -11
  132. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/flags.cc +0 -0
  133. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/flags.h +0 -0
  134. data/ext/v8/upstream/2.1.10/src/flow-graph.cc +763 -0
  135. data/ext/v8/upstream/2.1.10/src/flow-graph.h +180 -0
  136. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/frame-element.cc +1 -4
  137. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/frame-element.h +49 -11
  138. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/frames-inl.h +2 -0
  139. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/frames.cc +91 -17
  140. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/frames.h +24 -2
  141. data/ext/v8/upstream/{2.0.6/src/fast-codegen.cc → 2.1.10/src/full-codegen.cc} +549 -198
  142. data/ext/v8/upstream/{2.0.6/src/fast-codegen.h → 2.1.10/src/full-codegen.h} +109 -29
  143. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/func-name-inferrer.cc +0 -0
  144. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/func-name-inferrer.h +0 -0
  145. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/global-handles.cc +0 -0
  146. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/global-handles.h +0 -0
  147. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/globals.h +67 -43
  148. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/handles-inl.h +0 -0
  149. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/handles.cc +124 -129
  150. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/handles.h +33 -15
  151. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/hashmap.cc +0 -0
  152. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/hashmap.h +0 -0
  153. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/heap-inl.h +56 -12
  154. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/heap-profiler.cc +2 -2
  155. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/heap-profiler.h +2 -0
  156. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/heap.cc +413 -221
  157. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/heap.h +192 -67
  158. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/ia32/assembler-ia32-inl.h +6 -0
  159. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/ia32/assembler-ia32.cc +187 -32
  160. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/ia32/assembler-ia32.h +31 -4
  161. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/ia32/builtins-ia32.cc +160 -133
  162. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/ia32/codegen-ia32-inl.h +0 -0
  163. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/ia32/codegen-ia32.cc +5533 -2177
  164. data/ext/v8/upstream/{2.0.6/src/x64/codegen-x64.h → 2.1.10/src/ia32/codegen-ia32.h} +453 -134
  165. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/ia32/cpu-ia32.cc +0 -0
  166. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/ia32/debug-ia32.cc +63 -8
  167. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/ia32/disasm-ia32.cc +99 -16
  168. data/ext/v8/upstream/2.1.10/src/ia32/fast-codegen-ia32.cc +950 -0
  169. data/ext/v8/upstream/2.1.10/src/ia32/fast-codegen-ia32.h +155 -0
  170. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/ia32/frames-ia32.cc +0 -0
  171. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/ia32/frames-ia32.h +0 -0
  172. data/ext/v8/upstream/{2.0.6/src/ia32/fast-codegen-ia32.cc → 2.1.10/src/ia32/full-codegen-ia32.cc} +454 -253
  173. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/ia32/ic-ia32.cc +545 -390
  174. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/ia32/jump-target-ia32.cc +1 -0
  175. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/ia32/macro-assembler-ia32.cc +330 -139
  176. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/ia32/macro-assembler-ia32.h +96 -30
  177. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/ia32/regexp-macro-assembler-ia32.cc +73 -89
  178. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/ia32/regexp-macro-assembler-ia32.h +6 -21
  179. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/ia32/register-allocator-ia32-inl.h +0 -0
  180. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/ia32/register-allocator-ia32.cc +59 -5
  181. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/ia32/register-allocator-ia32.h +0 -0
  182. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/ia32/simulator-ia32.cc +0 -0
  183. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/ia32/simulator-ia32.h +2 -2
  184. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/ia32/stub-cache-ia32.cc +745 -303
  185. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/ia32/virtual-frame-ia32.cc +278 -71
  186. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/ia32/virtual-frame-ia32.h +78 -22
  187. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/ic-inl.h +0 -0
  188. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/ic.cc +172 -89
  189. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/ic.h +51 -20
  190. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/interpreter-irregexp.cc +0 -0
  191. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/interpreter-irregexp.h +0 -0
  192. data/ext/v8/upstream/{2.0.6/src/json-delay.js → 2.1.10/src/json.js} +26 -12
  193. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/jsregexp.cc +151 -118
  194. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/jsregexp.h +31 -7
  195. data/ext/v8/upstream/2.1.10/src/jump-target-heavy-inl.h +51 -0
  196. data/ext/v8/upstream/{2.0.6/src/jump-target.cc → 2.1.10/src/jump-target-heavy.cc} +164 -184
  197. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/jump-target-inl.h +8 -9
  198. data/ext/v8/upstream/{2.0.6/src/usage-analyzer.h → 2.1.10/src/jump-target-light-inl.h} +9 -7
  199. data/ext/v8/upstream/2.1.10/src/jump-target-light.cc +86 -0
  200. data/ext/v8/upstream/2.1.10/src/jump-target.cc +155 -0
  201. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/jump-target.h +6 -0
  202. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/list-inl.h +0 -0
  203. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/list.h +2 -1
  204. data/ext/v8/upstream/2.1.10/src/liveedit-debugger.js +944 -0
  205. data/ext/v8/upstream/2.1.10/src/liveedit.cc +1468 -0
  206. data/ext/v8/upstream/2.1.10/src/liveedit.h +170 -0
  207. data/ext/v8/upstream/2.1.10/src/log-inl.h +59 -0
  208. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/log-utils.cc +3 -9
  209. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/log-utils.h +1 -4
  210. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/log.cc +198 -95
  211. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/log.h +50 -49
  212. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/macro-assembler.h +9 -0
  213. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/macros.py +30 -11
  214. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/mark-compact.cc +464 -152
  215. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/mark-compact.h +41 -20
  216. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/math.js +9 -8
  217. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/memory.h +0 -0
  218. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/messages.cc +1 -0
  219. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/messages.h +0 -0
  220. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/messages.js +46 -9
  221. data/ext/v8/upstream/2.1.10/src/mips/assembler-mips-inl.h +215 -0
  222. data/ext/v8/upstream/2.1.10/src/mips/assembler-mips.cc +1208 -0
  223. data/ext/v8/upstream/2.1.10/src/mips/assembler-mips.h +665 -0
  224. data/ext/v8/upstream/2.1.10/src/mips/builtins-mips.cc +202 -0
  225. data/ext/v8/upstream/2.1.10/src/mips/codegen-mips-inl.h +70 -0
  226. data/ext/v8/upstream/2.1.10/src/mips/codegen-mips.cc +1428 -0
  227. data/ext/v8/upstream/{2.0.6/src/arm/codegen-arm.h → 2.1.10/src/mips/codegen-mips.h} +103 -205
  228. data/ext/v8/upstream/2.1.10/src/mips/constants-mips.cc +323 -0
  229. data/ext/v8/upstream/2.1.10/src/mips/constants-mips.h +525 -0
  230. data/ext/v8/upstream/2.1.10/src/mips/cpu-mips.cc +69 -0
  231. data/ext/v8/upstream/2.1.10/src/mips/debug-mips.cc +128 -0
  232. data/ext/v8/upstream/2.1.10/src/mips/disasm-mips.cc +784 -0
  233. data/ext/v8/upstream/2.1.10/src/mips/fast-codegen-mips.cc +74 -0
  234. data/ext/v8/upstream/2.1.10/src/mips/frames-mips.cc +99 -0
  235. data/ext/v8/upstream/2.1.10/src/mips/frames-mips.h +164 -0
  236. data/ext/v8/upstream/2.1.10/src/mips/full-codegen-mips.cc +273 -0
  237. data/ext/v8/upstream/2.1.10/src/mips/ic-mips.cc +217 -0
  238. data/ext/v8/upstream/2.1.10/src/mips/jump-target-mips.cc +172 -0
  239. data/ext/v8/upstream/2.1.10/src/mips/macro-assembler-mips.cc +1323 -0
  240. data/ext/v8/upstream/2.1.10/src/mips/macro-assembler-mips.h +461 -0
  241. data/ext/v8/upstream/2.1.10/src/mips/register-allocator-mips-inl.h +137 -0
  242. data/ext/v8/upstream/2.1.10/src/mips/register-allocator-mips.cc +60 -0
  243. data/ext/v8/upstream/2.1.10/src/mips/register-allocator-mips.h +46 -0
  244. data/ext/v8/upstream/2.1.10/src/mips/simulator-mips.cc +1648 -0
  245. data/ext/v8/upstream/2.1.10/src/mips/simulator-mips.h +311 -0
  246. data/ext/v8/upstream/2.1.10/src/mips/stub-cache-mips.cc +400 -0
  247. data/ext/v8/upstream/2.1.10/src/mips/virtual-frame-mips.cc +316 -0
  248. data/ext/v8/upstream/{2.0.6/src/arm/virtual-frame-arm.h → 2.1.10/src/mips/virtual-frame-mips.h} +87 -71
  249. data/ext/v8/upstream/{2.0.6/src/mirror-delay.js → 2.1.10/src/mirror-debugger.js} +51 -45
  250. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/mksnapshot.cc +97 -10
  251. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/natives.h +6 -6
  252. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/objects-debug.cc +47 -5
  253. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/objects-inl.h +154 -38
  254. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/objects.cc +528 -280
  255. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/objects.h +302 -95
  256. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/oprofile-agent.cc +25 -33
  257. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/oprofile-agent.h +9 -1
  258. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/parser.cc +444 -72
  259. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/parser.h +4 -2
  260. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/platform-freebsd.cc +32 -13
  261. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/platform-linux.cc +59 -25
  262. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/platform-macos.cc +30 -13
  263. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/platform-nullos.cc +0 -0
  264. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/platform-openbsd.cc +21 -2
  265. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/platform-posix.cc +0 -18
  266. data/ext/v8/upstream/2.1.10/src/platform-solaris.cc +607 -0
  267. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/platform-win32.cc +16 -17
  268. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/platform.h +25 -8
  269. data/ext/v8/upstream/2.1.10/src/powers-ten.h +2461 -0
  270. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/prettyprinter.cc +49 -29
  271. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/prettyprinter.h +3 -1
  272. data/ext/v8/upstream/2.1.10/src/profile-generator-inl.h +124 -0
  273. data/ext/v8/upstream/2.1.10/src/profile-generator.cc +583 -0
  274. data/ext/v8/upstream/2.1.10/src/profile-generator.h +364 -0
  275. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/property.cc +1 -1
  276. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/property.h +12 -24
  277. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/regexp-macro-assembler-irregexp-inl.h +2 -2
  278. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/regexp-macro-assembler-irregexp.cc +2 -2
  279. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/regexp-macro-assembler-irregexp.h +2 -2
  280. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/regexp-macro-assembler-tracer.cc +2 -2
  281. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/regexp-macro-assembler-tracer.h +0 -0
  282. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/regexp-macro-assembler.cc +33 -10
  283. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/regexp-macro-assembler.h +12 -4
  284. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/regexp-stack.cc +0 -0
  285. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/regexp-stack.h +0 -0
  286. data/ext/v8/upstream/{2.0.6/src/regexp-delay.js → 2.1.10/src/regexp.js} +180 -58
  287. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/register-allocator-inl.h +68 -1
  288. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/register-allocator.cc +5 -2
  289. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/register-allocator.h +42 -17
  290. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/rewriter.cc +110 -4
  291. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/rewriter.h +0 -0
  292. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/runtime.cc +2733 -623
  293. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/runtime.h +43 -20
  294. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/runtime.js +46 -35
  295. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/scanner.cc +278 -36
  296. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/scanner.h +97 -26
  297. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/scopeinfo.cc +3 -3
  298. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/scopeinfo.h +1 -0
  299. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/scopes.cc +11 -12
  300. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/scopes.h +0 -1
  301. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/serialize.cc +298 -175
  302. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/serialize.h +184 -40
  303. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/shell.h +0 -0
  304. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/simulator.h +2 -0
  305. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/smart-pointer.h +0 -0
  306. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/snapshot-common.cc +16 -31
  307. data/ext/v8/upstream/2.1.10/src/snapshot-empty.cc +50 -0
  308. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/snapshot.h +13 -1
  309. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/spaces-inl.h +35 -27
  310. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/spaces.cc +256 -42
  311. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/spaces.h +136 -42
  312. data/ext/v8/upstream/{2.0.6/src/zone-inl.h → 2.1.10/src/splay-tree-inl.h} +102 -89
  313. data/ext/v8/upstream/2.1.10/src/splay-tree.h +203 -0
  314. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/string-stream.cc +0 -0
  315. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/string-stream.h +0 -0
  316. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/string.js +260 -149
  317. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/stub-cache.cc +195 -69
  318. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/stub-cache.h +127 -12
  319. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/third_party/dtoa/COPYING +0 -0
  320. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/third_party/dtoa/dtoa.c +0 -0
  321. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/third_party/valgrind/valgrind.h +0 -0
  322. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/token.cc +0 -0
  323. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/token.h +0 -0
  324. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/top.cc +26 -31
  325. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/top.h +3 -4
  326. data/ext/v8/upstream/2.1.10/src/type-info.cc +53 -0
  327. data/ext/v8/upstream/2.1.10/src/type-info.h +244 -0
  328. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/unicode-inl.h +0 -0
  329. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/unicode.cc +0 -0
  330. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/unicode.h +0 -0
  331. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/uri.js +6 -6
  332. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/utils.cc +0 -37
  333. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/utils.h +121 -50
  334. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/v8-counters.cc +0 -0
  335. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/v8-counters.h +130 -98
  336. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/v8.cc +42 -11
  337. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/v8.h +4 -2
  338. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/v8natives.js +202 -37
  339. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/v8threads.cc +11 -0
  340. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/v8threads.h +15 -0
  341. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/variables.cc +7 -51
  342. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/variables.h +5 -35
  343. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/version.cc +3 -3
  344. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/version.h +0 -0
  345. data/ext/v8/upstream/2.1.10/src/virtual-frame-heavy-inl.h +152 -0
  346. data/ext/v8/upstream/{2.0.6/src/virtual-frame.cc → 2.1.10/src/virtual-frame-heavy.cc} +107 -176
  347. data/ext/v8/upstream/2.1.10/src/virtual-frame-inl.h +39 -0
  348. data/ext/v8/upstream/2.1.10/src/virtual-frame-light-inl.h +69 -0
  349. data/ext/v8/upstream/2.1.10/src/virtual-frame-light.cc +49 -0
  350. data/ext/v8/upstream/2.1.10/src/virtual-frame.cc +49 -0
  351. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/virtual-frame.h +2 -0
  352. data/ext/v8/upstream/{2.0.6/src/log-inl.h → 2.1.10/src/vm-state-inl.h} +28 -20
  353. data/ext/v8/upstream/{2.0.6/src/snapshot-empty.cc → 2.1.10/src/vm-state.cc} +5 -6
  354. data/ext/v8/upstream/2.1.10/src/vm-state.h +75 -0
  355. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/x64/assembler-x64-inl.h +11 -0
  356. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/x64/assembler-x64.cc +285 -93
  357. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/x64/assembler-x64.h +81 -78
  358. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/x64/builtins-x64.cc +130 -87
  359. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/x64/codegen-x64-inl.h +0 -0
  360. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/x64/codegen-x64.cc +4520 -1317
  361. data/ext/v8/upstream/{2.0.6/src/ia32/codegen-ia32.h → 2.1.10/src/x64/codegen-x64.h} +362 -141
  362. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/x64/cpu-x64.cc +0 -0
  363. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/x64/debug-x64.cc +20 -4
  364. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/x64/disasm-x64.cc +121 -44
  365. data/ext/v8/upstream/2.1.10/src/x64/fast-codegen-x64.cc +246 -0
  366. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/x64/frames-x64.cc +0 -0
  367. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/x64/frames-x64.h +0 -0
  368. data/ext/v8/upstream/{2.0.6/src/x64/fast-codegen-x64.cc → 2.1.10/src/x64/full-codegen-x64.cc} +404 -231
  369. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/x64/ic-x64.cc +346 -117
  370. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/x64/jump-target-x64.cc +1 -0
  371. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/x64/macro-assembler-x64.cc +537 -181
  372. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/x64/macro-assembler-x64.h +140 -34
  373. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/x64/regexp-macro-assembler-x64.cc +74 -96
  374. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/x64/regexp-macro-assembler-x64.h +8 -25
  375. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/x64/register-allocator-x64-inl.h +1 -1
  376. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/x64/register-allocator-x64.cc +3 -0
  377. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/x64/register-allocator-x64.h +0 -0
  378. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/x64/simulator-x64.cc +0 -0
  379. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/x64/simulator-x64.h +2 -2
  380. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/x64/stub-cache-x64.cc +785 -288
  381. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/x64/virtual-frame-x64.cc +128 -52
  382. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/x64/virtual-frame-x64.h +40 -19
  383. data/ext/v8/upstream/2.1.10/src/zone-inl.h +82 -0
  384. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/zone.cc +1 -0
  385. data/ext/v8/upstream/{2.0.6 → 2.1.10}/src/zone.h +6 -90
  386. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/codemap.js +12 -0
  387. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/consarray.js +0 -0
  388. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/csvparser.js +22 -37
  389. data/ext/v8/upstream/2.1.10/tools/generate-ten-powers.scm +286 -0
  390. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/gyp/v8.gyp +86 -24
  391. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/js2c.py +22 -22
  392. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/jsmin.py +0 -0
  393. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/linux-tick-processor +10 -1
  394. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/linux-tick-processor.py +0 -0
  395. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/logreader.js +34 -16
  396. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/mac-nm +0 -0
  397. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/mac-tick-processor +0 -0
  398. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/oprofile/annotate +0 -0
  399. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/oprofile/common +0 -0
  400. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/oprofile/dump +0 -0
  401. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/oprofile/report +0 -0
  402. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/oprofile/reset +0 -0
  403. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/oprofile/run +0 -0
  404. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/oprofile/shutdown +0 -0
  405. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/oprofile/start +0 -0
  406. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/presubmit.py +0 -0
  407. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/process-heap-prof.py +0 -0
  408. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/profile.js +70 -0
  409. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/profile_view.js +0 -0
  410. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/run-valgrind.py +0 -0
  411. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/splaytree.js +0 -0
  412. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/splaytree.py +0 -0
  413. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/stats-viewer.py +0 -0
  414. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/test.py +7 -7
  415. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/tickprocessor-driver.js +7 -1
  416. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/tickprocessor.js +140 -9
  417. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/tickprocessor.py +40 -4
  418. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/utils.py +6 -0
  419. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/README.txt +0 -0
  420. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/arm.vsprops +1 -1
  421. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/common.vsprops +1 -2
  422. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/d8.vcproj +0 -6
  423. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/d8_arm.vcproj +193 -199
  424. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/d8_x64.vcproj +16 -8
  425. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/d8js2c.cmd +0 -0
  426. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/debug.vsprops +0 -0
  427. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/ia32.vsprops +5 -1
  428. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/js2c.cmd +1 -1
  429. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/release.vsprops +0 -0
  430. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/v8.sln +0 -0
  431. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/v8.vcproj +11 -7
  432. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/v8_arm.sln +0 -0
  433. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/v8_arm.vcproj +227 -223
  434. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/v8_base.vcproj +137 -5
  435. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/v8_base_arm.vcproj +116 -4
  436. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/v8_base_x64.vcproj +125 -4
  437. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/v8_cctest.vcproj +12 -6
  438. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/v8_cctest_arm.vcproj +12 -6
  439. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/v8_cctest_x64.vcproj +11 -11
  440. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/v8_mksnapshot.vcproj +0 -6
  441. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/v8_mksnapshot_x64.vcproj +0 -6
  442. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/v8_process_sample.vcproj +0 -6
  443. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/v8_process_sample_arm.vcproj +145 -151
  444. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/v8_process_sample_x64.vcproj +16 -6
  445. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/v8_shell_sample.vcproj +0 -6
  446. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/v8_shell_sample_arm.vcproj +145 -151
  447. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/v8_shell_sample_x64.vcproj +16 -8
  448. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/v8_snapshot.vcproj +0 -0
  449. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/v8_snapshot_cc.vcproj +0 -0
  450. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/v8_snapshot_cc_x64.vcproj +0 -0
  451. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/v8_snapshot_x64.vcproj +0 -0
  452. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/v8_x64.sln +0 -0
  453. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/v8_x64.vcproj +11 -7
  454. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/visual_studio/x64.vsprops +5 -1
  455. data/ext/v8/upstream/2.1.10/tools/windows-tick-processor.bat +29 -0
  456. data/ext/v8/upstream/{2.0.6 → 2.1.10}/tools/windows-tick-processor.py +0 -0
  457. data/ext/v8/upstream/Makefile +8 -2
  458. data/ext/v8/v8.cpp +21 -73
  459. data/ext/v8/v8_array.cpp +56 -0
  460. data/ext/v8/v8_array.h +8 -0
  461. data/ext/v8/v8_callbacks.cpp +121 -0
  462. data/ext/v8/v8_callbacks.h +8 -0
  463. data/ext/v8/v8_cxt.cpp +74 -77
  464. data/ext/v8/v8_cxt.h +2 -9
  465. data/ext/v8/v8_date.cpp +26 -0
  466. data/ext/v8/v8_date.h +6 -0
  467. data/ext/v8/v8_exception.cpp +55 -0
  468. data/ext/v8/v8_exception.h +6 -0
  469. data/ext/v8/v8_external.cpp +50 -0
  470. data/ext/v8/v8_external.h +8 -0
  471. data/ext/v8/v8_func.cpp +76 -18
  472. data/ext/v8/v8_func.h +5 -4
  473. data/ext/v8/v8_msg.cpp +55 -46
  474. data/ext/v8/v8_msg.h +3 -11
  475. data/ext/v8/v8_obj.cpp +67 -36
  476. data/ext/v8/v8_obj.h +6 -8
  477. data/ext/v8/v8_ref.cpp +25 -9
  478. data/ext/v8/v8_ref.h +3 -5
  479. data/ext/v8/v8_script.cpp +17 -10
  480. data/ext/v8/v8_script.h +3 -3
  481. data/ext/v8/v8_str.cpp +34 -6
  482. data/ext/v8/v8_str.h +4 -2
  483. data/ext/v8/v8_template.cpp +195 -33
  484. data/ext/v8/v8_template.h +4 -5
  485. data/ext/v8/v8_try_catch.cpp +99 -0
  486. data/ext/v8/v8_try_catch.h +5 -0
  487. data/ext/v8/v8_value.cpp +164 -0
  488. data/ext/v8/v8_value.h +10 -0
  489. data/lib/v8.rb +3 -1
  490. data/lib/v8/access.rb +60 -0
  491. data/lib/v8/array.rb +15 -0
  492. data/lib/v8/callbacks.rb +88 -0
  493. data/lib/v8/cli.rb +1 -1
  494. data/lib/v8/context.rb +55 -66
  495. data/lib/v8/function.rb +20 -2
  496. data/lib/v8/object.rb +14 -12
  497. data/lib/v8/ruby_error.rb +3 -0
  498. data/lib/v8/to.rb +59 -7
  499. data/spec/ext/cxt_spec.rb +2 -15
  500. data/spec/ext/func_spec.rb +17 -10
  501. data/spec/ext/try_catch_spec.rb +32 -0
  502. data/spec/foo.rb +17 -0
  503. data/spec/redjs/jsapi_spec.rb +173 -96
  504. data/spec/spec_helper.rb +7 -0
  505. data/spec/v8/to_spec.rb +0 -1
  506. data/therubyracer.gemspec +6 -6
  507. metadata +493 -386
  508. data/ext/v8/upstream/2.0.6/src/arm/ic-arm.cc +0 -849
  509. data/ext/v8/upstream/2.0.6/src/arm/virtual-frame-arm.cc +0 -412
  510. data/ext/v8/upstream/2.0.6/src/ast.cc +0 -512
  511. data/ext/v8/upstream/2.0.6/src/builtins.cc +0 -851
  512. data/ext/v8/upstream/2.0.6/src/compiler.cc +0 -1132
  513. data/ext/v8/upstream/2.0.6/src/compiler.h +0 -107
  514. data/ext/v8/upstream/2.0.6/src/conversions.cc +0 -709
  515. data/ext/v8/upstream/2.0.6/src/usage-analyzer.cc +0 -426
  516. data/ext/v8/upstream/2.0.6/tools/windows-tick-processor.bat +0 -5
  517. data/ext/v8/upstream/no-strict-aliasing.patch +0 -13
  518. data/ext/v8/v8_standalone.cpp +0 -69
  519. data/ext/v8/v8_standalone.h +0 -31
  520. data/spec/ext/obj_spec.rb +0 -13
@@ -0,0 +1,1833 @@
1
+ // Copyright 2006-2008 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+ #include "v8.h"
29
+
30
+ #include "assembler-arm.h"
31
+ #include "codegen-inl.h"
32
+ #include "disasm.h"
33
+ #include "ic-inl.h"
34
+ #include "runtime.h"
35
+ #include "stub-cache.h"
36
+
37
+ namespace v8 {
38
+ namespace internal {
39
+
40
+
41
+ // ----------------------------------------------------------------------------
42
+ // Static IC stub generators.
43
+ //
44
+
45
+ #define __ ACCESS_MASM(masm)
46
+
47
// Helper function used from LoadIC/CallIC GenerateNormal.
//
// Probes the receiver's string-dictionary property store for the name held
// in r2 and loads the corresponding value. Jumps to |miss| whenever the
// fast dictionary lookup cannot be performed: the receiver has a named
// interceptor, is a global (proxy/builtins) object, its properties array is
// not a hash table, the key is not found within kProbes probes, or the
// found property is not of NORMAL type.
static void GenerateDictionaryLoad(MacroAssembler* masm,
                                   Label* miss,
                                   Register t0,
                                   Register t1) {
  // Register use:
  //
  // t0 - used to hold the property dictionary.
  //
  // t1 - initially the receiver
  //    - used for the index into the property dictionary
  //    - holds the result on exit.
  //
  // r3 - used as temporary and to hold the capacity of the property
  //      dictionary.
  //
  // r2 - holds the name of the property and is unchanged.
  // r4 - used as temporary.

  Label done;

  // Check for the absence of an interceptor.
  // Load the map into t0.
  __ ldr(t0, FieldMemOperand(t1, JSObject::kMapOffset));
  // Bail out if the receiver has a named interceptor.
  __ ldrb(r3, FieldMemOperand(t0, Map::kBitFieldOffset));
  __ tst(r3, Operand(1 << Map::kHasNamedInterceptor));
  __ b(nz, miss);

  // Bail out if we have a JS global proxy object.
  __ ldrb(r3, FieldMemOperand(t0, Map::kInstanceTypeOffset));
  __ cmp(r3, Operand(JS_GLOBAL_PROXY_TYPE));
  __ b(eq, miss);

  // Possible work-around for http://crbug.com/16276.
  // See also: http://codereview.chromium.org/155418.
  __ cmp(r3, Operand(JS_GLOBAL_OBJECT_TYPE));
  __ b(eq, miss);
  __ cmp(r3, Operand(JS_BUILTINS_OBJECT_TYPE));
  __ b(eq, miss);

  // Check that the properties array is a dictionary (hash table map).
  __ ldr(t0, FieldMemOperand(t1, JSObject::kPropertiesOffset));
  __ ldr(r3, FieldMemOperand(t0, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r3, ip);
  __ b(ne, miss);

  // Compute the capacity mask (capacity - 1; capacity is a power of two).
  const int kCapacityOffset = StringDictionary::kHeaderSize +
      StringDictionary::kCapacityIndex * kPointerSize;
  __ ldr(r3, FieldMemOperand(t0, kCapacityOffset));
  __ mov(r3, Operand(r3, ASR, kSmiTagSize));  // convert smi to int
  __ sub(r3, r3, Operand(1));

  const int kElementsStartOffset = StringDictionary::kHeaderSize +
      StringDictionary::kElementsStartIndex * kPointerSize;

  // Generate an unrolled loop that performs a few probes before
  // giving up. Measurements done on Gmail indicate that 2 probes
  // cover ~93% of loads from dictionaries.
  static const int kProbes = 4;
  for (int i = 0; i < kProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ ldr(r4, FieldMemOperand(r2, String::kHashFieldOffset));
    if (i > 0) {
      // Add the probe offset (i + i * i) left shifted to avoid right shifting
      // the hash in a separate instruction. The value hash + i + i * i is right
      // shifted in the following and instruction.
      ASSERT(StringDictionary::GetProbeOffset(i) <
             1 << (32 - String::kHashFieldOffset));
      __ add(r4, r4, Operand(
          StringDictionary::GetProbeOffset(i) << String::kHashShift));
    }
    __ and_(r4, r3, Operand(r4, LSR, String::kHashShift));

    // Scale the index by multiplying by the element size.
    ASSERT(StringDictionary::kEntrySize == 3);
    __ add(r4, r4, Operand(r4, LSL, 1));  // r4 = r4 * 3

    // Check if the key is identical to the name.
    __ add(r4, t0, Operand(r4, LSL, 2));
    __ ldr(ip, FieldMemOperand(r4, kElementsStartOffset));
    __ cmp(r2, Operand(ip));
    if (i != kProbes - 1) {
      // Not the last probe: on a hit fall through to the value load.
      __ b(eq, &done);
    } else {
      // Last probe: a mismatch means the property is absent.
      __ b(ne, miss);
    }
  }

  // Check that the value is a normal property.
  __ bind(&done);  // r4 == t0 + 4*index
  __ ldr(r3, FieldMemOperand(r4, kElementsStartOffset + 2 * kPointerSize));
  __ tst(r3, Operand(PropertyDetails::TypeField::mask() << kSmiTagSize));
  __ b(ne, miss);

  // Get the value at the masked, scaled index and return.
  __ ldr(t1, FieldMemOperand(r4, kElementsStartOffset + 1 * kPointerSize));
}
148
+
149
+
150
// Probes the number dictionary |elements| for the untagged integer key in
// t0 and loads the matching value into t0 on success. Jumps to |miss| if
// the key is not found after kProbes probes or the property found is not
// of NORMAL type.
static void GenerateNumberDictionaryLoad(MacroAssembler* masm,
                                         Label* miss,
                                         Register elements,
                                         Register key,
                                         Register t0,
                                         Register t1,
                                         Register t2) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver and is unchanged.
  //
  // key      - holds the smi key on entry and is unchanged if a branch is
  //            performed to the miss label.
  //
  // Scratch registers:
  //
  // t0 - holds the untagged key on entry and holds the hash once computed.
  //      Holds the result on exit if the load succeeded.
  //
  // t1 - used to hold the capacity mask of the dictionary
  //
  // t2 - used for the index into the dictionary.
  Label done;

  // Compute the hash code from the untagged key. This must be kept in sync
  // with ComputeIntegerHash in utils.h.
  //
  // hash = ~hash + (hash << 15);
  __ mvn(t1, Operand(t0));
  __ add(t0, t1, Operand(t0, LSL, 15));
  // hash = hash ^ (hash >> 12);
  __ eor(t0, t0, Operand(t0, LSR, 12));
  // hash = hash + (hash << 2);
  __ add(t0, t0, Operand(t0, LSL, 2));
  // hash = hash ^ (hash >> 4);
  __ eor(t0, t0, Operand(t0, LSR, 4));
  // hash = hash * 2057;
  __ mov(t1, Operand(2057));
  __ mul(t0, t0, t1);
  // hash = hash ^ (hash >> 16);
  __ eor(t0, t0, Operand(t0, LSR, 16));

  // Compute the capacity mask (capacity - 1).
  __ ldr(t1, FieldMemOperand(elements, NumberDictionary::kCapacityOffset));
  __ mov(t1, Operand(t1, ASR, kSmiTagSize));  // convert smi to int
  __ sub(t1, t1, Operand(1));

  // Generate an unrolled loop that performs a few probes before giving up.
  static const int kProbes = 4;
  for (int i = 0; i < kProbes; i++) {
    // Use t2 for index calculations and keep the hash intact in t0.
    __ mov(t2, t0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      __ add(t2, t2, Operand(NumberDictionary::GetProbeOffset(i)));
    }
    __ and_(t2, t2, Operand(t1));

    // Scale the index by multiplying by the element size.
    ASSERT(NumberDictionary::kEntrySize == 3);
    __ add(t2, t2, Operand(t2, LSL, 1));  // t2 = t2 * 3

    // Check if the key is identical to the name.
    __ add(t2, elements, Operand(t2, LSL, kPointerSizeLog2));
    __ ldr(ip, FieldMemOperand(t2, NumberDictionary::kElementsStartOffset));
    __ cmp(key, Operand(ip));
    if (i != kProbes - 1) {
      // Not the last probe: on a hit fall through to the value load.
      __ b(eq, &done);
    } else {
      // Last probe: a mismatch means the key is absent.
      __ b(ne, miss);
    }
  }

  __ bind(&done);
  // Check that the value is a normal property.
  // t2: elements + (index * kPointerSize)
  const int kDetailsOffset =
      NumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  __ ldr(t1, FieldMemOperand(t2, kDetailsOffset));
  __ tst(t1, Operand(Smi::FromInt(PropertyDetails::TypeField::mask())));
  __ b(ne, miss);

  // Get the value at the masked, scaled index and return.
  const int kValueOffset =
      NumberDictionary::kElementsStartOffset + kPointerSize;
  __ ldr(t0, FieldMemOperand(t2, kValueOffset));
}
237
+
238
+
239
// Generates the LoadIC stub for the 'length' property of a JSArray:
// delegates the fast path to the stub compiler and falls back to the
// generic LOAD_IC miss handler otherwise.
void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------
  Label miss;

  StubCompiler::GenerateLoadArrayLength(masm, r0, r3, &miss);
  // Cache miss: Jump to runtime.
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}
252
+
253
+
254
// Generates the LoadIC stub for the 'length' property of a string:
// delegates the fast path to the stub compiler and falls back to the
// generic LOAD_IC miss handler otherwise.
void LoadIC::GenerateStringLength(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------
  Label miss;

  StubCompiler::GenerateLoadStringLength(masm, r0, r1, r3, &miss);
  // Cache miss: Jump to runtime.
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}
268
+
269
+
270
// Generates the LoadIC stub for the 'prototype' property of a function:
// delegates the fast path to the stub compiler and falls back to the
// generic LOAD_IC miss handler otherwise.
void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------
  Label miss;

  StubCompiler::GenerateLoadFunctionPrototype(masm, r0, r1, r3, &miss);
  // Cache miss: Jump to runtime.
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}
283
+
284
+
285
+ // Defined in ic.cc.
286
+ Object* CallIC_Miss(Arguments args);
287
+
288
// Generates the megamorphic CallIC stub: probes the global stub cache for
// the named property on the receiver; for primitive receivers (number,
// string, boolean) re-probes against the corresponding wrapper prototype's
// map. Falls back to the CallIC miss handler if nothing matches.
void CallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label number, non_number, non_string, boolean, probe, miss;

  // Get the receiver of the function from the stack into r1.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  // Probe the stub cache.
  Code::Flags flags =
      Code::ComputeFlags(Code::CALL_IC, NOT_IN_LOOP, MONOMORPHIC, NORMAL, argc);
  StubCache::GenerateProbe(masm, flags, r1, r2, r3, no_reg);

  // If the stub cache probing failed, the receiver might be a value.
  // For value objects, we use the map of the prototype objects for
  // the corresponding JSValue for the cache and that is what we need
  // to probe.
  //
  // Check for number.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &number);
  __ CompareObjectType(r1, r3, r3, HEAP_NUMBER_TYPE);
  __ b(ne, &non_number);
  __ bind(&number);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::NUMBER_FUNCTION_INDEX, r1);
  __ b(&probe);

  // Check for string.
  __ bind(&non_number);
  __ cmp(r3, Operand(FIRST_NONSTRING_TYPE));
  __ b(hs, &non_string);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::STRING_FUNCTION_INDEX, r1);
  __ b(&probe);

  // Check for boolean.
  __ bind(&non_string);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ cmp(r1, ip);
  __ b(eq, &boolean);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ cmp(r1, ip);
  __ b(ne, &miss);
  __ bind(&boolean);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::BOOLEAN_FUNCTION_INDEX, r1);

  // Probe the stub cache for the value object.
  __ bind(&probe);
  StubCache::GenerateProbe(masm, flags, r1, r2, r3, no_reg);

  // Cache miss: Jump to runtime.
  __ bind(&miss);
  GenerateMiss(masm, argc);
}
346
+
347
+
348
// Shared tail of CallIC::GenerateNormal: looks up the callee in the
// receiver's property dictionary, verifies it is a JSFunction, optionally
// patches the receiver on the stack with the global proxy, and tail-calls
// (jumps into) the function. Jumps to |miss| on any failure.
static void GenerateNormalHelper(MacroAssembler* masm,
                                 int argc,
                                 bool is_global_object,
                                 Label* miss,
                                 Register scratch) {
  // Search dictionary - put result in register r1.
  GenerateDictionaryLoad(masm, miss, r0, r1);

  // Check that the value isn't a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, miss);

  // Check that the value is a JSFunction.
  __ CompareObjectType(r1, scratch, scratch, JS_FUNCTION_TYPE);
  __ b(ne, miss);

  // Patch the receiver with the global proxy if necessary.
  if (is_global_object) {
    __ ldr(r0, MemOperand(sp, argc * kPointerSize));
    __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
    __ str(r0, MemOperand(sp, argc * kPointerSize));
  }

  // Invoke the function.
  ParameterCount actual(argc);
  __ InvokeFunction(r1, actual, JUMP_FUNCTION);
}
375
+
376
+
377
// Generates the CallIC stub for receivers whose properties live in a
// dictionary (normalized objects): validates the receiver, performs the
// required access checks for (non-)global objects and global proxies, then
// dispatches through GenerateNormalHelper. Falls back to the miss handler.
void CallIC::GenerateNormal(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss, global_object, non_global_object;

  // Get the receiver of the function from the stack into r1.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  // Check that the receiver isn't a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the receiver is a valid JS object.  Put the map in r3.
  __ CompareObjectType(r1, r3, r0, FIRST_JS_OBJECT_TYPE);
  __ b(lt, &miss);

  // If this assert fails, we have to check upper bound too.
  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);

  // Check for access to global object.
  __ cmp(r0, Operand(JS_GLOBAL_OBJECT_TYPE));
  __ b(eq, &global_object);
  __ cmp(r0, Operand(JS_BUILTINS_OBJECT_TYPE));
  __ b(ne, &non_global_object);

  // Accessing global object: Load and invoke.
  __ bind(&global_object);
  // Check that the global object does not require access checks.
  __ ldrb(r3, FieldMemOperand(r3, Map::kBitFieldOffset));
  __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &miss);
  GenerateNormalHelper(masm, argc, true, &miss, r4);

  // Accessing non-global object: Check for access to global proxy.
  Label global_proxy, invoke;
  __ bind(&non_global_object);
  __ cmp(r0, Operand(JS_GLOBAL_PROXY_TYPE));
  __ b(eq, &global_proxy);
  // Check that the non-global, non-global-proxy object does not
  // require access checks.
  __ ldrb(r3, FieldMemOperand(r3, Map::kBitFieldOffset));
  __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &miss);
  __ bind(&invoke);
  GenerateNormalHelper(masm, argc, false, &miss, r4);

  // Global object access: Check access rights.
  __ bind(&global_proxy);
  __ CheckAccessGlobalProxy(r1, r0, &miss);
  __ b(&invoke);

  // Cache miss: Jump to runtime.
  __ bind(&miss);
  GenerateMiss(masm, argc);
}
434
+
435
+
436
// Generates the CallIC miss handler: calls the CallIC_Miss runtime entry
// to resolve the callee, patches a global receiver on the stack with the
// global proxy's receiver if necessary, and then invokes the resolved
// function.
void CallIC::GenerateMiss(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  // Get the receiver of the function from the stack.
  __ ldr(r3, MemOperand(sp, argc * kPointerSize));

  __ EnterInternalFrame();

  // Push the receiver and the name of the function.
  __ Push(r3, r2);

  // Call the entry.
  __ mov(r0, Operand(2));  // Number of arguments to the runtime entry.
  __ mov(r1, Operand(ExternalReference(IC_Utility(kCallIC_Miss))));

  CEntryStub stub(1);
  __ CallStub(&stub);

  // Move result to r1 and leave the internal frame.
  __ mov(r1, Operand(r0));
  __ LeaveInternalFrame();

  // Check if the receiver is a global object of some sort.
  Label invoke, global;
  __ ldr(r2, MemOperand(sp, argc * kPointerSize));  // receiver
  __ tst(r2, Operand(kSmiTagMask));
  __ b(eq, &invoke);
  __ CompareObjectType(r2, r3, r3, JS_GLOBAL_OBJECT_TYPE);
  __ b(eq, &global);
  __ cmp(r3, Operand(JS_BUILTINS_OBJECT_TYPE));
  __ b(ne, &invoke);

  // Patch the receiver on the stack.
  __ bind(&global);
  __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));
  __ str(r2, MemOperand(sp, argc * kPointerSize));

  // Invoke the function.
  ParameterCount actual(argc);
  __ bind(&invoke);
  __ InvokeFunction(r1, actual, JUMP_FUNCTION);
}
481
+
482
+
483
+ // Defined in ic.cc.
484
+ Object* LoadIC_Miss(Arguments args);
485
+
486
// Generates the megamorphic LoadIC stub: probes the global stub cache for
// a monomorphic LOAD_IC stub matching the receiver/name pair; jumps to the
// miss handler if no match is found.
void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------

  // Probe the stub cache.
  Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC,
                                         NOT_IN_LOOP,
                                         MONOMORPHIC);
  StubCache::GenerateProbe(masm, flags, r0, r2, r3, no_reg);

  // Cache miss: Jump to runtime.
  GenerateMiss(masm);
}
503
+
504
+
505
// Generates the LoadIC stub for receivers whose properties live in a
// dictionary (normalized objects): validates the receiver, performs
// access checks for global proxies, then loads the value via
// GenerateDictionaryLoad. Falls back to the miss handler.
void LoadIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------
  Label miss, probe, global;

  // Check that the receiver isn't a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the receiver is a valid JS object.  Put the map in r3.
  __ CompareObjectType(r0, r3, r1, FIRST_JS_OBJECT_TYPE);
  __ b(lt, &miss);
  // If this assert fails, we have to check upper bound too.
  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);

  // Check for access to global object (unlikely).
  __ cmp(r1, Operand(JS_GLOBAL_PROXY_TYPE));
  __ b(eq, &global);

  // Check for non-global object that requires access check.
  __ ldrb(r3, FieldMemOperand(r3, Map::kBitFieldOffset));
  __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &miss);

  __ bind(&probe);
  GenerateDictionaryLoad(masm, &miss, r1, r0);
  __ Ret();

  // Global object access: Check access rights.
  __ bind(&global);
  __ CheckAccessGlobalProxy(r0, r1, &miss);
  __ b(&probe);

  // Cache miss: Restore receiver from stack and jump to runtime.
  __ bind(&miss);
  GenerateMiss(masm);
}
546
+
547
+
548
// Generates the LoadIC miss handler: pushes the receiver and name and
// tail-calls the LoadIC_Miss runtime entry.
void LoadIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------

  __ mov(r3, r0);
  __ Push(r3, r2);

  // Perform tail call to the entry.
  ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss));
  __ TailCallExternalReference(ref, 2, 1);
}
563
+
564
+
565
// Determines whether |address| is the call site of an inlined in-object
// property access. If so, returns true and stores the address of the end
// of the inlined code sequence in |*inline_end_address|; otherwise returns
// false.
static inline bool IsInlinedICSite(Address address,
                                   Address* inline_end_address) {
  // If the instruction after the call site is not the pseudo instruction nop1
  // then this is not related to an inlined in-object property load. The nop1
  // instruction is located just after the call to the IC in the deferred code
  // handling the miss in the inlined code. After the nop1 instruction there is
  // a branch instruction for jumping back from the deferred code.
  Address address_after_call = address + Assembler::kCallTargetAddressOffset;
  Instr instr_after_call = Assembler::instr_at(address_after_call);
  if (!Assembler::IsNop(instr_after_call, PROPERTY_ACCESS_INLINED)) {
    return false;
  }
  Address address_after_nop = address_after_call + Assembler::kInstrSize;
  Instr instr_after_nop = Assembler::instr_at(address_after_nop);
  ASSERT(Assembler::IsBranch(instr_after_nop));

  // Find the end of the inlined code for handling the load.
  int b_offset =
      Assembler::GetBranchOffset(instr_after_nop) + Assembler::kPcLoadDelta;
  ASSERT(b_offset < 0);  // Jumping back from deferred code.
  *inline_end_address = address_after_nop + b_offset;

  return true;
}
589
+
590
+
591
// Disables the inlined in-object load at |address| (if any) so control
// always flows back to the IC stub.
void LoadIC::ClearInlinedVersion(Address address) {
  // Reset the map check of the inlined in-object property load (if present) to
  // guarantee failure by holding an invalid map (the null value). The offset
  // can be patched to anything.
  PatchInlinedLoad(address, Heap::null_value(), 0);
}
597
+
598
+
599
// Patches the inlined in-object property load at |address| (if this is an
// inlined IC call site) to check for |map| and load the field at |offset|.
// Returns false when |address| is not an inlined IC site.
bool LoadIC::PatchInlinedLoad(Address address, Object* map, int offset) {
  // Find the end of the inlined code for handling the load if this is an
  // inlined IC call site.
  Address inline_end_address;
  if (!IsInlinedICSite(address, &inline_end_address)) return false;

  // Patch the offset of the property load instruction (ldr r0, [r1, #+XXX]).
  // The immediate must be representable in 12 bits.
  ASSERT((JSObject::kMaxInstanceSize - JSObject::kHeaderSize) < (1 << 12));
  Address ldr_property_instr_address =
      inline_end_address - Assembler::kInstrSize;
  ASSERT(Assembler::IsLdrRegisterImmediate(
      Assembler::instr_at(ldr_property_instr_address)));
  Instr ldr_property_instr = Assembler::instr_at(ldr_property_instr_address);
  ldr_property_instr = Assembler::SetLdrRegisterImmediateOffset(
      ldr_property_instr, offset - kHeapObjectTag);
  Assembler::instr_at_put(ldr_property_instr_address, ldr_property_instr);

  // Indicate that code has changed.
  CPU::FlushICache(ldr_property_instr_address, 1 * Assembler::kInstrSize);

  // Patch the map check.
  Address ldr_map_instr_address =
      inline_end_address - 4 * Assembler::kInstrSize;
  Assembler::set_target_address_at(ldr_map_instr_address,
                                   reinterpret_cast<Address>(map));
  return true;
}
627
+
628
+
629
// Disables the inlined keyed load at |address| (if any) so control always
// flows back to the IC stub.
void KeyedLoadIC::ClearInlinedVersion(Address address) {
  // Reset the map check of the inlined keyed load (if present) to
  // guarantee failure by holding an invalid map (the null value).
  PatchInlinedLoad(address, Heap::null_value());
}
634
+
635
+
636
// Patches the map check of the inlined keyed load at |address| (if this is
// an inlined IC call site) to check for |map|. Returns false when |address|
// is not an inlined IC site.
bool KeyedLoadIC::PatchInlinedLoad(Address address, Object* map) {
  Address inline_end_address;
  if (!IsInlinedICSite(address, &inline_end_address)) return false;

  // Patch the map check.
  // NOTE(review): the 18-instruction distance must match the code emitted
  // for the inlined keyed load — keep in sync with the codegen.
  Address ldr_map_instr_address =
      inline_end_address - 18 * Assembler::kInstrSize;
  Assembler::set_target_address_at(ldr_map_instr_address,
                                   reinterpret_cast<Address>(map));
  return true;
}
647
+
648
+
649
// Disables the inlined keyed store at |address| (if any) so control always
// flows back to the IC stub.
void KeyedStoreIC::ClearInlinedVersion(Address address) {
  // Insert null as the elements map to check for.  This will make
  // sure that the elements fast-case map check fails so that control
  // flows to the IC instead of the inlined version.
  PatchInlinedStore(address, Heap::null_value());
}
655
+
656
+
657
// Re-enables a previously cleared inlined keyed store at |address|.
void KeyedStoreIC::RestoreInlinedVersion(Address address) {
  // Restore the fast-case elements map check so that the inlined
  // version can be used again.
  PatchInlinedStore(address, Heap::fixed_array_map());
}
662
+
663
+
664
// Patches the elements-map check of the inlined keyed store at |address|
// (if this is an inlined IC call site) to check for |map|. Returns false
// when |address| is not an inlined IC site.
bool KeyedStoreIC::PatchInlinedStore(Address address, Object* map) {
  // Find the end of the inlined code for handling the store if this is an
  // inlined IC call site.
  Address inline_end_address;
  if (!IsInlinedICSite(address, &inline_end_address)) return false;

  // Patch the map check.
  // NOTE(review): the 5-instruction distance must match the code emitted
  // for the inlined keyed store — keep in sync with the codegen.
  Address ldr_map_instr_address =
      inline_end_address - 5 * Assembler::kInstrSize;
  Assembler::set_target_address_at(ldr_map_instr_address,
                                   reinterpret_cast<Address>(map));
  return true;
}
677
+
678
+
679
+ Object* KeyedLoadIC_Miss(Arguments args);
680
+
681
+
682
// Generates the KeyedLoadIC miss handler: pushes the receiver and key and
// tail-calls the KeyedLoadIC_Miss runtime entry.
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- sp[0]  : key
  //  -- sp[4]  : receiver
  // -----------------------------------

  __ ldr(r1, MemOperand(sp, kPointerSize));  // receiver
  __ Push(r1, r0);

  ExternalReference ref = ExternalReference(IC_Utility(kKeyedLoadIC_Miss));
  __ TailCallExternalReference(ref, 2, 1);
}
696
+
697
+
698
// Generates the generic slow path for keyed loads: pushes the receiver and
// key and tail-calls the Runtime::kGetProperty function.
void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- sp[0]  : key
  //  -- sp[4]  : receiver
  // -----------------------------------

  __ ldr(r1, MemOperand(sp, kPointerSize));  // receiver
  __ Push(r1, r0);

  __ TailCallRuntime(Runtime::kGetProperty, 2, 1);
}
711
+
712
+
713
// Generates the generic KeyedLoadIC stub: handles fast elements, pixel
// arrays, and number-dictionary elements of JS objects for smi keys, and
// falls back to the runtime (GenerateRuntimeGetProperty) for everything
// else (non-smi keys, slow-case receivers, holes, out-of-bounds indices).
void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- sp[0]  : key
  //  -- sp[4]  : receiver
  // -----------------------------------
  Label slow, fast, check_pixel_array, check_number_dictionary;

  // Get the object from the stack.
  __ ldr(r1, MemOperand(sp, kPointerSize));

  // Check that the object isn't a smi.
  __ BranchOnSmi(r1, &slow);
  // Get the map of the receiver.
  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
  // Check bit field.
  __ ldrb(r3, FieldMemOperand(r2, Map::kBitFieldOffset));
  __ tst(r3, Operand(kSlowCaseBitFieldMask));
  __ b(ne, &slow);
  // Check that the object is some kind of JS object EXCEPT JS Value type.
  // In the case that the object is a value-wrapper object,
  // we enter the runtime system to make sure that indexing into string
  // objects work as intended.
  ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
  __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  __ cmp(r2, Operand(JS_OBJECT_TYPE));
  __ b(lt, &slow);

  // Check that the key is a smi.
  __ BranchOnNotSmi(r0, &slow);
  // Save key in r2 in case we want it for the number dictionary case.
  __ mov(r2, r0);
  __ mov(r0, Operand(r0, ASR, kSmiTagSize));  // untag the key

  // Get the elements array of the object.
  __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
  // Check that the object is in fast mode (not dictionary).
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
  __ cmp(r3, ip);
  __ b(ne, &check_pixel_array);
  // Check that the key (index) is within bounds.
  __ ldr(r3, FieldMemOperand(r1, Array::kLengthOffset));
  __ cmp(r0, r3);
  __ b(hs, &slow);
  // Fast case: Do the load.
  __ add(r3, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r0, MemOperand(r3, r0, LSL, kPointerSizeLog2));
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(r0, ip);
  // In case the loaded value is the_hole we have to consult GetProperty
  // to ensure the prototype chain is searched.
  __ b(eq, &slow);
  __ Ret();

  // Check whether the elements is a pixel array.
  __ bind(&check_pixel_array);
  __ LoadRoot(ip, Heap::kPixelArrayMapRootIndex);
  __ cmp(r3, ip);
  __ b(ne, &check_number_dictionary);
  __ ldr(ip, FieldMemOperand(r1, PixelArray::kLengthOffset));
  __ cmp(r0, ip);
  __ b(hs, &slow);
  __ ldr(ip, FieldMemOperand(r1, PixelArray::kExternalPointerOffset));
  __ ldrb(r0, MemOperand(ip, r0));
  __ mov(r0, Operand(r0, LSL, kSmiTagSize));  // Tag result as smi.
  __ Ret();

  __ bind(&check_number_dictionary);
  // Check whether the elements is a number dictionary.
  // r0: untagged index
  // r1: elements
  // r2: key
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r3, ip);
  __ b(ne, &slow);
  GenerateNumberDictionaryLoad(masm, &slow, r1, r2, r0, r3, r4);
  __ Ret();

  // Slow case: Push extra copies of the arguments (2).
  __ bind(&slow);
  __ IncrementCounter(&Counters::keyed_load_generic_slow, 1, r0, r1);
  __ ldr(r0, MemOperand(sp, 0));
  GenerateRuntimeGetProperty(masm);
}
799
+
800
+
801
+ void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
802
+ // ---------- S t a t e --------------
803
+ // -- lr : return address
804
+ // -- r0 : key
805
+ // -- sp[0] : key
806
+ // -- sp[4] : receiver
807
+ // -----------------------------------
808
+ Label miss;
809
+ Label index_not_smi;
810
+ Label index_out_of_range;
811
+ Label slow_char_code;
812
+ Label got_char_code;
813
+
814
+ // Get the object from the stack.
815
+ __ ldr(r1, MemOperand(sp, kPointerSize));
816
+
817
+ Register object = r1;
818
+ Register index = r0;
819
+ Register code = r2;
820
+ Register scratch = r3;
821
+
822
+ StringHelper::GenerateFastCharCodeAt(masm,
823
+ object,
824
+ index,
825
+ scratch,
826
+ code,
827
+ &miss, // When not a string.
828
+ &index_not_smi,
829
+ &index_out_of_range,
830
+ &slow_char_code);
831
+
832
+ // If we didn't bail out, code register contains smi tagged char
833
+ // code.
834
+ __ bind(&got_char_code);
835
+ StringHelper::GenerateCharFromCode(masm, code, scratch, r0, JUMP_FUNCTION);
836
+ #ifdef DEBUG
837
+ __ Abort("Unexpected fall-through from char from code tail call");
838
+ #endif
839
+
840
+ // Check if key is a heap number.
841
+ __ bind(&index_not_smi);
842
+ __ CheckMap(index, scratch, Factory::heap_number_map(), &miss, true);
843
+
844
+ // Push receiver and key on the stack (now that we know they are a
845
+ // string and a number), and call runtime.
846
+ __ bind(&slow_char_code);
847
+ __ EnterInternalFrame();
848
+ __ Push(object, index);
849
+ __ CallRuntime(Runtime::kStringCharCodeAt, 2);
850
+ ASSERT(!code.is(r0));
851
+ __ mov(code, r0);
852
+ __ LeaveInternalFrame();
853
+
854
+ // Check if the runtime call returned NaN char code. If yes, return
855
+ // undefined. Otherwise, we can continue.
856
+ if (FLAG_debug_code) {
857
+ __ BranchOnSmi(code, &got_char_code);
858
+ __ ldr(scratch, FieldMemOperand(code, HeapObject::kMapOffset));
859
+ __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
860
+ __ cmp(scratch, ip);
861
+ __ Assert(eq, "StringCharCodeAt must return smi or heap number");
862
+ }
863
+ __ LoadRoot(scratch, Heap::kNanValueRootIndex);
864
+ __ cmp(code, scratch);
865
+ __ b(ne, &got_char_code);
866
+ __ bind(&index_out_of_range);
867
+ __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
868
+ __ Ret();
869
+
870
+ __ bind(&miss);
871
+ GenerateGeneric(masm);
872
+ }
873
+
874
+
875
+ // Convert unsigned integer with specified number of leading zeroes in binary
876
+ // representation to IEEE 754 double.
877
+ // Integer to convert is passed in register hiword.
878
+ // Resulting double is returned in registers hiword:loword.
879
+ // This functions does not work correctly for 0.
880
+ static void GenerateUInt2Double(MacroAssembler* masm,
881
+ Register hiword,
882
+ Register loword,
883
+ Register scratch,
884
+ int leading_zeroes) {
885
+ const int meaningful_bits = kBitsPerInt - leading_zeroes - 1;
886
+ const int biased_exponent = HeapNumber::kExponentBias + meaningful_bits;
887
+
888
+ const int mantissa_shift_for_hi_word =
889
+ meaningful_bits - HeapNumber::kMantissaBitsInTopWord;
890
+
891
+ const int mantissa_shift_for_lo_word =
892
+ kBitsPerInt - mantissa_shift_for_hi_word;
893
+
894
+ __ mov(scratch, Operand(biased_exponent << HeapNumber::kExponentShift));
895
+ if (mantissa_shift_for_hi_word > 0) {
896
+ __ mov(loword, Operand(hiword, LSL, mantissa_shift_for_lo_word));
897
+ __ orr(hiword, scratch, Operand(hiword, LSR, mantissa_shift_for_hi_word));
898
+ } else {
899
+ __ mov(loword, Operand(0));
900
+ __ orr(hiword, scratch, Operand(hiword, LSL, mantissa_shift_for_hi_word));
901
+ }
902
+
903
+ // If least significant bit of biased exponent was not 1 it was corrupted
904
+ // by most significant bit of mantissa so we should fix that.
905
+ if (!(biased_exponent & 1)) {
906
+ __ bic(hiword, hiword, Operand(1 << HeapNumber::kExponentShift));
907
+ }
908
+ }
909
+
910
+
911
+ void KeyedLoadIC::GenerateExternalArray(MacroAssembler* masm,
912
+ ExternalArrayType array_type) {
913
+ // ---------- S t a t e --------------
914
+ // -- lr : return address
915
+ // -- r0 : key
916
+ // -- sp[0] : key
917
+ // -- sp[4] : receiver
918
+ // -----------------------------------
919
+ Label slow, failed_allocation;
920
+
921
+ // Get the object from the stack.
922
+ __ ldr(r1, MemOperand(sp, kPointerSize));
923
+
924
+ // r0: key
925
+ // r1: receiver object
926
+
927
+ // Check that the object isn't a smi
928
+ __ BranchOnSmi(r1, &slow);
929
+
930
+ // Check that the key is a smi.
931
+ __ BranchOnNotSmi(r0, &slow);
932
+
933
+ // Check that the object is a JS object. Load map into r2.
934
+ __ CompareObjectType(r1, r2, r3, FIRST_JS_OBJECT_TYPE);
935
+ __ b(lt, &slow);
936
+
937
+ // Check that the receiver does not require access checks. We need
938
+ // to check this explicitly since this generic stub does not perform
939
+ // map checks.
940
+ __ ldrb(r3, FieldMemOperand(r2, Map::kBitFieldOffset));
941
+ __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded));
942
+ __ b(ne, &slow);
943
+
944
+ // Check that the elements array is the appropriate type of
945
+ // ExternalArray.
946
+ // r0: index (as a smi)
947
+ // r1: JSObject
948
+ __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
949
+ __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
950
+ __ LoadRoot(ip, Heap::RootIndexForExternalArrayType(array_type));
951
+ __ cmp(r2, ip);
952
+ __ b(ne, &slow);
953
+
954
+ // Check that the index is in range.
955
+ __ ldr(ip, FieldMemOperand(r1, ExternalArray::kLengthOffset));
956
+ __ cmp(r1, Operand(r0, ASR, kSmiTagSize));
957
+ // Unsigned comparison catches both negative and too-large values.
958
+ __ b(lo, &slow);
959
+
960
+ // r0: index (smi)
961
+ // r1: elements array
962
+ __ ldr(r1, FieldMemOperand(r1, ExternalArray::kExternalPointerOffset));
963
+ // r1: base pointer of external storage
964
+
965
+ // We are not untagging smi key and instead work with it
966
+ // as if it was premultiplied by 2.
967
+ ASSERT((kSmiTag == 0) && (kSmiTagSize == 1));
968
+
969
+ switch (array_type) {
970
+ case kExternalByteArray:
971
+ __ ldrsb(r0, MemOperand(r1, r0, LSR, 1));
972
+ break;
973
+ case kExternalUnsignedByteArray:
974
+ __ ldrb(r0, MemOperand(r1, r0, LSR, 1));
975
+ break;
976
+ case kExternalShortArray:
977
+ __ ldrsh(r0, MemOperand(r1, r0, LSL, 0));
978
+ break;
979
+ case kExternalUnsignedShortArray:
980
+ __ ldrh(r0, MemOperand(r1, r0, LSL, 0));
981
+ break;
982
+ case kExternalIntArray:
983
+ case kExternalUnsignedIntArray:
984
+ __ ldr(r0, MemOperand(r1, r0, LSL, 1));
985
+ break;
986
+ case kExternalFloatArray:
987
+ if (CpuFeatures::IsSupported(VFP3)) {
988
+ CpuFeatures::Scope scope(VFP3);
989
+ __ add(r0, r1, Operand(r0, LSL, 1));
990
+ __ vldr(s0, r0, 0);
991
+ } else {
992
+ __ ldr(r0, MemOperand(r1, r0, LSL, 1));
993
+ }
994
+ break;
995
+ default:
996
+ UNREACHABLE();
997
+ break;
998
+ }
999
+
1000
+ // For integer array types:
1001
+ // r0: value
1002
+ // For floating-point array type
1003
+ // s0: value (if VFP3 is supported)
1004
+ // r0: value (if VFP3 is not supported)
1005
+
1006
+ if (array_type == kExternalIntArray) {
1007
+ // For the Int and UnsignedInt array types, we need to see whether
1008
+ // the value can be represented in a Smi. If not, we need to convert
1009
+ // it to a HeapNumber.
1010
+ Label box_int;
1011
+ __ cmp(r0, Operand(0xC0000000));
1012
+ __ b(mi, &box_int);
1013
+ __ mov(r0, Operand(r0, LSL, kSmiTagSize));
1014
+ __ Ret();
1015
+
1016
+ __ bind(&box_int);
1017
+
1018
+ __ mov(r1, r0);
1019
+ // Allocate a HeapNumber for the int and perform int-to-double
1020
+ // conversion.
1021
+ __ AllocateHeapNumber(r0, r3, r4, &slow);
1022
+
1023
+ if (CpuFeatures::IsSupported(VFP3)) {
1024
+ CpuFeatures::Scope scope(VFP3);
1025
+ __ vmov(s0, r1);
1026
+ __ vcvt_f64_s32(d0, s0);
1027
+ __ sub(r1, r0, Operand(kHeapObjectTag));
1028
+ __ vstr(d0, r1, HeapNumber::kValueOffset);
1029
+ __ Ret();
1030
+ } else {
1031
+ WriteInt32ToHeapNumberStub stub(r1, r0, r3);
1032
+ __ TailCallStub(&stub);
1033
+ }
1034
+ } else if (array_type == kExternalUnsignedIntArray) {
1035
+ // The test is different for unsigned int values. Since we need
1036
+ // the value to be in the range of a positive smi, we can't
1037
+ // handle either of the top two bits being set in the value.
1038
+ if (CpuFeatures::IsSupported(VFP3)) {
1039
+ CpuFeatures::Scope scope(VFP3);
1040
+ Label box_int, done;
1041
+ __ tst(r0, Operand(0xC0000000));
1042
+ __ b(ne, &box_int);
1043
+
1044
+ __ mov(r0, Operand(r0, LSL, kSmiTagSize));
1045
+ __ Ret();
1046
+
1047
+ __ bind(&box_int);
1048
+ __ vmov(s0, r0);
1049
+ __ AllocateHeapNumber(r0, r1, r2, &slow);
1050
+
1051
+ __ vcvt_f64_u32(d0, s0);
1052
+ __ sub(r1, r0, Operand(kHeapObjectTag));
1053
+ __ vstr(d0, r1, HeapNumber::kValueOffset);
1054
+ __ Ret();
1055
+ } else {
1056
+ // Check whether unsigned integer fits into smi.
1057
+ Label box_int_0, box_int_1, done;
1058
+ __ tst(r0, Operand(0x80000000));
1059
+ __ b(ne, &box_int_0);
1060
+ __ tst(r0, Operand(0x40000000));
1061
+ __ b(ne, &box_int_1);
1062
+
1063
+ // Tag integer as smi and return it.
1064
+ __ mov(r0, Operand(r0, LSL, kSmiTagSize));
1065
+ __ Ret();
1066
+
1067
+ __ bind(&box_int_0);
1068
+ // Integer does not have leading zeros.
1069
+ GenerateUInt2Double(masm, r0, r1, r2, 0);
1070
+ __ b(&done);
1071
+
1072
+ __ bind(&box_int_1);
1073
+ // Integer has one leading zero.
1074
+ GenerateUInt2Double(masm, r0, r1, r2, 1);
1075
+
1076
+ __ bind(&done);
1077
+ // Integer was converted to double in registers r0:r1.
1078
+ // Wrap it into a HeapNumber.
1079
+ __ AllocateHeapNumber(r2, r3, r5, &slow);
1080
+
1081
+ __ str(r0, FieldMemOperand(r2, HeapNumber::kExponentOffset));
1082
+ __ str(r1, FieldMemOperand(r2, HeapNumber::kMantissaOffset));
1083
+
1084
+ __ mov(r0, r2);
1085
+
1086
+ __ Ret();
1087
+ }
1088
+ } else if (array_type == kExternalFloatArray) {
1089
+ // For the floating-point array type, we need to always allocate a
1090
+ // HeapNumber.
1091
+ if (CpuFeatures::IsSupported(VFP3)) {
1092
+ CpuFeatures::Scope scope(VFP3);
1093
+ __ AllocateHeapNumber(r0, r1, r2, &slow);
1094
+ __ vcvt_f64_f32(d0, s0);
1095
+ __ sub(r1, r0, Operand(kHeapObjectTag));
1096
+ __ vstr(d0, r1, HeapNumber::kValueOffset);
1097
+ __ Ret();
1098
+ } else {
1099
+ __ AllocateHeapNumber(r3, r1, r2, &slow);
1100
+ // VFP is not available, do manual single to double conversion.
1101
+
1102
+ // r0: floating point value (binary32)
1103
+
1104
+ // Extract mantissa to r1.
1105
+ __ and_(r1, r0, Operand(kBinary32MantissaMask));
1106
+
1107
+ // Extract exponent to r2.
1108
+ __ mov(r2, Operand(r0, LSR, kBinary32MantissaBits));
1109
+ __ and_(r2, r2, Operand(kBinary32ExponentMask >> kBinary32MantissaBits));
1110
+
1111
+ Label exponent_rebiased;
1112
+ __ teq(r2, Operand(0x00));
1113
+ __ b(eq, &exponent_rebiased);
1114
+
1115
+ __ teq(r2, Operand(0xff));
1116
+ __ mov(r2, Operand(0x7ff), LeaveCC, eq);
1117
+ __ b(eq, &exponent_rebiased);
1118
+
1119
+ // Rebias exponent.
1120
+ __ add(r2,
1121
+ r2,
1122
+ Operand(-kBinary32ExponentBias + HeapNumber::kExponentBias));
1123
+
1124
+ __ bind(&exponent_rebiased);
1125
+ __ and_(r0, r0, Operand(kBinary32SignMask));
1126
+ __ orr(r0, r0, Operand(r2, LSL, HeapNumber::kMantissaBitsInTopWord));
1127
+
1128
+ // Shift mantissa.
1129
+ static const int kMantissaShiftForHiWord =
1130
+ kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
1131
+
1132
+ static const int kMantissaShiftForLoWord =
1133
+ kBitsPerInt - kMantissaShiftForHiWord;
1134
+
1135
+ __ orr(r0, r0, Operand(r1, LSR, kMantissaShiftForHiWord));
1136
+ __ mov(r1, Operand(r1, LSL, kMantissaShiftForLoWord));
1137
+
1138
+ __ str(r0, FieldMemOperand(r3, HeapNumber::kExponentOffset));
1139
+ __ str(r1, FieldMemOperand(r3, HeapNumber::kMantissaOffset));
1140
+ __ mov(r0, r3);
1141
+ __ Ret();
1142
+ }
1143
+
1144
+ } else {
1145
+ __ mov(r0, Operand(r0, LSL, kSmiTagSize));
1146
+ __ Ret();
1147
+ }
1148
+
1149
+ // Slow case: Load name and receiver from stack and jump to runtime.
1150
+ __ bind(&slow);
1151
+ __ IncrementCounter(&Counters::keyed_load_external_array_slow, 1, r0, r1);
1152
+ __ ldr(r0, MemOperand(sp, 0));
1153
+ GenerateRuntimeGetProperty(masm);
1154
+ }
1155
+
1156
+
1157
+ void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
1158
+ // ---------- S t a t e --------------
1159
+ // -- lr : return address
1160
+ // -- r0 : key
1161
+ // -- sp[0] : key
1162
+ // -- sp[4] : receiver
1163
+ // -----------------------------------
1164
+ Label slow;
1165
+
1166
+ // Get the object from the stack.
1167
+ __ ldr(r1, MemOperand(sp, kPointerSize));
1168
+
1169
+ // Check that the receiver isn't a smi.
1170
+ __ BranchOnSmi(r1, &slow);
1171
+
1172
+ // Check that the key is a smi.
1173
+ __ BranchOnNotSmi(r0, &slow);
1174
+
1175
+ // Get the map of the receiver.
1176
+ __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
1177
+
1178
+ // Check that it has indexed interceptor and access checks
1179
+ // are not enabled for this object.
1180
+ __ ldrb(r3, FieldMemOperand(r2, Map::kBitFieldOffset));
1181
+ __ and_(r3, r3, Operand(kSlowCaseBitFieldMask));
1182
+ __ cmp(r3, Operand(1 << Map::kHasIndexedInterceptor));
1183
+ __ b(ne, &slow);
1184
+
1185
+ // Everything is fine, call runtime.
1186
+ __ Push(r1, r0); // Receiver, key.
1187
+
1188
+ // Perform tail call to the entry.
1189
+ __ TailCallExternalReference(ExternalReference(
1190
+ IC_Utility(kKeyedLoadPropertyWithInterceptor)), 2, 1);
1191
+
1192
+ __ bind(&slow);
1193
+ GenerateMiss(masm);
1194
+ }
1195
+
1196
+
1197
+ void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
1198
+ // ---------- S t a t e --------------
1199
+ // -- r0 : value
1200
+ // -- lr : return address
1201
+ // -- sp[0] : key
1202
+ // -- sp[1] : receiver
1203
+ // -----------------------------------
1204
+
1205
+ __ ldm(ia, sp, r2.bit() | r3.bit());
1206
+ __ Push(r3, r2, r0);
1207
+
1208
+ ExternalReference ref = ExternalReference(IC_Utility(kKeyedStoreIC_Miss));
1209
+ __ TailCallExternalReference(ref, 3, 1);
1210
+ }
1211
+
1212
+
1213
+ void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm) {
1214
+ // ---------- S t a t e --------------
1215
+ // -- r0 : value
1216
+ // -- lr : return address
1217
+ // -- sp[0] : key
1218
+ // -- sp[1] : receiver
1219
+ // -----------------------------------
1220
+ __ ldm(ia, sp, r1.bit() | r3.bit()); // r0 == value, r1 == key, r3 == object
1221
+ __ Push(r3, r1, r0);
1222
+
1223
+ __ TailCallRuntime(Runtime::kSetProperty, 3, 1);
1224
+ }
1225
+
1226
+
1227
+ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
1228
+ // ---------- S t a t e --------------
1229
+ // -- r0 : value
1230
+ // -- lr : return address
1231
+ // -- sp[0] : key
1232
+ // -- sp[1] : receiver
1233
+ // -----------------------------------
1234
+ Label slow, fast, array, extra, exit, check_pixel_array;
1235
+
1236
+ // Get the key and the object from the stack.
1237
+ __ ldm(ia, sp, r1.bit() | r3.bit()); // r1 = key, r3 = receiver
1238
+ // Check that the key is a smi.
1239
+ __ tst(r1, Operand(kSmiTagMask));
1240
+ __ b(ne, &slow);
1241
+ // Check that the object isn't a smi.
1242
+ __ tst(r3, Operand(kSmiTagMask));
1243
+ __ b(eq, &slow);
1244
+ // Get the map of the object.
1245
+ __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
1246
+ // Check that the receiver does not require access checks. We need
1247
+ // to do this because this generic stub does not perform map checks.
1248
+ __ ldrb(ip, FieldMemOperand(r2, Map::kBitFieldOffset));
1249
+ __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded));
1250
+ __ b(ne, &slow);
1251
+ // Check if the object is a JS array or not.
1252
+ __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
1253
+ __ cmp(r2, Operand(JS_ARRAY_TYPE));
1254
+ // r1 == key.
1255
+ __ b(eq, &array);
1256
+ // Check that the object is some kind of JS object.
1257
+ __ cmp(r2, Operand(FIRST_JS_OBJECT_TYPE));
1258
+ __ b(lt, &slow);
1259
+
1260
+
1261
+ // Object case: Check key against length in the elements array.
1262
+ __ ldr(r3, FieldMemOperand(r3, JSObject::kElementsOffset));
1263
+ // Check that the object is in fast mode (not dictionary).
1264
+ __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
1265
+ __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
1266
+ __ cmp(r2, ip);
1267
+ __ b(ne, &check_pixel_array);
1268
+ // Untag the key (for checking against untagged length in the fixed array).
1269
+ __ mov(r1, Operand(r1, ASR, kSmiTagSize));
1270
+ // Compute address to store into and check array bounds.
1271
+ __ add(r2, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1272
+ __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2));
1273
+ __ ldr(ip, FieldMemOperand(r3, FixedArray::kLengthOffset));
1274
+ __ cmp(r1, Operand(ip));
1275
+ __ b(lo, &fast);
1276
+
1277
+
1278
+ // Slow case:
1279
+ __ bind(&slow);
1280
+ GenerateRuntimeSetProperty(masm);
1281
+
1282
+ // Check whether the elements is a pixel array.
1283
+ // r0: value
1284
+ // r1: index (as a smi), zero-extended.
1285
+ // r3: elements array
1286
+ __ bind(&check_pixel_array);
1287
+ __ LoadRoot(ip, Heap::kPixelArrayMapRootIndex);
1288
+ __ cmp(r2, ip);
1289
+ __ b(ne, &slow);
1290
+ // Check that the value is a smi. If a conversion is needed call into the
1291
+ // runtime to convert and clamp.
1292
+ __ BranchOnNotSmi(r0, &slow);
1293
+ __ mov(r1, Operand(r1, ASR, kSmiTagSize)); // Untag the key.
1294
+ __ ldr(ip, FieldMemOperand(r3, PixelArray::kLengthOffset));
1295
+ __ cmp(r1, Operand(ip));
1296
+ __ b(hs, &slow);
1297
+ __ mov(r4, r0); // Save the value.
1298
+ __ mov(r0, Operand(r0, ASR, kSmiTagSize)); // Untag the value.
1299
+ { // Clamp the value to [0..255].
1300
+ Label done;
1301
+ __ tst(r0, Operand(0xFFFFFF00));
1302
+ __ b(eq, &done);
1303
+ __ mov(r0, Operand(0), LeaveCC, mi); // 0 if negative.
1304
+ __ mov(r0, Operand(255), LeaveCC, pl); // 255 if positive.
1305
+ __ bind(&done);
1306
+ }
1307
+ __ ldr(r2, FieldMemOperand(r3, PixelArray::kExternalPointerOffset));
1308
+ __ strb(r0, MemOperand(r2, r1));
1309
+ __ mov(r0, Operand(r4)); // Return the original value.
1310
+ __ Ret();
1311
+
1312
+
1313
+ // Extra capacity case: Check if there is extra capacity to
1314
+ // perform the store and update the length. Used for adding one
1315
+ // element to the array by writing to array[array.length].
1316
+ // r0 == value, r1 == key, r2 == elements, r3 == object
1317
+ __ bind(&extra);
1318
+ __ b(ne, &slow); // do not leave holes in the array
1319
+ __ mov(r1, Operand(r1, ASR, kSmiTagSize)); // untag
1320
+ __ ldr(ip, FieldMemOperand(r2, Array::kLengthOffset));
1321
+ __ cmp(r1, Operand(ip));
1322
+ __ b(hs, &slow);
1323
+ __ mov(r1, Operand(r1, LSL, kSmiTagSize)); // restore tag
1324
+ __ add(r1, r1, Operand(1 << kSmiTagSize)); // and increment
1325
+ __ str(r1, FieldMemOperand(r3, JSArray::kLengthOffset));
1326
+ __ mov(r3, Operand(r2));
1327
+ // NOTE: Computing the address to store into must take the fact
1328
+ // that the key has been incremented into account.
1329
+ int displacement = FixedArray::kHeaderSize - kHeapObjectTag -
1330
+ ((1 << kSmiTagSize) * 2);
1331
+ __ add(r2, r2, Operand(displacement));
1332
+ __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
1333
+ __ b(&fast);
1334
+
1335
+
1336
+ // Array case: Get the length and the elements array from the JS
1337
+ // array. Check that the array is in fast mode; if it is the
1338
+ // length is always a smi.
1339
+ // r0 == value, r3 == object
1340
+ __ bind(&array);
1341
+ __ ldr(r2, FieldMemOperand(r3, JSObject::kElementsOffset));
1342
+ __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
1343
+ __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
1344
+ __ cmp(r1, ip);
1345
+ __ b(ne, &slow);
1346
+
1347
+ // Check the key against the length in the array, compute the
1348
+ // address to store into and fall through to fast case.
1349
+ __ ldr(r1, MemOperand(sp)); // restore key
1350
+ // r0 == value, r1 == key, r2 == elements, r3 == object.
1351
+ __ ldr(ip, FieldMemOperand(r3, JSArray::kLengthOffset));
1352
+ __ cmp(r1, Operand(ip));
1353
+ __ b(hs, &extra);
1354
+ __ mov(r3, Operand(r2));
1355
+ __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1356
+ __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
1357
+
1358
+
1359
+ // Fast case: Do the store.
1360
+ // r0 == value, r2 == address to store into, r3 == elements
1361
+ __ bind(&fast);
1362
+ __ str(r0, MemOperand(r2));
1363
+ // Skip write barrier if the written value is a smi.
1364
+ __ tst(r0, Operand(kSmiTagMask));
1365
+ __ b(eq, &exit);
1366
+ // Update write barrier for the elements array address.
1367
+ __ sub(r1, r2, Operand(r3));
1368
+ __ RecordWrite(r3, r1, r2);
1369
+
1370
+ __ bind(&exit);
1371
+ __ Ret();
1372
+ }
1373
+
1374
+
1375
+ // Convert int passed in register ival to IEE 754 single precision
1376
+ // floating point value and store it into register fval.
1377
+ // If VFP3 is available use it for conversion.
1378
+ static void ConvertIntToFloat(MacroAssembler* masm,
1379
+ Register ival,
1380
+ Register fval,
1381
+ Register scratch1,
1382
+ Register scratch2) {
1383
+ if (CpuFeatures::IsSupported(VFP3)) {
1384
+ CpuFeatures::Scope scope(VFP3);
1385
+ __ vmov(s0, ival);
1386
+ __ vcvt_f32_s32(s0, s0);
1387
+ __ vmov(fval, s0);
1388
+ } else {
1389
+ Label not_special, done;
1390
+ // Move sign bit from source to destination. This works because the sign
1391
+ // bit in the exponent word of the double has the same position and polarity
1392
+ // as the 2's complement sign bit in a Smi.
1393
+ ASSERT(kBinary32SignMask == 0x80000000u);
1394
+
1395
+ __ and_(fval, ival, Operand(kBinary32SignMask), SetCC);
1396
+ // Negate value if it is negative.
1397
+ __ rsb(ival, ival, Operand(0), LeaveCC, ne);
1398
+
1399
+ // We have -1, 0 or 1, which we treat specially. Register ival contains
1400
+ // absolute value: it is either equal to 1 (special case of -1 and 1),
1401
+ // greater than 1 (not a special case) or less than 1 (special case of 0).
1402
+ __ cmp(ival, Operand(1));
1403
+ __ b(gt, &not_special);
1404
+
1405
+ // For 1 or -1 we need to or in the 0 exponent (biased).
1406
+ static const uint32_t exponent_word_for_1 =
1407
+ kBinary32ExponentBias << kBinary32ExponentShift;
1408
+
1409
+ __ orr(fval, fval, Operand(exponent_word_for_1), LeaveCC, eq);
1410
+ __ b(&done);
1411
+
1412
+ __ bind(&not_special);
1413
+ // Count leading zeros.
1414
+ // Gets the wrong answer for 0, but we already checked for that case above.
1415
+ Register zeros = scratch2;
1416
+ __ CountLeadingZeros(ival, scratch1, zeros);
1417
+
1418
+ // Compute exponent and or it into the exponent register.
1419
+ __ rsb(scratch1,
1420
+ zeros,
1421
+ Operand((kBitsPerInt - 1) + kBinary32ExponentBias));
1422
+
1423
+ __ orr(fval,
1424
+ fval,
1425
+ Operand(scratch1, LSL, kBinary32ExponentShift));
1426
+
1427
+ // Shift up the source chopping the top bit off.
1428
+ __ add(zeros, zeros, Operand(1));
1429
+ // This wouldn't work for 1 and -1 as the shift would be 32 which means 0.
1430
+ __ mov(ival, Operand(ival, LSL, zeros));
1431
+ // And the top (top 20 bits).
1432
+ __ orr(fval,
1433
+ fval,
1434
+ Operand(ival, LSR, kBitsPerInt - kBinary32MantissaBits));
1435
+
1436
+ __ bind(&done);
1437
+ }
1438
+ }
1439
+
1440
+
1441
+ static bool IsElementTypeSigned(ExternalArrayType array_type) {
1442
+ switch (array_type) {
1443
+ case kExternalByteArray:
1444
+ case kExternalShortArray:
1445
+ case kExternalIntArray:
1446
+ return true;
1447
+
1448
+ case kExternalUnsignedByteArray:
1449
+ case kExternalUnsignedShortArray:
1450
+ case kExternalUnsignedIntArray:
1451
+ return false;
1452
+
1453
+ default:
1454
+ UNREACHABLE();
1455
+ return false;
1456
+ }
1457
+ }
1458
+
1459
+
1460
+ void KeyedStoreIC::GenerateExternalArray(MacroAssembler* masm,
1461
+ ExternalArrayType array_type) {
1462
+ // ---------- S t a t e --------------
1463
+ // -- r0 : value
1464
+ // -- lr : return address
1465
+ // -- sp[0] : key
1466
+ // -- sp[1] : receiver
1467
+ // -----------------------------------
1468
+ Label slow, check_heap_number;
1469
+
1470
+ // Get the key and the object from the stack.
1471
+ __ ldm(ia, sp, r1.bit() | r2.bit()); // r1 = key, r2 = receiver
1472
+
1473
+ // Check that the object isn't a smi.
1474
+ __ BranchOnSmi(r2, &slow);
1475
+
1476
+ // Check that the object is a JS object. Load map into r3
1477
+ __ CompareObjectType(r2, r3, r4, FIRST_JS_OBJECT_TYPE);
1478
+ __ b(le, &slow);
1479
+
1480
+ // Check that the receiver does not require access checks. We need
1481
+ // to do this because this generic stub does not perform map checks.
1482
+ __ ldrb(ip, FieldMemOperand(r3, Map::kBitFieldOffset));
1483
+ __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded));
1484
+ __ b(ne, &slow);
1485
+
1486
+ // Check that the key is a smi.
1487
+ __ BranchOnNotSmi(r1, &slow);
1488
+
1489
+ // Check that the elements array is the appropriate type of
1490
+ // ExternalArray.
1491
+ // r0: value
1492
+ // r1: index (smi)
1493
+ // r2: object
1494
+ __ ldr(r2, FieldMemOperand(r2, JSObject::kElementsOffset));
1495
+ __ ldr(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
1496
+ __ LoadRoot(ip, Heap::RootIndexForExternalArrayType(array_type));
1497
+ __ cmp(r3, ip);
1498
+ __ b(ne, &slow);
1499
+
1500
+ // Check that the index is in range.
1501
+ __ mov(r1, Operand(r1, ASR, kSmiTagSize)); // Untag the index.
1502
+ __ ldr(ip, FieldMemOperand(r2, ExternalArray::kLengthOffset));
1503
+ __ cmp(r1, ip);
1504
+ // Unsigned comparison catches both negative and too-large values.
1505
+ __ b(hs, &slow);
1506
+
1507
+ // Handle both smis and HeapNumbers in the fast path. Go to the
1508
+ // runtime for all other kinds of values.
1509
+ // r0: value
1510
+ // r1: index (integer)
1511
+ // r2: array
1512
+ __ BranchOnNotSmi(r0, &check_heap_number);
1513
+ __ mov(r3, Operand(r0, ASR, kSmiTagSize)); // Untag the value.
1514
+ __ ldr(r2, FieldMemOperand(r2, ExternalArray::kExternalPointerOffset));
1515
+
1516
+ // r1: index (integer)
1517
+ // r2: base pointer of external storage
1518
+ // r3: value (integer)
1519
+ switch (array_type) {
1520
+ case kExternalByteArray:
1521
+ case kExternalUnsignedByteArray:
1522
+ __ strb(r3, MemOperand(r2, r1, LSL, 0));
1523
+ break;
1524
+ case kExternalShortArray:
1525
+ case kExternalUnsignedShortArray:
1526
+ __ strh(r3, MemOperand(r2, r1, LSL, 1));
1527
+ break;
1528
+ case kExternalIntArray:
1529
+ case kExternalUnsignedIntArray:
1530
+ __ str(r3, MemOperand(r2, r1, LSL, 2));
1531
+ break;
1532
+ case kExternalFloatArray:
1533
+ // Need to perform int-to-float conversion.
1534
+ ConvertIntToFloat(masm, r3, r4, r5, r6);
1535
+ __ str(r4, MemOperand(r2, r1, LSL, 2));
1536
+ break;
1537
+ default:
1538
+ UNREACHABLE();
1539
+ break;
1540
+ }
1541
+
1542
+ // r0: value
1543
+ __ Ret();
1544
+
1545
+
1546
+ // r0: value
1547
+ // r1: index (integer)
1548
+ // r2: external array object
1549
+ __ bind(&check_heap_number);
1550
+ __ CompareObjectType(r0, r3, r4, HEAP_NUMBER_TYPE);
1551
+ __ b(ne, &slow);
1552
+
1553
+ __ ldr(r2, FieldMemOperand(r2, ExternalArray::kExternalPointerOffset));
1554
+
1555
+ // The WebGL specification leaves the behavior of storing NaN and
1556
+ // +/-Infinity into integer arrays basically undefined. For more
1557
+ // reproducible behavior, convert these to zero.
1558
+ if (CpuFeatures::IsSupported(VFP3)) {
1559
+ CpuFeatures::Scope scope(VFP3);
1560
+
1561
+ // vldr requires offset to be a multiple of 4 so we can not
1562
+ // include -kHeapObjectTag into it.
1563
+ __ sub(r3, r0, Operand(kHeapObjectTag));
1564
+ __ vldr(d0, r3, HeapNumber::kValueOffset);
1565
+
1566
+ if (array_type == kExternalFloatArray) {
1567
+ __ vcvt_f32_f64(s0, d0);
1568
+ __ vmov(r3, s0);
1569
+ __ str(r3, MemOperand(r2, r1, LSL, 2));
1570
+ } else {
1571
+ Label done;
1572
+
1573
+ // Need to perform float-to-int conversion.
1574
+ // Test for NaN.
1575
+ __ vcmp(d0, d0);
1576
+ // Move vector status bits to normal status bits.
1577
+ __ vmrs(v8::internal::pc);
1578
+ __ mov(r3, Operand(0), LeaveCC, vs); // NaN converts to 0
1579
+ __ b(vs, &done);
1580
+
1581
+ // Test whether exponent equal to 0x7FF (infinity or NaN)
1582
+ __ vmov(r4, r3, d0);
1583
+ __ mov(r5, Operand(0x7FF00000));
1584
+ __ and_(r3, r3, Operand(r5));
1585
+ __ teq(r3, Operand(r5));
1586
+ __ mov(r3, Operand(0), LeaveCC, eq);
1587
+
1588
+ // Not infinity or NaN simply convert to int
1589
+ if (IsElementTypeSigned(array_type)) {
1590
+ __ vcvt_s32_f64(s0, d0, ne);
1591
+ } else {
1592
+ __ vcvt_u32_f64(s0, d0, ne);
1593
+ }
1594
+
1595
+ __ vmov(r3, s0, ne);
1596
+
1597
+ __ bind(&done);
1598
+ switch (array_type) {
1599
+ case kExternalByteArray:
1600
+ case kExternalUnsignedByteArray:
1601
+ __ strb(r3, MemOperand(r2, r1, LSL, 0));
1602
+ break;
1603
+ case kExternalShortArray:
1604
+ case kExternalUnsignedShortArray:
1605
+ __ strh(r3, MemOperand(r2, r1, LSL, 1));
1606
+ break;
1607
+ case kExternalIntArray:
1608
+ case kExternalUnsignedIntArray:
1609
+ __ str(r3, MemOperand(r2, r1, LSL, 2));
1610
+ break;
1611
+ default:
1612
+ UNREACHABLE();
1613
+ break;
1614
+ }
1615
+ }
1616
+
1617
+ // r0: original value
1618
+ __ Ret();
1619
+ } else {
1620
+ // VFP3 is not available do manual conversions
1621
+ __ ldr(r3, FieldMemOperand(r0, HeapNumber::kExponentOffset));
1622
+ __ ldr(r4, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
1623
+
1624
+ if (array_type == kExternalFloatArray) {
1625
+ Label done, nan_or_infinity_or_zero;
1626
+ static const int kMantissaInHiWordShift =
1627
+ kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
1628
+
1629
+ static const int kMantissaInLoWordShift =
1630
+ kBitsPerInt - kMantissaInHiWordShift;
1631
+
1632
+ // Test for all special exponent values: zeros, subnormal numbers, NaNs
1633
+ // and infinities. All these should be converted to 0.
1634
+ __ mov(r5, Operand(HeapNumber::kExponentMask));
1635
+ __ and_(r6, r3, Operand(r5), SetCC);
1636
+ __ b(eq, &nan_or_infinity_or_zero);
1637
+
1638
+ __ teq(r6, Operand(r5));
1639
+ __ mov(r6, Operand(kBinary32ExponentMask), LeaveCC, eq);
1640
+ __ b(eq, &nan_or_infinity_or_zero);
1641
+
1642
+ // Rebias exponent.
1643
+ __ mov(r6, Operand(r6, LSR, HeapNumber::kExponentShift));
1644
+ __ add(r6,
1645
+ r6,
1646
+ Operand(kBinary32ExponentBias - HeapNumber::kExponentBias));
1647
+
1648
+ __ cmp(r6, Operand(kBinary32MaxExponent));
1649
+ __ and_(r3, r3, Operand(HeapNumber::kSignMask), LeaveCC, gt);
1650
+ __ orr(r3, r3, Operand(kBinary32ExponentMask), LeaveCC, gt);
1651
+ __ b(gt, &done);
1652
+
1653
+ __ cmp(r6, Operand(kBinary32MinExponent));
1654
+ __ and_(r3, r3, Operand(HeapNumber::kSignMask), LeaveCC, lt);
1655
+ __ b(lt, &done);
1656
+
1657
+ __ and_(r7, r3, Operand(HeapNumber::kSignMask));
1658
+ __ and_(r3, r3, Operand(HeapNumber::kMantissaMask));
1659
+ __ orr(r7, r7, Operand(r3, LSL, kMantissaInHiWordShift));
1660
+ __ orr(r7, r7, Operand(r4, LSR, kMantissaInLoWordShift));
1661
+ __ orr(r3, r7, Operand(r6, LSL, kBinary32ExponentShift));
1662
+
1663
+ __ bind(&done);
1664
+ __ str(r3, MemOperand(r2, r1, LSL, 2));
1665
+ __ Ret();
1666
+
1667
+ __ bind(&nan_or_infinity_or_zero);
1668
+ __ and_(r7, r3, Operand(HeapNumber::kSignMask));
1669
+ __ and_(r3, r3, Operand(HeapNumber::kMantissaMask));
1670
+ __ orr(r6, r6, r7);
1671
+ __ orr(r6, r6, Operand(r3, LSL, kMantissaInHiWordShift));
1672
+ __ orr(r3, r6, Operand(r4, LSR, kMantissaInLoWordShift));
1673
+ __ b(&done);
1674
+ } else {
1675
+ bool is_signed_type = IsElementTypeSigned(array_type);
1676
+ int meaningfull_bits = is_signed_type ? (kBitsPerInt - 1) : kBitsPerInt;
1677
+ int32_t min_value = is_signed_type ? 0x80000000 : 0x00000000;
1678
+
1679
+ Label done, sign;
1680
+
1681
+ // Test for all special exponent values: zeros, subnormal numbers, NaNs
1682
+ // and infinities. All these should be converted to 0.
1683
+ __ mov(r5, Operand(HeapNumber::kExponentMask));
1684
+ __ and_(r6, r3, Operand(r5), SetCC);
1685
+ __ mov(r3, Operand(0), LeaveCC, eq);
1686
+ __ b(eq, &done);
1687
+
1688
+ __ teq(r6, Operand(r5));
1689
+ __ mov(r3, Operand(0), LeaveCC, eq);
1690
+ __ b(eq, &done);
1691
+
1692
+ // Unbias exponent.
1693
+ __ mov(r6, Operand(r6, LSR, HeapNumber::kExponentShift));
1694
+ __ sub(r6, r6, Operand(HeapNumber::kExponentBias), SetCC);
1695
+ // If exponent is negative than result is 0.
1696
+ __ mov(r3, Operand(0), LeaveCC, mi);
1697
+ __ b(mi, &done);
1698
+
1699
+ // If exponent is too big than result is minimal value
1700
+ __ cmp(r6, Operand(meaningfull_bits - 1));
1701
+ __ mov(r3, Operand(min_value), LeaveCC, ge);
1702
+ __ b(ge, &done);
1703
+
1704
+ __ and_(r5, r3, Operand(HeapNumber::kSignMask), SetCC);
1705
+ __ and_(r3, r3, Operand(HeapNumber::kMantissaMask));
1706
+ __ orr(r3, r3, Operand(1u << HeapNumber::kMantissaBitsInTopWord));
1707
+
1708
+ __ rsb(r6, r6, Operand(HeapNumber::kMantissaBitsInTopWord), SetCC);
1709
+ __ mov(r3, Operand(r3, LSR, r6), LeaveCC, pl);
1710
+ __ b(pl, &sign);
1711
+
1712
+ __ rsb(r6, r6, Operand(0));
1713
+ __ mov(r3, Operand(r3, LSL, r6));
1714
+ __ rsb(r6, r6, Operand(meaningfull_bits));
1715
+ __ orr(r3, r3, Operand(r4, LSR, r6));
1716
+
1717
+ __ bind(&sign);
1718
+ __ teq(r5, Operand(0));
1719
+ __ rsb(r3, r3, Operand(0), LeaveCC, ne);
1720
+
1721
+ __ bind(&done);
1722
+ switch (array_type) {
1723
+ case kExternalByteArray:
1724
+ case kExternalUnsignedByteArray:
1725
+ __ strb(r3, MemOperand(r2, r1, LSL, 0));
1726
+ break;
1727
+ case kExternalShortArray:
1728
+ case kExternalUnsignedShortArray:
1729
+ __ strh(r3, MemOperand(r2, r1, LSL, 1));
1730
+ break;
1731
+ case kExternalIntArray:
1732
+ case kExternalUnsignedIntArray:
1733
+ __ str(r3, MemOperand(r2, r1, LSL, 2));
1734
+ break;
1735
+ default:
1736
+ UNREACHABLE();
1737
+ break;
1738
+ }
1739
+ }
1740
+ }
1741
+
1742
+ // Slow case: call runtime.
1743
+ __ bind(&slow);
1744
+ GenerateRuntimeSetProperty(masm);
1745
+ }
1746
+
1747
+
1748
+ void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
1749
+ // ----------- S t a t e -------------
1750
+ // -- r0 : value
1751
+ // -- r1 : receiver
1752
+ // -- r2 : name
1753
+ // -- lr : return address
1754
+ // -----------------------------------
1755
+
1756
+ // Probe the stub cache for a monomorphic STORE_IC stub matching the
+ // receiver (r1) and property name (r2); r3 is used as a scratch register.
+ // NOTE(review): the receiver is passed in r1 per the state comment above,
+ // not on the stack; on a probe hit control presumably jumps straight into
+ // the cached stub and does not fall through — confirm against
+ // StubCache::GenerateProbe.
+ Code::Flags flags = Code::ComputeFlags(Code::STORE_IC,
1757
+ NOT_IN_LOOP,
1758
+ MONOMORPHIC);
1759
+
1760
+ StubCache::GenerateProbe(masm, flags, r1, r2, r3, no_reg);
1761
+
1762
+ // Cache miss: Jump to runtime.
1763
+ GenerateMiss(masm);
1764
+ }
1765
+
1766
+
1767
+ void StoreIC::GenerateMiss(MacroAssembler* masm) {
1768
+ // ----------- S t a t e -------------
1769
+ // -- r0 : value
1770
+ // -- r1 : receiver
1771
+ // -- r2 : name
1772
+ // -- lr : return address
1773
+ // -----------------------------------
1774
+
1775
+ // Marshal the IC's register state onto the stack as the runtime call's
+ // arguments, in (receiver, name, value) order.
+ __ Push(r1, r2, r0);
1776
+
1777
+ // Perform tail call to the entry.
1778
+ // Three arguments were pushed above; the trailing 1 is presumably the
+ // result size expected back from the runtime — confirm against
+ // MacroAssembler::TailCallExternalReference.
+ ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_Miss));
1779
+ __ TailCallExternalReference(ref, 3, 1);
1780
+ }
1781
+
1782
+
1783
+ void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
1784
+ // ----------- S t a t e -------------
1785
+ // -- r0 : value
1786
+ // -- r1 : receiver
1787
+ // -- r2 : name
1788
+ // -- lr : return address
1789
+ // -----------------------------------
1790
+ //
1791
+ // Fast path for storing to a JSArray's "length" property: guard that the
+ // receiver and value have the expected shapes, then tail-call the
+ // kStoreIC_ArrayLength runtime entry; any guard failure falls through to
+ // the generic miss handler.
+ //
+ // This accepts as a receiver anything JSObject::SetElementsLength accepts
1792
+ // (currently anything except for external and pixel arrays which means
1793
+ // anything with elements of FixedArray type.), but currently is restricted
1794
+ // to JSArray.
1795
+ // Value must be a number, but only smis are accepted as the most common case.
1796
+
1797
+ Label miss;
1798
+
1799
+ Register receiver = r1;
1800
+ Register value = r0;
1801
+ Register scratch = r3;
1802
+
1803
+ // Check that the receiver isn't a smi.
1804
+ __ BranchOnSmi(receiver, &miss);
1805
+
1806
+ // Check that the object is a JS array. scratch is passed for both the
+ // map and the type output, so its previous contents are clobbered; the
+ // receiver register itself stays intact.
+ __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
1807
+ __ b(ne, &miss);
1808
+
1809
+ // Check that elements are FixedArray. This rules out external/pixel
+ // element backing stores, per the restriction documented above.
+ __ ldr(scratch, FieldMemOperand(receiver, JSArray::kElementsOffset));
1810
+ __ CompareObjectType(scratch, scratch, scratch, FIXED_ARRAY_TYPE);
1811
+ __ b(ne, &miss);
1812
+
1813
+ // Check that value is a smi. Non-smi numbers (heap numbers) take the
+ // miss path even though they are conceptually valid lengths.
+ __ BranchOnNotSmi(value, &miss);
1814
+
1815
+ // Prepare tail call to StoreIC_ArrayLength: two arguments
+ // (receiver, value) are passed on the stack.
+ __ Push(receiver, value);
1816
+
1817
+ ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_ArrayLength));
1818
+ __ TailCallExternalReference(ref, 2, 1);
1819
+
1820
+ // Any failed guard lands here and defers to the generic miss stub.
+ __ bind(&miss);
1821
+
1822
+ GenerateMiss(masm);
1823
+ }
1828
+
1829
+
1830
+ #undef __
1831
+
1832
+
1833
+ } } // namespace v8::internal