mustang 0.0.1 → 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (560)
  1. data/.rspec +1 -0
  2. data/Isolate +9 -0
  3. data/README.md +6 -12
  4. data/Rakefile +30 -4
  5. data/TODO.md +9 -0
  6. data/ext/v8/extconf.rb +56 -0
  7. data/ext/v8/v8.cpp +37 -0
  8. data/ext/v8/v8_array.cpp +161 -0
  9. data/ext/v8/v8_array.h +17 -0
  10. data/ext/v8/v8_base.cpp +147 -0
  11. data/ext/v8/v8_base.h +23 -0
  12. data/ext/v8/v8_cast.cpp +151 -0
  13. data/ext/v8/v8_cast.h +64 -0
  14. data/ext/v8/v8_context.cpp +174 -0
  15. data/ext/v8/v8_context.h +12 -0
  16. data/ext/v8/v8_date.cpp +61 -0
  17. data/ext/v8/v8_date.h +16 -0
  18. data/ext/v8/v8_errors.cpp +147 -0
  19. data/ext/v8/v8_errors.h +19 -0
  20. data/ext/v8/v8_external.cpp +66 -0
  21. data/ext/v8/v8_external.h +16 -0
  22. data/ext/v8/v8_function.cpp +182 -0
  23. data/ext/v8/v8_function.h +14 -0
  24. data/ext/v8/v8_integer.cpp +70 -0
  25. data/ext/v8/v8_integer.h +16 -0
  26. data/ext/v8/v8_macros.h +30 -0
  27. data/ext/v8/v8_main.cpp +53 -0
  28. data/ext/v8/v8_main.h +13 -0
  29. data/ext/v8/v8_number.cpp +62 -0
  30. data/ext/v8/v8_number.h +16 -0
  31. data/ext/v8/v8_object.cpp +172 -0
  32. data/ext/v8/v8_object.h +17 -0
  33. data/ext/v8/v8_ref.cpp +72 -0
  34. data/ext/v8/v8_ref.h +43 -0
  35. data/ext/v8/v8_regexp.cpp +148 -0
  36. data/ext/v8/v8_regexp.h +16 -0
  37. data/ext/v8/v8_string.cpp +78 -0
  38. data/ext/v8/v8_string.h +16 -0
  39. data/ext/v8/v8_value.cpp +370 -0
  40. data/ext/v8/v8_value.h +19 -0
  41. data/gemspec.yml +2 -1
  42. data/lib/core_ext/class.rb +14 -0
  43. data/lib/core_ext/object.rb +12 -0
  44. data/lib/core_ext/symbol.rb +23 -0
  45. data/lib/mustang.rb +44 -0
  46. data/lib/mustang/context.rb +69 -0
  47. data/lib/mustang/errors.rb +36 -0
  48. data/lib/support/delegated.rb +25 -0
  49. data/lib/v8/array.rb +21 -0
  50. data/lib/v8/context.rb +13 -0
  51. data/lib/v8/date.rb +20 -0
  52. data/lib/v8/error.rb +15 -0
  53. data/lib/v8/external.rb +16 -0
  54. data/lib/v8/function.rb +11 -0
  55. data/lib/v8/integer.rb +16 -0
  56. data/lib/v8/number.rb +16 -0
  57. data/lib/v8/object.rb +66 -0
  58. data/lib/v8/regexp.rb +23 -0
  59. data/lib/v8/string.rb +27 -0
  60. data/mustang.gemspec +3 -0
  61. data/spec/core_ext/class_spec.rb +19 -0
  62. data/spec/core_ext/object_spec.rb +19 -0
  63. data/spec/core_ext/symbol_spec.rb +27 -0
  64. data/spec/fixtures/test1.js +2 -0
  65. data/spec/fixtures/test2.js +2 -0
  66. data/spec/spec_helper.rb +20 -0
  67. data/spec/v8/array_spec.rb +88 -0
  68. data/spec/v8/cast_spec.rb +151 -0
  69. data/spec/v8/context_spec.rb +78 -0
  70. data/spec/v8/data_spec.rb +39 -0
  71. data/spec/v8/date_spec.rb +45 -0
  72. data/spec/v8/empty_spec.rb +27 -0
  73. data/spec/v8/errors_spec.rb +142 -0
  74. data/spec/v8/external_spec.rb +44 -0
  75. data/spec/v8/function_spec.rb +170 -0
  76. data/spec/v8/integer_spec.rb +41 -0
  77. data/spec/v8/main_spec.rb +18 -0
  78. data/spec/v8/null_spec.rb +27 -0
  79. data/spec/v8/number_spec.rb +40 -0
  80. data/spec/v8/object_spec.rb +79 -0
  81. data/spec/v8/primitive_spec.rb +9 -0
  82. data/spec/v8/regexp_spec.rb +65 -0
  83. data/spec/v8/string_spec.rb +48 -0
  84. data/spec/v8/undefined_spec.rb +27 -0
  85. data/spec/v8/value_spec.rb +215 -0
  86. data/vendor/v8/.gitignore +2 -0
  87. data/vendor/v8/AUTHORS +3 -1
  88. data/vendor/v8/ChangeLog +117 -0
  89. data/vendor/v8/SConstruct +334 -53
  90. data/vendor/v8/include/v8-debug.h +21 -11
  91. data/vendor/v8/include/v8-preparser.h +1 -1
  92. data/vendor/v8/include/v8-profiler.h +122 -43
  93. data/vendor/v8/include/v8-testing.h +5 -0
  94. data/vendor/v8/include/v8.h +171 -17
  95. data/vendor/v8/preparser/SConscript +38 -0
  96. data/vendor/v8/preparser/preparser-process.cc +77 -114
  97. data/vendor/v8/samples/shell.cc +232 -46
  98. data/vendor/v8/src/SConscript +29 -5
  99. data/vendor/v8/src/accessors.cc +70 -211
  100. data/vendor/v8/{test/cctest/test-mips.cc → src/allocation-inl.h} +15 -18
  101. data/vendor/v8/src/allocation.cc +0 -82
  102. data/vendor/v8/src/allocation.h +9 -42
  103. data/vendor/v8/src/api.cc +1645 -1156
  104. data/vendor/v8/src/api.h +76 -12
  105. data/vendor/v8/src/apiutils.h +0 -7
  106. data/vendor/v8/src/arguments.h +15 -4
  107. data/vendor/v8/src/arm/assembler-arm-inl.h +10 -9
  108. data/vendor/v8/src/arm/assembler-arm.cc +62 -23
  109. data/vendor/v8/src/arm/assembler-arm.h +76 -11
  110. data/vendor/v8/src/arm/builtins-arm.cc +39 -33
  111. data/vendor/v8/src/arm/code-stubs-arm.cc +1182 -402
  112. data/vendor/v8/src/arm/code-stubs-arm.h +20 -54
  113. data/vendor/v8/src/arm/codegen-arm.cc +159 -106
  114. data/vendor/v8/src/arm/codegen-arm.h +6 -6
  115. data/vendor/v8/src/arm/constants-arm.h +16 -1
  116. data/vendor/v8/src/arm/cpu-arm.cc +7 -5
  117. data/vendor/v8/src/arm/debug-arm.cc +6 -4
  118. data/vendor/v8/src/arm/deoptimizer-arm.cc +51 -14
  119. data/vendor/v8/src/arm/disasm-arm.cc +47 -15
  120. data/vendor/v8/src/arm/frames-arm.h +1 -1
  121. data/vendor/v8/src/arm/full-codegen-arm.cc +724 -408
  122. data/vendor/v8/src/arm/ic-arm.cc +90 -85
  123. data/vendor/v8/src/arm/lithium-arm.cc +140 -69
  124. data/vendor/v8/src/arm/lithium-arm.h +161 -46
  125. data/vendor/v8/src/arm/lithium-codegen-arm.cc +567 -297
  126. data/vendor/v8/src/arm/lithium-codegen-arm.h +21 -9
  127. data/vendor/v8/src/arm/lithium-gap-resolver-arm.cc +2 -0
  128. data/vendor/v8/src/arm/macro-assembler-arm.cc +457 -96
  129. data/vendor/v8/src/arm/macro-assembler-arm.h +115 -18
  130. data/vendor/v8/src/arm/regexp-macro-assembler-arm.cc +20 -13
  131. data/vendor/v8/src/arm/regexp-macro-assembler-arm.h +1 -0
  132. data/vendor/v8/src/arm/simulator-arm.cc +184 -101
  133. data/vendor/v8/src/arm/simulator-arm.h +26 -21
  134. data/vendor/v8/src/arm/stub-cache-arm.cc +450 -467
  135. data/vendor/v8/src/arm/virtual-frame-arm.cc +14 -12
  136. data/vendor/v8/src/arm/virtual-frame-arm.h +11 -8
  137. data/vendor/v8/src/array.js +35 -18
  138. data/vendor/v8/src/assembler.cc +186 -92
  139. data/vendor/v8/src/assembler.h +106 -69
  140. data/vendor/v8/src/ast-inl.h +5 -0
  141. data/vendor/v8/src/ast.cc +46 -35
  142. data/vendor/v8/src/ast.h +107 -50
  143. data/vendor/v8/src/atomicops.h +2 -0
  144. data/vendor/v8/src/atomicops_internals_mips_gcc.h +169 -0
  145. data/vendor/v8/src/bootstrapper.cc +649 -399
  146. data/vendor/v8/src/bootstrapper.h +94 -27
  147. data/vendor/v8/src/builtins.cc +359 -227
  148. data/vendor/v8/src/builtins.h +157 -123
  149. data/vendor/v8/src/checks.cc +2 -2
  150. data/vendor/v8/src/checks.h +4 -0
  151. data/vendor/v8/src/code-stubs.cc +27 -17
  152. data/vendor/v8/src/code-stubs.h +38 -17
  153. data/vendor/v8/src/codegen-inl.h +5 -1
  154. data/vendor/v8/src/codegen.cc +27 -17
  155. data/vendor/v8/src/codegen.h +9 -9
  156. data/vendor/v8/src/compilation-cache.cc +92 -206
  157. data/vendor/v8/src/compilation-cache.h +205 -30
  158. data/vendor/v8/src/compiler.cc +107 -120
  159. data/vendor/v8/src/compiler.h +17 -2
  160. data/vendor/v8/src/contexts.cc +22 -15
  161. data/vendor/v8/src/contexts.h +14 -8
  162. data/vendor/v8/src/conversions.cc +86 -30
  163. data/vendor/v8/src/counters.cc +19 -4
  164. data/vendor/v8/src/counters.h +28 -16
  165. data/vendor/v8/src/cpu-profiler-inl.h +4 -3
  166. data/vendor/v8/src/cpu-profiler.cc +123 -72
  167. data/vendor/v8/src/cpu-profiler.h +33 -19
  168. data/vendor/v8/src/cpu.h +2 -0
  169. data/vendor/v8/src/d8-debug.cc +3 -3
  170. data/vendor/v8/src/d8-debug.h +7 -6
  171. data/vendor/v8/src/d8-posix.cc +2 -0
  172. data/vendor/v8/src/d8.cc +22 -12
  173. data/vendor/v8/src/d8.gyp +3 -0
  174. data/vendor/v8/src/d8.js +618 -0
  175. data/vendor/v8/src/data-flow.h +3 -3
  176. data/vendor/v8/src/dateparser.h +4 -2
  177. data/vendor/v8/src/debug-agent.cc +10 -9
  178. data/vendor/v8/src/debug-agent.h +9 -11
  179. data/vendor/v8/src/debug-debugger.js +121 -0
  180. data/vendor/v8/src/debug.cc +331 -227
  181. data/vendor/v8/src/debug.h +248 -219
  182. data/vendor/v8/src/deoptimizer.cc +173 -62
  183. data/vendor/v8/src/deoptimizer.h +119 -19
  184. data/vendor/v8/src/disasm.h +3 -0
  185. data/vendor/v8/src/disassembler.cc +10 -9
  186. data/vendor/v8/src/execution.cc +185 -129
  187. data/vendor/v8/src/execution.h +47 -78
  188. data/vendor/v8/src/extensions/experimental/break-iterator.cc +250 -0
  189. data/vendor/v8/src/extensions/experimental/break-iterator.h +89 -0
  190. data/vendor/v8/src/extensions/experimental/experimental.gyp +2 -0
  191. data/vendor/v8/src/extensions/experimental/i18n-extension.cc +22 -2
  192. data/vendor/v8/src/extensions/externalize-string-extension.cc +2 -2
  193. data/vendor/v8/src/extensions/gc-extension.cc +1 -1
  194. data/vendor/v8/src/factory.cc +261 -154
  195. data/vendor/v8/src/factory.h +162 -158
  196. data/vendor/v8/src/flag-definitions.h +17 -11
  197. data/vendor/v8/src/frame-element.cc +0 -5
  198. data/vendor/v8/src/frame-element.h +9 -13
  199. data/vendor/v8/src/frames-inl.h +7 -0
  200. data/vendor/v8/src/frames.cc +56 -46
  201. data/vendor/v8/src/frames.h +36 -25
  202. data/vendor/v8/src/full-codegen.cc +15 -24
  203. data/vendor/v8/src/full-codegen.h +13 -41
  204. data/vendor/v8/src/func-name-inferrer.cc +7 -6
  205. data/vendor/v8/src/func-name-inferrer.h +1 -1
  206. data/vendor/v8/src/gdb-jit.cc +1 -0
  207. data/vendor/v8/src/global-handles.cc +118 -56
  208. data/vendor/v8/src/global-handles.h +98 -40
  209. data/vendor/v8/src/globals.h +2 -2
  210. data/vendor/v8/src/handles-inl.h +106 -9
  211. data/vendor/v8/src/handles.cc +220 -157
  212. data/vendor/v8/src/handles.h +38 -59
  213. data/vendor/v8/src/hashmap.h +3 -3
  214. data/vendor/v8/src/heap-inl.h +141 -25
  215. data/vendor/v8/src/heap-profiler.cc +117 -63
  216. data/vendor/v8/src/heap-profiler.h +38 -21
  217. data/vendor/v8/src/heap.cc +805 -564
  218. data/vendor/v8/src/heap.h +640 -594
  219. data/vendor/v8/src/hydrogen-instructions.cc +216 -73
  220. data/vendor/v8/src/hydrogen-instructions.h +259 -124
  221. data/vendor/v8/src/hydrogen.cc +996 -1171
  222. data/vendor/v8/src/hydrogen.h +163 -144
  223. data/vendor/v8/src/ia32/assembler-ia32-inl.h +12 -11
  224. data/vendor/v8/src/ia32/assembler-ia32.cc +85 -39
  225. data/vendor/v8/src/ia32/assembler-ia32.h +82 -16
  226. data/vendor/v8/src/ia32/builtins-ia32.cc +64 -58
  227. data/vendor/v8/src/ia32/code-stubs-ia32.cc +248 -324
  228. data/vendor/v8/src/ia32/code-stubs-ia32.h +3 -44
  229. data/vendor/v8/src/ia32/codegen-ia32.cc +217 -165
  230. data/vendor/v8/src/ia32/codegen-ia32.h +3 -0
  231. data/vendor/v8/src/ia32/cpu-ia32.cc +6 -5
  232. data/vendor/v8/src/ia32/debug-ia32.cc +8 -5
  233. data/vendor/v8/src/ia32/deoptimizer-ia32.cc +124 -14
  234. data/vendor/v8/src/ia32/disasm-ia32.cc +85 -62
  235. data/vendor/v8/src/ia32/frames-ia32.h +1 -1
  236. data/vendor/v8/src/ia32/full-codegen-ia32.cc +348 -435
  237. data/vendor/v8/src/ia32/ic-ia32.cc +91 -91
  238. data/vendor/v8/src/ia32/lithium-codegen-ia32.cc +500 -255
  239. data/vendor/v8/src/ia32/lithium-codegen-ia32.h +13 -4
  240. data/vendor/v8/src/ia32/lithium-gap-resolver-ia32.cc +6 -0
  241. data/vendor/v8/src/ia32/lithium-ia32.cc +122 -45
  242. data/vendor/v8/src/ia32/lithium-ia32.h +128 -41
  243. data/vendor/v8/src/ia32/macro-assembler-ia32.cc +109 -84
  244. data/vendor/v8/src/ia32/macro-assembler-ia32.h +18 -9
  245. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.cc +26 -15
  246. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.h +1 -0
  247. data/vendor/v8/src/ia32/register-allocator-ia32.cc +30 -30
  248. data/vendor/v8/src/ia32/simulator-ia32.h +4 -4
  249. data/vendor/v8/src/ia32/stub-cache-ia32.cc +383 -400
  250. data/vendor/v8/src/ia32/virtual-frame-ia32.cc +36 -13
  251. data/vendor/v8/src/ia32/virtual-frame-ia32.h +11 -5
  252. data/vendor/v8/src/ic-inl.h +12 -2
  253. data/vendor/v8/src/ic.cc +304 -221
  254. data/vendor/v8/src/ic.h +115 -58
  255. data/vendor/v8/src/interpreter-irregexp.cc +25 -21
  256. data/vendor/v8/src/interpreter-irregexp.h +2 -1
  257. data/vendor/v8/src/isolate.cc +883 -0
  258. data/vendor/v8/src/isolate.h +1304 -0
  259. data/vendor/v8/src/json.js +10 -10
  260. data/vendor/v8/src/jsregexp.cc +111 -80
  261. data/vendor/v8/src/jsregexp.h +6 -7
  262. data/vendor/v8/src/jump-target-heavy.cc +5 -8
  263. data/vendor/v8/src/jump-target-heavy.h +0 -6
  264. data/vendor/v8/src/jump-target-inl.h +1 -1
  265. data/vendor/v8/src/jump-target-light.cc +3 -3
  266. data/vendor/v8/src/lithium-allocator-inl.h +2 -0
  267. data/vendor/v8/src/lithium-allocator.cc +42 -30
  268. data/vendor/v8/src/lithium-allocator.h +8 -22
  269. data/vendor/v8/src/lithium.cc +1 -0
  270. data/vendor/v8/src/liveedit.cc +141 -99
  271. data/vendor/v8/src/liveedit.h +7 -2
  272. data/vendor/v8/src/liveobjectlist-inl.h +90 -0
  273. data/vendor/v8/src/liveobjectlist.cc +2537 -1
  274. data/vendor/v8/src/liveobjectlist.h +245 -35
  275. data/vendor/v8/src/log-utils.cc +122 -35
  276. data/vendor/v8/src/log-utils.h +33 -36
  277. data/vendor/v8/src/log.cc +299 -241
  278. data/vendor/v8/src/log.h +177 -110
  279. data/vendor/v8/src/mark-compact.cc +612 -470
  280. data/vendor/v8/src/mark-compact.h +153 -80
  281. data/vendor/v8/src/messages.cc +16 -14
  282. data/vendor/v8/src/messages.js +30 -7
  283. data/vendor/v8/src/mips/assembler-mips-inl.h +155 -35
  284. data/vendor/v8/src/mips/assembler-mips.cc +1093 -219
  285. data/vendor/v8/src/mips/assembler-mips.h +552 -153
  286. data/vendor/v8/src/mips/builtins-mips.cc +43 -100
  287. data/vendor/v8/src/mips/code-stubs-mips.cc +752 -0
  288. data/vendor/v8/src/mips/code-stubs-mips.h +511 -0
  289. data/vendor/v8/src/mips/codegen-mips-inl.h +8 -14
  290. data/vendor/v8/src/mips/codegen-mips.cc +672 -896
  291. data/vendor/v8/src/mips/codegen-mips.h +271 -69
  292. data/vendor/v8/src/mips/constants-mips.cc +44 -20
  293. data/vendor/v8/src/mips/constants-mips.h +238 -40
  294. data/vendor/v8/src/mips/cpu-mips.cc +20 -3
  295. data/vendor/v8/src/mips/debug-mips.cc +35 -7
  296. data/vendor/v8/src/mips/deoptimizer-mips.cc +91 -0
  297. data/vendor/v8/src/mips/disasm-mips.cc +329 -93
  298. data/vendor/v8/src/mips/frames-mips.cc +2 -50
  299. data/vendor/v8/src/mips/frames-mips.h +24 -9
  300. data/vendor/v8/src/mips/full-codegen-mips.cc +473 -23
  301. data/vendor/v8/src/mips/ic-mips.cc +81 -45
  302. data/vendor/v8/src/mips/jump-target-mips.cc +11 -106
  303. data/vendor/v8/src/mips/lithium-codegen-mips.h +65 -0
  304. data/vendor/v8/src/mips/lithium-mips.h +304 -0
  305. data/vendor/v8/src/mips/macro-assembler-mips.cc +2391 -390
  306. data/vendor/v8/src/mips/macro-assembler-mips.h +718 -121
  307. data/vendor/v8/src/mips/regexp-macro-assembler-mips.cc +478 -0
  308. data/vendor/v8/src/mips/regexp-macro-assembler-mips.h +250 -0
  309. data/vendor/v8/src/mips/register-allocator-mips-inl.h +0 -3
  310. data/vendor/v8/src/mips/register-allocator-mips.h +3 -2
  311. data/vendor/v8/src/mips/simulator-mips.cc +1009 -221
  312. data/vendor/v8/src/mips/simulator-mips.h +119 -36
  313. data/vendor/v8/src/mips/stub-cache-mips.cc +331 -148
  314. data/vendor/v8/src/mips/{fast-codegen-mips.cc → virtual-frame-mips-inl.h} +11 -30
  315. data/vendor/v8/src/mips/virtual-frame-mips.cc +137 -149
  316. data/vendor/v8/src/mips/virtual-frame-mips.h +294 -312
  317. data/vendor/v8/src/mirror-debugger.js +9 -8
  318. data/vendor/v8/src/mksnapshot.cc +2 -2
  319. data/vendor/v8/src/objects-debug.cc +16 -16
  320. data/vendor/v8/src/objects-inl.h +421 -195
  321. data/vendor/v8/src/objects-printer.cc +7 -7
  322. data/vendor/v8/src/objects-visiting.cc +1 -1
  323. data/vendor/v8/src/objects-visiting.h +33 -12
  324. data/vendor/v8/src/objects.cc +935 -658
  325. data/vendor/v8/src/objects.h +234 -139
  326. data/vendor/v8/src/parser.cc +484 -439
  327. data/vendor/v8/src/parser.h +35 -14
  328. data/vendor/v8/src/platform-cygwin.cc +173 -107
  329. data/vendor/v8/src/platform-freebsd.cc +224 -72
  330. data/vendor/v8/src/platform-linux.cc +234 -95
  331. data/vendor/v8/src/platform-macos.cc +215 -82
  332. data/vendor/v8/src/platform-nullos.cc +9 -3
  333. data/vendor/v8/src/platform-openbsd.cc +22 -7
  334. data/vendor/v8/src/platform-posix.cc +30 -5
  335. data/vendor/v8/src/platform-solaris.cc +120 -38
  336. data/vendor/v8/src/platform-tls-mac.h +62 -0
  337. data/vendor/v8/src/platform-tls-win32.h +62 -0
  338. data/vendor/v8/src/platform-tls.h +50 -0
  339. data/vendor/v8/src/platform-win32.cc +195 -97
  340. data/vendor/v8/src/platform.h +72 -15
  341. data/vendor/v8/src/preparse-data.cc +2 -0
  342. data/vendor/v8/src/preparser-api.cc +8 -2
  343. data/vendor/v8/src/preparser.cc +1 -1
  344. data/vendor/v8/src/prettyprinter.cc +43 -52
  345. data/vendor/v8/src/prettyprinter.h +1 -1
  346. data/vendor/v8/src/profile-generator-inl.h +0 -28
  347. data/vendor/v8/src/profile-generator.cc +942 -685
  348. data/vendor/v8/src/profile-generator.h +210 -176
  349. data/vendor/v8/src/property.cc +6 -0
  350. data/vendor/v8/src/property.h +14 -3
  351. data/vendor/v8/src/regexp-macro-assembler-irregexp.cc +1 -1
  352. data/vendor/v8/src/regexp-macro-assembler.cc +28 -19
  353. data/vendor/v8/src/regexp-macro-assembler.h +11 -6
  354. data/vendor/v8/src/regexp-stack.cc +18 -10
  355. data/vendor/v8/src/regexp-stack.h +45 -21
  356. data/vendor/v8/src/regexp.js +3 -3
  357. data/vendor/v8/src/register-allocator-inl.h +3 -3
  358. data/vendor/v8/src/register-allocator.cc +1 -7
  359. data/vendor/v8/src/register-allocator.h +5 -15
  360. data/vendor/v8/src/rewriter.cc +2 -1
  361. data/vendor/v8/src/runtime-profiler.cc +158 -128
  362. data/vendor/v8/src/runtime-profiler.h +131 -15
  363. data/vendor/v8/src/runtime.cc +2409 -1692
  364. data/vendor/v8/src/runtime.h +93 -17
  365. data/vendor/v8/src/safepoint-table.cc +3 -0
  366. data/vendor/v8/src/safepoint-table.h +9 -3
  367. data/vendor/v8/src/scanner-base.cc +21 -28
  368. data/vendor/v8/src/scanner-base.h +22 -11
  369. data/vendor/v8/src/scanner.cc +3 -5
  370. data/vendor/v8/src/scanner.h +4 -2
  371. data/vendor/v8/src/scopeinfo.cc +11 -16
  372. data/vendor/v8/src/scopeinfo.h +26 -15
  373. data/vendor/v8/src/scopes.cc +67 -37
  374. data/vendor/v8/src/scopes.h +26 -12
  375. data/vendor/v8/src/serialize.cc +193 -154
  376. data/vendor/v8/src/serialize.h +41 -36
  377. data/vendor/v8/src/small-pointer-list.h +163 -0
  378. data/vendor/v8/src/snapshot-common.cc +1 -1
  379. data/vendor/v8/src/snapshot.h +3 -1
  380. data/vendor/v8/src/spaces-inl.h +30 -25
  381. data/vendor/v8/src/spaces.cc +263 -370
  382. data/vendor/v8/src/spaces.h +178 -166
  383. data/vendor/v8/src/string-search.cc +4 -3
  384. data/vendor/v8/src/string-search.h +21 -20
  385. data/vendor/v8/src/string-stream.cc +32 -24
  386. data/vendor/v8/src/string.js +7 -7
  387. data/vendor/v8/src/stub-cache.cc +324 -248
  388. data/vendor/v8/src/stub-cache.h +181 -155
  389. data/vendor/v8/src/token.cc +3 -3
  390. data/vendor/v8/src/token.h +3 -3
  391. data/vendor/v8/src/top.cc +218 -390
  392. data/vendor/v8/src/type-info.cc +98 -32
  393. data/vendor/v8/src/type-info.h +10 -3
  394. data/vendor/v8/src/unicode.cc +1 -1
  395. data/vendor/v8/src/unicode.h +1 -1
  396. data/vendor/v8/src/utils.h +3 -0
  397. data/vendor/v8/src/v8-counters.cc +18 -11
  398. data/vendor/v8/src/v8-counters.h +34 -13
  399. data/vendor/v8/src/v8.cc +66 -121
  400. data/vendor/v8/src/v8.h +7 -4
  401. data/vendor/v8/src/v8globals.h +18 -12
  402. data/vendor/v8/src/{memory.h → v8memory.h} +0 -0
  403. data/vendor/v8/src/v8natives.js +59 -18
  404. data/vendor/v8/src/v8threads.cc +127 -114
  405. data/vendor/v8/src/v8threads.h +42 -35
  406. data/vendor/v8/src/v8utils.h +2 -39
  407. data/vendor/v8/src/variables.h +1 -1
  408. data/vendor/v8/src/version.cc +26 -5
  409. data/vendor/v8/src/version.h +4 -0
  410. data/vendor/v8/src/virtual-frame-heavy-inl.h +2 -4
  411. data/vendor/v8/src/virtual-frame-light-inl.h +5 -4
  412. data/vendor/v8/src/vm-state-inl.h +21 -17
  413. data/vendor/v8/src/vm-state.h +7 -5
  414. data/vendor/v8/src/win32-headers.h +1 -0
  415. data/vendor/v8/src/x64/assembler-x64-inl.h +12 -11
  416. data/vendor/v8/src/x64/assembler-x64.cc +80 -40
  417. data/vendor/v8/src/x64/assembler-x64.h +67 -17
  418. data/vendor/v8/src/x64/builtins-x64.cc +34 -33
  419. data/vendor/v8/src/x64/code-stubs-x64.cc +636 -377
  420. data/vendor/v8/src/x64/code-stubs-x64.h +14 -48
  421. data/vendor/v8/src/x64/codegen-x64-inl.h +1 -1
  422. data/vendor/v8/src/x64/codegen-x64.cc +158 -136
  423. data/vendor/v8/src/x64/codegen-x64.h +4 -1
  424. data/vendor/v8/src/x64/cpu-x64.cc +7 -5
  425. data/vendor/v8/src/x64/debug-x64.cc +8 -6
  426. data/vendor/v8/src/x64/deoptimizer-x64.cc +195 -20
  427. data/vendor/v8/src/x64/disasm-x64.cc +42 -23
  428. data/vendor/v8/src/x64/frames-x64.cc +1 -1
  429. data/vendor/v8/src/x64/frames-x64.h +2 -2
  430. data/vendor/v8/src/x64/full-codegen-x64.cc +780 -218
  431. data/vendor/v8/src/x64/ic-x64.cc +77 -79
  432. data/vendor/v8/src/x64/jump-target-x64.cc +1 -1
  433. data/vendor/v8/src/x64/lithium-codegen-x64.cc +698 -181
  434. data/vendor/v8/src/x64/lithium-codegen-x64.h +31 -6
  435. data/vendor/v8/src/x64/lithium-x64.cc +136 -54
  436. data/vendor/v8/src/x64/lithium-x64.h +142 -51
  437. data/vendor/v8/src/x64/macro-assembler-x64.cc +456 -187
  438. data/vendor/v8/src/x64/macro-assembler-x64.h +166 -34
  439. data/vendor/v8/src/x64/regexp-macro-assembler-x64.cc +44 -28
  440. data/vendor/v8/src/x64/regexp-macro-assembler-x64.h +8 -4
  441. data/vendor/v8/src/x64/register-allocator-x64-inl.h +3 -3
  442. data/vendor/v8/src/x64/register-allocator-x64.cc +12 -8
  443. data/vendor/v8/src/x64/simulator-x64.h +5 -5
  444. data/vendor/v8/src/x64/stub-cache-x64.cc +299 -344
  445. data/vendor/v8/src/x64/virtual-frame-x64.cc +37 -13
  446. data/vendor/v8/src/x64/virtual-frame-x64.h +13 -7
  447. data/vendor/v8/src/zone-inl.h +49 -3
  448. data/vendor/v8/src/zone.cc +42 -41
  449. data/vendor/v8/src/zone.h +37 -34
  450. data/vendor/v8/test/benchmarks/testcfg.py +100 -0
  451. data/vendor/v8/test/cctest/SConscript +5 -4
  452. data/vendor/v8/test/cctest/cctest.h +3 -2
  453. data/vendor/v8/test/cctest/cctest.status +6 -11
  454. data/vendor/v8/test/cctest/test-accessors.cc +3 -3
  455. data/vendor/v8/test/cctest/test-alloc.cc +39 -33
  456. data/vendor/v8/test/cctest/test-api.cc +1092 -205
  457. data/vendor/v8/test/cctest/test-assembler-arm.cc +39 -25
  458. data/vendor/v8/test/cctest/test-assembler-ia32.cc +36 -37
  459. data/vendor/v8/test/cctest/test-assembler-mips.cc +1098 -40
  460. data/vendor/v8/test/cctest/test-assembler-x64.cc +32 -25
  461. data/vendor/v8/test/cctest/test-ast.cc +1 -0
  462. data/vendor/v8/test/cctest/test-circular-queue.cc +8 -5
  463. data/vendor/v8/test/cctest/test-compiler.cc +24 -24
  464. data/vendor/v8/test/cctest/test-cpu-profiler.cc +140 -5
  465. data/vendor/v8/test/cctest/test-dataflow.cc +1 -0
  466. data/vendor/v8/test/cctest/test-debug.cc +136 -77
  467. data/vendor/v8/test/cctest/test-decls.cc +1 -1
  468. data/vendor/v8/test/cctest/test-deoptimization.cc +25 -24
  469. data/vendor/v8/test/cctest/test-disasm-arm.cc +9 -4
  470. data/vendor/v8/test/cctest/test-disasm-ia32.cc +10 -8
  471. data/vendor/v8/test/cctest/test-func-name-inference.cc +10 -4
  472. data/vendor/v8/test/cctest/test-heap-profiler.cc +226 -164
  473. data/vendor/v8/test/cctest/test-heap.cc +240 -217
  474. data/vendor/v8/test/cctest/test-liveedit.cc +1 -0
  475. data/vendor/v8/test/cctest/test-log-stack-tracer.cc +18 -20
  476. data/vendor/v8/test/cctest/test-log.cc +114 -108
  477. data/vendor/v8/test/cctest/test-macro-assembler-x64.cc +247 -177
  478. data/vendor/v8/test/cctest/test-mark-compact.cc +129 -90
  479. data/vendor/v8/test/cctest/test-parsing.cc +15 -14
  480. data/vendor/v8/test/cctest/test-platform-linux.cc +1 -0
  481. data/vendor/v8/test/cctest/test-platform-tls.cc +66 -0
  482. data/vendor/v8/test/cctest/test-platform-win32.cc +1 -0
  483. data/vendor/v8/test/cctest/test-profile-generator.cc +1 -1
  484. data/vendor/v8/test/cctest/test-regexp.cc +53 -41
  485. data/vendor/v8/test/cctest/test-reloc-info.cc +18 -11
  486. data/vendor/v8/test/cctest/test-serialize.cc +44 -43
  487. data/vendor/v8/test/cctest/test-sockets.cc +8 -3
  488. data/vendor/v8/test/cctest/test-spaces.cc +47 -29
  489. data/vendor/v8/test/cctest/test-strings.cc +20 -20
  490. data/vendor/v8/test/cctest/test-thread-termination.cc +8 -3
  491. data/vendor/v8/test/cctest/test-threads.cc +5 -3
  492. data/vendor/v8/test/cctest/test-utils.cc +5 -4
  493. data/vendor/v8/test/cctest/testcfg.py +7 -3
  494. data/vendor/v8/test/es5conform/es5conform.status +2 -77
  495. data/vendor/v8/test/es5conform/testcfg.py +1 -1
  496. data/vendor/v8/test/message/testcfg.py +1 -1
  497. data/vendor/v8/test/mjsunit/accessors-on-global-object.js +3 -3
  498. data/vendor/v8/test/mjsunit/array-concat.js +43 -1
  499. data/vendor/v8/test/mjsunit/array-join.js +25 -0
  500. data/vendor/v8/test/mjsunit/bitops-info.js +7 -1
  501. data/vendor/v8/test/mjsunit/compiler/array-length.js +2 -2
  502. data/vendor/v8/test/mjsunit/compiler/global-accessors.js +47 -0
  503. data/vendor/v8/test/mjsunit/compiler/pic.js +1 -1
  504. data/vendor/v8/test/mjsunit/compiler/regress-loadfield.js +65 -0
  505. data/vendor/v8/test/mjsunit/math-sqrt.js +5 -1
  506. data/vendor/v8/test/mjsunit/mjsunit.js +59 -8
  507. data/vendor/v8/test/mjsunit/mjsunit.status +0 -12
  508. data/vendor/v8/test/mjsunit/mul-exhaustive.js +129 -11
  509. data/vendor/v8/test/mjsunit/negate-zero.js +1 -1
  510. data/vendor/v8/test/mjsunit/object-freeze.js +5 -13
  511. data/vendor/v8/test/mjsunit/object-prevent-extensions.js +9 -50
  512. data/vendor/v8/test/mjsunit/object-seal.js +4 -13
  513. data/vendor/v8/test/mjsunit/override-eval-with-non-function.js +36 -0
  514. data/vendor/v8/test/mjsunit/regress/regress-1145.js +54 -0
  515. data/vendor/v8/test/mjsunit/regress/regress-1172-bis.js +37 -0
  516. data/vendor/v8/test/mjsunit/regress/regress-1181.js +54 -0
  517. data/vendor/v8/test/mjsunit/regress/regress-1207.js +35 -0
  518. data/vendor/v8/test/mjsunit/regress/regress-1209.js +34 -0
  519. data/vendor/v8/test/mjsunit/regress/regress-1210.js +48 -0
  520. data/vendor/v8/test/mjsunit/regress/regress-1213.js +43 -0
  521. data/vendor/v8/test/mjsunit/regress/regress-1218.js +29 -0
  522. data/vendor/v8/test/mjsunit/regress/regress-1229.js +79 -0
  523. data/vendor/v8/test/mjsunit/regress/regress-1233.js +47 -0
  524. data/vendor/v8/test/mjsunit/regress/regress-1236.js +34 -0
  525. data/vendor/v8/test/mjsunit/regress/regress-1237.js +36 -0
  526. data/vendor/v8/test/mjsunit/regress/regress-1240.js +39 -0
  527. data/vendor/v8/test/mjsunit/regress/regress-1257.js +58 -0
  528. data/vendor/v8/test/mjsunit/regress/regress-1278.js +69 -0
  529. data/vendor/v8/test/mjsunit/regress/regress-create-exception.js +1 -0
  530. data/vendor/v8/test/mjsunit/regress/regress-lazy-deopt-reloc.js +52 -0
  531. data/vendor/v8/test/mjsunit/sin-cos.js +15 -10
  532. data/vendor/v8/test/mjsunit/smi-negative-zero.js +2 -2
  533. data/vendor/v8/test/mjsunit/str-to-num.js +1 -1
  534. data/vendor/v8/test/mjsunit/strict-mode.js +435 -0
  535. data/vendor/v8/test/mjsunit/testcfg.py +23 -6
  536. data/vendor/v8/test/mozilla/mozilla.status +0 -2
  537. data/vendor/v8/test/mozilla/testcfg.py +1 -1
  538. data/vendor/v8/test/preparser/empty.js +28 -0
  539. data/vendor/v8/test/preparser/functions-only.js +38 -0
  540. data/vendor/v8/test/preparser/non-alphanum.js +34 -0
  541. data/vendor/v8/test/preparser/symbols-only.js +49 -0
  542. data/vendor/v8/test/preparser/testcfg.py +90 -0
  543. data/vendor/v8/test/sputnik/testcfg.py +1 -1
  544. data/vendor/v8/test/test262/README +16 -0
  545. data/vendor/v8/test/test262/harness-adapt.js +80 -0
  546. data/vendor/v8/test/test262/test262.status +1506 -0
  547. data/vendor/v8/test/test262/testcfg.py +123 -0
  548. data/vendor/v8/tools/freebsd-tick-processor +10 -0
  549. data/vendor/v8/tools/gyp/v8.gyp +8 -33
  550. data/vendor/v8/tools/linux-tick-processor +5 -3
  551. data/vendor/v8/tools/test.py +37 -14
  552. data/vendor/v8/tools/tickprocessor.js +22 -8
  553. data/vendor/v8/tools/visual_studio/v8_base.vcproj +13 -1
  554. data/vendor/v8/tools/visual_studio/v8_base_arm.vcproj +5 -1
  555. data/vendor/v8/tools/visual_studio/v8_base_x64.vcproj +5 -1
  556. data/vendor/v8/tools/visual_studio/x64.vsprops +1 -0
  557. metadata +1495 -1341
  558. data/ext/extconf.rb +0 -22
  559. data/ext/mustang.cpp +0 -58
  560. data/vendor/v8/src/top.h +0 -608
@@ -49,18 +49,19 @@ namespace internal {
49
49
  (entry(p0, p1, p2, p3, p4))
50
50
 
51
51
  typedef int (*arm_regexp_matcher)(String*, int, const byte*, const byte*,
52
- void*, int*, Address, int);
52
+ void*, int*, Address, int, Isolate*);
53
53
 
54
54
 
55
55
  // Call the generated regexp code directly. The code at the entry address
56
56
  // should act as a function matching the type arm_regexp_matcher.
57
57
  // The fifth argument is a dummy that reserves the space used for
58
58
  // the return address added by the ExitFrame in native calls.
59
- #define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6) \
60
- (FUNCTION_CAST<arm_regexp_matcher>(entry)(p0, p1, p2, p3, NULL, p4, p5, p6))
59
+ #define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7) \
60
+ (FUNCTION_CAST<arm_regexp_matcher>(entry)( \
61
+ p0, p1, p2, p3, NULL, p4, p5, p6, p7))
61
62
 
62
63
  #define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
63
- (reinterpret_cast<TryCatch*>(try_catch_address))
64
+ reinterpret_cast<TryCatch*>(try_catch_address)
64
65
 
65
66
  // The stack limit beyond which we will throw stack overflow errors in
66
67
  // generated code. Because generated code on arm uses the C stack, we
@@ -123,7 +124,7 @@ class CachePage {
123
124
 
124
125
  class Simulator {
125
126
  public:
126
- friend class Debugger;
127
+ friend class ArmDebugger;
127
128
  enum Register {
128
129
  no_reg = -1,
129
130
  r0 = 0, r1, r2, r3, r4, r5, r6, r7,
@@ -147,7 +148,7 @@ class Simulator {
147
148
 
148
149
  // The currently executing Simulator instance. Potentially there can be one
149
150
  // for each native thread.
150
- static Simulator* current();
151
+ static Simulator* current(v8::internal::Isolate* isolate);
151
152
 
152
153
  // Accessors for register state. Reading the pc value adheres to the ARM
153
154
  // architecture specification and is off by a 8 from the currently executing
@@ -191,7 +192,8 @@ class Simulator {
191
192
  uintptr_t PopAddress();
192
193
 
193
194
  // ICache checking.
194
- static void FlushICache(void* start, size_t size);
195
+ static void FlushICache(v8::internal::HashMap* i_cache, void* start,
196
+ size_t size);
195
197
 
196
198
  // Returns true if pc register contains one of the 'special_values' defined
197
199
  // below (bad_lr, end_sim_pc).
@@ -287,9 +289,10 @@ class Simulator {
287
289
  void InstructionDecode(Instruction* instr);
288
290
 
289
291
  // ICache.
290
- static void CheckICache(Instruction* instr);
291
- static void FlushOnePage(intptr_t start, int size);
292
- static CachePage* GetCachePage(void* page);
292
+ static void CheckICache(v8::internal::HashMap* i_cache, Instruction* instr);
293
+ static void FlushOnePage(v8::internal::HashMap* i_cache, intptr_t start,
294
+ int size);
295
+ static CachePage* GetCachePage(v8::internal::HashMap* i_cache, void* page);
293
296
 
294
297
  // Runtime call support.
295
298
  static void* RedirectExternalReference(
@@ -333,15 +336,16 @@ class Simulator {
333
336
  char* stack_;
334
337
  bool pc_modified_;
335
338
  int icount_;
336
- static bool initialized_;
337
339
 
338
340
  // Icache simulation
339
- static v8::internal::HashMap* i_cache_;
341
+ v8::internal::HashMap* i_cache_;
340
342
 
341
343
  // Registered breakpoints.
342
344
  Instruction* break_pc_;
343
345
  Instr break_instr_;
344
346
 
347
+ v8::internal::Isolate* isolate_;
348
+
345
349
  // A stop is watched if its code is less than kNumOfWatchedStops.
346
350
  // Only watched stops support enabling/disabling and the counter feature.
347
351
  static const uint32_t kNumOfWatchedStops = 256;
@@ -364,15 +368,16 @@ class Simulator {
364
368
  // When running with the simulator transition into simulated execution at this
365
369
  // point.
366
370
  #define CALL_GENERATED_CODE(entry, p0, p1, p2, p3, p4) \
367
- reinterpret_cast<Object*>(Simulator::current()->Call( \
371
+ reinterpret_cast<Object*>(Simulator::current(Isolate::Current())->Call( \
368
372
  FUNCTION_ADDR(entry), 5, p0, p1, p2, p3, p4))
369
373
 
370
- #define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6) \
371
- Simulator::current()->Call(entry, 8, p0, p1, p2, p3, NULL, p4, p5, p6)
374
+ #define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7) \
375
+ Simulator::current(Isolate::Current())->Call( \
376
+ entry, 9, p0, p1, p2, p3, NULL, p4, p5, p6, p7)
372
377
 
373
- #define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
374
- try_catch_address == \
375
- NULL ? NULL : *(reinterpret_cast<TryCatch**>(try_catch_address))
378
+ #define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
379
+ try_catch_address == NULL ? \
380
+ NULL : *(reinterpret_cast<TryCatch**>(try_catch_address))
376
381
 
377
382
 
378
383
  // The simulator has its own stack. Thus it has a different stack limit from
@@ -383,16 +388,16 @@ class Simulator {
383
388
  class SimulatorStack : public v8::internal::AllStatic {
384
389
  public:
385
390
  static inline uintptr_t JsLimitFromCLimit(uintptr_t c_limit) {
386
- return Simulator::current()->StackLimit();
391
+ return Simulator::current(Isolate::Current())->StackLimit();
387
392
  }
388
393
 
389
394
  static inline uintptr_t RegisterCTryCatch(uintptr_t try_catch_address) {
390
- Simulator* sim = Simulator::current();
395
+ Simulator* sim = Simulator::current(Isolate::Current());
391
396
  return sim->PushAddress(try_catch_address);
392
397
  }
393
398
 
394
399
  static inline void UnregisterCTryCatch() {
395
- Simulator::current()->PopAddress();
400
+ Simulator::current(Isolate::Current())->PopAddress();
396
401
  }
397
402
  };
398
403
 
@@ -39,15 +39,16 @@ namespace internal {
39
39
  #define __ ACCESS_MASM(masm)
40
40
 
41
41
 
42
- static void ProbeTable(MacroAssembler* masm,
42
+ static void ProbeTable(Isolate* isolate,
43
+ MacroAssembler* masm,
43
44
  Code::Flags flags,
44
45
  StubCache::Table table,
45
46
  Register name,
46
47
  Register offset,
47
48
  Register scratch,
48
49
  Register scratch2) {
49
- ExternalReference key_offset(SCTableReference::keyReference(table));
50
- ExternalReference value_offset(SCTableReference::valueReference(table));
50
+ ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
51
+ ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
51
52
 
52
53
  uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
53
54
  uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
@@ -101,8 +102,9 @@ static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
101
102
  Register scratch0,
102
103
  Register scratch1) {
103
104
  ASSERT(name->IsSymbol());
104
- __ IncrementCounter(&Counters::negative_lookups, 1, scratch0, scratch1);
105
- __ IncrementCounter(&Counters::negative_lookups_miss, 1, scratch0, scratch1);
105
+ Counters* counters = masm->isolate()->counters();
106
+ __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
107
+ __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
106
108
 
107
109
  Label done;
108
110
 
@@ -198,7 +200,7 @@ static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
198
200
  }
199
201
  }
200
202
  __ bind(&done);
201
- __ DecrementCounter(&Counters::negative_lookups_miss, 1, scratch0, scratch1);
203
+ __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
202
204
  }
203
205
 
204
206
 
@@ -209,6 +211,7 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
209
211
  Register scratch,
210
212
  Register extra,
211
213
  Register extra2) {
214
+ Isolate* isolate = masm->isolate();
212
215
  Label miss;
213
216
 
214
217
  // Make sure that code is valid. The shifting code relies on the
@@ -248,7 +251,7 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
248
251
  Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize));
249
252
 
250
253
  // Probe the primary table.
251
- ProbeTable(masm, flags, kPrimary, name, scratch, extra, extra2);
254
+ ProbeTable(isolate, masm, flags, kPrimary, name, scratch, extra, extra2);
252
255
 
253
256
  // Primary miss: Compute hash for secondary probe.
254
257
  __ sub(scratch, scratch, Operand(name));
@@ -258,7 +261,7 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
258
261
  Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize));
259
262
 
260
263
  // Probe the secondary table.
261
- ProbeTable(masm, flags, kSecondary, name, scratch, extra, extra2);
264
+ ProbeTable(isolate, masm, flags, kSecondary, name, scratch, extra, extra2);
262
265
 
263
266
  // Cache miss: Fall-through and let caller handle the miss by
264
267
  // entering the runtime system.
@@ -286,13 +289,15 @@ void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
286
289
 
287
290
  void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
288
291
  MacroAssembler* masm, int index, Register prototype, Label* miss) {
292
+ Isolate* isolate = masm->isolate();
289
293
  // Check we're still in the same context.
290
294
  __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
291
- __ Move(ip, Top::global());
295
+ __ Move(ip, isolate->global());
292
296
  __ cmp(prototype, ip);
293
297
  __ b(ne, miss);
294
298
  // Get the global function with the given index.
295
- JSFunction* function = JSFunction::cast(Top::global_context()->get(index));
299
+ JSFunction* function =
300
+ JSFunction::cast(isolate->global_context()->get(index));
296
301
  // Load its initial map. The global functions all have initial maps.
297
302
  __ Move(prototype, Handle<Map>(function->initial_map()));
298
303
  // Load the prototype from the initial map.
@@ -450,8 +455,10 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
450
455
  __ mov(r2, Operand(Handle<Map>(transition)));
451
456
  __ Push(r2, r0);
452
457
  __ TailCallExternalReference(
453
- ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage)),
454
- 3, 1);
458
+ ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
459
+ masm->isolate()),
460
+ 3,
461
+ 1);
455
462
  return;
456
463
  }
457
464
 
@@ -505,9 +512,9 @@ void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
505
512
  ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
506
513
  Code* code = NULL;
507
514
  if (kind == Code::LOAD_IC) {
508
- code = Builtins::builtin(Builtins::LoadIC_Miss);
515
+ code = masm->isolate()->builtins()->builtin(Builtins::kLoadIC_Miss);
509
516
  } else {
510
- code = Builtins::builtin(Builtins::KeyedLoadIC_Miss);
517
+ code = masm->isolate()->builtins()->builtin(Builtins::kKeyedLoadIC_Miss);
511
518
  }
512
519
 
513
520
  Handle<Code> ic(code);
@@ -548,7 +555,7 @@ static void PushInterceptorArguments(MacroAssembler* masm,
548
555
  JSObject* holder_obj) {
549
556
  __ push(name);
550
557
  InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
551
- ASSERT(!Heap::InNewSpace(interceptor));
558
+ ASSERT(!masm->isolate()->heap()->InNewSpace(interceptor));
552
559
  Register scratch = name;
553
560
  __ mov(scratch, Operand(Handle<Object>(interceptor)));
554
561
  __ push(scratch);
@@ -567,7 +574,8 @@ static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
567
574
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
568
575
 
569
576
  ExternalReference ref =
570
- ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly));
577
+ ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
578
+ masm->isolate());
571
579
  __ mov(r0, Operand(5));
572
580
  __ mov(r1, Operand(ref));
573
581
 
@@ -616,7 +624,7 @@ static MaybeObject* GenerateFastApiDirectCall(MacroAssembler* masm,
616
624
  // Pass the additional arguments FastHandleApiCall expects.
617
625
  Object* call_data = optimization.api_call_info()->data();
618
626
  Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info());
619
- if (Heap::InNewSpace(call_data)) {
627
+ if (masm->isolate()->heap()->InNewSpace(call_data)) {
620
628
  __ Move(r0, api_call_info_handle);
621
629
  __ ldr(r6, FieldMemOperand(r0, CallHandlerInfo::kDataOffset));
622
630
  } else {
@@ -655,12 +663,11 @@ static MaybeObject* GenerateFastApiDirectCall(MacroAssembler* masm,
655
663
  // already generated). Do not allow the assembler to perform a
656
664
  // garbage collection but instead return the allocation failure
657
665
  // object.
658
- MaybeObject* result = masm->TryCallApiFunctionAndReturn(
659
- &fun, argc + kFastApiCallArguments + 1);
660
- if (result->IsFailure()) {
661
- return result;
662
- }
663
- return Heap::undefined_value();
666
+ const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
667
+ ExternalReference ref = ExternalReference(&fun,
668
+ ExternalReference::DIRECT_API_CALL,
669
+ masm->isolate());
670
+ return masm->TryCallApiFunctionAndReturn(ref, kStackUnwindSpace);
664
671
  }
665
672
 
666
673
  class CallInterceptorCompiler BASE_EMBEDDED {
@@ -712,7 +719,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
712
719
  name,
713
720
  holder,
714
721
  miss);
715
- return Heap::undefined_value();
722
+ return masm->isolate()->heap()->undefined_value();
716
723
  }
717
724
  }
718
725
 
@@ -731,6 +738,8 @@ class CallInterceptorCompiler BASE_EMBEDDED {
731
738
  ASSERT(optimization.is_constant_call());
732
739
  ASSERT(!lookup->holder()->IsGlobalObject());
733
740
 
741
+ Counters* counters = masm->isolate()->counters();
742
+
734
743
  int depth1 = kInvalidProtoDepth;
735
744
  int depth2 = kInvalidProtoDepth;
736
745
  bool can_do_fast_api_call = false;
@@ -748,11 +757,11 @@ class CallInterceptorCompiler BASE_EMBEDDED {
748
757
  (depth2 != kInvalidProtoDepth);
749
758
  }
750
759
 
751
- __ IncrementCounter(&Counters::call_const_interceptor, 1,
760
+ __ IncrementCounter(counters->call_const_interceptor(), 1,
752
761
  scratch1, scratch2);
753
762
 
754
763
  if (can_do_fast_api_call) {
755
- __ IncrementCounter(&Counters::call_const_interceptor_fast_api, 1,
764
+ __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1,
756
765
  scratch1, scratch2);
757
766
  ReserveSpaceForFastApiCall(masm, scratch1);
758
767
  }
@@ -813,7 +822,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
813
822
  FreeSpaceForFastApiCall(masm);
814
823
  }
815
824
 
816
- return Heap::undefined_value();
825
+ return masm->isolate()->heap()->undefined_value();
817
826
  }
818
827
 
819
828
  void CompileRegular(MacroAssembler* masm,
@@ -842,9 +851,9 @@ class CallInterceptorCompiler BASE_EMBEDDED {
842
851
  interceptor_holder);
843
852
 
844
853
  __ CallExternalReference(
845
- ExternalReference(
846
- IC_Utility(IC::kLoadPropertyWithInterceptorForCall)),
847
- 5);
854
+ ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
855
+ masm->isolate()),
856
+ 5);
848
857
 
849
858
  // Restore the name_ register.
850
859
  __ pop(name_);
@@ -1082,7 +1091,7 @@ Register StubCompiler::CheckPrototypes(JSObject* object,
1082
1091
  !current->IsJSGlobalObject() &&
1083
1092
  !current->IsJSGlobalProxy()) {
1084
1093
  if (!name->IsSymbol()) {
1085
- MaybeObject* maybe_lookup_result = Heap::LookupSymbol(name);
1094
+ MaybeObject* maybe_lookup_result = heap()->LookupSymbol(name);
1086
1095
  Object* lookup_result = NULL; // Initialization to please compiler.
1087
1096
  if (!maybe_lookup_result->ToObject(&lookup_result)) {
1088
1097
  set_failure(Failure::cast(maybe_lookup_result));
@@ -1102,7 +1111,7 @@ Register StubCompiler::CheckPrototypes(JSObject* object,
1102
1111
  __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
1103
1112
  reg = holder_reg; // from now the object is in holder_reg
1104
1113
  __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
1105
- } else if (Heap::InNewSpace(prototype)) {
1114
+ } else if (heap()->InNewSpace(prototype)) {
1106
1115
  // Get the map of the current object.
1107
1116
  __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
1108
1117
  __ cmp(scratch1, Operand(Handle<Map>(current->map())));
@@ -1156,7 +1165,7 @@ Register StubCompiler::CheckPrototypes(JSObject* object,
1156
1165
  __ b(ne, miss);
1157
1166
 
1158
1167
  // Log the check depth.
1159
- LOG(IntEvent("check-maps-depth", depth + 1));
1168
+ LOG(masm()->isolate(), IntEvent("check-maps-depth", depth + 1));
1160
1169
 
1161
1170
  // Perform security check for access to the global object.
1162
1171
  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
@@ -1245,18 +1254,40 @@ MaybeObject* StubCompiler::GenerateLoadCallback(JSObject* object,
1245
1254
  CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3,
1246
1255
  name, miss);
1247
1256
 
1248
- // Push the arguments on the JS stack of the caller.
1249
- __ push(receiver); // Receiver.
1250
- __ mov(scratch3, Operand(Handle<AccessorInfo>(callback))); // callback data
1251
- __ ldr(ip, FieldMemOperand(scratch3, AccessorInfo::kDataOffset));
1252
- __ Push(reg, ip, scratch3, name_reg);
1257
+ // Build AccessorInfo::args_ list on the stack and push property name below
1258
+ // the exit frame to make GC aware of them and store pointers to them.
1259
+ __ push(receiver);
1260
+ __ mov(scratch2, sp); // scratch2 = AccessorInfo::args_
1261
+ Handle<AccessorInfo> callback_handle(callback);
1262
+ if (heap()->InNewSpace(callback_handle->data())) {
1263
+ __ Move(scratch3, callback_handle);
1264
+ __ ldr(scratch3, FieldMemOperand(scratch3, AccessorInfo::kDataOffset));
1265
+ } else {
1266
+ __ Move(scratch3, Handle<Object>(callback_handle->data()));
1267
+ }
1268
+ __ Push(reg, scratch3, name_reg);
1269
+ __ mov(r0, sp); // r0 = Handle<String>
1270
+
1271
+ Address getter_address = v8::ToCData<Address>(callback->getter());
1272
+ ApiFunction fun(getter_address);
1253
1273
 
1254
- // Do tail-call to the runtime system.
1255
- ExternalReference load_callback_property =
1256
- ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
1257
- __ TailCallExternalReference(load_callback_property, 5, 1);
1274
+ const int kApiStackSpace = 1;
1275
+ __ EnterExitFrame(false, kApiStackSpace);
1276
+ // Create AccessorInfo instance on the stack above the exit frame with
1277
+ // scratch2 (internal::Object **args_) as the data.
1278
+ __ str(scratch2, MemOperand(sp, 1 * kPointerSize));
1279
+ __ add(r1, sp, Operand(1 * kPointerSize)); // r1 = AccessorInfo&
1258
1280
 
1259
- return Heap::undefined_value(); // Success.
1281
+ // Emitting a stub call may try to allocate (if the code is not
1282
+ // already generated). Do not allow the assembler to perform a
1283
+ // garbage collection but instead return the allocation failure
1284
+ // object.
1285
+ const int kStackUnwindSpace = 4;
1286
+ ExternalReference ref =
1287
+ ExternalReference(&fun,
1288
+ ExternalReference::DIRECT_GETTER_CALL,
1289
+ masm()->isolate());
1290
+ return masm()->TryCallApiFunctionAndReturn(ref, kStackUnwindSpace);
1260
1291
  }
1261
1292
 
1262
1293
 
@@ -1384,7 +1415,8 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object,
1384
1415
  }
1385
1416
 
1386
1417
  ExternalReference ref =
1387
- ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
1418
+ ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
1419
+ masm()->isolate());
1388
1420
  __ TailCallExternalReference(ref, 5, 1);
1389
1421
  }
1390
1422
  } else { // !compile_followup_inline
@@ -1396,8 +1428,9 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object,
1396
1428
  PushInterceptorArguments(masm(), receiver, holder_reg,
1397
1429
  name_reg, interceptor_holder);
1398
1430
 
1399
- ExternalReference ref = ExternalReference(
1400
- IC_Utility(IC::kLoadPropertyWithInterceptorForLoad));
1431
+ ExternalReference ref =
1432
+ ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
1433
+ masm()->isolate());
1401
1434
  __ TailCallExternalReference(ref, 5, 1);
1402
1435
  }
1403
1436
  }
@@ -1444,7 +1477,7 @@ void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
1444
1477
  __ ldr(r1, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));
1445
1478
 
1446
1479
  // Check that the cell contains the same function.
1447
- if (Heap::InNewSpace(function)) {
1480
+ if (heap()->InNewSpace(function)) {
1448
1481
  // We can't embed a pointer to a function in new space so we have
1449
1482
  // to verify that the shared function info is unchanged. This has
1450
1483
  // the nice side effect that multiple closures based on the same
@@ -1468,8 +1501,8 @@ void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
1468
1501
 
1469
1502
 
1470
1503
  MaybeObject* CallStubCompiler::GenerateMissBranch() {
1471
- MaybeObject* maybe_obj = StubCache::ComputeCallMiss(arguments().immediate(),
1472
- kind_);
1504
+ MaybeObject* maybe_obj = masm()->isolate()->stub_cache()->ComputeCallMiss(
1505
+ arguments().immediate(), kind_);
1473
1506
  Object* obj;
1474
1507
  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1475
1508
  __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
@@ -1505,10 +1538,8 @@ MaybeObject* CallStubCompiler::CompileCallField(JSObject* object,
1505
1538
 
1506
1539
  // Handle call cache miss.
1507
1540
  __ bind(&miss);
1508
- Object* obj;
1509
- { MaybeObject* maybe_obj = GenerateMissBranch();
1510
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1511
- }
1541
+ MaybeObject* maybe_result = GenerateMissBranch();
1542
+ if (maybe_result->IsFailure()) return maybe_result;
1512
1543
 
1513
1544
  // Return the generated code.
1514
1545
  return GetCode(FIELD, name);
@@ -1529,7 +1560,7 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
1529
1560
  // -----------------------------------
1530
1561
 
1531
1562
  // If object is not an array, bail out to regular call.
1532
- if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value();
1563
+ if (!object->IsJSArray() || cell != NULL) return heap()->undefined_value();
1533
1564
 
1534
1565
  Label miss;
1535
1566
 
@@ -1615,10 +1646,11 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
1615
1646
  __ b(&call_builtin);
1616
1647
  }
1617
1648
 
1649
+ Isolate* isolate = masm()->isolate();
1618
1650
  ExternalReference new_space_allocation_top =
1619
- ExternalReference::new_space_allocation_top_address();
1651
+ ExternalReference::new_space_allocation_top_address(isolate);
1620
1652
  ExternalReference new_space_allocation_limit =
1621
- ExternalReference::new_space_allocation_limit_address();
1653
+ ExternalReference::new_space_allocation_limit_address(isolate);
1622
1654
 
1623
1655
  const int kAllocationDelta = 4;
1624
1656
  // Load top and check if it is the end of elements.
@@ -1658,17 +1690,16 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
1658
1690
  __ Ret();
1659
1691
  }
1660
1692
  __ bind(&call_builtin);
1661
- __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush),
1693
+ __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,
1694
+ masm()->isolate()),
1662
1695
  argc + 1,
1663
1696
  1);
1664
1697
  }
1665
1698
 
1666
1699
  // Handle call cache miss.
1667
1700
  __ bind(&miss);
1668
- Object* obj;
1669
- { MaybeObject* maybe_obj = GenerateMissBranch();
1670
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1671
- }
1701
+ MaybeObject* maybe_result = GenerateMissBranch();
1702
+ if (maybe_result->IsFailure()) return maybe_result;
1672
1703
 
1673
1704
  // Return the generated code.
1674
1705
  return GetCode(function);
@@ -1689,7 +1720,7 @@ MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object,
1689
1720
  // -----------------------------------
1690
1721
 
1691
1722
  // If object is not an array, bail out to regular call.
1692
- if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value();
1723
+ if (!object->IsJSArray() || cell != NULL) return heap()->undefined_value();
1693
1724
 
1694
1725
  Label miss, return_undefined, call_builtin;
1695
1726
 
@@ -1745,16 +1776,15 @@ MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object,
1745
1776
  __ Ret();
1746
1777
 
1747
1778
  __ bind(&call_builtin);
1748
- __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop),
1779
+ __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop,
1780
+ masm()->isolate()),
1749
1781
  argc + 1,
1750
1782
  1);
1751
1783
 
1752
1784
  // Handle call cache miss.
1753
1785
  __ bind(&miss);
1754
- Object* obj;
1755
- { MaybeObject* maybe_obj = GenerateMissBranch();
1756
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1757
- }
1786
+ MaybeObject* maybe_result = GenerateMissBranch();
1787
+ if (maybe_result->IsFailure()) return maybe_result;
1758
1788
 
1759
1789
  // Return the generated code.
1760
1790
  return GetCode(function);
@@ -1776,7 +1806,7 @@ MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
1776
1806
  // -----------------------------------
1777
1807
 
1778
1808
  // If object is not a string, bail out to regular call.
1779
- if (!object->IsString() || cell != NULL) return Heap::undefined_value();
1809
+ if (!object->IsString() || cell != NULL) return heap()->undefined_value();
1780
1810
 
1781
1811
  const int argc = arguments().immediate();
1782
1812
 
@@ -1837,10 +1867,8 @@ MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
1837
1867
  // Restore function name in r2.
1838
1868
  __ Move(r2, Handle<String>(name));
1839
1869
  __ bind(&name_miss);
1840
- Object* obj;
1841
- { MaybeObject* maybe_obj = GenerateMissBranch();
1842
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1843
- }
1870
+ MaybeObject* maybe_result = GenerateMissBranch();
1871
+ if (maybe_result->IsFailure()) return maybe_result;
1844
1872
 
1845
1873
  // Return the generated code.
1846
1874
  return GetCode(function);
@@ -1862,7 +1890,7 @@ MaybeObject* CallStubCompiler::CompileStringCharAtCall(
1862
1890
  // -----------------------------------
1863
1891
 
1864
1892
  // If object is not a string, bail out to regular call.
1865
- if (!object->IsString() || cell != NULL) return Heap::undefined_value();
1893
+ if (!object->IsString() || cell != NULL) return heap()->undefined_value();
1866
1894
 
1867
1895
  const int argc = arguments().immediate();
1868
1896
 
@@ -1925,10 +1953,8 @@ MaybeObject* CallStubCompiler::CompileStringCharAtCall(
1925
1953
  // Restore function name in r2.
1926
1954
  __ Move(r2, Handle<String>(name));
1927
1955
  __ bind(&name_miss);
1928
- Object* obj;
1929
- { MaybeObject* maybe_obj = GenerateMissBranch();
1930
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1931
- }
1956
+ MaybeObject* maybe_result = GenerateMissBranch();
1957
+ if (maybe_result->IsFailure()) return maybe_result;
1932
1958
 
1933
1959
  // Return the generated code.
1934
1960
  return GetCode(function);
@@ -1953,7 +1979,7 @@ MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall(
1953
1979
 
1954
1980
  // If the object is not a JSObject or we got an unexpected number of
1955
1981
  // arguments, bail out to the regular call.
1956
- if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();
1982
+ if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
1957
1983
 
1958
1984
  Label miss;
1959
1985
  GenerateNameCheck(name, &miss);
@@ -2001,10 +2027,8 @@ MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall(
2001
2027
 
2002
2028
  __ bind(&miss);
2003
2029
  // r2: function name.
2004
- Object* obj;
2005
- { MaybeObject* maybe_obj = GenerateMissBranch();
2006
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
2007
- }
2030
+ MaybeObject* maybe_result = GenerateMissBranch();
2031
+ if (maybe_result->IsFailure()) return maybe_result;
2008
2032
 
2009
2033
  // Return the generated code.
2010
2034
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
@@ -2024,14 +2048,17 @@ MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object,
2024
2048
  // -- sp[argc * 4] : receiver
2025
2049
  // -----------------------------------
2026
2050
 
2027
- if (!CpuFeatures::IsSupported(VFP3)) return Heap::undefined_value();
2051
+ if (!CpuFeatures::IsSupported(VFP3)) {
2052
+ return heap()->undefined_value();
2053
+ }
2054
+
2028
2055
  CpuFeatures::Scope scope_vfp3(VFP3);
2029
2056
 
2030
2057
  const int argc = arguments().immediate();
2031
2058
 
2032
2059
  // If the object is not a JSObject or we got an unexpected number of
2033
2060
  // arguments, bail out to the regular call.
2034
- if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();
2061
+ if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
2035
2062
 
2036
2063
  Label miss, slow;
2037
2064
  GenerateNameCheck(name, &miss);
@@ -2148,8 +2175,8 @@ MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object,
2148
2175
 
2149
2176
  __ bind(&miss);
2150
2177
  // r2: function name.
2151
- MaybeObject* obj = GenerateMissBranch();
2152
- if (obj->IsFailure()) return obj;
2178
+ MaybeObject* maybe_result = GenerateMissBranch();
2179
+ if (maybe_result->IsFailure()) return maybe_result;
2153
2180
 
2154
2181
  // Return the generated code.
2155
2182
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
@@ -2173,7 +2200,7 @@ MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
2173
2200
 
2174
2201
  // If the object is not a JSObject or we got an unexpected number of
2175
2202
  // arguments, bail out to the regular call.
2176
- if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();
2203
+ if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
2177
2204
 
2178
2205
  Label miss;
2179
2206
  GenerateNameCheck(name, &miss);
@@ -2250,16 +2277,68 @@ MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
2250
2277
 
2251
2278
  __ bind(&miss);
2252
2279
  // r2: function name.
2253
- Object* obj;
2254
- { MaybeObject* maybe_obj = GenerateMissBranch();
2255
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
2256
- }
2280
+ MaybeObject* maybe_result = GenerateMissBranch();
2281
+ if (maybe_result->IsFailure()) return maybe_result;
2257
2282
 
2258
2283
  // Return the generated code.
2259
2284
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
2260
2285
  }
2261
2286
 
2262
2287
 
2288
+ MaybeObject* CallStubCompiler::CompileFastApiCall(
2289
+ const CallOptimization& optimization,
2290
+ Object* object,
2291
+ JSObject* holder,
2292
+ JSGlobalPropertyCell* cell,
2293
+ JSFunction* function,
2294
+ String* name) {
2295
+ Counters* counters = isolate()->counters();
2296
+
2297
+ ASSERT(optimization.is_simple_api_call());
2298
+ // Bail out if object is a global object as we don't want to
2299
+ // repatch it to global receiver.
2300
+ if (object->IsGlobalObject()) return heap()->undefined_value();
2301
+ if (cell != NULL) return heap()->undefined_value();
2302
+ int depth = optimization.GetPrototypeDepthOfExpectedType(
2303
+ JSObject::cast(object), holder);
2304
+ if (depth == kInvalidProtoDepth) return heap()->undefined_value();
2305
+
2306
+ Label miss, miss_before_stack_reserved;
2307
+
2308
+ GenerateNameCheck(name, &miss_before_stack_reserved);
2309
+
2310
+ // Get the receiver from the stack.
2311
+ const int argc = arguments().immediate();
2312
+ __ ldr(r1, MemOperand(sp, argc * kPointerSize));
2313
+
2314
+ // Check that the receiver isn't a smi.
2315
+ __ tst(r1, Operand(kSmiTagMask));
2316
+ __ b(eq, &miss_before_stack_reserved);
2317
+
2318
+ __ IncrementCounter(counters->call_const(), 1, r0, r3);
2319
+ __ IncrementCounter(counters->call_const_fast_api(), 1, r0, r3);
2320
+
2321
+ ReserveSpaceForFastApiCall(masm(), r0);
2322
+
2323
+ // Check that the maps haven't changed and find a Holder as a side effect.
2324
+ CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
2325
+ depth, &miss);
2326
+
2327
+ MaybeObject* result = GenerateFastApiDirectCall(masm(), optimization, argc);
2328
+ if (result->IsFailure()) return result;
2329
+
2330
+ __ bind(&miss);
2331
+ FreeSpaceForFastApiCall(masm());
2332
+
2333
+ __ bind(&miss_before_stack_reserved);
2334
+ MaybeObject* maybe_result = GenerateMissBranch();
2335
+ if (maybe_result->IsFailure()) return maybe_result;
2336
+
2337
+ // Return the generated code.
2338
+ return GetCode(function);
2339
+ }
2340
+
2341
+
2263
2342
  MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
2264
2343
  JSObject* holder,
2265
2344
  JSFunction* function,
@@ -2269,22 +2348,18 @@ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
2269
2348
  // -- r2 : name
2270
2349
  // -- lr : return address
2271
2350
  // -----------------------------------
2272
- SharedFunctionInfo* function_info = function->shared();
2273
- if (function_info->HasBuiltinFunctionId()) {
2274
- BuiltinFunctionId id = function_info->builtin_function_id();
2351
+ if (HasCustomCallGenerator(function)) {
2275
2352
  MaybeObject* maybe_result = CompileCustomCall(
2276
- id, object, holder, NULL, function, name);
2353
+ object, holder, NULL, function, name);
2277
2354
  Object* result;
2278
2355
  if (!maybe_result->ToObject(&result)) return maybe_result;
2279
2356
  // undefined means bail out to regular compiler.
2280
- if (!result->IsUndefined()) {
2281
- return result;
2282
- }
2357
+ if (!result->IsUndefined()) return result;
2283
2358
  }
2284
2359
 
2285
- Label miss_in_smi_check;
2360
+ Label miss;
2286
2361
 
2287
- GenerateNameCheck(name, &miss_in_smi_check);
2362
+ GenerateNameCheck(name, &miss);
2288
2363
 
2289
2364
  // Get the receiver from the stack
2290
2365
  const int argc = arguments().immediate();
@@ -2293,39 +2368,26 @@ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
2293
2368
  // Check that the receiver isn't a smi.
2294
2369
  if (check != NUMBER_CHECK) {
2295
2370
  __ tst(r1, Operand(kSmiTagMask));
2296
- __ b(eq, &miss_in_smi_check);
2371
+ __ b(eq, &miss);
2297
2372
  }
2298
2373
 
2299
2374
  // Make sure that it's okay not to patch the on stack receiver
2300
2375
  // unless we're doing a receiver map check.
2301
2376
  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
2302
2377
 
2303
- CallOptimization optimization(function);
2304
- int depth = kInvalidProtoDepth;
2305
- Label miss;
2306
-
2378
+ SharedFunctionInfo* function_info = function->shared();
2307
2379
  switch (check) {
2308
2380
  case RECEIVER_MAP_CHECK:
2309
- __ IncrementCounter(&Counters::call_const, 1, r0, r3);
2310
-
2311
- if (optimization.is_simple_api_call() && !object->IsGlobalObject()) {
2312
- depth = optimization.GetPrototypeDepthOfExpectedType(
2313
- JSObject::cast(object), holder);
2314
- }
2315
-
2316
- if (depth != kInvalidProtoDepth) {
2317
- __ IncrementCounter(&Counters::call_const_fast_api, 1, r0, r3);
2318
- ReserveSpaceForFastApiCall(masm(), r0);
2319
- }
2381
+ __ IncrementCounter(masm()->isolate()->counters()->call_const(),
2382
+ 1, r0, r3);
2320
2383
 
2321
2384
  // Check that the maps haven't changed.
2322
2385
  CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
2323
- depth, &miss);
2386
+ &miss);
2324
2387
 
2325
2388
  // Patch the receiver on the stack with the global proxy if
2326
2389
  // necessary.
2327
2390
  if (object->IsGlobalObject()) {
2328
- ASSERT(depth == kInvalidProtoDepth);
2329
2391
  __ ldr(r3, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
2330
2392
  __ str(r3, MemOperand(sp, argc * kPointerSize));
2331
2393
  }
@@ -2398,24 +2460,12 @@ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
2398
2460
  UNREACHABLE();
2399
2461
  }
2400
2462
 
2401
- if (depth != kInvalidProtoDepth) {
2402
- MaybeObject* result = GenerateFastApiDirectCall(masm(), optimization, argc);
2403
- if (result->IsFailure()) return result;
2404
- } else {
2405
- __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
2406
- }
2463
+ __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
2407
2464
 
2408
2465
  // Handle call cache miss.
2409
2466
  __ bind(&miss);
2410
- if (depth != kInvalidProtoDepth) {
2411
- FreeSpaceForFastApiCall(masm());
2412
- }
2413
-
2414
- __ bind(&miss_in_smi_check);
2415
- Object* obj;
2416
- { MaybeObject* maybe_obj = GenerateMissBranch();
2417
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
2418
- }
2467
+ MaybeObject* maybe_result = GenerateMissBranch();
2468
+ if (maybe_result->IsFailure()) return maybe_result;
2419
2469
 
2420
2470
  // Return the generated code.
2421
2471
  return GetCode(function);
@@ -2467,10 +2517,8 @@ MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
2467
2517
 
2468
2518
  // Handle call cache miss.
2469
2519
  __ bind(&miss);
2470
- Object* obj;
2471
- { MaybeObject* maybe_obj = GenerateMissBranch();
2472
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
2473
- }
2520
+ MaybeObject* maybe_result = GenerateMissBranch();
2521
+ if (maybe_result->IsFailure()) return maybe_result;
2474
2522
 
2475
2523
  // Return the generated code.
2476
2524
  return GetCode(INTERCEPTOR, name);
@@ -2487,11 +2535,9 @@ MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
2487
2535
  // -- lr : return address
2488
2536
  // -----------------------------------
2489
2537
 
2490
- SharedFunctionInfo* function_info = function->shared();
2491
- if (function_info->HasBuiltinFunctionId()) {
2492
- BuiltinFunctionId id = function_info->builtin_function_id();
2538
+ if (HasCustomCallGenerator(function)) {
2493
2539
  MaybeObject* maybe_result = CompileCustomCall(
2494
- id, object, holder, cell, function, name);
2540
+ object, holder, cell, function, name);
2495
2541
  Object* result;
2496
2542
  if (!maybe_result->ToObject(&result)) return maybe_result;
2497
2543
  // undefined means bail out to regular compiler.
@@ -2520,7 +2566,8 @@ MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
2520
2566
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
2521
2567
 
2522
2568
  // Jump to the cached code (tail call).
2523
- __ IncrementCounter(&Counters::call_global_inline, 1, r3, r4);
2569
+ Counters* counters = masm()->isolate()->counters();
2570
+ __ IncrementCounter(counters->call_global_inline(), 1, r3, r4);
2524
2571
  ASSERT(function->is_compiled());
2525
2572
  Handle<Code> code(function->code());
2526
2573
  ParameterCount expected(function->shared()->formal_parameter_count());
@@ -2537,11 +2584,9 @@ MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
2537
2584
 
2538
2585
  // Handle call cache miss.
2539
2586
  __ bind(&miss);
2540
- __ IncrementCounter(&Counters::call_global_inline_miss, 1, r1, r3);
2541
- Object* obj;
2542
- { MaybeObject* maybe_obj = GenerateMissBranch();
2543
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
2544
- }
2587
+ __ IncrementCounter(counters->call_global_inline_miss(), 1, r1, r3);
2588
+ MaybeObject* maybe_result = GenerateMissBranch();
2589
+ if (maybe_result->IsFailure()) return maybe_result;
2545
2590
 
2546
2591
  // Return the generated code.
2547
2592
  return GetCode(NORMAL, name);
@@ -2567,7 +2612,7 @@ MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
2567
2612
  r1, r2, r3,
2568
2613
  &miss);
2569
2614
  __ bind(&miss);
2570
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
2615
+ Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2571
2616
  __ Jump(ic, RelocInfo::CODE_TARGET);
2572
2617
 
2573
2618
  // Return the generated code.
@@ -2610,12 +2655,13 @@ MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
2610
2655
 
2611
2656
  // Do tail-call to the runtime system.
2612
2657
  ExternalReference store_callback_property =
2613
- ExternalReference(IC_Utility(IC::kStoreCallbackProperty));
2658
+ ExternalReference(IC_Utility(IC::kStoreCallbackProperty),
2659
+ masm()->isolate());
2614
2660
  __ TailCallExternalReference(store_callback_property, 4, 1);
2615
2661
 
2616
2662
  // Handle store cache miss.
2617
2663
  __ bind(&miss);
2618
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
2664
+ Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2619
2665
  __ Jump(ic, RelocInfo::CODE_TARGET);
2620
2666
 
2621
2667
  // Return the generated code.
@@ -2653,14 +2699,18 @@ MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
2653
2699
 
2654
2700
  __ Push(r1, r2, r0); // Receiver, name, value.
2655
2701
 
2702
+ __ mov(r0, Operand(Smi::FromInt(strict_mode_)));
2703
+ __ push(r0); // strict mode
2704
+
2656
2705
  // Do tail-call to the runtime system.
2657
2706
  ExternalReference store_ic_property =
2658
- ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
2659
- __ TailCallExternalReference(store_ic_property, 3, 1);
2707
+ ExternalReference(IC_Utility(IC::kStoreInterceptorProperty),
2708
+ masm()->isolate());
2709
+ __ TailCallExternalReference(store_ic_property, 4, 1);
2660
2710
 
2661
2711
  // Handle store cache miss.
2662
2712
  __ bind(&miss);
2663
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
2713
+ Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2664
2714
  __ Jump(ic, RelocInfo::CODE_TARGET);
2665
2715
 
2666
2716
  // Return the generated code.
@@ -2697,13 +2747,14 @@ MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
2697
2747
  // Store the value in the cell.
2698
2748
  __ str(r0, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset));
2699
2749
 
2700
- __ IncrementCounter(&Counters::named_store_global_inline, 1, r4, r3);
2750
+ Counters* counters = masm()->isolate()->counters();
2751
+ __ IncrementCounter(counters->named_store_global_inline(), 1, r4, r3);
2701
2752
  __ Ret();
2702
2753
 
2703
2754
  // Handle store cache miss.
2704
2755
  __ bind(&miss);
2705
- __ IncrementCounter(&Counters::named_store_global_inline_miss, 1, r4, r3);
2706
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
2756
+ __ IncrementCounter(counters->named_store_global_inline_miss(), 1, r4, r3);
2757
+ Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2707
2758
  __ Jump(ic, RelocInfo::CODE_TARGET);
2708
2759
 
2709
2760
  // Return the generated code.
@@ -2750,7 +2801,7 @@ MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
2750
2801
  GenerateLoadMiss(masm(), Code::LOAD_IC);
2751
2802
 
2752
2803
  // Return the generated code.
2753
- return GetCode(NONEXISTENT, Heap::empty_string());
2804
+ return GetCode(NONEXISTENT, heap()->empty_string());
2754
2805
  }
2755
2806
 
2756
2807
 
@@ -2885,11 +2936,12 @@ MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
2885
2936
  }
2886
2937
 
2887
2938
  __ mov(r0, r4);
2888
- __ IncrementCounter(&Counters::named_load_global_stub, 1, r1, r3);
2939
+ Counters* counters = masm()->isolate()->counters();
2940
+ __ IncrementCounter(counters->named_load_global_stub(), 1, r1, r3);
2889
2941
  __ Ret();
2890
2942
 
2891
2943
  __ bind(&miss);
2892
- __ IncrementCounter(&Counters::named_load_global_stub_miss, 1, r1, r3);
2944
+ __ IncrementCounter(counters->named_load_global_stub_miss(), 1, r1, r3);
2893
2945
  GenerateLoadMiss(masm(), Code::LOAD_IC);
2894
2946
 
2895
2947
  // Return the generated code.
@@ -3034,7 +3086,9 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
3034
3086
  // -- r1 : receiver
3035
3087
  // -----------------------------------
3036
3088
  Label miss;
3037
- __ IncrementCounter(&Counters::keyed_load_string_length, 1, r2, r3);
3089
+
3090
+ Counters* counters = masm()->isolate()->counters();
3091
+ __ IncrementCounter(counters->keyed_load_string_length(), 1, r2, r3);
3038
3092
 
3039
3093
  // Check the key is the cached one.
3040
3094
  __ cmp(r0, Operand(Handle<String>(name)));
@@ -3042,7 +3096,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
3042
3096
 
3043
3097
  GenerateLoadStringLength(masm(), r1, r2, r3, &miss, true);
3044
3098
  __ bind(&miss);
3045
- __ DecrementCounter(&Counters::keyed_load_string_length, 1, r2, r3);
3099
+ __ DecrementCounter(counters->keyed_load_string_length(), 1, r2, r3);
3046
3100
 
3047
3101
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3048
3102
 
@@ -3058,7 +3112,8 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
3058
3112
  // -----------------------------------
3059
3113
  Label miss;
3060
3114
 
3061
- __ IncrementCounter(&Counters::keyed_load_function_prototype, 1, r2, r3);
3115
+ Counters* counters = masm()->isolate()->counters();
3116
+ __ IncrementCounter(counters->keyed_load_function_prototype(), 1, r2, r3);
3062
3117
 
3063
3118
  // Check the name hasn't changed.
3064
3119
  __ cmp(r0, Operand(Handle<String>(name)));
@@ -3066,7 +3121,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
3066
3121
 
3067
3122
  GenerateLoadFunctionPrototype(masm(), r1, r2, r3, &miss);
3068
3123
  __ bind(&miss);
3069
- __ DecrementCounter(&Counters::keyed_load_function_prototype, 1, r2, r3);
3124
+ __ DecrementCounter(counters->keyed_load_function_prototype(), 1, r2, r3);
3070
3125
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3071
3126
 
3072
3127
  return GetCode(CALLBACKS, name);
@@ -3122,38 +3177,6 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) {
3122
3177
  }
3123
3178
 
3124
3179
 
3125
- MaybeObject* KeyedLoadStubCompiler::CompileLoadPixelArray(JSObject* receiver) {
3126
- // ----------- S t a t e -------------
3127
- // -- lr : return address
3128
- // -- r0 : key
3129
- // -- r1 : receiver
3130
- // -----------------------------------
3131
- Label miss;
3132
-
3133
- // Check that the map matches.
3134
- __ CheckMap(r1, r2, Handle<Map>(receiver->map()), &miss, false);
3135
-
3136
- GenerateFastPixelArrayLoad(masm(),
3137
- r1,
3138
- r0,
3139
- r2,
3140
- r3,
3141
- r4,
3142
- r5,
3143
- r0,
3144
- &miss,
3145
- &miss,
3146
- &miss);
3147
-
3148
- __ bind(&miss);
3149
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Miss));
3150
- __ Jump(ic, RelocInfo::CODE_TARGET);
3151
-
3152
- // Return the generated code.
3153
- return GetCode(NORMAL, NULL);
3154
- }
3155
-
3156
-
3157
3180
  MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
3158
3181
  int index,
3159
3182
  Map* transition,
@@ -3166,7 +3189,8 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
3166
3189
  // -----------------------------------
3167
3190
  Label miss;
3168
3191
 
3169
- __ IncrementCounter(&Counters::keyed_store_field, 1, r3, r4);
3192
+ Counters* counters = masm()->isolate()->counters();
3193
+ __ IncrementCounter(counters->keyed_store_field(), 1, r3, r4);
3170
3194
 
3171
3195
  // Check that the name has not changed.
3172
3196
  __ cmp(r1, Operand(Handle<String>(name)));
@@ -3182,9 +3206,8 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
3182
3206
  &miss);
3183
3207
  __ bind(&miss);
3184
3208
 
3185
- __ DecrementCounter(&Counters::keyed_store_field, 1, r3, r4);
3186
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
3187
-
3209
+ __ DecrementCounter(counters->keyed_store_field(), 1, r3, r4);
3210
+ Handle<Code> ic = masm()->isolate()->builtins()->KeyedStoreIC_Miss();
3188
3211
  __ Jump(ic, RelocInfo::CODE_TARGET);
3189
3212
 
3190
3213
  // Return the generated code.
@@ -3227,7 +3250,7 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
3227
3250
  __ ldr(elements_reg,
3228
3251
  FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
3229
3252
  __ ldr(scratch, FieldMemOperand(elements_reg, HeapObject::kMapOffset));
3230
- __ cmp(scratch, Operand(Handle<Map>(Factory::fixed_array_map())));
3253
+ __ cmp(scratch, Operand(Handle<Map>(factory()->fixed_array_map())));
3231
3254
  __ b(ne, &miss);
3232
3255
 
3233
3256
  // Check that the key is within bounds.
@@ -3254,48 +3277,7 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
3254
3277
  __ Ret();
3255
3278
 
3256
3279
  __ bind(&miss);
3257
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
3258
- __ Jump(ic, RelocInfo::CODE_TARGET);
3259
-
3260
- // Return the generated code.
3261
- return GetCode(NORMAL, NULL);
3262
- }
3263
-
3264
-
3265
- MaybeObject* KeyedStoreStubCompiler::CompileStorePixelArray(
3266
- JSObject* receiver) {
3267
- // ----------- S t a t e -------------
3268
- // -- r0 : value
3269
- // -- r1 : key
3270
- // -- r2 : receiver
3271
- // -- r3 : scratch
3272
- // -- r4 : scratch
3273
- // -- r5 : scratch
3274
- // -- r6 : scratch
3275
- // -- lr : return address
3276
- // -----------------------------------
3277
- Label miss;
3278
-
3279
- // Check that the map matches.
3280
- __ CheckMap(r2, r6, Handle<Map>(receiver->map()), &miss, false);
3281
-
3282
- GenerateFastPixelArrayStore(masm(),
3283
- r2,
3284
- r1,
3285
- r0,
3286
- r3,
3287
- r4,
3288
- r5,
3289
- r6,
3290
- true,
3291
- true,
3292
- &miss,
3293
- &miss,
3294
- NULL,
3295
- &miss);
3296
-
3297
- __ bind(&miss);
3298
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
3280
+ Handle<Code> ic = masm()->isolate()->builtins()->KeyedStoreIC_Miss();
3299
3281
  __ Jump(ic, RelocInfo::CODE_TARGET);
3300
3282
 
3301
3283
  // Return the generated code.
@@ -3431,16 +3413,16 @@ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
3431
3413
  // Remove caller arguments and receiver from the stack and return.
3432
3414
  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2));
3433
3415
  __ add(sp, sp, Operand(kPointerSize));
3434
- __ IncrementCounter(&Counters::constructed_objects, 1, r1, r2);
3435
- __ IncrementCounter(&Counters::constructed_objects_stub, 1, r1, r2);
3416
+ Counters* counters = masm()->isolate()->counters();
3417
+ __ IncrementCounter(counters->constructed_objects(), 1, r1, r2);
3418
+ __ IncrementCounter(counters->constructed_objects_stub(), 1, r1, r2);
3436
3419
  __ Jump(lr);
3437
3420
 
3438
3421
  // Jump to the generic stub in case the specialized code cannot handle the
3439
3422
  // construction.
3440
3423
  __ bind(&generic_stub_call);
3441
- Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
3442
- Handle<Code> generic_construct_stub(code);
3443
- __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
3424
+ Handle<Code> code = masm()->isolate()->builtins()->JSConstructStubGeneric();
3425
+ __ Jump(code, RelocInfo::CODE_TARGET);
3444
3426
 
3445
3427
  // Return the generated code.
3446
3428
  return GetCode();
@@ -3467,7 +3449,9 @@ static bool IsElementTypeSigned(ExternalArrayType array_type) {
3467
3449
 
3468
3450
 
3469
3451
  MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
3470
- ExternalArrayType array_type, Code::Flags flags) {
3452
+ JSObject* receiver_object,
3453
+ ExternalArrayType array_type,
3454
+ Code::Flags flags) {
3471
3455
  // ---------- S t a t e --------------
3472
3456
  // -- lr : return address
3473
3457
  // -- r0 : key
@@ -3484,24 +3468,13 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
3484
3468
  // Check that the key is a smi.
3485
3469
  __ JumpIfNotSmi(key, &slow);
3486
3470
 
3487
- // Check that the object is a JS object. Load map into r2.
3488
- __ CompareObjectType(receiver, r2, r3, FIRST_JS_OBJECT_TYPE);
3489
- __ b(lt, &slow);
3490
-
3491
- // Check that the receiver does not require access checks. We need
3492
- // to check this explicitly since this generic stub does not perform
3493
- // map checks.
3494
- __ ldrb(r3, FieldMemOperand(r2, Map::kBitFieldOffset));
3495
- __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded));
3471
+ // Make sure that we've got the right map.
3472
+ __ ldr(r2, FieldMemOperand(receiver, HeapObject::kMapOffset));
3473
+ __ cmp(r2, Operand(Handle<Map>(receiver_object->map())));
3496
3474
  __ b(ne, &slow);
3497
3475
 
3498
- // Check that the elements array is the appropriate type of
3499
- // ExternalArray.
3500
3476
  __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
3501
- __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
3502
- __ LoadRoot(ip, Heap::RootIndexForExternalArrayType(array_type));
3503
- __ cmp(r2, ip);
3504
- __ b(ne, &slow);
3477
+ // r3: elements array
3505
3478
 
3506
3479
  // Check that the index is in range.
3507
3480
  __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset));
@@ -3509,7 +3482,6 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
3509
3482
  // Unsigned comparison catches both negative and too-large values.
3510
3483
  __ b(lo, &slow);
3511
3484
 
3512
- // r3: elements array
3513
3485
  __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
3514
3486
  // r3: base pointer of external storage
3515
3487
 
@@ -3522,6 +3494,7 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
3522
3494
  case kExternalByteArray:
3523
3495
  __ ldrsb(value, MemOperand(r3, key, LSR, 1));
3524
3496
  break;
3497
+ case kExternalPixelArray:
3525
3498
  case kExternalUnsignedByteArray:
3526
3499
  __ ldrb(value, MemOperand(r3, key, LSR, 1));
3527
3500
  break;
@@ -3730,7 +3703,9 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
3730
3703
 
3731
3704
  // Slow case, key and receiver still in r0 and r1.
3732
3705
  __ bind(&slow);
3733
- __ IncrementCounter(&Counters::keyed_load_external_array_slow, 1, r2, r3);
3706
+ __ IncrementCounter(
3707
+ masm()->isolate()->counters()->keyed_load_external_array_slow(),
3708
+ 1, r2, r3);
3734
3709
 
3735
3710
  // ---------- S t a t e --------------
3736
3711
  // -- lr : return address
@@ -3747,7 +3722,9 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
3747
3722
 
3748
3723
 
3749
3724
  MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
3750
- ExternalArrayType array_type, Code::Flags flags) {
3725
+ JSObject* receiver_object,
3726
+ ExternalArrayType array_type,
3727
+ Code::Flags flags) {
3751
3728
  // ---------- S t a t e --------------
3752
3729
  // -- r0 : value
3753
3730
  // -- r1 : key
@@ -3765,28 +3742,18 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
3765
3742
  // Check that the object isn't a smi.
3766
3743
  __ JumpIfSmi(receiver, &slow);
3767
3744
 
3768
- // Check that the object is a JS object. Load map into r3.
3769
- __ CompareObjectType(receiver, r3, r4, FIRST_JS_OBJECT_TYPE);
3770
- __ b(le, &slow);
3771
-
3772
- // Check that the receiver does not require access checks. We need
3773
- // to do this because this generic stub does not perform map checks.
3774
- __ ldrb(ip, FieldMemOperand(r3, Map::kBitFieldOffset));
3775
- __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded));
3745
+ // Make sure that we've got the right map.
3746
+ __ ldr(r3, FieldMemOperand(receiver, HeapObject::kMapOffset));
3747
+ __ cmp(r3, Operand(Handle<Map>(receiver_object->map())));
3776
3748
  __ b(ne, &slow);
3777
3749
 
3750
+ __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
3751
+
3778
3752
  // Check that the key is a smi.
3779
3753
  __ JumpIfNotSmi(key, &slow);
3780
3754
 
3781
- // Check that the elements array is the appropriate type of ExternalArray.
3782
- __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
3783
- __ ldr(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
3784
- __ LoadRoot(ip, Heap::RootIndexForExternalArrayType(array_type));
3785
- __ cmp(r4, ip);
3786
- __ b(ne, &slow);
3787
-
3788
- // Check that the index is in range.
3789
- __ mov(r4, Operand(key, ASR, kSmiTagSize)); // Untag the index.
3755
+ // Check that the index is in range
3756
+ __ SmiUntag(r4, key);
3790
3757
  __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset));
3791
3758
  __ cmp(r4, ip);
3792
3759
  // Unsigned comparison catches both negative and too-large values.
@@ -3796,14 +3763,24 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
3796
3763
  // runtime for all other kinds of values.
3797
3764
  // r3: external array.
3798
3765
  // r4: key (integer).
3799
- __ JumpIfNotSmi(value, &check_heap_number);
3800
- __ mov(r5, Operand(value, ASR, kSmiTagSize)); // Untag the value.
3766
+ if (array_type == kExternalPixelArray) {
3767
+ // Double to pixel conversion is only implemented in the runtime for now.
3768
+ __ JumpIfNotSmi(value, &slow);
3769
+ } else {
3770
+ __ JumpIfNotSmi(value, &check_heap_number);
3771
+ }
3772
+ __ SmiUntag(r5, value);
3801
3773
  __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
3802
3774
 
3803
3775
  // r3: base pointer of external storage.
3804
3776
  // r4: key (integer).
3805
3777
  // r5: value (integer).
3806
3778
  switch (array_type) {
3779
+ case kExternalPixelArray:
3780
+ // Clamp the value to [0..255].
3781
+ __ Usat(r5, 8, Operand(r5));
3782
+ __ strb(r5, MemOperand(r3, r4, LSL, 0));
3783
+ break;
3807
3784
  case kExternalByteArray:
3808
3785
  case kExternalUnsignedByteArray:
3809
3786
  __ strb(r5, MemOperand(r3, r4, LSL, 0));
@@ -3828,198 +3805,199 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
3828
3805
  // Entry registers are intact, r0 holds the value which is the return value.
3829
3806
  __ Ret();
3830
3807
 
3808
+ if (array_type != kExternalPixelArray) {
3809
+ // r3: external array.
3810
+ // r4: index (integer).
3811
+ __ bind(&check_heap_number);
3812
+ __ CompareObjectType(value, r5, r6, HEAP_NUMBER_TYPE);
3813
+ __ b(ne, &slow);
3831
3814
 
3832
- // r3: external array.
3833
- // r4: index (integer).
3834
- __ bind(&check_heap_number);
3835
- __ CompareObjectType(value, r5, r6, HEAP_NUMBER_TYPE);
3836
- __ b(ne, &slow);
3837
-
3838
- __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
3839
-
3840
- // r3: base pointer of external storage.
3841
- // r4: key (integer).
3815
+ __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
3842
3816
 
3843
- // The WebGL specification leaves the behavior of storing NaN and
3844
- // +/-Infinity into integer arrays basically undefined. For more
3845
- // reproducible behavior, convert these to zero.
3846
- if (CpuFeatures::IsSupported(VFP3)) {
3847
- CpuFeatures::Scope scope(VFP3);
3817
+ // r3: base pointer of external storage.
3818
+ // r4: key (integer).
3848
3819
 
3820
+ // The WebGL specification leaves the behavior of storing NaN and
3821
+ // +/-Infinity into integer arrays basically undefined. For more
3822
+ // reproducible behavior, convert these to zero.
3823
+ if (CpuFeatures::IsSupported(VFP3)) {
3824
+ CpuFeatures::Scope scope(VFP3);
3849
3825
 
3850
- if (array_type == kExternalFloatArray) {
3851
- // vldr requires offset to be a multiple of 4 so we can not
3852
- // include -kHeapObjectTag into it.
3853
- __ sub(r5, r0, Operand(kHeapObjectTag));
3854
- __ vldr(d0, r5, HeapNumber::kValueOffset);
3855
- __ add(r5, r3, Operand(r4, LSL, 2));
3856
- __ vcvt_f32_f64(s0, d0);
3857
- __ vstr(s0, r5, 0);
3858
- } else {
3859
- // Need to perform float-to-int conversion.
3860
- // Test for NaN or infinity (both give zero).
3861
- __ ldr(r6, FieldMemOperand(value, HeapNumber::kExponentOffset));
3862
-
3863
- // Hoisted load. vldr requires offset to be a multiple of 4 so we can not
3864
- // include -kHeapObjectTag into it.
3865
- __ sub(r5, value, Operand(kHeapObjectTag));
3866
- __ vldr(d0, r5, HeapNumber::kValueOffset);
3867
-
3868
- __ Sbfx(r6, r6, HeapNumber::kExponentShift, HeapNumber::kExponentBits);
3869
- // NaNs and Infinities have all-one exponents so they sign extend to -1.
3870
- __ cmp(r6, Operand(-1));
3871
- __ mov(r5, Operand(0), LeaveCC, eq);
3872
-
3873
- // Not infinity or NaN simply convert to int.
3874
- if (IsElementTypeSigned(array_type)) {
3875
- __ vcvt_s32_f64(s0, d0, kDefaultRoundToZero, ne);
3826
+ if (array_type == kExternalFloatArray) {
3827
+ // vldr requires offset to be a multiple of 4 so we can not
3828
+ // include -kHeapObjectTag into it.
3829
+ __ sub(r5, r0, Operand(kHeapObjectTag));
3830
+ __ vldr(d0, r5, HeapNumber::kValueOffset);
3831
+ __ add(r5, r3, Operand(r4, LSL, 2));
3832
+ __ vcvt_f32_f64(s0, d0);
3833
+ __ vstr(s0, r5, 0);
3876
3834
  } else {
3877
- __ vcvt_u32_f64(s0, d0, kDefaultRoundToZero, ne);
3878
- }
3879
- __ vmov(r5, s0, ne);
3880
-
3881
- switch (array_type) {
3882
- case kExternalByteArray:
3883
- case kExternalUnsignedByteArray:
3884
- __ strb(r5, MemOperand(r3, r4, LSL, 0));
3885
- break;
3886
- case kExternalShortArray:
3887
- case kExternalUnsignedShortArray:
3888
- __ strh(r5, MemOperand(r3, r4, LSL, 1));
3889
- break;
3890
- case kExternalIntArray:
3891
- case kExternalUnsignedIntArray:
3892
- __ str(r5, MemOperand(r3, r4, LSL, 2));
3893
- break;
3894
- default:
3895
- UNREACHABLE();
3896
- break;
3835
+ // Need to perform float-to-int conversion.
3836
+ // Test for NaN or infinity (both give zero).
3837
+ __ ldr(r6, FieldMemOperand(value, HeapNumber::kExponentOffset));
3838
+
3839
+ // Hoisted load. vldr requires offset to be a multiple of 4 so we can
3840
+ // not include -kHeapObjectTag into it.
3841
+ __ sub(r5, value, Operand(kHeapObjectTag));
3842
+ __ vldr(d0, r5, HeapNumber::kValueOffset);
3843
+
3844
+ __ Sbfx(r6, r6, HeapNumber::kExponentShift, HeapNumber::kExponentBits);
3845
+ // NaNs and Infinities have all-one exponents so they sign extend to -1.
3846
+ __ cmp(r6, Operand(-1));
3847
+ __ mov(r5, Operand(0), LeaveCC, eq);
3848
+
3849
+ // Not infinity or NaN simply convert to int.
3850
+ if (IsElementTypeSigned(array_type)) {
3851
+ __ vcvt_s32_f64(s0, d0, kDefaultRoundToZero, ne);
3852
+ } else {
3853
+ __ vcvt_u32_f64(s0, d0, kDefaultRoundToZero, ne);
3854
+ }
3855
+ __ vmov(r5, s0, ne);
3856
+
3857
+ switch (array_type) {
3858
+ case kExternalByteArray:
3859
+ case kExternalUnsignedByteArray:
3860
+ __ strb(r5, MemOperand(r3, r4, LSL, 0));
3861
+ break;
3862
+ case kExternalShortArray:
3863
+ case kExternalUnsignedShortArray:
3864
+ __ strh(r5, MemOperand(r3, r4, LSL, 1));
3865
+ break;
3866
+ case kExternalIntArray:
3867
+ case kExternalUnsignedIntArray:
3868
+ __ str(r5, MemOperand(r3, r4, LSL, 2));
3869
+ break;
3870
+ default:
3871
+ UNREACHABLE();
3872
+ break;
3873
+ }
3897
3874
  }
3898
- }
3899
-
3900
- // Entry registers are intact, r0 holds the value which is the return value.
3901
- __ Ret();
3902
- } else {
3903
- // VFP3 is not available do manual conversions.
3904
- __ ldr(r5, FieldMemOperand(value, HeapNumber::kExponentOffset));
3905
- __ ldr(r6, FieldMemOperand(value, HeapNumber::kMantissaOffset));
3906
-
3907
- if (array_type == kExternalFloatArray) {
3908
- Label done, nan_or_infinity_or_zero;
3909
- static const int kMantissaInHiWordShift =
3910
- kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
3911
-
3912
- static const int kMantissaInLoWordShift =
3913
- kBitsPerInt - kMantissaInHiWordShift;
3914
-
3915
- // Test for all special exponent values: zeros, subnormal numbers, NaNs
3916
- // and infinities. All these should be converted to 0.
3917
- __ mov(r7, Operand(HeapNumber::kExponentMask));
3918
- __ and_(r9, r5, Operand(r7), SetCC);
3919
- __ b(eq, &nan_or_infinity_or_zero);
3920
-
3921
- __ teq(r9, Operand(r7));
3922
- __ mov(r9, Operand(kBinary32ExponentMask), LeaveCC, eq);
3923
- __ b(eq, &nan_or_infinity_or_zero);
3924
3875
 
3925
- // Rebias exponent.
3926
- __ mov(r9, Operand(r9, LSR, HeapNumber::kExponentShift));
3927
- __ add(r9,
3928
- r9,
3929
- Operand(kBinary32ExponentBias - HeapNumber::kExponentBias));
3930
-
3931
- __ cmp(r9, Operand(kBinary32MaxExponent));
3932
- __ and_(r5, r5, Operand(HeapNumber::kSignMask), LeaveCC, gt);
3933
- __ orr(r5, r5, Operand(kBinary32ExponentMask), LeaveCC, gt);
3934
- __ b(gt, &done);
3935
-
3936
- __ cmp(r9, Operand(kBinary32MinExponent));
3937
- __ and_(r5, r5, Operand(HeapNumber::kSignMask), LeaveCC, lt);
3938
- __ b(lt, &done);
3939
-
3940
- __ and_(r7, r5, Operand(HeapNumber::kSignMask));
3941
- __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
3942
- __ orr(r7, r7, Operand(r5, LSL, kMantissaInHiWordShift));
3943
- __ orr(r7, r7, Operand(r6, LSR, kMantissaInLoWordShift));
3944
- __ orr(r5, r7, Operand(r9, LSL, kBinary32ExponentShift));
3945
-
3946
- __ bind(&done);
3947
- __ str(r5, MemOperand(r3, r4, LSL, 2));
3948
3876
  // Entry registers are intact, r0 holds the value which is the return
3949
3877
  // value.
3950
3878
  __ Ret();
3951
-
3952
- __ bind(&nan_or_infinity_or_zero);
3953
- __ and_(r7, r5, Operand(HeapNumber::kSignMask));
3954
- __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
3955
- __ orr(r9, r9, r7);
3956
- __ orr(r9, r9, Operand(r5, LSL, kMantissaInHiWordShift));
3957
- __ orr(r5, r9, Operand(r6, LSR, kMantissaInLoWordShift));
3958
- __ b(&done);
3959
3879
  } else {
3960
- bool is_signed_type = IsElementTypeSigned(array_type);
3961
- int meaningfull_bits = is_signed_type ? (kBitsPerInt - 1) : kBitsPerInt;
3962
- int32_t min_value = is_signed_type ? 0x80000000 : 0x00000000;
3963
-
3964
- Label done, sign;
3965
-
3966
- // Test for all special exponent values: zeros, subnormal numbers, NaNs
3967
- // and infinities. All these should be converted to 0.
3968
- __ mov(r7, Operand(HeapNumber::kExponentMask));
3969
- __ and_(r9, r5, Operand(r7), SetCC);
3970
- __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, eq);
3971
- __ b(eq, &done);
3972
-
3973
- __ teq(r9, Operand(r7));
3974
- __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, eq);
3975
- __ b(eq, &done);
3976
-
3977
- // Unbias exponent.
3978
- __ mov(r9, Operand(r9, LSR, HeapNumber::kExponentShift));
3979
- __ sub(r9, r9, Operand(HeapNumber::kExponentBias), SetCC);
3980
- // If exponent is negative then result is 0.
3981
- __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, mi);
3982
- __ b(mi, &done);
3983
-
3984
- // If exponent is too big then result is minimal value.
3985
- __ cmp(r9, Operand(meaningfull_bits - 1));
3986
- __ mov(r5, Operand(min_value), LeaveCC, ge);
3987
- __ b(ge, &done);
3988
-
3989
- __ and_(r7, r5, Operand(HeapNumber::kSignMask), SetCC);
3990
- __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
3991
- __ orr(r5, r5, Operand(1u << HeapNumber::kMantissaBitsInTopWord));
3992
-
3993
- __ rsb(r9, r9, Operand(HeapNumber::kMantissaBitsInTopWord), SetCC);
3994
- __ mov(r5, Operand(r5, LSR, r9), LeaveCC, pl);
3995
- __ b(pl, &sign);
3996
-
3997
- __ rsb(r9, r9, Operand(0, RelocInfo::NONE));
3998
- __ mov(r5, Operand(r5, LSL, r9));
3999
- __ rsb(r9, r9, Operand(meaningfull_bits));
4000
- __ orr(r5, r5, Operand(r6, LSR, r9));
4001
-
4002
- __ bind(&sign);
4003
- __ teq(r7, Operand(0, RelocInfo::NONE));
4004
- __ rsb(r5, r5, Operand(0, RelocInfo::NONE), LeaveCC, ne);
4005
-
4006
- __ bind(&done);
4007
- switch (array_type) {
4008
- case kExternalByteArray:
4009
- case kExternalUnsignedByteArray:
4010
- __ strb(r5, MemOperand(r3, r4, LSL, 0));
4011
- break;
4012
- case kExternalShortArray:
4013
- case kExternalUnsignedShortArray:
4014
- __ strh(r5, MemOperand(r3, r4, LSL, 1));
4015
- break;
4016
- case kExternalIntArray:
4017
- case kExternalUnsignedIntArray:
4018
- __ str(r5, MemOperand(r3, r4, LSL, 2));
4019
- break;
4020
- default:
4021
- UNREACHABLE();
4022
- break;
3880
+ // VFP3 is not available do manual conversions.
3881
+ __ ldr(r5, FieldMemOperand(value, HeapNumber::kExponentOffset));
3882
+ __ ldr(r6, FieldMemOperand(value, HeapNumber::kMantissaOffset));
3883
+
3884
+ if (array_type == kExternalFloatArray) {
3885
+ Label done, nan_or_infinity_or_zero;
3886
+ static const int kMantissaInHiWordShift =
3887
+ kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
3888
+
3889
+ static const int kMantissaInLoWordShift =
3890
+ kBitsPerInt - kMantissaInHiWordShift;
3891
+
3892
+ // Test for all special exponent values: zeros, subnormal numbers, NaNs
3893
+ // and infinities. All these should be converted to 0.
3894
+ __ mov(r7, Operand(HeapNumber::kExponentMask));
3895
+ __ and_(r9, r5, Operand(r7), SetCC);
3896
+ __ b(eq, &nan_or_infinity_or_zero);
3897
+
3898
+ __ teq(r9, Operand(r7));
3899
+ __ mov(r9, Operand(kBinary32ExponentMask), LeaveCC, eq);
3900
+ __ b(eq, &nan_or_infinity_or_zero);
3901
+
3902
+ // Rebias exponent.
3903
+ __ mov(r9, Operand(r9, LSR, HeapNumber::kExponentShift));
3904
+ __ add(r9,
3905
+ r9,
3906
+ Operand(kBinary32ExponentBias - HeapNumber::kExponentBias));
3907
+
3908
+ __ cmp(r9, Operand(kBinary32MaxExponent));
3909
+ __ and_(r5, r5, Operand(HeapNumber::kSignMask), LeaveCC, gt);
3910
+ __ orr(r5, r5, Operand(kBinary32ExponentMask), LeaveCC, gt);
3911
+ __ b(gt, &done);
3912
+
3913
+ __ cmp(r9, Operand(kBinary32MinExponent));
3914
+ __ and_(r5, r5, Operand(HeapNumber::kSignMask), LeaveCC, lt);
3915
+ __ b(lt, &done);
3916
+
3917
+ __ and_(r7, r5, Operand(HeapNumber::kSignMask));
3918
+ __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
3919
+ __ orr(r7, r7, Operand(r5, LSL, kMantissaInHiWordShift));
3920
+ __ orr(r7, r7, Operand(r6, LSR, kMantissaInLoWordShift));
3921
+ __ orr(r5, r7, Operand(r9, LSL, kBinary32ExponentShift));
3922
+
3923
+ __ bind(&done);
3924
+ __ str(r5, MemOperand(r3, r4, LSL, 2));
3925
+ // Entry registers are intact, r0 holds the value which is the return
3926
+ // value.
3927
+ __ Ret();
3928
+
3929
+ __ bind(&nan_or_infinity_or_zero);
3930
+ __ and_(r7, r5, Operand(HeapNumber::kSignMask));
3931
+ __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
3932
+ __ orr(r9, r9, r7);
3933
+ __ orr(r9, r9, Operand(r5, LSL, kMantissaInHiWordShift));
3934
+ __ orr(r5, r9, Operand(r6, LSR, kMantissaInLoWordShift));
3935
+ __ b(&done);
3936
+ } else {
3937
+ bool is_signed_type = IsElementTypeSigned(array_type);
3938
+ int meaningfull_bits = is_signed_type ? (kBitsPerInt - 1) : kBitsPerInt;
3939
+ int32_t min_value = is_signed_type ? 0x80000000 : 0x00000000;
3940
+
3941
+ Label done, sign;
3942
+
3943
+ // Test for all special exponent values: zeros, subnormal numbers, NaNs
3944
+ // and infinities. All these should be converted to 0.
3945
+ __ mov(r7, Operand(HeapNumber::kExponentMask));
3946
+ __ and_(r9, r5, Operand(r7), SetCC);
3947
+ __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, eq);
3948
+ __ b(eq, &done);
3949
+
3950
+ __ teq(r9, Operand(r7));
3951
+ __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, eq);
3952
+ __ b(eq, &done);
3953
+
3954
+ // Unbias exponent.
3955
+ __ mov(r9, Operand(r9, LSR, HeapNumber::kExponentShift));
3956
+ __ sub(r9, r9, Operand(HeapNumber::kExponentBias), SetCC);
3957
+ // If exponent is negative then result is 0.
3958
+ __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, mi);
3959
+ __ b(mi, &done);
3960
+
3961
+ // If exponent is too big then result is minimal value.
3962
+ __ cmp(r9, Operand(meaningfull_bits - 1));
3963
+ __ mov(r5, Operand(min_value), LeaveCC, ge);
3964
+ __ b(ge, &done);
3965
+
3966
+ __ and_(r7, r5, Operand(HeapNumber::kSignMask), SetCC);
3967
+ __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
3968
+ __ orr(r5, r5, Operand(1u << HeapNumber::kMantissaBitsInTopWord));
3969
+
3970
+ __ rsb(r9, r9, Operand(HeapNumber::kMantissaBitsInTopWord), SetCC);
3971
+ __ mov(r5, Operand(r5, LSR, r9), LeaveCC, pl);
3972
+ __ b(pl, &sign);
3973
+
3974
+ __ rsb(r9, r9, Operand(0, RelocInfo::NONE));
3975
+ __ mov(r5, Operand(r5, LSL, r9));
3976
+ __ rsb(r9, r9, Operand(meaningfull_bits));
3977
+ __ orr(r5, r5, Operand(r6, LSR, r9));
3978
+
3979
+ __ bind(&sign);
3980
+ __ teq(r7, Operand(0, RelocInfo::NONE));
3981
+ __ rsb(r5, r5, Operand(0, RelocInfo::NONE), LeaveCC, ne);
3982
+
3983
+ __ bind(&done);
3984
+ switch (array_type) {
3985
+ case kExternalByteArray:
3986
+ case kExternalUnsignedByteArray:
3987
+ __ strb(r5, MemOperand(r3, r4, LSL, 0));
3988
+ break;
3989
+ case kExternalShortArray:
3990
+ case kExternalUnsignedShortArray:
3991
+ __ strh(r5, MemOperand(r3, r4, LSL, 1));
3992
+ break;
3993
+ case kExternalIntArray:
3994
+ case kExternalUnsignedIntArray:
3995
+ __ str(r5, MemOperand(r3, r4, LSL, 2));
3996
+ break;
3997
+ default:
3998
+ UNREACHABLE();
3999
+ break;
4000
+ }
4023
4001
  }
4024
4002
  }
4025
4003
  }
@@ -4038,7 +4016,12 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
4038
4016
  // Push receiver, key and value for runtime call.
4039
4017
  __ Push(r2, r1, r0);
4040
4018
 
4041
- __ TailCallRuntime(Runtime::kSetProperty, 3, 1);
4019
+ __ mov(r1, Operand(Smi::FromInt(NONE))); // PropertyAttributes
4020
+ __ mov(r0, Operand(Smi::FromInt(
4021
+ Code::ExtractExtraICStateFromFlags(flags) & kStrictMode)));
4022
+ __ Push(r1, r0);
4023
+
4024
+ __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
4042
4025
 
4043
4026
  return GetCode(flags);
4044
4027
  }