mustang 0.0.1 → 0.1.0

Sign up to get free protection for your applications and to get access to all the features.
Files changed (560)
  1. data/.rspec +1 -0
  2. data/Isolate +9 -0
  3. data/README.md +6 -12
  4. data/Rakefile +30 -4
  5. data/TODO.md +9 -0
  6. data/ext/v8/extconf.rb +56 -0
  7. data/ext/v8/v8.cpp +37 -0
  8. data/ext/v8/v8_array.cpp +161 -0
  9. data/ext/v8/v8_array.h +17 -0
  10. data/ext/v8/v8_base.cpp +147 -0
  11. data/ext/v8/v8_base.h +23 -0
  12. data/ext/v8/v8_cast.cpp +151 -0
  13. data/ext/v8/v8_cast.h +64 -0
  14. data/ext/v8/v8_context.cpp +174 -0
  15. data/ext/v8/v8_context.h +12 -0
  16. data/ext/v8/v8_date.cpp +61 -0
  17. data/ext/v8/v8_date.h +16 -0
  18. data/ext/v8/v8_errors.cpp +147 -0
  19. data/ext/v8/v8_errors.h +19 -0
  20. data/ext/v8/v8_external.cpp +66 -0
  21. data/ext/v8/v8_external.h +16 -0
  22. data/ext/v8/v8_function.cpp +182 -0
  23. data/ext/v8/v8_function.h +14 -0
  24. data/ext/v8/v8_integer.cpp +70 -0
  25. data/ext/v8/v8_integer.h +16 -0
  26. data/ext/v8/v8_macros.h +30 -0
  27. data/ext/v8/v8_main.cpp +53 -0
  28. data/ext/v8/v8_main.h +13 -0
  29. data/ext/v8/v8_number.cpp +62 -0
  30. data/ext/v8/v8_number.h +16 -0
  31. data/ext/v8/v8_object.cpp +172 -0
  32. data/ext/v8/v8_object.h +17 -0
  33. data/ext/v8/v8_ref.cpp +72 -0
  34. data/ext/v8/v8_ref.h +43 -0
  35. data/ext/v8/v8_regexp.cpp +148 -0
  36. data/ext/v8/v8_regexp.h +16 -0
  37. data/ext/v8/v8_string.cpp +78 -0
  38. data/ext/v8/v8_string.h +16 -0
  39. data/ext/v8/v8_value.cpp +370 -0
  40. data/ext/v8/v8_value.h +19 -0
  41. data/gemspec.yml +2 -1
  42. data/lib/core_ext/class.rb +14 -0
  43. data/lib/core_ext/object.rb +12 -0
  44. data/lib/core_ext/symbol.rb +23 -0
  45. data/lib/mustang.rb +44 -0
  46. data/lib/mustang/context.rb +69 -0
  47. data/lib/mustang/errors.rb +36 -0
  48. data/lib/support/delegated.rb +25 -0
  49. data/lib/v8/array.rb +21 -0
  50. data/lib/v8/context.rb +13 -0
  51. data/lib/v8/date.rb +20 -0
  52. data/lib/v8/error.rb +15 -0
  53. data/lib/v8/external.rb +16 -0
  54. data/lib/v8/function.rb +11 -0
  55. data/lib/v8/integer.rb +16 -0
  56. data/lib/v8/number.rb +16 -0
  57. data/lib/v8/object.rb +66 -0
  58. data/lib/v8/regexp.rb +23 -0
  59. data/lib/v8/string.rb +27 -0
  60. data/mustang.gemspec +3 -0
  61. data/spec/core_ext/class_spec.rb +19 -0
  62. data/spec/core_ext/object_spec.rb +19 -0
  63. data/spec/core_ext/symbol_spec.rb +27 -0
  64. data/spec/fixtures/test1.js +2 -0
  65. data/spec/fixtures/test2.js +2 -0
  66. data/spec/spec_helper.rb +20 -0
  67. data/spec/v8/array_spec.rb +88 -0
  68. data/spec/v8/cast_spec.rb +151 -0
  69. data/spec/v8/context_spec.rb +78 -0
  70. data/spec/v8/data_spec.rb +39 -0
  71. data/spec/v8/date_spec.rb +45 -0
  72. data/spec/v8/empty_spec.rb +27 -0
  73. data/spec/v8/errors_spec.rb +142 -0
  74. data/spec/v8/external_spec.rb +44 -0
  75. data/spec/v8/function_spec.rb +170 -0
  76. data/spec/v8/integer_spec.rb +41 -0
  77. data/spec/v8/main_spec.rb +18 -0
  78. data/spec/v8/null_spec.rb +27 -0
  79. data/spec/v8/number_spec.rb +40 -0
  80. data/spec/v8/object_spec.rb +79 -0
  81. data/spec/v8/primitive_spec.rb +9 -0
  82. data/spec/v8/regexp_spec.rb +65 -0
  83. data/spec/v8/string_spec.rb +48 -0
  84. data/spec/v8/undefined_spec.rb +27 -0
  85. data/spec/v8/value_spec.rb +215 -0
  86. data/vendor/v8/.gitignore +2 -0
  87. data/vendor/v8/AUTHORS +3 -1
  88. data/vendor/v8/ChangeLog +117 -0
  89. data/vendor/v8/SConstruct +334 -53
  90. data/vendor/v8/include/v8-debug.h +21 -11
  91. data/vendor/v8/include/v8-preparser.h +1 -1
  92. data/vendor/v8/include/v8-profiler.h +122 -43
  93. data/vendor/v8/include/v8-testing.h +5 -0
  94. data/vendor/v8/include/v8.h +171 -17
  95. data/vendor/v8/preparser/SConscript +38 -0
  96. data/vendor/v8/preparser/preparser-process.cc +77 -114
  97. data/vendor/v8/samples/shell.cc +232 -46
  98. data/vendor/v8/src/SConscript +29 -5
  99. data/vendor/v8/src/accessors.cc +70 -211
  100. data/vendor/v8/{test/cctest/test-mips.cc → src/allocation-inl.h} +15 -18
  101. data/vendor/v8/src/allocation.cc +0 -82
  102. data/vendor/v8/src/allocation.h +9 -42
  103. data/vendor/v8/src/api.cc +1645 -1156
  104. data/vendor/v8/src/api.h +76 -12
  105. data/vendor/v8/src/apiutils.h +0 -7
  106. data/vendor/v8/src/arguments.h +15 -4
  107. data/vendor/v8/src/arm/assembler-arm-inl.h +10 -9
  108. data/vendor/v8/src/arm/assembler-arm.cc +62 -23
  109. data/vendor/v8/src/arm/assembler-arm.h +76 -11
  110. data/vendor/v8/src/arm/builtins-arm.cc +39 -33
  111. data/vendor/v8/src/arm/code-stubs-arm.cc +1182 -402
  112. data/vendor/v8/src/arm/code-stubs-arm.h +20 -54
  113. data/vendor/v8/src/arm/codegen-arm.cc +159 -106
  114. data/vendor/v8/src/arm/codegen-arm.h +6 -6
  115. data/vendor/v8/src/arm/constants-arm.h +16 -1
  116. data/vendor/v8/src/arm/cpu-arm.cc +7 -5
  117. data/vendor/v8/src/arm/debug-arm.cc +6 -4
  118. data/vendor/v8/src/arm/deoptimizer-arm.cc +51 -14
  119. data/vendor/v8/src/arm/disasm-arm.cc +47 -15
  120. data/vendor/v8/src/arm/frames-arm.h +1 -1
  121. data/vendor/v8/src/arm/full-codegen-arm.cc +724 -408
  122. data/vendor/v8/src/arm/ic-arm.cc +90 -85
  123. data/vendor/v8/src/arm/lithium-arm.cc +140 -69
  124. data/vendor/v8/src/arm/lithium-arm.h +161 -46
  125. data/vendor/v8/src/arm/lithium-codegen-arm.cc +567 -297
  126. data/vendor/v8/src/arm/lithium-codegen-arm.h +21 -9
  127. data/vendor/v8/src/arm/lithium-gap-resolver-arm.cc +2 -0
  128. data/vendor/v8/src/arm/macro-assembler-arm.cc +457 -96
  129. data/vendor/v8/src/arm/macro-assembler-arm.h +115 -18
  130. data/vendor/v8/src/arm/regexp-macro-assembler-arm.cc +20 -13
  131. data/vendor/v8/src/arm/regexp-macro-assembler-arm.h +1 -0
  132. data/vendor/v8/src/arm/simulator-arm.cc +184 -101
  133. data/vendor/v8/src/arm/simulator-arm.h +26 -21
  134. data/vendor/v8/src/arm/stub-cache-arm.cc +450 -467
  135. data/vendor/v8/src/arm/virtual-frame-arm.cc +14 -12
  136. data/vendor/v8/src/arm/virtual-frame-arm.h +11 -8
  137. data/vendor/v8/src/array.js +35 -18
  138. data/vendor/v8/src/assembler.cc +186 -92
  139. data/vendor/v8/src/assembler.h +106 -69
  140. data/vendor/v8/src/ast-inl.h +5 -0
  141. data/vendor/v8/src/ast.cc +46 -35
  142. data/vendor/v8/src/ast.h +107 -50
  143. data/vendor/v8/src/atomicops.h +2 -0
  144. data/vendor/v8/src/atomicops_internals_mips_gcc.h +169 -0
  145. data/vendor/v8/src/bootstrapper.cc +649 -399
  146. data/vendor/v8/src/bootstrapper.h +94 -27
  147. data/vendor/v8/src/builtins.cc +359 -227
  148. data/vendor/v8/src/builtins.h +157 -123
  149. data/vendor/v8/src/checks.cc +2 -2
  150. data/vendor/v8/src/checks.h +4 -0
  151. data/vendor/v8/src/code-stubs.cc +27 -17
  152. data/vendor/v8/src/code-stubs.h +38 -17
  153. data/vendor/v8/src/codegen-inl.h +5 -1
  154. data/vendor/v8/src/codegen.cc +27 -17
  155. data/vendor/v8/src/codegen.h +9 -9
  156. data/vendor/v8/src/compilation-cache.cc +92 -206
  157. data/vendor/v8/src/compilation-cache.h +205 -30
  158. data/vendor/v8/src/compiler.cc +107 -120
  159. data/vendor/v8/src/compiler.h +17 -2
  160. data/vendor/v8/src/contexts.cc +22 -15
  161. data/vendor/v8/src/contexts.h +14 -8
  162. data/vendor/v8/src/conversions.cc +86 -30
  163. data/vendor/v8/src/counters.cc +19 -4
  164. data/vendor/v8/src/counters.h +28 -16
  165. data/vendor/v8/src/cpu-profiler-inl.h +4 -3
  166. data/vendor/v8/src/cpu-profiler.cc +123 -72
  167. data/vendor/v8/src/cpu-profiler.h +33 -19
  168. data/vendor/v8/src/cpu.h +2 -0
  169. data/vendor/v8/src/d8-debug.cc +3 -3
  170. data/vendor/v8/src/d8-debug.h +7 -6
  171. data/vendor/v8/src/d8-posix.cc +2 -0
  172. data/vendor/v8/src/d8.cc +22 -12
  173. data/vendor/v8/src/d8.gyp +3 -0
  174. data/vendor/v8/src/d8.js +618 -0
  175. data/vendor/v8/src/data-flow.h +3 -3
  176. data/vendor/v8/src/dateparser.h +4 -2
  177. data/vendor/v8/src/debug-agent.cc +10 -9
  178. data/vendor/v8/src/debug-agent.h +9 -11
  179. data/vendor/v8/src/debug-debugger.js +121 -0
  180. data/vendor/v8/src/debug.cc +331 -227
  181. data/vendor/v8/src/debug.h +248 -219
  182. data/vendor/v8/src/deoptimizer.cc +173 -62
  183. data/vendor/v8/src/deoptimizer.h +119 -19
  184. data/vendor/v8/src/disasm.h +3 -0
  185. data/vendor/v8/src/disassembler.cc +10 -9
  186. data/vendor/v8/src/execution.cc +185 -129
  187. data/vendor/v8/src/execution.h +47 -78
  188. data/vendor/v8/src/extensions/experimental/break-iterator.cc +250 -0
  189. data/vendor/v8/src/extensions/experimental/break-iterator.h +89 -0
  190. data/vendor/v8/src/extensions/experimental/experimental.gyp +2 -0
  191. data/vendor/v8/src/extensions/experimental/i18n-extension.cc +22 -2
  192. data/vendor/v8/src/extensions/externalize-string-extension.cc +2 -2
  193. data/vendor/v8/src/extensions/gc-extension.cc +1 -1
  194. data/vendor/v8/src/factory.cc +261 -154
  195. data/vendor/v8/src/factory.h +162 -158
  196. data/vendor/v8/src/flag-definitions.h +17 -11
  197. data/vendor/v8/src/frame-element.cc +0 -5
  198. data/vendor/v8/src/frame-element.h +9 -13
  199. data/vendor/v8/src/frames-inl.h +7 -0
  200. data/vendor/v8/src/frames.cc +56 -46
  201. data/vendor/v8/src/frames.h +36 -25
  202. data/vendor/v8/src/full-codegen.cc +15 -24
  203. data/vendor/v8/src/full-codegen.h +13 -41
  204. data/vendor/v8/src/func-name-inferrer.cc +7 -6
  205. data/vendor/v8/src/func-name-inferrer.h +1 -1
  206. data/vendor/v8/src/gdb-jit.cc +1 -0
  207. data/vendor/v8/src/global-handles.cc +118 -56
  208. data/vendor/v8/src/global-handles.h +98 -40
  209. data/vendor/v8/src/globals.h +2 -2
  210. data/vendor/v8/src/handles-inl.h +106 -9
  211. data/vendor/v8/src/handles.cc +220 -157
  212. data/vendor/v8/src/handles.h +38 -59
  213. data/vendor/v8/src/hashmap.h +3 -3
  214. data/vendor/v8/src/heap-inl.h +141 -25
  215. data/vendor/v8/src/heap-profiler.cc +117 -63
  216. data/vendor/v8/src/heap-profiler.h +38 -21
  217. data/vendor/v8/src/heap.cc +805 -564
  218. data/vendor/v8/src/heap.h +640 -594
  219. data/vendor/v8/src/hydrogen-instructions.cc +216 -73
  220. data/vendor/v8/src/hydrogen-instructions.h +259 -124
  221. data/vendor/v8/src/hydrogen.cc +996 -1171
  222. data/vendor/v8/src/hydrogen.h +163 -144
  223. data/vendor/v8/src/ia32/assembler-ia32-inl.h +12 -11
  224. data/vendor/v8/src/ia32/assembler-ia32.cc +85 -39
  225. data/vendor/v8/src/ia32/assembler-ia32.h +82 -16
  226. data/vendor/v8/src/ia32/builtins-ia32.cc +64 -58
  227. data/vendor/v8/src/ia32/code-stubs-ia32.cc +248 -324
  228. data/vendor/v8/src/ia32/code-stubs-ia32.h +3 -44
  229. data/vendor/v8/src/ia32/codegen-ia32.cc +217 -165
  230. data/vendor/v8/src/ia32/codegen-ia32.h +3 -0
  231. data/vendor/v8/src/ia32/cpu-ia32.cc +6 -5
  232. data/vendor/v8/src/ia32/debug-ia32.cc +8 -5
  233. data/vendor/v8/src/ia32/deoptimizer-ia32.cc +124 -14
  234. data/vendor/v8/src/ia32/disasm-ia32.cc +85 -62
  235. data/vendor/v8/src/ia32/frames-ia32.h +1 -1
  236. data/vendor/v8/src/ia32/full-codegen-ia32.cc +348 -435
  237. data/vendor/v8/src/ia32/ic-ia32.cc +91 -91
  238. data/vendor/v8/src/ia32/lithium-codegen-ia32.cc +500 -255
  239. data/vendor/v8/src/ia32/lithium-codegen-ia32.h +13 -4
  240. data/vendor/v8/src/ia32/lithium-gap-resolver-ia32.cc +6 -0
  241. data/vendor/v8/src/ia32/lithium-ia32.cc +122 -45
  242. data/vendor/v8/src/ia32/lithium-ia32.h +128 -41
  243. data/vendor/v8/src/ia32/macro-assembler-ia32.cc +109 -84
  244. data/vendor/v8/src/ia32/macro-assembler-ia32.h +18 -9
  245. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.cc +26 -15
  246. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.h +1 -0
  247. data/vendor/v8/src/ia32/register-allocator-ia32.cc +30 -30
  248. data/vendor/v8/src/ia32/simulator-ia32.h +4 -4
  249. data/vendor/v8/src/ia32/stub-cache-ia32.cc +383 -400
  250. data/vendor/v8/src/ia32/virtual-frame-ia32.cc +36 -13
  251. data/vendor/v8/src/ia32/virtual-frame-ia32.h +11 -5
  252. data/vendor/v8/src/ic-inl.h +12 -2
  253. data/vendor/v8/src/ic.cc +304 -221
  254. data/vendor/v8/src/ic.h +115 -58
  255. data/vendor/v8/src/interpreter-irregexp.cc +25 -21
  256. data/vendor/v8/src/interpreter-irregexp.h +2 -1
  257. data/vendor/v8/src/isolate.cc +883 -0
  258. data/vendor/v8/src/isolate.h +1304 -0
  259. data/vendor/v8/src/json.js +10 -10
  260. data/vendor/v8/src/jsregexp.cc +111 -80
  261. data/vendor/v8/src/jsregexp.h +6 -7
  262. data/vendor/v8/src/jump-target-heavy.cc +5 -8
  263. data/vendor/v8/src/jump-target-heavy.h +0 -6
  264. data/vendor/v8/src/jump-target-inl.h +1 -1
  265. data/vendor/v8/src/jump-target-light.cc +3 -3
  266. data/vendor/v8/src/lithium-allocator-inl.h +2 -0
  267. data/vendor/v8/src/lithium-allocator.cc +42 -30
  268. data/vendor/v8/src/lithium-allocator.h +8 -22
  269. data/vendor/v8/src/lithium.cc +1 -0
  270. data/vendor/v8/src/liveedit.cc +141 -99
  271. data/vendor/v8/src/liveedit.h +7 -2
  272. data/vendor/v8/src/liveobjectlist-inl.h +90 -0
  273. data/vendor/v8/src/liveobjectlist.cc +2537 -1
  274. data/vendor/v8/src/liveobjectlist.h +245 -35
  275. data/vendor/v8/src/log-utils.cc +122 -35
  276. data/vendor/v8/src/log-utils.h +33 -36
  277. data/vendor/v8/src/log.cc +299 -241
  278. data/vendor/v8/src/log.h +177 -110
  279. data/vendor/v8/src/mark-compact.cc +612 -470
  280. data/vendor/v8/src/mark-compact.h +153 -80
  281. data/vendor/v8/src/messages.cc +16 -14
  282. data/vendor/v8/src/messages.js +30 -7
  283. data/vendor/v8/src/mips/assembler-mips-inl.h +155 -35
  284. data/vendor/v8/src/mips/assembler-mips.cc +1093 -219
  285. data/vendor/v8/src/mips/assembler-mips.h +552 -153
  286. data/vendor/v8/src/mips/builtins-mips.cc +43 -100
  287. data/vendor/v8/src/mips/code-stubs-mips.cc +752 -0
  288. data/vendor/v8/src/mips/code-stubs-mips.h +511 -0
  289. data/vendor/v8/src/mips/codegen-mips-inl.h +8 -14
  290. data/vendor/v8/src/mips/codegen-mips.cc +672 -896
  291. data/vendor/v8/src/mips/codegen-mips.h +271 -69
  292. data/vendor/v8/src/mips/constants-mips.cc +44 -20
  293. data/vendor/v8/src/mips/constants-mips.h +238 -40
  294. data/vendor/v8/src/mips/cpu-mips.cc +20 -3
  295. data/vendor/v8/src/mips/debug-mips.cc +35 -7
  296. data/vendor/v8/src/mips/deoptimizer-mips.cc +91 -0
  297. data/vendor/v8/src/mips/disasm-mips.cc +329 -93
  298. data/vendor/v8/src/mips/frames-mips.cc +2 -50
  299. data/vendor/v8/src/mips/frames-mips.h +24 -9
  300. data/vendor/v8/src/mips/full-codegen-mips.cc +473 -23
  301. data/vendor/v8/src/mips/ic-mips.cc +81 -45
  302. data/vendor/v8/src/mips/jump-target-mips.cc +11 -106
  303. data/vendor/v8/src/mips/lithium-codegen-mips.h +65 -0
  304. data/vendor/v8/src/mips/lithium-mips.h +304 -0
  305. data/vendor/v8/src/mips/macro-assembler-mips.cc +2391 -390
  306. data/vendor/v8/src/mips/macro-assembler-mips.h +718 -121
  307. data/vendor/v8/src/mips/regexp-macro-assembler-mips.cc +478 -0
  308. data/vendor/v8/src/mips/regexp-macro-assembler-mips.h +250 -0
  309. data/vendor/v8/src/mips/register-allocator-mips-inl.h +0 -3
  310. data/vendor/v8/src/mips/register-allocator-mips.h +3 -2
  311. data/vendor/v8/src/mips/simulator-mips.cc +1009 -221
  312. data/vendor/v8/src/mips/simulator-mips.h +119 -36
  313. data/vendor/v8/src/mips/stub-cache-mips.cc +331 -148
  314. data/vendor/v8/src/mips/{fast-codegen-mips.cc → virtual-frame-mips-inl.h} +11 -30
  315. data/vendor/v8/src/mips/virtual-frame-mips.cc +137 -149
  316. data/vendor/v8/src/mips/virtual-frame-mips.h +294 -312
  317. data/vendor/v8/src/mirror-debugger.js +9 -8
  318. data/vendor/v8/src/mksnapshot.cc +2 -2
  319. data/vendor/v8/src/objects-debug.cc +16 -16
  320. data/vendor/v8/src/objects-inl.h +421 -195
  321. data/vendor/v8/src/objects-printer.cc +7 -7
  322. data/vendor/v8/src/objects-visiting.cc +1 -1
  323. data/vendor/v8/src/objects-visiting.h +33 -12
  324. data/vendor/v8/src/objects.cc +935 -658
  325. data/vendor/v8/src/objects.h +234 -139
  326. data/vendor/v8/src/parser.cc +484 -439
  327. data/vendor/v8/src/parser.h +35 -14
  328. data/vendor/v8/src/platform-cygwin.cc +173 -107
  329. data/vendor/v8/src/platform-freebsd.cc +224 -72
  330. data/vendor/v8/src/platform-linux.cc +234 -95
  331. data/vendor/v8/src/platform-macos.cc +215 -82
  332. data/vendor/v8/src/platform-nullos.cc +9 -3
  333. data/vendor/v8/src/platform-openbsd.cc +22 -7
  334. data/vendor/v8/src/platform-posix.cc +30 -5
  335. data/vendor/v8/src/platform-solaris.cc +120 -38
  336. data/vendor/v8/src/platform-tls-mac.h +62 -0
  337. data/vendor/v8/src/platform-tls-win32.h +62 -0
  338. data/vendor/v8/src/platform-tls.h +50 -0
  339. data/vendor/v8/src/platform-win32.cc +195 -97
  340. data/vendor/v8/src/platform.h +72 -15
  341. data/vendor/v8/src/preparse-data.cc +2 -0
  342. data/vendor/v8/src/preparser-api.cc +8 -2
  343. data/vendor/v8/src/preparser.cc +1 -1
  344. data/vendor/v8/src/prettyprinter.cc +43 -52
  345. data/vendor/v8/src/prettyprinter.h +1 -1
  346. data/vendor/v8/src/profile-generator-inl.h +0 -28
  347. data/vendor/v8/src/profile-generator.cc +942 -685
  348. data/vendor/v8/src/profile-generator.h +210 -176
  349. data/vendor/v8/src/property.cc +6 -0
  350. data/vendor/v8/src/property.h +14 -3
  351. data/vendor/v8/src/regexp-macro-assembler-irregexp.cc +1 -1
  352. data/vendor/v8/src/regexp-macro-assembler.cc +28 -19
  353. data/vendor/v8/src/regexp-macro-assembler.h +11 -6
  354. data/vendor/v8/src/regexp-stack.cc +18 -10
  355. data/vendor/v8/src/regexp-stack.h +45 -21
  356. data/vendor/v8/src/regexp.js +3 -3
  357. data/vendor/v8/src/register-allocator-inl.h +3 -3
  358. data/vendor/v8/src/register-allocator.cc +1 -7
  359. data/vendor/v8/src/register-allocator.h +5 -15
  360. data/vendor/v8/src/rewriter.cc +2 -1
  361. data/vendor/v8/src/runtime-profiler.cc +158 -128
  362. data/vendor/v8/src/runtime-profiler.h +131 -15
  363. data/vendor/v8/src/runtime.cc +2409 -1692
  364. data/vendor/v8/src/runtime.h +93 -17
  365. data/vendor/v8/src/safepoint-table.cc +3 -0
  366. data/vendor/v8/src/safepoint-table.h +9 -3
  367. data/vendor/v8/src/scanner-base.cc +21 -28
  368. data/vendor/v8/src/scanner-base.h +22 -11
  369. data/vendor/v8/src/scanner.cc +3 -5
  370. data/vendor/v8/src/scanner.h +4 -2
  371. data/vendor/v8/src/scopeinfo.cc +11 -16
  372. data/vendor/v8/src/scopeinfo.h +26 -15
  373. data/vendor/v8/src/scopes.cc +67 -37
  374. data/vendor/v8/src/scopes.h +26 -12
  375. data/vendor/v8/src/serialize.cc +193 -154
  376. data/vendor/v8/src/serialize.h +41 -36
  377. data/vendor/v8/src/small-pointer-list.h +163 -0
  378. data/vendor/v8/src/snapshot-common.cc +1 -1
  379. data/vendor/v8/src/snapshot.h +3 -1
  380. data/vendor/v8/src/spaces-inl.h +30 -25
  381. data/vendor/v8/src/spaces.cc +263 -370
  382. data/vendor/v8/src/spaces.h +178 -166
  383. data/vendor/v8/src/string-search.cc +4 -3
  384. data/vendor/v8/src/string-search.h +21 -20
  385. data/vendor/v8/src/string-stream.cc +32 -24
  386. data/vendor/v8/src/string.js +7 -7
  387. data/vendor/v8/src/stub-cache.cc +324 -248
  388. data/vendor/v8/src/stub-cache.h +181 -155
  389. data/vendor/v8/src/token.cc +3 -3
  390. data/vendor/v8/src/token.h +3 -3
  391. data/vendor/v8/src/top.cc +218 -390
  392. data/vendor/v8/src/type-info.cc +98 -32
  393. data/vendor/v8/src/type-info.h +10 -3
  394. data/vendor/v8/src/unicode.cc +1 -1
  395. data/vendor/v8/src/unicode.h +1 -1
  396. data/vendor/v8/src/utils.h +3 -0
  397. data/vendor/v8/src/v8-counters.cc +18 -11
  398. data/vendor/v8/src/v8-counters.h +34 -13
  399. data/vendor/v8/src/v8.cc +66 -121
  400. data/vendor/v8/src/v8.h +7 -4
  401. data/vendor/v8/src/v8globals.h +18 -12
  402. data/vendor/v8/src/{memory.h → v8memory.h} +0 -0
  403. data/vendor/v8/src/v8natives.js +59 -18
  404. data/vendor/v8/src/v8threads.cc +127 -114
  405. data/vendor/v8/src/v8threads.h +42 -35
  406. data/vendor/v8/src/v8utils.h +2 -39
  407. data/vendor/v8/src/variables.h +1 -1
  408. data/vendor/v8/src/version.cc +26 -5
  409. data/vendor/v8/src/version.h +4 -0
  410. data/vendor/v8/src/virtual-frame-heavy-inl.h +2 -4
  411. data/vendor/v8/src/virtual-frame-light-inl.h +5 -4
  412. data/vendor/v8/src/vm-state-inl.h +21 -17
  413. data/vendor/v8/src/vm-state.h +7 -5
  414. data/vendor/v8/src/win32-headers.h +1 -0
  415. data/vendor/v8/src/x64/assembler-x64-inl.h +12 -11
  416. data/vendor/v8/src/x64/assembler-x64.cc +80 -40
  417. data/vendor/v8/src/x64/assembler-x64.h +67 -17
  418. data/vendor/v8/src/x64/builtins-x64.cc +34 -33
  419. data/vendor/v8/src/x64/code-stubs-x64.cc +636 -377
  420. data/vendor/v8/src/x64/code-stubs-x64.h +14 -48
  421. data/vendor/v8/src/x64/codegen-x64-inl.h +1 -1
  422. data/vendor/v8/src/x64/codegen-x64.cc +158 -136
  423. data/vendor/v8/src/x64/codegen-x64.h +4 -1
  424. data/vendor/v8/src/x64/cpu-x64.cc +7 -5
  425. data/vendor/v8/src/x64/debug-x64.cc +8 -6
  426. data/vendor/v8/src/x64/deoptimizer-x64.cc +195 -20
  427. data/vendor/v8/src/x64/disasm-x64.cc +42 -23
  428. data/vendor/v8/src/x64/frames-x64.cc +1 -1
  429. data/vendor/v8/src/x64/frames-x64.h +2 -2
  430. data/vendor/v8/src/x64/full-codegen-x64.cc +780 -218
  431. data/vendor/v8/src/x64/ic-x64.cc +77 -79
  432. data/vendor/v8/src/x64/jump-target-x64.cc +1 -1
  433. data/vendor/v8/src/x64/lithium-codegen-x64.cc +698 -181
  434. data/vendor/v8/src/x64/lithium-codegen-x64.h +31 -6
  435. data/vendor/v8/src/x64/lithium-x64.cc +136 -54
  436. data/vendor/v8/src/x64/lithium-x64.h +142 -51
  437. data/vendor/v8/src/x64/macro-assembler-x64.cc +456 -187
  438. data/vendor/v8/src/x64/macro-assembler-x64.h +166 -34
  439. data/vendor/v8/src/x64/regexp-macro-assembler-x64.cc +44 -28
  440. data/vendor/v8/src/x64/regexp-macro-assembler-x64.h +8 -4
  441. data/vendor/v8/src/x64/register-allocator-x64-inl.h +3 -3
  442. data/vendor/v8/src/x64/register-allocator-x64.cc +12 -8
  443. data/vendor/v8/src/x64/simulator-x64.h +5 -5
  444. data/vendor/v8/src/x64/stub-cache-x64.cc +299 -344
  445. data/vendor/v8/src/x64/virtual-frame-x64.cc +37 -13
  446. data/vendor/v8/src/x64/virtual-frame-x64.h +13 -7
  447. data/vendor/v8/src/zone-inl.h +49 -3
  448. data/vendor/v8/src/zone.cc +42 -41
  449. data/vendor/v8/src/zone.h +37 -34
  450. data/vendor/v8/test/benchmarks/testcfg.py +100 -0
  451. data/vendor/v8/test/cctest/SConscript +5 -4
  452. data/vendor/v8/test/cctest/cctest.h +3 -2
  453. data/vendor/v8/test/cctest/cctest.status +6 -11
  454. data/vendor/v8/test/cctest/test-accessors.cc +3 -3
  455. data/vendor/v8/test/cctest/test-alloc.cc +39 -33
  456. data/vendor/v8/test/cctest/test-api.cc +1092 -205
  457. data/vendor/v8/test/cctest/test-assembler-arm.cc +39 -25
  458. data/vendor/v8/test/cctest/test-assembler-ia32.cc +36 -37
  459. data/vendor/v8/test/cctest/test-assembler-mips.cc +1098 -40
  460. data/vendor/v8/test/cctest/test-assembler-x64.cc +32 -25
  461. data/vendor/v8/test/cctest/test-ast.cc +1 -0
  462. data/vendor/v8/test/cctest/test-circular-queue.cc +8 -5
  463. data/vendor/v8/test/cctest/test-compiler.cc +24 -24
  464. data/vendor/v8/test/cctest/test-cpu-profiler.cc +140 -5
  465. data/vendor/v8/test/cctest/test-dataflow.cc +1 -0
  466. data/vendor/v8/test/cctest/test-debug.cc +136 -77
  467. data/vendor/v8/test/cctest/test-decls.cc +1 -1
  468. data/vendor/v8/test/cctest/test-deoptimization.cc +25 -24
  469. data/vendor/v8/test/cctest/test-disasm-arm.cc +9 -4
  470. data/vendor/v8/test/cctest/test-disasm-ia32.cc +10 -8
  471. data/vendor/v8/test/cctest/test-func-name-inference.cc +10 -4
  472. data/vendor/v8/test/cctest/test-heap-profiler.cc +226 -164
  473. data/vendor/v8/test/cctest/test-heap.cc +240 -217
  474. data/vendor/v8/test/cctest/test-liveedit.cc +1 -0
  475. data/vendor/v8/test/cctest/test-log-stack-tracer.cc +18 -20
  476. data/vendor/v8/test/cctest/test-log.cc +114 -108
  477. data/vendor/v8/test/cctest/test-macro-assembler-x64.cc +247 -177
  478. data/vendor/v8/test/cctest/test-mark-compact.cc +129 -90
  479. data/vendor/v8/test/cctest/test-parsing.cc +15 -14
  480. data/vendor/v8/test/cctest/test-platform-linux.cc +1 -0
  481. data/vendor/v8/test/cctest/test-platform-tls.cc +66 -0
  482. data/vendor/v8/test/cctest/test-platform-win32.cc +1 -0
  483. data/vendor/v8/test/cctest/test-profile-generator.cc +1 -1
  484. data/vendor/v8/test/cctest/test-regexp.cc +53 -41
  485. data/vendor/v8/test/cctest/test-reloc-info.cc +18 -11
  486. data/vendor/v8/test/cctest/test-serialize.cc +44 -43
  487. data/vendor/v8/test/cctest/test-sockets.cc +8 -3
  488. data/vendor/v8/test/cctest/test-spaces.cc +47 -29
  489. data/vendor/v8/test/cctest/test-strings.cc +20 -20
  490. data/vendor/v8/test/cctest/test-thread-termination.cc +8 -3
  491. data/vendor/v8/test/cctest/test-threads.cc +5 -3
  492. data/vendor/v8/test/cctest/test-utils.cc +5 -4
  493. data/vendor/v8/test/cctest/testcfg.py +7 -3
  494. data/vendor/v8/test/es5conform/es5conform.status +2 -77
  495. data/vendor/v8/test/es5conform/testcfg.py +1 -1
  496. data/vendor/v8/test/message/testcfg.py +1 -1
  497. data/vendor/v8/test/mjsunit/accessors-on-global-object.js +3 -3
  498. data/vendor/v8/test/mjsunit/array-concat.js +43 -1
  499. data/vendor/v8/test/mjsunit/array-join.js +25 -0
  500. data/vendor/v8/test/mjsunit/bitops-info.js +7 -1
  501. data/vendor/v8/test/mjsunit/compiler/array-length.js +2 -2
  502. data/vendor/v8/test/mjsunit/compiler/global-accessors.js +47 -0
  503. data/vendor/v8/test/mjsunit/compiler/pic.js +1 -1
  504. data/vendor/v8/test/mjsunit/compiler/regress-loadfield.js +65 -0
  505. data/vendor/v8/test/mjsunit/math-sqrt.js +5 -1
  506. data/vendor/v8/test/mjsunit/mjsunit.js +59 -8
  507. data/vendor/v8/test/mjsunit/mjsunit.status +0 -12
  508. data/vendor/v8/test/mjsunit/mul-exhaustive.js +129 -11
  509. data/vendor/v8/test/mjsunit/negate-zero.js +1 -1
  510. data/vendor/v8/test/mjsunit/object-freeze.js +5 -13
  511. data/vendor/v8/test/mjsunit/object-prevent-extensions.js +9 -50
  512. data/vendor/v8/test/mjsunit/object-seal.js +4 -13
  513. data/vendor/v8/test/mjsunit/override-eval-with-non-function.js +36 -0
  514. data/vendor/v8/test/mjsunit/regress/regress-1145.js +54 -0
  515. data/vendor/v8/test/mjsunit/regress/regress-1172-bis.js +37 -0
  516. data/vendor/v8/test/mjsunit/regress/regress-1181.js +54 -0
  517. data/vendor/v8/test/mjsunit/regress/regress-1207.js +35 -0
  518. data/vendor/v8/test/mjsunit/regress/regress-1209.js +34 -0
  519. data/vendor/v8/test/mjsunit/regress/regress-1210.js +48 -0
  520. data/vendor/v8/test/mjsunit/regress/regress-1213.js +43 -0
  521. data/vendor/v8/test/mjsunit/regress/regress-1218.js +29 -0
  522. data/vendor/v8/test/mjsunit/regress/regress-1229.js +79 -0
  523. data/vendor/v8/test/mjsunit/regress/regress-1233.js +47 -0
  524. data/vendor/v8/test/mjsunit/regress/regress-1236.js +34 -0
  525. data/vendor/v8/test/mjsunit/regress/regress-1237.js +36 -0
  526. data/vendor/v8/test/mjsunit/regress/regress-1240.js +39 -0
  527. data/vendor/v8/test/mjsunit/regress/regress-1257.js +58 -0
  528. data/vendor/v8/test/mjsunit/regress/regress-1278.js +69 -0
  529. data/vendor/v8/test/mjsunit/regress/regress-create-exception.js +1 -0
  530. data/vendor/v8/test/mjsunit/regress/regress-lazy-deopt-reloc.js +52 -0
  531. data/vendor/v8/test/mjsunit/sin-cos.js +15 -10
  532. data/vendor/v8/test/mjsunit/smi-negative-zero.js +2 -2
  533. data/vendor/v8/test/mjsunit/str-to-num.js +1 -1
  534. data/vendor/v8/test/mjsunit/strict-mode.js +435 -0
  535. data/vendor/v8/test/mjsunit/testcfg.py +23 -6
  536. data/vendor/v8/test/mozilla/mozilla.status +0 -2
  537. data/vendor/v8/test/mozilla/testcfg.py +1 -1
  538. data/vendor/v8/test/preparser/empty.js +28 -0
  539. data/vendor/v8/test/preparser/functions-only.js +38 -0
  540. data/vendor/v8/test/preparser/non-alphanum.js +34 -0
  541. data/vendor/v8/test/preparser/symbols-only.js +49 -0
  542. data/vendor/v8/test/preparser/testcfg.py +90 -0
  543. data/vendor/v8/test/sputnik/testcfg.py +1 -1
  544. data/vendor/v8/test/test262/README +16 -0
  545. data/vendor/v8/test/test262/harness-adapt.js +80 -0
  546. data/vendor/v8/test/test262/test262.status +1506 -0
  547. data/vendor/v8/test/test262/testcfg.py +123 -0
  548. data/vendor/v8/tools/freebsd-tick-processor +10 -0
  549. data/vendor/v8/tools/gyp/v8.gyp +8 -33
  550. data/vendor/v8/tools/linux-tick-processor +5 -3
  551. data/vendor/v8/tools/test.py +37 -14
  552. data/vendor/v8/tools/tickprocessor.js +22 -8
  553. data/vendor/v8/tools/visual_studio/v8_base.vcproj +13 -1
  554. data/vendor/v8/tools/visual_studio/v8_base_arm.vcproj +5 -1
  555. data/vendor/v8/tools/visual_studio/v8_base_x64.vcproj +5 -1
  556. data/vendor/v8/tools/visual_studio/x64.vsprops +1 -0
  557. metadata +1495 -1341
  558. data/ext/extconf.rb +0 -22
  559. data/ext/mustang.cpp +0 -58
  560. data/vendor/v8/src/top.h +0 -608
@@ -1,4 +1,4 @@
1
- // Copyright 2009 the V8 project authors. All rights reserved.
1
+ // Copyright 2010 the V8 project authors. All rights reserved.
2
2
  // Redistribution and use in source and binary forms, with or without
3
3
  // modification, are permitted provided that the following conditions are
4
4
  // met:
@@ -104,7 +104,8 @@ class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler {
104
104
  Handle<String> subject,
105
105
  int* offsets_vector,
106
106
  int offsets_vector_length,
107
- int previous_index);
107
+ int previous_index,
108
+ Isolate* isolate);
108
109
 
109
110
  static Result Execute(Code* code,
110
111
  String* input,
@@ -142,6 +143,7 @@ class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler {
142
143
  static const int kStackHighEnd = kRegisterOutput + kPointerSize;
143
144
  // DirectCall is passed as 32 bit int (values 0 or 1).
144
145
  static const int kDirectCall = kStackHighEnd + kPointerSize;
146
+ static const int kIsolate = kDirectCall + kPointerSize;
145
147
  #else
146
148
  // In AMD64 ABI Calling Convention, the first six integer parameters
147
149
  // are passed as registers, and caller must allocate space on the stack
@@ -153,6 +155,7 @@ class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler {
153
155
  static const int kRegisterOutput = kInputEnd - kPointerSize;
154
156
  static const int kStackHighEnd = kRegisterOutput - kPointerSize;
155
157
  static const int kDirectCall = kFrameAlign;
158
+ static const int kIsolate = kDirectCall + kPointerSize;
156
159
  #endif
157
160
 
158
161
  #ifdef _WIN64
@@ -215,7 +218,7 @@ class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler {
215
218
  void BranchOrBacktrack(Condition condition, Label* to);
216
219
 
217
220
  void MarkPositionForCodeRelativeFixup() {
218
- code_relative_fixup_positions_.Add(masm_->pc_offset());
221
+ code_relative_fixup_positions_.Add(masm_.pc_offset());
219
222
  }
220
223
 
221
224
  void FixupCodeRelativePositions();
@@ -247,7 +250,8 @@ class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler {
247
250
  // Increments the stack pointer (rcx) by a word size.
248
251
  inline void Drop();
249
252
 
250
- MacroAssembler* masm_;
253
+ MacroAssembler masm_;
254
+ MacroAssembler::NoRootArrayScope no_root_array_scope_;
251
255
 
252
256
  ZoneList<int> code_relative_fixup_positions_;
253
257
 
@@ -60,10 +60,10 @@ int RegisterAllocator::ToNumber(Register reg) {
60
60
  6, // r9
61
61
  -1, // r10 Scratch register.
62
62
  8, // r11
63
- 9, // r12
63
+ -1, // r12 Smi constant.
64
64
  -1, // r13 Roots array. This is callee saved.
65
65
  7, // r14
66
- -1 // r15 Smi constant register.
66
+ 9 // r15
67
67
  };
68
68
  return kNumbers[reg.code()];
69
69
  }
@@ -72,7 +72,7 @@ int RegisterAllocator::ToNumber(Register reg) {
72
72
  Register RegisterAllocator::ToRegister(int num) {
73
73
  ASSERT(num >= 0 && num < kNumRegisters);
74
74
  const Register kRegisters[] =
75
- { rax, rbx, rcx, rdx, rdi, r8, r9, r14, r11, r12 };
75
+ { rax, rbx, rcx, rdx, rdi, r8, r9, r14, r11, r15 };
76
76
  return kRegisters[num];
77
77
  }
78
78
 
@@ -42,9 +42,11 @@ namespace internal {
42
42
  void Result::ToRegister() {
43
43
  ASSERT(is_valid());
44
44
  if (is_constant()) {
45
- Result fresh = CodeGeneratorScope::Current()->allocator()->Allocate();
45
+ CodeGenerator* code_generator =
46
+ CodeGeneratorScope::Current(Isolate::Current());
47
+ Result fresh = code_generator->allocator()->Allocate();
46
48
  ASSERT(fresh.is_valid());
47
- CodeGeneratorScope::Current()->masm()->Move(fresh.reg(), handle());
49
+ code_generator->masm()->Move(fresh.reg(), handle());
48
50
  // This result becomes a copy of the fresh one.
49
51
  fresh.set_type_info(type_info());
50
52
  *this = fresh;
@@ -55,21 +57,23 @@ void Result::ToRegister() {
55
57
 
56
58
  void Result::ToRegister(Register target) {
57
59
  ASSERT(is_valid());
60
+ CodeGenerator* code_generator =
61
+ CodeGeneratorScope::Current(Isolate::Current());
58
62
  if (!is_register() || !reg().is(target)) {
59
- Result fresh = CodeGeneratorScope::Current()->allocator()->Allocate(target);
63
+ Result fresh = code_generator->allocator()->Allocate(target);
60
64
  ASSERT(fresh.is_valid());
61
65
  if (is_register()) {
62
- CodeGeneratorScope::Current()->masm()->movq(fresh.reg(), reg());
66
+ code_generator->masm()->movq(fresh.reg(), reg());
63
67
  } else {
64
68
  ASSERT(is_constant());
65
- CodeGeneratorScope::Current()->masm()->Move(fresh.reg(), handle());
69
+ code_generator->masm()->Move(fresh.reg(), handle());
66
70
  }
67
71
  fresh.set_type_info(type_info());
68
72
  *this = fresh;
69
73
  } else if (is_register() && reg().is(target)) {
70
- ASSERT(CodeGeneratorScope::Current()->has_valid_frame());
71
- CodeGeneratorScope::Current()->frame()->Spill(target);
72
- ASSERT(CodeGeneratorScope::Current()->allocator()->count(target) == 1);
74
+ ASSERT(code_generator->has_valid_frame());
75
+ code_generator->frame()->Spill(target);
76
+ ASSERT(code_generator->allocator()->count(target) == 1);
73
77
  }
74
78
  ASSERT(is_register());
75
79
  ASSERT(reg().is(target));
@@ -1,4 +1,4 @@
1
- // Copyright 2009 the V8 project authors. All rights reserved.
1
+ // Copyright 2011 the V8 project authors. All rights reserved.
2
2
  // Redistribution and use in source and binary forms, with or without
3
3
  // modification, are permitted provided that the following conditions are
4
4
  // met:
@@ -40,12 +40,12 @@ namespace internal {
40
40
  (entry(p0, p1, p2, p3, p4))
41
41
 
42
42
  typedef int (*regexp_matcher)(String*, int, const byte*,
43
- const byte*, int*, Address, int);
43
+ const byte*, int*, Address, int, Isolate*);
44
44
 
45
45
  // Call the generated regexp code directly. The code at the entry address should
46
- // expect seven int/pointer sized arguments and return an int.
47
- #define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6) \
48
- (FUNCTION_CAST<regexp_matcher>(entry)(p0, p1, p2, p3, p4, p5, p6))
46
+ // expect eight int/pointer sized arguments and return an int.
47
+ #define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7) \
48
+ (FUNCTION_CAST<regexp_matcher>(entry)(p0, p1, p2, p3, p4, p5, p6, p7))
49
49
 
50
50
  #define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
51
51
  (reinterpret_cast<TryCatch*>(try_catch_address))
@@ -1,4 +1,4 @@
1
- // Copyright 2010 the V8 project authors. All rights reserved.
1
+ // Copyright 2011 the V8 project authors. All rights reserved.
2
2
  // Redistribution and use in source and binary forms, with or without
3
3
  // modification, are permitted provided that the following conditions are
4
4
  // met:
@@ -39,7 +39,8 @@ namespace internal {
39
39
  #define __ ACCESS_MASM(masm)
40
40
 
41
41
 
42
- static void ProbeTable(MacroAssembler* masm,
42
+ static void ProbeTable(Isolate* isolate,
43
+ MacroAssembler* masm,
43
44
  Code::Flags flags,
44
45
  StubCache::Table table,
45
46
  Register name,
@@ -48,10 +49,10 @@ static void ProbeTable(MacroAssembler* masm,
48
49
  ASSERT_EQ(16, sizeof(StubCache::Entry));
49
50
  // The offset register holds the entry offset times four (due to masking
50
51
  // and shifting optimizations).
51
- ExternalReference key_offset(SCTableReference::keyReference(table));
52
+ ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
52
53
  Label miss;
53
54
 
54
- __ movq(kScratchRegister, key_offset);
55
+ __ LoadAddress(kScratchRegister, key_offset);
55
56
  // Check that the key in the entry matches the name.
56
57
  // Multiply entry offset by 16 to get the entry address. Since the
57
58
  // offset register already holds the entry offset times four, multiply
@@ -88,8 +89,9 @@ static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
88
89
  Register r0,
89
90
  Register r1) {
90
91
  ASSERT(name->IsSymbol());
91
- __ IncrementCounter(&Counters::negative_lookups, 1);
92
- __ IncrementCounter(&Counters::negative_lookups_miss, 1);
92
+ Counters* counters = masm->isolate()->counters();
93
+ __ IncrementCounter(counters->negative_lookups(), 1);
94
+ __ IncrementCounter(counters->negative_lookups_miss(), 1);
93
95
 
94
96
  Label done;
95
97
  __ movq(r0, FieldOperand(receiver, HeapObject::kMapOffset));
@@ -151,7 +153,7 @@ static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
151
153
  ASSERT_EQ(kSmiTagSize, 1);
152
154
  __ movq(entity_name, Operand(properties, index, times_pointer_size,
153
155
  kElementsStartOffset - kHeapObjectTag));
154
- __ Cmp(entity_name, Factory::undefined_value());
156
+ __ Cmp(entity_name, masm->isolate()->factory()->undefined_value());
155
157
  // __ jmp(miss_label);
156
158
  if (i != kProbes - 1) {
157
159
  __ j(equal, &done);
@@ -172,7 +174,7 @@ static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
172
174
  }
173
175
 
174
176
  __ bind(&done);
175
- __ DecrementCounter(&Counters::negative_lookups_miss, 1);
177
+ __ DecrementCounter(counters->negative_lookups_miss(), 1);
176
178
  }
177
179
 
178
180
 
@@ -183,6 +185,7 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
183
185
  Register scratch,
184
186
  Register extra,
185
187
  Register extra2) {
188
+ Isolate* isolate = masm->isolate();
186
189
  Label miss;
187
190
  USE(extra); // The register extra is not used on the X64 platform.
188
191
  USE(extra2); // The register extra2 is not used on the X64 platform.
@@ -212,7 +215,7 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
212
215
  __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));
213
216
 
214
217
  // Probe the primary table.
215
- ProbeTable(masm, flags, kPrimary, name, scratch);
218
+ ProbeTable(isolate, masm, flags, kPrimary, name, scratch);
216
219
 
217
220
  // Primary miss: Compute hash for secondary probe.
218
221
  __ movl(scratch, FieldOperand(name, String::kHashFieldOffset));
@@ -224,7 +227,7 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
224
227
  __ and_(scratch, Immediate((kSecondaryTableSize - 1) << kHeapObjectTagSize));
225
228
 
226
229
  // Probe the secondary table.
227
- ProbeTable(masm, flags, kSecondary, name, scratch);
230
+ ProbeTable(isolate, masm, flags, kSecondary, name, scratch);
228
231
 
229
232
  // Cache miss: Fall-through and let caller handle the miss by
230
233
  // entering the runtime system.
@@ -253,13 +256,15 @@ void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
253
256
 
254
257
  void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
255
258
  MacroAssembler* masm, int index, Register prototype, Label* miss) {
259
+ Isolate* isolate = masm->isolate();
256
260
  // Check we're still in the same context.
257
- __ Move(prototype, Top::global());
261
+ __ Move(prototype, isolate->global());
258
262
  __ cmpq(Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)),
259
263
  prototype);
260
264
  __ j(not_equal, miss);
261
265
  // Get the global function with the given index.
262
- JSFunction* function = JSFunction::cast(Top::global_context()->get(index));
266
+ JSFunction* function =
267
+ JSFunction::cast(isolate->global_context()->get(index));
263
268
  // Load its initial map. The global functions all have initial maps.
264
269
  __ Move(prototype, Handle<Map>(function->initial_map()));
265
270
  // Load the prototype from the initial map.
@@ -375,7 +380,7 @@ static void PushInterceptorArguments(MacroAssembler* masm,
375
380
  JSObject* holder_obj) {
376
381
  __ push(name);
377
382
  InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
378
- ASSERT(!Heap::InNewSpace(interceptor));
383
+ ASSERT(!masm->isolate()->heap()->InNewSpace(interceptor));
379
384
  __ Move(kScratchRegister, Handle<Object>(interceptor));
380
385
  __ push(kScratchRegister);
381
386
  __ push(receiver);
@@ -392,9 +397,10 @@ static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
392
397
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
393
398
 
394
399
  ExternalReference ref =
395
- ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly));
400
+ ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
401
+ masm->isolate());
396
402
  __ movq(rax, Immediate(5));
397
- __ movq(rbx, ref);
403
+ __ LoadAddress(rbx, ref);
398
404
 
399
405
  CEntryStub stub(1);
400
406
  __ CallStub(&stub);
@@ -466,7 +472,7 @@ static MaybeObject* GenerateFastApiCall(MacroAssembler* masm,
466
472
  __ movq(Operand(rsp, 2 * kPointerSize), rdi);
467
473
  Object* call_data = optimization.api_call_info()->data();
468
474
  Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info());
469
- if (Heap::InNewSpace(call_data)) {
475
+ if (masm->isolate()->heap()->InNewSpace(call_data)) {
470
476
  __ Move(rcx, api_call_info_handle);
471
477
  __ movq(rbx, FieldOperand(rcx, CallHandlerInfo::kDataOffset));
472
478
  __ movq(Operand(rsp, 3 * kPointerSize), rbx);
@@ -561,7 +567,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
561
567
  name,
562
568
  holder,
563
569
  miss);
564
- return Heap::undefined_value(); // Success.
570
+ return masm->isolate()->heap()->undefined_value(); // Success.
565
571
  }
566
572
  }
567
573
 
@@ -597,10 +603,11 @@ class CallInterceptorCompiler BASE_EMBEDDED {
597
603
  (depth2 != kInvalidProtoDepth);
598
604
  }
599
605
 
600
- __ IncrementCounter(&Counters::call_const_interceptor, 1);
606
+ Counters* counters = masm->isolate()->counters();
607
+ __ IncrementCounter(counters->call_const_interceptor(), 1);
601
608
 
602
609
  if (can_do_fast_api_call) {
603
- __ IncrementCounter(&Counters::call_const_interceptor_fast_api, 1);
610
+ __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1);
604
611
  ReserveSpaceForFastApiCall(masm, scratch1);
605
612
  }
606
613
 
@@ -660,7 +667,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
660
667
  FreeSpaceForFastApiCall(masm, scratch1);
661
668
  }
662
669
 
663
- return Heap::undefined_value(); // Success.
670
+ return masm->isolate()->heap()->undefined_value(); // Success.
664
671
  }
665
672
 
666
673
  void CompileRegular(MacroAssembler* masm,
@@ -688,7 +695,8 @@ class CallInterceptorCompiler BASE_EMBEDDED {
688
695
  interceptor_holder);
689
696
 
690
697
  __ CallExternalReference(
691
- ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall)),
698
+ ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
699
+ masm->isolate()),
692
700
  5);
693
701
 
694
702
  // Restore the name_ register.
@@ -729,9 +737,9 @@ void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
729
737
  ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
730
738
  Code* code = NULL;
731
739
  if (kind == Code::LOAD_IC) {
732
- code = Builtins::builtin(Builtins::LoadIC_Miss);
740
+ code = masm->isolate()->builtins()->builtin(Builtins::kLoadIC_Miss);
733
741
  } else {
734
- code = Builtins::builtin(Builtins::KeyedLoadIC_Miss);
742
+ code = masm->isolate()->builtins()->builtin(Builtins::kKeyedLoadIC_Miss);
735
743
  }
736
744
 
737
745
  Handle<Code> ic(code);
@@ -776,7 +784,10 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
776
784
  __ push(rax);
777
785
  __ push(scratch);
778
786
  __ TailCallExternalReference(
779
- ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage)), 3, 1);
787
+ ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
788
+ masm->isolate()),
789
+ 3,
790
+ 1);
780
791
  return;
781
792
  }
782
793
 
@@ -836,7 +847,7 @@ MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell(
836
847
  ASSERT(cell->value()->IsTheHole());
837
848
  __ Move(scratch, Handle<Object>(cell));
838
849
  __ Cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset),
839
- Factory::the_hole_value());
850
+ masm->isolate()->factory()->the_hole_value());
840
851
  __ j(not_equal, miss);
841
852
  return cell;
842
853
  }
@@ -885,7 +896,7 @@ Register StubCompiler::CheckPrototypes(JSObject* object,
885
896
  !current->IsJSGlobalObject() &&
886
897
  !current->IsJSGlobalProxy()) {
887
898
  if (!name->IsSymbol()) {
888
- MaybeObject* lookup_result = Heap::LookupSymbol(name);
899
+ MaybeObject* lookup_result = heap()->LookupSymbol(name);
889
900
  if (lookup_result->IsFailure()) {
890
901
  set_failure(Failure::cast(lookup_result));
891
902
  return reg;
@@ -905,7 +916,7 @@ Register StubCompiler::CheckPrototypes(JSObject* object,
905
916
  __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
906
917
  reg = holder_reg; // from now the object is in holder_reg
907
918
  __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
908
- } else if (Heap::InNewSpace(prototype)) {
919
+ } else if (heap()->InNewSpace(prototype)) {
909
920
  // Get the map of the current object.
910
921
  __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
911
922
  __ Cmp(scratch1, Handle<Map>(current->map()));
@@ -956,7 +967,7 @@ Register StubCompiler::CheckPrototypes(JSObject* object,
956
967
  __ j(not_equal, miss);
957
968
 
958
969
  // Log the check depth.
959
- LOG(IntEvent("check-maps-depth", depth + 1));
970
+ LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
960
971
 
961
972
  // Perform security check for access to the global object and return
962
973
  // the holder register.
@@ -1039,7 +1050,7 @@ MaybeObject* StubCompiler::GenerateLoadCallback(JSObject* object,
1039
1050
 
1040
1051
  __ push(receiver); // receiver
1041
1052
  __ push(reg); // holder
1042
- if (Heap::InNewSpace(callback_handle->data())) {
1053
+ if (heap()->InNewSpace(callback_handle->data())) {
1043
1054
  __ Move(scratch1, callback_handle);
1044
1055
  __ push(FieldOperand(scratch1, AccessorInfo::kDataOffset)); // data
1045
1056
  } else {
@@ -1230,7 +1241,8 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object,
1230
1241
  __ push(scratch2); // restore return address
1231
1242
 
1232
1243
  ExternalReference ref =
1233
- ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
1244
+ ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
1245
+ isolate());
1234
1246
  __ TailCallExternalReference(ref, 5, 1);
1235
1247
  }
1236
1248
  } else { // !compile_followup_inline
@@ -1245,7 +1257,7 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object,
1245
1257
  __ push(scratch2); // restore old return address
1246
1258
 
1247
1259
  ExternalReference ref = ExternalReference(
1248
- IC_Utility(IC::kLoadPropertyWithInterceptorForLoad));
1260
+ IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), isolate());
1249
1261
  __ TailCallExternalReference(ref, 5, 1);
1250
1262
  }
1251
1263
  }
@@ -1291,7 +1303,7 @@ void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
1291
1303
  __ movq(rdi, FieldOperand(rdi, JSGlobalPropertyCell::kValueOffset));
1292
1304
 
1293
1305
  // Check that the cell contains the same function.
1294
- if (Heap::InNewSpace(function)) {
1306
+ if (heap()->InNewSpace(function)) {
1295
1307
  // We can't embed a pointer to a function in new space so we have
1296
1308
  // to verify that the shared function info is unchanged. This has
1297
1309
  // the nice side effect that multiple closures based on the same
@@ -1313,8 +1325,8 @@ void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
1313
1325
 
1314
1326
 
1315
1327
  MaybeObject* CallStubCompiler::GenerateMissBranch() {
1316
- MaybeObject* maybe_obj = StubCache::ComputeCallMiss(arguments().immediate(),
1317
- kind_);
1328
+ MaybeObject* maybe_obj = isolate()->stub_cache()->ComputeCallMiss(
1329
+ arguments().immediate(), kind_);
1318
1330
  Object* obj;
1319
1331
  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1320
1332
  __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
@@ -1369,10 +1381,8 @@ MaybeObject* CallStubCompiler::CompileCallField(JSObject* object,
1369
1381
 
1370
1382
  // Handle call cache miss.
1371
1383
  __ bind(&miss);
1372
- Object* obj;
1373
- { MaybeObject* maybe_obj = GenerateMissBranch();
1374
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1375
- }
1384
+ MaybeObject* maybe_result = GenerateMissBranch();
1385
+ if (maybe_result->IsFailure()) return maybe_result;
1376
1386
 
1377
1387
  // Return the generated code.
1378
1388
  return GetCode(FIELD, name);
@@ -1393,7 +1403,7 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
1393
1403
  // -----------------------------------
1394
1404
 
1395
1405
  // If object is not an array, bail out to regular call.
1396
- if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value();
1406
+ if (!object->IsJSArray() || cell != NULL) return heap()->undefined_value();
1397
1407
 
1398
1408
  Label miss;
1399
1409
 
@@ -1427,7 +1437,7 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
1427
1437
 
1428
1438
  // Check that the elements are in fast mode and writable.
1429
1439
  __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
1430
- Factory::fixed_array_map());
1440
+ factory()->fixed_array_map());
1431
1441
  __ j(not_equal, &call_builtin);
1432
1442
 
1433
1443
  if (argc == 1) { // Otherwise fall through to call builtin.
@@ -1477,14 +1487,13 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
1477
1487
  }
1478
1488
 
1479
1489
  ExternalReference new_space_allocation_top =
1480
- ExternalReference::new_space_allocation_top_address();
1490
+ ExternalReference::new_space_allocation_top_address(isolate());
1481
1491
  ExternalReference new_space_allocation_limit =
1482
- ExternalReference::new_space_allocation_limit_address();
1492
+ ExternalReference::new_space_allocation_limit_address(isolate());
1483
1493
 
1484
1494
  const int kAllocationDelta = 4;
1485
1495
  // Load top.
1486
- __ movq(rcx, new_space_allocation_top);
1487
- __ movq(rcx, Operand(rcx, 0));
1496
+ __ Load(rcx, new_space_allocation_top);
1488
1497
 
1489
1498
  // Check if it's the end of elements.
1490
1499
  __ lea(rdx, FieldOperand(rbx,
@@ -1493,13 +1502,13 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
1493
1502
  __ cmpq(rdx, rcx);
1494
1503
  __ j(not_equal, &call_builtin);
1495
1504
  __ addq(rcx, Immediate(kAllocationDelta * kPointerSize));
1496
- __ movq(kScratchRegister, new_space_allocation_limit);
1497
- __ cmpq(rcx, Operand(kScratchRegister, 0));
1505
+ Operand limit_operand =
1506
+ masm()->ExternalOperand(new_space_allocation_limit);
1507
+ __ cmpq(rcx, limit_operand);
1498
1508
  __ j(above, &call_builtin);
1499
1509
 
1500
1510
  // We fit and could grow elements.
1501
- __ movq(kScratchRegister, new_space_allocation_top);
1502
- __ movq(Operand(kScratchRegister, 0), rcx);
1511
+ __ Store(new_space_allocation_top, rcx);
1503
1512
  __ movq(rcx, Operand(rsp, argc * kPointerSize));
1504
1513
 
1505
1514
  // Push the argument...
@@ -1526,16 +1535,15 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
1526
1535
  }
1527
1536
 
1528
1537
  __ bind(&call_builtin);
1529
- __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush),
1538
+ __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,
1539
+ isolate()),
1530
1540
  argc + 1,
1531
1541
  1);
1532
1542
  }
1533
1543
 
1534
1544
  __ bind(&miss);
1535
- Object* obj;
1536
- { MaybeObject* maybe_obj = GenerateMissBranch();
1537
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1538
- }
1545
+ MaybeObject* maybe_result = GenerateMissBranch();
1546
+ if (maybe_result->IsFailure()) return maybe_result;
1539
1547
 
1540
1548
  // Return the generated code.
1541
1549
  return GetCode(function);
@@ -1556,7 +1564,7 @@ MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object,
1556
1564
  // -----------------------------------
1557
1565
 
1558
1566
  // If object is not an array, bail out to regular call.
1559
- if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value();
1567
+ if (!object->IsJSArray() || cell != NULL) return heap()->undefined_value();
1560
1568
 
1561
1569
  Label miss, return_undefined, call_builtin;
1562
1570
 
@@ -1611,15 +1619,14 @@ MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object,
1611
1619
  __ ret((argc + 1) * kPointerSize);
1612
1620
 
1613
1621
  __ bind(&call_builtin);
1614
- __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop),
1615
- argc + 1,
1616
- 1);
1622
+ __ TailCallExternalReference(
1623
+ ExternalReference(Builtins::c_ArrayPop, isolate()),
1624
+ argc + 1,
1625
+ 1);
1617
1626
 
1618
1627
  __ bind(&miss);
1619
- Object* obj;
1620
- { MaybeObject* maybe_obj = GenerateMissBranch();
1621
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1622
- }
1628
+ MaybeObject* maybe_result = GenerateMissBranch();
1629
+ if (maybe_result->IsFailure()) return maybe_result;
1623
1630
 
1624
1631
  // Return the generated code.
1625
1632
  return GetCode(function);
@@ -1641,7 +1648,7 @@ MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
1641
1648
  // -----------------------------------
1642
1649
 
1643
1650
  // If object is not a string, bail out to regular call.
1644
- if (!object->IsString() || cell != NULL) return Heap::undefined_value();
1651
+ if (!object->IsString() || cell != NULL) return heap()->undefined_value();
1645
1652
 
1646
1653
  const int argc = arguments().immediate();
1647
1654
 
@@ -1700,10 +1707,8 @@ MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
1700
1707
  // Restore function name in rcx.
1701
1708
  __ Move(rcx, Handle<String>(name));
1702
1709
  __ bind(&name_miss);
1703
- Object* obj;
1704
- { MaybeObject* maybe_obj = GenerateMissBranch();
1705
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1706
- }
1710
+ MaybeObject* maybe_result = GenerateMissBranch();
1711
+ if (maybe_result->IsFailure()) return maybe_result;
1707
1712
 
1708
1713
  // Return the generated code.
1709
1714
  return GetCode(function);
@@ -1725,7 +1730,7 @@ MaybeObject* CallStubCompiler::CompileStringCharAtCall(
1725
1730
  // -----------------------------------
1726
1731
 
1727
1732
  // If object is not a string, bail out to regular call.
1728
- if (!object->IsString() || cell != NULL) return Heap::undefined_value();
1733
+ if (!object->IsString() || cell != NULL) return heap()->undefined_value();
1729
1734
 
1730
1735
  const int argc = arguments().immediate();
1731
1736
 
@@ -1786,10 +1791,8 @@ MaybeObject* CallStubCompiler::CompileStringCharAtCall(
1786
1791
  // Restore function name in rcx.
1787
1792
  __ Move(rcx, Handle<String>(name));
1788
1793
  __ bind(&name_miss);
1789
- Object* obj;
1790
- { MaybeObject* maybe_obj = GenerateMissBranch();
1791
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1792
- }
1794
+ MaybeObject* maybe_result = GenerateMissBranch();
1795
+ if (maybe_result->IsFailure()) return maybe_result;
1793
1796
 
1794
1797
  // Return the generated code.
1795
1798
  return GetCode(function);
@@ -1814,7 +1817,7 @@ MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall(
1814
1817
 
1815
1818
  // If the object is not a JSObject or we got an unexpected number of
1816
1819
  // arguments, bail out to the regular call.
1817
- if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();
1820
+ if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
1818
1821
 
1819
1822
  Label miss;
1820
1823
  GenerateNameCheck(name, &miss);
@@ -1857,10 +1860,8 @@ MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall(
1857
1860
 
1858
1861
  __ bind(&miss);
1859
1862
  // rcx: function name.
1860
- Object* obj;
1861
- { MaybeObject* maybe_obj = GenerateMissBranch();
1862
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1863
- }
1863
+ MaybeObject* maybe_result = GenerateMissBranch();
1864
+ if (maybe_result->IsFailure()) return maybe_result;
1864
1865
 
1865
1866
  // Return the generated code.
1866
1867
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
@@ -1873,7 +1874,7 @@ MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object,
1873
1874
  JSFunction* function,
1874
1875
  String* name) {
1875
1876
  // TODO(872): implement this.
1876
- return Heap::undefined_value();
1877
+ return heap()->undefined_value();
1877
1878
  }
1878
1879
 
1879
1880
 
@@ -1894,7 +1895,7 @@ MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
1894
1895
 
1895
1896
  // If the object is not a JSObject or we got an unexpected number of
1896
1897
  // arguments, bail out to the regular call.
1897
- if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();
1898
+ if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
1898
1899
 
1899
1900
  Label miss;
1900
1901
  GenerateNameCheck(name, &miss);
@@ -1943,7 +1944,7 @@ MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
1943
1944
 
1944
1945
  // Check if the argument is a heap number and load its value.
1945
1946
  __ bind(&not_smi);
1946
- __ CheckMap(rax, Factory::heap_number_map(), &slow, true);
1947
+ __ CheckMap(rax, factory()->heap_number_map(), &slow, true);
1947
1948
  __ movq(rbx, FieldOperand(rax, HeapNumber::kValueOffset));
1948
1949
 
1949
1950
  // Check the sign of the argument. If the argument is positive,
@@ -1972,16 +1973,72 @@ MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
1972
1973
 
1973
1974
  __ bind(&miss);
1974
1975
  // rcx: function name.
1975
- Object* obj;
1976
- { MaybeObject* maybe_obj = GenerateMissBranch();
1977
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1978
- }
1976
+ MaybeObject* maybe_result = GenerateMissBranch();
1977
+ if (maybe_result->IsFailure()) return maybe_result;
1979
1978
 
1980
1979
  // Return the generated code.
1981
1980
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
1982
1981
  }
1983
1982
 
1984
1983
 
1984
+ MaybeObject* CallStubCompiler::CompileFastApiCall(
1985
+ const CallOptimization& optimization,
1986
+ Object* object,
1987
+ JSObject* holder,
1988
+ JSGlobalPropertyCell* cell,
1989
+ JSFunction* function,
1990
+ String* name) {
1991
+ ASSERT(optimization.is_simple_api_call());
1992
+ // Bail out if object is a global object as we don't want to
1993
+ // repatch it to global receiver.
1994
+ if (object->IsGlobalObject()) return heap()->undefined_value();
1995
+ if (cell != NULL) return heap()->undefined_value();
1996
+ int depth = optimization.GetPrototypeDepthOfExpectedType(
1997
+ JSObject::cast(object), holder);
1998
+ if (depth == kInvalidProtoDepth) return heap()->undefined_value();
1999
+
2000
+ Label miss, miss_before_stack_reserved;
2001
+
2002
+ GenerateNameCheck(name, &miss_before_stack_reserved);
2003
+
2004
+ // Get the receiver from the stack.
2005
+ const int argc = arguments().immediate();
2006
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
2007
+
2008
+ // Check that the receiver isn't a smi.
2009
+ __ JumpIfSmi(rdx, &miss_before_stack_reserved);
2010
+
2011
+ Counters* counters = isolate()->counters();
2012
+ __ IncrementCounter(counters->call_const(), 1);
2013
+ __ IncrementCounter(counters->call_const_fast_api(), 1);
2014
+
2015
+ // Allocate space for v8::Arguments implicit values. Must be initialized
2016
+ // before calling any runtime function.
2017
+ __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
2018
+
2019
+ // Check that the maps haven't changed and find a Holder as a side effect.
2020
+ CheckPrototypes(JSObject::cast(object), rdx, holder,
2021
+ rbx, rax, rdi, name, depth, &miss);
2022
+
2023
+ // Move the return address on top of the stack.
2024
+ __ movq(rax, Operand(rsp, 3 * kPointerSize));
2025
+ __ movq(Operand(rsp, 0 * kPointerSize), rax);
2026
+
2027
+ MaybeObject* result = GenerateFastApiCall(masm(), optimization, argc);
2028
+ if (result->IsFailure()) return result;
2029
+
2030
+ __ bind(&miss);
2031
+ __ addq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
2032
+
2033
+ __ bind(&miss_before_stack_reserved);
2034
+ MaybeObject* maybe_result = GenerateMissBranch();
2035
+ if (maybe_result->IsFailure()) return maybe_result;
2036
+
2037
+ // Return the generated code.
2038
+ return GetCode(function);
2039
+ }
2040
+
2041
+
1985
2042
  MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
1986
2043
  JSObject* holder,
1987
2044
  JSFunction* function,
@@ -1997,20 +2054,18 @@ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
1997
2054
  // rsp[(argc + 1) * 8] : argument 0 = receiver
1998
2055
  // -----------------------------------
1999
2056
 
2000
- SharedFunctionInfo* function_info = function->shared();
2001
- if (function_info->HasBuiltinFunctionId()) {
2002
- BuiltinFunctionId id = function_info->builtin_function_id();
2057
+ if (HasCustomCallGenerator(function)) {
2003
2058
  MaybeObject* maybe_result = CompileCustomCall(
2004
- id, object, holder, NULL, function, name);
2059
+ object, holder, NULL, function, name);
2005
2060
  Object* result;
2006
2061
  if (!maybe_result->ToObject(&result)) return maybe_result;
2007
2062
  // undefined means bail out to regular compiler.
2008
2063
  if (!result->IsUndefined()) return result;
2009
2064
  }
2010
2065
 
2011
- Label miss_in_smi_check;
2066
+ Label miss;
2012
2067
 
2013
- GenerateNameCheck(name, &miss_in_smi_check);
2068
+ GenerateNameCheck(name, &miss);
2014
2069
 
2015
2070
  // Get the receiver from the stack.
2016
2071
  const int argc = arguments().immediate();
@@ -2018,42 +2073,26 @@ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
2018
2073
 
2019
2074
  // Check that the receiver isn't a smi.
2020
2075
  if (check != NUMBER_CHECK) {
2021
- __ JumpIfSmi(rdx, &miss_in_smi_check);
2076
+ __ JumpIfSmi(rdx, &miss);
2022
2077
  }
2023
2078
 
2024
2079
  // Make sure that it's okay not to patch the on stack receiver
2025
2080
  // unless we're doing a receiver map check.
2026
2081
  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
2027
2082
 
2028
- CallOptimization optimization(function);
2029
- int depth = kInvalidProtoDepth;
2030
- Label miss;
2031
-
2083
+ Counters* counters = isolate()->counters();
2084
+ SharedFunctionInfo* function_info = function->shared();
2032
2085
  switch (check) {
2033
2086
  case RECEIVER_MAP_CHECK:
2034
- __ IncrementCounter(&Counters::call_const, 1);
2035
-
2036
- if (optimization.is_simple_api_call() && !object->IsGlobalObject()) {
2037
- depth = optimization.GetPrototypeDepthOfExpectedType(
2038
- JSObject::cast(object), holder);
2039
- }
2040
-
2041
- if (depth != kInvalidProtoDepth) {
2042
- __ IncrementCounter(&Counters::call_const_fast_api, 1);
2043
-
2044
- // Allocate space for v8::Arguments implicit values. Must be initialized
2045
- // before to call any runtime function.
2046
- __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
2047
- }
2087
+ __ IncrementCounter(counters->call_const(), 1);
2048
2088
 
2049
2089
  // Check that the maps haven't changed.
2050
2090
  CheckPrototypes(JSObject::cast(object), rdx, holder,
2051
- rbx, rax, rdi, name, depth, &miss);
2091
+ rbx, rax, rdi, name, &miss);
2052
2092
 
2053
2093
  // Patch the receiver on the stack with the global proxy if
2054
2094
  // necessary.
2055
2095
  if (object->IsGlobalObject()) {
2056
- ASSERT(depth == kInvalidProtoDepth);
2057
2096
  __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
2058
2097
  __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
2059
2098
  }
@@ -2123,31 +2162,12 @@ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
2123
2162
  UNREACHABLE();
2124
2163
  }
2125
2164
 
2126
- if (depth != kInvalidProtoDepth) {
2127
- // Move the return address on top of the stack.
2128
- __ movq(rax, Operand(rsp, 3 * kPointerSize));
2129
- __ movq(Operand(rsp, 0 * kPointerSize), rax);
2130
-
2131
- // rsp[2 * kPointerSize] is uninitialized, rsp[3 * kPointerSize] contains
2132
- // duplicate of return address and will be overwritten.
2133
- MaybeObject* result = GenerateFastApiCall(masm(), optimization, argc);
2134
- if (result->IsFailure()) return result;
2135
- } else {
2136
- __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
2137
- }
2165
+ __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
2138
2166
 
2139
2167
  // Handle call cache miss.
2140
2168
  __ bind(&miss);
2141
- if (depth != kInvalidProtoDepth) {
2142
- __ addq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
2143
- }
2144
-
2145
- // Handle call cache miss.
2146
- __ bind(&miss_in_smi_check);
2147
- Object* obj;
2148
- { MaybeObject* maybe_obj = GenerateMissBranch();
2149
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
2150
- }
2169
+ MaybeObject* maybe_result = GenerateMissBranch();
2170
+ if (maybe_result->IsFailure()) return maybe_result;
2151
2171
 
2152
2172
  // Return the generated code.
2153
2173
  return GetCode(function);
@@ -2213,10 +2233,8 @@ MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
2213
2233
 
2214
2234
  // Handle load cache miss.
2215
2235
  __ bind(&miss);
2216
- Object* obj;
2217
- { MaybeObject* maybe_obj = GenerateMissBranch();
2218
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
2219
- }
2236
+ MaybeObject* maybe_result = GenerateMissBranch();
2237
+ if (maybe_result->IsFailure()) return maybe_result;
2220
2238
 
2221
2239
  // Return the generated code.
2222
2240
  return GetCode(INTERCEPTOR, name);
@@ -2238,11 +2256,9 @@ MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
2238
2256
  // rsp[(argc + 1) * 8] : argument 0 = receiver
2239
2257
  // -----------------------------------
2240
2258
 
2241
- SharedFunctionInfo* function_info = function->shared();
2242
- if (function_info->HasBuiltinFunctionId()) {
2243
- BuiltinFunctionId id = function_info->builtin_function_id();
2259
+ if (HasCustomCallGenerator(function)) {
2244
2260
  MaybeObject* maybe_result = CompileCustomCall(
2245
- id, object, holder, cell, function, name);
2261
+ object, holder, cell, function, name);
2246
2262
  Object* result;
2247
2263
  if (!maybe_result->ToObject(&result)) return maybe_result;
2248
2264
  // undefined means bail out to regular compiler.
@@ -2270,7 +2286,8 @@ MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
2270
2286
  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
2271
2287
 
2272
2288
  // Jump to the cached code (tail call).
2273
- __ IncrementCounter(&Counters::call_global_inline, 1);
2289
+ Counters* counters = isolate()->counters();
2290
+ __ IncrementCounter(counters->call_global_inline(), 1);
2274
2291
  ASSERT(function->is_compiled());
2275
2292
  ParameterCount expected(function->shared()->formal_parameter_count());
2276
2293
  if (V8::UseCrankshaft()) {
@@ -2286,11 +2303,9 @@ MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
2286
2303
  }
2287
2304
  // Handle call cache miss.
2288
2305
  __ bind(&miss);
2289
- __ IncrementCounter(&Counters::call_global_inline_miss, 1);
2290
- Object* obj;
2291
- { MaybeObject* maybe_obj = GenerateMissBranch();
2292
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
2293
- }
2306
+ __ IncrementCounter(counters->call_global_inline_miss(), 1);
2307
+ MaybeObject* maybe_result = GenerateMissBranch();
2308
+ if (maybe_result->IsFailure()) return maybe_result;
2294
2309
 
2295
2310
  // Return the generated code.
2296
2311
  return GetCode(NORMAL, name);
@@ -2319,7 +2334,7 @@ MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
2319
2334
 
2320
2335
  // Handle store cache miss.
2321
2336
  __ bind(&miss);
2322
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
2337
+ Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
2323
2338
  __ Jump(ic, RelocInfo::CODE_TARGET);
2324
2339
 
2325
2340
  // Return the generated code.
@@ -2364,12 +2379,12 @@ MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
2364
2379
 
2365
2380
  // Do tail-call to the runtime system.
2366
2381
  ExternalReference store_callback_property =
2367
- ExternalReference(IC_Utility(IC::kStoreCallbackProperty));
2382
+ ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
2368
2383
  __ TailCallExternalReference(store_callback_property, 4, 1);
2369
2384
 
2370
2385
  // Handle store cache miss.
2371
2386
  __ bind(&miss);
2372
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
2387
+ Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
2373
2388
  __ Jump(ic, RelocInfo::CODE_TARGET);
2374
2389
 
2375
2390
  // Return the generated code.
@@ -2408,16 +2423,17 @@ MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
2408
2423
  __ push(rdx); // receiver
2409
2424
  __ push(rcx); // name
2410
2425
  __ push(rax); // value
2426
+ __ Push(Smi::FromInt(strict_mode_));
2411
2427
  __ push(rbx); // restore return address
2412
2428
 
2413
2429
  // Do tail-call to the runtime system.
2414
2430
  ExternalReference store_ic_property =
2415
- ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
2416
- __ TailCallExternalReference(store_ic_property, 3, 1);
2431
+ ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
2432
+ __ TailCallExternalReference(store_ic_property, 4, 1);
2417
2433
 
2418
2434
  // Handle store cache miss.
2419
2435
  __ bind(&miss);
2420
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
2436
+ Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
2421
2437
  __ Jump(ic, RelocInfo::CODE_TARGET);
2422
2438
 
2423
2439
  // Return the generated code.
@@ -2454,13 +2470,14 @@ MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
2454
2470
  __ movq(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset), rax);
2455
2471
 
2456
2472
  // Return the value (register rax).
2457
- __ IncrementCounter(&Counters::named_store_global_inline, 1);
2473
+ Counters* counters = isolate()->counters();
2474
+ __ IncrementCounter(counters->named_store_global_inline(), 1);
2458
2475
  __ ret(0);
2459
2476
 
2460
2477
  // Handle store cache miss.
2461
2478
  __ bind(&miss);
2462
- __ IncrementCounter(&Counters::named_store_global_inline_miss, 1);
2463
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
2479
+ __ IncrementCounter(counters->named_store_global_inline_miss(), 1);
2480
+ Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
2464
2481
  __ Jump(ic, RelocInfo::CODE_TARGET);
2465
2482
 
2466
2483
  // Return the generated code.
@@ -2480,7 +2497,8 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
2480
2497
  // -----------------------------------
2481
2498
  Label miss;
2482
2499
 
2483
- __ IncrementCounter(&Counters::keyed_store_field, 1);
2500
+ Counters* counters = isolate()->counters();
2501
+ __ IncrementCounter(counters->keyed_store_field(), 1);
2484
2502
 
2485
2503
  // Check that the name has not changed.
2486
2504
  __ Cmp(rcx, Handle<String>(name));
@@ -2496,8 +2514,8 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
2496
2514
 
2497
2515
  // Handle store cache miss.
2498
2516
  __ bind(&miss);
2499
- __ DecrementCounter(&Counters::keyed_store_field, 1);
2500
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
2517
+ __ DecrementCounter(counters->keyed_store_field(), 1);
2518
+ Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
2501
2519
  __ Jump(ic, RelocInfo::CODE_TARGET);
2502
2520
 
2503
2521
  // Return the generated code.
@@ -2529,7 +2547,7 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
2529
2547
  // Get the elements array and make sure it is a fast element array, not 'cow'.
2530
2548
  __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
2531
2549
  __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset),
2532
- Factory::fixed_array_map());
2550
+ factory()->fixed_array_map());
2533
2551
  __ j(not_equal, &miss);
2534
2552
 
2535
2553
  // Check that the key is within bounds.
@@ -2554,44 +2572,7 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
2554
2572
 
2555
2573
  // Handle store cache miss.
2556
2574
  __ bind(&miss);
2557
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
2558
- __ jmp(ic, RelocInfo::CODE_TARGET);
2559
-
2560
- // Return the generated code.
2561
- return GetCode(NORMAL, NULL);
2562
- }
2563
-
2564
-
2565
- MaybeObject* KeyedStoreStubCompiler::CompileStorePixelArray(
2566
- JSObject* receiver) {
2567
- // ----------- S t a t e -------------
2568
- // -- rax : value
2569
- // -- rcx : key
2570
- // -- rdx : receiver
2571
- // -- rsp[0] : return address
2572
- // -----------------------------------
2573
- Label miss;
2574
-
2575
- // Check that the map matches.
2576
- __ CheckMap(rdx, Handle<Map>(receiver->map()), &miss, false);
2577
-
2578
- // Do the load.
2579
- GenerateFastPixelArrayStore(masm(),
2580
- rdx,
2581
- rcx,
2582
- rax,
2583
- rdi,
2584
- rbx,
2585
- true,
2586
- false,
2587
- &miss,
2588
- &miss,
2589
- NULL,
2590
- &miss);
2591
-
2592
- // Handle store cache miss.
2593
- __ bind(&miss);
2594
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
2575
+ Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
2595
2576
  __ jmp(ic, RelocInfo::CODE_TARGET);
2596
2577
 
2597
2578
  // Return the generated code.
@@ -2640,7 +2621,7 @@ MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
2640
2621
  GenerateLoadMiss(masm(), Code::LOAD_IC);
2641
2622
 
2642
2623
  // Return the generated code.
2643
- return GetCode(NONEXISTENT, Heap::empty_string());
2624
+ return GetCode(NONEXISTENT, heap()->empty_string());
2644
2625
  }
2645
2626
 
2646
2627
 
@@ -2779,12 +2760,13 @@ MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
2779
2760
  __ Check(not_equal, "DontDelete cells can't contain the hole");
2780
2761
  }
2781
2762
 
2782
- __ IncrementCounter(&Counters::named_load_global_stub, 1);
2763
+ Counters* counters = isolate()->counters();
2764
+ __ IncrementCounter(counters->named_load_global_stub(), 1);
2783
2765
  __ movq(rax, rbx);
2784
2766
  __ ret(0);
2785
2767
 
2786
2768
  __ bind(&miss);
2787
- __ IncrementCounter(&Counters::named_load_global_stub_miss, 1);
2769
+ __ IncrementCounter(counters->named_load_global_stub_miss(), 1);
2788
2770
  GenerateLoadMiss(masm(), Code::LOAD_IC);
2789
2771
 
2790
2772
  // Return the generated code.
@@ -2803,7 +2785,8 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
2803
2785
  // -----------------------------------
2804
2786
  Label miss;
2805
2787
 
2806
- __ IncrementCounter(&Counters::keyed_load_field, 1);
2788
+ Counters* counters = isolate()->counters();
2789
+ __ IncrementCounter(counters->keyed_load_field(), 1);
2807
2790
 
2808
2791
  // Check that the name has not changed.
2809
2792
  __ Cmp(rax, Handle<String>(name));
@@ -2812,7 +2795,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
2812
2795
  GenerateLoadField(receiver, holder, rdx, rbx, rcx, rdi, index, name, &miss);
2813
2796
 
2814
2797
  __ bind(&miss);
2815
- __ DecrementCounter(&Counters::keyed_load_field, 1);
2798
+ __ DecrementCounter(counters->keyed_load_field(), 1);
2816
2799
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2817
2800
 
2818
2801
  // Return the generated code.
@@ -2832,7 +2815,8 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback(
2832
2815
  // -----------------------------------
2833
2816
  Label miss;
2834
2817
 
2835
- __ IncrementCounter(&Counters::keyed_load_callback, 1);
2818
+ Counters* counters = isolate()->counters();
2819
+ __ IncrementCounter(counters->keyed_load_callback(), 1);
2836
2820
 
2837
2821
  // Check that the name has not changed.
2838
2822
  __ Cmp(rax, Handle<String>(name));
@@ -2847,7 +2831,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback(
2847
2831
 
2848
2832
  __ bind(&miss);
2849
2833
 
2850
- __ DecrementCounter(&Counters::keyed_load_callback, 1);
2834
+ __ DecrementCounter(counters->keyed_load_callback(), 1);
2851
2835
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2852
2836
 
2853
2837
  // Return the generated code.
@@ -2866,7 +2850,8 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
2866
2850
  // -----------------------------------
2867
2851
  Label miss;
2868
2852
 
2869
- __ IncrementCounter(&Counters::keyed_load_constant_function, 1);
2853
+ Counters* counters = isolate()->counters();
2854
+ __ IncrementCounter(counters->keyed_load_constant_function(), 1);
2870
2855
 
2871
2856
  // Check that the name has not changed.
2872
2857
  __ Cmp(rax, Handle<String>(name));
@@ -2875,7 +2860,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
2875
2860
  GenerateLoadConstant(receiver, holder, rdx, rbx, rcx, rdi,
2876
2861
  value, name, &miss);
2877
2862
  __ bind(&miss);
2878
- __ DecrementCounter(&Counters::keyed_load_constant_function, 1);
2863
+ __ DecrementCounter(counters->keyed_load_constant_function(), 1);
2879
2864
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2880
2865
 
2881
2866
  // Return the generated code.
@@ -2893,7 +2878,8 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
2893
2878
  // -----------------------------------
2894
2879
  Label miss;
2895
2880
 
2896
- __ IncrementCounter(&Counters::keyed_load_interceptor, 1);
2881
+ Counters* counters = isolate()->counters();
2882
+ __ IncrementCounter(counters->keyed_load_interceptor(), 1);
2897
2883
 
2898
2884
  // Check that the name has not changed.
2899
2885
  __ Cmp(rax, Handle<String>(name));
@@ -2912,7 +2898,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
2912
2898
  name,
2913
2899
  &miss);
2914
2900
  __ bind(&miss);
2915
- __ DecrementCounter(&Counters::keyed_load_interceptor, 1);
2901
+ __ DecrementCounter(counters->keyed_load_interceptor(), 1);
2916
2902
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2917
2903
 
2918
2904
  // Return the generated code.
@@ -2928,7 +2914,8 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
2928
2914
  // -----------------------------------
2929
2915
  Label miss;
2930
2916
 
2931
- __ IncrementCounter(&Counters::keyed_load_array_length, 1);
2917
+ Counters* counters = isolate()->counters();
2918
+ __ IncrementCounter(counters->keyed_load_array_length(), 1);
2932
2919
 
2933
2920
  // Check that the name has not changed.
2934
2921
  __ Cmp(rax, Handle<String>(name));
@@ -2936,7 +2923,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
2936
2923
 
2937
2924
  GenerateLoadArrayLength(masm(), rdx, rcx, &miss);
2938
2925
  __ bind(&miss);
2939
- __ DecrementCounter(&Counters::keyed_load_array_length, 1);
2926
+ __ DecrementCounter(counters->keyed_load_array_length(), 1);
2940
2927
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2941
2928
 
2942
2929
  // Return the generated code.
@@ -2952,7 +2939,8 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
2952
2939
  // -----------------------------------
2953
2940
  Label miss;
2954
2941
 
2955
- __ IncrementCounter(&Counters::keyed_load_string_length, 1);
2942
+ Counters* counters = isolate()->counters();
2943
+ __ IncrementCounter(counters->keyed_load_string_length(), 1);
2956
2944
 
2957
2945
  // Check that the name has not changed.
2958
2946
  __ Cmp(rax, Handle<String>(name));
@@ -2960,7 +2948,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
2960
2948
 
2961
2949
  GenerateLoadStringLength(masm(), rdx, rcx, rbx, &miss, true);
2962
2950
  __ bind(&miss);
2963
- __ DecrementCounter(&Counters::keyed_load_string_length, 1);
2951
+ __ DecrementCounter(counters->keyed_load_string_length(), 1);
2964
2952
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2965
2953
 
2966
2954
  // Return the generated code.
@@ -2976,7 +2964,8 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
2976
2964
  // -----------------------------------
2977
2965
  Label miss;
2978
2966
 
2979
- __ IncrementCounter(&Counters::keyed_load_function_prototype, 1);
2967
+ Counters* counters = isolate()->counters();
2968
+ __ IncrementCounter(counters->keyed_load_function_prototype(), 1);
2980
2969
 
2981
2970
  // Check that the name has not changed.
2982
2971
  __ Cmp(rax, Handle<String>(name));
@@ -2984,7 +2973,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
2984
2973
 
2985
2974
  GenerateLoadFunctionPrototype(masm(), rdx, rcx, rbx, &miss);
2986
2975
  __ bind(&miss);
2987
- __ DecrementCounter(&Counters::keyed_load_function_prototype, 1);
2976
+ __ DecrementCounter(counters->keyed_load_function_prototype(), 1);
2988
2977
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2989
2978
 
2990
2979
  // Return the generated code.
@@ -2996,7 +2985,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) {
2996
2985
  // ----------- S t a t e -------------
2997
2986
  // -- rax : key
2998
2987
  // -- rdx : receiver
2999
- // -- esp[0] : return address
2988
+ // -- rsp[0] : return address
3000
2989
  // -----------------------------------
3001
2990
  Label miss;
3002
2991
 
@@ -3038,35 +3027,6 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) {
3038
3027
  }
3039
3028
 
3040
3029
 
3041
- MaybeObject* KeyedLoadStubCompiler::CompileLoadPixelArray(JSObject* receiver) {
3042
- // ----------- S t a t e -------------
3043
- // -- rax : key
3044
- // -- rdx : receiver
3045
- // -- esp[0] : return address
3046
- // -----------------------------------
3047
- Label miss;
3048
-
3049
- // Check that the map matches.
3050
- __ CheckMap(rdx, Handle<Map>(receiver->map()), &miss, false);
3051
-
3052
- GenerateFastPixelArrayLoad(masm(),
3053
- rdx,
3054
- rax,
3055
- rbx,
3056
- rcx,
3057
- rax,
3058
- &miss,
3059
- &miss,
3060
- &miss);
3061
-
3062
- __ bind(&miss);
3063
- GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3064
-
3065
- // Return the generated code.
3066
- return GetCode(NORMAL, NULL);
3067
- }
3068
-
3069
-
3070
3030
  // Specialized stub for constructing objects from functions which only have only
3071
3031
  // simple assignments of the form this.x = ...; in their body.
3072
3032
  MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
@@ -3079,7 +3039,7 @@ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
3079
3039
  Label generic_stub_call;
3080
3040
 
3081
3041
  // Use r8 for holding undefined which is used in several places below.
3082
- __ Move(r8, Factory::undefined_value());
3042
+ __ Move(r8, factory()->undefined_value());
3083
3043
 
3084
3044
  #ifdef ENABLE_DEBUGGER_SUPPORT
3085
3045
  // Check to see whether there are any break points in the function code. If
@@ -3123,7 +3083,7 @@ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
3123
3083
  // rbx: initial map
3124
3084
  // rdx: JSObject (untagged)
3125
3085
  __ movq(Operand(rdx, JSObject::kMapOffset), rbx);
3126
- __ Move(rbx, Factory::empty_fixed_array());
3086
+ __ Move(rbx, factory()->empty_fixed_array());
3127
3087
  __ movq(Operand(rdx, JSObject::kPropertiesOffset), rbx);
3128
3088
  __ movq(Operand(rdx, JSObject::kElementsOffset), rbx);
3129
3089
 
@@ -3182,14 +3142,16 @@ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
3182
3142
  __ pop(rcx);
3183
3143
  __ lea(rsp, Operand(rsp, rbx, times_pointer_size, 1 * kPointerSize));
3184
3144
  __ push(rcx);
3185
- __ IncrementCounter(&Counters::constructed_objects, 1);
3186
- __ IncrementCounter(&Counters::constructed_objects_stub, 1);
3145
+ Counters* counters = isolate()->counters();
3146
+ __ IncrementCounter(counters->constructed_objects(), 1);
3147
+ __ IncrementCounter(counters->constructed_objects_stub(), 1);
3187
3148
  __ ret(0);
3188
3149
 
3189
3150
  // Jump to the generic stub in case the specialized code cannot handle the
3190
3151
  // construction.
3191
3152
  __ bind(&generic_stub_call);
3192
- Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
3153
+ Code* code =
3154
+ isolate()->builtins()->builtin(Builtins::kJSConstructStubGeneric);
3193
3155
  Handle<Code> generic_construct_stub(code);
3194
3156
  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
3195
3157
 
@@ -3199,7 +3161,7 @@ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
3199
3161
 
3200
3162
 
3201
3163
  MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
3202
- ExternalArrayType array_type, Code::Flags flags) {
3164
+ JSObject* receiver, ExternalArrayType array_type, Code::Flags flags) {
3203
3165
  // ----------- S t a t e -------------
3204
3166
  // -- rax : key
3205
3167
  // -- rdx : receiver
@@ -3213,24 +3175,9 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
3213
3175
  // Check that the key is a smi.
3214
3176
  __ JumpIfNotSmi(rax, &slow);
3215
3177
 
3216
- // Check that the object is a JS object.
3217
- __ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx);
3218
- __ j(not_equal, &slow);
3219
- // Check that the receiver does not require access checks. We need
3220
- // to check this explicitly since this generic stub does not perform
3221
- // map checks. The map is already in rdx.
3222
- __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
3223
- Immediate(1 << Map::kIsAccessCheckNeeded));
3224
- __ j(not_zero, &slow);
3225
-
3226
- // Check that the elements array is the appropriate type of
3227
- // ExternalArray.
3228
- // rax: index (as a smi)
3229
- // rdx: JSObject
3178
+ // Check that the map matches.
3179
+ __ CheckMap(rdx, Handle<Map>(receiver->map()), &slow, false);
3230
3180
  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
3231
- __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
3232
- Heap::RootIndexForExternalArrayType(array_type));
3233
- __ j(not_equal, &slow);
3234
3181
 
3235
3182
  // Check that the index is in range.
3236
3183
  __ SmiToInteger32(rcx, rax);
@@ -3248,6 +3195,7 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
3248
3195
  case kExternalByteArray:
3249
3196
  __ movsxbq(rcx, Operand(rbx, rcx, times_1, 0));
3250
3197
  break;
3198
+ case kExternalPixelArray:
3251
3199
  case kExternalUnsignedByteArray:
3252
3200
  __ movzxbq(rcx, Operand(rbx, rcx, times_1, 0));
3253
3201
  break;
@@ -3318,7 +3266,8 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
3318
3266
 
3319
3267
  // Slow case: Jump to runtime.
3320
3268
  __ bind(&slow);
3321
- __ IncrementCounter(&Counters::keyed_load_external_array_slow, 1);
3269
+ Counters* counters = isolate()->counters();
3270
+ __ IncrementCounter(counters->keyed_load_external_array_slow(), 1);
3322
3271
 
3323
3272
  // ----------- S t a t e -------------
3324
3273
  // -- rax : key
@@ -3340,7 +3289,7 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
3340
3289
 
3341
3290
 
3342
3291
  MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
3343
- ExternalArrayType array_type, Code::Flags flags) {
3292
+ JSObject* receiver, ExternalArrayType array_type, Code::Flags flags) {
3344
3293
  // ----------- S t a t e -------------
3345
3294
  // -- rax : value
3346
3295
  // -- rcx : key
@@ -3351,29 +3300,13 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
3351
3300
 
3352
3301
  // Check that the object isn't a smi.
3353
3302
  __ JumpIfSmi(rdx, &slow);
3354
- // Get the map from the receiver.
3355
- __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
3356
- // Check that the receiver does not require access checks. We need
3357
- // to do this because this generic stub does not perform map checks.
3358
- __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
3359
- Immediate(1 << Map::kIsAccessCheckNeeded));
3360
- __ j(not_zero, &slow);
3361
- // Check that the key is a smi.
3362
- __ JumpIfNotSmi(rcx, &slow);
3363
3303
 
3364
- // Check that the object is a JS object.
3365
- __ CmpInstanceType(rbx, JS_OBJECT_TYPE);
3366
- __ j(not_equal, &slow);
3367
-
3368
- // Check that the elements array is the appropriate type of
3369
- // ExternalArray.
3370
- // rax: value
3371
- // rcx: key (a smi)
3372
- // rdx: receiver (a JSObject)
3304
+ // Check that the map matches.
3305
+ __ CheckMap(rdx, Handle<Map>(receiver->map()), &slow, false);
3373
3306
  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
3374
- __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
3375
- Heap::RootIndexForExternalArrayType(array_type));
3376
- __ j(not_equal, &slow);
3307
+
3308
+ // Check that the key is a smi.
3309
+ __ JumpIfNotSmi(rcx, &slow);
3377
3310
 
3378
3311
  // Check that the index is in range.
3379
3312
  __ SmiToInteger32(rdi, rcx); // Untag the index.
@@ -3389,12 +3322,28 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
3389
3322
  // rbx: elements array
3390
3323
  // rdi: untagged key
3391
3324
  NearLabel check_heap_number;
3392
- __ JumpIfNotSmi(rax, &check_heap_number);
3325
+ if (array_type == kExternalPixelArray) {
3326
+ // Float to pixel conversion is only implemented in the runtime for now.
3327
+ __ JumpIfNotSmi(rax, &slow);
3328
+ } else {
3329
+ __ JumpIfNotSmi(rax, &check_heap_number);
3330
+ }
3393
3331
  // No more branches to slow case on this path. Key and receiver not needed.
3394
3332
  __ SmiToInteger32(rdx, rax);
3395
3333
  __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset));
3396
3334
  // rbx: base pointer of external storage
3397
3335
  switch (array_type) {
3336
+ case kExternalPixelArray:
3337
+ { // Clamp the value to [0..255].
3338
+ NearLabel done;
3339
+ __ testl(rdx, Immediate(0xFFFFFF00));
3340
+ __ j(zero, &done);
3341
+ __ setcc(negative, rdx); // 1 if negative, 0 if positive.
3342
+ __ decb(rdx); // 0 if negative, 255 if positive.
3343
+ __ bind(&done);
3344
+ }
3345
+ __ movb(Operand(rbx, rdi, times_1, 0), rdx);
3346
+ break;
3398
3347
  case kExternalByteArray:
3399
3348
  case kExternalUnsignedByteArray:
3400
3349
  __ movb(Operand(rbx, rdi, times_1, 0), rdx);
@@ -3418,62 +3367,65 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
3418
3367
  }
3419
3368
  __ ret(0);
3420
3369
 
3421
- __ bind(&check_heap_number);
3422
- // rax: value
3423
- // rcx: key (a smi)
3424
- // rdx: receiver (a JSObject)
3425
- // rbx: elements array
3426
- // rdi: untagged key
3427
- __ CmpObjectType(rax, HEAP_NUMBER_TYPE, kScratchRegister);
3428
- __ j(not_equal, &slow);
3429
- // No more branches to slow case on this path.
3430
-
3431
- // The WebGL specification leaves the behavior of storing NaN and
3432
- // +/-Infinity into integer arrays basically undefined. For more
3433
- // reproducible behavior, convert these to zero.
3434
- __ movsd(xmm0, FieldOperand(rax, HeapNumber::kValueOffset));
3435
- __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset));
3436
- // rdi: untagged index
3437
- // rbx: base pointer of external storage
3438
- // top of FPU stack: value
3439
- if (array_type == kExternalFloatArray) {
3440
- __ cvtsd2ss(xmm0, xmm0);
3441
- __ movss(Operand(rbx, rdi, times_4, 0), xmm0);
3442
- __ ret(0);
3443
- } else {
3444
- // Perform float-to-int conversion with truncation (round-to-zero)
3445
- // behavior.
3446
-
3447
- // Convert to int32 and store the low byte/word.
3448
- // If the value is NaN or +/-infinity, the result is 0x80000000,
3449
- // which is automatically zero when taken mod 2^n, n < 32.
3450
- // rdx: value (converted to an untagged integer)
3370
+ // TODO(danno): handle heap number -> pixel array conversion
3371
+ if (array_type != kExternalPixelArray) {
3372
+ __ bind(&check_heap_number);
3373
+ // rax: value
3374
+ // rcx: key (a smi)
3375
+ // rdx: receiver (a JSObject)
3376
+ // rbx: elements array
3377
+ // rdi: untagged key
3378
+ __ CmpObjectType(rax, HEAP_NUMBER_TYPE, kScratchRegister);
3379
+ __ j(not_equal, &slow);
3380
+ // No more branches to slow case on this path.
3381
+
3382
+ // The WebGL specification leaves the behavior of storing NaN and
3383
+ // +/-Infinity into integer arrays basically undefined. For more
3384
+ // reproducible behavior, convert these to zero.
3385
+ __ movsd(xmm0, FieldOperand(rax, HeapNumber::kValueOffset));
3386
+ __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset));
3451
3387
  // rdi: untagged index
3452
3388
  // rbx: base pointer of external storage
3453
- switch (array_type) {
3454
- case kExternalByteArray:
3455
- case kExternalUnsignedByteArray:
3456
- __ cvttsd2si(rdx, xmm0);
3457
- __ movb(Operand(rbx, rdi, times_1, 0), rdx);
3458
- break;
3459
- case kExternalShortArray:
3460
- case kExternalUnsignedShortArray:
3461
- __ cvttsd2si(rdx, xmm0);
3462
- __ movw(Operand(rbx, rdi, times_2, 0), rdx);
3463
- break;
3464
- case kExternalIntArray:
3465
- case kExternalUnsignedIntArray: {
3466
- // Convert to int64, so that NaN and infinities become
3467
- // 0x8000000000000000, which is zero mod 2^32.
3468
- __ cvttsd2siq(rdx, xmm0);
3469
- __ movl(Operand(rbx, rdi, times_4, 0), rdx);
3470
- break;
3389
+ // top of FPU stack: value
3390
+ if (array_type == kExternalFloatArray) {
3391
+ __ cvtsd2ss(xmm0, xmm0);
3392
+ __ movss(Operand(rbx, rdi, times_4, 0), xmm0);
3393
+ __ ret(0);
3394
+ } else {
3395
+ // Perform float-to-int conversion with truncation (round-to-zero)
3396
+ // behavior.
3397
+
3398
+ // Convert to int32 and store the low byte/word.
3399
+ // If the value is NaN or +/-infinity, the result is 0x80000000,
3400
+ // which is automatically zero when taken mod 2^n, n < 32.
3401
+ // rdx: value (converted to an untagged integer)
3402
+ // rdi: untagged index
3403
+ // rbx: base pointer of external storage
3404
+ switch (array_type) {
3405
+ case kExternalByteArray:
3406
+ case kExternalUnsignedByteArray:
3407
+ __ cvttsd2si(rdx, xmm0);
3408
+ __ movb(Operand(rbx, rdi, times_1, 0), rdx);
3409
+ break;
3410
+ case kExternalShortArray:
3411
+ case kExternalUnsignedShortArray:
3412
+ __ cvttsd2si(rdx, xmm0);
3413
+ __ movw(Operand(rbx, rdi, times_2, 0), rdx);
3414
+ break;
3415
+ case kExternalIntArray:
3416
+ case kExternalUnsignedIntArray: {
3417
+ // Convert to int64, so that NaN and infinities become
3418
+ // 0x8000000000000000, which is zero mod 2^32.
3419
+ __ cvttsd2siq(rdx, xmm0);
3420
+ __ movl(Operand(rbx, rdi, times_4, 0), rdx);
3421
+ break;
3422
+ }
3423
+ default:
3424
+ UNREACHABLE();
3425
+ break;
3471
3426
  }
3472
- default:
3473
- UNREACHABLE();
3474
- break;
3427
+ __ ret(0);
3475
3428
  }
3476
- __ ret(0);
3477
3429
  }
3478
3430
 
3479
3431
  // Slow case: call runtime.
@@ -3490,10 +3442,13 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
3490
3442
  __ push(rdx); // receiver
3491
3443
  __ push(rcx); // key
3492
3444
  __ push(rax); // value
3445
+ __ Push(Smi::FromInt(NONE)); // PropertyAttributes
3446
+ __ Push(Smi::FromInt(
3447
+ Code::ExtractExtraICStateFromFlags(flags) & kStrictMode));
3493
3448
  __ push(rbx); // return address
3494
3449
 
3495
3450
  // Do tail-call to runtime routine.
3496
- __ TailCallRuntime(Runtime::kSetProperty, 3, 1);
3451
+ __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
3497
3452
 
3498
3453
  return GetCode(flags);
3499
3454
  }