mustang 0.0.1 → 0.1.0

Files changed (560)
  1. data/.rspec +1 -0
  2. data/Isolate +9 -0
  3. data/README.md +6 -12
  4. data/Rakefile +30 -4
  5. data/TODO.md +9 -0
  6. data/ext/v8/extconf.rb +56 -0
  7. data/ext/v8/v8.cpp +37 -0
  8. data/ext/v8/v8_array.cpp +161 -0
  9. data/ext/v8/v8_array.h +17 -0
  10. data/ext/v8/v8_base.cpp +147 -0
  11. data/ext/v8/v8_base.h +23 -0
  12. data/ext/v8/v8_cast.cpp +151 -0
  13. data/ext/v8/v8_cast.h +64 -0
  14. data/ext/v8/v8_context.cpp +174 -0
  15. data/ext/v8/v8_context.h +12 -0
  16. data/ext/v8/v8_date.cpp +61 -0
  17. data/ext/v8/v8_date.h +16 -0
  18. data/ext/v8/v8_errors.cpp +147 -0
  19. data/ext/v8/v8_errors.h +19 -0
  20. data/ext/v8/v8_external.cpp +66 -0
  21. data/ext/v8/v8_external.h +16 -0
  22. data/ext/v8/v8_function.cpp +182 -0
  23. data/ext/v8/v8_function.h +14 -0
  24. data/ext/v8/v8_integer.cpp +70 -0
  25. data/ext/v8/v8_integer.h +16 -0
  26. data/ext/v8/v8_macros.h +30 -0
  27. data/ext/v8/v8_main.cpp +53 -0
  28. data/ext/v8/v8_main.h +13 -0
  29. data/ext/v8/v8_number.cpp +62 -0
  30. data/ext/v8/v8_number.h +16 -0
  31. data/ext/v8/v8_object.cpp +172 -0
  32. data/ext/v8/v8_object.h +17 -0
  33. data/ext/v8/v8_ref.cpp +72 -0
  34. data/ext/v8/v8_ref.h +43 -0
  35. data/ext/v8/v8_regexp.cpp +148 -0
  36. data/ext/v8/v8_regexp.h +16 -0
  37. data/ext/v8/v8_string.cpp +78 -0
  38. data/ext/v8/v8_string.h +16 -0
  39. data/ext/v8/v8_value.cpp +370 -0
  40. data/ext/v8/v8_value.h +19 -0
  41. data/gemspec.yml +2 -1
  42. data/lib/core_ext/class.rb +14 -0
  43. data/lib/core_ext/object.rb +12 -0
  44. data/lib/core_ext/symbol.rb +23 -0
  45. data/lib/mustang.rb +44 -0
  46. data/lib/mustang/context.rb +69 -0
  47. data/lib/mustang/errors.rb +36 -0
  48. data/lib/support/delegated.rb +25 -0
  49. data/lib/v8/array.rb +21 -0
  50. data/lib/v8/context.rb +13 -0
  51. data/lib/v8/date.rb +20 -0
  52. data/lib/v8/error.rb +15 -0
  53. data/lib/v8/external.rb +16 -0
  54. data/lib/v8/function.rb +11 -0
  55. data/lib/v8/integer.rb +16 -0
  56. data/lib/v8/number.rb +16 -0
  57. data/lib/v8/object.rb +66 -0
  58. data/lib/v8/regexp.rb +23 -0
  59. data/lib/v8/string.rb +27 -0
  60. data/mustang.gemspec +3 -0
  61. data/spec/core_ext/class_spec.rb +19 -0
  62. data/spec/core_ext/object_spec.rb +19 -0
  63. data/spec/core_ext/symbol_spec.rb +27 -0
  64. data/spec/fixtures/test1.js +2 -0
  65. data/spec/fixtures/test2.js +2 -0
  66. data/spec/spec_helper.rb +20 -0
  67. data/spec/v8/array_spec.rb +88 -0
  68. data/spec/v8/cast_spec.rb +151 -0
  69. data/spec/v8/context_spec.rb +78 -0
  70. data/spec/v8/data_spec.rb +39 -0
  71. data/spec/v8/date_spec.rb +45 -0
  72. data/spec/v8/empty_spec.rb +27 -0
  73. data/spec/v8/errors_spec.rb +142 -0
  74. data/spec/v8/external_spec.rb +44 -0
  75. data/spec/v8/function_spec.rb +170 -0
  76. data/spec/v8/integer_spec.rb +41 -0
  77. data/spec/v8/main_spec.rb +18 -0
  78. data/spec/v8/null_spec.rb +27 -0
  79. data/spec/v8/number_spec.rb +40 -0
  80. data/spec/v8/object_spec.rb +79 -0
  81. data/spec/v8/primitive_spec.rb +9 -0
  82. data/spec/v8/regexp_spec.rb +65 -0
  83. data/spec/v8/string_spec.rb +48 -0
  84. data/spec/v8/undefined_spec.rb +27 -0
  85. data/spec/v8/value_spec.rb +215 -0
  86. data/vendor/v8/.gitignore +2 -0
  87. data/vendor/v8/AUTHORS +3 -1
  88. data/vendor/v8/ChangeLog +117 -0
  89. data/vendor/v8/SConstruct +334 -53
  90. data/vendor/v8/include/v8-debug.h +21 -11
  91. data/vendor/v8/include/v8-preparser.h +1 -1
  92. data/vendor/v8/include/v8-profiler.h +122 -43
  93. data/vendor/v8/include/v8-testing.h +5 -0
  94. data/vendor/v8/include/v8.h +171 -17
  95. data/vendor/v8/preparser/SConscript +38 -0
  96. data/vendor/v8/preparser/preparser-process.cc +77 -114
  97. data/vendor/v8/samples/shell.cc +232 -46
  98. data/vendor/v8/src/SConscript +29 -5
  99. data/vendor/v8/src/accessors.cc +70 -211
  100. data/vendor/v8/{test/cctest/test-mips.cc → src/allocation-inl.h} +15 -18
  101. data/vendor/v8/src/allocation.cc +0 -82
  102. data/vendor/v8/src/allocation.h +9 -42
  103. data/vendor/v8/src/api.cc +1645 -1156
  104. data/vendor/v8/src/api.h +76 -12
  105. data/vendor/v8/src/apiutils.h +0 -7
  106. data/vendor/v8/src/arguments.h +15 -4
  107. data/vendor/v8/src/arm/assembler-arm-inl.h +10 -9
  108. data/vendor/v8/src/arm/assembler-arm.cc +62 -23
  109. data/vendor/v8/src/arm/assembler-arm.h +76 -11
  110. data/vendor/v8/src/arm/builtins-arm.cc +39 -33
  111. data/vendor/v8/src/arm/code-stubs-arm.cc +1182 -402
  112. data/vendor/v8/src/arm/code-stubs-arm.h +20 -54
  113. data/vendor/v8/src/arm/codegen-arm.cc +159 -106
  114. data/vendor/v8/src/arm/codegen-arm.h +6 -6
  115. data/vendor/v8/src/arm/constants-arm.h +16 -1
  116. data/vendor/v8/src/arm/cpu-arm.cc +7 -5
  117. data/vendor/v8/src/arm/debug-arm.cc +6 -4
  118. data/vendor/v8/src/arm/deoptimizer-arm.cc +51 -14
  119. data/vendor/v8/src/arm/disasm-arm.cc +47 -15
  120. data/vendor/v8/src/arm/frames-arm.h +1 -1
  121. data/vendor/v8/src/arm/full-codegen-arm.cc +724 -408
  122. data/vendor/v8/src/arm/ic-arm.cc +90 -85
  123. data/vendor/v8/src/arm/lithium-arm.cc +140 -69
  124. data/vendor/v8/src/arm/lithium-arm.h +161 -46
  125. data/vendor/v8/src/arm/lithium-codegen-arm.cc +567 -297
  126. data/vendor/v8/src/arm/lithium-codegen-arm.h +21 -9
  127. data/vendor/v8/src/arm/lithium-gap-resolver-arm.cc +2 -0
  128. data/vendor/v8/src/arm/macro-assembler-arm.cc +457 -96
  129. data/vendor/v8/src/arm/macro-assembler-arm.h +115 -18
  130. data/vendor/v8/src/arm/regexp-macro-assembler-arm.cc +20 -13
  131. data/vendor/v8/src/arm/regexp-macro-assembler-arm.h +1 -0
  132. data/vendor/v8/src/arm/simulator-arm.cc +184 -101
  133. data/vendor/v8/src/arm/simulator-arm.h +26 -21
  134. data/vendor/v8/src/arm/stub-cache-arm.cc +450 -467
  135. data/vendor/v8/src/arm/virtual-frame-arm.cc +14 -12
  136. data/vendor/v8/src/arm/virtual-frame-arm.h +11 -8
  137. data/vendor/v8/src/array.js +35 -18
  138. data/vendor/v8/src/assembler.cc +186 -92
  139. data/vendor/v8/src/assembler.h +106 -69
  140. data/vendor/v8/src/ast-inl.h +5 -0
  141. data/vendor/v8/src/ast.cc +46 -35
  142. data/vendor/v8/src/ast.h +107 -50
  143. data/vendor/v8/src/atomicops.h +2 -0
  144. data/vendor/v8/src/atomicops_internals_mips_gcc.h +169 -0
  145. data/vendor/v8/src/bootstrapper.cc +649 -399
  146. data/vendor/v8/src/bootstrapper.h +94 -27
  147. data/vendor/v8/src/builtins.cc +359 -227
  148. data/vendor/v8/src/builtins.h +157 -123
  149. data/vendor/v8/src/checks.cc +2 -2
  150. data/vendor/v8/src/checks.h +4 -0
  151. data/vendor/v8/src/code-stubs.cc +27 -17
  152. data/vendor/v8/src/code-stubs.h +38 -17
  153. data/vendor/v8/src/codegen-inl.h +5 -1
  154. data/vendor/v8/src/codegen.cc +27 -17
  155. data/vendor/v8/src/codegen.h +9 -9
  156. data/vendor/v8/src/compilation-cache.cc +92 -206
  157. data/vendor/v8/src/compilation-cache.h +205 -30
  158. data/vendor/v8/src/compiler.cc +107 -120
  159. data/vendor/v8/src/compiler.h +17 -2
  160. data/vendor/v8/src/contexts.cc +22 -15
  161. data/vendor/v8/src/contexts.h +14 -8
  162. data/vendor/v8/src/conversions.cc +86 -30
  163. data/vendor/v8/src/counters.cc +19 -4
  164. data/vendor/v8/src/counters.h +28 -16
  165. data/vendor/v8/src/cpu-profiler-inl.h +4 -3
  166. data/vendor/v8/src/cpu-profiler.cc +123 -72
  167. data/vendor/v8/src/cpu-profiler.h +33 -19
  168. data/vendor/v8/src/cpu.h +2 -0
  169. data/vendor/v8/src/d8-debug.cc +3 -3
  170. data/vendor/v8/src/d8-debug.h +7 -6
  171. data/vendor/v8/src/d8-posix.cc +2 -0
  172. data/vendor/v8/src/d8.cc +22 -12
  173. data/vendor/v8/src/d8.gyp +3 -0
  174. data/vendor/v8/src/d8.js +618 -0
  175. data/vendor/v8/src/data-flow.h +3 -3
  176. data/vendor/v8/src/dateparser.h +4 -2
  177. data/vendor/v8/src/debug-agent.cc +10 -9
  178. data/vendor/v8/src/debug-agent.h +9 -11
  179. data/vendor/v8/src/debug-debugger.js +121 -0
  180. data/vendor/v8/src/debug.cc +331 -227
  181. data/vendor/v8/src/debug.h +248 -219
  182. data/vendor/v8/src/deoptimizer.cc +173 -62
  183. data/vendor/v8/src/deoptimizer.h +119 -19
  184. data/vendor/v8/src/disasm.h +3 -0
  185. data/vendor/v8/src/disassembler.cc +10 -9
  186. data/vendor/v8/src/execution.cc +185 -129
  187. data/vendor/v8/src/execution.h +47 -78
  188. data/vendor/v8/src/extensions/experimental/break-iterator.cc +250 -0
  189. data/vendor/v8/src/extensions/experimental/break-iterator.h +89 -0
  190. data/vendor/v8/src/extensions/experimental/experimental.gyp +2 -0
  191. data/vendor/v8/src/extensions/experimental/i18n-extension.cc +22 -2
  192. data/vendor/v8/src/extensions/externalize-string-extension.cc +2 -2
  193. data/vendor/v8/src/extensions/gc-extension.cc +1 -1
  194. data/vendor/v8/src/factory.cc +261 -154
  195. data/vendor/v8/src/factory.h +162 -158
  196. data/vendor/v8/src/flag-definitions.h +17 -11
  197. data/vendor/v8/src/frame-element.cc +0 -5
  198. data/vendor/v8/src/frame-element.h +9 -13
  199. data/vendor/v8/src/frames-inl.h +7 -0
  200. data/vendor/v8/src/frames.cc +56 -46
  201. data/vendor/v8/src/frames.h +36 -25
  202. data/vendor/v8/src/full-codegen.cc +15 -24
  203. data/vendor/v8/src/full-codegen.h +13 -41
  204. data/vendor/v8/src/func-name-inferrer.cc +7 -6
  205. data/vendor/v8/src/func-name-inferrer.h +1 -1
  206. data/vendor/v8/src/gdb-jit.cc +1 -0
  207. data/vendor/v8/src/global-handles.cc +118 -56
  208. data/vendor/v8/src/global-handles.h +98 -40
  209. data/vendor/v8/src/globals.h +2 -2
  210. data/vendor/v8/src/handles-inl.h +106 -9
  211. data/vendor/v8/src/handles.cc +220 -157
  212. data/vendor/v8/src/handles.h +38 -59
  213. data/vendor/v8/src/hashmap.h +3 -3
  214. data/vendor/v8/src/heap-inl.h +141 -25
  215. data/vendor/v8/src/heap-profiler.cc +117 -63
  216. data/vendor/v8/src/heap-profiler.h +38 -21
  217. data/vendor/v8/src/heap.cc +805 -564
  218. data/vendor/v8/src/heap.h +640 -594
  219. data/vendor/v8/src/hydrogen-instructions.cc +216 -73
  220. data/vendor/v8/src/hydrogen-instructions.h +259 -124
  221. data/vendor/v8/src/hydrogen.cc +996 -1171
  222. data/vendor/v8/src/hydrogen.h +163 -144
  223. data/vendor/v8/src/ia32/assembler-ia32-inl.h +12 -11
  224. data/vendor/v8/src/ia32/assembler-ia32.cc +85 -39
  225. data/vendor/v8/src/ia32/assembler-ia32.h +82 -16
  226. data/vendor/v8/src/ia32/builtins-ia32.cc +64 -58
  227. data/vendor/v8/src/ia32/code-stubs-ia32.cc +248 -324
  228. data/vendor/v8/src/ia32/code-stubs-ia32.h +3 -44
  229. data/vendor/v8/src/ia32/codegen-ia32.cc +217 -165
  230. data/vendor/v8/src/ia32/codegen-ia32.h +3 -0
  231. data/vendor/v8/src/ia32/cpu-ia32.cc +6 -5
  232. data/vendor/v8/src/ia32/debug-ia32.cc +8 -5
  233. data/vendor/v8/src/ia32/deoptimizer-ia32.cc +124 -14
  234. data/vendor/v8/src/ia32/disasm-ia32.cc +85 -62
  235. data/vendor/v8/src/ia32/frames-ia32.h +1 -1
  236. data/vendor/v8/src/ia32/full-codegen-ia32.cc +348 -435
  237. data/vendor/v8/src/ia32/ic-ia32.cc +91 -91
  238. data/vendor/v8/src/ia32/lithium-codegen-ia32.cc +500 -255
  239. data/vendor/v8/src/ia32/lithium-codegen-ia32.h +13 -4
  240. data/vendor/v8/src/ia32/lithium-gap-resolver-ia32.cc +6 -0
  241. data/vendor/v8/src/ia32/lithium-ia32.cc +122 -45
  242. data/vendor/v8/src/ia32/lithium-ia32.h +128 -41
  243. data/vendor/v8/src/ia32/macro-assembler-ia32.cc +109 -84
  244. data/vendor/v8/src/ia32/macro-assembler-ia32.h +18 -9
  245. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.cc +26 -15
  246. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.h +1 -0
  247. data/vendor/v8/src/ia32/register-allocator-ia32.cc +30 -30
  248. data/vendor/v8/src/ia32/simulator-ia32.h +4 -4
  249. data/vendor/v8/src/ia32/stub-cache-ia32.cc +383 -400
  250. data/vendor/v8/src/ia32/virtual-frame-ia32.cc +36 -13
  251. data/vendor/v8/src/ia32/virtual-frame-ia32.h +11 -5
  252. data/vendor/v8/src/ic-inl.h +12 -2
  253. data/vendor/v8/src/ic.cc +304 -221
  254. data/vendor/v8/src/ic.h +115 -58
  255. data/vendor/v8/src/interpreter-irregexp.cc +25 -21
  256. data/vendor/v8/src/interpreter-irregexp.h +2 -1
  257. data/vendor/v8/src/isolate.cc +883 -0
  258. data/vendor/v8/src/isolate.h +1304 -0
  259. data/vendor/v8/src/json.js +10 -10
  260. data/vendor/v8/src/jsregexp.cc +111 -80
  261. data/vendor/v8/src/jsregexp.h +6 -7
  262. data/vendor/v8/src/jump-target-heavy.cc +5 -8
  263. data/vendor/v8/src/jump-target-heavy.h +0 -6
  264. data/vendor/v8/src/jump-target-inl.h +1 -1
  265. data/vendor/v8/src/jump-target-light.cc +3 -3
  266. data/vendor/v8/src/lithium-allocator-inl.h +2 -0
  267. data/vendor/v8/src/lithium-allocator.cc +42 -30
  268. data/vendor/v8/src/lithium-allocator.h +8 -22
  269. data/vendor/v8/src/lithium.cc +1 -0
  270. data/vendor/v8/src/liveedit.cc +141 -99
  271. data/vendor/v8/src/liveedit.h +7 -2
  272. data/vendor/v8/src/liveobjectlist-inl.h +90 -0
  273. data/vendor/v8/src/liveobjectlist.cc +2537 -1
  274. data/vendor/v8/src/liveobjectlist.h +245 -35
  275. data/vendor/v8/src/log-utils.cc +122 -35
  276. data/vendor/v8/src/log-utils.h +33 -36
  277. data/vendor/v8/src/log.cc +299 -241
  278. data/vendor/v8/src/log.h +177 -110
  279. data/vendor/v8/src/mark-compact.cc +612 -470
  280. data/vendor/v8/src/mark-compact.h +153 -80
  281. data/vendor/v8/src/messages.cc +16 -14
  282. data/vendor/v8/src/messages.js +30 -7
  283. data/vendor/v8/src/mips/assembler-mips-inl.h +155 -35
  284. data/vendor/v8/src/mips/assembler-mips.cc +1093 -219
  285. data/vendor/v8/src/mips/assembler-mips.h +552 -153
  286. data/vendor/v8/src/mips/builtins-mips.cc +43 -100
  287. data/vendor/v8/src/mips/code-stubs-mips.cc +752 -0
  288. data/vendor/v8/src/mips/code-stubs-mips.h +511 -0
  289. data/vendor/v8/src/mips/codegen-mips-inl.h +8 -14
  290. data/vendor/v8/src/mips/codegen-mips.cc +672 -896
  291. data/vendor/v8/src/mips/codegen-mips.h +271 -69
  292. data/vendor/v8/src/mips/constants-mips.cc +44 -20
  293. data/vendor/v8/src/mips/constants-mips.h +238 -40
  294. data/vendor/v8/src/mips/cpu-mips.cc +20 -3
  295. data/vendor/v8/src/mips/debug-mips.cc +35 -7
  296. data/vendor/v8/src/mips/deoptimizer-mips.cc +91 -0
  297. data/vendor/v8/src/mips/disasm-mips.cc +329 -93
  298. data/vendor/v8/src/mips/frames-mips.cc +2 -50
  299. data/vendor/v8/src/mips/frames-mips.h +24 -9
  300. data/vendor/v8/src/mips/full-codegen-mips.cc +473 -23
  301. data/vendor/v8/src/mips/ic-mips.cc +81 -45
  302. data/vendor/v8/src/mips/jump-target-mips.cc +11 -106
  303. data/vendor/v8/src/mips/lithium-codegen-mips.h +65 -0
  304. data/vendor/v8/src/mips/lithium-mips.h +304 -0
  305. data/vendor/v8/src/mips/macro-assembler-mips.cc +2391 -390
  306. data/vendor/v8/src/mips/macro-assembler-mips.h +718 -121
  307. data/vendor/v8/src/mips/regexp-macro-assembler-mips.cc +478 -0
  308. data/vendor/v8/src/mips/regexp-macro-assembler-mips.h +250 -0
  309. data/vendor/v8/src/mips/register-allocator-mips-inl.h +0 -3
  310. data/vendor/v8/src/mips/register-allocator-mips.h +3 -2
  311. data/vendor/v8/src/mips/simulator-mips.cc +1009 -221
  312. data/vendor/v8/src/mips/simulator-mips.h +119 -36
  313. data/vendor/v8/src/mips/stub-cache-mips.cc +331 -148
  314. data/vendor/v8/src/mips/{fast-codegen-mips.cc → virtual-frame-mips-inl.h} +11 -30
  315. data/vendor/v8/src/mips/virtual-frame-mips.cc +137 -149
  316. data/vendor/v8/src/mips/virtual-frame-mips.h +294 -312
  317. data/vendor/v8/src/mirror-debugger.js +9 -8
  318. data/vendor/v8/src/mksnapshot.cc +2 -2
  319. data/vendor/v8/src/objects-debug.cc +16 -16
  320. data/vendor/v8/src/objects-inl.h +421 -195
  321. data/vendor/v8/src/objects-printer.cc +7 -7
  322. data/vendor/v8/src/objects-visiting.cc +1 -1
  323. data/vendor/v8/src/objects-visiting.h +33 -12
  324. data/vendor/v8/src/objects.cc +935 -658
  325. data/vendor/v8/src/objects.h +234 -139
  326. data/vendor/v8/src/parser.cc +484 -439
  327. data/vendor/v8/src/parser.h +35 -14
  328. data/vendor/v8/src/platform-cygwin.cc +173 -107
  329. data/vendor/v8/src/platform-freebsd.cc +224 -72
  330. data/vendor/v8/src/platform-linux.cc +234 -95
  331. data/vendor/v8/src/platform-macos.cc +215 -82
  332. data/vendor/v8/src/platform-nullos.cc +9 -3
  333. data/vendor/v8/src/platform-openbsd.cc +22 -7
  334. data/vendor/v8/src/platform-posix.cc +30 -5
  335. data/vendor/v8/src/platform-solaris.cc +120 -38
  336. data/vendor/v8/src/platform-tls-mac.h +62 -0
  337. data/vendor/v8/src/platform-tls-win32.h +62 -0
  338. data/vendor/v8/src/platform-tls.h +50 -0
  339. data/vendor/v8/src/platform-win32.cc +195 -97
  340. data/vendor/v8/src/platform.h +72 -15
  341. data/vendor/v8/src/preparse-data.cc +2 -0
  342. data/vendor/v8/src/preparser-api.cc +8 -2
  343. data/vendor/v8/src/preparser.cc +1 -1
  344. data/vendor/v8/src/prettyprinter.cc +43 -52
  345. data/vendor/v8/src/prettyprinter.h +1 -1
  346. data/vendor/v8/src/profile-generator-inl.h +0 -28
  347. data/vendor/v8/src/profile-generator.cc +942 -685
  348. data/vendor/v8/src/profile-generator.h +210 -176
  349. data/vendor/v8/src/property.cc +6 -0
  350. data/vendor/v8/src/property.h +14 -3
  351. data/vendor/v8/src/regexp-macro-assembler-irregexp.cc +1 -1
  352. data/vendor/v8/src/regexp-macro-assembler.cc +28 -19
  353. data/vendor/v8/src/regexp-macro-assembler.h +11 -6
  354. data/vendor/v8/src/regexp-stack.cc +18 -10
  355. data/vendor/v8/src/regexp-stack.h +45 -21
  356. data/vendor/v8/src/regexp.js +3 -3
  357. data/vendor/v8/src/register-allocator-inl.h +3 -3
  358. data/vendor/v8/src/register-allocator.cc +1 -7
  359. data/vendor/v8/src/register-allocator.h +5 -15
  360. data/vendor/v8/src/rewriter.cc +2 -1
  361. data/vendor/v8/src/runtime-profiler.cc +158 -128
  362. data/vendor/v8/src/runtime-profiler.h +131 -15
  363. data/vendor/v8/src/runtime.cc +2409 -1692
  364. data/vendor/v8/src/runtime.h +93 -17
  365. data/vendor/v8/src/safepoint-table.cc +3 -0
  366. data/vendor/v8/src/safepoint-table.h +9 -3
  367. data/vendor/v8/src/scanner-base.cc +21 -28
  368. data/vendor/v8/src/scanner-base.h +22 -11
  369. data/vendor/v8/src/scanner.cc +3 -5
  370. data/vendor/v8/src/scanner.h +4 -2
  371. data/vendor/v8/src/scopeinfo.cc +11 -16
  372. data/vendor/v8/src/scopeinfo.h +26 -15
  373. data/vendor/v8/src/scopes.cc +67 -37
  374. data/vendor/v8/src/scopes.h +26 -12
  375. data/vendor/v8/src/serialize.cc +193 -154
  376. data/vendor/v8/src/serialize.h +41 -36
  377. data/vendor/v8/src/small-pointer-list.h +163 -0
  378. data/vendor/v8/src/snapshot-common.cc +1 -1
  379. data/vendor/v8/src/snapshot.h +3 -1
  380. data/vendor/v8/src/spaces-inl.h +30 -25
  381. data/vendor/v8/src/spaces.cc +263 -370
  382. data/vendor/v8/src/spaces.h +178 -166
  383. data/vendor/v8/src/string-search.cc +4 -3
  384. data/vendor/v8/src/string-search.h +21 -20
  385. data/vendor/v8/src/string-stream.cc +32 -24
  386. data/vendor/v8/src/string.js +7 -7
  387. data/vendor/v8/src/stub-cache.cc +324 -248
  388. data/vendor/v8/src/stub-cache.h +181 -155
  389. data/vendor/v8/src/token.cc +3 -3
  390. data/vendor/v8/src/token.h +3 -3
  391. data/vendor/v8/src/top.cc +218 -390
  392. data/vendor/v8/src/type-info.cc +98 -32
  393. data/vendor/v8/src/type-info.h +10 -3
  394. data/vendor/v8/src/unicode.cc +1 -1
  395. data/vendor/v8/src/unicode.h +1 -1
  396. data/vendor/v8/src/utils.h +3 -0
  397. data/vendor/v8/src/v8-counters.cc +18 -11
  398. data/vendor/v8/src/v8-counters.h +34 -13
  399. data/vendor/v8/src/v8.cc +66 -121
  400. data/vendor/v8/src/v8.h +7 -4
  401. data/vendor/v8/src/v8globals.h +18 -12
  402. data/vendor/v8/src/{memory.h → v8memory.h} +0 -0
  403. data/vendor/v8/src/v8natives.js +59 -18
  404. data/vendor/v8/src/v8threads.cc +127 -114
  405. data/vendor/v8/src/v8threads.h +42 -35
  406. data/vendor/v8/src/v8utils.h +2 -39
  407. data/vendor/v8/src/variables.h +1 -1
  408. data/vendor/v8/src/version.cc +26 -5
  409. data/vendor/v8/src/version.h +4 -0
  410. data/vendor/v8/src/virtual-frame-heavy-inl.h +2 -4
  411. data/vendor/v8/src/virtual-frame-light-inl.h +5 -4
  412. data/vendor/v8/src/vm-state-inl.h +21 -17
  413. data/vendor/v8/src/vm-state.h +7 -5
  414. data/vendor/v8/src/win32-headers.h +1 -0
  415. data/vendor/v8/src/x64/assembler-x64-inl.h +12 -11
  416. data/vendor/v8/src/x64/assembler-x64.cc +80 -40
  417. data/vendor/v8/src/x64/assembler-x64.h +67 -17
  418. data/vendor/v8/src/x64/builtins-x64.cc +34 -33
  419. data/vendor/v8/src/x64/code-stubs-x64.cc +636 -377
  420. data/vendor/v8/src/x64/code-stubs-x64.h +14 -48
  421. data/vendor/v8/src/x64/codegen-x64-inl.h +1 -1
  422. data/vendor/v8/src/x64/codegen-x64.cc +158 -136
  423. data/vendor/v8/src/x64/codegen-x64.h +4 -1
  424. data/vendor/v8/src/x64/cpu-x64.cc +7 -5
  425. data/vendor/v8/src/x64/debug-x64.cc +8 -6
  426. data/vendor/v8/src/x64/deoptimizer-x64.cc +195 -20
  427. data/vendor/v8/src/x64/disasm-x64.cc +42 -23
  428. data/vendor/v8/src/x64/frames-x64.cc +1 -1
  429. data/vendor/v8/src/x64/frames-x64.h +2 -2
  430. data/vendor/v8/src/x64/full-codegen-x64.cc +780 -218
  431. data/vendor/v8/src/x64/ic-x64.cc +77 -79
  432. data/vendor/v8/src/x64/jump-target-x64.cc +1 -1
  433. data/vendor/v8/src/x64/lithium-codegen-x64.cc +698 -181
  434. data/vendor/v8/src/x64/lithium-codegen-x64.h +31 -6
  435. data/vendor/v8/src/x64/lithium-x64.cc +136 -54
  436. data/vendor/v8/src/x64/lithium-x64.h +142 -51
  437. data/vendor/v8/src/x64/macro-assembler-x64.cc +456 -187
  438. data/vendor/v8/src/x64/macro-assembler-x64.h +166 -34
  439. data/vendor/v8/src/x64/regexp-macro-assembler-x64.cc +44 -28
  440. data/vendor/v8/src/x64/regexp-macro-assembler-x64.h +8 -4
  441. data/vendor/v8/src/x64/register-allocator-x64-inl.h +3 -3
  442. data/vendor/v8/src/x64/register-allocator-x64.cc +12 -8
  443. data/vendor/v8/src/x64/simulator-x64.h +5 -5
  444. data/vendor/v8/src/x64/stub-cache-x64.cc +299 -344
  445. data/vendor/v8/src/x64/virtual-frame-x64.cc +37 -13
  446. data/vendor/v8/src/x64/virtual-frame-x64.h +13 -7
  447. data/vendor/v8/src/zone-inl.h +49 -3
  448. data/vendor/v8/src/zone.cc +42 -41
  449. data/vendor/v8/src/zone.h +37 -34
  450. data/vendor/v8/test/benchmarks/testcfg.py +100 -0
  451. data/vendor/v8/test/cctest/SConscript +5 -4
  452. data/vendor/v8/test/cctest/cctest.h +3 -2
  453. data/vendor/v8/test/cctest/cctest.status +6 -11
  454. data/vendor/v8/test/cctest/test-accessors.cc +3 -3
  455. data/vendor/v8/test/cctest/test-alloc.cc +39 -33
  456. data/vendor/v8/test/cctest/test-api.cc +1092 -205
  457. data/vendor/v8/test/cctest/test-assembler-arm.cc +39 -25
  458. data/vendor/v8/test/cctest/test-assembler-ia32.cc +36 -37
  459. data/vendor/v8/test/cctest/test-assembler-mips.cc +1098 -40
  460. data/vendor/v8/test/cctest/test-assembler-x64.cc +32 -25
  461. data/vendor/v8/test/cctest/test-ast.cc +1 -0
  462. data/vendor/v8/test/cctest/test-circular-queue.cc +8 -5
  463. data/vendor/v8/test/cctest/test-compiler.cc +24 -24
  464. data/vendor/v8/test/cctest/test-cpu-profiler.cc +140 -5
  465. data/vendor/v8/test/cctest/test-dataflow.cc +1 -0
  466. data/vendor/v8/test/cctest/test-debug.cc +136 -77
  467. data/vendor/v8/test/cctest/test-decls.cc +1 -1
  468. data/vendor/v8/test/cctest/test-deoptimization.cc +25 -24
  469. data/vendor/v8/test/cctest/test-disasm-arm.cc +9 -4
  470. data/vendor/v8/test/cctest/test-disasm-ia32.cc +10 -8
  471. data/vendor/v8/test/cctest/test-func-name-inference.cc +10 -4
  472. data/vendor/v8/test/cctest/test-heap-profiler.cc +226 -164
  473. data/vendor/v8/test/cctest/test-heap.cc +240 -217
  474. data/vendor/v8/test/cctest/test-liveedit.cc +1 -0
  475. data/vendor/v8/test/cctest/test-log-stack-tracer.cc +18 -20
  476. data/vendor/v8/test/cctest/test-log.cc +114 -108
  477. data/vendor/v8/test/cctest/test-macro-assembler-x64.cc +247 -177
  478. data/vendor/v8/test/cctest/test-mark-compact.cc +129 -90
  479. data/vendor/v8/test/cctest/test-parsing.cc +15 -14
  480. data/vendor/v8/test/cctest/test-platform-linux.cc +1 -0
  481. data/vendor/v8/test/cctest/test-platform-tls.cc +66 -0
  482. data/vendor/v8/test/cctest/test-platform-win32.cc +1 -0
  483. data/vendor/v8/test/cctest/test-profile-generator.cc +1 -1
  484. data/vendor/v8/test/cctest/test-regexp.cc +53 -41
  485. data/vendor/v8/test/cctest/test-reloc-info.cc +18 -11
  486. data/vendor/v8/test/cctest/test-serialize.cc +44 -43
  487. data/vendor/v8/test/cctest/test-sockets.cc +8 -3
  488. data/vendor/v8/test/cctest/test-spaces.cc +47 -29
  489. data/vendor/v8/test/cctest/test-strings.cc +20 -20
  490. data/vendor/v8/test/cctest/test-thread-termination.cc +8 -3
  491. data/vendor/v8/test/cctest/test-threads.cc +5 -3
  492. data/vendor/v8/test/cctest/test-utils.cc +5 -4
  493. data/vendor/v8/test/cctest/testcfg.py +7 -3
  494. data/vendor/v8/test/es5conform/es5conform.status +2 -77
  495. data/vendor/v8/test/es5conform/testcfg.py +1 -1
  496. data/vendor/v8/test/message/testcfg.py +1 -1
  497. data/vendor/v8/test/mjsunit/accessors-on-global-object.js +3 -3
  498. data/vendor/v8/test/mjsunit/array-concat.js +43 -1
  499. data/vendor/v8/test/mjsunit/array-join.js +25 -0
  500. data/vendor/v8/test/mjsunit/bitops-info.js +7 -1
  501. data/vendor/v8/test/mjsunit/compiler/array-length.js +2 -2
  502. data/vendor/v8/test/mjsunit/compiler/global-accessors.js +47 -0
  503. data/vendor/v8/test/mjsunit/compiler/pic.js +1 -1
  504. data/vendor/v8/test/mjsunit/compiler/regress-loadfield.js +65 -0
  505. data/vendor/v8/test/mjsunit/math-sqrt.js +5 -1
  506. data/vendor/v8/test/mjsunit/mjsunit.js +59 -8
  507. data/vendor/v8/test/mjsunit/mjsunit.status +0 -12
  508. data/vendor/v8/test/mjsunit/mul-exhaustive.js +129 -11
  509. data/vendor/v8/test/mjsunit/negate-zero.js +1 -1
  510. data/vendor/v8/test/mjsunit/object-freeze.js +5 -13
  511. data/vendor/v8/test/mjsunit/object-prevent-extensions.js +9 -50
  512. data/vendor/v8/test/mjsunit/object-seal.js +4 -13
  513. data/vendor/v8/test/mjsunit/override-eval-with-non-function.js +36 -0
  514. data/vendor/v8/test/mjsunit/regress/regress-1145.js +54 -0
  515. data/vendor/v8/test/mjsunit/regress/regress-1172-bis.js +37 -0
  516. data/vendor/v8/test/mjsunit/regress/regress-1181.js +54 -0
  517. data/vendor/v8/test/mjsunit/regress/regress-1207.js +35 -0
  518. data/vendor/v8/test/mjsunit/regress/regress-1209.js +34 -0
  519. data/vendor/v8/test/mjsunit/regress/regress-1210.js +48 -0
  520. data/vendor/v8/test/mjsunit/regress/regress-1213.js +43 -0
  521. data/vendor/v8/test/mjsunit/regress/regress-1218.js +29 -0
  522. data/vendor/v8/test/mjsunit/regress/regress-1229.js +79 -0
  523. data/vendor/v8/test/mjsunit/regress/regress-1233.js +47 -0
  524. data/vendor/v8/test/mjsunit/regress/regress-1236.js +34 -0
  525. data/vendor/v8/test/mjsunit/regress/regress-1237.js +36 -0
  526. data/vendor/v8/test/mjsunit/regress/regress-1240.js +39 -0
  527. data/vendor/v8/test/mjsunit/regress/regress-1257.js +58 -0
  528. data/vendor/v8/test/mjsunit/regress/regress-1278.js +69 -0
  529. data/vendor/v8/test/mjsunit/regress/regress-create-exception.js +1 -0
  530. data/vendor/v8/test/mjsunit/regress/regress-lazy-deopt-reloc.js +52 -0
  531. data/vendor/v8/test/mjsunit/sin-cos.js +15 -10
  532. data/vendor/v8/test/mjsunit/smi-negative-zero.js +2 -2
  533. data/vendor/v8/test/mjsunit/str-to-num.js +1 -1
  534. data/vendor/v8/test/mjsunit/strict-mode.js +435 -0
  535. data/vendor/v8/test/mjsunit/testcfg.py +23 -6
  536. data/vendor/v8/test/mozilla/mozilla.status +0 -2
  537. data/vendor/v8/test/mozilla/testcfg.py +1 -1
  538. data/vendor/v8/test/preparser/empty.js +28 -0
  539. data/vendor/v8/test/preparser/functions-only.js +38 -0
  540. data/vendor/v8/test/preparser/non-alphanum.js +34 -0
  541. data/vendor/v8/test/preparser/symbols-only.js +49 -0
  542. data/vendor/v8/test/preparser/testcfg.py +90 -0
  543. data/vendor/v8/test/sputnik/testcfg.py +1 -1
  544. data/vendor/v8/test/test262/README +16 -0
  545. data/vendor/v8/test/test262/harness-adapt.js +80 -0
  546. data/vendor/v8/test/test262/test262.status +1506 -0
  547. data/vendor/v8/test/test262/testcfg.py +123 -0
  548. data/vendor/v8/tools/freebsd-tick-processor +10 -0
  549. data/vendor/v8/tools/gyp/v8.gyp +8 -33
  550. data/vendor/v8/tools/linux-tick-processor +5 -3
  551. data/vendor/v8/tools/test.py +37 -14
  552. data/vendor/v8/tools/tickprocessor.js +22 -8
  553. data/vendor/v8/tools/visual_studio/v8_base.vcproj +13 -1
  554. data/vendor/v8/tools/visual_studio/v8_base_arm.vcproj +5 -1
  555. data/vendor/v8/tools/visual_studio/v8_base_x64.vcproj +5 -1
  556. data/vendor/v8/tools/visual_studio/x64.vsprops +1 -0
  557. metadata +1495 -1341
  558. data/ext/extconf.rb +0 -22
  559. data/ext/mustang.cpp +0 -58
  560. data/vendor/v8/src/top.h +0 -608
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -69,7 +69,7 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm,
   // JumpToExternalReference expects rax to contain the number of arguments
   // including the receiver and the extra arguments.
   __ addq(rax, Immediate(num_extra_args + 1));
-  __ JumpToExternalReference(ExternalReference(id), 1);
+  __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1);
 }
 
 
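Note: nearly every hunk in this upgrade follows one theme. The bundled V8 was bumped to a 3.x snapshot that introduced isolates (see the new src/isolate.cc and src/isolate.h in the file list above), so process-global statics such as Factory::, Counters::, and Builtins::builtin() become state reached through an Isolate, usually via masm->isolate(). A minimal, self-contained C++ sketch of that refactor pattern, using illustrative stand-in classes rather than V8's real ones:

    #include <iostream>
    #include <map>
    #include <string>

    // Hypothetical stand-ins for V8's Counters and Isolate; names are ours.
    class Counters {
     public:
      void Increment(const std::string& name) { ++counts_[name]; }
      int Get(const std::string& name) const {
        auto it = counts_.find(name);
        return it == counts_.end() ? 0 : it->second;
      }
     private:
      std::map<std::string, int> counts_;
    };

    class Isolate {
     public:
      Counters* counters() { return &counters_; }
     private:
      Counters counters_;  // owned per isolate, not shared globally
    };

    int main() {
      Isolate a, b;  // each isolate owns its own mutable state
      a.counters()->Increment("constructed_objects");
      std::cout << a.counters()->Get("constructed_objects") << " "
                << b.counters()->Get("constructed_objects") << "\n";  // 1 0
    }

The hunks below, which appear to come from src/x64/builtins-x64.cc and src/x64/code-stubs-x64.cc (both in the file list above), apply exactly this change at each call site.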
@@ -98,7 +98,7 @@ void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
   // Set expected number of arguments to zero (not changing rax).
   __ movq(rbx, Immediate(0));
   __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
-  __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
+  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
           RelocInfo::CODE_TARGET);
 }
 
@@ -127,7 +127,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
   ExternalReference debug_step_in_fp =
-      ExternalReference::debug_step_in_fp_address();
+      ExternalReference::debug_step_in_fp_address(masm->isolate());
   __ movq(kScratchRegister, debug_step_in_fp);
   __ cmpq(Operand(kScratchRegister, 0), Immediate(0));
   __ j(not_equal, &rt_call);
@@ -339,8 +339,8 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
   // Call the function.
   if (is_api_function) {
     __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
-    Handle<Code> code = Handle<Code>(
-        Builtins::builtin(Builtins::HandleApiCallConstruct));
+    Handle<Code> code =
+        masm->isolate()->builtins()->HandleApiCallConstruct();
     ParameterCount expected(0);
     __ InvokeCode(code, expected, expected,
                   RelocInfo::CODE_TARGET, CALL_FUNCTION);
@@ -379,7 +379,8 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
   SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
   __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
   __ push(rcx);
-  __ IncrementCounter(&Counters::constructed_objects, 1);
+  Counters* counters = masm->isolate()->counters();
+  __ IncrementCounter(counters->constructed_objects(), 1);
   __ ret(0);
 }
 
@@ -492,7 +493,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
   // Invoke the code.
   if (is_construct) {
     // Expects rdi to hold function pointer.
-    __ Call(Handle<Code>(Builtins::builtin(Builtins::JSConstructCall)),
+    __ Call(masm->isolate()->builtins()->JSConstructCall(),
             RelocInfo::CODE_TARGET);
   } else {
     ParameterCount actual(rax);
@@ -630,7 +631,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
   __ testq(rax, rax);
   __ j(not_zero, &done);
   __ pop(rbx);
-  __ Push(Factory::undefined_value());
+  __ Push(FACTORY->undefined_value());
   __ push(rbx);
   __ incq(rax);
   __ bind(&done);
@@ -733,7 +734,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
   __ j(not_zero, &function);
   __ Set(rbx, 0);
   __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
-  __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
+  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
           RelocInfo::CODE_TARGET);
   __ bind(&function);
 }
@@ -748,7 +749,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
   __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
   __ cmpq(rax, rbx);
   __ j(not_equal,
-       Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
+       masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
        RelocInfo::CODE_TARGET);
 
   ParameterCount expected(0);
@@ -863,7 +864,8 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
   __ movq(rdx, Operand(rbp, kArgumentsOffset));  // load arguments
 
   // Use inline caching to speed up access to arguments.
-  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+  Handle<Code> ic =
+      masm->isolate()->builtins()->KeyedLoadIC_Initialize();
   __ Call(ic, RelocInfo::CODE_TARGET);
   // It is important that we do not have a test instruction after the
   // call. A test instruction after the call is used to indicate that
@@ -935,7 +937,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
   // scratch2: start of next object
   __ movq(FieldOperand(result, JSObject::kMapOffset), scratch1);
   __ Move(FieldOperand(result, JSArray::kPropertiesOffset),
-          Factory::empty_fixed_array());
+          FACTORY->empty_fixed_array());
   // Field JSArray::kElementsOffset is initialized later.
   __ Move(FieldOperand(result, JSArray::kLengthOffset), Smi::FromInt(0));
 
@@ -943,7 +945,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
   // fixed array.
   if (initial_capacity == 0) {
     __ Move(FieldOperand(result, JSArray::kElementsOffset),
-            Factory::empty_fixed_array());
+            FACTORY->empty_fixed_array());
     return;
   }
 
@@ -960,7 +962,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
   // scratch1: elements array
   // scratch2: start of next object
   __ Move(FieldOperand(scratch1, HeapObject::kMapOffset),
-          Factory::fixed_array_map());
+          FACTORY->fixed_array_map());
   __ Move(FieldOperand(scratch1, FixedArray::kLengthOffset),
           Smi::FromInt(initial_capacity));
 
@@ -968,7 +970,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
   // Reconsider loop unfolding if kPreallocatedArrayElements gets changed.
   static const int kLoopUnfoldLimit = 4;
   ASSERT(kPreallocatedArrayElements <= kLoopUnfoldLimit);
-  __ Move(scratch3, Factory::the_hole_value());
+  __ Move(scratch3, FACTORY->the_hole_value());
   if (initial_capacity <= kLoopUnfoldLimit) {
     // Use a scratch register here to have only one reloc info when unfolding
     // the loop.
@@ -1052,7 +1054,7 @@ static void AllocateJSArray(MacroAssembler* masm,
   // array_size: size of array (smi)
   __ bind(&allocated);
   __ movq(FieldOperand(result, JSObject::kMapOffset), elements_array);
-  __ Move(elements_array, Factory::empty_fixed_array());
+  __ Move(elements_array, FACTORY->empty_fixed_array());
   __ movq(FieldOperand(result, JSArray::kPropertiesOffset), elements_array);
   // Field JSArray::kElementsOffset is initialized later.
   __ movq(FieldOperand(result, JSArray::kLengthOffset), array_size);
@@ -1071,7 +1073,7 @@ static void AllocateJSArray(MacroAssembler* masm,
   // elements_array_end: start of next object
   // array_size: size of array (smi)
   __ Move(FieldOperand(elements_array, JSObject::kMapOffset),
-          Factory::fixed_array_map());
+          FACTORY->fixed_array_map());
   Label not_empty_2, fill_array;
   __ SmiTest(array_size);
   __ j(not_zero, &not_empty_2);
@@ -1092,7 +1094,7 @@ static void AllocateJSArray(MacroAssembler* masm,
   __ bind(&fill_array);
   if (fill_with_hole) {
     Label loop, entry;
-    __ Move(scratch, Factory::the_hole_value());
+    __ Move(scratch, FACTORY->the_hole_value());
     __ lea(elements_array, Operand(elements_array,
                                    FixedArray::kHeaderSize - kHeapObjectTag));
     __ jmp(&entry);
@@ -1137,7 +1139,8 @@ static void ArrayNativeCode(MacroAssembler* masm,
                        r8,
                        kPreallocatedArrayElements,
                        call_generic_code);
-  __ IncrementCounter(&Counters::array_function_native, 1);
+  Counters* counters = masm->isolate()->counters();
+  __ IncrementCounter(counters->array_function_native(), 1);
   __ movq(rax, rbx);
   __ ret(kPointerSize);
 
@@ -1168,7 +1171,7 @@ static void ArrayNativeCode(MacroAssembler* masm,
                   r9,
                   true,
                   call_generic_code);
-  __ IncrementCounter(&Counters::array_function_native, 1);
+  __ IncrementCounter(counters->array_function_native(), 1);
   __ movq(rax, rbx);
   __ ret(2 * kPointerSize);
 
@@ -1190,7 +1193,7 @@ static void ArrayNativeCode(MacroAssembler* masm,
                   r9,
                   false,
                   call_generic_code);
-  __ IncrementCounter(&Counters::array_function_native, 1);
+  __ IncrementCounter(counters->array_function_native(), 1);
 
   // rax: argc
   // rbx: JSArray
@@ -1248,7 +1251,7 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
   __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi);
 
   if (FLAG_debug_code) {
-    // Initial map for the builtin Array function shoud be a map.
+    // Initial map for the builtin Array functions should be maps.
     __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
     // Will both indicate a NULL and a Smi.
     ASSERT(kSmiTag == 0);
@@ -1264,8 +1267,8 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
   // Jump to the generic array code in case the specialized code cannot handle
   // the construction.
   __ bind(&generic_array_code);
-  Code* code = Builtins::builtin(Builtins::ArrayCodeGeneric);
-  Handle<Code> array_code(code);
+  Handle<Code> array_code =
+      masm->isolate()->builtins()->ArrayCodeGeneric();
   __ Jump(array_code, RelocInfo::CODE_TARGET);
 }
 
@@ -1280,11 +1283,8 @@ void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
   Label generic_constructor;
 
   if (FLAG_debug_code) {
-    // The array construct code is only set for the builtin Array function which
-    // does always have a map.
-    __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rbx);
-    __ cmpq(rdi, rbx);
-    __ Check(equal, "Unexpected Array function");
+    // The array construct code is only set for the builtin and internal
+    // Array functions which always have a map.
     // Initial map for the builtin Array function should be a map.
     __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
     // Will both indicate a NULL and a Smi.
@@ -1301,8 +1301,8 @@ void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
   // Jump to the generic construct code in case the specialized code cannot
   // handle the construction.
   __ bind(&generic_constructor);
-  Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
-  Handle<Code> generic_construct_stub(code);
+  Handle<Code> generic_construct_stub =
+      masm->isolate()->builtins()->JSConstructStubGeneric();
   __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
 }
 
@@ -1356,7 +1356,8 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
   // -----------------------------------
 
   Label invoke, dont_adapt_arguments;
-  __ IncrementCounter(&Counters::arguments_adaptors, 1);
+  Counters* counters = masm->isolate()->counters();
+  __ IncrementCounter(counters->arguments_adaptors(), 1);
 
   Label enough, too_few;
   __ cmpq(rax, rbx);
@@ -46,8 +46,8 @@ void ToNumberStub::Generate(MacroAssembler* masm) {
   __ Ret();
 
   __ bind(&check_heap_number);
-  __ Move(rbx, Factory::heap_number_map());
-  __ cmpq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
+  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
+                 Heap::kHeapNumberMapRootIndex);
   __ j(not_equal, &call_builtin);
   __ Ret();
 
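The recurring Factory::heap_number_map() to CompareRoot rewrite deserves a note: instead of materializing a map handle (a pointer tied to one specific heap) and comparing it with cmpq, the stub now compares directly against a slot of the per-isolate root list reached through a dedicated root-list register, so the generated code embeds no heap pointer. A rough C++ model of why that is isolate-safe; all names here are illustrative, not V8's:

    struct Map {};

    enum RootIndex {
      kHeapNumberMapRootIndex,
      kRootCount
    };

    // Stands in for the per-isolate roots block that the reserved root
    // register points at in generated code.
    struct Roots {
      const Map* entries[kRootCount];
    };

    bool IsHeapNumberMap(const Roots* roots, const Map* candidate) {
      // One register-relative compare; no immediate pointer baked into code.
      return candidate == roots->entries[kHeapNumberMapRootIndex];
    }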
@@ -68,11 +68,15 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
   // Get the function info from the stack.
   __ movq(rdx, Operand(rsp, 1 * kPointerSize));
 
+  int map_index = strict_mode_ == kStrictMode
+      ? Context::STRICT_MODE_FUNCTION_MAP_INDEX
+      : Context::FUNCTION_MAP_INDEX;
+
   // Compute the function map in the current global context and set that
   // as the map of the allocated object.
   __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
   __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset));
-  __ movq(rcx, Operand(rcx, Context::SlotOffset(Context::FUNCTION_MAP_INDEX)));
+  __ movq(rcx, Operand(rcx, Context::SlotOffset(map_index)));
   __ movq(FieldOperand(rax, JSObject::kMapOffset), rcx);
 
   // Initialize the rest of the function. We don't have to update the
@@ -104,7 +108,7 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
   __ pop(rdx);
   __ push(rsi);
   __ push(rdx);
-  __ Push(Factory::false_value());
+  __ PushRoot(Heap::kFalseValueRootIndex);
   __ push(rcx);  // Restore return address.
   __ TailCallRuntime(Runtime::kNewClosure, 3, 1);
 }
@@ -280,7 +284,8 @@ void ToBooleanStub::Generate(MacroAssembler* masm) {
 const char* GenericBinaryOpStub::GetName() {
   if (name_ != NULL) return name_;
   const int kMaxNameLength = 100;
-  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
+  name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
+      kMaxNameLength);
   if (name_ == NULL) return "OOM";
   const char* op_name = Token::Name(op_);
   const char* overwrite_name;
@@ -354,7 +359,8 @@ void GenericBinaryOpStub::GenerateCall(
 
     // Update flags to indicate that arguments are in registers.
     SetArgsInRegisters();
-    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
+    Counters* counters = masm->isolate()->counters();
+    __ IncrementCounter(counters->generic_binary_stub_calls_regs(), 1);
   }
 
   // Call the stub.
@@ -390,7 +396,8 @@ void GenericBinaryOpStub::GenerateCall(
 
     // Update flags to indicate that arguments are in registers.
     SetArgsInRegisters();
-    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
+    Counters* counters = masm->isolate()->counters();
+    __ IncrementCounter(counters->generic_binary_stub_calls_regs(), 1);
   }
 
   // Call the stub.
@@ -425,7 +432,8 @@ void GenericBinaryOpStub::GenerateCall(
   }
     // Update flags to indicate that arguments are in registers.
     SetArgsInRegisters();
-    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
+    Counters* counters = masm->isolate()->counters();
+    __ IncrementCounter(counters->generic_binary_stub_calls_regs(), 1);
   }
 
   // Call the stub.
@@ -995,7 +1003,7 @@ void GenericBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
 
   // Perform patching to an appropriate fast case and return the result.
   __ TailCallExternalReference(
-      ExternalReference(IC_Utility(IC::kBinaryOp_Patch)),
+      ExternalReference(IC_Utility(IC::kBinaryOp_Patch), masm->isolate()),
       5,
       1);
 }
@@ -1031,7 +1039,8 @@ void TypeRecordingBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
   // Patch the caller to an appropriate specialized stub and return the
   // operation result to the caller of the stub.
   __ TailCallExternalReference(
-      ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch)),
+      ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch),
+                        masm->isolate()),
       5,
       1);
 }
@@ -1053,6 +1062,9 @@ void TypeRecordingBinaryOpStub::Generate(MacroAssembler* masm) {
     case TRBinaryOpIC::HEAP_NUMBER:
       GenerateHeapNumberStub(masm);
      break;
+    case TRBinaryOpIC::ODDBALL:
+      GenerateOddballStub(masm);
+      break;
     case TRBinaryOpIC::STRING:
       GenerateStringStub(masm);
       break;
@@ -1068,7 +1080,8 @@ void TypeRecordingBinaryOpStub::Generate(MacroAssembler* masm) {
 const char* TypeRecordingBinaryOpStub::GetName() {
   if (name_ != NULL) return name_;
   const int kMaxNameLength = 100;
-  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
+  name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
+      kMaxNameLength);
   if (name_ == NULL) return "OOM";
   const char* op_name = Token::Name(op_);
   const char* overwrite_name;
@@ -1428,6 +1441,39 @@ void TypeRecordingBinaryOpStub::GenerateStringStub(MacroAssembler* masm) {
 }
 
 
+void TypeRecordingBinaryOpStub::GenerateOddballStub(MacroAssembler* masm) {
+  Label call_runtime;
+
+  if (op_ == Token::ADD) {
+    // Handle string addition here, because it is the only operation
+    // that does not do a ToNumber conversion on the operands.
+    GenerateStringAddCode(masm);
+  }
+
+  // Convert oddball arguments to numbers.
+  NearLabel check, done;
+  __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
+  __ j(not_equal, &check);
+  if (Token::IsBitOp(op_)) {
+    __ xor_(rdx, rdx);
+  } else {
+    __ LoadRoot(rdx, Heap::kNanValueRootIndex);
+  }
+  __ jmp(&done);
+  __ bind(&check);
+  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
+  __ j(not_equal, &done);
+  if (Token::IsBitOp(op_)) {
+    __ xor_(rax, rax);
+  } else {
+    __ LoadRoot(rax, Heap::kNanValueRootIndex);
+  }
+  __ bind(&done);
+
+  GenerateHeapNumberStub(masm);
+}
+
+
 void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
   Label gc_required, not_number;
   GenerateFloatingPointCode(masm, &gc_required, &not_number);
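The new GenerateOddballStub encodes JavaScript's ToNumber rules for undefined before falling through to the heap-number path: arithmetic operators see NaN, while bitwise operators (which run their operands through ToInt32) effectively see 0, which is why the stub loads kNanValueRootIndex in one case and zeroes the register in the other. A plain C++ sketch of those two conversions; the helper names are ours, not V8's:

    #include <cassert>
    #include <cmath>
    #include <cstdint>

    // ToNumber(undefined) is NaN.
    double NumberFromUndefined() { return std::nan(""); }

    // ToInt32, used by the bitwise operators, maps NaN and +/-Infinity to 0.
    int32_t ToInt32(double d) {
      if (std::isnan(d) || std::isinf(d)) return 0;
      return static_cast<int32_t>(static_cast<int64_t>(d));  // truncate
    }

    int main() {
      double n = NumberFromUndefined();
      assert(std::isnan(n + 1));      // undefined + 1  evaluates to NaN
      assert((ToInt32(n) | 5) == 5);  // undefined | 5  evaluates to 5
    }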
@@ -1506,40 +1552,59 @@ void TypeRecordingBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
 
 
 void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
-  // Input on stack:
-  //   rsp[8]: argument (should be number).
-  //   rsp[0]: return address.
+  // TAGGED case:
+  //   Input:
+  //     rsp[8]: argument (should be number).
+  //     rsp[0]: return address.
+  //   Output:
+  //     rax: tagged double result.
+  // UNTAGGED case:
+  //   Input::
+  //     rsp[0]: return address.
+  //     xmm1: untagged double input argument
+  //   Output:
+  //     xmm1: untagged double result.
+
   Label runtime_call;
   Label runtime_call_clear_stack;
-  Label input_not_smi;
-  NearLabel loaded;
-  // Test that rax is a number.
-  __ movq(rax, Operand(rsp, kPointerSize));
-  __ JumpIfNotSmi(rax, &input_not_smi);
-  // Input is a smi. Untag and load it onto the FPU stack.
-  // Then load the bits of the double into rbx.
-  __ SmiToInteger32(rax, rax);
-  __ subq(rsp, Immediate(kPointerSize));
-  __ cvtlsi2sd(xmm1, rax);
-  __ movsd(Operand(rsp, 0), xmm1);
-  __ movq(rbx, xmm1);
-  __ movq(rdx, xmm1);
-  __ fld_d(Operand(rsp, 0));
-  __ addq(rsp, Immediate(kPointerSize));
-  __ jmp(&loaded);
-
-  __ bind(&input_not_smi);
-  // Check if input is a HeapNumber.
-  __ Move(rbx, Factory::heap_number_map());
-  __ cmpq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
-  __ j(not_equal, &runtime_call);
-  // Input is a HeapNumber. Push it on the FPU stack and load its
-  // bits into rbx.
-  __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset));
-  __ movq(rbx, FieldOperand(rax, HeapNumber::kValueOffset));
-  __ movq(rdx, rbx);
-  __ bind(&loaded);
-  // ST[0] == double value
+  Label skip_cache;
+  const bool tagged = (argument_type_ == TAGGED);
+  if (tagged) {
+    NearLabel input_not_smi;
+    NearLabel loaded;
+    // Test that rax is a number.
+    __ movq(rax, Operand(rsp, kPointerSize));
+    __ JumpIfNotSmi(rax, &input_not_smi);
+    // Input is a smi. Untag and load it onto the FPU stack.
+    // Then load the bits of the double into rbx.
+    __ SmiToInteger32(rax, rax);
+    __ subq(rsp, Immediate(kDoubleSize));
+    __ cvtlsi2sd(xmm1, rax);
+    __ movsd(Operand(rsp, 0), xmm1);
+    __ movq(rbx, xmm1);
+    __ movq(rdx, xmm1);
+    __ fld_d(Operand(rsp, 0));
+    __ addq(rsp, Immediate(kDoubleSize));
+    __ jmp(&loaded);
+
+    __ bind(&input_not_smi);
+    // Check if input is a HeapNumber.
+    __ LoadRoot(rbx, Heap::kHeapNumberMapRootIndex);
+    __ cmpq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
+    __ j(not_equal, &runtime_call);
+    // Input is a HeapNumber. Push it on the FPU stack and load its
+    // bits into rbx.
+    __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset));
+    __ movq(rbx, FieldOperand(rax, HeapNumber::kValueOffset));
+    __ movq(rdx, rbx);
+
+    __ bind(&loaded);
+  } else {  // UNTAGGED.
+    __ movq(rbx, xmm1);
+    __ movq(rdx, xmm1);
+  }
+
+  // ST[0] == double value, if TAGGED.
   // rbx = bits of double value.
   // rdx = also bits of double value.
   // Compute hash (h is 32 bits, bits are 64 and the shifts are arithmetic):
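The TAGGED/UNTAGGED split documented in the new header comment gives this stub two calling conventions: the classic one takes a boxed number from the stack and returns a boxed result in rax, while the new one keeps the value as a raw double in xmm1 so optimized code can avoid allocation on cache hits. Roughly, in plain C++, where HeapNumber is a stand-in for V8's heap box, not its real class:

    #include <cmath>
    #include <memory>

    struct HeapNumber { double value; };  // stand-in for a tagged heap box

    // TAGGED: argument arrives boxed, result is returned boxed (may allocate).
    std::unique_ptr<HeapNumber> SinTagged(const HeapNumber& arg) {
      return std::unique_ptr<HeapNumber>(new HeapNumber{std::sin(arg.value)});
    }

    // UNTAGGED: raw double in, raw double out. No allocation on the fast
    // path, which is what the xmm1 convention buys the optimizing compiler.
    double SinUntagged(double arg) { return std::sin(arg); }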
@@ -1559,23 +1624,26 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
   __ xorl(rcx, rdx);
   __ xorl(rax, rdi);
   __ xorl(rcx, rax);
-  ASSERT(IsPowerOf2(TranscendentalCache::kCacheSize));
-  __ andl(rcx, Immediate(TranscendentalCache::kCacheSize - 1));
+  ASSERT(IsPowerOf2(TranscendentalCache::SubCache::kCacheSize));
+  __ andl(rcx, Immediate(TranscendentalCache::SubCache::kCacheSize - 1));
 
   // ST[0] == double value.
   // rbx = bits of double value.
   // rcx = TranscendentalCache::hash(double value).
-  __ movq(rax, ExternalReference::transcendental_cache_array_address());
-  // rax points to cache array.
-  __ movq(rax, Operand(rax, type_ * sizeof(TranscendentalCache::caches_[0])));
+  ExternalReference cache_array =
+      ExternalReference::transcendental_cache_array_address(masm->isolate());
+  __ movq(rax, cache_array);
+  int cache_array_index =
+      type_ * sizeof(Isolate::Current()->transcendental_cache()->caches_[0]);
+  __ movq(rax, Operand(rax, cache_array_index));
   // rax points to the cache for the type type_.
   // If NULL, the cache hasn't been initialized yet, so go through runtime.
   __ testq(rax, rax);
-  __ j(zero, &runtime_call_clear_stack);
+  __ j(zero, &runtime_call_clear_stack);  // Only clears stack if TAGGED.
 #ifdef DEBUG
   // Check that the layout of cache elements match expectations.
   { // NOLINT - doesn't like a single brace on a line.
-    TranscendentalCache::Element test_elem[2];
+    TranscendentalCache::SubCache::Element test_elem[2];
    char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
     char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
     char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
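The lookup above hashes the raw 64-bit pattern of the input double down to an index: xor the two 32-bit halves, a couple more shift-and-xor rounds, then mask with kCacheSize - 1, which is only a valid modulo because the cache size is a power of two (hence the ASSERT). A standalone sketch of that computation, assuming an illustrative 512-entry sub-cache:

    #include <cstdint>
    #include <cstring>

    constexpr uint32_t kCacheSize = 512;  // illustrative; must be 2^n
    static_assert((kCacheSize & (kCacheSize - 1)) == 0, "power of two");

    int Hash(double input) {
      uint64_t bits;
      std::memcpy(&bits, &input, sizeof bits);  // the rbx/rdx bit pattern
      uint32_t h = static_cast<uint32_t>(bits) ^
                   static_cast<uint32_t>(bits >> 32);  // fold the halves
      h ^= h >> 16;  // extra mixing rounds, done with
      h ^= h >> 8;   // shifts and xors in the stub
      return static_cast<int>(h & (kCacheSize - 1));  // sub-cache index
    }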
@@ -1597,30 +1665,71 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
   __ j(not_equal, &cache_miss);
   // Cache hit!
   __ movq(rax, Operand(rcx, 2 * kIntSize));
-  __ fstp(0);  // Clear FPU stack.
-  __ ret(kPointerSize);
+  if (tagged) {
+    __ fstp(0);  // Clear FPU stack.
+    __ ret(kPointerSize);
+  } else {  // UNTAGGED.
+    __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
+    __ Ret();
+  }
 
   __ bind(&cache_miss);
   // Update cache with new value.
-  Label nan_result;
-  GenerateOperation(masm, &nan_result);
+  if (tagged) {
   __ AllocateHeapNumber(rax, rdi, &runtime_call_clear_stack);
+  } else {  // UNTAGGED.
+    __ AllocateHeapNumber(rax, rdi, &skip_cache);
+    __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1);
+    __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset));
+  }
+  GenerateOperation(masm);
   __ movq(Operand(rcx, 0), rbx);
   __ movq(Operand(rcx, 2 * kIntSize), rax);
   __ fstp_d(FieldOperand(rax, HeapNumber::kValueOffset));
-  __ ret(kPointerSize);
-
-  __ bind(&runtime_call_clear_stack);
-  __ fstp(0);
-  __ bind(&runtime_call);
-  __ TailCallExternalReference(ExternalReference(RuntimeFunction()), 1, 1);
+  if (tagged) {
+    __ ret(kPointerSize);
+  } else {  // UNTAGGED.
+    __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
+    __ Ret();
+
+    // Skip cache and return answer directly, only in untagged case.
+    __ bind(&skip_cache);
+    __ subq(rsp, Immediate(kDoubleSize));
+    __ movsd(Operand(rsp, 0), xmm1);
+    __ fld_d(Operand(rsp, 0));
+    GenerateOperation(masm);
+    __ fstp_d(Operand(rsp, 0));
+    __ movsd(xmm1, Operand(rsp, 0));
+    __ addq(rsp, Immediate(kDoubleSize));
+    // We return the value in xmm1 without adding it to the cache, but
+    // we cause a scavenging GC so that future allocations will succeed.
+    __ EnterInternalFrame();
+    // Allocate an unused object bigger than a HeapNumber.
+    __ Push(Smi::FromInt(2 * kDoubleSize));
+    __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
+    __ LeaveInternalFrame();
+    __ Ret();
+  }
 
-  __ bind(&nan_result);
-  __ fstp(0);  // Remove argument from FPU stack.
-  __ LoadRoot(rax, Heap::kNanValueRootIndex);
-  __ movq(Operand(rcx, 0), rbx);
-  __ movq(Operand(rcx, 2 * kIntSize), rax);
-  __ ret(kPointerSize);
+  // Call runtime, doing whatever allocation and cleanup is necessary.
+  if (tagged) {
+    __ bind(&runtime_call_clear_stack);
+    __ fstp(0);
+    __ bind(&runtime_call);
+    __ TailCallExternalReference(
+        ExternalReference(RuntimeFunction(), masm->isolate()), 1, 1);
+  } else {  // UNTAGGED.
+    __ bind(&runtime_call_clear_stack);
+    __ bind(&runtime_call);
+    __ AllocateHeapNumber(rax, rdi, &skip_cache);
+    __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1);
+    __ EnterInternalFrame();
+    __ push(rax);
+    __ CallRuntime(RuntimeFunction(), 1);
+    __ LeaveInternalFrame();
+    __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
+    __ Ret();
+  }
 }
 
 
@@ -1637,9 +1746,9 @@ Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
1637
1746
  }
1638
1747
 
1639
1748
 
1640
- void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm,
1641
- Label* on_nan_result) {
1749
+ void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
1642
1750
  // Registers:
1751
+ // rax: Newly allocated HeapNumber, which must be preserved.
1643
1752
  // rbx: Bits of input double. Must be preserved.
1644
1753
  // rcx: Pointer to cache entry. Must be preserved.
1645
1754
  // st(0): Input double
@@ -1661,9 +1770,18 @@ void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm,
1661
1770
  __ j(below, &in_range);
1662
1771
  // Check for infinity and NaN. Both return NaN for sin.
1663
1772
  __ cmpl(rdi, Immediate(0x7ff));
1664
- __ j(equal, on_nan_result);
1773
+ NearLabel non_nan_result;
1774
+ __ j(not_equal, &non_nan_result);
1775
+ // Input is +/-Infinity or NaN. Result is NaN.
1776
+ __ fstp(0);
1777
+ __ LoadRoot(kScratchRegister, Heap::kNanValueRootIndex);
1778
+ __ fld_d(FieldOperand(kScratchRegister, HeapNumber::kValueOffset));
1779
+ __ jmp(&done);
1780
+
1781
+ __ bind(&non_nan_result);
1665
1782
 
1666
1783
  // Use fpmod to restrict argument to the range +/-2*PI.
1784
+ __ movq(rdi, rax); // Save rax before using fnstsw_ax.
1667
1785
  __ fldpi();
1668
1786
  __ fadd(0);
1669
1787
  __ fld(1);
@@ -1696,6 +1814,7 @@ void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm,
1696
1814
  // FPU Stack: input % 2*pi, 2*pi,
1697
1815
  __ fstp(0);
1698
1816
  // FPU Stack: input % 2*pi
1817
+ __ movq(rax, rdi); // Restore rax, pointer to the new HeapNumber.
1699
1818
  __ bind(&in_range);
1700
1819
  switch (type_) {
1701
1820
  case TranscendentalCache::SIN:
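A behavioral model of the range reduction in this hunk, as a sketch rather than the x87 instruction sequence: bring |x| under 2*pi before taking the sine, and hand back NaN for infinities and NaNs (biased exponent 0x7ff), which is what the new in-line non_nan_result path does instead of jumping to on_nan_result.

#include <cmath>
#include <limits>

double ReducedSin(double x) {
  if (!std::isfinite(x)) return std::numeric_limits<double>::quiet_NaN();
  const double kTwoPi = 6.283185307179586;
  return std::sin(std::fmod(x, kTwoPi));  // The stub uses an fprem loop here.
}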
@@ -1948,8 +2067,8 @@ void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
  __ AbortIfSmi(rax);
  }

- __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
- __ CompareRoot(rdx, Heap::kHeapNumberMapRootIndex);
+ __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
+ Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &slow);
  // Operand is a float, negate its value by flipping sign bit.
  __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset));
@@ -1978,8 +2097,8 @@ void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
  }

  // Check if the operand is a heap number.
- __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
- __ CompareRoot(rdx, Heap::kHeapNumberMapRootIndex);
+ __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
+ Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &slow);

  // Convert the heap number in rax to an untagged integer in rcx.
@@ -2012,6 +2131,157 @@ void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
  }


+ void MathPowStub::Generate(MacroAssembler* masm) {
+ // Registers are used as follows:
+ // rdx = base
+ // rax = exponent
+ // rcx = temporary, result
+
+ Label allocate_return, call_runtime;
+
+ // Load input parameters.
+ __ movq(rdx, Operand(rsp, 2 * kPointerSize));
+ __ movq(rax, Operand(rsp, 1 * kPointerSize));
+
+ // Save 1 in xmm3 - we need this several times later on.
+ __ movl(rcx, Immediate(1));
+ __ cvtlsi2sd(xmm3, rcx);
+
+ Label exponent_nonsmi;
+ Label base_nonsmi;
+ // If the exponent is a heap number go to that specific case.
+ __ JumpIfNotSmi(rax, &exponent_nonsmi);
+ __ JumpIfNotSmi(rdx, &base_nonsmi);
+
+ // Optimized version when both exponent and base are smis.
+ Label powi;
+ __ SmiToInteger32(rdx, rdx);
+ __ cvtlsi2sd(xmm0, rdx);
+ __ jmp(&powi);
+ // Exponent is a smi and base is a heapnumber.
+ __ bind(&base_nonsmi);
+ __ CompareRoot(FieldOperand(rdx, HeapObject::kMapOffset),
+ Heap::kHeapNumberMapRootIndex);
+ __ j(not_equal, &call_runtime);
+
+ __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
+
+ // Optimized version of pow if exponent is a smi.
+ // xmm0 contains the base.
+ __ bind(&powi);
+ __ SmiToInteger32(rax, rax);
+
+ // Save exponent in base as we need to check if exponent is negative later.
+ // We know that base and exponent are in different registers.
+ __ movq(rdx, rax);
+
+ // Get absolute value of exponent.
+ NearLabel no_neg;
+ __ cmpl(rax, Immediate(0));
+ __ j(greater_equal, &no_neg);
+ __ negl(rax);
+ __ bind(&no_neg);
+
+ // Load xmm1 with 1.
+ __ movsd(xmm1, xmm3);
+ NearLabel while_true;
+ NearLabel no_multiply;
+
+ __ bind(&while_true);
+ __ shrl(rax, Immediate(1));
+ __ j(not_carry, &no_multiply);
+ __ mulsd(xmm1, xmm0);
+ __ bind(&no_multiply);
+ __ mulsd(xmm0, xmm0);
+ __ j(not_zero, &while_true);
+
+ // Base has the original value of the exponent - if the exponent is
+ // negative return 1/result.
+ __ testl(rdx, rdx);
+ __ j(positive, &allocate_return);
+ // Special case if xmm1 has reached infinity.
+ __ divsd(xmm3, xmm1);
+ __ movsd(xmm1, xmm3);
+ __ xorpd(xmm0, xmm0);
+ __ ucomisd(xmm0, xmm1);
+ __ j(equal, &call_runtime);
+
+ __ jmp(&allocate_return);
+
+ // Exponent (or both) is a heapnumber - no matter what we should now work
+ // on doubles.
+ __ bind(&exponent_nonsmi);
+ __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
+ Heap::kHeapNumberMapRootIndex);
+ __ j(not_equal, &call_runtime);
+ __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
+ // Test if exponent is nan.
+ __ ucomisd(xmm1, xmm1);
+ __ j(parity_even, &call_runtime);
+
+ NearLabel base_not_smi;
+ NearLabel handle_special_cases;
+ __ JumpIfNotSmi(rdx, &base_not_smi);
+ __ SmiToInteger32(rdx, rdx);
+ __ cvtlsi2sd(xmm0, rdx);
+ __ jmp(&handle_special_cases);
+
+ __ bind(&base_not_smi);
+ __ CompareRoot(FieldOperand(rdx, HeapObject::kMapOffset),
+ Heap::kHeapNumberMapRootIndex);
+ __ j(not_equal, &call_runtime);
+ __ movl(rcx, FieldOperand(rdx, HeapNumber::kExponentOffset));
+ __ andl(rcx, Immediate(HeapNumber::kExponentMask));
+ __ cmpl(rcx, Immediate(HeapNumber::kExponentMask));
+ // base is NaN or +/-Infinity
+ __ j(greater_equal, &call_runtime);
+ __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
+
+ // base is in xmm0 and exponent is in xmm1.
+ __ bind(&handle_special_cases);
+ NearLabel not_minus_half;
+ // Test for -0.5.
+ // Load xmm2 with -0.5.
+ __ movq(rcx, V8_UINT64_C(0xBFE0000000000000), RelocInfo::NONE);
+ __ movq(xmm2, rcx);
+ // xmm2 now has -0.5.
+ __ ucomisd(xmm2, xmm1);
+ __ j(not_equal, &not_minus_half);
+
+ // Calculates reciprocal of square root.
+ // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
+ __ xorpd(xmm1, xmm1);
+ __ addsd(xmm1, xmm0);
+ __ sqrtsd(xmm1, xmm1);
+ __ divsd(xmm3, xmm1);
+ __ movsd(xmm1, xmm3);
+ __ jmp(&allocate_return);
+
+ // Test for 0.5.
+ __ bind(&not_minus_half);
+ // Load xmm2 with 0.5.
+ // Since xmm3 is 1 and xmm2 is -0.5 this is simply xmm2 + xmm3.
+ __ addsd(xmm2, xmm3);
+ // xmm2 now has 0.5.
+ __ ucomisd(xmm2, xmm1);
+ __ j(not_equal, &call_runtime);
+ // Calculates square root.
+ // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
+ __ xorpd(xmm1, xmm1);
+ __ addsd(xmm1, xmm0);
+ __ sqrtsd(xmm1, xmm1);
+
+ __ bind(&allocate_return);
+ __ AllocateHeapNumber(rcx, rax, &call_runtime);
+ __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm1);
+ __ movq(rax, rcx);
+ __ ret(2 * kPointerSize);
+
+ __ bind(&call_runtime);
+ __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);
+ }
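The powi loop added above is classic exponentiation by squaring. A C equivalent, as a sketch only (the stub additionally bails out to the runtime when the intermediate result overflows to infinity):

double PowSmi(double base, int exponent) {
  unsigned n = exponent < 0 ? 0u - static_cast<unsigned>(exponent)
                            : static_cast<unsigned>(exponent);
  double result = 1.0;        // xmm1 starts at 1 (copied from xmm3).
  for (double b = base; n != 0; n >>= 1) {
    if (n & 1u) result *= b;  // shrl carries out a 1 bit => mulsd xmm1, xmm0.
    b *= b;                   // mulsd xmm0, xmm0 every iteration.
  }
  return exponent < 0 ? 1.0 / result : result;  // divsd path for rdx < 0.
}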
+
+
  void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  // The key is in rdx and the parameter count is in rax.

@@ -2024,11 +2294,14 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  Label slow;
  __ JumpIfNotSmi(rdx, &slow);

- // Check if the calling frame is an arguments adaptor frame.
+ // Check if the calling frame is an arguments adaptor frame. We look at the
+ // context offset, and if the frame is not a regular one, then we find a
+ // Smi instead of the context. We can't use SmiCompare here, because that
+ // only works for comparing two smis.
  Label adaptor;
  __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
- __ SmiCompare(Operand(rbx, StandardFrameConstants::kContextOffset),
- Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor);

  // Check index against formal parameters count limit passed in
@@ -2083,8 +2356,8 @@ void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
- __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset),
- Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ __ Cmp(Operand(rdx, StandardFrameConstants::kContextOffset),
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor_frame);

  // Get the length from the frame.
@@ -2111,16 +2384,16 @@ void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
  __ j(zero, &add_arguments_object);
  __ leal(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize));
  __ bind(&add_arguments_object);
- __ addl(rcx, Immediate(Heap::kArgumentsObjectSize));
+ __ addl(rcx, Immediate(GetArgumentsObjectSize()));

  // Do the allocation of both objects in one go.
  __ AllocateInNewSpace(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT);

  // Get the arguments boilerplate from the current (global) context.
- int offset = Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX);
  __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset));
- __ movq(rdi, Operand(rdi, offset));
+ __ movq(rdi, Operand(rdi,
+ Context::SlotOffset(GetArgumentsBoilerplateIndex())));

  // Copy the JS object part.
  STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
@@ -2131,15 +2404,21 @@ void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
  __ movq(FieldOperand(rax, 1 * kPointerSize), rdx);
  __ movq(FieldOperand(rax, 2 * kPointerSize), rbx);

- // Setup the callee in-object property.
- ASSERT(Heap::arguments_callee_index == 0);
- __ movq(kScratchRegister, Operand(rsp, 3 * kPointerSize));
- __ movq(FieldOperand(rax, JSObject::kHeaderSize), kScratchRegister);
+ if (type_ == NEW_NON_STRICT) {
+ // Setup the callee in-object property.
+ ASSERT(Heap::kArgumentsCalleeIndex == 1);
+ __ movq(kScratchRegister, Operand(rsp, 3 * kPointerSize));
+ __ movq(FieldOperand(rax, JSObject::kHeaderSize +
+ Heap::kArgumentsCalleeIndex * kPointerSize),
+ kScratchRegister);
+ }

  // Get the length (smi tagged) and set that as an in-object property too.
- ASSERT(Heap::arguments_length_index == 1);
+ ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ movq(rcx, Operand(rsp, 1 * kPointerSize));
- __ movq(FieldOperand(rax, JSObject::kHeaderSize + kPointerSize), rcx);
+ __ movq(FieldOperand(rax, JSObject::kHeaderSize +
+ Heap::kArgumentsLengthIndex * kPointerSize),
+ rcx);

  // If there are no actual arguments, we're done.
  Label done;
@@ -2151,7 +2430,7 @@ void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {

  // Setup the elements pointer in the allocated arguments object and
  // initialize the header in the elements fixed array.
- __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSize));
+ __ lea(rdi, Operand(rax, GetArgumentsObjectSize()));
  __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi);
  __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
  __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
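The in-object stores in the hunk above follow the reordered layout (length now at index 0, callee at index 1, and the callee slot omitted entirely in strict mode). A small sketch of the offset arithmetic, with constants mirroring the x64 values used by the stub:

const int kPointerSize = 8;
const int kHeaderSize = 3 * kPointerSize;   // map, properties, elements.
const int kArgumentsLengthIndex = 0;        // Heap::kArgumentsLengthIndex.
const int kArgumentsCalleeIndex = 1;        // Heap::kArgumentsCalleeIndex.

int LengthOffset() { return kHeaderSize + kArgumentsLengthIndex * kPointerSize; }
int CalleeOffset() { return kHeaderSize + kArgumentsCalleeIndex * kPointerSize; }
// A strict-mode arguments object has no callee slot, which is why the fixed
// Heap::kArgumentsObjectSize was replaced by GetArgumentsObjectSize().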
@@ -2203,14 +2482,13 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
  static const int kJSRegExpOffset = 4 * kPointerSize;

  Label runtime;
-
  // Ensure that a RegExp stack is allocated.
+ Isolate* isolate = masm->isolate();
  ExternalReference address_of_regexp_stack_memory_address =
- ExternalReference::address_of_regexp_stack_memory_address();
+ ExternalReference::address_of_regexp_stack_memory_address(isolate);
  ExternalReference address_of_regexp_stack_memory_size =
- ExternalReference::address_of_regexp_stack_memory_size();
- __ movq(kScratchRegister, address_of_regexp_stack_memory_size);
- __ movq(kScratchRegister, Operand(kScratchRegister, 0));
+ ExternalReference::address_of_regexp_stack_memory_size(isolate);
+ __ Load(kScratchRegister, address_of_regexp_stack_memory_size);
  __ testq(kScratchRegister, kScratchRegister);
  __ j(zero, &runtime);

@@ -2221,32 +2499,32 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
  __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister);
  __ j(not_equal, &runtime);
  // Check that the RegExp has been compiled (data contains a fixed array).
- __ movq(rcx, FieldOperand(rax, JSRegExp::kDataOffset));
+ __ movq(rax, FieldOperand(rax, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
- Condition is_smi = masm->CheckSmi(rcx);
+ Condition is_smi = masm->CheckSmi(rax);
  __ Check(NegateCondition(is_smi),
  "Unexpected type for RegExp data, FixedArray expected");
- __ CmpObjectType(rcx, FIXED_ARRAY_TYPE, kScratchRegister);
+ __ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister);
  __ Check(equal, "Unexpected type for RegExp data, FixedArray expected");
  }

- // rcx: RegExp data (FixedArray)
+ // rax: RegExp data (FixedArray)
  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
- __ SmiToInteger32(rbx, FieldOperand(rcx, JSRegExp::kDataTagOffset));
+ __ SmiToInteger32(rbx, FieldOperand(rax, JSRegExp::kDataTagOffset));
  __ cmpl(rbx, Immediate(JSRegExp::IRREGEXP));
  __ j(not_equal, &runtime);

- // rcx: RegExp data (FixedArray)
+ // rax: RegExp data (FixedArray)
  // Check that the number of captures fit in the static offsets vector buffer.
  __ SmiToInteger32(rdx,
- FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset));
+ FieldOperand(rax, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2.
  __ leal(rdx, Operand(rdx, rdx, times_1, 2));
  // Check that the static offsets vector buffer is large enough.
  __ cmpl(rdx, Immediate(OffsetsVector::kStaticOffsetsVectorSize));
  __ j(above, &runtime);

- // rcx: RegExp data (FixedArray)
+ // rax: RegExp data (FixedArray)
  // rdx: Number of capture registers
  // Check that the second argument is a string.
  __ movq(rdi, Operand(rsp, kSubjectOffset));
@@ -2274,7 +2552,8 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Check that the JSArray is in fast case.
  __ movq(rbx, FieldOperand(rdi, JSArray::kElementsOffset));
  __ movq(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
- __ Cmp(rdi, Factory::fixed_array_map());
+ __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
+ Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &runtime);
  // Check that the last match info has space for the capture registers and the
  // additional information. Ensure no overflow in add.
@@ -2309,8 +2588,8 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
  __ testb(rbx, Immediate(kIsNotStringMask | kExternalStringTag));
  __ j(not_zero, &runtime);
  // String is a cons string.
- __ movq(rdx, FieldOperand(rdi, ConsString::kSecondOffset));
- __ Cmp(rdx, Factory::empty_string());
+ __ CompareRoot(FieldOperand(rdi, ConsString::kSecondOffset),
+ Heap::kEmptyStringRootIndex);
  __ j(not_equal, &runtime);
  __ movq(rdi, FieldOperand(rdi, ConsString::kFirstOffset));
  __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
@@ -2359,15 +2638,24 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
  // rcx: encoding of subject string (1 if ascii 0 if two_byte);
  // r11: code
  // All checks done. Now push arguments for native regexp code.
- __ IncrementCounter(&Counters::regexp_entry_native, 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->regexp_entry_native(), 1);

- static const int kRegExpExecuteArguments = 7;
+ // Isolates: note we add an additional parameter here (isolate pointer).
+ static const int kRegExpExecuteArguments = 8;
  int argument_slots_on_stack =
  masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
- __ EnterApiExitFrame(argument_slots_on_stack); // Clobbers rax!
+ __ EnterApiExitFrame(argument_slots_on_stack);

- // Argument 7: Indicate that this is a direct call from JavaScript.
+ // Argument 8: Pass current isolate address.
+ // __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kPointerSize),
+ // Immediate(ExternalReference::isolate_address()));
+ __ LoadAddress(kScratchRegister, ExternalReference::isolate_address());
  __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kPointerSize),
+ kScratchRegister);
+
+ // Argument 7: Indicate that this is a direct call from JavaScript.
+ __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kPointerSize),
  Immediate(1));

  // Argument 6: Start (high end) of backtracking stack memory area.
@@ -2377,14 +2665,15 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
  __ addq(r9, Operand(kScratchRegister, 0));
  // Argument 6 passed in r9 on Linux and on the stack on Windows.
  #ifdef _WIN64
- __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kPointerSize), r9);
+ __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kPointerSize), r9);
  #endif

  // Argument 5: static offsets vector buffer.
- __ movq(r8, ExternalReference::address_of_static_offsets_vector());
+ __ LoadAddress(r8,
+ ExternalReference::address_of_static_offsets_vector(isolate));
  // Argument 5 passed in r8 on Linux and on the stack on Windows.
  #ifdef _WIN64
- __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kPointerSize), r8);
+ __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kPointerSize), r8);
  #endif

  // First four arguments are passed in registers on both Linux and Windows.
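For orientation, a hypothetical C declaration of the native entry point these eight arguments feed; the real declaration lives in V8's regexp macro-assembler headers and the parameter names here are illustrative. The point of the hunk is argument 8, the per-isolate pointer that replaces process-global state:

extern "C" int NativeRegExpExecute(
    const char* subject,    // 1: subject string
    int start_offset,       // 2: start position
    const char* input_start,// 3: start of character data
    const char* input_end,  // 4: end of character data
    int* offsets_vector,    // 5: static offsets vector buffer
    char* stack_base,       // 6: high end of backtracking stack
    int direct_call,        // 7: 1 when called directly from JS
    void* isolate);         // 8: new Isolate* parameter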
@@ -2425,7 +2714,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
  __ movq(arg2, rbx);

  // Argument 1: Subject string.
- #ifdef WIN64_
+ #ifdef _WIN64
  __ movq(arg1, rdi);
  #else
  // Already there in AMD64 calling convention.
@@ -2485,7 +2774,8 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
  __ RecordWrite(rcx, RegExpImpl::kLastInputOffset, rax, rdi);

  // Get the static offsets vector filled by the native regexp code.
- __ movq(rcx, ExternalReference::address_of_static_offsets_vector());
+ __ LoadAddress(rcx,
+ ExternalReference::address_of_static_offsets_vector(isolate));

  // rbx: last_match_info backing store (FixedArray)
  // rcx: offsets vector
@@ -2517,13 +2807,15 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
  // stack overflow (on the backtrack stack) was detected in RegExp code but
  // haven't created the exception yet. Handle that in the runtime system.
  // TODO(592): Rerunning the RegExp to get the stack overflow exception.
- ExternalReference pending_exception_address(Top::k_pending_exception_address);
- __ movq(rbx, pending_exception_address);
- __ movq(rax, Operand(rbx, 0));
+ ExternalReference pending_exception_address(
+ Isolate::k_pending_exception_address, isolate);
+ Operand pending_exception_operand =
+ masm->ExternalOperand(pending_exception_address, rbx);
+ __ movq(rax, pending_exception_operand);
  __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
  __ cmpq(rax, rdx);
  __ j(equal, &runtime);
- __ movq(Operand(rbx, 0), rdx);
+ __ movq(pending_exception_operand, rdx);

  __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
  NearLabel termination_exception;
@@ -2574,8 +2866,8 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
  __ movq(FieldOperand(rax, HeapObject::kMapOffset), rdx);

  // Set empty properties FixedArray.
- __ Move(FieldOperand(rax, JSObject::kPropertiesOffset),
- Factory::empty_fixed_array());
+ __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex);
+ __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister);

  // Set elements to point to FixedArray allocated right after the JSArray.
  __ lea(rcx, Operand(rax, JSRegExpResult::kSize));
@@ -2595,13 +2887,13 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
  // rbx: Number of elements in array as int32.

  // Set map.
- __ Move(FieldOperand(rcx, HeapObject::kMapOffset),
- Factory::fixed_array_map());
+ __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
+ __ movq(FieldOperand(rcx, HeapObject::kMapOffset), kScratchRegister);
  // Set length.
  __ Integer32ToSmi(rdx, rbx);
  __ movq(FieldOperand(rcx, FixedArray::kLengthOffset), rdx);
  // Fill contents of fixed-array with the-hole.
- __ Move(rdx, Factory::the_hole_value());
+ __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
  __ lea(rcx, FieldOperand(rcx, FixedArray::kHeaderSize));
  // Fill fixed array elements with hole.
  // rax: JSArray.
@@ -2654,7 +2946,7 @@ void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
  Label load_result_from_cache;
  if (!object_is_smi) {
  __ JumpIfSmi(object, &is_smi);
- __ CheckMap(object, Factory::heap_number_map(), not_found, true);
+ __ CheckMap(object, FACTORY->heap_number_map(), not_found, true);

  STATIC_ASSERT(8 == kDoubleSize);
  __ movl(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
@@ -2699,7 +2991,8 @@ void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
  index,
  times_1,
  FixedArray::kHeaderSize + kPointerSize));
- __ IncrementCounter(&Counters::number_to_string_native, 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->number_to_string_native(), 1);
  }
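The double-keyed cache lookup above hashes the two 32-bit halves of the IEEE bit pattern and masks by the cache size. A sketch of the indexing, assuming a power-of-two cache of key/value pairs; the exact mask handling is illustrative:

#include <cstdint>
#include <cstring>

int NumberStringCacheIndex(double number, int cache_size) {
  uint64_t bits;
  std::memcpy(&bits, &number, sizeof bits);
  uint32_t low = static_cast<uint32_t>(bits);         // kValueOffset word.
  uint32_t high = static_cast<uint32_t>(bits >> 32);  // kValueOffset + 4 word.
  return static_cast<int>((low ^ high) & (cache_size / 2 - 1));
}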


@@ -2784,7 +3077,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
  __ bind(&check_for_nan);
  }

- // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
+ // Test for NaN. Sadly, we can't just compare to FACTORY->nan_value(),
  // so we do the second best thing - test it ourselves.
  // Note: if cc_ != equal, never_nan_nan_ is not used.
  // We cannot set rax to EQUAL until just before return because
@@ -2797,7 +3090,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
  NearLabel heap_number;
  // If it's not a heap number, then return equal for (in)equality operator.
  __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
- Factory::heap_number_map());
+ FACTORY->heap_number_map());
  __ j(equal, &heap_number);
  if (cc_ != equal) {
  // Call runtime on identical JSObjects. Otherwise return equal.
@@ -2842,7 +3135,7 @@ void CompareStub::Generate(MacroAssembler* masm) {

  // Check if the non-smi operand is a heap number.
  __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
- Factory::heap_number_map());
+ FACTORY->heap_number_map());
  // If heap number, handle it in the slow case.
  __ j(equal, &slow);
  // Return non-equal. ebx (the lower half of rbx) is not zero.
@@ -3075,11 +3368,17 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
  __ Set(rax, argc_);
  __ Set(rbx, 0);
  __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
- Handle<Code> adaptor(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
+ Handle<Code> adaptor =
+ Isolate::Current()->builtins()->ArgumentsAdaptorTrampoline();
  __ Jump(adaptor, RelocInfo::CODE_TARGET);
  }


+ bool CEntryStub::NeedsImmovableCode() {
+ return false;
+ }
+
+
  void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
  // Throw exception in eax.
  __ Throw(rax);
@@ -3097,7 +3396,7 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
  // rbp: frame pointer (restored after C call).
  // rsp: stack pointer (restored after C call).
  // r14: number of arguments including receiver (C callee-saved).
- // r12: pointer to the first argument (C callee-saved).
+ // r15: pointer to the first argument (C callee-saved).
  // This pointer is reused in LeaveExitFrame(), so it is stored in a
  // callee-saved register.

@@ -3127,10 +3426,10 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
  }

  ExternalReference scope_depth =
- ExternalReference::heap_always_allocate_scope_depth();
+ ExternalReference::heap_always_allocate_scope_depth(masm->isolate());
  if (always_allocate_scope) {
- __ movq(kScratchRegister, scope_depth);
- __ incl(Operand(kScratchRegister, 0));
+ Operand scope_depth_operand = masm->ExternalOperand(scope_depth);
+ __ incl(scope_depth_operand);
  }

  // Call C function.
@@ -3138,30 +3437,33 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
  // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9
  // Store Arguments object on stack, below the 4 WIN64 ABI parameter slots.
  __ movq(StackSpaceOperand(0), r14); // argc.
- __ movq(StackSpaceOperand(1), r12); // argv.
+ __ movq(StackSpaceOperand(1), r15); // argv.
  if (result_size_ < 2) {
  // Pass a pointer to the Arguments object as the first argument.
  // Return result in single register (rax).
  __ lea(rcx, StackSpaceOperand(0));
+ __ LoadAddress(rdx, ExternalReference::isolate_address());
  } else {
  ASSERT_EQ(2, result_size_);
  // Pass a pointer to the result location as the first argument.
  __ lea(rcx, StackSpaceOperand(2));
  // Pass a pointer to the Arguments object as the second argument.
  __ lea(rdx, StackSpaceOperand(0));
+ __ LoadAddress(r8, ExternalReference::isolate_address());
  }

  #else // _WIN64
  // GCC passes arguments in rdi, rsi, rdx, rcx, r8, r9.
  __ movq(rdi, r14); // argc.
- __ movq(rsi, r12); // argv.
+ __ movq(rsi, r15); // argv.
+ __ movq(rdx, ExternalReference::isolate_address());
  #endif
  __ call(rbx);
  // Result is in rax - do not destroy this register!

  if (always_allocate_scope) {
- __ movq(kScratchRegister, scope_depth);
- __ decl(Operand(kScratchRegister, 0));
+ Operand scope_depth_operand = masm->ExternalOperand(scope_depth);
+ __ decl(scope_depth_operand);
  }

  // Check for failure result.
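A sketch of the calling convention the hunk above sets up, with the new trailing Isolate* made explicit. The typedef is hypothetical (the real one is defined by V8's runtime); the register mapping is read directly from the stub:

// arg       SysV (Linux)   Win64, result_size_ < 2        Win64, result_size_ == 2
// args ptr  rdi, rsi       rcx (ptr to Arguments block)   rcx result ptr, rdx args
// isolate   rdx            rdx                            r8
typedef void* MaybeObject;
typedef MaybeObject (*RuntimeEntry)(int argc, MaybeObject* argv, void* isolate);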
@@ -3202,12 +3504,13 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
  __ j(equal, throw_out_of_memory_exception);

  // Retrieve the pending exception and clear the variable.
- ExternalReference pending_exception_address(Top::k_pending_exception_address);
- __ movq(kScratchRegister, pending_exception_address);
- __ movq(rax, Operand(kScratchRegister, 0));
- __ movq(rdx, ExternalReference::the_hole_value_location());
- __ movq(rdx, Operand(rdx, 0));
- __ movq(Operand(kScratchRegister, 0), rdx);
+ ExternalReference pending_exception_address(
+ Isolate::k_pending_exception_address, masm->isolate());
+ Operand pending_exception_operand =
+ masm->ExternalOperand(pending_exception_address);
+ __ movq(rax, pending_exception_operand);
+ __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
+ __ movq(pending_exception_operand, rdx);

  // Special handling of termination exceptions which are uncatchable
  // by javascript code.
@@ -3258,7 +3561,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
  // rbp: frame pointer of exit frame (restored after C call).
  // rsp: stack pointer (restored after C call).
  // r14: number of arguments including receiver (C callee-saved).
- // r12: argv pointer (C callee-saved).
+ // r15: argv pointer (C callee-saved).

  Label throw_normal_exception;
  Label throw_termination_exception;
@@ -3306,53 +3609,58 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
  #ifdef ENABLE_LOGGING_AND_PROFILING
  Label not_outermost_js, not_outermost_js_2;
  #endif
-
- // Setup frame.
- __ push(rbp);
- __ movq(rbp, rsp);
-
- // Push the stack frame type marker twice.
- int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
- // Scratch register is neither callee-save, nor an argument register on any
- // platform. It's free to use at this point.
- // Cannot use smi-register for loading yet.
- __ movq(kScratchRegister,
- reinterpret_cast<uint64_t>(Smi::FromInt(marker)),
- RelocInfo::NONE);
- __ push(kScratchRegister); // context slot
- __ push(kScratchRegister); // function slot
- // Save callee-saved registers (X64/Win64 calling conventions).
- __ push(r12);
- __ push(r13);
- __ push(r14);
- __ push(r15);
+ { // NOLINT. Scope block confuses linter.
+ MacroAssembler::NoRootArrayScope uninitialized_root_register(masm);
+ // Setup frame.
+ __ push(rbp);
+ __ movq(rbp, rsp);
+
+ // Push the stack frame type marker twice.
+ int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
+ // Scratch register is neither callee-save, nor an argument register on any
+ // platform. It's free to use at this point.
+ // Cannot use smi-register for loading yet.
+ __ movq(kScratchRegister,
+ reinterpret_cast<uint64_t>(Smi::FromInt(marker)),
+ RelocInfo::NONE);
+ __ push(kScratchRegister); // context slot
+ __ push(kScratchRegister); // function slot
+ // Save callee-saved registers (X64/Win64 calling conventions).
+ __ push(r12);
+ __ push(r13);
+ __ push(r14);
+ __ push(r15);
  #ifdef _WIN64
- __ push(rdi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
- __ push(rsi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
+ __ push(rdi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
+ __ push(rsi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
  #endif
- __ push(rbx);
- // TODO(X64): On Win64, if we ever use XMM6-XMM15, the low low 64 bits are
- // callee save as well.
+ __ push(rbx);
+ // TODO(X64): On Win64, if we ever use XMM6-XMM15, the low low 64 bits are
+ // callee save as well.
+
+ // Set up the roots and smi constant registers.
+ // Needs to be done before any further smi loads.
+ __ InitializeSmiConstantRegister();
+ __ InitializeRootRegister();
+ }

- // Save copies of the top frame descriptor on the stack.
- ExternalReference c_entry_fp(Top::k_c_entry_fp_address);
- __ load_rax(c_entry_fp);
- __ push(rax);
+ Isolate* isolate = masm->isolate();

- // Set up the roots and smi constant registers.
- // Needs to be done before any further smi loads.
- ExternalReference roots_address = ExternalReference::roots_address();
- __ movq(kRootRegister, roots_address);
- __ InitializeSmiConstantRegister();
+ // Save copies of the top frame descriptor on the stack.
+ ExternalReference c_entry_fp(Isolate::k_c_entry_fp_address, isolate);
+ {
+ Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
+ __ push(c_entry_fp_operand);
+ }

  #ifdef ENABLE_LOGGING_AND_PROFILING
  // If this is the outermost JS call, set js_entry_sp value.
- ExternalReference js_entry_sp(Top::k_js_entry_sp_address);
- __ load_rax(js_entry_sp);
+ ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address, isolate);
+ __ Load(rax, js_entry_sp);
  __ testq(rax, rax);
  __ j(not_zero, &not_outermost_js);
  __ movq(rax, rbp);
- __ store_rax(js_entry_sp);
+ __ Store(js_entry_sp, rax);
  __ bind(&not_outermost_js);
  #endif
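The outermost-entry tracking in that block reduces to a simple latch: record the frame pointer only when no JS entry is active yet. A C model, with names illustrative rather than V8's:

static void* js_entry_sp = nullptr;  // Per-isolate slot in the real code.

void OnJsEntry(void* current_fp) {
  if (js_entry_sp == nullptr) {  // testq rax, rax; j(not_zero, &not_outermost_js)
    js_entry_sp = current_fp;    // Store(js_entry_sp, rax)
  }
}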

@@ -3361,8 +3669,9 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {

  // Caught exception: Store result (exception) in the pending
  // exception field in the JSEnv and return a failure sentinel.
- ExternalReference pending_exception(Top::k_pending_exception_address);
- __ store_rax(pending_exception);
+ ExternalReference pending_exception(Isolate::k_pending_exception_address,
+ isolate);
+ __ Store(pending_exception, rax);
  __ movq(rax, Failure::Exception(), RelocInfo::NONE);
  __ jmp(&exit);

@@ -3371,8 +3680,8 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
  __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);

  // Clear any pending exceptions.
- __ load_rax(ExternalReference::the_hole_value_location());
- __ store_rax(pending_exception);
+ __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);
+ __ Store(pending_exception, rax);

  // Fake a receiver (NULL).
  __ push(Immediate(0)); // receiver
@@ -3383,18 +3692,21 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
  // directly in the code, because the builtin stubs may not have been
  // generated yet at the time this code is generated.
  if (is_construct) {
- ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline);
- __ load_rax(construct_entry);
+ ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
+ isolate);
+ __ Load(rax, construct_entry);
  } else {
- ExternalReference entry(Builtins::JSEntryTrampoline);
- __ load_rax(entry);
+ ExternalReference entry(Builtins::kJSEntryTrampoline, isolate);
+ __ Load(rax, entry);
  }
  __ lea(kScratchRegister, FieldOperand(rax, Code::kHeaderSize));
  __ call(kScratchRegister);

  // Unlink this frame from the handler chain.
- __ movq(kScratchRegister, ExternalReference(Top::k_handler_address));
- __ pop(Operand(kScratchRegister, 0));
+ Operand handler_operand =
+ masm->ExternalOperand(ExternalReference(Isolate::k_handler_address,
+ isolate));
+ __ pop(handler_operand);
  // Pop next_sp.
  __ addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));

@@ -3410,8 +3722,10 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {

  // Restore the top frame descriptor from the stack.
  __ bind(&exit);
- __ movq(kScratchRegister, ExternalReference(Top::k_c_entry_fp_address));
- __ pop(Operand(kScratchRegister, 0));
+ {
+ Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
+ __ pop(c_entry_fp_operand);
+ }

  // Restore callee-saved registers (X64 conventions).
  __ pop(rbx);
@@ -3434,20 +3748,39 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {

  void InstanceofStub::Generate(MacroAssembler* masm) {
  // Implements "value instanceof function" operator.
- // Expected input state:
+ // Expected input state with no inline cache:
  // rsp[0] : return address
  // rsp[1] : function pointer
  // rsp[2] : value
+ // Expected input state with an inline one-element cache:
+ // rsp[0] : return address
+ // rsp[1] : offset from return address to location of inline cache
+ // rsp[2] : function pointer
+ // rsp[3] : value
  // Returns a bitwise zero to indicate that the value
  // is and instance of the function and anything else to
  // indicate that the value is not an instance.

- // None of the flags are supported on X64.
- ASSERT(flags_ == kNoFlags);
+ static const int kOffsetToMapCheckValue = 5;
+ static const int kOffsetToResultValue = 21;
+ // The last 4 bytes of the instruction sequence
+ // movq(rax, FieldOperand(rdi, HeapObject::kMapOffset)
+ // Move(kScratchRegister, FACTORY->the_hole_value())
+ // in front of the hole value address.
+ static const unsigned int kWordBeforeMapCheckValue = 0xBA49FF78;
+ // The last 4 bytes of the instruction sequence
+ // __ j(not_equal, &cache_miss);
+ // __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
+ // before the offset of the hole value in the root array.
+ static const unsigned int kWordBeforeResultValue = 0x458B4909;
+ // Only the inline check flag is supported on X64.
+ ASSERT(flags_ == kNoFlags || HasCallSiteInlineCheck());
+ int extra_stack_space = HasCallSiteInlineCheck() ? kPointerSize : 0;

  // Get the object - go slow case if it's a smi.
  Label slow;
- __ movq(rax, Operand(rsp, 2 * kPointerSize));
+
+ __ movq(rax, Operand(rsp, 2 * kPointerSize + extra_stack_space));
  __ JumpIfSmi(rax, &slow);

  // Check that the left hand is a JS object. Leave its map in rax.
@@ -3457,19 +3790,23 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
  __ j(above, &slow);

  // Get the prototype of the function.
- __ movq(rdx, Operand(rsp, 1 * kPointerSize));
+ __ movq(rdx, Operand(rsp, 1 * kPointerSize + extra_stack_space));
  // rdx is function, rax is map.

- // Look up the function and the map in the instanceof cache.
- NearLabel miss;
- __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
- __ j(not_equal, &miss);
- __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex);
- __ j(not_equal, &miss);
- __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
- __ ret(2 * kPointerSize);
+ // If there is a call site cache don't look in the global cache, but do the
+ // real lookup and update the call site cache.
+ if (!HasCallSiteInlineCheck()) {
+ // Look up the function and the map in the instanceof cache.
+ NearLabel miss;
+ __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
+ __ j(not_equal, &miss);
+ __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex);
+ __ j(not_equal, &miss);
+ __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
+ __ ret(2 * kPointerSize);
+ __ bind(&miss);
+ }

- __ bind(&miss);
  __ TryGetFunctionPrototype(rdx, rbx, &slow);

  // Check that the function prototype is a JS object.
@@ -3483,8 +3820,19 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
  // rax is object map.
  // rdx is function.
  // rbx is function prototype.
- __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
- __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex);
+ if (!HasCallSiteInlineCheck()) {
+ __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
+ __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex);
+ } else {
+ __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
+ __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
+ __ movq(Operand(kScratchRegister, kOffsetToMapCheckValue), rax);
+ if (FLAG_debug_code) {
+ __ movl(rdi, Immediate(kWordBeforeMapCheckValue));
+ __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi);
+ __ Assert(equal, "InstanceofStub unexpected call site cache.");
+ }
+ }

  __ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset));

@@ -3503,19 +3851,56 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
  __ jmp(&loop);

  __ bind(&is_instance);
- __ xorl(rax, rax);
- // Store bitwise zero in the cache. This is a Smi in GC terms.
- STATIC_ASSERT(kSmiTag == 0);
- __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
- __ ret(2 * kPointerSize);
+ if (!HasCallSiteInlineCheck()) {
+ __ xorl(rax, rax);
+ // Store bitwise zero in the cache. This is a Smi in GC terms.
+ STATIC_ASSERT(kSmiTag == 0);
+ __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
+ } else {
+ // Store offset of true in the root array at the inline check site.
+ ASSERT((Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias
+ == 0xB0 - 0x100);
+ __ movl(rax, Immediate(0xB0)); // TrueValue is at -10 * kPointerSize.
+ __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
+ __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
+ __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
+ if (FLAG_debug_code) {
+ __ movl(rax, Immediate(kWordBeforeResultValue));
+ __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
+ __ Assert(equal, "InstanceofStub unexpected call site cache.");
+ }
+ __ xorl(rax, rax);
+ }
+ __ ret(2 * kPointerSize + extra_stack_space);

  __ bind(&is_not_instance);
- // We have to store a non-zero value in the cache.
- __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex);
- __ ret(2 * kPointerSize);
+ if (!HasCallSiteInlineCheck()) {
+ // We have to store a non-zero value in the cache.
+ __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex);
+ } else {
+ // Store offset of false in the root array at the inline check site.
+ ASSERT((Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias
+ == 0xB8 - 0x100);
+ __ movl(rax, Immediate(0xB8)); // FalseValue is at -9 * kPointerSize.
+ __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
+ __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
+ __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
+ if (FLAG_debug_code) {
+ __ movl(rax, Immediate(kWordBeforeResultValue));
+ __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
+ __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
+ }
+ }
+ __ ret(2 * kPointerSize + extra_stack_space);

  // Slow-case: Go through the JavaScript implementation.
  __ bind(&slow);
+ if (HasCallSiteInlineCheck()) {
+ // Remove extra value from the stack.
+ __ pop(rcx);
+ __ pop(rax);
+ __ push(rcx);
+ }
  __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
  }
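The call-site patching in the HasCallSiteInlineCheck() branches above is worth restating in C. The extra stack word holds the distance from the return address back to the inlined check, and one byte at a fixed offset inside that sequence is rewritten to select the true or false root. The offsets mirror kOffsetToMapCheckValue/kOffsetToResultValue; everything else in this sketch is illustrative:

#include <cstdint>

void PatchInlineResult(uint8_t* return_address, int64_t offset_to_site,
                       uint8_t true_or_false_root_offset) {
  uint8_t* site = return_address - offset_to_site;  // rsp[0] minus rsp[1].
  site[21] = true_or_false_root_offset;             // kOffsetToResultValue byte,
}                                                   // 0xB0 for true, 0xB8 for false.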

@@ -3549,7 +3934,8 @@ const char* CompareStub::GetName() {

  if (name_ != NULL) return name_;
  const int kMaxNameLength = 100;
- name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
+ name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
+ kMaxNameLength);
  if (name_ == NULL) return "OOM";

  const char* cc_name;
@@ -3683,7 +4069,7 @@ void StringCharCodeAtGenerator::GenerateSlow(
  // Index is not a smi.
  __ bind(&index_not_smi_);
  // If index is a heap number, try converting it to an integer.
- __ CheckMap(index_, Factory::heap_number_map(), index_not_number_, true);
+ __ CheckMap(index_, FACTORY->heap_number_map(), index_not_number_, true);
  call_helper.BeforeCall(masm);
  __ push(object_);
  __ push(index_);
@@ -3828,7 +4214,8 @@ void StringAddStub::Generate(MacroAssembler* masm) {
  __ SmiTest(rcx);
  __ j(not_zero, &second_not_zero_length);
  // Second string is empty, result is first string which is already in rax.
- __ IncrementCounter(&Counters::string_add_native, 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);
  __ bind(&second_not_zero_length);
  __ movq(rbx, FieldOperand(rax, String::kLengthOffset));
@@ -3836,7 +4223,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
  __ j(not_zero, &both_not_zero_length);
  // First string is empty, result is second string which is in rdx.
  __ movq(rax, rdx);
- __ IncrementCounter(&Counters::string_add_native, 1);
+ __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  // Both strings are non-empty.
@@ -3862,8 +4249,8 @@ void StringAddStub::Generate(MacroAssembler* masm) {
  // Look at the length of the result of adding the two strings.
  STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue / 2);
  __ SmiAdd(rbx, rbx, rcx);
- // Use the runtime system when adding two one character strings, as it
- // contains optimizations for this specific case using the symbol table.
+ // Use the symbol table when adding two one character strings, as it
+ // helps later optimizations to return a symbol here.
  __ SmiCompare(rbx, Smi::FromInt(2));
  __ j(not_equal, &longer_than_two);

@@ -3879,8 +4266,8 @@ void StringAddStub::Generate(MacroAssembler* masm) {
  // just allocate a new one.
  Label make_two_character_string, make_flat_ascii_string;
  StringHelper::GenerateTwoCharacterSymbolTableProbe(
- masm, rbx, rcx, r14, r11, rdi, r12, &make_two_character_string);
- __ IncrementCounter(&Counters::string_add_native, 1);
+ masm, rbx, rcx, r14, r11, rdi, r15, &make_two_character_string);
+ __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  __ bind(&make_two_character_string);
@@ -3920,7 +4307,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
  __ movq(FieldOperand(rcx, ConsString::kFirstOffset), rax);
  __ movq(FieldOperand(rcx, ConsString::kSecondOffset), rdx);
  __ movq(rax, rcx);
- __ IncrementCounter(&Counters::string_add_native, 1);
+ __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);
  __ bind(&non_ascii);
  // At least one of the strings is two-byte. Check whether it happens
@@ -3994,7 +4381,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
  // rdi: length of second argument
  StringHelper::GenerateCopyCharacters(masm, rcx, rdx, rdi, true);
  __ movq(rax, rbx);
- __ IncrementCounter(&Counters::string_add_native, 1);
+ __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  // Handle creating a flat two byte result.
@@ -4031,7 +4418,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
  // rdi: length of second argument
  StringHelper::GenerateCopyCharacters(masm, rcx, rdx, rdi, false);
  __ movq(rax, rbx);
- __ IncrementCounter(&Counters::string_add_native, 1);
+ __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  // Just jump to runtime to add the two strings.
@@ -4215,15 +4602,14 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
  FieldOperand(symbol_table, SymbolTable::kCapacityOffset));
  __ decl(mask);

- Register undefined = scratch4;
- __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);
+ Register map = scratch4;

  // Registers
  // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
  // hash: hash of two character string (32-bit int)
  // symbol_table: symbol table
  // mask: capacity mask (32-bit int)
- // undefined: undefined value
+ // map: -
  // scratch: -

  // Perform a number of probes in the symbol table.
@@ -4238,7 +4624,7 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
  }
  __ andl(scratch, mask);

- // Load the entry from the symble table.
+ // Load the entry from the symbol table.
  Register candidate = scratch; // Scratch register contains candidate.
  STATIC_ASSERT(SymbolTable::kEntrySize == 1);
  __ movq(candidate,
@@ -4248,8 +4634,16 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
  SymbolTable::kElementsStartOffset));

  // If entry is undefined no string with this hash can be found.
- __ cmpq(candidate, undefined);
+ NearLabel is_string;
+ __ CmpObjectType(candidate, ODDBALL_TYPE, map);
+ __ j(not_equal, &is_string);
+
+ __ CompareRoot(candidate, Heap::kUndefinedValueRootIndex);
  __ j(equal, not_found);
+ // Must be null (deleted entry).
+ __ jmp(&next_probe[i]);
+
+ __ bind(&is_string);

  // If length is not 2 the string is not a candidate.
  __ SmiCompare(FieldOperand(candidate, String::kLengthOffset),
@@ -4261,8 +4655,7 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
  Register temp = kScratchRegister;

  // Check that the candidate is a non-external ascii string.
- __ movq(temp, FieldOperand(candidate, HeapObject::kMapOffset));
- __ movzxbl(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
+ __ movzxbl(temp, FieldOperand(map, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialAscii(
  temp, temp, &next_probe[i]);
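The new oddball handling in the probe loop distinguishes three slot states. A C++ model of the loop, simplified to linear probing (the stub derives per-probe offsets from the hash; the container and probe count here are illustrative):

#include <cstdint>
#include <string>
#include <vector>

enum SlotKind { kUndefined, kDeletedNull, kString };
struct Slot { SlotKind kind; std::string value; };

const std::string* Probe(const std::vector<Slot>& table,  // Size: power of two.
                         uint32_t hash, const std::string& two_chars) {
  uint32_t mask = static_cast<uint32_t>(table.size()) - 1;
  for (uint32_t i = 0; i < 4; i++) {                    // Small fixed probe count.
    const Slot& slot = table[(hash + i) & mask];
    if (slot.kind == kUndefined) return nullptr;        // Stop: not in table.
    if (slot.kind == kDeletedNull) continue;            // New case: skip holes.
    if (slot.value == two_chars) return &slot.value;    // Found the symbol.
  }
  return nullptr;
}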
4268
4661
 
@@ -4440,7 +4833,8 @@ void SubStringStub::Generate(MacroAssembler* masm) {
4440
4833
  // rsi: character of sub string start
4441
4834
  StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, true);
4442
4835
  __ movq(rsi, rdx); // Restore rsi.
4443
- __ IncrementCounter(&Counters::sub_string_native, 1);
4836
+ Counters* counters = masm->isolate()->counters();
4837
+ __ IncrementCounter(counters->sub_string_native(), 1);
4444
4838
  __ ret(kArgumentsSize);
4445
4839
 
4446
4840
  __ bind(&non_ascii_flat);
@@ -4477,7 +4871,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
4477
4871
  __ movq(rsi, rdx); // Restore esi.
4478
4872
 
4479
4873
  __ bind(&return_rax);
4480
- __ IncrementCounter(&Counters::sub_string_native, 1);
4874
+ __ IncrementCounter(counters->sub_string_native(), 1);
4481
4875
  __ ret(kArgumentsSize);
4482
4876
 
4483
4877
  // Just jump to runtime to create the sub string.
@@ -4591,7 +4985,8 @@ void StringCompareStub::Generate(MacroAssembler* masm) {
4591
4985
  __ cmpq(rdx, rax);
4592
4986
  __ j(not_equal, &not_same);
4593
4987
  __ Move(rax, Smi::FromInt(EQUAL));
4594
- __ IncrementCounter(&Counters::string_compare_native, 1);
4988
+ Counters* counters = masm->isolate()->counters();
4989
+ __ IncrementCounter(counters->string_compare_native(), 1);
4595
4990
  __ ret(2 * kPointerSize);
4596
4991
 
4597
4992
  __ bind(&not_same);
@@ -4600,7 +4995,7 @@ void StringCompareStub::Generate(MacroAssembler* masm) {
4600
4995
  __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime);
4601
4996
 
4602
4997
  // Inline comparison of ascii strings.
4603
- __ IncrementCounter(&Counters::string_compare_native, 1);
4998
+ __ IncrementCounter(counters->string_compare_native(), 1);
4604
4999
  // Drop arguments from the stack
4605
5000
  __ pop(rcx);
4606
5001
  __ addq(rsp, Immediate(2 * kPointerSize));
@@ -4613,6 +5008,7 @@ void StringCompareStub::Generate(MacroAssembler* masm) {
4613
5008
  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
4614
5009
  }
4615
5010
 
5011
+
4616
5012
  void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
4617
5013
  ASSERT(state_ == CompareIC::SMIS);
4618
5014
  NearLabel miss;
@@ -4708,7 +5104,8 @@ void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
4708
5104
  __ push(rcx);
4709
5105
 
4710
5106
  // Call the runtime system in a fresh internal frame.
4711
- ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss));
5107
+ ExternalReference miss =
5108
+ ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
4712
5109
  __ EnterInternalFrame();
4713
5110
  __ push(rdx);
4714
5111
  __ push(rax);
@@ -4730,144 +5127,6 @@ void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
4730
5127
  }
4731
5128
 
4732
5129
 
4733
- void GenerateFastPixelArrayLoad(MacroAssembler* masm,
4734
- Register receiver,
4735
- Register key,
4736
- Register elements,
4737
- Register untagged_key,
4738
- Register result,
4739
- Label* not_pixel_array,
4740
- Label* key_not_smi,
4741
- Label* out_of_range) {
4742
- // Register use:
4743
- // receiver - holds the receiver and is unchanged.
4744
- // key - holds the key and is unchanged (must be a smi).
4745
- // elements - is set to the the receiver's element if
4746
- // the receiver doesn't have a pixel array or the
4747
- // key is not a smi, otherwise it's the elements'
4748
- // external pointer.
4749
- // untagged_key - is set to the untagged key
4750
-
4751
- // Some callers already have verified that the key is a smi. key_not_smi is
4752
- // set to NULL as a sentinel for that case. Otherwise, add an explicit check
4753
- // to ensure the key is a smi must be added.
4754
- if (key_not_smi != NULL) {
4755
- __ JumpIfNotSmi(key, key_not_smi);
4756
- } else {
4757
- if (FLAG_debug_code) {
4758
- __ AbortIfNotSmi(key);
4759
- }
4760
- }
4761
- __ SmiToInteger32(untagged_key, key);
-
-   __ movq(elements, FieldOperand(receiver, JSObject::kElementsOffset));
-   // By passing NULL as not_pixel_array, callers signal that they have already
-   // verified that the receiver has pixel array elements.
-   if (not_pixel_array != NULL) {
-     __ CheckMap(elements, Factory::pixel_array_map(), not_pixel_array, true);
-   } else {
-     if (FLAG_debug_code) {
-       // Map check should have already made sure that elements is a pixel array.
-       __ Cmp(FieldOperand(elements, HeapObject::kMapOffset),
-              Factory::pixel_array_map());
-       __ Assert(equal, "Elements isn't a pixel array");
-     }
-   }
-
-   // Check that the smi is in range.
-   __ cmpl(untagged_key, FieldOperand(elements, PixelArray::kLengthOffset));
-   __ j(above_equal, out_of_range);  // unsigned check handles negative keys.
-
-   // Load and tag the element as a smi.
-   __ movq(elements, FieldOperand(elements, PixelArray::kExternalPointerOffset));
-   __ movzxbq(result, Operand(elements, untagged_key, times_1, 0));
-   __ Integer32ToSmi(result, result);
-   __ ret(0);
- }
-
-
- // Stores an indexed element into a pixel array, clamping the stored value.
- void GenerateFastPixelArrayStore(MacroAssembler* masm,
-                                  Register receiver,
-                                  Register key,
-                                  Register value,
-                                  Register elements,
-                                  Register scratch1,
-                                  bool load_elements_from_receiver,
-                                  bool key_is_untagged,
-                                  Label* key_not_smi,
-                                  Label* value_not_smi,
-                                  Label* not_pixel_array,
-                                  Label* out_of_range) {
-   // Register use:
-   //   receiver - holds the receiver and is unchanged.
-   //   key - holds the key (must be a smi) and is unchanged.
-   //   value - holds the value (must be a smi) and is unchanged.
-   //   elements - holds the element object of the receiver on entry if
-   //              load_elements_from_receiver is false, otherwise used
-   //              internally to store the pixel array's elements and
-   //              external array pointer.
-   //
-   Register external_pointer = elements;
-   Register untagged_key = scratch1;
-   Register untagged_value = receiver;  // Only set once success guaranteed.
-
-   // Fetch the receiver's elements if the caller hasn't already done so.
-   if (load_elements_from_receiver) {
-     __ movq(elements, FieldOperand(receiver, JSObject::kElementsOffset));
-   }
-
-   // By passing NULL as not_pixel_array, callers signal that they have already
-   // verified that the receiver has pixel array elements.
-   if (not_pixel_array != NULL) {
-     __ CheckMap(elements, Factory::pixel_array_map(), not_pixel_array, true);
-   } else {
-     if (FLAG_debug_code) {
-       // Map check should have already made sure that elements is a pixel array.
-       __ Cmp(FieldOperand(elements, HeapObject::kMapOffset),
-              Factory::pixel_array_map());
-       __ Assert(equal, "Elements isn't a pixel array");
-     }
-   }
-
-   // Key must be a smi and it must be in range.
-   if (key_is_untagged) {
-     untagged_key = key;
-   } else {
-     // Some callers already have verified that the key is a smi. key_not_smi is
-     // set to NULL as a sentinel for that case. Otherwise, add an explicit
-     // check to ensure the key is a smi.
-     if (key_not_smi != NULL) {
-       __ JumpIfNotSmi(key, key_not_smi);
-     } else {
-       if (FLAG_debug_code) {
-         __ AbortIfNotSmi(key);
-       }
-     }
-     __ SmiToInteger32(untagged_key, key);
-   }
-   __ cmpl(untagged_key, FieldOperand(elements, PixelArray::kLengthOffset));
-   __ j(above_equal, out_of_range);  // unsigned check handles negative keys.
-
-   // Value must be a smi.
-   __ JumpIfNotSmi(value, value_not_smi);
-   __ SmiToInteger32(untagged_value, value);
-
-   {  // Clamp the value to [0..255].
-     NearLabel done;
-     __ testl(untagged_value, Immediate(0xFFFFFF00));
-     __ j(zero, &done);
-     __ setcc(negative, untagged_value);  // 1 if negative, 0 if positive.
-     __ decb(untagged_value);  // 0 if negative, 255 if positive.
-     __ bind(&done);
-   }
-
-   __ movq(external_pointer,
-           FieldOperand(elements, PixelArray::kExternalPointerOffset));
-   __ movb(Operand(external_pointer, untagged_key, times_1, 0), untagged_value);
-   __ ret(0);  // Return value in eax.
- }
-
  #undef __

  } } // namespace v8::internal
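
The hunk above removes GenerateFastPixelArrayLoad and GenerateFastPixelArrayStore from this file as part of the same update. Two idioms in the removed code are worth spelling out: the bounds check compares as unsigned so that a single branch also rejects negative keys, and the store clamps to [0..255] with a testl/setcc/decb sequence instead of a branch ladder. A standalone C++ sketch of the equivalent logic, with KeyInRange and ClampToByte as hypothetical names for illustration:

    #include <cstdint>

    // Unsigned compare: a negative int32 key becomes a huge uint32, so the
    // single above_equal branch covers both too-large and negative keys.
    static bool KeyInRange(int32_t untagged_key, uint32_t length) {
      return static_cast<uint32_t>(untagged_key) < length;
    }

    // Clamp to [0..255], mirroring testl / setcc(negative) / decb:
    static uint8_t ClampToByte(int32_t value) {
      if ((value & 0xFFFFFF00) == 0) {       // testl + j(zero, &done)
        return static_cast<uint8_t>(value);  // already in range
      }
      uint8_t v = value < 0 ? 1 : 0;  // setcc(negative): 1 if negative, 0 if positive
      return --v;                     // decb: 1 -> 0, 0 -> 255 (byte wraparound)
    }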