mustang 0.0.1 → 0.1.0

Files changed (560)
  1. data/.rspec +1 -0
  2. data/Isolate +9 -0
  3. data/README.md +6 -12
  4. data/Rakefile +30 -4
  5. data/TODO.md +9 -0
  6. data/ext/v8/extconf.rb +56 -0
  7. data/ext/v8/v8.cpp +37 -0
  8. data/ext/v8/v8_array.cpp +161 -0
  9. data/ext/v8/v8_array.h +17 -0
  10. data/ext/v8/v8_base.cpp +147 -0
  11. data/ext/v8/v8_base.h +23 -0
  12. data/ext/v8/v8_cast.cpp +151 -0
  13. data/ext/v8/v8_cast.h +64 -0
  14. data/ext/v8/v8_context.cpp +174 -0
  15. data/ext/v8/v8_context.h +12 -0
  16. data/ext/v8/v8_date.cpp +61 -0
  17. data/ext/v8/v8_date.h +16 -0
  18. data/ext/v8/v8_errors.cpp +147 -0
  19. data/ext/v8/v8_errors.h +19 -0
  20. data/ext/v8/v8_external.cpp +66 -0
  21. data/ext/v8/v8_external.h +16 -0
  22. data/ext/v8/v8_function.cpp +182 -0
  23. data/ext/v8/v8_function.h +14 -0
  24. data/ext/v8/v8_integer.cpp +70 -0
  25. data/ext/v8/v8_integer.h +16 -0
  26. data/ext/v8/v8_macros.h +30 -0
  27. data/ext/v8/v8_main.cpp +53 -0
  28. data/ext/v8/v8_main.h +13 -0
  29. data/ext/v8/v8_number.cpp +62 -0
  30. data/ext/v8/v8_number.h +16 -0
  31. data/ext/v8/v8_object.cpp +172 -0
  32. data/ext/v8/v8_object.h +17 -0
  33. data/ext/v8/v8_ref.cpp +72 -0
  34. data/ext/v8/v8_ref.h +43 -0
  35. data/ext/v8/v8_regexp.cpp +148 -0
  36. data/ext/v8/v8_regexp.h +16 -0
  37. data/ext/v8/v8_string.cpp +78 -0
  38. data/ext/v8/v8_string.h +16 -0
  39. data/ext/v8/v8_value.cpp +370 -0
  40. data/ext/v8/v8_value.h +19 -0
  41. data/gemspec.yml +2 -1
  42. data/lib/core_ext/class.rb +14 -0
  43. data/lib/core_ext/object.rb +12 -0
  44. data/lib/core_ext/symbol.rb +23 -0
  45. data/lib/mustang.rb +44 -0
  46. data/lib/mustang/context.rb +69 -0
  47. data/lib/mustang/errors.rb +36 -0
  48. data/lib/support/delegated.rb +25 -0
  49. data/lib/v8/array.rb +21 -0
  50. data/lib/v8/context.rb +13 -0
  51. data/lib/v8/date.rb +20 -0
  52. data/lib/v8/error.rb +15 -0
  53. data/lib/v8/external.rb +16 -0
  54. data/lib/v8/function.rb +11 -0
  55. data/lib/v8/integer.rb +16 -0
  56. data/lib/v8/number.rb +16 -0
  57. data/lib/v8/object.rb +66 -0
  58. data/lib/v8/regexp.rb +23 -0
  59. data/lib/v8/string.rb +27 -0
  60. data/mustang.gemspec +3 -0
  61. data/spec/core_ext/class_spec.rb +19 -0
  62. data/spec/core_ext/object_spec.rb +19 -0
  63. data/spec/core_ext/symbol_spec.rb +27 -0
  64. data/spec/fixtures/test1.js +2 -0
  65. data/spec/fixtures/test2.js +2 -0
  66. data/spec/spec_helper.rb +20 -0
  67. data/spec/v8/array_spec.rb +88 -0
  68. data/spec/v8/cast_spec.rb +151 -0
  69. data/spec/v8/context_spec.rb +78 -0
  70. data/spec/v8/data_spec.rb +39 -0
  71. data/spec/v8/date_spec.rb +45 -0
  72. data/spec/v8/empty_spec.rb +27 -0
  73. data/spec/v8/errors_spec.rb +142 -0
  74. data/spec/v8/external_spec.rb +44 -0
  75. data/spec/v8/function_spec.rb +170 -0
  76. data/spec/v8/integer_spec.rb +41 -0
  77. data/spec/v8/main_spec.rb +18 -0
  78. data/spec/v8/null_spec.rb +27 -0
  79. data/spec/v8/number_spec.rb +40 -0
  80. data/spec/v8/object_spec.rb +79 -0
  81. data/spec/v8/primitive_spec.rb +9 -0
  82. data/spec/v8/regexp_spec.rb +65 -0
  83. data/spec/v8/string_spec.rb +48 -0
  84. data/spec/v8/undefined_spec.rb +27 -0
  85. data/spec/v8/value_spec.rb +215 -0
  86. data/vendor/v8/.gitignore +2 -0
  87. data/vendor/v8/AUTHORS +3 -1
  88. data/vendor/v8/ChangeLog +117 -0
  89. data/vendor/v8/SConstruct +334 -53
  90. data/vendor/v8/include/v8-debug.h +21 -11
  91. data/vendor/v8/include/v8-preparser.h +1 -1
  92. data/vendor/v8/include/v8-profiler.h +122 -43
  93. data/vendor/v8/include/v8-testing.h +5 -0
  94. data/vendor/v8/include/v8.h +171 -17
  95. data/vendor/v8/preparser/SConscript +38 -0
  96. data/vendor/v8/preparser/preparser-process.cc +77 -114
  97. data/vendor/v8/samples/shell.cc +232 -46
  98. data/vendor/v8/src/SConscript +29 -5
  99. data/vendor/v8/src/accessors.cc +70 -211
  100. data/vendor/v8/{test/cctest/test-mips.cc → src/allocation-inl.h} +15 -18
  101. data/vendor/v8/src/allocation.cc +0 -82
  102. data/vendor/v8/src/allocation.h +9 -42
  103. data/vendor/v8/src/api.cc +1645 -1156
  104. data/vendor/v8/src/api.h +76 -12
  105. data/vendor/v8/src/apiutils.h +0 -7
  106. data/vendor/v8/src/arguments.h +15 -4
  107. data/vendor/v8/src/arm/assembler-arm-inl.h +10 -9
  108. data/vendor/v8/src/arm/assembler-arm.cc +62 -23
  109. data/vendor/v8/src/arm/assembler-arm.h +76 -11
  110. data/vendor/v8/src/arm/builtins-arm.cc +39 -33
  111. data/vendor/v8/src/arm/code-stubs-arm.cc +1182 -402
  112. data/vendor/v8/src/arm/code-stubs-arm.h +20 -54
  113. data/vendor/v8/src/arm/codegen-arm.cc +159 -106
  114. data/vendor/v8/src/arm/codegen-arm.h +6 -6
  115. data/vendor/v8/src/arm/constants-arm.h +16 -1
  116. data/vendor/v8/src/arm/cpu-arm.cc +7 -5
  117. data/vendor/v8/src/arm/debug-arm.cc +6 -4
  118. data/vendor/v8/src/arm/deoptimizer-arm.cc +51 -14
  119. data/vendor/v8/src/arm/disasm-arm.cc +47 -15
  120. data/vendor/v8/src/arm/frames-arm.h +1 -1
  121. data/vendor/v8/src/arm/full-codegen-arm.cc +724 -408
  122. data/vendor/v8/src/arm/ic-arm.cc +90 -85
  123. data/vendor/v8/src/arm/lithium-arm.cc +140 -69
  124. data/vendor/v8/src/arm/lithium-arm.h +161 -46
  125. data/vendor/v8/src/arm/lithium-codegen-arm.cc +567 -297
  126. data/vendor/v8/src/arm/lithium-codegen-arm.h +21 -9
  127. data/vendor/v8/src/arm/lithium-gap-resolver-arm.cc +2 -0
  128. data/vendor/v8/src/arm/macro-assembler-arm.cc +457 -96
  129. data/vendor/v8/src/arm/macro-assembler-arm.h +115 -18
  130. data/vendor/v8/src/arm/regexp-macro-assembler-arm.cc +20 -13
  131. data/vendor/v8/src/arm/regexp-macro-assembler-arm.h +1 -0
  132. data/vendor/v8/src/arm/simulator-arm.cc +184 -101
  133. data/vendor/v8/src/arm/simulator-arm.h +26 -21
  134. data/vendor/v8/src/arm/stub-cache-arm.cc +450 -467
  135. data/vendor/v8/src/arm/virtual-frame-arm.cc +14 -12
  136. data/vendor/v8/src/arm/virtual-frame-arm.h +11 -8
  137. data/vendor/v8/src/array.js +35 -18
  138. data/vendor/v8/src/assembler.cc +186 -92
  139. data/vendor/v8/src/assembler.h +106 -69
  140. data/vendor/v8/src/ast-inl.h +5 -0
  141. data/vendor/v8/src/ast.cc +46 -35
  142. data/vendor/v8/src/ast.h +107 -50
  143. data/vendor/v8/src/atomicops.h +2 -0
  144. data/vendor/v8/src/atomicops_internals_mips_gcc.h +169 -0
  145. data/vendor/v8/src/bootstrapper.cc +649 -399
  146. data/vendor/v8/src/bootstrapper.h +94 -27
  147. data/vendor/v8/src/builtins.cc +359 -227
  148. data/vendor/v8/src/builtins.h +157 -123
  149. data/vendor/v8/src/checks.cc +2 -2
  150. data/vendor/v8/src/checks.h +4 -0
  151. data/vendor/v8/src/code-stubs.cc +27 -17
  152. data/vendor/v8/src/code-stubs.h +38 -17
  153. data/vendor/v8/src/codegen-inl.h +5 -1
  154. data/vendor/v8/src/codegen.cc +27 -17
  155. data/vendor/v8/src/codegen.h +9 -9
  156. data/vendor/v8/src/compilation-cache.cc +92 -206
  157. data/vendor/v8/src/compilation-cache.h +205 -30
  158. data/vendor/v8/src/compiler.cc +107 -120
  159. data/vendor/v8/src/compiler.h +17 -2
  160. data/vendor/v8/src/contexts.cc +22 -15
  161. data/vendor/v8/src/contexts.h +14 -8
  162. data/vendor/v8/src/conversions.cc +86 -30
  163. data/vendor/v8/src/counters.cc +19 -4
  164. data/vendor/v8/src/counters.h +28 -16
  165. data/vendor/v8/src/cpu-profiler-inl.h +4 -3
  166. data/vendor/v8/src/cpu-profiler.cc +123 -72
  167. data/vendor/v8/src/cpu-profiler.h +33 -19
  168. data/vendor/v8/src/cpu.h +2 -0
  169. data/vendor/v8/src/d8-debug.cc +3 -3
  170. data/vendor/v8/src/d8-debug.h +7 -6
  171. data/vendor/v8/src/d8-posix.cc +2 -0
  172. data/vendor/v8/src/d8.cc +22 -12
  173. data/vendor/v8/src/d8.gyp +3 -0
  174. data/vendor/v8/src/d8.js +618 -0
  175. data/vendor/v8/src/data-flow.h +3 -3
  176. data/vendor/v8/src/dateparser.h +4 -2
  177. data/vendor/v8/src/debug-agent.cc +10 -9
  178. data/vendor/v8/src/debug-agent.h +9 -11
  179. data/vendor/v8/src/debug-debugger.js +121 -0
  180. data/vendor/v8/src/debug.cc +331 -227
  181. data/vendor/v8/src/debug.h +248 -219
  182. data/vendor/v8/src/deoptimizer.cc +173 -62
  183. data/vendor/v8/src/deoptimizer.h +119 -19
  184. data/vendor/v8/src/disasm.h +3 -0
  185. data/vendor/v8/src/disassembler.cc +10 -9
  186. data/vendor/v8/src/execution.cc +185 -129
  187. data/vendor/v8/src/execution.h +47 -78
  188. data/vendor/v8/src/extensions/experimental/break-iterator.cc +250 -0
  189. data/vendor/v8/src/extensions/experimental/break-iterator.h +89 -0
  190. data/vendor/v8/src/extensions/experimental/experimental.gyp +2 -0
  191. data/vendor/v8/src/extensions/experimental/i18n-extension.cc +22 -2
  192. data/vendor/v8/src/extensions/externalize-string-extension.cc +2 -2
  193. data/vendor/v8/src/extensions/gc-extension.cc +1 -1
  194. data/vendor/v8/src/factory.cc +261 -154
  195. data/vendor/v8/src/factory.h +162 -158
  196. data/vendor/v8/src/flag-definitions.h +17 -11
  197. data/vendor/v8/src/frame-element.cc +0 -5
  198. data/vendor/v8/src/frame-element.h +9 -13
  199. data/vendor/v8/src/frames-inl.h +7 -0
  200. data/vendor/v8/src/frames.cc +56 -46
  201. data/vendor/v8/src/frames.h +36 -25
  202. data/vendor/v8/src/full-codegen.cc +15 -24
  203. data/vendor/v8/src/full-codegen.h +13 -41
  204. data/vendor/v8/src/func-name-inferrer.cc +7 -6
  205. data/vendor/v8/src/func-name-inferrer.h +1 -1
  206. data/vendor/v8/src/gdb-jit.cc +1 -0
  207. data/vendor/v8/src/global-handles.cc +118 -56
  208. data/vendor/v8/src/global-handles.h +98 -40
  209. data/vendor/v8/src/globals.h +2 -2
  210. data/vendor/v8/src/handles-inl.h +106 -9
  211. data/vendor/v8/src/handles.cc +220 -157
  212. data/vendor/v8/src/handles.h +38 -59
  213. data/vendor/v8/src/hashmap.h +3 -3
  214. data/vendor/v8/src/heap-inl.h +141 -25
  215. data/vendor/v8/src/heap-profiler.cc +117 -63
  216. data/vendor/v8/src/heap-profiler.h +38 -21
  217. data/vendor/v8/src/heap.cc +805 -564
  218. data/vendor/v8/src/heap.h +640 -594
  219. data/vendor/v8/src/hydrogen-instructions.cc +216 -73
  220. data/vendor/v8/src/hydrogen-instructions.h +259 -124
  221. data/vendor/v8/src/hydrogen.cc +996 -1171
  222. data/vendor/v8/src/hydrogen.h +163 -144
  223. data/vendor/v8/src/ia32/assembler-ia32-inl.h +12 -11
  224. data/vendor/v8/src/ia32/assembler-ia32.cc +85 -39
  225. data/vendor/v8/src/ia32/assembler-ia32.h +82 -16
  226. data/vendor/v8/src/ia32/builtins-ia32.cc +64 -58
  227. data/vendor/v8/src/ia32/code-stubs-ia32.cc +248 -324
  228. data/vendor/v8/src/ia32/code-stubs-ia32.h +3 -44
  229. data/vendor/v8/src/ia32/codegen-ia32.cc +217 -165
  230. data/vendor/v8/src/ia32/codegen-ia32.h +3 -0
  231. data/vendor/v8/src/ia32/cpu-ia32.cc +6 -5
  232. data/vendor/v8/src/ia32/debug-ia32.cc +8 -5
  233. data/vendor/v8/src/ia32/deoptimizer-ia32.cc +124 -14
  234. data/vendor/v8/src/ia32/disasm-ia32.cc +85 -62
  235. data/vendor/v8/src/ia32/frames-ia32.h +1 -1
  236. data/vendor/v8/src/ia32/full-codegen-ia32.cc +348 -435
  237. data/vendor/v8/src/ia32/ic-ia32.cc +91 -91
  238. data/vendor/v8/src/ia32/lithium-codegen-ia32.cc +500 -255
  239. data/vendor/v8/src/ia32/lithium-codegen-ia32.h +13 -4
  240. data/vendor/v8/src/ia32/lithium-gap-resolver-ia32.cc +6 -0
  241. data/vendor/v8/src/ia32/lithium-ia32.cc +122 -45
  242. data/vendor/v8/src/ia32/lithium-ia32.h +128 -41
  243. data/vendor/v8/src/ia32/macro-assembler-ia32.cc +109 -84
  244. data/vendor/v8/src/ia32/macro-assembler-ia32.h +18 -9
  245. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.cc +26 -15
  246. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.h +1 -0
  247. data/vendor/v8/src/ia32/register-allocator-ia32.cc +30 -30
  248. data/vendor/v8/src/ia32/simulator-ia32.h +4 -4
  249. data/vendor/v8/src/ia32/stub-cache-ia32.cc +383 -400
  250. data/vendor/v8/src/ia32/virtual-frame-ia32.cc +36 -13
  251. data/vendor/v8/src/ia32/virtual-frame-ia32.h +11 -5
  252. data/vendor/v8/src/ic-inl.h +12 -2
  253. data/vendor/v8/src/ic.cc +304 -221
  254. data/vendor/v8/src/ic.h +115 -58
  255. data/vendor/v8/src/interpreter-irregexp.cc +25 -21
  256. data/vendor/v8/src/interpreter-irregexp.h +2 -1
  257. data/vendor/v8/src/isolate.cc +883 -0
  258. data/vendor/v8/src/isolate.h +1304 -0
  259. data/vendor/v8/src/json.js +10 -10
  260. data/vendor/v8/src/jsregexp.cc +111 -80
  261. data/vendor/v8/src/jsregexp.h +6 -7
  262. data/vendor/v8/src/jump-target-heavy.cc +5 -8
  263. data/vendor/v8/src/jump-target-heavy.h +0 -6
  264. data/vendor/v8/src/jump-target-inl.h +1 -1
  265. data/vendor/v8/src/jump-target-light.cc +3 -3
  266. data/vendor/v8/src/lithium-allocator-inl.h +2 -0
  267. data/vendor/v8/src/lithium-allocator.cc +42 -30
  268. data/vendor/v8/src/lithium-allocator.h +8 -22
  269. data/vendor/v8/src/lithium.cc +1 -0
  270. data/vendor/v8/src/liveedit.cc +141 -99
  271. data/vendor/v8/src/liveedit.h +7 -2
  272. data/vendor/v8/src/liveobjectlist-inl.h +90 -0
  273. data/vendor/v8/src/liveobjectlist.cc +2537 -1
  274. data/vendor/v8/src/liveobjectlist.h +245 -35
  275. data/vendor/v8/src/log-utils.cc +122 -35
  276. data/vendor/v8/src/log-utils.h +33 -36
  277. data/vendor/v8/src/log.cc +299 -241
  278. data/vendor/v8/src/log.h +177 -110
  279. data/vendor/v8/src/mark-compact.cc +612 -470
  280. data/vendor/v8/src/mark-compact.h +153 -80
  281. data/vendor/v8/src/messages.cc +16 -14
  282. data/vendor/v8/src/messages.js +30 -7
  283. data/vendor/v8/src/mips/assembler-mips-inl.h +155 -35
  284. data/vendor/v8/src/mips/assembler-mips.cc +1093 -219
  285. data/vendor/v8/src/mips/assembler-mips.h +552 -153
  286. data/vendor/v8/src/mips/builtins-mips.cc +43 -100
  287. data/vendor/v8/src/mips/code-stubs-mips.cc +752 -0
  288. data/vendor/v8/src/mips/code-stubs-mips.h +511 -0
  289. data/vendor/v8/src/mips/codegen-mips-inl.h +8 -14
  290. data/vendor/v8/src/mips/codegen-mips.cc +672 -896
  291. data/vendor/v8/src/mips/codegen-mips.h +271 -69
  292. data/vendor/v8/src/mips/constants-mips.cc +44 -20
  293. data/vendor/v8/src/mips/constants-mips.h +238 -40
  294. data/vendor/v8/src/mips/cpu-mips.cc +20 -3
  295. data/vendor/v8/src/mips/debug-mips.cc +35 -7
  296. data/vendor/v8/src/mips/deoptimizer-mips.cc +91 -0
  297. data/vendor/v8/src/mips/disasm-mips.cc +329 -93
  298. data/vendor/v8/src/mips/frames-mips.cc +2 -50
  299. data/vendor/v8/src/mips/frames-mips.h +24 -9
  300. data/vendor/v8/src/mips/full-codegen-mips.cc +473 -23
  301. data/vendor/v8/src/mips/ic-mips.cc +81 -45
  302. data/vendor/v8/src/mips/jump-target-mips.cc +11 -106
  303. data/vendor/v8/src/mips/lithium-codegen-mips.h +65 -0
  304. data/vendor/v8/src/mips/lithium-mips.h +304 -0
  305. data/vendor/v8/src/mips/macro-assembler-mips.cc +2391 -390
  306. data/vendor/v8/src/mips/macro-assembler-mips.h +718 -121
  307. data/vendor/v8/src/mips/regexp-macro-assembler-mips.cc +478 -0
  308. data/vendor/v8/src/mips/regexp-macro-assembler-mips.h +250 -0
  309. data/vendor/v8/src/mips/register-allocator-mips-inl.h +0 -3
  310. data/vendor/v8/src/mips/register-allocator-mips.h +3 -2
  311. data/vendor/v8/src/mips/simulator-mips.cc +1009 -221
  312. data/vendor/v8/src/mips/simulator-mips.h +119 -36
  313. data/vendor/v8/src/mips/stub-cache-mips.cc +331 -148
  314. data/vendor/v8/src/mips/{fast-codegen-mips.cc → virtual-frame-mips-inl.h} +11 -30
  315. data/vendor/v8/src/mips/virtual-frame-mips.cc +137 -149
  316. data/vendor/v8/src/mips/virtual-frame-mips.h +294 -312
  317. data/vendor/v8/src/mirror-debugger.js +9 -8
  318. data/vendor/v8/src/mksnapshot.cc +2 -2
  319. data/vendor/v8/src/objects-debug.cc +16 -16
  320. data/vendor/v8/src/objects-inl.h +421 -195
  321. data/vendor/v8/src/objects-printer.cc +7 -7
  322. data/vendor/v8/src/objects-visiting.cc +1 -1
  323. data/vendor/v8/src/objects-visiting.h +33 -12
  324. data/vendor/v8/src/objects.cc +935 -658
  325. data/vendor/v8/src/objects.h +234 -139
  326. data/vendor/v8/src/parser.cc +484 -439
  327. data/vendor/v8/src/parser.h +35 -14
  328. data/vendor/v8/src/platform-cygwin.cc +173 -107
  329. data/vendor/v8/src/platform-freebsd.cc +224 -72
  330. data/vendor/v8/src/platform-linux.cc +234 -95
  331. data/vendor/v8/src/platform-macos.cc +215 -82
  332. data/vendor/v8/src/platform-nullos.cc +9 -3
  333. data/vendor/v8/src/platform-openbsd.cc +22 -7
  334. data/vendor/v8/src/platform-posix.cc +30 -5
  335. data/vendor/v8/src/platform-solaris.cc +120 -38
  336. data/vendor/v8/src/platform-tls-mac.h +62 -0
  337. data/vendor/v8/src/platform-tls-win32.h +62 -0
  338. data/vendor/v8/src/platform-tls.h +50 -0
  339. data/vendor/v8/src/platform-win32.cc +195 -97
  340. data/vendor/v8/src/platform.h +72 -15
  341. data/vendor/v8/src/preparse-data.cc +2 -0
  342. data/vendor/v8/src/preparser-api.cc +8 -2
  343. data/vendor/v8/src/preparser.cc +1 -1
  344. data/vendor/v8/src/prettyprinter.cc +43 -52
  345. data/vendor/v8/src/prettyprinter.h +1 -1
  346. data/vendor/v8/src/profile-generator-inl.h +0 -28
  347. data/vendor/v8/src/profile-generator.cc +942 -685
  348. data/vendor/v8/src/profile-generator.h +210 -176
  349. data/vendor/v8/src/property.cc +6 -0
  350. data/vendor/v8/src/property.h +14 -3
  351. data/vendor/v8/src/regexp-macro-assembler-irregexp.cc +1 -1
  352. data/vendor/v8/src/regexp-macro-assembler.cc +28 -19
  353. data/vendor/v8/src/regexp-macro-assembler.h +11 -6
  354. data/vendor/v8/src/regexp-stack.cc +18 -10
  355. data/vendor/v8/src/regexp-stack.h +45 -21
  356. data/vendor/v8/src/regexp.js +3 -3
  357. data/vendor/v8/src/register-allocator-inl.h +3 -3
  358. data/vendor/v8/src/register-allocator.cc +1 -7
  359. data/vendor/v8/src/register-allocator.h +5 -15
  360. data/vendor/v8/src/rewriter.cc +2 -1
  361. data/vendor/v8/src/runtime-profiler.cc +158 -128
  362. data/vendor/v8/src/runtime-profiler.h +131 -15
  363. data/vendor/v8/src/runtime.cc +2409 -1692
  364. data/vendor/v8/src/runtime.h +93 -17
  365. data/vendor/v8/src/safepoint-table.cc +3 -0
  366. data/vendor/v8/src/safepoint-table.h +9 -3
  367. data/vendor/v8/src/scanner-base.cc +21 -28
  368. data/vendor/v8/src/scanner-base.h +22 -11
  369. data/vendor/v8/src/scanner.cc +3 -5
  370. data/vendor/v8/src/scanner.h +4 -2
  371. data/vendor/v8/src/scopeinfo.cc +11 -16
  372. data/vendor/v8/src/scopeinfo.h +26 -15
  373. data/vendor/v8/src/scopes.cc +67 -37
  374. data/vendor/v8/src/scopes.h +26 -12
  375. data/vendor/v8/src/serialize.cc +193 -154
  376. data/vendor/v8/src/serialize.h +41 -36
  377. data/vendor/v8/src/small-pointer-list.h +163 -0
  378. data/vendor/v8/src/snapshot-common.cc +1 -1
  379. data/vendor/v8/src/snapshot.h +3 -1
  380. data/vendor/v8/src/spaces-inl.h +30 -25
  381. data/vendor/v8/src/spaces.cc +263 -370
  382. data/vendor/v8/src/spaces.h +178 -166
  383. data/vendor/v8/src/string-search.cc +4 -3
  384. data/vendor/v8/src/string-search.h +21 -20
  385. data/vendor/v8/src/string-stream.cc +32 -24
  386. data/vendor/v8/src/string.js +7 -7
  387. data/vendor/v8/src/stub-cache.cc +324 -248
  388. data/vendor/v8/src/stub-cache.h +181 -155
  389. data/vendor/v8/src/token.cc +3 -3
  390. data/vendor/v8/src/token.h +3 -3
  391. data/vendor/v8/src/top.cc +218 -390
  392. data/vendor/v8/src/type-info.cc +98 -32
  393. data/vendor/v8/src/type-info.h +10 -3
  394. data/vendor/v8/src/unicode.cc +1 -1
  395. data/vendor/v8/src/unicode.h +1 -1
  396. data/vendor/v8/src/utils.h +3 -0
  397. data/vendor/v8/src/v8-counters.cc +18 -11
  398. data/vendor/v8/src/v8-counters.h +34 -13
  399. data/vendor/v8/src/v8.cc +66 -121
  400. data/vendor/v8/src/v8.h +7 -4
  401. data/vendor/v8/src/v8globals.h +18 -12
  402. data/vendor/v8/src/{memory.h → v8memory.h} +0 -0
  403. data/vendor/v8/src/v8natives.js +59 -18
  404. data/vendor/v8/src/v8threads.cc +127 -114
  405. data/vendor/v8/src/v8threads.h +42 -35
  406. data/vendor/v8/src/v8utils.h +2 -39
  407. data/vendor/v8/src/variables.h +1 -1
  408. data/vendor/v8/src/version.cc +26 -5
  409. data/vendor/v8/src/version.h +4 -0
  410. data/vendor/v8/src/virtual-frame-heavy-inl.h +2 -4
  411. data/vendor/v8/src/virtual-frame-light-inl.h +5 -4
  412. data/vendor/v8/src/vm-state-inl.h +21 -17
  413. data/vendor/v8/src/vm-state.h +7 -5
  414. data/vendor/v8/src/win32-headers.h +1 -0
  415. data/vendor/v8/src/x64/assembler-x64-inl.h +12 -11
  416. data/vendor/v8/src/x64/assembler-x64.cc +80 -40
  417. data/vendor/v8/src/x64/assembler-x64.h +67 -17
  418. data/vendor/v8/src/x64/builtins-x64.cc +34 -33
  419. data/vendor/v8/src/x64/code-stubs-x64.cc +636 -377
  420. data/vendor/v8/src/x64/code-stubs-x64.h +14 -48
  421. data/vendor/v8/src/x64/codegen-x64-inl.h +1 -1
  422. data/vendor/v8/src/x64/codegen-x64.cc +158 -136
  423. data/vendor/v8/src/x64/codegen-x64.h +4 -1
  424. data/vendor/v8/src/x64/cpu-x64.cc +7 -5
  425. data/vendor/v8/src/x64/debug-x64.cc +8 -6
  426. data/vendor/v8/src/x64/deoptimizer-x64.cc +195 -20
  427. data/vendor/v8/src/x64/disasm-x64.cc +42 -23
  428. data/vendor/v8/src/x64/frames-x64.cc +1 -1
  429. data/vendor/v8/src/x64/frames-x64.h +2 -2
  430. data/vendor/v8/src/x64/full-codegen-x64.cc +780 -218
  431. data/vendor/v8/src/x64/ic-x64.cc +77 -79
  432. data/vendor/v8/src/x64/jump-target-x64.cc +1 -1
  433. data/vendor/v8/src/x64/lithium-codegen-x64.cc +698 -181
  434. data/vendor/v8/src/x64/lithium-codegen-x64.h +31 -6
  435. data/vendor/v8/src/x64/lithium-x64.cc +136 -54
  436. data/vendor/v8/src/x64/lithium-x64.h +142 -51
  437. data/vendor/v8/src/x64/macro-assembler-x64.cc +456 -187
  438. data/vendor/v8/src/x64/macro-assembler-x64.h +166 -34
  439. data/vendor/v8/src/x64/regexp-macro-assembler-x64.cc +44 -28
  440. data/vendor/v8/src/x64/regexp-macro-assembler-x64.h +8 -4
  441. data/vendor/v8/src/x64/register-allocator-x64-inl.h +3 -3
  442. data/vendor/v8/src/x64/register-allocator-x64.cc +12 -8
  443. data/vendor/v8/src/x64/simulator-x64.h +5 -5
  444. data/vendor/v8/src/x64/stub-cache-x64.cc +299 -344
  445. data/vendor/v8/src/x64/virtual-frame-x64.cc +37 -13
  446. data/vendor/v8/src/x64/virtual-frame-x64.h +13 -7
  447. data/vendor/v8/src/zone-inl.h +49 -3
  448. data/vendor/v8/src/zone.cc +42 -41
  449. data/vendor/v8/src/zone.h +37 -34
  450. data/vendor/v8/test/benchmarks/testcfg.py +100 -0
  451. data/vendor/v8/test/cctest/SConscript +5 -4
  452. data/vendor/v8/test/cctest/cctest.h +3 -2
  453. data/vendor/v8/test/cctest/cctest.status +6 -11
  454. data/vendor/v8/test/cctest/test-accessors.cc +3 -3
  455. data/vendor/v8/test/cctest/test-alloc.cc +39 -33
  456. data/vendor/v8/test/cctest/test-api.cc +1092 -205
  457. data/vendor/v8/test/cctest/test-assembler-arm.cc +39 -25
  458. data/vendor/v8/test/cctest/test-assembler-ia32.cc +36 -37
  459. data/vendor/v8/test/cctest/test-assembler-mips.cc +1098 -40
  460. data/vendor/v8/test/cctest/test-assembler-x64.cc +32 -25
  461. data/vendor/v8/test/cctest/test-ast.cc +1 -0
  462. data/vendor/v8/test/cctest/test-circular-queue.cc +8 -5
  463. data/vendor/v8/test/cctest/test-compiler.cc +24 -24
  464. data/vendor/v8/test/cctest/test-cpu-profiler.cc +140 -5
  465. data/vendor/v8/test/cctest/test-dataflow.cc +1 -0
  466. data/vendor/v8/test/cctest/test-debug.cc +136 -77
  467. data/vendor/v8/test/cctest/test-decls.cc +1 -1
  468. data/vendor/v8/test/cctest/test-deoptimization.cc +25 -24
  469. data/vendor/v8/test/cctest/test-disasm-arm.cc +9 -4
  470. data/vendor/v8/test/cctest/test-disasm-ia32.cc +10 -8
  471. data/vendor/v8/test/cctest/test-func-name-inference.cc +10 -4
  472. data/vendor/v8/test/cctest/test-heap-profiler.cc +226 -164
  473. data/vendor/v8/test/cctest/test-heap.cc +240 -217
  474. data/vendor/v8/test/cctest/test-liveedit.cc +1 -0
  475. data/vendor/v8/test/cctest/test-log-stack-tracer.cc +18 -20
  476. data/vendor/v8/test/cctest/test-log.cc +114 -108
  477. data/vendor/v8/test/cctest/test-macro-assembler-x64.cc +247 -177
  478. data/vendor/v8/test/cctest/test-mark-compact.cc +129 -90
  479. data/vendor/v8/test/cctest/test-parsing.cc +15 -14
  480. data/vendor/v8/test/cctest/test-platform-linux.cc +1 -0
  481. data/vendor/v8/test/cctest/test-platform-tls.cc +66 -0
  482. data/vendor/v8/test/cctest/test-platform-win32.cc +1 -0
  483. data/vendor/v8/test/cctest/test-profile-generator.cc +1 -1
  484. data/vendor/v8/test/cctest/test-regexp.cc +53 -41
  485. data/vendor/v8/test/cctest/test-reloc-info.cc +18 -11
  486. data/vendor/v8/test/cctest/test-serialize.cc +44 -43
  487. data/vendor/v8/test/cctest/test-sockets.cc +8 -3
  488. data/vendor/v8/test/cctest/test-spaces.cc +47 -29
  489. data/vendor/v8/test/cctest/test-strings.cc +20 -20
  490. data/vendor/v8/test/cctest/test-thread-termination.cc +8 -3
  491. data/vendor/v8/test/cctest/test-threads.cc +5 -3
  492. data/vendor/v8/test/cctest/test-utils.cc +5 -4
  493. data/vendor/v8/test/cctest/testcfg.py +7 -3
  494. data/vendor/v8/test/es5conform/es5conform.status +2 -77
  495. data/vendor/v8/test/es5conform/testcfg.py +1 -1
  496. data/vendor/v8/test/message/testcfg.py +1 -1
  497. data/vendor/v8/test/mjsunit/accessors-on-global-object.js +3 -3
  498. data/vendor/v8/test/mjsunit/array-concat.js +43 -1
  499. data/vendor/v8/test/mjsunit/array-join.js +25 -0
  500. data/vendor/v8/test/mjsunit/bitops-info.js +7 -1
  501. data/vendor/v8/test/mjsunit/compiler/array-length.js +2 -2
  502. data/vendor/v8/test/mjsunit/compiler/global-accessors.js +47 -0
  503. data/vendor/v8/test/mjsunit/compiler/pic.js +1 -1
  504. data/vendor/v8/test/mjsunit/compiler/regress-loadfield.js +65 -0
  505. data/vendor/v8/test/mjsunit/math-sqrt.js +5 -1
  506. data/vendor/v8/test/mjsunit/mjsunit.js +59 -8
  507. data/vendor/v8/test/mjsunit/mjsunit.status +0 -12
  508. data/vendor/v8/test/mjsunit/mul-exhaustive.js +129 -11
  509. data/vendor/v8/test/mjsunit/negate-zero.js +1 -1
  510. data/vendor/v8/test/mjsunit/object-freeze.js +5 -13
  511. data/vendor/v8/test/mjsunit/object-prevent-extensions.js +9 -50
  512. data/vendor/v8/test/mjsunit/object-seal.js +4 -13
  513. data/vendor/v8/test/mjsunit/override-eval-with-non-function.js +36 -0
  514. data/vendor/v8/test/mjsunit/regress/regress-1145.js +54 -0
  515. data/vendor/v8/test/mjsunit/regress/regress-1172-bis.js +37 -0
  516. data/vendor/v8/test/mjsunit/regress/regress-1181.js +54 -0
  517. data/vendor/v8/test/mjsunit/regress/regress-1207.js +35 -0
  518. data/vendor/v8/test/mjsunit/regress/regress-1209.js +34 -0
  519. data/vendor/v8/test/mjsunit/regress/regress-1210.js +48 -0
  520. data/vendor/v8/test/mjsunit/regress/regress-1213.js +43 -0
  521. data/vendor/v8/test/mjsunit/regress/regress-1218.js +29 -0
  522. data/vendor/v8/test/mjsunit/regress/regress-1229.js +79 -0
  523. data/vendor/v8/test/mjsunit/regress/regress-1233.js +47 -0
  524. data/vendor/v8/test/mjsunit/regress/regress-1236.js +34 -0
  525. data/vendor/v8/test/mjsunit/regress/regress-1237.js +36 -0
  526. data/vendor/v8/test/mjsunit/regress/regress-1240.js +39 -0
  527. data/vendor/v8/test/mjsunit/regress/regress-1257.js +58 -0
  528. data/vendor/v8/test/mjsunit/regress/regress-1278.js +69 -0
  529. data/vendor/v8/test/mjsunit/regress/regress-create-exception.js +1 -0
  530. data/vendor/v8/test/mjsunit/regress/regress-lazy-deopt-reloc.js +52 -0
  531. data/vendor/v8/test/mjsunit/sin-cos.js +15 -10
  532. data/vendor/v8/test/mjsunit/smi-negative-zero.js +2 -2
  533. data/vendor/v8/test/mjsunit/str-to-num.js +1 -1
  534. data/vendor/v8/test/mjsunit/strict-mode.js +435 -0
  535. data/vendor/v8/test/mjsunit/testcfg.py +23 -6
  536. data/vendor/v8/test/mozilla/mozilla.status +0 -2
  537. data/vendor/v8/test/mozilla/testcfg.py +1 -1
  538. data/vendor/v8/test/preparser/empty.js +28 -0
  539. data/vendor/v8/test/preparser/functions-only.js +38 -0
  540. data/vendor/v8/test/preparser/non-alphanum.js +34 -0
  541. data/vendor/v8/test/preparser/symbols-only.js +49 -0
  542. data/vendor/v8/test/preparser/testcfg.py +90 -0
  543. data/vendor/v8/test/sputnik/testcfg.py +1 -1
  544. data/vendor/v8/test/test262/README +16 -0
  545. data/vendor/v8/test/test262/harness-adapt.js +80 -0
  546. data/vendor/v8/test/test262/test262.status +1506 -0
  547. data/vendor/v8/test/test262/testcfg.py +123 -0
  548. data/vendor/v8/tools/freebsd-tick-processor +10 -0
  549. data/vendor/v8/tools/gyp/v8.gyp +8 -33
  550. data/vendor/v8/tools/linux-tick-processor +5 -3
  551. data/vendor/v8/tools/test.py +37 -14
  552. data/vendor/v8/tools/tickprocessor.js +22 -8
  553. data/vendor/v8/tools/visual_studio/v8_base.vcproj +13 -1
  554. data/vendor/v8/tools/visual_studio/v8_base_arm.vcproj +5 -1
  555. data/vendor/v8/tools/visual_studio/v8_base_x64.vcproj +5 -1
  556. data/vendor/v8/tools/visual_studio/x64.vsprops +1 -0
  557. metadata +1495 -1341
  558. data/ext/extconf.rb +0 -22
  559. data/ext/mustang.cpp +0 -58
  560. data/vendor/v8/src/top.h +0 -608
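Most of the additions above are the new C++ extension under data/ext/v8/ plus a vendored V8 source tree under data/vendor/v8/. For orientation only, the snippet below sketches the classic embedding sequence that a binding layer of this V8 era (roughly the 3.x API shipped in data/vendor/v8/include/v8.h, before isolate handles became mandatory) is built around. It is an illustration written against those vendored headers, not code taken from the gem itself.

// Illustrative V8 embedding sketch (pre-isolate ~3.x API); not taken from mustang's sources.
#include <v8.h>
#include <cstdio>

int main() {
  v8::HandleScope handle_scope;                        // scope for local handles
  v8::Persistent<v8::Context> context = v8::Context::New();
  v8::Context::Scope context_scope(context);           // make the context current

  v8::Handle<v8::String> source = v8::String::New("6 * 7");
  v8::Handle<v8::Script> script = v8::Script::Compile(source);
  v8::Handle<v8::Value> result = script->Run();        // evaluates to 42

  v8::String::Utf8Value text(result);                  // UTF-8 view of the result
  std::printf("%s\n", *text);

  context.Dispose();                                   // drop the persistent handle
  return 0;
}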
@@ -1,4 +1,4 @@
- // Copyright 2010 the V8 project authors. All rights reserved.
+ // Copyright 2011 the V8 project authors. All rights reserved.
  // Redistribution and use in source and binary forms, with or without
  // modification, are permitted provided that the following conditions are
  // met:
@@ -39,15 +39,23 @@ namespace internal {
  // TranscendentalCache runtime function.
  class TranscendentalCacheStub: public CodeStub {
  public:
- explicit TranscendentalCacheStub(TranscendentalCache::Type type)
- : type_(type) {}
+ enum ArgumentType {
+ TAGGED = 0,
+ UNTAGGED = 1 << TranscendentalCache::kTranscendentalTypeBits
+ };
+
+ explicit TranscendentalCacheStub(TranscendentalCache::Type type,
+ ArgumentType argument_type)
+ : type_(type), argument_type_(argument_type) {}
  void Generate(MacroAssembler* masm);
  private:
  TranscendentalCache::Type type_;
+ ArgumentType argument_type_;
+
  Major MajorKey() { return TranscendentalCache; }
- int MinorKey() { return type_; }
+ int MinorKey() { return type_ | argument_type_; }
  Runtime::FunctionId RuntimeFunction();
- void GenerateOperation(MacroAssembler* masm, Label* on_nan_result);
+ void GenerateOperation(MacroAssembler* masm);
  };


@@ -281,6 +289,7 @@ class TypeRecordingBinaryOpStub: public CodeStub {
  void GenerateSmiStub(MacroAssembler* masm);
  void GenerateInt32Stub(MacroAssembler* masm);
  void GenerateHeapNumberStub(MacroAssembler* masm);
+ void GenerateOddballStub(MacroAssembler* masm);
  void GenerateStringStub(MacroAssembler* masm);
  void GenerateGenericStub(MacroAssembler* masm);

@@ -463,49 +472,6 @@ class NumberToStringStub: public CodeStub {
  };


- // Generate code to load an element from a pixel array. The receiver is assumed
- // to not be a smi and to have elements, the caller must guarantee this
- // precondition. If key is not a smi, then the generated code branches to
- // key_not_smi. Callers can specify NULL for key_not_smi to signal that a smi
- // check has already been performed on key so that the smi check is not
- // generated. If key is not a valid index within the bounds of the pixel array,
- // the generated code jumps to out_of_range. receiver, key and elements are
- // unchanged throughout the generated code sequence.
- void GenerateFastPixelArrayLoad(MacroAssembler* masm,
- Register receiver,
- Register key,
- Register elements,
- Register untagged_key,
- Register result,
- Label* not_pixel_array,
- Label* key_not_smi,
- Label* out_of_range);
-
- // Generate code to store an element into a pixel array, clamping values between
- // [0..255]. The receiver is assumed to not be a smi and to have elements, the
- // caller must guarantee this precondition. If key is not a smi, then the
- // generated code branches to key_not_smi. Callers can specify NULL for
- // key_not_smi to signal that a smi check has already been performed on key so
- // that the smi check is not generated. If the value is not a smi, the
- // generated code will branch to value_not_smi. If the receiver
- // doesn't have pixel array elements, the generated code will branch to
- // not_pixel_array, unless not_pixel_array is NULL, in which case the caller
- // must ensure that the receiver has pixel array elements. If key is not a
- // valid index within the bounds of the pixel array, the generated code jumps to
- // out_of_range.
- void GenerateFastPixelArrayStore(MacroAssembler* masm,
- Register receiver,
- Register key,
- Register value,
- Register elements,
- Register scratch1,
- bool load_elements_from_receiver,
- bool key_is_untagged,
- Label* key_not_smi,
- Label* value_not_smi,
- Label* not_pixel_array,
- Label* out_of_range);
-
  } } // namespace v8::internal

  #endif // V8_X64_CODE_STUBS_X64_H_
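The hunks above (from the x64 code-stubs header, judging by its include guard) widen the transcendental-cache stub key: MinorKey() now ORs the cache type with a tagged/untagged flag, and the flag is placed just above the type bits so the two fields cannot collide. The standalone sketch below illustrates that packing scheme; kTypeBits and the enum names are simplified stand-ins, not V8's actual declarations.

// Key-packing sketch; kTypeBits and the enums are illustrative, not V8's own.
#include <cassert>

const int kTypeBits = 3;                    // low bits hold the cache type
enum Type { SIN = 0, COS = 1, LOG = 2 };    // each value fits in kTypeBits bits
enum ArgumentType {
  TAGGED = 0,
  UNTAGGED = 1 << kTypeBits                 // flag lives above the type bits
};

int MinorKey(Type type, ArgumentType argument_type) {
  return type | argument_type;              // unique key per (type, flag) pair
}

int main() {
  assert(MinorKey(SIN, TAGGED) == 0);
  assert(MinorKey(SIN, UNTAGGED) == 8);     // 1 << 3
  assert(MinorKey(LOG, UNTAGGED) == 10);    // 2 | 8
  return 0;
}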
@@ -1,4 +1,4 @@
- // Copyright 2009 the V8 project authors. All rights reserved.
+ // Copyright 2010 the V8 project authors. All rights reserved.
  // Redistribution and use in source and binary forms, with or without
  // modification, are permitted provided that the following conditions are
  // met:
@@ -1,4 +1,4 @@
1
- // Copyright 2010 the V8 project authors. All rights reserved.
1
+ // Copyright 2011 the V8 project authors. All rights reserved.
2
2
  // Redistribution and use in source and binary forms, with or without
3
3
  // modification, are permitted provided that the following conditions are
4
4
  // met:
@@ -180,7 +180,7 @@ void CodeGenerator::Generate(CompilationInfo* info) {
180
180
  ASSERT_EQ(0, loop_nesting_);
181
181
  loop_nesting_ = info->is_in_loop() ? 1 : 0;
182
182
 
183
- JumpTarget::set_compiling_deferred_code(false);
183
+ Isolate::Current()->set_jump_target_compiling_deferred_code(false);
184
184
 
185
185
  {
186
186
  CodeGenState state(this);
@@ -281,7 +281,7 @@ void CodeGenerator::Generate(CompilationInfo* info) {
281
281
 
282
282
  // Initialize ThisFunction reference if present.
283
283
  if (scope()->is_function_scope() && scope()->function() != NULL) {
284
- frame_->Push(Factory::the_hole_value());
284
+ frame_->Push(FACTORY->the_hole_value());
285
285
  StoreToSlot(scope()->function()->AsSlot(), NOT_CONST_INIT);
286
286
  }
287
287
 
@@ -316,7 +316,7 @@ void CodeGenerator::Generate(CompilationInfo* info) {
316
316
  if (!scope()->HasIllegalRedeclaration()) {
317
317
  Comment cmnt(masm_, "[ function body");
318
318
  #ifdef DEBUG
319
- bool is_builtin = Bootstrapper::IsActive();
319
+ bool is_builtin = Isolate::Current()->bootstrapper()->IsActive();
320
320
  bool should_trace =
321
321
  is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
322
322
  if (should_trace) {
@@ -333,7 +333,7 @@ void CodeGenerator::Generate(CompilationInfo* info) {
333
333
  ASSERT(!function_return_is_shadowed_);
334
334
  CodeForReturnPosition(info->function());
335
335
  frame_->PrepareForReturn();
336
- Result undefined(Factory::undefined_value());
336
+ Result undefined(FACTORY->undefined_value());
337
337
  if (function_return_.is_bound()) {
338
338
  function_return_.Jump(&undefined);
339
339
  } else {
@@ -365,9 +365,9 @@ void CodeGenerator::Generate(CompilationInfo* info) {
365
365
 
366
366
  // Process any deferred code using the register allocator.
367
367
  if (!HasStackOverflow()) {
368
- JumpTarget::set_compiling_deferred_code(true);
368
+ info->isolate()->set_jump_target_compiling_deferred_code(true);
369
369
  ProcessDeferred();
370
- JumpTarget::set_compiling_deferred_code(false);
370
+ info->isolate()->set_jump_target_compiling_deferred_code(false);
371
371
  }
372
372
 
373
373
  // There is no need to delete the register allocator, it is a
@@ -516,12 +516,12 @@ void CodeGenerator::Load(Expression* expr) {
516
516
  if (dest.false_was_fall_through()) {
517
517
  // The false target was just bound.
518
518
  JumpTarget loaded;
519
- frame_->Push(Factory::false_value());
519
+ frame_->Push(FACTORY->false_value());
520
520
  // There may be dangling jumps to the true target.
521
521
  if (true_target.is_linked()) {
522
522
  loaded.Jump();
523
523
  true_target.Bind();
524
- frame_->Push(Factory::true_value());
524
+ frame_->Push(FACTORY->true_value());
525
525
  loaded.Bind();
526
526
  }
527
527
 
@@ -529,11 +529,11 @@ void CodeGenerator::Load(Expression* expr) {
529
529
  // There is true, and possibly false, control flow (with true as
530
530
  // the fall through).
531
531
  JumpTarget loaded;
532
- frame_->Push(Factory::true_value());
532
+ frame_->Push(FACTORY->true_value());
533
533
  if (false_target.is_linked()) {
534
534
  loaded.Jump();
535
535
  false_target.Bind();
536
- frame_->Push(Factory::false_value());
536
+ frame_->Push(FACTORY->false_value());
537
537
  loaded.Bind();
538
538
  }
539
539
 
@@ -548,14 +548,14 @@ void CodeGenerator::Load(Expression* expr) {
548
548
  loaded.Jump(); // Don't lose the current TOS.
549
549
  if (true_target.is_linked()) {
550
550
  true_target.Bind();
551
- frame_->Push(Factory::true_value());
551
+ frame_->Push(FACTORY->true_value());
552
552
  if (false_target.is_linked()) {
553
553
  loaded.Jump();
554
554
  }
555
555
  }
556
556
  if (false_target.is_linked()) {
557
557
  false_target.Bind();
558
- frame_->Push(Factory::false_value());
558
+ frame_->Push(FACTORY->false_value());
559
559
  }
560
560
  loaded.Bind();
561
561
  }
@@ -611,11 +611,13 @@ void CodeGenerator::LoadTypeofExpression(Expression* expr) {
611
611
 
612
612
  ArgumentsAllocationMode CodeGenerator::ArgumentsMode() {
613
613
  if (scope()->arguments() == NULL) return NO_ARGUMENTS_ALLOCATION;
614
- ASSERT(scope()->arguments_shadow() != NULL);
614
+
615
+ // In strict mode there is no need for shadow arguments.
616
+ ASSERT(scope()->arguments_shadow() != NULL || scope()->is_strict_mode());
615
617
  // We don't want to do lazy arguments allocation for functions that
616
618
  // have heap-allocated contexts, because it interfers with the
617
619
  // uninitialized const tracking in the context objects.
618
- return (scope()->num_heap_slots() > 0)
620
+ return (scope()->num_heap_slots() > 0 || scope()->is_strict_mode())
619
621
  ? EAGER_ARGUMENTS_ALLOCATION
620
622
  : LAZY_ARGUMENTS_ALLOCATION;
621
623
  }
@@ -630,9 +632,11 @@ Result CodeGenerator::StoreArgumentsObject(bool initial) {
630
632
  // When using lazy arguments allocation, we store the arguments marker value
631
633
  // as a sentinel indicating that the arguments object hasn't been
632
634
  // allocated yet.
633
- frame_->Push(Factory::arguments_marker());
635
+ frame_->Push(FACTORY->arguments_marker());
634
636
  } else {
635
- ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
637
+ ArgumentsAccessStub stub(is_strict_mode()
638
+ ? ArgumentsAccessStub::NEW_STRICT
639
+ : ArgumentsAccessStub::NEW_NON_STRICT);
636
640
  frame_->PushFunction();
637
641
  frame_->PushReceiverSlotAddress();
638
642
  frame_->Push(Smi::FromInt(scope()->num_parameters()));
@@ -643,7 +647,9 @@ Result CodeGenerator::StoreArgumentsObject(bool initial) {
643
647
  Variable* arguments = scope()->arguments();
644
648
  Variable* shadow = scope()->arguments_shadow();
645
649
  ASSERT(arguments != NULL && arguments->AsSlot() != NULL);
646
- ASSERT(shadow != NULL && shadow->AsSlot() != NULL);
650
+ ASSERT((shadow != NULL && shadow->AsSlot() != NULL) ||
651
+ scope()->is_strict_mode());
652
+
647
653
  JumpTarget done;
648
654
  bool skip_arguments = false;
649
655
  if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) {
@@ -666,7 +672,9 @@ Result CodeGenerator::StoreArgumentsObject(bool initial) {
666
672
  StoreToSlot(arguments->AsSlot(), NOT_CONST_INIT);
667
673
  if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind();
668
674
  }
669
- StoreToSlot(shadow->AsSlot(), NOT_CONST_INIT);
675
+ if (shadow != NULL) {
676
+ StoreToSlot(shadow->AsSlot(), NOT_CONST_INIT);
677
+ }
670
678
  return frame_->Pop();
671
679
  }
672
680
 
@@ -760,7 +768,7 @@ void CodeGenerator::ToBoolean(ControlDestination* dest) {
760
768
  __ AbortIfNotNumber(value.reg());
761
769
  }
762
770
  // Smi => false iff zero.
763
- __ SmiCompare(value.reg(), Smi::FromInt(0));
771
+ __ Cmp(value.reg(), Smi::FromInt(0));
764
772
  if (value.is_smi()) {
765
773
  value.Unuse();
766
774
  dest->Split(not_zero);
@@ -788,7 +796,7 @@ void CodeGenerator::ToBoolean(ControlDestination* dest) {
788
796
  dest->false_target()->Branch(equal);
789
797
 
790
798
  // Smi => false iff zero.
791
- __ SmiCompare(value.reg(), Smi::FromInt(0));
799
+ __ Cmp(value.reg(), Smi::FromInt(0));
792
800
  dest->false_target()->Branch(equal);
793
801
  Condition is_smi = masm_->CheckSmi(value.reg());
794
802
  dest->true_target()->Branch(is_smi);
@@ -1030,7 +1038,7 @@ void CodeGenerator::GenericBinaryOperation(BinaryOperation* expr,
1030
1038
  true, overwrite_mode);
1031
1039
  } else {
1032
1040
  // Set the flags based on the operation, type and loop nesting level.
1033
- // Bit operations always assume they likely operate on Smis. Still only
1041
+ // Bit operations always assume they likely operate on smis. Still only
1034
1042
  // generate the inline Smi check code if this operation is part of a loop.
1035
1043
  // For all other operations only inline the Smi check code for likely smis
1036
1044
  // if the operation is part of a loop.
@@ -1054,7 +1062,7 @@ void CodeGenerator::GenericBinaryOperation(BinaryOperation* expr,
1054
1062
 
1055
1063
 
1056
1064
  bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) {
1057
- Object* answer_object = Heap::undefined_value();
1065
+ Object* answer_object = HEAP->undefined_value();
1058
1066
  switch (op) {
1059
1067
  case Token::ADD:
1060
1068
  // Use intptr_t to detect overflow of 32-bit int.
@@ -1128,7 +1136,7 @@ bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) {
1128
1136
  UNREACHABLE();
1129
1137
  break;
1130
1138
  }
1131
- if (answer_object == Heap::undefined_value()) {
1139
+ if (answer_object->IsUndefined()) {
1132
1140
  return false;
1133
1141
  }
1134
1142
  frame_->Push(Handle<Object>(answer_object));
@@ -1363,7 +1371,7 @@ Result CodeGenerator::LikelySmiBinaryOperation(BinaryOperation* expr,
1363
1371
  if (!left_type_info.IsNumber()) {
1364
1372
  // Branch if not a heapnumber.
1365
1373
  __ Cmp(FieldOperand(answer.reg(), HeapObject::kMapOffset),
1366
- Factory::heap_number_map());
1374
+ FACTORY->heap_number_map());
1367
1375
  deferred->Branch(not_equal);
1368
1376
  }
1369
1377
  // Load integer value into answer register using truncation.
@@ -2102,7 +2110,7 @@ void CodeGenerator::Comparison(AstNode* node,
2102
2110
  if (cc == equal) {
2103
2111
  Label comparison_done;
2104
2112
  __ SmiCompare(FieldOperand(left_side.reg(), String::kLengthOffset),
2105
- Smi::FromInt(1));
2113
+ Smi::FromInt(1));
2106
2114
  __ j(not_equal, &comparison_done);
2107
2115
  uint8_t char_value =
2108
2116
  static_cast<uint8_t>(String::cast(*right_val)->Get(0));
@@ -2288,7 +2296,7 @@ void CodeGenerator::ConstantSmiComparison(Condition cc,
2288
2296
  // CompareStub and the inline code both support all values of cc.
2289
2297
  }
2290
2298
  // Implement comparison against a constant Smi, inlining the case
2291
- // where both sides are Smis.
2299
+ // where both sides are smis.
2292
2300
  left_side->ToRegister();
2293
2301
  Register left_reg = left_side->reg();
2294
2302
  Smi* constant_smi = Smi::cast(*right_side->handle());
@@ -2298,7 +2306,6 @@ void CodeGenerator::ConstantSmiComparison(Condition cc,
2298
2306
  __ AbortIfNotSmi(left_reg);
2299
2307
  }
2300
2308
  // Test smi equality and comparison by signed int comparison.
2301
- // Both sides are smis, so we can use an Immediate.
2302
2309
  __ SmiCompare(left_reg, constant_smi);
2303
2310
  left_side->Unuse();
2304
2311
  right_side->Unuse();
@@ -2308,7 +2315,7 @@ void CodeGenerator::ConstantSmiComparison(Condition cc,
2308
2315
  JumpTarget is_smi;
2309
2316
  if (cc == equal) {
2310
2317
  // We can do the equality comparison before the smi check.
2311
- __ SmiCompare(left_reg, constant_smi);
2318
+ __ Cmp(left_reg, constant_smi);
2312
2319
  dest->true_target()->Branch(equal);
2313
2320
  Condition left_is_smi = masm_->CheckSmi(left_reg);
2314
2321
  dest->false_target()->Branch(left_is_smi);
@@ -2326,7 +2333,7 @@ void CodeGenerator::ConstantSmiComparison(Condition cc,
2326
2333
  // not to be a smi.
2327
2334
  JumpTarget not_number;
2328
2335
  __ Cmp(FieldOperand(left_reg, HeapObject::kMapOffset),
2329
- Factory::heap_number_map());
2336
+ FACTORY->heap_number_map());
2330
2337
  not_number.Branch(not_equal, left_side);
2331
2338
  __ movsd(xmm1,
2332
2339
  FieldOperand(left_reg, HeapNumber::kValueOffset));
@@ -2486,7 +2493,7 @@ void CodeGenerator::CallApplyLazy(Expression* applicand,
2486
2493
  // give us a megamorphic load site. Not super, but it works.
2487
2494
  Load(applicand);
2488
2495
  frame()->Dup();
2489
- Handle<String> name = Factory::LookupAsciiSymbol("apply");
2496
+ Handle<String> name = FACTORY->LookupAsciiSymbol("apply");
2490
2497
  frame()->Push(name);
2491
2498
  Result answer = frame()->CallLoadIC(RelocInfo::CODE_TARGET);
2492
2499
  __ nop();
@@ -2554,7 +2561,7 @@ void CodeGenerator::CallApplyLazy(Expression* applicand,
2554
2561
  __ j(not_equal, &build_args);
2555
2562
  __ movq(rcx, FieldOperand(rax, JSFunction::kCodeEntryOffset));
2556
2563
  __ subq(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
2557
- Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
2564
+ Handle<Code> apply_code = Isolate::Current()->builtins()->FunctionApply();
2558
2565
  __ Cmp(rcx, apply_code);
2559
2566
  __ j(not_equal, &build_args);
2560
2567
 
@@ -2569,8 +2576,8 @@ void CodeGenerator::CallApplyLazy(Expression* applicand,
2569
2576
  // adaptor frame below it.
2570
2577
  Label invoke, adapted;
2571
2578
  __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2572
- __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset),
2573
- Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2579
+ __ Cmp(Operand(rdx, StandardFrameConstants::kContextOffset),
2580
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2574
2581
  __ j(equal, &adapted);
2575
2582
 
2576
2583
  // No arguments adaptor frame. Copy fixed number of arguments.
@@ -2747,7 +2754,8 @@ void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
2747
2754
  frame_->EmitPush(rsi); // The context is the first argument.
2748
2755
  frame_->EmitPush(kScratchRegister);
2749
2756
  frame_->EmitPush(Smi::FromInt(is_eval() ? 1 : 0));
2750
- Result ignored = frame_->CallRuntime(Runtime::kDeclareGlobals, 3);
2757
+ frame_->EmitPush(Smi::FromInt(strict_mode_flag()));
2758
+ Result ignored = frame_->CallRuntime(Runtime::kDeclareGlobals, 4);
2751
2759
  // Return value is ignored.
2752
2760
  }
2753
2761
 
@@ -2796,7 +2804,7 @@ void CodeGenerator::VisitDeclaration(Declaration* node) {
2796
2804
  // If we have a function or a constant, we need to initialize the variable.
2797
2805
  Expression* val = NULL;
2798
2806
  if (node->mode() == Variable::CONST) {
2799
- val = new Literal(Factory::the_hole_value());
2807
+ val = new Literal(FACTORY->the_hole_value());
2800
2808
  } else {
2801
2809
  val = node->fun(); // NULL if we don't have a function
2802
2810
  }
@@ -3850,7 +3858,7 @@ void CodeGenerator::VisitForInStatement(ForInStatement* node) {
3850
3858
  __ movq(rbx, rax);
3851
3859
 
3852
3860
  // If the property has been removed while iterating, we just skip it.
3853
- __ SmiCompare(rbx, Smi::FromInt(0));
3861
+ __ Cmp(rbx, Smi::FromInt(0));
3854
3862
  node->continue_target()->Branch(equal);
3855
3863
 
3856
3864
  end_del_check.Bind();
@@ -3972,7 +3980,7 @@ void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) {
3972
3980
  function_return_is_shadowed_ = function_return_was_shadowed;
3973
3981
 
3974
3982
  // Get an external reference to the handler address.
3975
- ExternalReference handler_address(Top::k_handler_address);
3983
+ ExternalReference handler_address(Isolate::k_handler_address, isolate());
3976
3984
 
3977
3985
  // Make sure that there's nothing left on the stack above the
3978
3986
  // handler structure.
@@ -4101,7 +4109,7 @@ void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) {
4101
4109
  function_return_is_shadowed_ = function_return_was_shadowed;
4102
4110
 
4103
4111
  // Get an external reference to the handler address.
4104
- ExternalReference handler_address(Top::k_handler_address);
4112
+ ExternalReference handler_address(Isolate::k_handler_address, isolate());
4105
4113
 
4106
4114
  // If we can fall off the end of the try block, unlink from the try
4107
4115
  // chain and set the state on the frame to FALLING.
@@ -4254,10 +4262,11 @@ void CodeGenerator::InstantiateFunction(
4254
4262
 
4255
4263
  // Use the fast case closure allocation code that allocates in new
4256
4264
  // space for nested functions that don't need literals cloning.
4257
- if (scope()->is_function_scope() &&
4258
- function_info->num_literals() == 0 &&
4259
- !pretenure) {
4260
- FastNewClosureStub stub;
4265
+ if (!pretenure &&
4266
+ scope()->is_function_scope() &&
4267
+ function_info->num_literals() == 0) {
4268
+ FastNewClosureStub stub(
4269
+ function_info->strict_mode() ? kStrictMode : kNonStrictMode);
4261
4270
  frame_->Push(function_info);
4262
4271
  Result answer = frame_->CallStub(&stub, 1);
4263
4272
  frame_->Push(&answer);
@@ -4267,8 +4276,8 @@ void CodeGenerator::InstantiateFunction(
4267
4276
  frame_->EmitPush(rsi);
4268
4277
  frame_->EmitPush(function_info);
4269
4278
  frame_->EmitPush(pretenure
4270
- ? Factory::true_value()
4271
- : Factory::false_value());
4279
+ ? FACTORY->true_value()
4280
+ : FACTORY->false_value());
4272
4281
  Result result = frame_->CallRuntime(Runtime::kNewClosure, 3);
4273
4282
  frame_->Push(&result);
4274
4283
  }
@@ -4605,7 +4614,8 @@ void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
4605
4614
  // by initialization.
4606
4615
  value = frame_->CallRuntime(Runtime::kInitializeConstContextSlot, 3);
4607
4616
  } else {
4608
- value = frame_->CallRuntime(Runtime::kStoreContextSlot, 3);
4617
+ frame_->Push(Smi::FromInt(strict_mode_flag()));
4618
+ value = frame_->CallRuntime(Runtime::kStoreContextSlot, 4);
4609
4619
  }
4610
4620
  // Storing a variable must keep the (new) value on the expression
4611
4621
  // stack. This is necessary for compiling chained assignment
@@ -4752,7 +4762,7 @@ class DeferredAllocateInNewSpace: public DeferredCode {
4752
4762
  Register target,
4753
4763
  int registers_to_save = 0)
4754
4764
  : size_(size), target_(target), registers_to_save_(registers_to_save) {
4755
- ASSERT(size >= kPointerSize && size <= Heap::MaxObjectSizeInNewSpace());
4765
+ ASSERT(size >= kPointerSize && size <= HEAP->MaxObjectSizeInNewSpace());
4756
4766
  set_comment("[ DeferredAllocateInNewSpace");
4757
4767
  }
4758
4768
  void Generate();
@@ -4914,8 +4924,9 @@ void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
4914
4924
  Load(property->key());
4915
4925
  Load(property->value());
4916
4926
  if (property->emit_store()) {
4927
+ frame_->Push(Smi::FromInt(NONE)); // PropertyAttributes
4917
4928
  // Ignore the result.
4918
- Result ignored = frame_->CallRuntime(Runtime::kSetProperty, 3);
4929
+ Result ignored = frame_->CallRuntime(Runtime::kSetProperty, 4);
4919
4930
  } else {
4920
4931
  frame_->Drop(3);
4921
4932
  }
@@ -4966,11 +4977,12 @@ void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
4966
4977
  frame_->Push(node->constant_elements());
4967
4978
  int length = node->values()->length();
4968
4979
  Result clone;
4969
- if (node->constant_elements()->map() == Heap::fixed_cow_array_map()) {
4980
+ if (node->constant_elements()->map() == HEAP->fixed_cow_array_map()) {
4970
4981
  FastCloneShallowArrayStub stub(
4971
4982
  FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
4972
4983
  clone = frame_->CallStub(&stub, 3);
4973
- __ IncrementCounter(&Counters::cow_arrays_created_stub, 1);
4984
+ Counters* counters = masm()->isolate()->counters();
4985
+ __ IncrementCounter(counters->cow_arrays_created_stub(), 1);
4974
4986
  } else if (node->depth() > 1) {
4975
4987
  clone = frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
4976
4988
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
@@ -5368,7 +5380,7 @@ void CodeGenerator::VisitCall(Call* node) {
5368
5380
  Load(function);
5369
5381
 
5370
5382
  // Allocate a frame slot for the receiver.
5371
- frame_->Push(Factory::undefined_value());
5383
+ frame_->Push(FACTORY->undefined_value());
5372
5384
 
5373
5385
  // Load the arguments.
5374
5386
  int arg_count = args->length();
@@ -5400,7 +5412,7 @@ void CodeGenerator::VisitCall(Call* node) {
5400
5412
  if (arg_count > 0) {
5401
5413
  frame_->PushElementAt(arg_count);
5402
5414
  } else {
5403
- frame_->Push(Factory::undefined_value());
5415
+ frame_->Push(FACTORY->undefined_value());
5404
5416
  }
5405
5417
  frame_->PushParameterAt(-1);
5406
5418
 
@@ -5422,7 +5434,7 @@ void CodeGenerator::VisitCall(Call* node) {
5422
5434
  if (arg_count > 0) {
5423
5435
  frame_->PushElementAt(arg_count);
5424
5436
  } else {
5425
- frame_->Push(Factory::undefined_value());
5437
+ frame_->Push(FACTORY->undefined_value());
 }
 frame_->PushParameterAt(-1);

@@ -5711,7 +5723,7 @@ void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) {
 }
 #endif
 // Finally, we're expected to leave a value on the top of the stack.
- frame_->Push(Factory::undefined_value());
+ frame_->Push(FACTORY->undefined_value());
 }


@@ -5974,7 +5986,7 @@ void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) {
 Condition is_smi = masm_->CheckSmi(obj.reg());
 destination()->false_target()->Branch(is_smi);

- __ Move(kScratchRegister, Factory::null_value());
+ __ Move(kScratchRegister, FACTORY->null_value());
 __ cmpq(obj.reg(), kScratchRegister);
 destination()->true_target()->Branch(equal);

@@ -6066,7 +6078,7 @@ class DeferredIsStringWrapperSafeForDefaultValueOf : public DeferredCode {
 __ jmp(&entry);
 __ bind(&loop);
 __ movq(scratch2_, FieldOperand(map_result_, 0));
- __ Cmp(scratch2_, Factory::value_of_symbol());
+ __ Cmp(scratch2_, FACTORY->value_of_symbol());
 __ j(equal, &false_result);
 __ addq(map_result_, Immediate(kPointerSize));
 __ bind(&entry);
@@ -6189,15 +6201,15 @@ void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) {

 // Skip the arguments adaptor frame if it exists.
 Label check_frame_marker;
- __ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
- Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ __ Cmp(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
 __ j(not_equal, &check_frame_marker);
 __ movq(fp.reg(), Operand(fp.reg(), StandardFrameConstants::kCallerFPOffset));

 // Check the marker in the calling frame.
 __ bind(&check_frame_marker);
- __ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset),
- Smi::FromInt(StackFrame::CONSTRUCT));
+ __ Cmp(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset),
+ Smi::FromInt(StackFrame::CONSTRUCT));
 fp.Unuse();
 destination()->Split(equal);
 }
@@ -6217,8 +6229,8 @@ void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {

 // Check if the calling frame is an arguments adaptor frame.
 __ movq(fp.reg(), Operand(rbp, StandardFrameConstants::kCallerFPOffset));
- __ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
- Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ __ Cmp(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
 __ j(not_equal, &exit);

 // Arguments adaptor case: Read the arguments length from the
@@ -6278,17 +6290,17 @@ void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {

 // Functions have class 'Function'.
 function.Bind();
- frame_->Push(Factory::function_class_symbol());
+ frame_->Push(FACTORY->function_class_symbol());
 leave.Jump();

 // Objects with a non-function constructor have class 'Object'.
 non_function_constructor.Bind();
- frame_->Push(Factory::Object_symbol());
+ frame_->Push(FACTORY->Object_symbol());
 leave.Jump();

 // Non-JS objects have class null.
 null.Bind();
- frame_->Push(Factory::null_value());
+ frame_->Push(FACTORY->null_value());

 // All done.
 leave.Bind();
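The substitution that repeats throughout these hunks replaces the static Factory::xxx() accessors with the FACTORY macro, which reaches the factory through whichever isolate is current on the calling thread. A minimal sketch of that accessor shape, using stand-in types rather than the real V8 declarations:

// Illustrative sketch only: Isolate, Factory and the FACTORY macro here are
// simplified stand-ins, not the real V8 classes.
#include <cassert>

struct Factory {
  const char* undefined_value() const { return "undefined"; }
};

struct Isolate {
  // One isolate per thread; Current() returns the one entered on this thread.
  static Isolate* Current() {
    static thread_local Isolate isolate;
    return &isolate;
  }
  Factory* factory() { return &factory_; }
 private:
  Factory factory_;
};

// The macro hides the isolate lookup, so call sites change from
// Factory::undefined_value() to FACTORY->undefined_value().
#define FACTORY (Isolate::Current()->factory())

int main() {
  assert(FACTORY->undefined_value() != nullptr);
}

Because the call sites keep the same expression shape, the rest of this substitution in the diff is almost entirely mechanical.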
@@ -6426,8 +6438,13 @@ void CodeGenerator::GenerateRandomHeapNumber(

 // Return a random uint32 number in rax.
 // The fresh HeapNumber is in rbx, which is callee-save on both x64 ABIs.
- __ PrepareCallCFunction(0);
- __ CallCFunction(ExternalReference::random_uint32_function(), 0);
+ __ PrepareCallCFunction(1);
+ #ifdef _WIN64
+ __ LoadAddress(rcx, ExternalReference::isolate_address());
+ #else
+ __ LoadAddress(rdi, ExternalReference::isolate_address());
+ #endif
+ __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);

 // Convert 32 random bits in rax to 0.(32 random bits) in a double
 // by computing:
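The random-number helper now receives the isolate as its single C argument, so PrepareCallCFunction reserves one slot and the isolate address is loaded into the first integer-argument register of the host ABI: rcx on Win64, rdi on the System V x64 ABI. A hedged sketch of what the callee side looks like, with an invented xorshift body standing in for V8's actual generator:

// Illustrative sketch: a C helper that receives the isolate as its first
// argument so random state can live per isolate instead of in globals.
// The Isolate struct and its seed member are stand-ins, not real V8 code.
#include <cstdint>

struct Isolate {
  uint32_t random_seed = 0x2545F491u;
};

// On x64 the first integer argument arrives in rdi (System V) or rcx
// (Win64); the generated code above loads the isolate address into the
// matching register before the call.
extern "C" uint32_t random_uint32(Isolate* isolate) {
  // Xorshift step on the per-isolate seed (illustrative, not V8's RNG).
  uint32_t x = isolate->random_seed;
  x ^= x << 13;
  x ^= x >> 17;
  x ^= x << 5;
  isolate->random_seed = x;
  return x;
}

int main() {
  Isolate isolate;
  return random_uint32(&isolate) == 0 ? 1 : 0;
}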
@@ -6657,10 +6674,10 @@ void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) {
 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();

 Handle<FixedArray> jsfunction_result_caches(
- Top::global_context()->jsfunction_result_caches());
+ Isolate::Current()->global_context()->jsfunction_result_caches());
 if (jsfunction_result_caches->length() <= cache_id) {
 __ Abort("Attempt to use undefined cache.");
- frame_->Push(Factory::undefined_value());
+ frame_->Push(FACTORY->undefined_value());
 return;
 }

@@ -6774,8 +6791,8 @@ void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) {
 // Fetch the map and check if array is in fast case.
 // Check that object doesn't require security checks and
 // has no indexed interceptor.
- __ CmpObjectType(object.reg(), FIRST_JS_OBJECT_TYPE, tmp1.reg());
- deferred->Branch(below);
+ __ CmpObjectType(object.reg(), JS_ARRAY_TYPE, tmp1.reg());
+ deferred->Branch(not_equal);
 __ testb(FieldOperand(tmp1.reg(), Map::kBitFieldOffset),
 Immediate(KeyedLoadIC::kSlowCaseBitFieldMask));
 deferred->Branch(not_zero);
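The fast path in GenerateSwapElements is narrowed here: instead of accepting anything at or above FIRST_JS_OBJECT_TYPE (any JS object, deferring on "below"), it now requires exactly JS_ARRAY_TYPE and defers on "not_equal". A small sketch of that semantic difference, with placeholder instance-type values:

// Illustrative sketch of the semantics change in the type check above.
// The InstanceType values are invented placeholders, not V8's real ones.
enum InstanceType {
  FIRST_JS_OBJECT_TYPE = 100,
  JS_OBJECT_TYPE = 100,
  JS_ARRAY_TYPE = 101
};

// Old check: accept any heap object whose instance type is at least
// FIRST_JS_OBJECT_TYPE (i.e. any JS object), bail out when "below".
bool old_fast_path(InstanceType type) { return type >= FIRST_JS_OBJECT_TYPE; }

// New check: accept only genuine JSArrays, bail out on "not_equal".
bool new_fast_path(InstanceType type) { return type == JS_ARRAY_TYPE; }

int main() {
  // A plain JSObject now takes the deferred slow path instead of the
  // inlined element swap.
  return (old_fast_path(JS_OBJECT_TYPE) && !new_fast_path(JS_OBJECT_TYPE)) ? 0 : 1;
}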
@@ -6817,7 +6834,7 @@ void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) {

 Label done;
 __ InNewSpace(tmp1.reg(), tmp2.reg(), equal, &done);
- // Possible optimization: do a check that both values are Smis
+ // Possible optimization: do a check that both values are smis
 // (or them and test against Smi mask.)

 __ movq(tmp2.reg(), tmp1.reg());
@@ -6826,7 +6843,7 @@ void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) {
 __ bind(&done);

 deferred->BindExit();
- frame_->Push(Factory::undefined_value());
+ frame_->Push(FACTORY->undefined_value());
 }


@@ -7030,7 +7047,8 @@ void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) {
 void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) {
 ASSERT_EQ(args->length(), 1);
 Load(args->at(0));
- TranscendentalCacheStub stub(TranscendentalCache::SIN);
+ TranscendentalCacheStub stub(TranscendentalCache::SIN,
+ TranscendentalCacheStub::TAGGED);
 Result result = frame_->CallStub(&stub, 1);
 frame_->Push(&result);
 }
@@ -7039,7 +7057,8 @@ void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) {
 void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) {
 ASSERT_EQ(args->length(), 1);
 Load(args->at(0));
- TranscendentalCacheStub stub(TranscendentalCache::COS);
+ TranscendentalCacheStub stub(TranscendentalCache::COS,
+ TranscendentalCacheStub::TAGGED);
 Result result = frame_->CallStub(&stub, 1);
 frame_->Push(&result);
 }
@@ -7048,7 +7067,8 @@ void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) {
 void CodeGenerator::GenerateMathLog(ZoneList<Expression*>* args) {
 ASSERT_EQ(args->length(), 1);
 Load(args->at(0));
- TranscendentalCacheStub stub(TranscendentalCache::LOG);
+ TranscendentalCacheStub stub(TranscendentalCache::LOG,
+ TranscendentalCacheStub::TAGGED);
 Result result = frame_->CallStub(&stub, 1);
 frame_->Push(&result);
 }
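TranscendentalCacheStub now takes a second constructor argument describing how its operand is passed; the classic code generator always hands it a tagged value on the stack, hence TAGGED at every call site. The sketch below shows the two-field stub key this implies; the UNTAGGED counterpart is an assumption based on the enum name, not something visible in this hunk:

// Illustrative sketch of the two-argument stub key: the transcendental
// stub is specialized both on the operation and on how its input arrives.
// Names mirror the diff; the class body is a stand-in.
struct TranscendentalCacheStub {
  enum Type { SIN, COS, LOG };
  enum ArgumentType { TAGGED, UNTAGGED };  // UNTAGGED is assumed here.

  TranscendentalCacheStub(Type type, ArgumentType argument_type)
      : type_(type), argument_type_(argument_type) {}

  // A stub cache would key on both fields so a SIN/TAGGED stub is never
  // reused where a SIN/UNTAGGED stub is expected.
  int MinorKey() const { return (type_ << 1) | argument_type_; }

 private:
  Type type_;
  ArgumentType argument_type_;
};

int main() {
  TranscendentalCacheStub stub(TranscendentalCacheStub::SIN,
                               TranscendentalCacheStub::TAGGED);
  return stub.MinorKey() == 0 ? 0 : 1;
}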
@@ -7167,7 +7187,7 @@ void CodeGenerator::GenerateGetCachedArrayIndex(ZoneList<Expression*>* args) {


 void CodeGenerator::GenerateFastAsciiArrayJoin(ZoneList<Expression*>* args) {
- frame_->Push(Factory::undefined_value());
+ frame_->Push(FACTORY->undefined_value());
 }


@@ -7178,7 +7198,7 @@ void CodeGenerator::VisitCallRuntime(CallRuntime* node) {

 ZoneList<Expression*>* args = node->arguments();
 Comment cmnt(masm_, "[ CallRuntime");
- Runtime::Function* function = node->function();
+ const Runtime::Function* function = node->function();

 if (function == NULL) {
 // Push the builtins object found in the current global object.
@@ -7262,12 +7282,12 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
 } else {
 // Default: Result of deleting non-global, not dynamically
 // introduced variables is false.
- frame_->Push(Factory::false_value());
+ frame_->Push(FACTORY->false_value());
 }
 } else {
 // Default: Result of deleting expressions is true.
 Load(node->expression()); // may have side-effects
- frame_->SetElementAt(0, Factory::true_value());
+ frame_->SetElementAt(0, FACTORY->true_value());
 }

 } else if (op == Token::TYPEOF) {
@@ -7288,10 +7308,10 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
 expression->AsLiteral()->IsNull())) {
 // Omit evaluating the value of the primitive literal.
 // It will be discarded anyway, and can have no side effect.
- frame_->Push(Factory::undefined_value());
+ frame_->Push(FACTORY->undefined_value());
 } else {
 Load(node->expression());
- frame_->SetElementAt(0, Factory::undefined_value());
+ frame_->SetElementAt(0, FACTORY->undefined_value());
 }

 } else {
@@ -7763,7 +7783,7 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
 Result answer = frame_->Pop();
 answer.ToRegister();

- if (check->Equals(Heap::number_symbol())) {
+ if (check->Equals(HEAP->number_symbol())) {
 Condition is_smi = masm_->CheckSmi(answer.reg());
 destination()->true_target()->Branch(is_smi);
 frame_->Spill(answer.reg());
@@ -7772,7 +7792,7 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
 answer.Unuse();
 destination()->Split(equal);

- } else if (check->Equals(Heap::string_symbol())) {
+ } else if (check->Equals(HEAP->string_symbol())) {
 Condition is_smi = masm_->CheckSmi(answer.reg());
 destination()->false_target()->Branch(is_smi);

@@ -7786,14 +7806,14 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
 answer.Unuse();
 destination()->Split(below); // Unsigned byte comparison needed.

- } else if (check->Equals(Heap::boolean_symbol())) {
+ } else if (check->Equals(HEAP->boolean_symbol())) {
 __ CompareRoot(answer.reg(), Heap::kTrueValueRootIndex);
 destination()->true_target()->Branch(equal);
 __ CompareRoot(answer.reg(), Heap::kFalseValueRootIndex);
 answer.Unuse();
 destination()->Split(equal);

- } else if (check->Equals(Heap::undefined_symbol())) {
+ } else if (check->Equals(HEAP->undefined_symbol())) {
 __ CompareRoot(answer.reg(), Heap::kUndefinedValueRootIndex);
 destination()->true_target()->Branch(equal);

@@ -7808,7 +7828,7 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
 answer.Unuse();
 destination()->Split(not_zero);

- } else if (check->Equals(Heap::function_symbol())) {
+ } else if (check->Equals(HEAP->function_symbol())) {
 Condition is_smi = masm_->CheckSmi(answer.reg());
 destination()->false_target()->Branch(is_smi);
 frame_->Spill(answer.reg());
@@ -7819,7 +7839,7 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
 answer.Unuse();
 destination()->Split(equal);

- } else if (check->Equals(Heap::object_symbol())) {
+ } else if (check->Equals(HEAP->object_symbol())) {
 Condition is_smi = masm_->CheckSmi(answer.reg());
 destination()->false_target()->Branch(is_smi);
 __ CompareRoot(answer.reg(), Heap::kNullValueRootIndex);
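In the typeof comparisons, symbol lookups switch from the static Heap::xxx_symbol() form to the per-isolate HEAP macro, while Heap::kTrueValueRootIndex and friends stay static because they are compile-time root indices rather than heap objects. A stand-in sketch of that split:

// Illustrative sketch only: the real V8 Heap class is much larger. The
// point is the split visible above: root *indices* stay static enum
// constants on Heap, while root *objects* (the symbols) are reached
// through the per-isolate HEAP macro.
#include <string>

struct Heap {
  // Compile-time constants: no isolate needed, so Heap::kTrueValueRootIndex
  // keeps its static spelling in the diff.
  enum RootListIndex { kTrueValueRootIndex, kFalseValueRootIndex };

  // Instance data: each isolate owns its own copies of the symbols.
  const std::string& number_symbol() const { return number_symbol_; }

 private:
  std::string number_symbol_ = "number";
};

struct Isolate {
  static Isolate* Current() {
    static thread_local Isolate isolate;
    return &isolate;
  }
  Heap* heap() { return &heap_; }
 private:
  Heap heap_;
};

#define HEAP (Isolate::Current()->heap())

int main() {
  return HEAP->number_symbol() == "number" ? 0 : 1;
}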
@@ -7949,7 +7969,7 @@ bool CodeGenerator::HasValidEntryRegisters() {
 && (allocator()->count(r9) == (frame()->is_used(r9) ? 1 : 0))
 && (allocator()->count(r11) == (frame()->is_used(r11) ? 1 : 0))
 && (allocator()->count(r14) == (frame()->is_used(r14) ? 1 : 0))
- && (allocator()->count(r12) == (frame()->is_used(r12) ? 1 : 0));
+ && (allocator()->count(r15) == (frame()->is_used(r15) ? 1 : 0));
 }
 #endif

@@ -7983,7 +8003,7 @@ void DeferredReferenceGetNamedValue::Generate() {
 __ movq(rax, receiver_);
 }
 __ Move(rcx, name_);
- Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+ Handle<Code> ic = Isolate::Current()->builtins()->LoadIC_Initialize();
 __ Call(ic, RelocInfo::CODE_TARGET);
 // The call must be followed by a test rax instruction to indicate
 // that the inobject property case was inlined.
@@ -7995,7 +8015,8 @@ void DeferredReferenceGetNamedValue::Generate() {
 // Here we use masm_-> instead of the __ macro because this is the
 // instruction that gets patched and coverage code gets in the way.
 masm_->testl(rax, Immediate(-delta_to_patch_site));
- __ IncrementCounter(&Counters::named_load_inline_miss, 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->named_load_inline_miss(), 1);

 if (!dst_.is(rax)) __ movq(dst_, rax);
 }
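Statistics counters follow the same isolate migration: &Counters::named_load_inline_miss (a global) becomes a lookup on the Counters object owned by masm()->isolate(). A simplified sketch of that ownership chain, with stand-in classes:

// Illustrative sketch of the counter refactoring: counters become fields
// of a per-isolate Counters object instead of globals. StatsCounter and
// the accessor names are simplified stand-ins for the real classes.
struct StatsCounter {
  int value = 0;
  void Increment(int by) { value += by; }
};

class Counters {
 public:
  StatsCounter* named_load_inline_miss() { return &named_load_inline_miss_; }
  StatsCounter* keyed_load_inline_miss() { return &keyed_load_inline_miss_; }
 private:
  StatsCounter named_load_inline_miss_;
  StatsCounter keyed_load_inline_miss_;
};

class Isolate {
 public:
  static Isolate* Current() {
    static thread_local Isolate isolate;
    return &isolate;
  }
  Counters* counters() { return &counters_; }
 private:
  Counters counters_;
};

int main() {
  // What __ IncrementCounter(counters->named_load_inline_miss(), 1) boils
  // down to once the isolate indirection is resolved.
  Counters* counters = Isolate::Current()->counters();
  counters->named_load_inline_miss()->Increment(1);
  return counters->named_load_inline_miss()->value == 1 ? 0 : 1;
}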
@@ -8048,7 +8069,7 @@ void DeferredReferenceGetKeyedValue::Generate() {
 // it in the IC initialization code and patch the movq instruction.
 // This means that we cannot allow test instructions after calls to
 // KeyedLoadIC stubs in other places.
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+ Handle<Code> ic = Isolate::Current()->builtins()->KeyedLoadIC_Initialize();
 __ Call(ic, RelocInfo::CODE_TARGET);
 // The delta from the start of the map-compare instruction to the
 // test instruction. We use masm_-> directly here instead of the __
@@ -8062,7 +8083,8 @@ void DeferredReferenceGetKeyedValue::Generate() {
 // 7-byte NOP with non-zero immediate (0f 1f 80 xxxxxxxx) which won't
 // be generated normally.
 masm_->testl(rax, Immediate(-delta_to_patch_site));
- __ IncrementCounter(&Counters::keyed_load_inline_miss, 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_load_inline_miss(), 1);

 if (!dst_.is(rax)) __ movq(dst_, rax);
 }
@@ -8072,8 +8094,12 @@ class DeferredReferenceSetKeyedValue: public DeferredCode {
 public:
 DeferredReferenceSetKeyedValue(Register value,
 Register key,
- Register receiver)
- : value_(value), key_(key), receiver_(receiver) {
+ Register receiver,
+ StrictModeFlag strict_mode)
+ : value_(value),
+ key_(key),
+ receiver_(receiver),
+ strict_mode_(strict_mode) {
 set_comment("[ DeferredReferenceSetKeyedValue");
 }

@@ -8086,11 +8112,13 @@ class DeferredReferenceSetKeyedValue: public DeferredCode {
 Register key_;
 Register receiver_;
 Label patch_site_;
+ StrictModeFlag strict_mode_;
 };


 void DeferredReferenceSetKeyedValue::Generate() {
- __ IncrementCounter(&Counters::keyed_store_inline_miss, 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_store_inline_miss(), 1);
 // Move value, receiver, and key to registers rax, rdx, and rcx, as
 // the IC stub expects.
 // Move value to rax, using xchg if the receiver or key is in rax.
@@ -8137,7 +8165,9 @@ void DeferredReferenceSetKeyedValue::Generate() {
 }

 // Call the IC stub.
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+ (strict_mode_ == kStrictMode) ? Builtins::kKeyedStoreIC_Initialize_Strict
+ : Builtins::kKeyedStoreIC_Initialize));
 __ Call(ic, RelocInfo::CODE_TARGET);
 // The delta from the start of the map-compare instructions (initial movq)
 // to the test instruction. We use masm_-> directly here instead of the
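DeferredReferenceSetKeyedValue now carries a StrictModeFlag so that when the inline store misses, the deferred code calls the strict variant of the KeyedStoreIC builtin for strict-mode stores. The selection logic reduces to the ternary shown above; a stand-alone sketch:

// Illustrative sketch of the builtin selection added above. The enums are
// stand-ins with the same shape as the names used in the diff.
enum StrictModeFlag { kNonStrictMode, kStrictMode };
enum BuiltinId { kKeyedStoreIC_Initialize, kKeyedStoreIC_Initialize_Strict };

// Pick the IC entry point that matches the language mode of the store
// site, exactly as the ternary in the deferred code does.
BuiltinId KeyedStoreBuiltin(StrictModeFlag strict_mode) {
  return (strict_mode == kStrictMode) ? kKeyedStoreIC_Initialize_Strict
                                      : kKeyedStoreIC_Initialize;
}

int main() {
  return KeyedStoreBuiltin(kStrictMode) == kKeyedStoreIC_Initialize_Strict
             ? 0
             : 1;
}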
@@ -8182,17 +8212,9 @@ Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
 result = allocator()->Allocate();
 ASSERT(result.is_valid());

- // Cannot use r12 for receiver, because that changes
- // the distance between a call and a fixup location,
- // due to a special encoding of r12 as r/m in a ModR/M byte.
- if (receiver.reg().is(r12)) {
- frame()->Spill(receiver.reg()); // It will be overwritten with result.
- // Swap receiver and value.
- __ movq(result.reg(), receiver.reg());
- Result temp = receiver;
- receiver = result;
- result = temp;
- }
+ // r12 is now a reserved register, so it cannot be the receiver.
+ // If it was, the distance to the fixup location would not be constant.
+ ASSERT(!receiver.reg().is(r12));

 DeferredReferenceGetNamedValue* deferred =
 new DeferredReferenceGetNamedValue(result.reg(), receiver.reg(), name);
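The deleted workaround existed because a memory operand whose base register is r12 needs an extra SIB byte, so the instruction is one byte longer and the patch-site distances the inline cache relies on would shift. With r12 removed from the allocatable set (note the r12 to r15 change in HasValidEntryRegisters above), the swap collapses to an assert. A byte-level illustration of the encoding difference:

// Illustrative byte-level sketch of why r12 was special-cased: with r12 as
// the base register the ModR/M r/m field is 100b, which forces an extra SIB
// byte, so the same logical load is one byte longer than with, say, rbx.
// Encodings below are for "mov rax, [rbx]" and "mov rax, [r12]".
#include <cstdio>

int main() {
  const unsigned char mov_rax_rbx_mem[] = {0x48, 0x8B, 0x03};        // 3 bytes
  const unsigned char mov_rax_r12_mem[] = {0x49, 0x8B, 0x04, 0x24};  // 4 bytes
  std::printf("[rbx] base: %zu bytes, [r12] base: %zu bytes\n",
              sizeof(mov_rax_rbx_mem), sizeof(mov_rax_r12_mem));
  // A one-byte difference like this would shift the patch-site offsets the
  // inline caches rely on, hence the old swap and the new assert.
  return 0;
}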
@@ -8204,7 +8226,8 @@ Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
 // This is the map check instruction that will be patched (so we can't
 // use the double underscore macro that may insert instructions).
 // Initially use an invalid map to force a failure.
- masm()->Move(kScratchRegister, Factory::null_value());
+ masm()->movq(kScratchRegister, FACTORY->null_value(),
+ RelocInfo::EMBEDDED_OBJECT);
 masm()->cmpq(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
 kScratchRegister);
 // This branch is always a forwards branch so it's always a fixed
@@ -8222,7 +8245,8 @@ Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
 int offset = kMaxInt;
 masm()->movq(result.reg(), FieldOperand(receiver.reg(), offset));

- __ IncrementCounter(&Counters::named_load_inline, 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->named_load_inline(), 1);
 deferred->BindExit();
 }
 ASSERT(frame()->height() == original_height - 1);
@@ -8257,17 +8281,9 @@ Result CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) {
 result = allocator()->Allocate();
 ASSERT(result.is_valid() && receiver.is_valid() && value.is_valid());

- // Cannot use r12 for receiver, because that changes
- // the distance between a call and a fixup location,
- // due to a special encoding of r12 as r/m in a ModR/M byte.
- if (receiver.reg().is(r12)) {
- frame()->Spill(receiver.reg()); // It will be overwritten with result.
- // Swap receiver and value.
- __ movq(result.reg(), receiver.reg());
- Result temp = receiver;
- receiver = result;
- result = temp;
- }
+ // r12 is now a reserved register, so it cannot be the receiver.
+ // If it was, the distance to the fixup location would not be constant.
+ ASSERT(!receiver.reg().is(r12));

 // Check that the receiver is a heap object.
 Condition is_smi = masm()->CheckSmi(receiver.reg());
@@ -8280,7 +8296,8 @@ Result CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) {
 // the __ macro for the following two instructions because it
 // might introduce extra instructions.
 __ bind(&patch_site);
- masm()->Move(kScratchRegister, Factory::null_value());
+ masm()->movq(kScratchRegister, FACTORY->null_value(),
+ RelocInfo::EMBEDDED_OBJECT);
 masm()->cmpq(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
 kScratchRegister);
 // This branch is always a forwards branch so it's always a fixed size
@@ -8393,7 +8410,7 @@ Result CodeGenerator::EmitKeyedLoad() {
 // coverage code can interfere with the patching. Do not use a load
 // from the root array to load null_value, since the load must be patched
 // with the expected receiver map, which is not in the root array.
- masm_->movq(kScratchRegister, Factory::null_value(),
+ masm_->movq(kScratchRegister, FACTORY->null_value(),
 RelocInfo::EMBEDDED_OBJECT);
 masm_->cmpq(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
 kScratchRegister);
@@ -8428,7 +8445,8 @@ Result CodeGenerator::EmitKeyedLoad() {
 result = elements;
 __ CompareRoot(result.reg(), Heap::kTheHoleValueRootIndex);
 deferred->Branch(equal);
- __ IncrementCounter(&Counters::keyed_load_inline, 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_load_inline(), 1);

 deferred->BindExit();
 } else {
@@ -8478,7 +8496,8 @@ Result CodeGenerator::EmitKeyedStore(StaticType* key_type) {
 DeferredReferenceSetKeyedValue* deferred =
 new DeferredReferenceSetKeyedValue(result.reg(),
 key.reg(),
- receiver.reg());
+ receiver.reg(),
+ strict_mode_flag());

 // Check that the receiver is not a smi.
 __ JumpIfSmi(receiver.reg(), deferred->entry_label());
@@ -8494,12 +8513,6 @@ Result CodeGenerator::EmitKeyedStore(StaticType* key_type) {
 __ CmpObjectType(receiver.reg(), JS_ARRAY_TYPE, kScratchRegister);
 deferred->Branch(not_equal);

- // Check that the key is within bounds. Both the key and the length of
- // the JSArray are smis. Use unsigned comparison to handle negative keys.
- __ SmiCompare(FieldOperand(receiver.reg(), JSArray::kLengthOffset),
- key.reg());
- deferred->Branch(below_equal);
-
 // Get the elements array from the receiver and check that it is not a
 // dictionary.
 __ movq(tmp.reg(),
@@ -8522,12 +8535,20 @@ Result CodeGenerator::EmitKeyedStore(StaticType* key_type) {
 __ bind(deferred->patch_site());
 // Avoid using __ to ensure the distance from patch_site
 // to the map address is always the same.
- masm()->movq(kScratchRegister, Factory::fixed_array_map(),
+ masm()->movq(kScratchRegister, FACTORY->fixed_array_map(),
 RelocInfo::EMBEDDED_OBJECT);
 __ cmpq(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
 kScratchRegister);
 deferred->Branch(not_equal);

+ // Check that the key is within bounds. Both the key and the length of
+ // the JSArray are smis (because the fixed array check above ensures the
+ // elements are in fast case). Use unsigned comparison to handle negative
+ // keys.
+ __ SmiCompare(FieldOperand(receiver.reg(), JSArray::kLengthOffset),
+ key.reg());
+ deferred->Branch(below_equal);
+

 // Store the value.
 SmiIndex index =
 masm()->SmiToIndex(kScratchRegister, key.reg(), kPointerSizeLog2);
@@ -8536,11 +8557,12 @@ Result CodeGenerator::EmitKeyedStore(StaticType* key_type) {
 index.scale,
 FixedArray::kHeaderSize),
 result.reg());
- __ IncrementCounter(&Counters::keyed_store_inline, 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_store_inline(), 1);

 deferred->BindExit();
 } else {
- result = frame()->CallKeyedStoreIC();
+ result = frame()->CallKeyedStoreIC(strict_mode_flag());
 // Make sure that we do not have a test instruction after the
 // call. A test instruction after the call is used to
 // indicate that we have generated an inline version of the
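Two related changes land in EmitKeyedStore: the key-versus-length bounds check moves below the fixed-array map check, where the assumption that the length is a smi actually holds, and the non-inlined path forwards strict_mode_flag() to CallKeyedStoreIC. A sketch of the resulting guard order, using invented helper names:

// Illustrative ordering sketch for the fast keyed-store path above. The
// helper names are invented; only the order of the guards mirrors the diff:
// the smi bounds check is sound only after the elements have been proven to
// be a fast-case FixedArray.
#include <vector>

struct Receiver {
  bool is_js_array;
  bool has_fast_fixed_array_elements;
  std::vector<int> elements;  // stand-in for the FixedArray backing store
};

// Returns true if the inline fast path may store, false to defer to the IC.
bool TryInlineKeyedStore(Receiver& r, unsigned key, int value) {
  if (!r.is_js_array) return false;                    // CmpObjectType check
  if (!r.has_fast_fixed_array_elements) return false;  // fixed_array_map check
  // Only now is the "length and key are both smis" bounds check valid;
  // unsigned comparison also rejects negative keys.
  if (key >= r.elements.size()) return false;
  r.elements[key] = value;
  return true;
}

int main() {
  Receiver r{true, true, std::vector<int>(4, 0)};
  return TryInlineKeyedStore(r, 2, 7) && r.elements[2] == 7 ? 0 : 1;
}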
@@ -8733,7 +8755,7 @@ ModuloFunction CreateModuloFunction() {
 &actual_size,
 true));
 CHECK(buffer);
- Assembler masm(buffer, static_cast<int>(actual_size));
+ Assembler masm(NULL, buffer, static_cast<int>(actual_size));
 // Generated code is put into a fixed, unmovable, buffer, and not into
 // the V8 heap. We can't, and don't, refer to any relocatable addresses
 // (e.g. the JavaScript nan-object).
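The Assembler constructor gains a leading context argument (an Isolate*, judging by the rest of this diff); CreateModuloFunction passes NULL because the helper is emitted into a fixed, unmovable buffer and never embeds isolate-local, relocatable objects. A stand-in sketch of that constructor shape:

// Illustrative sketch of the constructor change: the assembler gains a
// leading context argument (assumed to be an Isolate* here), and NULL is
// acceptable when the generated code will not embed any isolate-local
// objects. All types below are stand-ins, not the real V8 classes.
#include <cstddef>

struct Isolate;

class Assembler {
 public:
  Assembler(Isolate* isolate, void* buffer, int buffer_size)
      : isolate_(isolate), buffer_(buffer), buffer_size_(buffer_size) {}

  // Embedding a heap object would need relocation info tied to an isolate,
  // so a NULL-isolate assembler must stick to raw, position-fixed code.
  bool can_embed_heap_objects() const { return isolate_ != NULL; }

 private:
  Isolate* isolate_;
  void* buffer_;
  int buffer_size_;
};

int main() {
  char buffer[64];
  Assembler masm(NULL, buffer, static_cast<int>(sizeof(buffer)));
  return masm.can_embed_heap_objects() ? 1 : 0;
}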
@@ -8807,7 +8829,7 @@ ModuloFunction CreateModuloFunction() {

 CodeDesc desc;
 masm.GetCode(&desc);
- // Call the function from C++.
+ // Call the function from C++ through this pointer.
 return FUNCTION_CAST<ModuloFunction>(buffer);
 }