mustang 0.0.1 → 0.1.0

Files changed (560)
  1. data/.rspec +1 -0
  2. data/Isolate +9 -0
  3. data/README.md +6 -12
  4. data/Rakefile +30 -4
  5. data/TODO.md +9 -0
  6. data/ext/v8/extconf.rb +56 -0
  7. data/ext/v8/v8.cpp +37 -0
  8. data/ext/v8/v8_array.cpp +161 -0
  9. data/ext/v8/v8_array.h +17 -0
  10. data/ext/v8/v8_base.cpp +147 -0
  11. data/ext/v8/v8_base.h +23 -0
  12. data/ext/v8/v8_cast.cpp +151 -0
  13. data/ext/v8/v8_cast.h +64 -0
  14. data/ext/v8/v8_context.cpp +174 -0
  15. data/ext/v8/v8_context.h +12 -0
  16. data/ext/v8/v8_date.cpp +61 -0
  17. data/ext/v8/v8_date.h +16 -0
  18. data/ext/v8/v8_errors.cpp +147 -0
  19. data/ext/v8/v8_errors.h +19 -0
  20. data/ext/v8/v8_external.cpp +66 -0
  21. data/ext/v8/v8_external.h +16 -0
  22. data/ext/v8/v8_function.cpp +182 -0
  23. data/ext/v8/v8_function.h +14 -0
  24. data/ext/v8/v8_integer.cpp +70 -0
  25. data/ext/v8/v8_integer.h +16 -0
  26. data/ext/v8/v8_macros.h +30 -0
  27. data/ext/v8/v8_main.cpp +53 -0
  28. data/ext/v8/v8_main.h +13 -0
  29. data/ext/v8/v8_number.cpp +62 -0
  30. data/ext/v8/v8_number.h +16 -0
  31. data/ext/v8/v8_object.cpp +172 -0
  32. data/ext/v8/v8_object.h +17 -0
  33. data/ext/v8/v8_ref.cpp +72 -0
  34. data/ext/v8/v8_ref.h +43 -0
  35. data/ext/v8/v8_regexp.cpp +148 -0
  36. data/ext/v8/v8_regexp.h +16 -0
  37. data/ext/v8/v8_string.cpp +78 -0
  38. data/ext/v8/v8_string.h +16 -0
  39. data/ext/v8/v8_value.cpp +370 -0
  40. data/ext/v8/v8_value.h +19 -0
  41. data/gemspec.yml +2 -1
  42. data/lib/core_ext/class.rb +14 -0
  43. data/lib/core_ext/object.rb +12 -0
  44. data/lib/core_ext/symbol.rb +23 -0
  45. data/lib/mustang.rb +44 -0
  46. data/lib/mustang/context.rb +69 -0
  47. data/lib/mustang/errors.rb +36 -0
  48. data/lib/support/delegated.rb +25 -0
  49. data/lib/v8/array.rb +21 -0
  50. data/lib/v8/context.rb +13 -0
  51. data/lib/v8/date.rb +20 -0
  52. data/lib/v8/error.rb +15 -0
  53. data/lib/v8/external.rb +16 -0
  54. data/lib/v8/function.rb +11 -0
  55. data/lib/v8/integer.rb +16 -0
  56. data/lib/v8/number.rb +16 -0
  57. data/lib/v8/object.rb +66 -0
  58. data/lib/v8/regexp.rb +23 -0
  59. data/lib/v8/string.rb +27 -0
  60. data/mustang.gemspec +3 -0
  61. data/spec/core_ext/class_spec.rb +19 -0
  62. data/spec/core_ext/object_spec.rb +19 -0
  63. data/spec/core_ext/symbol_spec.rb +27 -0
  64. data/spec/fixtures/test1.js +2 -0
  65. data/spec/fixtures/test2.js +2 -0
  66. data/spec/spec_helper.rb +20 -0
  67. data/spec/v8/array_spec.rb +88 -0
  68. data/spec/v8/cast_spec.rb +151 -0
  69. data/spec/v8/context_spec.rb +78 -0
  70. data/spec/v8/data_spec.rb +39 -0
  71. data/spec/v8/date_spec.rb +45 -0
  72. data/spec/v8/empty_spec.rb +27 -0
  73. data/spec/v8/errors_spec.rb +142 -0
  74. data/spec/v8/external_spec.rb +44 -0
  75. data/spec/v8/function_spec.rb +170 -0
  76. data/spec/v8/integer_spec.rb +41 -0
  77. data/spec/v8/main_spec.rb +18 -0
  78. data/spec/v8/null_spec.rb +27 -0
  79. data/spec/v8/number_spec.rb +40 -0
  80. data/spec/v8/object_spec.rb +79 -0
  81. data/spec/v8/primitive_spec.rb +9 -0
  82. data/spec/v8/regexp_spec.rb +65 -0
  83. data/spec/v8/string_spec.rb +48 -0
  84. data/spec/v8/undefined_spec.rb +27 -0
  85. data/spec/v8/value_spec.rb +215 -0
  86. data/vendor/v8/.gitignore +2 -0
  87. data/vendor/v8/AUTHORS +3 -1
  88. data/vendor/v8/ChangeLog +117 -0
  89. data/vendor/v8/SConstruct +334 -53
  90. data/vendor/v8/include/v8-debug.h +21 -11
  91. data/vendor/v8/include/v8-preparser.h +1 -1
  92. data/vendor/v8/include/v8-profiler.h +122 -43
  93. data/vendor/v8/include/v8-testing.h +5 -0
  94. data/vendor/v8/include/v8.h +171 -17
  95. data/vendor/v8/preparser/SConscript +38 -0
  96. data/vendor/v8/preparser/preparser-process.cc +77 -114
  97. data/vendor/v8/samples/shell.cc +232 -46
  98. data/vendor/v8/src/SConscript +29 -5
  99. data/vendor/v8/src/accessors.cc +70 -211
  100. data/vendor/v8/{test/cctest/test-mips.cc → src/allocation-inl.h} +15 -18
  101. data/vendor/v8/src/allocation.cc +0 -82
  102. data/vendor/v8/src/allocation.h +9 -42
  103. data/vendor/v8/src/api.cc +1645 -1156
  104. data/vendor/v8/src/api.h +76 -12
  105. data/vendor/v8/src/apiutils.h +0 -7
  106. data/vendor/v8/src/arguments.h +15 -4
  107. data/vendor/v8/src/arm/assembler-arm-inl.h +10 -9
  108. data/vendor/v8/src/arm/assembler-arm.cc +62 -23
  109. data/vendor/v8/src/arm/assembler-arm.h +76 -11
  110. data/vendor/v8/src/arm/builtins-arm.cc +39 -33
  111. data/vendor/v8/src/arm/code-stubs-arm.cc +1182 -402
  112. data/vendor/v8/src/arm/code-stubs-arm.h +20 -54
  113. data/vendor/v8/src/arm/codegen-arm.cc +159 -106
  114. data/vendor/v8/src/arm/codegen-arm.h +6 -6
  115. data/vendor/v8/src/arm/constants-arm.h +16 -1
  116. data/vendor/v8/src/arm/cpu-arm.cc +7 -5
  117. data/vendor/v8/src/arm/debug-arm.cc +6 -4
  118. data/vendor/v8/src/arm/deoptimizer-arm.cc +51 -14
  119. data/vendor/v8/src/arm/disasm-arm.cc +47 -15
  120. data/vendor/v8/src/arm/frames-arm.h +1 -1
  121. data/vendor/v8/src/arm/full-codegen-arm.cc +724 -408
  122. data/vendor/v8/src/arm/ic-arm.cc +90 -85
  123. data/vendor/v8/src/arm/lithium-arm.cc +140 -69
  124. data/vendor/v8/src/arm/lithium-arm.h +161 -46
  125. data/vendor/v8/src/arm/lithium-codegen-arm.cc +567 -297
  126. data/vendor/v8/src/arm/lithium-codegen-arm.h +21 -9
  127. data/vendor/v8/src/arm/lithium-gap-resolver-arm.cc +2 -0
  128. data/vendor/v8/src/arm/macro-assembler-arm.cc +457 -96
  129. data/vendor/v8/src/arm/macro-assembler-arm.h +115 -18
  130. data/vendor/v8/src/arm/regexp-macro-assembler-arm.cc +20 -13
  131. data/vendor/v8/src/arm/regexp-macro-assembler-arm.h +1 -0
  132. data/vendor/v8/src/arm/simulator-arm.cc +184 -101
  133. data/vendor/v8/src/arm/simulator-arm.h +26 -21
  134. data/vendor/v8/src/arm/stub-cache-arm.cc +450 -467
  135. data/vendor/v8/src/arm/virtual-frame-arm.cc +14 -12
  136. data/vendor/v8/src/arm/virtual-frame-arm.h +11 -8
  137. data/vendor/v8/src/array.js +35 -18
  138. data/vendor/v8/src/assembler.cc +186 -92
  139. data/vendor/v8/src/assembler.h +106 -69
  140. data/vendor/v8/src/ast-inl.h +5 -0
  141. data/vendor/v8/src/ast.cc +46 -35
  142. data/vendor/v8/src/ast.h +107 -50
  143. data/vendor/v8/src/atomicops.h +2 -0
  144. data/vendor/v8/src/atomicops_internals_mips_gcc.h +169 -0
  145. data/vendor/v8/src/bootstrapper.cc +649 -399
  146. data/vendor/v8/src/bootstrapper.h +94 -27
  147. data/vendor/v8/src/builtins.cc +359 -227
  148. data/vendor/v8/src/builtins.h +157 -123
  149. data/vendor/v8/src/checks.cc +2 -2
  150. data/vendor/v8/src/checks.h +4 -0
  151. data/vendor/v8/src/code-stubs.cc +27 -17
  152. data/vendor/v8/src/code-stubs.h +38 -17
  153. data/vendor/v8/src/codegen-inl.h +5 -1
  154. data/vendor/v8/src/codegen.cc +27 -17
  155. data/vendor/v8/src/codegen.h +9 -9
  156. data/vendor/v8/src/compilation-cache.cc +92 -206
  157. data/vendor/v8/src/compilation-cache.h +205 -30
  158. data/vendor/v8/src/compiler.cc +107 -120
  159. data/vendor/v8/src/compiler.h +17 -2
  160. data/vendor/v8/src/contexts.cc +22 -15
  161. data/vendor/v8/src/contexts.h +14 -8
  162. data/vendor/v8/src/conversions.cc +86 -30
  163. data/vendor/v8/src/counters.cc +19 -4
  164. data/vendor/v8/src/counters.h +28 -16
  165. data/vendor/v8/src/cpu-profiler-inl.h +4 -3
  166. data/vendor/v8/src/cpu-profiler.cc +123 -72
  167. data/vendor/v8/src/cpu-profiler.h +33 -19
  168. data/vendor/v8/src/cpu.h +2 -0
  169. data/vendor/v8/src/d8-debug.cc +3 -3
  170. data/vendor/v8/src/d8-debug.h +7 -6
  171. data/vendor/v8/src/d8-posix.cc +2 -0
  172. data/vendor/v8/src/d8.cc +22 -12
  173. data/vendor/v8/src/d8.gyp +3 -0
  174. data/vendor/v8/src/d8.js +618 -0
  175. data/vendor/v8/src/data-flow.h +3 -3
  176. data/vendor/v8/src/dateparser.h +4 -2
  177. data/vendor/v8/src/debug-agent.cc +10 -9
  178. data/vendor/v8/src/debug-agent.h +9 -11
  179. data/vendor/v8/src/debug-debugger.js +121 -0
  180. data/vendor/v8/src/debug.cc +331 -227
  181. data/vendor/v8/src/debug.h +248 -219
  182. data/vendor/v8/src/deoptimizer.cc +173 -62
  183. data/vendor/v8/src/deoptimizer.h +119 -19
  184. data/vendor/v8/src/disasm.h +3 -0
  185. data/vendor/v8/src/disassembler.cc +10 -9
  186. data/vendor/v8/src/execution.cc +185 -129
  187. data/vendor/v8/src/execution.h +47 -78
  188. data/vendor/v8/src/extensions/experimental/break-iterator.cc +250 -0
  189. data/vendor/v8/src/extensions/experimental/break-iterator.h +89 -0
  190. data/vendor/v8/src/extensions/experimental/experimental.gyp +2 -0
  191. data/vendor/v8/src/extensions/experimental/i18n-extension.cc +22 -2
  192. data/vendor/v8/src/extensions/externalize-string-extension.cc +2 -2
  193. data/vendor/v8/src/extensions/gc-extension.cc +1 -1
  194. data/vendor/v8/src/factory.cc +261 -154
  195. data/vendor/v8/src/factory.h +162 -158
  196. data/vendor/v8/src/flag-definitions.h +17 -11
  197. data/vendor/v8/src/frame-element.cc +0 -5
  198. data/vendor/v8/src/frame-element.h +9 -13
  199. data/vendor/v8/src/frames-inl.h +7 -0
  200. data/vendor/v8/src/frames.cc +56 -46
  201. data/vendor/v8/src/frames.h +36 -25
  202. data/vendor/v8/src/full-codegen.cc +15 -24
  203. data/vendor/v8/src/full-codegen.h +13 -41
  204. data/vendor/v8/src/func-name-inferrer.cc +7 -6
  205. data/vendor/v8/src/func-name-inferrer.h +1 -1
  206. data/vendor/v8/src/gdb-jit.cc +1 -0
  207. data/vendor/v8/src/global-handles.cc +118 -56
  208. data/vendor/v8/src/global-handles.h +98 -40
  209. data/vendor/v8/src/globals.h +2 -2
  210. data/vendor/v8/src/handles-inl.h +106 -9
  211. data/vendor/v8/src/handles.cc +220 -157
  212. data/vendor/v8/src/handles.h +38 -59
  213. data/vendor/v8/src/hashmap.h +3 -3
  214. data/vendor/v8/src/heap-inl.h +141 -25
  215. data/vendor/v8/src/heap-profiler.cc +117 -63
  216. data/vendor/v8/src/heap-profiler.h +38 -21
  217. data/vendor/v8/src/heap.cc +805 -564
  218. data/vendor/v8/src/heap.h +640 -594
  219. data/vendor/v8/src/hydrogen-instructions.cc +216 -73
  220. data/vendor/v8/src/hydrogen-instructions.h +259 -124
  221. data/vendor/v8/src/hydrogen.cc +996 -1171
  222. data/vendor/v8/src/hydrogen.h +163 -144
  223. data/vendor/v8/src/ia32/assembler-ia32-inl.h +12 -11
  224. data/vendor/v8/src/ia32/assembler-ia32.cc +85 -39
  225. data/vendor/v8/src/ia32/assembler-ia32.h +82 -16
  226. data/vendor/v8/src/ia32/builtins-ia32.cc +64 -58
  227. data/vendor/v8/src/ia32/code-stubs-ia32.cc +248 -324
  228. data/vendor/v8/src/ia32/code-stubs-ia32.h +3 -44
  229. data/vendor/v8/src/ia32/codegen-ia32.cc +217 -165
  230. data/vendor/v8/src/ia32/codegen-ia32.h +3 -0
  231. data/vendor/v8/src/ia32/cpu-ia32.cc +6 -5
  232. data/vendor/v8/src/ia32/debug-ia32.cc +8 -5
  233. data/vendor/v8/src/ia32/deoptimizer-ia32.cc +124 -14
  234. data/vendor/v8/src/ia32/disasm-ia32.cc +85 -62
  235. data/vendor/v8/src/ia32/frames-ia32.h +1 -1
  236. data/vendor/v8/src/ia32/full-codegen-ia32.cc +348 -435
  237. data/vendor/v8/src/ia32/ic-ia32.cc +91 -91
  238. data/vendor/v8/src/ia32/lithium-codegen-ia32.cc +500 -255
  239. data/vendor/v8/src/ia32/lithium-codegen-ia32.h +13 -4
  240. data/vendor/v8/src/ia32/lithium-gap-resolver-ia32.cc +6 -0
  241. data/vendor/v8/src/ia32/lithium-ia32.cc +122 -45
  242. data/vendor/v8/src/ia32/lithium-ia32.h +128 -41
  243. data/vendor/v8/src/ia32/macro-assembler-ia32.cc +109 -84
  244. data/vendor/v8/src/ia32/macro-assembler-ia32.h +18 -9
  245. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.cc +26 -15
  246. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.h +1 -0
  247. data/vendor/v8/src/ia32/register-allocator-ia32.cc +30 -30
  248. data/vendor/v8/src/ia32/simulator-ia32.h +4 -4
  249. data/vendor/v8/src/ia32/stub-cache-ia32.cc +383 -400
  250. data/vendor/v8/src/ia32/virtual-frame-ia32.cc +36 -13
  251. data/vendor/v8/src/ia32/virtual-frame-ia32.h +11 -5
  252. data/vendor/v8/src/ic-inl.h +12 -2
  253. data/vendor/v8/src/ic.cc +304 -221
  254. data/vendor/v8/src/ic.h +115 -58
  255. data/vendor/v8/src/interpreter-irregexp.cc +25 -21
  256. data/vendor/v8/src/interpreter-irregexp.h +2 -1
  257. data/vendor/v8/src/isolate.cc +883 -0
  258. data/vendor/v8/src/isolate.h +1304 -0
  259. data/vendor/v8/src/json.js +10 -10
  260. data/vendor/v8/src/jsregexp.cc +111 -80
  261. data/vendor/v8/src/jsregexp.h +6 -7
  262. data/vendor/v8/src/jump-target-heavy.cc +5 -8
  263. data/vendor/v8/src/jump-target-heavy.h +0 -6
  264. data/vendor/v8/src/jump-target-inl.h +1 -1
  265. data/vendor/v8/src/jump-target-light.cc +3 -3
  266. data/vendor/v8/src/lithium-allocator-inl.h +2 -0
  267. data/vendor/v8/src/lithium-allocator.cc +42 -30
  268. data/vendor/v8/src/lithium-allocator.h +8 -22
  269. data/vendor/v8/src/lithium.cc +1 -0
  270. data/vendor/v8/src/liveedit.cc +141 -99
  271. data/vendor/v8/src/liveedit.h +7 -2
  272. data/vendor/v8/src/liveobjectlist-inl.h +90 -0
  273. data/vendor/v8/src/liveobjectlist.cc +2537 -1
  274. data/vendor/v8/src/liveobjectlist.h +245 -35
  275. data/vendor/v8/src/log-utils.cc +122 -35
  276. data/vendor/v8/src/log-utils.h +33 -36
  277. data/vendor/v8/src/log.cc +299 -241
  278. data/vendor/v8/src/log.h +177 -110
  279. data/vendor/v8/src/mark-compact.cc +612 -470
  280. data/vendor/v8/src/mark-compact.h +153 -80
  281. data/vendor/v8/src/messages.cc +16 -14
  282. data/vendor/v8/src/messages.js +30 -7
  283. data/vendor/v8/src/mips/assembler-mips-inl.h +155 -35
  284. data/vendor/v8/src/mips/assembler-mips.cc +1093 -219
  285. data/vendor/v8/src/mips/assembler-mips.h +552 -153
  286. data/vendor/v8/src/mips/builtins-mips.cc +43 -100
  287. data/vendor/v8/src/mips/code-stubs-mips.cc +752 -0
  288. data/vendor/v8/src/mips/code-stubs-mips.h +511 -0
  289. data/vendor/v8/src/mips/codegen-mips-inl.h +8 -14
  290. data/vendor/v8/src/mips/codegen-mips.cc +672 -896
  291. data/vendor/v8/src/mips/codegen-mips.h +271 -69
  292. data/vendor/v8/src/mips/constants-mips.cc +44 -20
  293. data/vendor/v8/src/mips/constants-mips.h +238 -40
  294. data/vendor/v8/src/mips/cpu-mips.cc +20 -3
  295. data/vendor/v8/src/mips/debug-mips.cc +35 -7
  296. data/vendor/v8/src/mips/deoptimizer-mips.cc +91 -0
  297. data/vendor/v8/src/mips/disasm-mips.cc +329 -93
  298. data/vendor/v8/src/mips/frames-mips.cc +2 -50
  299. data/vendor/v8/src/mips/frames-mips.h +24 -9
  300. data/vendor/v8/src/mips/full-codegen-mips.cc +473 -23
  301. data/vendor/v8/src/mips/ic-mips.cc +81 -45
  302. data/vendor/v8/src/mips/jump-target-mips.cc +11 -106
  303. data/vendor/v8/src/mips/lithium-codegen-mips.h +65 -0
  304. data/vendor/v8/src/mips/lithium-mips.h +304 -0
  305. data/vendor/v8/src/mips/macro-assembler-mips.cc +2391 -390
  306. data/vendor/v8/src/mips/macro-assembler-mips.h +718 -121
  307. data/vendor/v8/src/mips/regexp-macro-assembler-mips.cc +478 -0
  308. data/vendor/v8/src/mips/regexp-macro-assembler-mips.h +250 -0
  309. data/vendor/v8/src/mips/register-allocator-mips-inl.h +0 -3
  310. data/vendor/v8/src/mips/register-allocator-mips.h +3 -2
  311. data/vendor/v8/src/mips/simulator-mips.cc +1009 -221
  312. data/vendor/v8/src/mips/simulator-mips.h +119 -36
  313. data/vendor/v8/src/mips/stub-cache-mips.cc +331 -148
  314. data/vendor/v8/src/mips/{fast-codegen-mips.cc → virtual-frame-mips-inl.h} +11 -30
  315. data/vendor/v8/src/mips/virtual-frame-mips.cc +137 -149
  316. data/vendor/v8/src/mips/virtual-frame-mips.h +294 -312
  317. data/vendor/v8/src/mirror-debugger.js +9 -8
  318. data/vendor/v8/src/mksnapshot.cc +2 -2
  319. data/vendor/v8/src/objects-debug.cc +16 -16
  320. data/vendor/v8/src/objects-inl.h +421 -195
  321. data/vendor/v8/src/objects-printer.cc +7 -7
  322. data/vendor/v8/src/objects-visiting.cc +1 -1
  323. data/vendor/v8/src/objects-visiting.h +33 -12
  324. data/vendor/v8/src/objects.cc +935 -658
  325. data/vendor/v8/src/objects.h +234 -139
  326. data/vendor/v8/src/parser.cc +484 -439
  327. data/vendor/v8/src/parser.h +35 -14
  328. data/vendor/v8/src/platform-cygwin.cc +173 -107
  329. data/vendor/v8/src/platform-freebsd.cc +224 -72
  330. data/vendor/v8/src/platform-linux.cc +234 -95
  331. data/vendor/v8/src/platform-macos.cc +215 -82
  332. data/vendor/v8/src/platform-nullos.cc +9 -3
  333. data/vendor/v8/src/platform-openbsd.cc +22 -7
  334. data/vendor/v8/src/platform-posix.cc +30 -5
  335. data/vendor/v8/src/platform-solaris.cc +120 -38
  336. data/vendor/v8/src/platform-tls-mac.h +62 -0
  337. data/vendor/v8/src/platform-tls-win32.h +62 -0
  338. data/vendor/v8/src/platform-tls.h +50 -0
  339. data/vendor/v8/src/platform-win32.cc +195 -97
  340. data/vendor/v8/src/platform.h +72 -15
  341. data/vendor/v8/src/preparse-data.cc +2 -0
  342. data/vendor/v8/src/preparser-api.cc +8 -2
  343. data/vendor/v8/src/preparser.cc +1 -1
  344. data/vendor/v8/src/prettyprinter.cc +43 -52
  345. data/vendor/v8/src/prettyprinter.h +1 -1
  346. data/vendor/v8/src/profile-generator-inl.h +0 -28
  347. data/vendor/v8/src/profile-generator.cc +942 -685
  348. data/vendor/v8/src/profile-generator.h +210 -176
  349. data/vendor/v8/src/property.cc +6 -0
  350. data/vendor/v8/src/property.h +14 -3
  351. data/vendor/v8/src/regexp-macro-assembler-irregexp.cc +1 -1
  352. data/vendor/v8/src/regexp-macro-assembler.cc +28 -19
  353. data/vendor/v8/src/regexp-macro-assembler.h +11 -6
  354. data/vendor/v8/src/regexp-stack.cc +18 -10
  355. data/vendor/v8/src/regexp-stack.h +45 -21
  356. data/vendor/v8/src/regexp.js +3 -3
  357. data/vendor/v8/src/register-allocator-inl.h +3 -3
  358. data/vendor/v8/src/register-allocator.cc +1 -7
  359. data/vendor/v8/src/register-allocator.h +5 -15
  360. data/vendor/v8/src/rewriter.cc +2 -1
  361. data/vendor/v8/src/runtime-profiler.cc +158 -128
  362. data/vendor/v8/src/runtime-profiler.h +131 -15
  363. data/vendor/v8/src/runtime.cc +2409 -1692
  364. data/vendor/v8/src/runtime.h +93 -17
  365. data/vendor/v8/src/safepoint-table.cc +3 -0
  366. data/vendor/v8/src/safepoint-table.h +9 -3
  367. data/vendor/v8/src/scanner-base.cc +21 -28
  368. data/vendor/v8/src/scanner-base.h +22 -11
  369. data/vendor/v8/src/scanner.cc +3 -5
  370. data/vendor/v8/src/scanner.h +4 -2
  371. data/vendor/v8/src/scopeinfo.cc +11 -16
  372. data/vendor/v8/src/scopeinfo.h +26 -15
  373. data/vendor/v8/src/scopes.cc +67 -37
  374. data/vendor/v8/src/scopes.h +26 -12
  375. data/vendor/v8/src/serialize.cc +193 -154
  376. data/vendor/v8/src/serialize.h +41 -36
  377. data/vendor/v8/src/small-pointer-list.h +163 -0
  378. data/vendor/v8/src/snapshot-common.cc +1 -1
  379. data/vendor/v8/src/snapshot.h +3 -1
  380. data/vendor/v8/src/spaces-inl.h +30 -25
  381. data/vendor/v8/src/spaces.cc +263 -370
  382. data/vendor/v8/src/spaces.h +178 -166
  383. data/vendor/v8/src/string-search.cc +4 -3
  384. data/vendor/v8/src/string-search.h +21 -20
  385. data/vendor/v8/src/string-stream.cc +32 -24
  386. data/vendor/v8/src/string.js +7 -7
  387. data/vendor/v8/src/stub-cache.cc +324 -248
  388. data/vendor/v8/src/stub-cache.h +181 -155
  389. data/vendor/v8/src/token.cc +3 -3
  390. data/vendor/v8/src/token.h +3 -3
  391. data/vendor/v8/src/top.cc +218 -390
  392. data/vendor/v8/src/type-info.cc +98 -32
  393. data/vendor/v8/src/type-info.h +10 -3
  394. data/vendor/v8/src/unicode.cc +1 -1
  395. data/vendor/v8/src/unicode.h +1 -1
  396. data/vendor/v8/src/utils.h +3 -0
  397. data/vendor/v8/src/v8-counters.cc +18 -11
  398. data/vendor/v8/src/v8-counters.h +34 -13
  399. data/vendor/v8/src/v8.cc +66 -121
  400. data/vendor/v8/src/v8.h +7 -4
  401. data/vendor/v8/src/v8globals.h +18 -12
  402. data/vendor/v8/src/{memory.h → v8memory.h} +0 -0
  403. data/vendor/v8/src/v8natives.js +59 -18
  404. data/vendor/v8/src/v8threads.cc +127 -114
  405. data/vendor/v8/src/v8threads.h +42 -35
  406. data/vendor/v8/src/v8utils.h +2 -39
  407. data/vendor/v8/src/variables.h +1 -1
  408. data/vendor/v8/src/version.cc +26 -5
  409. data/vendor/v8/src/version.h +4 -0
  410. data/vendor/v8/src/virtual-frame-heavy-inl.h +2 -4
  411. data/vendor/v8/src/virtual-frame-light-inl.h +5 -4
  412. data/vendor/v8/src/vm-state-inl.h +21 -17
  413. data/vendor/v8/src/vm-state.h +7 -5
  414. data/vendor/v8/src/win32-headers.h +1 -0
  415. data/vendor/v8/src/x64/assembler-x64-inl.h +12 -11
  416. data/vendor/v8/src/x64/assembler-x64.cc +80 -40
  417. data/vendor/v8/src/x64/assembler-x64.h +67 -17
  418. data/vendor/v8/src/x64/builtins-x64.cc +34 -33
  419. data/vendor/v8/src/x64/code-stubs-x64.cc +636 -377
  420. data/vendor/v8/src/x64/code-stubs-x64.h +14 -48
  421. data/vendor/v8/src/x64/codegen-x64-inl.h +1 -1
  422. data/vendor/v8/src/x64/codegen-x64.cc +158 -136
  423. data/vendor/v8/src/x64/codegen-x64.h +4 -1
  424. data/vendor/v8/src/x64/cpu-x64.cc +7 -5
  425. data/vendor/v8/src/x64/debug-x64.cc +8 -6
  426. data/vendor/v8/src/x64/deoptimizer-x64.cc +195 -20
  427. data/vendor/v8/src/x64/disasm-x64.cc +42 -23
  428. data/vendor/v8/src/x64/frames-x64.cc +1 -1
  429. data/vendor/v8/src/x64/frames-x64.h +2 -2
  430. data/vendor/v8/src/x64/full-codegen-x64.cc +780 -218
  431. data/vendor/v8/src/x64/ic-x64.cc +77 -79
  432. data/vendor/v8/src/x64/jump-target-x64.cc +1 -1
  433. data/vendor/v8/src/x64/lithium-codegen-x64.cc +698 -181
  434. data/vendor/v8/src/x64/lithium-codegen-x64.h +31 -6
  435. data/vendor/v8/src/x64/lithium-x64.cc +136 -54
  436. data/vendor/v8/src/x64/lithium-x64.h +142 -51
  437. data/vendor/v8/src/x64/macro-assembler-x64.cc +456 -187
  438. data/vendor/v8/src/x64/macro-assembler-x64.h +166 -34
  439. data/vendor/v8/src/x64/regexp-macro-assembler-x64.cc +44 -28
  440. data/vendor/v8/src/x64/regexp-macro-assembler-x64.h +8 -4
  441. data/vendor/v8/src/x64/register-allocator-x64-inl.h +3 -3
  442. data/vendor/v8/src/x64/register-allocator-x64.cc +12 -8
  443. data/vendor/v8/src/x64/simulator-x64.h +5 -5
  444. data/vendor/v8/src/x64/stub-cache-x64.cc +299 -344
  445. data/vendor/v8/src/x64/virtual-frame-x64.cc +37 -13
  446. data/vendor/v8/src/x64/virtual-frame-x64.h +13 -7
  447. data/vendor/v8/src/zone-inl.h +49 -3
  448. data/vendor/v8/src/zone.cc +42 -41
  449. data/vendor/v8/src/zone.h +37 -34
  450. data/vendor/v8/test/benchmarks/testcfg.py +100 -0
  451. data/vendor/v8/test/cctest/SConscript +5 -4
  452. data/vendor/v8/test/cctest/cctest.h +3 -2
  453. data/vendor/v8/test/cctest/cctest.status +6 -11
  454. data/vendor/v8/test/cctest/test-accessors.cc +3 -3
  455. data/vendor/v8/test/cctest/test-alloc.cc +39 -33
  456. data/vendor/v8/test/cctest/test-api.cc +1092 -205
  457. data/vendor/v8/test/cctest/test-assembler-arm.cc +39 -25
  458. data/vendor/v8/test/cctest/test-assembler-ia32.cc +36 -37
  459. data/vendor/v8/test/cctest/test-assembler-mips.cc +1098 -40
  460. data/vendor/v8/test/cctest/test-assembler-x64.cc +32 -25
  461. data/vendor/v8/test/cctest/test-ast.cc +1 -0
  462. data/vendor/v8/test/cctest/test-circular-queue.cc +8 -5
  463. data/vendor/v8/test/cctest/test-compiler.cc +24 -24
  464. data/vendor/v8/test/cctest/test-cpu-profiler.cc +140 -5
  465. data/vendor/v8/test/cctest/test-dataflow.cc +1 -0
  466. data/vendor/v8/test/cctest/test-debug.cc +136 -77
  467. data/vendor/v8/test/cctest/test-decls.cc +1 -1
  468. data/vendor/v8/test/cctest/test-deoptimization.cc +25 -24
  469. data/vendor/v8/test/cctest/test-disasm-arm.cc +9 -4
  470. data/vendor/v8/test/cctest/test-disasm-ia32.cc +10 -8
  471. data/vendor/v8/test/cctest/test-func-name-inference.cc +10 -4
  472. data/vendor/v8/test/cctest/test-heap-profiler.cc +226 -164
  473. data/vendor/v8/test/cctest/test-heap.cc +240 -217
  474. data/vendor/v8/test/cctest/test-liveedit.cc +1 -0
  475. data/vendor/v8/test/cctest/test-log-stack-tracer.cc +18 -20
  476. data/vendor/v8/test/cctest/test-log.cc +114 -108
  477. data/vendor/v8/test/cctest/test-macro-assembler-x64.cc +247 -177
  478. data/vendor/v8/test/cctest/test-mark-compact.cc +129 -90
  479. data/vendor/v8/test/cctest/test-parsing.cc +15 -14
  480. data/vendor/v8/test/cctest/test-platform-linux.cc +1 -0
  481. data/vendor/v8/test/cctest/test-platform-tls.cc +66 -0
  482. data/vendor/v8/test/cctest/test-platform-win32.cc +1 -0
  483. data/vendor/v8/test/cctest/test-profile-generator.cc +1 -1
  484. data/vendor/v8/test/cctest/test-regexp.cc +53 -41
  485. data/vendor/v8/test/cctest/test-reloc-info.cc +18 -11
  486. data/vendor/v8/test/cctest/test-serialize.cc +44 -43
  487. data/vendor/v8/test/cctest/test-sockets.cc +8 -3
  488. data/vendor/v8/test/cctest/test-spaces.cc +47 -29
  489. data/vendor/v8/test/cctest/test-strings.cc +20 -20
  490. data/vendor/v8/test/cctest/test-thread-termination.cc +8 -3
  491. data/vendor/v8/test/cctest/test-threads.cc +5 -3
  492. data/vendor/v8/test/cctest/test-utils.cc +5 -4
  493. data/vendor/v8/test/cctest/testcfg.py +7 -3
  494. data/vendor/v8/test/es5conform/es5conform.status +2 -77
  495. data/vendor/v8/test/es5conform/testcfg.py +1 -1
  496. data/vendor/v8/test/message/testcfg.py +1 -1
  497. data/vendor/v8/test/mjsunit/accessors-on-global-object.js +3 -3
  498. data/vendor/v8/test/mjsunit/array-concat.js +43 -1
  499. data/vendor/v8/test/mjsunit/array-join.js +25 -0
  500. data/vendor/v8/test/mjsunit/bitops-info.js +7 -1
  501. data/vendor/v8/test/mjsunit/compiler/array-length.js +2 -2
  502. data/vendor/v8/test/mjsunit/compiler/global-accessors.js +47 -0
  503. data/vendor/v8/test/mjsunit/compiler/pic.js +1 -1
  504. data/vendor/v8/test/mjsunit/compiler/regress-loadfield.js +65 -0
  505. data/vendor/v8/test/mjsunit/math-sqrt.js +5 -1
  506. data/vendor/v8/test/mjsunit/mjsunit.js +59 -8
  507. data/vendor/v8/test/mjsunit/mjsunit.status +0 -12
  508. data/vendor/v8/test/mjsunit/mul-exhaustive.js +129 -11
  509. data/vendor/v8/test/mjsunit/negate-zero.js +1 -1
  510. data/vendor/v8/test/mjsunit/object-freeze.js +5 -13
  511. data/vendor/v8/test/mjsunit/object-prevent-extensions.js +9 -50
  512. data/vendor/v8/test/mjsunit/object-seal.js +4 -13
  513. data/vendor/v8/test/mjsunit/override-eval-with-non-function.js +36 -0
  514. data/vendor/v8/test/mjsunit/regress/regress-1145.js +54 -0
  515. data/vendor/v8/test/mjsunit/regress/regress-1172-bis.js +37 -0
  516. data/vendor/v8/test/mjsunit/regress/regress-1181.js +54 -0
  517. data/vendor/v8/test/mjsunit/regress/regress-1207.js +35 -0
  518. data/vendor/v8/test/mjsunit/regress/regress-1209.js +34 -0
  519. data/vendor/v8/test/mjsunit/regress/regress-1210.js +48 -0
  520. data/vendor/v8/test/mjsunit/regress/regress-1213.js +43 -0
  521. data/vendor/v8/test/mjsunit/regress/regress-1218.js +29 -0
  522. data/vendor/v8/test/mjsunit/regress/regress-1229.js +79 -0
  523. data/vendor/v8/test/mjsunit/regress/regress-1233.js +47 -0
  524. data/vendor/v8/test/mjsunit/regress/regress-1236.js +34 -0
  525. data/vendor/v8/test/mjsunit/regress/regress-1237.js +36 -0
  526. data/vendor/v8/test/mjsunit/regress/regress-1240.js +39 -0
  527. data/vendor/v8/test/mjsunit/regress/regress-1257.js +58 -0
  528. data/vendor/v8/test/mjsunit/regress/regress-1278.js +69 -0
  529. data/vendor/v8/test/mjsunit/regress/regress-create-exception.js +1 -0
  530. data/vendor/v8/test/mjsunit/regress/regress-lazy-deopt-reloc.js +52 -0
  531. data/vendor/v8/test/mjsunit/sin-cos.js +15 -10
  532. data/vendor/v8/test/mjsunit/smi-negative-zero.js +2 -2
  533. data/vendor/v8/test/mjsunit/str-to-num.js +1 -1
  534. data/vendor/v8/test/mjsunit/strict-mode.js +435 -0
  535. data/vendor/v8/test/mjsunit/testcfg.py +23 -6
  536. data/vendor/v8/test/mozilla/mozilla.status +0 -2
  537. data/vendor/v8/test/mozilla/testcfg.py +1 -1
  538. data/vendor/v8/test/preparser/empty.js +28 -0
  539. data/vendor/v8/test/preparser/functions-only.js +38 -0
  540. data/vendor/v8/test/preparser/non-alphanum.js +34 -0
  541. data/vendor/v8/test/preparser/symbols-only.js +49 -0
  542. data/vendor/v8/test/preparser/testcfg.py +90 -0
  543. data/vendor/v8/test/sputnik/testcfg.py +1 -1
  544. data/vendor/v8/test/test262/README +16 -0
  545. data/vendor/v8/test/test262/harness-adapt.js +80 -0
  546. data/vendor/v8/test/test262/test262.status +1506 -0
  547. data/vendor/v8/test/test262/testcfg.py +123 -0
  548. data/vendor/v8/tools/freebsd-tick-processor +10 -0
  549. data/vendor/v8/tools/gyp/v8.gyp +8 -33
  550. data/vendor/v8/tools/linux-tick-processor +5 -3
  551. data/vendor/v8/tools/test.py +37 -14
  552. data/vendor/v8/tools/tickprocessor.js +22 -8
  553. data/vendor/v8/tools/visual_studio/v8_base.vcproj +13 -1
  554. data/vendor/v8/tools/visual_studio/v8_base_arm.vcproj +5 -1
  555. data/vendor/v8/tools/visual_studio/v8_base_x64.vcproj +5 -1
  556. data/vendor/v8/tools/visual_studio/x64.vsprops +1 -0
  557. metadata +1495 -1341
  558. data/ext/extconf.rb +0 -22
  559. data/ext/mustang.cpp +0 -58
  560. data/vendor/v8/src/top.h +0 -608
@@ -1,4 +1,4 @@
- // Copyright 2009 the V8 project authors. All rights reserved.
+ // Copyright 2010 the V8 project authors. All rights reserved.
  // Redistribution and use in source and binary forms, with or without
  // modification, are permitted provided that the following conditions are
  // met:
@@ -1,4 +1,4 @@
- // Copyright 2009 the V8 project authors. All rights reserved.
+ // Copyright 2010 the V8 project authors. All rights reserved.
  // Redistribution and use in source and binary forms, with or without
  // modification, are permitted provided that the following conditions are
  // met:
@@ -99,7 +99,7 @@ class JavaScriptFrameConstants : public AllStatic {
  public:
  // FP-relative.
  static const int kLocal0Offset = StandardFrameConstants::kExpressionsOffset;
- static const int kSavedRegistersOffset = +2 * kPointerSize;
+ static const int kLastParameterOffset = +2 * kPointerSize;
  static const int kFunctionOffset = StandardFrameConstants::kMarkerOffset;

  // Caller SP-relative.
@@ -198,52 +198,59 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
  // function, receiver address, parameter count.
  // The stub will rewrite receiver and parameter count if the previous
  // stack frame was an arguments adapter frame.
- ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
+ ArgumentsAccessStub stub(
+ is_strict_mode() ? ArgumentsAccessStub::NEW_STRICT
+ : ArgumentsAccessStub::NEW_NON_STRICT);
  __ CallStub(&stub);
- // Store new arguments object in both "arguments" and ".arguments" slots.
- __ movq(rcx, rax);
- Move(arguments->AsSlot(), rax, rbx, rdx);
- Slot* dot_arguments_slot = scope()->arguments_shadow()->AsSlot();
- Move(dot_arguments_slot, rcx, rbx, rdx);
- }

- { Comment cmnt(masm_, "[ Declarations");
- // For named function expressions, declare the function name as a
- // constant.
- if (scope()->is_function_scope() && scope()->function() != NULL) {
- EmitDeclaration(scope()->function(), Variable::CONST, NULL);
- }
- // Visit all the explicit declarations unless there is an illegal
- // redeclaration.
- if (scope()->HasIllegalRedeclaration()) {
- scope()->VisitIllegalRedeclaration(this);
- } else {
- VisitDeclarations(scope()->declarations());
+ Variable* arguments_shadow = scope()->arguments_shadow();
+ if (arguments_shadow != NULL) {
+ // Store new arguments object in both "arguments" and ".arguments" slots.
+ __ movq(rcx, rax);
+ Move(arguments_shadow->AsSlot(), rcx, rbx, rdx);
  }
+ Move(arguments->AsSlot(), rax, rbx, rdx);
  }

  if (FLAG_trace) {
  __ CallRuntime(Runtime::kTraceEnter, 0);
  }

- { Comment cmnt(masm_, "[ Stack check");
- PrepareForBailout(info->function(), NO_REGISTERS);
- NearLabel ok;
- __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
- __ j(above_equal, &ok);
- StackCheckStub stub;
- __ CallStub(&stub);
- __ bind(&ok);
- }
+ // Visit the declarations and body unless there is an illegal
+ // redeclaration.
+ if (scope()->HasIllegalRedeclaration()) {
+ Comment cmnt(masm_, "[ Declarations");
+ scope()->VisitIllegalRedeclaration(this);
+ } else {
+ { Comment cmnt(masm_, "[ Declarations");
+ // For named function expressions, declare the function name as a
+ // constant.
+ if (scope()->is_function_scope() && scope()->function() != NULL) {
+ EmitDeclaration(scope()->function(), Variable::CONST, NULL);
+ }
+ VisitDeclarations(scope()->declarations());
+ }

- { Comment cmnt(masm_, "[ Body");
- ASSERT(loop_depth() == 0);
- VisitStatements(function()->body());
- ASSERT(loop_depth() == 0);
+ { Comment cmnt(masm_, "[ Stack check");
+ PrepareForBailout(info->function(), NO_REGISTERS);
+ NearLabel ok;
+ __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
+ __ j(above_equal, &ok);
+ StackCheckStub stub;
+ __ CallStub(&stub);
+ __ bind(&ok);
+ }
+
+ { Comment cmnt(masm_, "[ Body");
+ ASSERT(loop_depth() == 0);
+ VisitStatements(function()->body());
+ ASSERT(loop_depth() == 0);
+ }
  }

+ // Always emit a 'return undefined' in case control fell off the end of
+ // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
- // Emit a 'return undefined' in case control fell off the end of the body.
  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
  EmitReturnSequence();
  }
@@ -267,6 +274,13 @@ void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
  // the deoptimization input data found in the optimized code.
  RecordStackCheck(stmt->OsrEntryId());

+ // Loop stack checks can be patched to perform on-stack replacement. In
+ // order to decide whether or not to perform OSR we embed the loop depth
+ // in a test instruction after the call so we can extract it from the OSR
+ // builtin.
+ ASSERT(loop_depth() > 0);
+ __ testl(rax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));
+
  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
@@ -318,13 +332,6 @@ void FullCodeGenerator::EmitReturnSequence() {
  }


- FullCodeGenerator::ConstantOperand FullCodeGenerator::GetConstantOperand(
- Token::Value op, Expression* left, Expression* right) {
- ASSERT(ShouldInlineSmiCase(op));
- return kNoConstants;
- }
-
-
  void FullCodeGenerator::EffectContext::Plug(Slot* slot) const {
  }

@@ -474,10 +481,10 @@ void FullCodeGenerator::AccumulatorValueContext::Plug(
  Label* materialize_false) const {
  NearLabel done;
  __ bind(materialize_true);
- __ Move(result_register(), Factory::true_value());
+ __ Move(result_register(), isolate()->factory()->true_value());
  __ jmp(&done);
  __ bind(materialize_false);
- __ Move(result_register(), Factory::false_value());
+ __ Move(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
  }

@@ -487,10 +494,10 @@ void FullCodeGenerator::StackValueContext::Plug(
  Label* materialize_false) const {
  NearLabel done;
  __ bind(materialize_true);
- __ Push(Factory::true_value());
+ __ Push(isolate()->factory()->true_value());
  __ jmp(&done);
  __ bind(materialize_false);
- __ Push(Factory::false_value());
+ __ Push(isolate()->factory()->false_value());
  __ bind(&done);
  }

@@ -543,8 +550,8 @@ void FullCodeGenerator::DoTest(Label* if_true,
  __ j(equal, if_true);
  __ CompareRoot(result_register(), Heap::kFalseValueRootIndex);
  __ j(equal, if_false);
- ASSERT_EQ(0, kSmiTag);
- __ SmiCompare(result_register(), Smi::FromInt(0));
+ STATIC_ASSERT(kSmiTag == 0);
+ __ Cmp(result_register(), Smi::FromInt(0));
  __ j(equal, if_false);
  Condition is_smi = masm_->CheckSmi(result_register());
  __ j(is_smi, if_true);
@@ -733,7 +740,9 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
  prop->key()->AsLiteral()->handle()->IsSmi());
  __ Move(rcx, prop->key()->AsLiteral()->handle());

- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic = is_strict_mode()
+ ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
+ : isolate()->builtins()->KeyedStoreIC_Initialize();
  EmitCallIC(ic, RelocInfo::CODE_TARGET);
  }
  }
@@ -750,7 +759,8 @@ void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  __ push(rsi); // The context is the first argument.
  __ Push(pairs);
  __ Push(Smi::FromInt(is_eval() ? 1 : 0));
- __ CallRuntime(Runtime::kDeclareGlobals, 3);
+ __ Push(Smi::FromInt(strict_mode_flag()));
+ __ CallRuntime(Runtime::kDeclareGlobals, 4);
  // Return value is ignored.
  }

@@ -829,6 +839,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ Case body");
  CaseClause* clause = clauses->at(i);
  __ bind(clause->body_target()->entry_label());
+ PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
  VisitStatements(clause->statements());
  }

@@ -851,7 +862,9 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  VisitForAccumulatorValue(stmt->enumerable());
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
  __ j(equal, &exit);
- __ CompareRoot(rax, Heap::kNullValueRootIndex);
+ Register null_value = rdi;
+ __ LoadRoot(null_value, Heap::kNullValueRootIndex);
+ __ cmpq(rax, null_value);
  __ j(equal, &exit);

  // Convert the object to a JS object.
@@ -865,12 +878,61 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  __ bind(&done_convert);
  __ push(rax);

- // BUG(867): Check cache validity in generated code. This is a fast
- // case for the JSObject::IsSimpleEnum cache validity checks. If we
- // cannot guarantee cache validity, call the runtime system to check
- // cache validity or get the property names in a fixed array.
+ // Check cache validity in generated code. This is a fast case for
+ // the JSObject::IsSimpleEnum cache validity checks. If we cannot
+ // guarantee cache validity, call the runtime system to check cache
+ // validity or get the property names in a fixed array.
+ Label next, call_runtime;
+ Register empty_fixed_array_value = r8;
+ __ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
+ Register empty_descriptor_array_value = r9;
+ __ LoadRoot(empty_descriptor_array_value,
+ Heap::kEmptyDescriptorArrayRootIndex);
+ __ movq(rcx, rax);
+ __ bind(&next);
+
+ // Check that there are no elements. Register rcx contains the
+ // current JS object we've reached through the prototype chain.
+ __ cmpq(empty_fixed_array_value,
+ FieldOperand(rcx, JSObject::kElementsOffset));
+ __ j(not_equal, &call_runtime);
+
+ // Check that instance descriptors are not empty so that we can
+ // check for an enum cache. Leave the map in rbx for the subsequent
+ // prototype load.
+ __ movq(rbx, FieldOperand(rcx, HeapObject::kMapOffset));
+ __ movq(rdx, FieldOperand(rbx, Map::kInstanceDescriptorsOffset));
+ __ cmpq(rdx, empty_descriptor_array_value);
+ __ j(equal, &call_runtime);
+
+ // Check that there is an enum cache in the non-empty instance
+ // descriptors (rdx). This is the case if the next enumeration
+ // index field does not contain a smi.
+ __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumerationIndexOffset));
+ __ JumpIfSmi(rdx, &call_runtime);
+
+ // For all objects but the receiver, check that the cache is empty.
+ NearLabel check_prototype;
+ __ cmpq(rcx, rax);
+ __ j(equal, &check_prototype);
+ __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumCacheBridgeCacheOffset));
+ __ cmpq(rdx, empty_fixed_array_value);
+ __ j(not_equal, &call_runtime);
+
+ // Load the prototype from the map and loop if non-null.
+ __ bind(&check_prototype);
+ __ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
+ __ cmpq(rcx, null_value);
+ __ j(not_equal, &next);
+
+ // The enum cache is valid. Load the map of the object being
+ // iterated over and use the cache for the iteration.
+ NearLabel use_cache;
+ __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
+ __ jmp(&use_cache);

  // Get the set of properties to enumerate.
+ __ bind(&call_runtime);
  __ push(rax); // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

@@ -883,6 +945,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  __ j(not_equal, &fixed_array);

  // We got a map in register rax. Get the enumeration cache from it.
+ __ bind(&use_cache);
  __ movq(rcx, FieldOperand(rax, Map::kInstanceDescriptorsOffset));
  __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumerationIndexOffset));
  __ movq(rdx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
@@ -934,7 +997,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  __ push(rcx); // Enumerable.
  __ push(rbx); // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
- __ SmiCompare(rax, Smi::FromInt(0));
+ __ Cmp(rax, Smi::FromInt(0));
  __ j(equal, loop_statement.continue_target());
  __ movq(rbx, rax);

@@ -971,17 +1034,25 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
  bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
- // space for nested functions that don't need literals cloning.
- if (scope()->is_function_scope() &&
- info->num_literals() == 0 &&
- !pretenure) {
- FastNewClosureStub stub;
+ // space for nested functions that don't need literals cloning. If
+ // we're running with the --always-opt or the --prepare-always-opt
+ // flag, we need to use the runtime function so that the new function
+ // we are creating here gets a chance to have its code optimized and
+ // doesn't just get a copy of the existing unoptimized code.
+ if (!FLAG_always_opt &&
+ !FLAG_prepare_always_opt &&
+ !pretenure &&
+ scope()->is_function_scope() &&
+ info->num_literals() == 0) {
+ FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode);
  __ Push(info);
  __ CallStub(&stub);
  } else {
  __ push(rsi);
  __ Push(info);
- __ Push(pretenure ? Factory::true_value() : Factory::false_value());
+ __ Push(pretenure
+ ? isolate()->factory()->true_value()
+ : isolate()->factory()->false_value());
  __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(rax);
@@ -1050,7 +1121,7 @@ void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
  // load IC call.
  __ movq(rax, GlobalObjectOperand());
  __ Move(rcx, slot->var()->name());
- Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
  ? RelocInfo::CODE_TARGET
  : RelocInfo::CODE_TARGET_CONTEXT;
@@ -1082,8 +1153,11 @@ MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
  // Check that last extension is NULL.
  __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  __ j(not_equal, slow);
- __ movq(temp, ContextOperand(context, Context::FCONTEXT_INDEX));
- return ContextOperand(temp, slot->index());
+
+ // This function is used only for loads, not stores, so it's safe to
+ // return an rsi-based operand (the write barrier cannot be allowed to
+ // destroy the rsi register).
+ return ContextOperand(context, slot->index());
  }


@@ -1130,7 +1204,8 @@ void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
  ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
  slow));
  __ Move(rax, key_literal->handle());
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+ Handle<Code> ic =
+ isolate()->builtins()->KeyedLoadIC_Initialize();
  EmitCallIC(ic, RelocInfo::CODE_TARGET);
  __ jmp(done);
  }
@@ -1153,7 +1228,7 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var) {
  // object on the stack.
  __ Move(rcx, var->name());
  __ movq(rax, GlobalObjectOperand());
- Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
  context()->Plug(rax);

@@ -1216,7 +1291,7 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var) {
  __ Move(rax, key_literal->handle());

  // Do a keyed property load.
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+ Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  EmitCallIC(ic, RelocInfo::CODE_TARGET);
  context()->Plug(rax);
  }
@@ -1283,7 +1358,13 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  __ push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
  __ Push(Smi::FromInt(expr->literal_index()));
  __ Push(expr->constant_properties());
- __ Push(Smi::FromInt(expr->fast_elements() ? 1 : 0));
+ int flags = expr->fast_elements()
+ ? ObjectLiteral::kFastElements
+ : ObjectLiteral::kNoFlags;
+ flags |= expr->has_function()
+ ? ObjectLiteral::kHasFunction
+ : ObjectLiteral::kNoFlags;
+ __ Push(Smi::FromInt(flags));
  if (expr->depth() > 1) {
  __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
@@ -1321,7 +1402,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  __ Move(rcx, key->handle());
  __ movq(rdx, Operand(rsp, 0));
  if (property->emit_store()) {
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+ Handle<Code> ic = isolate()->builtins()->StoreIC_Initialize();
  EmitCallIC(ic, RelocInfo::CODE_TARGET);
  PrepareForBailoutForId(key->id(), NO_REGISTERS);
  }
@@ -1333,7 +1414,8 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  VisitForStackValue(key);
  VisitForStackValue(value);
  if (property->emit_store()) {
- __ CallRuntime(Runtime::kSetProperty, 3);
+ __ Push(Smi::FromInt(NONE)); // PropertyAttributes
+ __ CallRuntime(Runtime::kSetProperty, 4);
  } else {
  __ Drop(3);
  }
@@ -1351,6 +1433,12 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  }
  }

+ if (expr->has_function()) {
+ ASSERT(result_saved);
+ __ push(Operand(rsp, 0));
+ __ CallRuntime(Runtime::kToFastProperties, 1);
+ }
+
  if (result_saved) {
  context()->PlugTOS();
  } else {
@@ -1369,11 +1457,12 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
  __ Push(Smi::FromInt(expr->literal_index()));
  __ Push(expr->constant_elements());
- if (expr->constant_elements()->map() == Heap::fixed_cow_array_map()) {
+ if (expr->constant_elements()->map() ==
+ isolate()->heap()->fixed_cow_array_map()) {
  FastCloneShallowArrayStub stub(
  FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
  __ CallStub(&stub);
- __ IncrementCounter(&Counters::cow_arrays_created_stub, 1);
+ __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
  } else if (expr->depth() > 1) {
  __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
@@ -1487,36 +1576,29 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  }
  }

+ // For compound assignments we need another deoptimization point after the
+ // variable/property load.
  if (expr->is_compound()) {
  { AccumulatorValueContext context(this);
  switch (assign_type) {
  case VARIABLE:
  EmitVariableLoad(expr->target()->AsVariableProxy()->var());
+ PrepareForBailout(expr->target(), TOS_REG);
  break;
  case NAMED_PROPERTY:
  EmitNamedPropertyLoad(property);
+ PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
  break;
  case KEYED_PROPERTY:
  EmitKeyedPropertyLoad(property);
+ PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
  break;
  }
  }

- // For property compound assignments we need another deoptimization
- // point after the property load.
- if (property != NULL) {
- PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
- }
-
  Token::Value op = expr->binary_op();
- ConstantOperand constant = ShouldInlineSmiCase(op)
- ? GetConstantOperand(op, expr->target(), expr->value())
- : kNoConstants;
- ASSERT(constant == kRightConstant || constant == kNoConstants);
- if (constant == kNoConstants) {
- __ push(rax); // Left operand goes on the stack.
- VisitForAccumulatorValue(expr->value());
- }
+ __ push(rax); // Left operand goes on the stack.
+ VisitForAccumulatorValue(expr->value());

  OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
  ? OVERWRITE_RIGHT
@@ -1528,8 +1610,7 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  op,
  mode,
  expr->target(),
- expr->value(),
- constant);
+ expr->value());
  } else {
  EmitBinaryOp(op, mode);
  }
@@ -1564,14 +1645,14 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  __ Move(rcx, key->handle());
- Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  EmitCallIC(ic, RelocInfo::CODE_TARGET);
  }


  void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+ Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  EmitCallIC(ic, RelocInfo::CODE_TARGET);
  }

@@ -1580,10 +1661,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
  Token::Value op,
  OverwriteMode mode,
  Expression* left,
- Expression* right,
- ConstantOperand constant) {
- ASSERT(constant == kNoConstants); // Only handled case.
-
+ Expression* right) {
  // Do combined smi check of the operands. Left operand is on the
  // stack (popped into rdx). Right operand is in rax but moved into
  // rcx to make the shifts easier.
@@ -1680,7 +1758,9 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
  __ movq(rdx, rax);
  __ pop(rax); // Restore value.
  __ Move(rcx, prop->key()->AsLiteral()->handle());
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+ Handle<Code> ic = is_strict_mode()
+ ? isolate()->builtins()->StoreIC_Initialize_Strict()
+ : isolate()->builtins()->StoreIC_Initialize();
  EmitCallIC(ic, RelocInfo::CODE_TARGET);
  break;
  }
@@ -1701,7 +1781,9 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
  __ pop(rdx);
  }
  __ pop(rax); // Restore value.
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic = is_strict_mode()
+ ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
+ : isolate()->builtins()->KeyedStoreIC_Initialize();
  EmitCallIC(ic, RelocInfo::CODE_TARGET);
  break;
  }
@@ -1725,62 +1807,81 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
1725
1807
  // rcx, and the global object on the stack.
1726
1808
  __ Move(rcx, var->name());
1727
1809
  __ movq(rdx, GlobalObjectOperand());
1728
- Handle<Code> ic(Builtins::builtin(is_strict()
1729
- ? Builtins::StoreIC_Initialize_Strict
1730
- : Builtins::StoreIC_Initialize));
1810
+ Handle<Code> ic = is_strict_mode()
1811
+ ? isolate()->builtins()->StoreIC_Initialize_Strict()
1812
+ : isolate()->builtins()->StoreIC_Initialize();
1731
1813
  EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
1732
1814
 
1733
- } else if (var->mode() != Variable::CONST || op == Token::INIT_CONST) {
1734
- // Perform the assignment for non-const variables and for initialization
1735
- // of const variables. Const assignments are simply skipped.
1736
- Label done;
1815
+ } else if (op == Token::INIT_CONST) {
1816
+ // Like var declarations, const declarations are hoisted to function
1817
+ // scope. However, unlike var initializers, const initializers are able
1818
+ // to drill a hole to that function context, even from inside a 'with'
1819
+ // context. We thus bypass the normal static scope lookup.
1820
+ Slot* slot = var->AsSlot();
1821
+ Label skip;
1822
+ switch (slot->type()) {
1823
+ case Slot::PARAMETER:
1824
+ // No const parameters.
1825
+ UNREACHABLE();
1826
+ break;
1827
+ case Slot::LOCAL:
1828
+ __ movq(rdx, Operand(rbp, SlotOffset(slot)));
1829
+ __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
1830
+ __ j(not_equal, &skip);
1831
+ __ movq(Operand(rbp, SlotOffset(slot)), rax);
1832
+ break;
1833
+ case Slot::CONTEXT: {
1834
+ __ movq(rcx, ContextOperand(rsi, Context::FCONTEXT_INDEX));
1835
+ __ movq(rdx, ContextOperand(rcx, slot->index()));
1836
+ __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
1837
+ __ j(not_equal, &skip);
1838
+ __ movq(ContextOperand(rcx, slot->index()), rax);
1839
+ int offset = Context::SlotOffset(slot->index());
1840
+ __ movq(rdx, rax); // Preserve the stored value in eax.
1841
+ __ RecordWrite(rcx, offset, rdx, rbx);
1842
+ break;
1843
+ }
1844
+ case Slot::LOOKUP:
1845
+ __ push(rax);
1846
+ __ push(rsi);
1847
+ __ Push(var->name());
1848
+ __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
1849
+ break;
1850
+ }
1851
+ __ bind(&skip);
1852
+
1853
+ } else if (var->mode() != Variable::CONST) {
1854
+ // Perform the assignment for non-const variables. Const assignments
1855
+ // are simply skipped.
1737
1856
  Slot* slot = var->AsSlot();
1738
1857
  switch (slot->type()) {
1739
1858
  case Slot::PARAMETER:
1740
1859
  case Slot::LOCAL:
1741
- if (op == Token::INIT_CONST) {
1742
- // Detect const reinitialization by checking for the hole value.
1743
- __ movq(rdx, Operand(rbp, SlotOffset(slot)));
1744
- __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
1745
- __ j(not_equal, &done);
1746
- }
1747
1860
  // Perform the assignment.
1748
1861
  __ movq(Operand(rbp, SlotOffset(slot)), rax);
1749
1862
  break;
1750
1863
 
1751
1864
  case Slot::CONTEXT: {
1752
1865
  MemOperand target = EmitSlotSearch(slot, rcx);
1753
- if (op == Token::INIT_CONST) {
1754
- // Detect const reinitialization by checking for the hole value.
1755
- __ movq(rdx, target);
1756
- __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
1757
- __ j(not_equal, &done);
1758
- }
1759
1866
  // Perform the assignment and issue the write barrier.
1760
1867
  __ movq(target, rax);
1761
1868
  // The value of the assignment is in rax. RecordWrite clobbers its
1762
1869
  // register arguments.
1763
1870
  __ movq(rdx, rax);
1764
- int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
1871
+ int offset = Context::SlotOffset(slot->index());
1765
1872
  __ RecordWrite(rcx, offset, rdx, rbx);
1766
1873
  break;
1767
1874
  }
1768
1875
 
1769
1876
  case Slot::LOOKUP:
1770
- // Call the runtime for the assignment. The runtime will ignore
1771
- // const reinitialization.
1877
+ // Call the runtime for the assignment.
1772
1878
  __ push(rax); // Value.
1773
1879
  __ push(rsi); // Context.
1774
1880
  __ Push(var->name());
1775
- if (op == Token::INIT_CONST) {
1776
- // The runtime will ignore const redeclaration.
1777
- __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
1778
- } else {
1779
- __ CallRuntime(Runtime::kStoreContextSlot, 3);
1780
- }
1881
+ __ Push(Smi::FromInt(strict_mode_flag()));
1882
+ __ CallRuntime(Runtime::kStoreContextSlot, 4);
1781
1883
  break;
1782
1884
  }
1783
- __ bind(&done);
1784
1885
  }
1785
1886
  }
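Note: the reworked EmitVariableAssignment above gives const initialization its own branch: an INIT_CONST store writes only while the slot still holds the hole sentinel, a plain assignment to a const variable is skipped outright, and ordinary stores to LOOKUP slots now pass the strict-mode flag to Runtime::kStoreContextSlot. A minimal C++ sketch of that store policy, using std::optional as a stand-in for V8's hole value (illustrative only, not V8 code):

#include <cassert>
#include <optional>

enum class Op { kAssign, kInitConst };

// std::nullopt plays the role of "the hole": an uninitialized const slot.
using Slot = std::optional<int>;

// Returns true when a store actually happens.
bool StoreToSlot(Slot* slot, Op op, int value, bool slot_is_const) {
  if (op == Op::kInitConst) {
    if (slot->has_value()) return false;  // already initialized: skip (the &skip label)
    *slot = value;                        // first initialization fills the hole
    return true;
  }
  if (slot_is_const) return false;        // assignments to const are silently ignored
  *slot = value;                          // ordinary variable store
  return true;
}

int main() {
  Slot s;  // starts out as the hole
  assert(StoreToSlot(&s, Op::kInitConst, 1, /*slot_is_const=*/true));
  assert(!StoreToSlot(&s, Op::kInitConst, 2, true));  // re-initialization is skipped
  assert(!StoreToSlot(&s, Op::kAssign, 3, true));     // const assignment is skipped
  assert(*s == 1);
  return 0;
}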
1786
1887
 
@@ -1809,7 +1910,9 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
1809
1910
  } else {
1810
1911
  __ pop(rdx);
1811
1912
  }
1812
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
1913
+ Handle<Code> ic = is_strict_mode()
1914
+ ? isolate()->builtins()->StoreIC_Initialize_Strict()
1915
+ : isolate()->builtins()->StoreIC_Initialize();
1813
1916
  EmitCallIC(ic, RelocInfo::CODE_TARGET);
1814
1917
 
1815
1918
  // If the assignment ends an initialization block, revert to fast case.
@@ -1847,7 +1950,9 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
1847
1950
  }
1848
1951
  // Record source code position before IC call.
1849
1952
  SetSourcePosition(expr->position());
1850
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
1953
+ Handle<Code> ic = is_strict_mode()
1954
+ ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
1955
+ : isolate()->builtins()->KeyedStoreIC_Initialize();
1851
1956
  EmitCallIC(ic, RelocInfo::CODE_TARGET);
1852
1957
 
1853
1958
  // If the assignment ends an initialization block, revert to fast case.
@@ -1898,7 +2003,8 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
1898
2003
  SetSourcePosition(expr->position());
1899
2004
  // Call the IC initialization code.
1900
2005
  InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
1901
- Handle<Code> ic = StubCache::ComputeCallInitialize(arg_count, in_loop);
2006
+ Handle<Code> ic =
2007
+ ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
1902
2008
  EmitCallIC(ic, mode);
1903
2009
  RecordJSReturnSite(expr);
1904
2010
  // Restore context register.
@@ -1931,7 +2037,8 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
1931
2037
  SetSourcePosition(expr->position());
1932
2038
  // Call the IC initialization code.
1933
2039
  InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
1934
- Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arg_count, in_loop);
2040
+ Handle<Code> ic =
2041
+ ISOLATE->stub_cache()->ComputeKeyedCallInitialize(arg_count, in_loop);
1935
2042
  __ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize)); // Key.
1936
2043
  EmitCallIC(ic, mode);
1937
2044
  RecordJSReturnSite(expr);
@@ -1963,6 +2070,27 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
1963
2070
  }
1964
2071
 
1965
2072
 
2073
+ void FullCodeGenerator::EmitResolvePossiblyDirectEval(ResolveEvalFlag flag,
2074
+ int arg_count) {
2075
+ // Push copy of the first argument or undefined if it doesn't exist.
2076
+ if (arg_count > 0) {
2077
+ __ push(Operand(rsp, arg_count * kPointerSize));
2078
+ } else {
2079
+ __ PushRoot(Heap::kUndefinedValueRootIndex);
2080
+ }
2081
+
2082
+ // Push the receiver of the enclosing function and do runtime call.
2083
+ __ push(Operand(rbp, (2 + scope()->num_parameters()) * kPointerSize));
2084
+
2085
+ // Push the strict mode flag.
2086
+ __ Push(Smi::FromInt(strict_mode_flag()));
2087
+
2088
+ __ CallRuntime(flag == SKIP_CONTEXT_LOOKUP
2089
+ ? Runtime::kResolvePossiblyDirectEvalNoLookup
2090
+ : Runtime::kResolvePossiblyDirectEval, 4);
2091
+ }
2092
+
2093
+
1966
2094
  void FullCodeGenerator::VisitCall(Call* expr) {
1967
2095
  #ifdef DEBUG
1968
2096
  // We want to verify that RecordJSReturnSite gets called on all paths
@@ -1990,21 +2118,30 @@ void FullCodeGenerator::VisitCall(Call* expr) {
1990
2118
  VisitForStackValue(args->at(i));
1991
2119
  }
1992
2120
 
1993
- // Push copy of the function - found below the arguments.
1994
- __ push(Operand(rsp, (arg_count + 1) * kPointerSize));
1995
-
1996
- // Push copy of the first argument or undefined if it doesn't exist.
1997
- if (arg_count > 0) {
1998
- __ push(Operand(rsp, arg_count * kPointerSize));
1999
- } else {
2000
- __ PushRoot(Heap::kUndefinedValueRootIndex);
2121
+ // If we know that eval can only be shadowed by eval-introduced
2122
+ // variables we attempt to load the global eval function directly
2123
+ // in generated code. If we succeed, there is no need to perform a
2124
+ // context lookup in the runtime system.
2125
+ Label done;
2126
+ if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
2127
+ Label slow;
2128
+ EmitLoadGlobalSlotCheckExtensions(var->AsSlot(),
2129
+ NOT_INSIDE_TYPEOF,
2130
+ &slow);
2131
+ // Push the function and resolve eval.
2132
+ __ push(rax);
2133
+ EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count);
2134
+ __ jmp(&done);
2135
+ __ bind(&slow);
2001
2136
  }
2002
2137
 
2003
- // Push the receiver of the enclosing function and do runtime call.
2004
- __ push(Operand(rbp, (2 + scope()->num_parameters()) * kPointerSize));
2005
- // Push the strict mode flag.
2006
- __ Push(Smi::FromInt(strict_mode_flag()));
2007
- __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 4);
2138
+ // Push copy of the function (found below the arguments) and
2139
+ // resolve eval.
2140
+ __ push(Operand(rsp, (arg_count + 1) * kPointerSize));
2141
+ EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count);
2142
+ if (done.is_linked()) {
2143
+ __ bind(&done);
2144
+ }
2008
2145
 
2009
2146
  // The runtime call returns a pair of values in rax (function) and
2010
2147
  // rdx (receiver). Touch up the stack with the right values.
@@ -2094,7 +2231,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
2094
2231
  // Record source code position for IC call.
2095
2232
  SetSourcePosition(prop->position());
2096
2233
 
2097
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
2234
+ Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2098
2235
  EmitCallIC(ic, RelocInfo::CODE_TARGET);
2099
2236
  // Push result (function).
2100
2237
  __ push(rax);
@@ -2110,15 +2247,6 @@ void FullCodeGenerator::VisitCall(Call* expr) {
2110
2247
  }
2111
2248
  }
2112
2249
  } else {
2113
- // Call to some other expression. If the expression is an anonymous
2114
- // function literal not called in a loop, mark it as one that should
2115
- // also use the full code generator.
2116
- FunctionLiteral* lit = fun->AsFunctionLiteral();
2117
- if (lit != NULL &&
2118
- lit->name()->Equals(Heap::empty_string()) &&
2119
- loop_depth() == 0) {
2120
- lit->set_try_full_codegen(true);
2121
- }
2122
2250
  { PreservePositionScope scope(masm()->positions_recorder());
2123
2251
  VisitForStackValue(fun);
2124
2252
  }
@@ -2162,7 +2290,8 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2162
2290
  __ Set(rax, arg_count);
2163
2291
  __ movq(rdi, Operand(rsp, arg_count * kPointerSize));
2164
2292
 
2165
- Handle<Code> construct_builtin(Builtins::builtin(Builtins::JSConstructCall));
2293
+ Handle<Code> construct_builtin =
2294
+ isolate()->builtins()->JSConstructCall();
2166
2295
  __ Call(construct_builtin, RelocInfo::CONSTRUCT_CALL);
2167
2296
  context()->Plug(rax);
2168
2297
  }
@@ -2296,11 +2425,71 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2296
2425
  context()->PrepareTest(&materialize_true, &materialize_false,
2297
2426
  &if_true, &if_false, &fall_through);
2298
2427
 
2299
- // Just indicate false, as %_IsStringWrapperSafeForDefaultValueOf() is only
2300
- // used in a few functions in runtime.js which should not normally be hit by
2301
- // this compiler.
2428
+ if (FLAG_debug_code) __ AbortIfSmi(rax);
2429
+
2430
+ // Check whether this map has already been checked to be safe for default
2431
+ // valueOf.
2432
+ __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2433
+ __ testb(FieldOperand(rbx, Map::kBitField2Offset),
2434
+ Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
2435
+ __ j(not_zero, if_true);
2436
+
2437
+ // Check for fast case object. Generate false result for slow case object.
2438
+ __ movq(rcx, FieldOperand(rax, JSObject::kPropertiesOffset));
2439
+ __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
2440
+ __ CompareRoot(rcx, Heap::kHashTableMapRootIndex);
2441
+ __ j(equal, if_false);
2442
+
2443
+ // Look for valueOf symbol in the descriptor array, and indicate false if
2444
+ // found. The type is not checked, so if it is a transition it is a false
2445
+ // negative.
2446
+ __ movq(rbx, FieldOperand(rbx, Map::kInstanceDescriptorsOffset));
2447
+ __ movq(rcx, FieldOperand(rbx, FixedArray::kLengthOffset));
2448
+ // rbx: descriptor array
2449
+ // rcx: length of descriptor array
2450
+ // Calculate the end of the descriptor array.
2451
+ SmiIndex index = masm_->SmiToIndex(rdx, rcx, kPointerSizeLog2);
2452
+ __ lea(rcx,
2453
+ Operand(
2454
+ rbx, index.reg, index.scale, FixedArray::kHeaderSize));
2455
+ // Calculate location of the first key name.
2456
+ __ addq(rbx,
2457
+ Immediate(FixedArray::kHeaderSize +
2458
+ DescriptorArray::kFirstIndex * kPointerSize));
2459
+ // Loop through all the keys in the descriptor array. If one of these is the
2460
+ // symbol valueOf, the result is false.
2461
+ Label entry, loop;
2462
+ __ jmp(&entry);
2463
+ __ bind(&loop);
2464
+ __ movq(rdx, FieldOperand(rbx, 0));
2465
+ __ Cmp(rdx, FACTORY->value_of_symbol());
2466
+ __ j(equal, if_false);
2467
+ __ addq(rbx, Immediate(kPointerSize));
2468
+ __ bind(&entry);
2469
+ __ cmpq(rbx, rcx);
2470
+ __ j(not_equal, &loop);
2471
+
2472
+ // Reload map as register rbx was used as temporary above.
2473
+ __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2474
+
2475
+ // If a valueOf property is not found on the object check that it's
2476
+ // prototype is the un-modified String prototype. If not result is false.
2477
+ __ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
2478
+ __ testq(rcx, Immediate(kSmiTagMask));
2479
+ __ j(zero, if_false);
2480
+ __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
2481
+ __ movq(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
2482
+ __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalContextOffset));
2483
+ __ cmpq(rcx,
2484
+ ContextOperand(rdx, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
2485
+ __ j(not_equal, if_false);
2486
+ // Set the bit in the map to indicate that it has been checked safe for
2487
+ // default valueOf, and set the result to true.
2488
+ __ or_(FieldOperand(rbx, Map::kBitField2Offset),
2489
+ Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
2490
+ __ jmp(if_true);
2491
+
2302
2492
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2303
- __ jmp(if_false);
2304
2493
  context()->Plug(if_true, if_false);
2305
2494
  }
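Note: the hunk above replaces the old "always false" answer for %_IsStringWrapperSafeForDefaultValueOf with a real check that is memoized on the object's map: a map bit short-circuits repeat checks, otherwise the descriptor array is scanned for an own valueOf and the prototype is compared against the unmodified String prototype before the bit is set. A rough sketch of that memoization shape, with illustrative types rather than V8's:

#include <string>
#include <vector>

struct Map {
  bool checked_safe_for_default_value_of = false;  // the cached map bit
  bool has_fast_properties = true;                 // dictionary-mode objects bail out
  std::vector<std::string> own_property_names;     // stand-in for the descriptor array
  bool prototype_is_unmodified_string_prototype = true;
};

bool IsStringWrapperSafeForDefaultValueOf(Map* map) {
  if (map->checked_safe_for_default_value_of) return true;  // fast path: bit already set
  if (!map->has_fast_properties) return false;              // slow-case object: give up
  for (const std::string& name : map->own_property_names) {
    if (name == "valueOf") return false;                    // own valueOf: not safe
  }
  if (!map->prototype_is_unmodified_string_prototype) return false;
  map->checked_safe_for_default_value_of = true;            // memoize success on the map
  return true;
}

int main() {
  Map m;
  return IsStringWrapperSafeForDefaultValueOf(&m) ? 0 : 1;
}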
2306
2495
 
@@ -2384,15 +2573,15 @@ void FullCodeGenerator::EmitIsConstructCall(ZoneList<Expression*>* args) {
2384
2573
 
2385
2574
  // Skip the arguments adaptor frame if it exists.
2386
2575
  Label check_frame_marker;
2387
- __ SmiCompare(Operand(rax, StandardFrameConstants::kContextOffset),
2388
- Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2576
+ __ Cmp(Operand(rax, StandardFrameConstants::kContextOffset),
2577
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2389
2578
  __ j(not_equal, &check_frame_marker);
2390
2579
  __ movq(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset));
2391
2580
 
2392
2581
  // Check the marker in the calling frame.
2393
2582
  __ bind(&check_frame_marker);
2394
- __ SmiCompare(Operand(rax, StandardFrameConstants::kMarkerOffset),
2395
- Smi::FromInt(StackFrame::CONSTRUCT));
2583
+ __ Cmp(Operand(rax, StandardFrameConstants::kMarkerOffset),
2584
+ Smi::FromInt(StackFrame::CONSTRUCT));
2396
2585
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2397
2586
  Split(equal, if_true, if_false, fall_through);
2398
2587
 
@@ -2446,8 +2635,8 @@ void FullCodeGenerator::EmitArgumentsLength(ZoneList<Expression*>* args) {
2446
2635
 
2447
2636
  // Check if the calling frame is an arguments adaptor frame.
2448
2637
  __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2449
- __ SmiCompare(Operand(rbx, StandardFrameConstants::kContextOffset),
2450
- Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2638
+ __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
2639
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2451
2640
  __ j(not_equal, &exit);
2452
2641
 
2453
2642
  // Arguments adaptor case: Read the arguments length from the
@@ -2495,12 +2684,12 @@ void FullCodeGenerator::EmitClassOf(ZoneList<Expression*>* args) {
2495
2684
 
2496
2685
  // Functions have class 'Function'.
2497
2686
  __ bind(&function);
2498
- __ Move(rax, Factory::function_class_symbol());
2687
+ __ Move(rax, isolate()->factory()->function_class_symbol());
2499
2688
  __ jmp(&done);
2500
2689
 
2501
2690
  // Objects with a non-function constructor have class 'Object'.
2502
2691
  __ bind(&non_function_constructor);
2503
- __ Move(rax, Factory::Object_symbol());
2692
+ __ Move(rax, isolate()->factory()->Object_symbol());
2504
2693
  __ jmp(&done);
2505
2694
 
2506
2695
  // Non-JS objects have class null.
@@ -2554,8 +2743,13 @@ void FullCodeGenerator::EmitRandomHeapNumber(ZoneList<Expression*>* args) {
2554
2743
 
2555
2744
  // Return a random uint32 number in rax.
2556
2745
  // The fresh HeapNumber is in rbx, which is callee-save on both x64 ABIs.
2557
- __ PrepareCallCFunction(0);
2558
- __ CallCFunction(ExternalReference::random_uint32_function(), 0);
2746
+ __ PrepareCallCFunction(1);
2747
+ #ifdef _WIN64
2748
+ __ LoadAddress(rcx, ExternalReference::isolate_address());
2749
+ #else
2750
+ __ LoadAddress(rdi, ExternalReference::isolate_address());
2751
+ #endif
2752
+ __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
2559
2753
 
2560
2754
  // Convert 32 random bits in rax to 0.(32 random bits) in a double
2561
2755
  // by computing:
@@ -2621,7 +2815,8 @@ void FullCodeGenerator::EmitMathPow(ZoneList<Expression*>* args) {
2621
2815
  ASSERT(args->length() == 2);
2622
2816
  VisitForStackValue(args->at(0));
2623
2817
  VisitForStackValue(args->at(1));
2624
- __ CallRuntime(Runtime::kMath_pow, 2);
2818
+ MathPowStub stub;
2819
+ __ CallStub(&stub);
2625
2820
  context()->Plug(rax);
2626
2821
  }
2627
2822
 
@@ -2805,7 +3000,8 @@ void FullCodeGenerator::EmitStringCompare(ZoneList<Expression*>* args) {
2805
3000
 
2806
3001
  void FullCodeGenerator::EmitMathSin(ZoneList<Expression*>* args) {
2807
3002
  // Load the argument on the stack and call the stub.
2808
- TranscendentalCacheStub stub(TranscendentalCache::SIN);
3003
+ TranscendentalCacheStub stub(TranscendentalCache::SIN,
3004
+ TranscendentalCacheStub::TAGGED);
2809
3005
  ASSERT(args->length() == 1);
2810
3006
  VisitForStackValue(args->at(0));
2811
3007
  __ CallStub(&stub);
@@ -2815,7 +3011,8 @@ void FullCodeGenerator::EmitMathSin(ZoneList<Expression*>* args) {
2815
3011
 
2816
3012
  void FullCodeGenerator::EmitMathCos(ZoneList<Expression*>* args) {
2817
3013
  // Load the argument on the stack and call the stub.
2818
- TranscendentalCacheStub stub(TranscendentalCache::COS);
3014
+ TranscendentalCacheStub stub(TranscendentalCache::COS,
3015
+ TranscendentalCacheStub::TAGGED);
2819
3016
  ASSERT(args->length() == 1);
2820
3017
  VisitForStackValue(args->at(0));
2821
3018
  __ CallStub(&stub);
@@ -2825,7 +3022,8 @@ void FullCodeGenerator::EmitMathCos(ZoneList<Expression*>* args) {
2825
3022
 
2826
3023
  void FullCodeGenerator::EmitMathLog(ZoneList<Expression*>* args) {
2827
3024
  // Load the argument on the stack and call the stub.
2828
- TranscendentalCacheStub stub(TranscendentalCache::LOG);
3025
+ TranscendentalCacheStub stub(TranscendentalCache::LOG,
3026
+ TranscendentalCacheStub::TAGGED);
2829
3027
  ASSERT(args->length() == 1);
2830
3028
  VisitForStackValue(args->at(0));
2831
3029
  __ CallStub(&stub);
@@ -2877,7 +3075,73 @@ void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) {
2877
3075
  VisitForStackValue(args->at(0));
2878
3076
  VisitForStackValue(args->at(1));
2879
3077
  VisitForStackValue(args->at(2));
3078
+ Label done;
3079
+ Label slow_case;
3080
+ Register object = rax;
3081
+ Register index_1 = rbx;
3082
+ Register index_2 = rcx;
3083
+ Register elements = rdi;
3084
+ Register temp = rdx;
3085
+ __ movq(object, Operand(rsp, 2 * kPointerSize));
3086
+ // Fetch the map and check if array is in fast case.
3087
+ // Check that object doesn't require security checks and
3088
+ // has no indexed interceptor.
3089
+ __ CmpObjectType(object, JS_ARRAY_TYPE, temp);
3090
+ __ j(not_equal, &slow_case);
3091
+ __ testb(FieldOperand(temp, Map::kBitFieldOffset),
3092
+ Immediate(KeyedLoadIC::kSlowCaseBitFieldMask));
3093
+ __ j(not_zero, &slow_case);
3094
+
3095
+ // Check the object's elements are in fast case and writable.
3096
+ __ movq(elements, FieldOperand(object, JSObject::kElementsOffset));
3097
+ __ CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
3098
+ Heap::kFixedArrayMapRootIndex);
3099
+ __ j(not_equal, &slow_case);
3100
+
3101
+ // Check that both indices are smis.
3102
+ __ movq(index_1, Operand(rsp, 1 * kPointerSize));
3103
+ __ movq(index_2, Operand(rsp, 0 * kPointerSize));
3104
+ __ JumpIfNotBothSmi(index_1, index_2, &slow_case);
3105
+
3106
+ // Check that both indices are valid.
3107
+ // The JSArray length field is a smi since the array is in fast case mode.
3108
+ __ movq(temp, FieldOperand(object, JSArray::kLengthOffset));
3109
+ __ SmiCompare(temp, index_1);
3110
+ __ j(below_equal, &slow_case);
3111
+ __ SmiCompare(temp, index_2);
3112
+ __ j(below_equal, &slow_case);
3113
+
3114
+ __ SmiToInteger32(index_1, index_1);
3115
+ __ SmiToInteger32(index_2, index_2);
3116
+ // Bring addresses into index1 and index2.
3117
+ __ lea(index_1, FieldOperand(elements, index_1, times_pointer_size,
3118
+ FixedArray::kHeaderSize));
3119
+ __ lea(index_2, FieldOperand(elements, index_2, times_pointer_size,
3120
+ FixedArray::kHeaderSize));
3121
+
3122
+ // Swap elements. Use object and temp as scratch registers.
3123
+ __ movq(object, Operand(index_1, 0));
3124
+ __ movq(temp, Operand(index_2, 0));
3125
+ __ movq(Operand(index_2, 0), object);
3126
+ __ movq(Operand(index_1, 0), temp);
3127
+
3128
+ Label new_space;
3129
+ __ InNewSpace(elements, temp, equal, &new_space);
3130
+
3131
+ __ movq(object, elements);
3132
+ __ RecordWriteHelper(object, index_1, temp);
3133
+ __ RecordWriteHelper(elements, index_2, temp);
3134
+
3135
+ __ bind(&new_space);
3136
+ // We are done. Drop elements from the stack, and return undefined.
3137
+ __ addq(rsp, Immediate(3 * kPointerSize));
3138
+ __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
3139
+ __ jmp(&done);
3140
+
3141
+ __ bind(&slow_case);
2880
3142
  __ CallRuntime(Runtime::kSwapElements, 3);
3143
+
3144
+ __ bind(&done);
2881
3145
  context()->Plug(rax);
2882
3146
  }
2883
3147
 
@@ -2889,7 +3153,7 @@ void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
2889
3153
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
2890
3154
 
2891
3155
  Handle<FixedArray> jsfunction_result_caches(
2892
- Top::global_context()->jsfunction_result_caches());
3156
+ isolate()->global_context()->jsfunction_result_caches());
2893
3157
  if (jsfunction_result_caches->length() <= cache_id) {
2894
3158
  __ Abort("Attempt to use undefined cache.");
2895
3159
  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
@@ -2966,10 +3230,10 @@ void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) {
2966
3230
  __ cmpq(tmp, FieldOperand(right, JSRegExp::kDataOffset));
2967
3231
  __ j(equal, &ok);
2968
3232
  __ bind(&fail);
2969
- __ Move(rax, Factory::false_value());
3233
+ __ Move(rax, isolate()->factory()->false_value());
2970
3234
  __ jmp(&done);
2971
3235
  __ bind(&ok);
2972
- __ Move(rax, Factory::true_value());
3236
+ __ Move(rax, isolate()->factory()->true_value());
2973
3237
  __ bind(&done);
2974
3238
 
2975
3239
  context()->Plug(rax);
@@ -3000,9 +3264,12 @@ void FullCodeGenerator::EmitHasCachedArrayIndex(ZoneList<Expression*>* args) {
3000
3264
 
3001
3265
  void FullCodeGenerator::EmitGetCachedArrayIndex(ZoneList<Expression*>* args) {
3002
3266
  ASSERT(args->length() == 1);
3003
-
3004
3267
  VisitForAccumulatorValue(args->at(0));
3005
3268
 
3269
+ if (FLAG_debug_code) {
3270
+ __ AbortIfNotString(rax);
3271
+ }
3272
+
3006
3273
  __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
3007
3274
  ASSERT(String::kHashShift >= kSmiTagSize);
3008
3275
  __ IndexFromHash(rax, rax);
@@ -3012,7 +3279,288 @@ void FullCodeGenerator::EmitGetCachedArrayIndex(ZoneList<Expression*>* args) {
3012
3279
 
3013
3280
 
3014
3281
  void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
3015
- context()->Plug(Heap::kUndefinedValueRootIndex);
3282
+ Label bailout, return_result, done, one_char_separator, long_separator,
3283
+ non_trivial_array, not_size_one_array, loop,
3284
+ loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
3285
+ ASSERT(args->length() == 2);
3286
+ // We will leave the separator on the stack until the end of the function.
3287
+ VisitForStackValue(args->at(1));
3288
+ // Load this to rax (= array)
3289
+ VisitForAccumulatorValue(args->at(0));
3290
+ // All aliases of the same register have disjoint lifetimes.
3291
+ Register array = rax;
3292
+ Register elements = no_reg; // Will be rax.
3293
+
3294
+ Register index = rdx;
3295
+
3296
+ Register string_length = rcx;
3297
+
3298
+ Register string = rsi;
3299
+
3300
+ Register scratch = rbx;
3301
+
3302
+ Register array_length = rdi;
3303
+ Register result_pos = no_reg; // Will be rdi.
3304
+
3305
+ Operand separator_operand = Operand(rsp, 2 * kPointerSize);
3306
+ Operand result_operand = Operand(rsp, 1 * kPointerSize);
3307
+ Operand array_length_operand = Operand(rsp, 0 * kPointerSize);
3308
+ // Separator operand is already pushed. Make room for the two
3309
+ // other stack fields, and clear the direction flag in anticipation
3310
+ // of calling CopyBytes.
3311
+ __ subq(rsp, Immediate(2 * kPointerSize));
3312
+ __ cld();
3313
+ // Check that the array is a JSArray
3314
+ __ JumpIfSmi(array, &bailout);
3315
+ __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
3316
+ __ j(not_equal, &bailout);
3317
+
3318
+ // Check that the array has fast elements.
3319
+ __ testb(FieldOperand(scratch, Map::kBitField2Offset),
3320
+ Immediate(1 << Map::kHasFastElements));
3321
+ __ j(zero, &bailout);
3322
+
3323
+ // Array has fast elements, so its length must be a smi.
3324
+ // If the array has length zero, return the empty string.
3325
+ __ movq(array_length, FieldOperand(array, JSArray::kLengthOffset));
3326
+ __ SmiCompare(array_length, Smi::FromInt(0));
3327
+ __ j(not_zero, &non_trivial_array);
3328
+ __ LoadRoot(rax, Heap::kEmptyStringRootIndex);
3329
+ __ jmp(&return_result);
3330
+
3331
+ // Save the array length on the stack.
3332
+ __ bind(&non_trivial_array);
3333
+ __ SmiToInteger32(array_length, array_length);
3334
+ __ movl(array_length_operand, array_length);
3335
+
3336
+ // Save the FixedArray containing array's elements.
3337
+ // End of array's live range.
3338
+ elements = array;
3339
+ __ movq(elements, FieldOperand(array, JSArray::kElementsOffset));
3340
+ array = no_reg;
3341
+
3342
+
3343
+ // Check that all array elements are sequential ASCII strings, and
3344
+ // accumulate the sum of their lengths, as a smi-encoded value.
3345
+ __ Set(index, 0);
3346
+ __ Set(string_length, 0);
3347
+ // Loop condition: while (index < array_length).
3348
+ // Live loop registers: index(int32), array_length(int32), string(String*),
3349
+ // scratch, string_length(int32), elements(FixedArray*).
3350
+ if (FLAG_debug_code) {
3351
+ __ cmpq(index, array_length);
3352
+ __ Assert(below, "No empty arrays here in EmitFastAsciiArrayJoin");
3353
+ }
3354
+ __ bind(&loop);
3355
+ __ movq(string, FieldOperand(elements,
3356
+ index,
3357
+ times_pointer_size,
3358
+ FixedArray::kHeaderSize));
3359
+ __ JumpIfSmi(string, &bailout);
3360
+ __ movq(scratch, FieldOperand(string, HeapObject::kMapOffset));
3361
+ __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3362
+ __ andb(scratch, Immediate(
3363
+ kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3364
+ __ cmpb(scratch, Immediate(kStringTag | kAsciiStringTag | kSeqStringTag));
3365
+ __ j(not_equal, &bailout);
3366
+ __ AddSmiField(string_length,
3367
+ FieldOperand(string, SeqAsciiString::kLengthOffset));
3368
+ __ j(overflow, &bailout);
3369
+ __ incl(index);
3370
+ __ cmpl(index, array_length);
3371
+ __ j(less, &loop);
3372
+
3373
+ // Live registers:
3374
+ // string_length: Sum of string lengths.
3375
+ // elements: FixedArray of strings.
3376
+ // index: Array length.
3377
+ // array_length: Array length.
3378
+
3379
+ // If array_length is 1, return elements[0], a string.
3380
+ __ cmpl(array_length, Immediate(1));
3381
+ __ j(not_equal, &not_size_one_array);
3382
+ __ movq(rax, FieldOperand(elements, FixedArray::kHeaderSize));
3383
+ __ jmp(&return_result);
3384
+
3385
+ __ bind(&not_size_one_array);
3386
+
3387
+ // End of array_length live range.
3388
+ result_pos = array_length;
3389
+ array_length = no_reg;
3390
+
3391
+ // Live registers:
3392
+ // string_length: Sum of string lengths.
3393
+ // elements: FixedArray of strings.
3394
+ // index: Array length.
3395
+
3396
+ // Check that the separator is a sequential ASCII string.
3397
+ __ movq(string, separator_operand);
3398
+ __ JumpIfSmi(string, &bailout);
3399
+ __ movq(scratch, FieldOperand(string, HeapObject::kMapOffset));
3400
+ __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3401
+ __ andb(scratch, Immediate(
3402
+ kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3403
+ __ cmpb(scratch, Immediate(kStringTag | kAsciiStringTag | kSeqStringTag));
3404
+ __ j(not_equal, &bailout);
3405
+
3406
+ // Live registers:
3407
+ // string_length: Sum of string lengths.
3408
+ // elements: FixedArray of strings.
3409
+ // index: Array length.
3410
+ // string: Separator string.
3411
+
3412
+ // Add (separator length times (array_length - 1)) to string_length.
3413
+ __ SmiToInteger32(scratch,
3414
+ FieldOperand(string, SeqAsciiString::kLengthOffset));
3415
+ __ decl(index);
3416
+ __ imull(scratch, index);
3417
+ __ j(overflow, &bailout);
3418
+ __ addl(string_length, scratch);
3419
+ __ j(overflow, &bailout);
3420
+
3421
+ // Live registers and stack values:
3422
+ // string_length: Total length of result string.
3423
+ // elements: FixedArray of strings.
3424
+ __ AllocateAsciiString(result_pos, string_length, scratch,
3425
+ index, string, &bailout);
3426
+ __ movq(result_operand, result_pos);
3427
+ __ lea(result_pos, FieldOperand(result_pos, SeqAsciiString::kHeaderSize));
3428
+
3429
+ __ movq(string, separator_operand);
3430
+ __ SmiCompare(FieldOperand(string, SeqAsciiString::kLengthOffset),
3431
+ Smi::FromInt(1));
3432
+ __ j(equal, &one_char_separator);
3433
+ __ j(greater, &long_separator);
3434
+
3435
+
3436
+ // Empty separator case:
3437
+ __ Set(index, 0);
3438
+ __ movl(scratch, array_length_operand);
3439
+ __ jmp(&loop_1_condition);
3440
+ // Loop condition: while (index < array_length).
3441
+ __ bind(&loop_1);
3442
+ // Each iteration of the loop concatenates one string to the result.
3443
+ // Live values in registers:
3444
+ // index: which element of the elements array we are adding to the result.
3445
+ // result_pos: the position to which we are currently copying characters.
3446
+ // elements: the FixedArray of strings we are joining.
3447
+ // scratch: array length.
3448
+
3449
+ // Get string = array[index].
3450
+ __ movq(string, FieldOperand(elements, index,
3451
+ times_pointer_size,
3452
+ FixedArray::kHeaderSize));
3453
+ __ SmiToInteger32(string_length,
3454
+ FieldOperand(string, String::kLengthOffset));
3455
+ __ lea(string,
3456
+ FieldOperand(string, SeqAsciiString::kHeaderSize));
3457
+ __ CopyBytes(result_pos, string, string_length);
3458
+ __ incl(index);
3459
+ __ bind(&loop_1_condition);
3460
+ __ cmpl(index, scratch);
3461
+ __ j(less, &loop_1); // Loop while (index < array_length).
3462
+ __ jmp(&done);
3463
+
3464
+ // Generic bailout code used from several places.
3465
+ __ bind(&bailout);
3466
+ __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
3467
+ __ jmp(&return_result);
3468
+
3469
+
3470
+ // One-character separator case
3471
+ __ bind(&one_char_separator);
3472
+ // Get the separator ascii character value.
3473
+ // Register "string" holds the separator.
3474
+ __ movzxbl(scratch, FieldOperand(string, SeqAsciiString::kHeaderSize));
3475
+ __ Set(index, 0);
3476
+ // Jump into the loop after the code that copies the separator, so the first
3477
+ // element is not preceded by a separator
3478
+ __ jmp(&loop_2_entry);
3479
+ // Loop condition: while (index < length).
3480
+ __ bind(&loop_2);
3481
+ // Each iteration of the loop concatenates one string to the result.
3482
+ // Live values in registers:
3483
+ // elements: The FixedArray of strings we are joining.
3484
+ // index: which element of the elements array we are adding to the result.
3485
+ // result_pos: the position to which we are currently copying characters.
3486
+ // scratch: Separator character.
3487
+
3488
+ // Copy the separator character to the result.
3489
+ __ movb(Operand(result_pos, 0), scratch);
3490
+ __ incq(result_pos);
3491
+
3492
+ __ bind(&loop_2_entry);
3493
+ // Get string = array[index].
3494
+ __ movq(string, FieldOperand(elements, index,
3495
+ times_pointer_size,
3496
+ FixedArray::kHeaderSize));
3497
+ __ SmiToInteger32(string_length,
3498
+ FieldOperand(string, String::kLengthOffset));
3499
+ __ lea(string,
3500
+ FieldOperand(string, SeqAsciiString::kHeaderSize));
3501
+ __ CopyBytes(result_pos, string, string_length);
3502
+ __ incl(index);
3503
+ __ cmpl(index, array_length_operand);
3504
+ __ j(less, &loop_2); // End while (index < length).
3505
+ __ jmp(&done);
3506
+
3507
+
3508
+ // Long separator case (separator is more than one character).
3509
+ __ bind(&long_separator);
3510
+
3511
+ // Make elements point to end of elements array, and index
3512
+ // count from -array_length to zero, so we don't need to maintain
3513
+ // a loop limit.
3514
+ __ movl(index, array_length_operand);
3515
+ __ lea(elements, FieldOperand(elements, index, times_pointer_size,
3516
+ FixedArray::kHeaderSize));
3517
+ __ neg(index);
3518
+
3519
+ // Replace separator string with pointer to its first character, and
3520
+ // make scratch be its length.
3521
+ __ movq(string, separator_operand);
3522
+ __ SmiToInteger32(scratch,
3523
+ FieldOperand(string, String::kLengthOffset));
3524
+ __ lea(string,
3525
+ FieldOperand(string, SeqAsciiString::kHeaderSize));
3526
+ __ movq(separator_operand, string);
3527
+
3528
+ // Jump into the loop after the code that copies the separator, so the first
3529
+ // element is not preceded by a separator
3530
+ __ jmp(&loop_3_entry);
3531
+ // Loop condition: while (index < length).
3532
+ __ bind(&loop_3);
3533
+ // Each iteration of the loop concatenates one string to the result.
3534
+ // Live values in registers:
3535
+ // index: which element of the elements array we are adding to the result.
3536
+ // result_pos: the position to which we are currently copying characters.
3537
+ // scratch: Separator length.
3538
+ // separator_operand (rsp[0x10]): Address of first char of separator.
3539
+
3540
+ // Copy the separator to the result.
3541
+ __ movq(string, separator_operand);
3542
+ __ movl(string_length, scratch);
3543
+ __ CopyBytes(result_pos, string, string_length, 2);
3544
+
3545
+ __ bind(&loop_3_entry);
3546
+ // Get string = array[index].
3547
+ __ movq(string, Operand(elements, index, times_pointer_size, 0));
3548
+ __ SmiToInteger32(string_length,
3549
+ FieldOperand(string, String::kLengthOffset));
3550
+ __ lea(string,
3551
+ FieldOperand(string, SeqAsciiString::kHeaderSize));
3552
+ __ CopyBytes(result_pos, string, string_length);
3553
+ __ incq(index);
3554
+ __ j(not_equal, &loop_3); // Loop while (index < 0).
3555
+
3556
+ __ bind(&done);
3557
+ __ movq(rax, result_operand);
3558
+
3559
+ __ bind(&return_result);
3560
+ // Drop temp values from the stack, and restore context register.
3561
+ __ addq(rsp, Immediate(3 * kPointerSize));
3562
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3563
+ context()->Plug(rax);
3016
3564
  }
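Note: EmitFastAsciiArrayJoin above is a hand-written fast path for Array.prototype.join over sequential ASCII strings: a first pass validates every element and totals the lengths (bailing out to the runtime on anything unexpected or on overflow), the result string is allocated once, and a second pass copies elements and separators, with specialized loops for the empty, one-character, and longer separator cases. The two-pass shape, sketched without any of the V8 machinery:

#include <optional>
#include <string>
#include <vector>

// Returns std::nullopt where the generated code would bail out to the runtime.
std::optional<std::string> FastJoin(const std::vector<std::string>& parts,
                                    const std::string& separator) {
  if (parts.empty()) return std::string();  // an empty array joins to the empty string
  // Pass 1: total length. The assembly additionally checks that each element
  // is a sequential ASCII string and watches the running sum for overflow.
  size_t total = separator.size() * (parts.size() - 1);
  for (const std::string& s : parts) total += s.size();
  // Pass 2: allocate once, then copy parts and separators.
  std::string result;
  result.reserve(total);
  for (size_t i = 0; i < parts.size(); ++i) {
    if (i > 0) result += separator;  // the real code specializes this per separator length
    result += parts[i];
  }
  return result;
}

int main() {
  return FastJoin({"a", "b", "c"}, ", ").value() == "a, b, c" ? 0 : 1;
}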
3017
3565
 
3018
3566
 
@@ -3043,7 +3591,8 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3043
3591
  // Call the JS runtime function using a call IC.
3044
3592
  __ Move(rcx, expr->name());
3045
3593
  InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
3046
- Handle<Code> ic = StubCache::ComputeCallInitialize(arg_count, in_loop);
3594
+ Handle<Code> ic =
3595
+ ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
3047
3596
  EmitCallIC(ic, RelocInfo::CODE_TARGET);
3048
3597
  // Restore context register.
3049
3598
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -3259,7 +3808,11 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3259
3808
 
3260
3809
  // We need a second deoptimization point after loading the value
3261
3810
  // in case evaluating the property load may have a side effect.
3262
- PrepareForBailout(expr->increment(), TOS_REG);
3811
+ if (assign_type == VARIABLE) {
3812
+ PrepareForBailout(expr->expression(), TOS_REG);
3813
+ } else {
3814
+ PrepareForBailout(expr->increment(), TOS_REG);
3815
+ }
3263
3816
 
3264
3817
  // Call ToNumber only if operand is not a smi.
3265
3818
  NearLabel no_conversion;
@@ -3355,7 +3908,9 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3355
3908
  case NAMED_PROPERTY: {
3356
3909
  __ Move(rcx, prop->key()->AsLiteral()->handle());
3357
3910
  __ pop(rdx);
3358
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
3911
+ Handle<Code> ic = is_strict_mode()
3912
+ ? isolate()->builtins()->StoreIC_Initialize_Strict()
3913
+ : isolate()->builtins()->StoreIC_Initialize();
3359
3914
  EmitCallIC(ic, RelocInfo::CODE_TARGET);
3360
3915
  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3361
3916
  if (expr->is_postfix()) {
@@ -3370,7 +3925,9 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3370
3925
  case KEYED_PROPERTY: {
3371
3926
  __ pop(rcx);
3372
3927
  __ pop(rdx);
3373
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
3928
+ Handle<Code> ic = is_strict_mode()
3929
+ ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
3930
+ : isolate()->builtins()->KeyedStoreIC_Initialize();
3374
3931
  EmitCallIC(ic, RelocInfo::CODE_TARGET);
3375
3932
  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3376
3933
  if (expr->is_postfix()) {
@@ -3395,7 +3952,7 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
3395
3952
  Comment cmnt(masm_, "Global variable");
3396
3953
  __ Move(rcx, proxy->name());
3397
3954
  __ movq(rax, GlobalObjectOperand());
3398
- Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
3955
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
3399
3956
  // Use a regular load, not a contextual load, to avoid a reference
3400
3957
  // error.
3401
3958
  EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -3448,62 +4005,49 @@ bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
3448
4005
  }
3449
4006
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
3450
4007
 
3451
- if (check->Equals(Heap::number_symbol())) {
3452
- Condition is_smi = masm_->CheckSmi(rax);
3453
- __ j(is_smi, if_true);
4008
+ if (check->Equals(isolate()->heap()->number_symbol())) {
4009
+ __ JumpIfSmi(rax, if_true);
3454
4010
  __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
3455
4011
  __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
3456
4012
  Split(equal, if_true, if_false, fall_through);
3457
- } else if (check->Equals(Heap::string_symbol())) {
3458
- Condition is_smi = masm_->CheckSmi(rax);
3459
- __ j(is_smi, if_false);
4013
+ } else if (check->Equals(isolate()->heap()->string_symbol())) {
4014
+ __ JumpIfSmi(rax, if_false);
3460
4015
  // Check for undetectable objects => false.
3461
- __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
4016
+ __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
4017
+ __ j(above_equal, if_false);
3462
4018
  __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
3463
4019
  Immediate(1 << Map::kIsUndetectable));
3464
- __ j(not_zero, if_false);
3465
- __ CmpInstanceType(rdx, FIRST_NONSTRING_TYPE);
3466
- Split(below, if_true, if_false, fall_through);
3467
- } else if (check->Equals(Heap::boolean_symbol())) {
4020
+ Split(zero, if_true, if_false, fall_through);
4021
+ } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
3468
4022
  __ CompareRoot(rax, Heap::kTrueValueRootIndex);
3469
4023
  __ j(equal, if_true);
3470
4024
  __ CompareRoot(rax, Heap::kFalseValueRootIndex);
3471
4025
  Split(equal, if_true, if_false, fall_through);
3472
- } else if (check->Equals(Heap::undefined_symbol())) {
4026
+ } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
3473
4027
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
3474
4028
  __ j(equal, if_true);
3475
- Condition is_smi = masm_->CheckSmi(rax);
3476
- __ j(is_smi, if_false);
4029
+ __ JumpIfSmi(rax, if_false);
3477
4030
  // Check for undetectable objects => true.
3478
4031
  __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
3479
4032
  __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
3480
4033
  Immediate(1 << Map::kIsUndetectable));
3481
4034
  Split(not_zero, if_true, if_false, fall_through);
3482
- } else if (check->Equals(Heap::function_symbol())) {
3483
- Condition is_smi = masm_->CheckSmi(rax);
3484
- __ j(is_smi, if_false);
3485
- __ CmpObjectType(rax, JS_FUNCTION_TYPE, rdx);
3486
- __ j(equal, if_true);
3487
- // Regular expressions => 'function' (they are callable).
3488
- __ CmpInstanceType(rdx, JS_REGEXP_TYPE);
3489
- Split(equal, if_true, if_false, fall_through);
3490
- } else if (check->Equals(Heap::object_symbol())) {
3491
- Condition is_smi = masm_->CheckSmi(rax);
3492
- __ j(is_smi, if_false);
4035
+ } else if (check->Equals(isolate()->heap()->function_symbol())) {
4036
+ __ JumpIfSmi(rax, if_false);
4037
+ __ CmpObjectType(rax, FIRST_FUNCTION_CLASS_TYPE, rdx);
4038
+ Split(above_equal, if_true, if_false, fall_through);
4039
+ } else if (check->Equals(isolate()->heap()->object_symbol())) {
4040
+ __ JumpIfSmi(rax, if_false);
3493
4041
  __ CompareRoot(rax, Heap::kNullValueRootIndex);
3494
4042
  __ j(equal, if_true);
3495
- // Regular expressions => 'function', not 'object'.
3496
- __ CmpObjectType(rax, JS_REGEXP_TYPE, rdx);
3497
- __ j(equal, if_false);
4043
+ __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rdx);
4044
+ __ j(below, if_false);
4045
+ __ CmpInstanceType(rdx, FIRST_FUNCTION_CLASS_TYPE);
4046
+ __ j(above_equal, if_false);
3498
4047
  // Check for undetectable objects => false.
3499
4048
  __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
3500
4049
  Immediate(1 << Map::kIsUndetectable));
3501
- __ j(not_zero, if_false);
3502
- // Check for JS objects => true.
3503
- __ CmpInstanceType(rdx, FIRST_JS_OBJECT_TYPE);
3504
- __ j(below, if_false);
3505
- __ CmpInstanceType(rdx, LAST_JS_OBJECT_TYPE);
3506
- Split(below_equal, if_true, if_false, fall_through);
4050
+ Split(zero, if_true, if_false, fall_through);
3507
4051
  } else {
3508
4052
  if (if_false != fall_through) __ jmp(if_false);
3509
4053
  }
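Note: the rewritten typeof checks above fold the old per-type comparisons into range checks on the instance type: anything at or above FIRST_FUNCTION_CLASS_TYPE (functions, and regexps, which are callable) reports "function", while "object" accepts null plus non-undetectable heap objects whose instance type lies in [FIRST_JS_OBJECT_TYPE, FIRST_FUNCTION_CLASS_TYPE). A toy sketch of just those two range checks, with made-up constants standing in for V8's:

#include <string>

// Illustrative ordering only; the real constants come from V8's objects.h.
constexpr int kFirstJSObjectType = 100;
constexpr int kFirstFunctionClassType = 200;  // functions and regexps start here

std::string ClassifyHeapObject(int instance_type, bool undetectable, bool is_null) {
  if (instance_type >= kFirstFunctionClassType) return "function";
  if (is_null) return "object";
  if (instance_type >= kFirstJSObjectType && !undetectable) return "object";
  return "neither";  // handled by the other typeof branches
}

int main() {
  return ClassifyHeapObject(kFirstFunctionClassType, false, false) == "function" ? 0 : 1;
}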
@@ -3672,18 +4216,19 @@ Register FullCodeGenerator::context_register() {
3672
4216
  void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
3673
4217
  ASSERT(mode == RelocInfo::CODE_TARGET ||
3674
4218
  mode == RelocInfo::CODE_TARGET_CONTEXT);
4219
+ Counters* counters = isolate()->counters();
3675
4220
  switch (ic->kind()) {
3676
4221
  case Code::LOAD_IC:
3677
- __ IncrementCounter(&Counters::named_load_full, 1);
4222
+ __ IncrementCounter(counters->named_load_full(), 1);
3678
4223
  break;
3679
4224
  case Code::KEYED_LOAD_IC:
3680
- __ IncrementCounter(&Counters::keyed_load_full, 1);
4225
+ __ IncrementCounter(counters->keyed_load_full(), 1);
3681
4226
  break;
3682
4227
  case Code::STORE_IC:
3683
- __ IncrementCounter(&Counters::named_store_full, 1);
4228
+ __ IncrementCounter(counters->named_store_full(), 1);
3684
4229
  break;
3685
4230
  case Code::KEYED_STORE_IC:
3686
- __ IncrementCounter(&Counters::keyed_store_full, 1);
4231
+ __ IncrementCounter(counters->keyed_store_full(), 1);
3687
4232
  default:
3688
4233
  break;
3689
4234
  }
@@ -3715,6 +4260,23 @@ void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
3715
4260
 
3716
4261
 
3717
4262
  void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
4263
+ Counters* counters = isolate()->counters();
4264
+ switch (ic->kind()) {
4265
+ case Code::LOAD_IC:
4266
+ __ IncrementCounter(counters->named_load_full(), 1);
4267
+ break;
4268
+ case Code::KEYED_LOAD_IC:
4269
+ __ IncrementCounter(counters->keyed_load_full(), 1);
4270
+ break;
4271
+ case Code::STORE_IC:
4272
+ __ IncrementCounter(counters->named_store_full(), 1);
4273
+ break;
4274
+ case Code::KEYED_STORE_IC:
4275
+ __ IncrementCounter(counters->keyed_store_full(), 1);
4276
+ default:
4277
+ break;
4278
+ }
4279
+
3718
4280
  __ call(ic, RelocInfo::CODE_TARGET);
3719
4281
  if (patch_site != NULL && patch_site->is_bound()) {
3720
4282
  patch_site->EmitPatchInfo();