mustang 0.0.1 → 0.1.0

Files changed (560)
  1. data/.rspec +1 -0
  2. data/Isolate +9 -0
  3. data/README.md +6 -12
  4. data/Rakefile +30 -4
  5. data/TODO.md +9 -0
  6. data/ext/v8/extconf.rb +56 -0
  7. data/ext/v8/v8.cpp +37 -0
  8. data/ext/v8/v8_array.cpp +161 -0
  9. data/ext/v8/v8_array.h +17 -0
  10. data/ext/v8/v8_base.cpp +147 -0
  11. data/ext/v8/v8_base.h +23 -0
  12. data/ext/v8/v8_cast.cpp +151 -0
  13. data/ext/v8/v8_cast.h +64 -0
  14. data/ext/v8/v8_context.cpp +174 -0
  15. data/ext/v8/v8_context.h +12 -0
  16. data/ext/v8/v8_date.cpp +61 -0
  17. data/ext/v8/v8_date.h +16 -0
  18. data/ext/v8/v8_errors.cpp +147 -0
  19. data/ext/v8/v8_errors.h +19 -0
  20. data/ext/v8/v8_external.cpp +66 -0
  21. data/ext/v8/v8_external.h +16 -0
  22. data/ext/v8/v8_function.cpp +182 -0
  23. data/ext/v8/v8_function.h +14 -0
  24. data/ext/v8/v8_integer.cpp +70 -0
  25. data/ext/v8/v8_integer.h +16 -0
  26. data/ext/v8/v8_macros.h +30 -0
  27. data/ext/v8/v8_main.cpp +53 -0
  28. data/ext/v8/v8_main.h +13 -0
  29. data/ext/v8/v8_number.cpp +62 -0
  30. data/ext/v8/v8_number.h +16 -0
  31. data/ext/v8/v8_object.cpp +172 -0
  32. data/ext/v8/v8_object.h +17 -0
  33. data/ext/v8/v8_ref.cpp +72 -0
  34. data/ext/v8/v8_ref.h +43 -0
  35. data/ext/v8/v8_regexp.cpp +148 -0
  36. data/ext/v8/v8_regexp.h +16 -0
  37. data/ext/v8/v8_string.cpp +78 -0
  38. data/ext/v8/v8_string.h +16 -0
  39. data/ext/v8/v8_value.cpp +370 -0
  40. data/ext/v8/v8_value.h +19 -0
  41. data/gemspec.yml +2 -1
  42. data/lib/core_ext/class.rb +14 -0
  43. data/lib/core_ext/object.rb +12 -0
  44. data/lib/core_ext/symbol.rb +23 -0
  45. data/lib/mustang.rb +44 -0
  46. data/lib/mustang/context.rb +69 -0
  47. data/lib/mustang/errors.rb +36 -0
  48. data/lib/support/delegated.rb +25 -0
  49. data/lib/v8/array.rb +21 -0
  50. data/lib/v8/context.rb +13 -0
  51. data/lib/v8/date.rb +20 -0
  52. data/lib/v8/error.rb +15 -0
  53. data/lib/v8/external.rb +16 -0
  54. data/lib/v8/function.rb +11 -0
  55. data/lib/v8/integer.rb +16 -0
  56. data/lib/v8/number.rb +16 -0
  57. data/lib/v8/object.rb +66 -0
  58. data/lib/v8/regexp.rb +23 -0
  59. data/lib/v8/string.rb +27 -0
  60. data/mustang.gemspec +3 -0
  61. data/spec/core_ext/class_spec.rb +19 -0
  62. data/spec/core_ext/object_spec.rb +19 -0
  63. data/spec/core_ext/symbol_spec.rb +27 -0
  64. data/spec/fixtures/test1.js +2 -0
  65. data/spec/fixtures/test2.js +2 -0
  66. data/spec/spec_helper.rb +20 -0
  67. data/spec/v8/array_spec.rb +88 -0
  68. data/spec/v8/cast_spec.rb +151 -0
  69. data/spec/v8/context_spec.rb +78 -0
  70. data/spec/v8/data_spec.rb +39 -0
  71. data/spec/v8/date_spec.rb +45 -0
  72. data/spec/v8/empty_spec.rb +27 -0
  73. data/spec/v8/errors_spec.rb +142 -0
  74. data/spec/v8/external_spec.rb +44 -0
  75. data/spec/v8/function_spec.rb +170 -0
  76. data/spec/v8/integer_spec.rb +41 -0
  77. data/spec/v8/main_spec.rb +18 -0
  78. data/spec/v8/null_spec.rb +27 -0
  79. data/spec/v8/number_spec.rb +40 -0
  80. data/spec/v8/object_spec.rb +79 -0
  81. data/spec/v8/primitive_spec.rb +9 -0
  82. data/spec/v8/regexp_spec.rb +65 -0
  83. data/spec/v8/string_spec.rb +48 -0
  84. data/spec/v8/undefined_spec.rb +27 -0
  85. data/spec/v8/value_spec.rb +215 -0
  86. data/vendor/v8/.gitignore +2 -0
  87. data/vendor/v8/AUTHORS +3 -1
  88. data/vendor/v8/ChangeLog +117 -0
  89. data/vendor/v8/SConstruct +334 -53
  90. data/vendor/v8/include/v8-debug.h +21 -11
  91. data/vendor/v8/include/v8-preparser.h +1 -1
  92. data/vendor/v8/include/v8-profiler.h +122 -43
  93. data/vendor/v8/include/v8-testing.h +5 -0
  94. data/vendor/v8/include/v8.h +171 -17
  95. data/vendor/v8/preparser/SConscript +38 -0
  96. data/vendor/v8/preparser/preparser-process.cc +77 -114
  97. data/vendor/v8/samples/shell.cc +232 -46
  98. data/vendor/v8/src/SConscript +29 -5
  99. data/vendor/v8/src/accessors.cc +70 -211
  100. data/vendor/v8/{test/cctest/test-mips.cc → src/allocation-inl.h} +15 -18
  101. data/vendor/v8/src/allocation.cc +0 -82
  102. data/vendor/v8/src/allocation.h +9 -42
  103. data/vendor/v8/src/api.cc +1645 -1156
  104. data/vendor/v8/src/api.h +76 -12
  105. data/vendor/v8/src/apiutils.h +0 -7
  106. data/vendor/v8/src/arguments.h +15 -4
  107. data/vendor/v8/src/arm/assembler-arm-inl.h +10 -9
  108. data/vendor/v8/src/arm/assembler-arm.cc +62 -23
  109. data/vendor/v8/src/arm/assembler-arm.h +76 -11
  110. data/vendor/v8/src/arm/builtins-arm.cc +39 -33
  111. data/vendor/v8/src/arm/code-stubs-arm.cc +1182 -402
  112. data/vendor/v8/src/arm/code-stubs-arm.h +20 -54
  113. data/vendor/v8/src/arm/codegen-arm.cc +159 -106
  114. data/vendor/v8/src/arm/codegen-arm.h +6 -6
  115. data/vendor/v8/src/arm/constants-arm.h +16 -1
  116. data/vendor/v8/src/arm/cpu-arm.cc +7 -5
  117. data/vendor/v8/src/arm/debug-arm.cc +6 -4
  118. data/vendor/v8/src/arm/deoptimizer-arm.cc +51 -14
  119. data/vendor/v8/src/arm/disasm-arm.cc +47 -15
  120. data/vendor/v8/src/arm/frames-arm.h +1 -1
  121. data/vendor/v8/src/arm/full-codegen-arm.cc +724 -408
  122. data/vendor/v8/src/arm/ic-arm.cc +90 -85
  123. data/vendor/v8/src/arm/lithium-arm.cc +140 -69
  124. data/vendor/v8/src/arm/lithium-arm.h +161 -46
  125. data/vendor/v8/src/arm/lithium-codegen-arm.cc +567 -297
  126. data/vendor/v8/src/arm/lithium-codegen-arm.h +21 -9
  127. data/vendor/v8/src/arm/lithium-gap-resolver-arm.cc +2 -0
  128. data/vendor/v8/src/arm/macro-assembler-arm.cc +457 -96
  129. data/vendor/v8/src/arm/macro-assembler-arm.h +115 -18
  130. data/vendor/v8/src/arm/regexp-macro-assembler-arm.cc +20 -13
  131. data/vendor/v8/src/arm/regexp-macro-assembler-arm.h +1 -0
  132. data/vendor/v8/src/arm/simulator-arm.cc +184 -101
  133. data/vendor/v8/src/arm/simulator-arm.h +26 -21
  134. data/vendor/v8/src/arm/stub-cache-arm.cc +450 -467
  135. data/vendor/v8/src/arm/virtual-frame-arm.cc +14 -12
  136. data/vendor/v8/src/arm/virtual-frame-arm.h +11 -8
  137. data/vendor/v8/src/array.js +35 -18
  138. data/vendor/v8/src/assembler.cc +186 -92
  139. data/vendor/v8/src/assembler.h +106 -69
  140. data/vendor/v8/src/ast-inl.h +5 -0
  141. data/vendor/v8/src/ast.cc +46 -35
  142. data/vendor/v8/src/ast.h +107 -50
  143. data/vendor/v8/src/atomicops.h +2 -0
  144. data/vendor/v8/src/atomicops_internals_mips_gcc.h +169 -0
  145. data/vendor/v8/src/bootstrapper.cc +649 -399
  146. data/vendor/v8/src/bootstrapper.h +94 -27
  147. data/vendor/v8/src/builtins.cc +359 -227
  148. data/vendor/v8/src/builtins.h +157 -123
  149. data/vendor/v8/src/checks.cc +2 -2
  150. data/vendor/v8/src/checks.h +4 -0
  151. data/vendor/v8/src/code-stubs.cc +27 -17
  152. data/vendor/v8/src/code-stubs.h +38 -17
  153. data/vendor/v8/src/codegen-inl.h +5 -1
  154. data/vendor/v8/src/codegen.cc +27 -17
  155. data/vendor/v8/src/codegen.h +9 -9
  156. data/vendor/v8/src/compilation-cache.cc +92 -206
  157. data/vendor/v8/src/compilation-cache.h +205 -30
  158. data/vendor/v8/src/compiler.cc +107 -120
  159. data/vendor/v8/src/compiler.h +17 -2
  160. data/vendor/v8/src/contexts.cc +22 -15
  161. data/vendor/v8/src/contexts.h +14 -8
  162. data/vendor/v8/src/conversions.cc +86 -30
  163. data/vendor/v8/src/counters.cc +19 -4
  164. data/vendor/v8/src/counters.h +28 -16
  165. data/vendor/v8/src/cpu-profiler-inl.h +4 -3
  166. data/vendor/v8/src/cpu-profiler.cc +123 -72
  167. data/vendor/v8/src/cpu-profiler.h +33 -19
  168. data/vendor/v8/src/cpu.h +2 -0
  169. data/vendor/v8/src/d8-debug.cc +3 -3
  170. data/vendor/v8/src/d8-debug.h +7 -6
  171. data/vendor/v8/src/d8-posix.cc +2 -0
  172. data/vendor/v8/src/d8.cc +22 -12
  173. data/vendor/v8/src/d8.gyp +3 -0
  174. data/vendor/v8/src/d8.js +618 -0
  175. data/vendor/v8/src/data-flow.h +3 -3
  176. data/vendor/v8/src/dateparser.h +4 -2
  177. data/vendor/v8/src/debug-agent.cc +10 -9
  178. data/vendor/v8/src/debug-agent.h +9 -11
  179. data/vendor/v8/src/debug-debugger.js +121 -0
  180. data/vendor/v8/src/debug.cc +331 -227
  181. data/vendor/v8/src/debug.h +248 -219
  182. data/vendor/v8/src/deoptimizer.cc +173 -62
  183. data/vendor/v8/src/deoptimizer.h +119 -19
  184. data/vendor/v8/src/disasm.h +3 -0
  185. data/vendor/v8/src/disassembler.cc +10 -9
  186. data/vendor/v8/src/execution.cc +185 -129
  187. data/vendor/v8/src/execution.h +47 -78
  188. data/vendor/v8/src/extensions/experimental/break-iterator.cc +250 -0
  189. data/vendor/v8/src/extensions/experimental/break-iterator.h +89 -0
  190. data/vendor/v8/src/extensions/experimental/experimental.gyp +2 -0
  191. data/vendor/v8/src/extensions/experimental/i18n-extension.cc +22 -2
  192. data/vendor/v8/src/extensions/externalize-string-extension.cc +2 -2
  193. data/vendor/v8/src/extensions/gc-extension.cc +1 -1
  194. data/vendor/v8/src/factory.cc +261 -154
  195. data/vendor/v8/src/factory.h +162 -158
  196. data/vendor/v8/src/flag-definitions.h +17 -11
  197. data/vendor/v8/src/frame-element.cc +0 -5
  198. data/vendor/v8/src/frame-element.h +9 -13
  199. data/vendor/v8/src/frames-inl.h +7 -0
  200. data/vendor/v8/src/frames.cc +56 -46
  201. data/vendor/v8/src/frames.h +36 -25
  202. data/vendor/v8/src/full-codegen.cc +15 -24
  203. data/vendor/v8/src/full-codegen.h +13 -41
  204. data/vendor/v8/src/func-name-inferrer.cc +7 -6
  205. data/vendor/v8/src/func-name-inferrer.h +1 -1
  206. data/vendor/v8/src/gdb-jit.cc +1 -0
  207. data/vendor/v8/src/global-handles.cc +118 -56
  208. data/vendor/v8/src/global-handles.h +98 -40
  209. data/vendor/v8/src/globals.h +2 -2
  210. data/vendor/v8/src/handles-inl.h +106 -9
  211. data/vendor/v8/src/handles.cc +220 -157
  212. data/vendor/v8/src/handles.h +38 -59
  213. data/vendor/v8/src/hashmap.h +3 -3
  214. data/vendor/v8/src/heap-inl.h +141 -25
  215. data/vendor/v8/src/heap-profiler.cc +117 -63
  216. data/vendor/v8/src/heap-profiler.h +38 -21
  217. data/vendor/v8/src/heap.cc +805 -564
  218. data/vendor/v8/src/heap.h +640 -594
  219. data/vendor/v8/src/hydrogen-instructions.cc +216 -73
  220. data/vendor/v8/src/hydrogen-instructions.h +259 -124
  221. data/vendor/v8/src/hydrogen.cc +996 -1171
  222. data/vendor/v8/src/hydrogen.h +163 -144
  223. data/vendor/v8/src/ia32/assembler-ia32-inl.h +12 -11
  224. data/vendor/v8/src/ia32/assembler-ia32.cc +85 -39
  225. data/vendor/v8/src/ia32/assembler-ia32.h +82 -16
  226. data/vendor/v8/src/ia32/builtins-ia32.cc +64 -58
  227. data/vendor/v8/src/ia32/code-stubs-ia32.cc +248 -324
  228. data/vendor/v8/src/ia32/code-stubs-ia32.h +3 -44
  229. data/vendor/v8/src/ia32/codegen-ia32.cc +217 -165
  230. data/vendor/v8/src/ia32/codegen-ia32.h +3 -0
  231. data/vendor/v8/src/ia32/cpu-ia32.cc +6 -5
  232. data/vendor/v8/src/ia32/debug-ia32.cc +8 -5
  233. data/vendor/v8/src/ia32/deoptimizer-ia32.cc +124 -14
  234. data/vendor/v8/src/ia32/disasm-ia32.cc +85 -62
  235. data/vendor/v8/src/ia32/frames-ia32.h +1 -1
  236. data/vendor/v8/src/ia32/full-codegen-ia32.cc +348 -435
  237. data/vendor/v8/src/ia32/ic-ia32.cc +91 -91
  238. data/vendor/v8/src/ia32/lithium-codegen-ia32.cc +500 -255
  239. data/vendor/v8/src/ia32/lithium-codegen-ia32.h +13 -4
  240. data/vendor/v8/src/ia32/lithium-gap-resolver-ia32.cc +6 -0
  241. data/vendor/v8/src/ia32/lithium-ia32.cc +122 -45
  242. data/vendor/v8/src/ia32/lithium-ia32.h +128 -41
  243. data/vendor/v8/src/ia32/macro-assembler-ia32.cc +109 -84
  244. data/vendor/v8/src/ia32/macro-assembler-ia32.h +18 -9
  245. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.cc +26 -15
  246. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.h +1 -0
  247. data/vendor/v8/src/ia32/register-allocator-ia32.cc +30 -30
  248. data/vendor/v8/src/ia32/simulator-ia32.h +4 -4
  249. data/vendor/v8/src/ia32/stub-cache-ia32.cc +383 -400
  250. data/vendor/v8/src/ia32/virtual-frame-ia32.cc +36 -13
  251. data/vendor/v8/src/ia32/virtual-frame-ia32.h +11 -5
  252. data/vendor/v8/src/ic-inl.h +12 -2
  253. data/vendor/v8/src/ic.cc +304 -221
  254. data/vendor/v8/src/ic.h +115 -58
  255. data/vendor/v8/src/interpreter-irregexp.cc +25 -21
  256. data/vendor/v8/src/interpreter-irregexp.h +2 -1
  257. data/vendor/v8/src/isolate.cc +883 -0
  258. data/vendor/v8/src/isolate.h +1304 -0
  259. data/vendor/v8/src/json.js +10 -10
  260. data/vendor/v8/src/jsregexp.cc +111 -80
  261. data/vendor/v8/src/jsregexp.h +6 -7
  262. data/vendor/v8/src/jump-target-heavy.cc +5 -8
  263. data/vendor/v8/src/jump-target-heavy.h +0 -6
  264. data/vendor/v8/src/jump-target-inl.h +1 -1
  265. data/vendor/v8/src/jump-target-light.cc +3 -3
  266. data/vendor/v8/src/lithium-allocator-inl.h +2 -0
  267. data/vendor/v8/src/lithium-allocator.cc +42 -30
  268. data/vendor/v8/src/lithium-allocator.h +8 -22
  269. data/vendor/v8/src/lithium.cc +1 -0
  270. data/vendor/v8/src/liveedit.cc +141 -99
  271. data/vendor/v8/src/liveedit.h +7 -2
  272. data/vendor/v8/src/liveobjectlist-inl.h +90 -0
  273. data/vendor/v8/src/liveobjectlist.cc +2537 -1
  274. data/vendor/v8/src/liveobjectlist.h +245 -35
  275. data/vendor/v8/src/log-utils.cc +122 -35
  276. data/vendor/v8/src/log-utils.h +33 -36
  277. data/vendor/v8/src/log.cc +299 -241
  278. data/vendor/v8/src/log.h +177 -110
  279. data/vendor/v8/src/mark-compact.cc +612 -470
  280. data/vendor/v8/src/mark-compact.h +153 -80
  281. data/vendor/v8/src/messages.cc +16 -14
  282. data/vendor/v8/src/messages.js +30 -7
  283. data/vendor/v8/src/mips/assembler-mips-inl.h +155 -35
  284. data/vendor/v8/src/mips/assembler-mips.cc +1093 -219
  285. data/vendor/v8/src/mips/assembler-mips.h +552 -153
  286. data/vendor/v8/src/mips/builtins-mips.cc +43 -100
  287. data/vendor/v8/src/mips/code-stubs-mips.cc +752 -0
  288. data/vendor/v8/src/mips/code-stubs-mips.h +511 -0
  289. data/vendor/v8/src/mips/codegen-mips-inl.h +8 -14
  290. data/vendor/v8/src/mips/codegen-mips.cc +672 -896
  291. data/vendor/v8/src/mips/codegen-mips.h +271 -69
  292. data/vendor/v8/src/mips/constants-mips.cc +44 -20
  293. data/vendor/v8/src/mips/constants-mips.h +238 -40
  294. data/vendor/v8/src/mips/cpu-mips.cc +20 -3
  295. data/vendor/v8/src/mips/debug-mips.cc +35 -7
  296. data/vendor/v8/src/mips/deoptimizer-mips.cc +91 -0
  297. data/vendor/v8/src/mips/disasm-mips.cc +329 -93
  298. data/vendor/v8/src/mips/frames-mips.cc +2 -50
  299. data/vendor/v8/src/mips/frames-mips.h +24 -9
  300. data/vendor/v8/src/mips/full-codegen-mips.cc +473 -23
  301. data/vendor/v8/src/mips/ic-mips.cc +81 -45
  302. data/vendor/v8/src/mips/jump-target-mips.cc +11 -106
  303. data/vendor/v8/src/mips/lithium-codegen-mips.h +65 -0
  304. data/vendor/v8/src/mips/lithium-mips.h +304 -0
  305. data/vendor/v8/src/mips/macro-assembler-mips.cc +2391 -390
  306. data/vendor/v8/src/mips/macro-assembler-mips.h +718 -121
  307. data/vendor/v8/src/mips/regexp-macro-assembler-mips.cc +478 -0
  308. data/vendor/v8/src/mips/regexp-macro-assembler-mips.h +250 -0
  309. data/vendor/v8/src/mips/register-allocator-mips-inl.h +0 -3
  310. data/vendor/v8/src/mips/register-allocator-mips.h +3 -2
  311. data/vendor/v8/src/mips/simulator-mips.cc +1009 -221
  312. data/vendor/v8/src/mips/simulator-mips.h +119 -36
  313. data/vendor/v8/src/mips/stub-cache-mips.cc +331 -148
  314. data/vendor/v8/src/mips/{fast-codegen-mips.cc → virtual-frame-mips-inl.h} +11 -30
  315. data/vendor/v8/src/mips/virtual-frame-mips.cc +137 -149
  316. data/vendor/v8/src/mips/virtual-frame-mips.h +294 -312
  317. data/vendor/v8/src/mirror-debugger.js +9 -8
  318. data/vendor/v8/src/mksnapshot.cc +2 -2
  319. data/vendor/v8/src/objects-debug.cc +16 -16
  320. data/vendor/v8/src/objects-inl.h +421 -195
  321. data/vendor/v8/src/objects-printer.cc +7 -7
  322. data/vendor/v8/src/objects-visiting.cc +1 -1
  323. data/vendor/v8/src/objects-visiting.h +33 -12
  324. data/vendor/v8/src/objects.cc +935 -658
  325. data/vendor/v8/src/objects.h +234 -139
  326. data/vendor/v8/src/parser.cc +484 -439
  327. data/vendor/v8/src/parser.h +35 -14
  328. data/vendor/v8/src/platform-cygwin.cc +173 -107
  329. data/vendor/v8/src/platform-freebsd.cc +224 -72
  330. data/vendor/v8/src/platform-linux.cc +234 -95
  331. data/vendor/v8/src/platform-macos.cc +215 -82
  332. data/vendor/v8/src/platform-nullos.cc +9 -3
  333. data/vendor/v8/src/platform-openbsd.cc +22 -7
  334. data/vendor/v8/src/platform-posix.cc +30 -5
  335. data/vendor/v8/src/platform-solaris.cc +120 -38
  336. data/vendor/v8/src/platform-tls-mac.h +62 -0
  337. data/vendor/v8/src/platform-tls-win32.h +62 -0
  338. data/vendor/v8/src/platform-tls.h +50 -0
  339. data/vendor/v8/src/platform-win32.cc +195 -97
  340. data/vendor/v8/src/platform.h +72 -15
  341. data/vendor/v8/src/preparse-data.cc +2 -0
  342. data/vendor/v8/src/preparser-api.cc +8 -2
  343. data/vendor/v8/src/preparser.cc +1 -1
  344. data/vendor/v8/src/prettyprinter.cc +43 -52
  345. data/vendor/v8/src/prettyprinter.h +1 -1
  346. data/vendor/v8/src/profile-generator-inl.h +0 -28
  347. data/vendor/v8/src/profile-generator.cc +942 -685
  348. data/vendor/v8/src/profile-generator.h +210 -176
  349. data/vendor/v8/src/property.cc +6 -0
  350. data/vendor/v8/src/property.h +14 -3
  351. data/vendor/v8/src/regexp-macro-assembler-irregexp.cc +1 -1
  352. data/vendor/v8/src/regexp-macro-assembler.cc +28 -19
  353. data/vendor/v8/src/regexp-macro-assembler.h +11 -6
  354. data/vendor/v8/src/regexp-stack.cc +18 -10
  355. data/vendor/v8/src/regexp-stack.h +45 -21
  356. data/vendor/v8/src/regexp.js +3 -3
  357. data/vendor/v8/src/register-allocator-inl.h +3 -3
  358. data/vendor/v8/src/register-allocator.cc +1 -7
  359. data/vendor/v8/src/register-allocator.h +5 -15
  360. data/vendor/v8/src/rewriter.cc +2 -1
  361. data/vendor/v8/src/runtime-profiler.cc +158 -128
  362. data/vendor/v8/src/runtime-profiler.h +131 -15
  363. data/vendor/v8/src/runtime.cc +2409 -1692
  364. data/vendor/v8/src/runtime.h +93 -17
  365. data/vendor/v8/src/safepoint-table.cc +3 -0
  366. data/vendor/v8/src/safepoint-table.h +9 -3
  367. data/vendor/v8/src/scanner-base.cc +21 -28
  368. data/vendor/v8/src/scanner-base.h +22 -11
  369. data/vendor/v8/src/scanner.cc +3 -5
  370. data/vendor/v8/src/scanner.h +4 -2
  371. data/vendor/v8/src/scopeinfo.cc +11 -16
  372. data/vendor/v8/src/scopeinfo.h +26 -15
  373. data/vendor/v8/src/scopes.cc +67 -37
  374. data/vendor/v8/src/scopes.h +26 -12
  375. data/vendor/v8/src/serialize.cc +193 -154
  376. data/vendor/v8/src/serialize.h +41 -36
  377. data/vendor/v8/src/small-pointer-list.h +163 -0
  378. data/vendor/v8/src/snapshot-common.cc +1 -1
  379. data/vendor/v8/src/snapshot.h +3 -1
  380. data/vendor/v8/src/spaces-inl.h +30 -25
  381. data/vendor/v8/src/spaces.cc +263 -370
  382. data/vendor/v8/src/spaces.h +178 -166
  383. data/vendor/v8/src/string-search.cc +4 -3
  384. data/vendor/v8/src/string-search.h +21 -20
  385. data/vendor/v8/src/string-stream.cc +32 -24
  386. data/vendor/v8/src/string.js +7 -7
  387. data/vendor/v8/src/stub-cache.cc +324 -248
  388. data/vendor/v8/src/stub-cache.h +181 -155
  389. data/vendor/v8/src/token.cc +3 -3
  390. data/vendor/v8/src/token.h +3 -3
  391. data/vendor/v8/src/top.cc +218 -390
  392. data/vendor/v8/src/type-info.cc +98 -32
  393. data/vendor/v8/src/type-info.h +10 -3
  394. data/vendor/v8/src/unicode.cc +1 -1
  395. data/vendor/v8/src/unicode.h +1 -1
  396. data/vendor/v8/src/utils.h +3 -0
  397. data/vendor/v8/src/v8-counters.cc +18 -11
  398. data/vendor/v8/src/v8-counters.h +34 -13
  399. data/vendor/v8/src/v8.cc +66 -121
  400. data/vendor/v8/src/v8.h +7 -4
  401. data/vendor/v8/src/v8globals.h +18 -12
  402. data/vendor/v8/src/{memory.h → v8memory.h} +0 -0
  403. data/vendor/v8/src/v8natives.js +59 -18
  404. data/vendor/v8/src/v8threads.cc +127 -114
  405. data/vendor/v8/src/v8threads.h +42 -35
  406. data/vendor/v8/src/v8utils.h +2 -39
  407. data/vendor/v8/src/variables.h +1 -1
  408. data/vendor/v8/src/version.cc +26 -5
  409. data/vendor/v8/src/version.h +4 -0
  410. data/vendor/v8/src/virtual-frame-heavy-inl.h +2 -4
  411. data/vendor/v8/src/virtual-frame-light-inl.h +5 -4
  412. data/vendor/v8/src/vm-state-inl.h +21 -17
  413. data/vendor/v8/src/vm-state.h +7 -5
  414. data/vendor/v8/src/win32-headers.h +1 -0
  415. data/vendor/v8/src/x64/assembler-x64-inl.h +12 -11
  416. data/vendor/v8/src/x64/assembler-x64.cc +80 -40
  417. data/vendor/v8/src/x64/assembler-x64.h +67 -17
  418. data/vendor/v8/src/x64/builtins-x64.cc +34 -33
  419. data/vendor/v8/src/x64/code-stubs-x64.cc +636 -377
  420. data/vendor/v8/src/x64/code-stubs-x64.h +14 -48
  421. data/vendor/v8/src/x64/codegen-x64-inl.h +1 -1
  422. data/vendor/v8/src/x64/codegen-x64.cc +158 -136
  423. data/vendor/v8/src/x64/codegen-x64.h +4 -1
  424. data/vendor/v8/src/x64/cpu-x64.cc +7 -5
  425. data/vendor/v8/src/x64/debug-x64.cc +8 -6
  426. data/vendor/v8/src/x64/deoptimizer-x64.cc +195 -20
  427. data/vendor/v8/src/x64/disasm-x64.cc +42 -23
  428. data/vendor/v8/src/x64/frames-x64.cc +1 -1
  429. data/vendor/v8/src/x64/frames-x64.h +2 -2
  430. data/vendor/v8/src/x64/full-codegen-x64.cc +780 -218
  431. data/vendor/v8/src/x64/ic-x64.cc +77 -79
  432. data/vendor/v8/src/x64/jump-target-x64.cc +1 -1
  433. data/vendor/v8/src/x64/lithium-codegen-x64.cc +698 -181
  434. data/vendor/v8/src/x64/lithium-codegen-x64.h +31 -6
  435. data/vendor/v8/src/x64/lithium-x64.cc +136 -54
  436. data/vendor/v8/src/x64/lithium-x64.h +142 -51
  437. data/vendor/v8/src/x64/macro-assembler-x64.cc +456 -187
  438. data/vendor/v8/src/x64/macro-assembler-x64.h +166 -34
  439. data/vendor/v8/src/x64/regexp-macro-assembler-x64.cc +44 -28
  440. data/vendor/v8/src/x64/regexp-macro-assembler-x64.h +8 -4
  441. data/vendor/v8/src/x64/register-allocator-x64-inl.h +3 -3
  442. data/vendor/v8/src/x64/register-allocator-x64.cc +12 -8
  443. data/vendor/v8/src/x64/simulator-x64.h +5 -5
  444. data/vendor/v8/src/x64/stub-cache-x64.cc +299 -344
  445. data/vendor/v8/src/x64/virtual-frame-x64.cc +37 -13
  446. data/vendor/v8/src/x64/virtual-frame-x64.h +13 -7
  447. data/vendor/v8/src/zone-inl.h +49 -3
  448. data/vendor/v8/src/zone.cc +42 -41
  449. data/vendor/v8/src/zone.h +37 -34
  450. data/vendor/v8/test/benchmarks/testcfg.py +100 -0
  451. data/vendor/v8/test/cctest/SConscript +5 -4
  452. data/vendor/v8/test/cctest/cctest.h +3 -2
  453. data/vendor/v8/test/cctest/cctest.status +6 -11
  454. data/vendor/v8/test/cctest/test-accessors.cc +3 -3
  455. data/vendor/v8/test/cctest/test-alloc.cc +39 -33
  456. data/vendor/v8/test/cctest/test-api.cc +1092 -205
  457. data/vendor/v8/test/cctest/test-assembler-arm.cc +39 -25
  458. data/vendor/v8/test/cctest/test-assembler-ia32.cc +36 -37
  459. data/vendor/v8/test/cctest/test-assembler-mips.cc +1098 -40
  460. data/vendor/v8/test/cctest/test-assembler-x64.cc +32 -25
  461. data/vendor/v8/test/cctest/test-ast.cc +1 -0
  462. data/vendor/v8/test/cctest/test-circular-queue.cc +8 -5
  463. data/vendor/v8/test/cctest/test-compiler.cc +24 -24
  464. data/vendor/v8/test/cctest/test-cpu-profiler.cc +140 -5
  465. data/vendor/v8/test/cctest/test-dataflow.cc +1 -0
  466. data/vendor/v8/test/cctest/test-debug.cc +136 -77
  467. data/vendor/v8/test/cctest/test-decls.cc +1 -1
  468. data/vendor/v8/test/cctest/test-deoptimization.cc +25 -24
  469. data/vendor/v8/test/cctest/test-disasm-arm.cc +9 -4
  470. data/vendor/v8/test/cctest/test-disasm-ia32.cc +10 -8
  471. data/vendor/v8/test/cctest/test-func-name-inference.cc +10 -4
  472. data/vendor/v8/test/cctest/test-heap-profiler.cc +226 -164
  473. data/vendor/v8/test/cctest/test-heap.cc +240 -217
  474. data/vendor/v8/test/cctest/test-liveedit.cc +1 -0
  475. data/vendor/v8/test/cctest/test-log-stack-tracer.cc +18 -20
  476. data/vendor/v8/test/cctest/test-log.cc +114 -108
  477. data/vendor/v8/test/cctest/test-macro-assembler-x64.cc +247 -177
  478. data/vendor/v8/test/cctest/test-mark-compact.cc +129 -90
  479. data/vendor/v8/test/cctest/test-parsing.cc +15 -14
  480. data/vendor/v8/test/cctest/test-platform-linux.cc +1 -0
  481. data/vendor/v8/test/cctest/test-platform-tls.cc +66 -0
  482. data/vendor/v8/test/cctest/test-platform-win32.cc +1 -0
  483. data/vendor/v8/test/cctest/test-profile-generator.cc +1 -1
  484. data/vendor/v8/test/cctest/test-regexp.cc +53 -41
  485. data/vendor/v8/test/cctest/test-reloc-info.cc +18 -11
  486. data/vendor/v8/test/cctest/test-serialize.cc +44 -43
  487. data/vendor/v8/test/cctest/test-sockets.cc +8 -3
  488. data/vendor/v8/test/cctest/test-spaces.cc +47 -29
  489. data/vendor/v8/test/cctest/test-strings.cc +20 -20
  490. data/vendor/v8/test/cctest/test-thread-termination.cc +8 -3
  491. data/vendor/v8/test/cctest/test-threads.cc +5 -3
  492. data/vendor/v8/test/cctest/test-utils.cc +5 -4
  493. data/vendor/v8/test/cctest/testcfg.py +7 -3
  494. data/vendor/v8/test/es5conform/es5conform.status +2 -77
  495. data/vendor/v8/test/es5conform/testcfg.py +1 -1
  496. data/vendor/v8/test/message/testcfg.py +1 -1
  497. data/vendor/v8/test/mjsunit/accessors-on-global-object.js +3 -3
  498. data/vendor/v8/test/mjsunit/array-concat.js +43 -1
  499. data/vendor/v8/test/mjsunit/array-join.js +25 -0
  500. data/vendor/v8/test/mjsunit/bitops-info.js +7 -1
  501. data/vendor/v8/test/mjsunit/compiler/array-length.js +2 -2
  502. data/vendor/v8/test/mjsunit/compiler/global-accessors.js +47 -0
  503. data/vendor/v8/test/mjsunit/compiler/pic.js +1 -1
  504. data/vendor/v8/test/mjsunit/compiler/regress-loadfield.js +65 -0
  505. data/vendor/v8/test/mjsunit/math-sqrt.js +5 -1
  506. data/vendor/v8/test/mjsunit/mjsunit.js +59 -8
  507. data/vendor/v8/test/mjsunit/mjsunit.status +0 -12
  508. data/vendor/v8/test/mjsunit/mul-exhaustive.js +129 -11
  509. data/vendor/v8/test/mjsunit/negate-zero.js +1 -1
  510. data/vendor/v8/test/mjsunit/object-freeze.js +5 -13
  511. data/vendor/v8/test/mjsunit/object-prevent-extensions.js +9 -50
  512. data/vendor/v8/test/mjsunit/object-seal.js +4 -13
  513. data/vendor/v8/test/mjsunit/override-eval-with-non-function.js +36 -0
  514. data/vendor/v8/test/mjsunit/regress/regress-1145.js +54 -0
  515. data/vendor/v8/test/mjsunit/regress/regress-1172-bis.js +37 -0
  516. data/vendor/v8/test/mjsunit/regress/regress-1181.js +54 -0
  517. data/vendor/v8/test/mjsunit/regress/regress-1207.js +35 -0
  518. data/vendor/v8/test/mjsunit/regress/regress-1209.js +34 -0
  519. data/vendor/v8/test/mjsunit/regress/regress-1210.js +48 -0
  520. data/vendor/v8/test/mjsunit/regress/regress-1213.js +43 -0
  521. data/vendor/v8/test/mjsunit/regress/regress-1218.js +29 -0
  522. data/vendor/v8/test/mjsunit/regress/regress-1229.js +79 -0
  523. data/vendor/v8/test/mjsunit/regress/regress-1233.js +47 -0
  524. data/vendor/v8/test/mjsunit/regress/regress-1236.js +34 -0
  525. data/vendor/v8/test/mjsunit/regress/regress-1237.js +36 -0
  526. data/vendor/v8/test/mjsunit/regress/regress-1240.js +39 -0
  527. data/vendor/v8/test/mjsunit/regress/regress-1257.js +58 -0
  528. data/vendor/v8/test/mjsunit/regress/regress-1278.js +69 -0
  529. data/vendor/v8/test/mjsunit/regress/regress-create-exception.js +1 -0
  530. data/vendor/v8/test/mjsunit/regress/regress-lazy-deopt-reloc.js +52 -0
  531. data/vendor/v8/test/mjsunit/sin-cos.js +15 -10
  532. data/vendor/v8/test/mjsunit/smi-negative-zero.js +2 -2
  533. data/vendor/v8/test/mjsunit/str-to-num.js +1 -1
  534. data/vendor/v8/test/mjsunit/strict-mode.js +435 -0
  535. data/vendor/v8/test/mjsunit/testcfg.py +23 -6
  536. data/vendor/v8/test/mozilla/mozilla.status +0 -2
  537. data/vendor/v8/test/mozilla/testcfg.py +1 -1
  538. data/vendor/v8/test/preparser/empty.js +28 -0
  539. data/vendor/v8/test/preparser/functions-only.js +38 -0
  540. data/vendor/v8/test/preparser/non-alphanum.js +34 -0
  541. data/vendor/v8/test/preparser/symbols-only.js +49 -0
  542. data/vendor/v8/test/preparser/testcfg.py +90 -0
  543. data/vendor/v8/test/sputnik/testcfg.py +1 -1
  544. data/vendor/v8/test/test262/README +16 -0
  545. data/vendor/v8/test/test262/harness-adapt.js +80 -0
  546. data/vendor/v8/test/test262/test262.status +1506 -0
  547. data/vendor/v8/test/test262/testcfg.py +123 -0
  548. data/vendor/v8/tools/freebsd-tick-processor +10 -0
  549. data/vendor/v8/tools/gyp/v8.gyp +8 -33
  550. data/vendor/v8/tools/linux-tick-processor +5 -3
  551. data/vendor/v8/tools/test.py +37 -14
  552. data/vendor/v8/tools/tickprocessor.js +22 -8
  553. data/vendor/v8/tools/visual_studio/v8_base.vcproj +13 -1
  554. data/vendor/v8/tools/visual_studio/v8_base_arm.vcproj +5 -1
  555. data/vendor/v8/tools/visual_studio/v8_base_x64.vcproj +5 -1
  556. data/vendor/v8/tools/visual_studio/x64.vsprops +1 -0
  557. metadata +1495 -1341
  558. data/ext/extconf.rb +0 -22
  559. data/ext/mustang.cpp +0 -58
  560. data/vendor/v8/src/top.h +0 -608
data/vendor/v8/src/arm/codegen-arm.h +6 -6
@@ -268,10 +268,10 @@ class CodeGenerator: public AstVisitor {
   static int GetInlinedKeyedLoadInstructionsAfterPatch() {
     return FLAG_debug_code ? 32 : 13;
   }
-  static const int kInlinedKeyedStoreInstructionsAfterPatch = 5;
+  static const int kInlinedKeyedStoreInstructionsAfterPatch = 8;
   static int GetInlinedNamedStoreInstructionsAfterPatch() {
-    ASSERT(inlined_write_barrier_size_ != -1);
-    return inlined_write_barrier_size_ + 4;
+    ASSERT(Isolate::Current()->inlined_write_barrier_size() != -1);
+    return Isolate::Current()->inlined_write_barrier_size() + 4;
   }

  private:
@@ -287,6 +287,7 @@ class CodeGenerator: public AstVisitor {
   // Accessors
   inline bool is_eval();
   inline Scope* scope();
+  inline bool is_strict_mode();
   inline StrictModeFlag strict_mode_flag();

   // Generating deferred code.
@@ -575,15 +576,14 @@ class CodeGenerator: public AstVisitor {
   // to some unlinking code).
   bool function_return_is_shadowed_;

-  // Size of inlined write barriers generated by EmitNamedStore.
-  static int inlined_write_barrier_size_;
-
   friend class VirtualFrame;
+  friend class Isolate;
   friend class JumpTarget;
   friend class Reference;
   friend class FastCodeGenerator;
   friend class FullCodeGenerator;
   friend class FullCodeGenSyntaxChecker;
+  friend class InlineRuntimeFunctionsTable;
   friend class LCodeGen;

   DISALLOW_COPY_AND_ASSIGN(CodeGenerator);
data/vendor/v8/src/arm/constants-arm.h +16 -1
@@ -89,6 +89,11 @@
 namespace v8 {
 namespace internal {

+// Constant pool marker.
+static const int kConstantPoolMarkerMask = 0xffe00000;
+static const int kConstantPoolMarker = 0x0c000000;
+static const int kConstantPoolLengthMask = 0x001ffff;
+
 // Number of registers in normal ARM mode.
 static const int kNumRegisters = 16;

@@ -385,9 +390,14 @@ enum VFPConversionMode {
   kDefaultRoundToZero = 1
 };

+// This mask does not include the "inexact" or "input denormal" cumulative
+// exceptions flags, because we usually don't want to check for it.
 static const uint32_t kVFPExceptionMask = 0xf;
+static const uint32_t kVFPInvalidOpExceptionBit = 1 << 0;
+static const uint32_t kVFPOverflowExceptionBit = 1 << 2;
+static const uint32_t kVFPUnderflowExceptionBit = 1 << 3;
+static const uint32_t kVFPInexactExceptionBit = 1 << 4;
 static const uint32_t kVFPFlushToZeroMask = 1 << 24;
-static const uint32_t kVFPInvalidExceptionBit = 1;

 static const uint32_t kVFPNConditionFlagBit = 1 << 31;
 static const uint32_t kVFPZConditionFlagBit = 1 << 30;
@@ -411,6 +421,11 @@ enum VFPRoundingMode {

 static const uint32_t kVFPRoundingModeMask = 3 << 22;

+enum CheckForInexactConversion {
+  kCheckForInexactConversion,
+  kDontCheckForInexactConversion
+};
+
 // -----------------------------------------------------------------------------
 // Hints.

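A note on the three constant pool values added above: they describe a marker word placed at the start of an embedded constant pool, and the disassembler hunks further down (Decoder::IsConstantPoolAt / ConstantPoolSizeAt) decode it with exactly this mask-and-compare. A self-contained sketch of that decoding, with made-up instruction words for illustration (not V8 code):

#include <cstdint>
#include <cstdio>

// Copies of the constants introduced in the hunk above.
static const uint32_t kConstantPoolMarkerMask = 0xffe00000;
static const uint32_t kConstantPoolMarker = 0x0c000000;
static const uint32_t kConstantPoolLengthMask = 0x001ffff;

// Same contract as the new Decoder::ConstantPoolSizeAt: the encoded pool
// length, or -1 if the word is not a constant pool marker.
int ConstantPoolSizeAt(uint32_t instruction_bits) {
  if ((instruction_bits & kConstantPoolMarkerMask) == kConstantPoolMarker) {
    return instruction_bits & kConstantPoolLengthMask;
  }
  return -1;
}

int main() {
  std::printf("%d\n", ConstantPoolSizeAt(0x0c000010));  // 16: a marker word
  std::printf("%d\n", ConstantPoolSizeAt(0xe1a00000));  // -1: mov r0, r0
  return 0;
}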
data/vendor/v8/src/arm/cpu-arm.cc +7 -5
@@ -42,10 +42,12 @@ namespace v8 {
 namespace internal {

 void CPU::Setup() {
-  CpuFeatures::Probe(true);
-  if (!CpuFeatures::IsSupported(VFP3) || Serializer::enabled()) {
-    V8::DisableCrankshaft();
-  }
+  CpuFeatures::Probe();
+}
+
+
+bool CPU::SupportsCrankshaft() {
+  return CpuFeatures::IsSupported(VFP3);
 }


@@ -61,7 +63,7 @@ void CPU::FlushICache(void* start, size_t size) {
   // that the Icache was flushed.
   // None of this code ends up in the snapshot so there are no issues
   // around whether or not to generate the code when building snapshots.
-  Simulator::FlushICache(start, size);
+  Simulator::FlushICache(Isolate::Current()->simulator_i_cache(), start, size);
 #else
   // Ideally, we would call
   //   syscall(__ARM_NR_cacheflush, start,
data/vendor/v8/src/arm/debug-arm.cc +6 -4
@@ -65,7 +65,7 @@ void BreakLocationIterator::SetDebugBreakAtReturn() {
   patcher.masm()->mov(v8::internal::lr, v8::internal::pc);
   patcher.masm()->ldr(v8::internal::pc, MemOperand(v8::internal::pc, -4));
 #endif
-  patcher.Emit(Debug::debug_break_return()->entry());
+  patcher.Emit(Isolate::Current()->debug()->debug_break_return()->entry());
   patcher.masm()->bkpt(0);
 }

@@ -115,7 +115,7 @@ void BreakLocationIterator::SetDebugBreakAtSlot() {
   patcher.masm()->mov(v8::internal::lr, v8::internal::pc);
   patcher.masm()->ldr(v8::internal::pc, MemOperand(v8::internal::pc, -4));
 #endif
-  patcher.Emit(Debug::debug_break_return()->entry());
+  patcher.Emit(Isolate::Current()->debug()->debug_break_slot()->entry());
 }


@@ -159,7 +159,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
   __ RecordComment("// Calling from debug break to runtime - come in - over");
 #endif
   __ mov(r0, Operand(0, RelocInfo::NONE));  // no arguments
-  __ mov(r1, Operand(ExternalReference::debug_break()));
+  __ mov(r1, Operand(ExternalReference::debug_break(masm->isolate())));

   CEntryStub ceb(1);
   __ CallStub(&ceb);
@@ -185,7 +185,9 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
   // Now that the break point has been handled, resume normal execution by
   // jumping to the target address intended by the caller and that was
   // overwritten by the address of DebugBreakXXX.
-  __ mov(ip, Operand(ExternalReference(Debug_Address::AfterBreakTarget())));
+  ExternalReference after_break_target =
+      ExternalReference(Debug_Address::AfterBreakTarget(), masm->isolate());
+  __ mov(ip, Operand(after_break_target));
   __ ldr(ip, MemOperand(ip));
   __ Jump(ip);
 }
data/vendor/v8/src/arm/deoptimizer-arm.cc +51 -14
@@ -44,8 +44,14 @@ int Deoptimizer::patch_size() {
 }


+void Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code) {
+  // Nothing to do. No new relocation information is written for lazy
+  // deoptimization on ARM.
+}
+

 void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
+  HandleScope scope;
   AssertNoAllocation no_allocation;

   if (!function->IsOptimized()) return;
@@ -69,8 +75,6 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
   int deoptimization_index = safepoint_entry.deoptimization_index();
   int gap_code_size = safepoint_entry.gap_code_size();
   // Check that we did not shoot past next safepoint.
-  // TODO(srdjan): How do we guarantee that safepoint code does not
-  // overlap other safepoint patching code?
   CHECK(pc_offset >= last_pc_offset);
 #ifdef DEBUG
   // Destroy the code which is not supposed to be run again.
@@ -107,8 +111,9 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {

   // Add the deoptimizing code to the list.
   DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code);
-  node->set_next(deoptimizing_code_list_);
-  deoptimizing_code_list_ = node;
+  DeoptimizerData* data = code->GetIsolate()->deoptimizer_data();
+  node->set_next(data->deoptimizing_code_list_);
+  data->deoptimizing_code_list_ = node;

   // Set the code for the function to non-optimized version.
   function->ReplaceCode(function->shared()->code());
@@ -117,6 +122,11 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
     PrintF("[forced deoptimization: ");
     function->PrintName();
     PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function));
+#ifdef DEBUG
+    if (FLAG_print_code) {
+      code->PrintLn();
+    }
+#endif
   }
 }

@@ -278,14 +288,33 @@ void Deoptimizer::DoComputeOsrOutputFrame() {

   // There are no translation commands for the caller's pc and fp, the
   // context, and the function. Set them up explicitly.
-  for (int i = 0; ok && i < 4; i++) {
+  for (int i = StandardFrameConstants::kCallerPCOffset;
+       ok && i >= StandardFrameConstants::kMarkerOffset;
+       i -= kPointerSize) {
     uint32_t input_value = input_->GetFrameSlot(input_offset);
     if (FLAG_trace_osr) {
-      PrintF("    [sp + %d] <- 0x%08x ; [sp + %d] (fixed part)\n",
+      const char* name = "UNKNOWN";
+      switch (i) {
+        case StandardFrameConstants::kCallerPCOffset:
+          name = "caller's pc";
+          break;
+        case StandardFrameConstants::kCallerFPOffset:
+          name = "fp";
+          break;
+        case StandardFrameConstants::kContextOffset:
+          name = "context";
+          break;
+        case StandardFrameConstants::kMarkerOffset:
+          name = "function";
+          break;
+      }
+      PrintF("    [sp + %d] <- 0x%08x ; [sp + %d] (fixed part - %s)\n",
             output_offset,
             input_value,
-            input_offset);
+            input_offset,
+            name);
    }
+
    output_[0]->SetFrameSlot(output_offset, input_->GetFrameSlot(input_offset));
    input_offset -= kPointerSize;
    output_offset -= kPointerSize;
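For reference, the rewritten loop above covers the same four fixed frame slots as the old for (int i = 0; ok && i < 4; i++) loop, just addressed by FP-relative offset so each slot can be named in the --trace-osr output. A sketch using the StandardFrameConstants values as they were in 32-bit V8 of this era (assumed here; they are not shown in this diff):

#include <cstdio>

// Assumed offsets, in bytes, relative to the frame pointer.
const int kPointerSize = 4;
const int kCallerPCOffset = 1 * kPointerSize;   //  +4: caller's pc
const int kCallerFPOffset = 0 * kPointerSize;   //   0: caller's fp
const int kContextOffset = -1 * kPointerSize;   //  -4: context
const int kMarkerOffset = -2 * kPointerSize;    //  -8: function (marker)

int main() {
  // Visits +4, 0, -4, -8: exactly four iterations, matching the old loop.
  for (int i = kCallerPCOffset; i >= kMarkerOffset; i -= kPointerSize) {
    std::printf("fixed slot at fp%+d\n", i);
  }
  return 0;
}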
@@ -311,7 +340,7 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
                    optimized_code_->entry() + pc_offset);
     output_[0]->SetPc(pc);
   }
-  Code* continuation = Builtins::builtin(Builtins::NotifyOSR);
+  Code* continuation = isolate_->builtins()->builtin(Builtins::kNotifyOSR);
   output_[0]->SetContinuation(
       reinterpret_cast<uint32_t>(continuation->entry()));

@@ -485,11 +514,13 @@ void Deoptimizer::DoComputeFrame(TranslationIterator* iterator,
       FullCodeGenerator::StateField::decode(pc_and_state);
   output_frame->SetState(Smi::FromInt(state));

+
   // Set the continuation for the topmost frame.
   if (is_topmost) {
+    Builtins* builtins = isolate_->builtins();
     Code* continuation = (bailout_type_ == EAGER)
-        ? Builtins::builtin(Builtins::NotifyDeoptimized)
-        : Builtins::builtin(Builtins::NotifyLazyDeoptimized);
+        ? builtins->builtin(Builtins::kNotifyDeoptimized)
+        : builtins->builtin(Builtins::kNotifyLazyDeoptimized);
     output_frame->SetContinuation(
         reinterpret_cast<uint32_t>(continuation->entry()));
   }
@@ -505,6 +536,9 @@ void Deoptimizer::DoComputeFrame(TranslationIterator* iterator,
 //   easily ported.
 void Deoptimizer::EntryGenerator::Generate() {
   GeneratePrologue();
+
+  Isolate* isolate = masm()->isolate();
+
   CpuFeatures::Scope scope(VFP3);
   // Save all general purpose registers before messing with them.
   const int kNumberOfRegisters = Register::kNumRegisters;
@@ -552,14 +586,16 @@ void Deoptimizer::EntryGenerator::Generate() {

   // Allocate a new deoptimizer object.
   // Pass four arguments in r0 to r3 and fifth argument on stack.
-  __ PrepareCallCFunction(5, r5);
+  __ PrepareCallCFunction(6, r5);
   __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   __ mov(r1, Operand(type()));  // bailout type,
   // r2: bailout id already loaded.
   // r3: code address or 0 already loaded.
   __ str(r4, MemOperand(sp, 0 * kPointerSize));  // Fp-to-sp delta.
+  __ mov(r5, Operand(ExternalReference::isolate_address()));
+  __ str(r5, MemOperand(sp, 1 * kPointerSize));  // Isolate.
   // Call Deoptimizer::New().
-  __ CallCFunction(ExternalReference::new_deoptimizer_function(), 5);
+  __ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 6);

   // Preserve "deoptimizer" object in register r0 and get the input
   // frame descriptor pointer to r1 (deoptimizer->input_);
@@ -613,7 +649,8 @@ void Deoptimizer::EntryGenerator::Generate() {
   // r0: deoptimizer object; r1: scratch.
   __ PrepareCallCFunction(1, r1);
   // Call Deoptimizer::ComputeOutputFrames().
-  __ CallCFunction(ExternalReference::compute_output_frames_function(), 1);
+  __ CallCFunction(
+      ExternalReference::compute_output_frames_function(isolate), 1);
   __ pop(r0);  // Restore deoptimizer object (class Deoptimizer).

   // Replace the current (input) frame with the output frames.
@@ -663,7 +700,7 @@ void Deoptimizer::EntryGenerator::Generate() {
   __ pop(ip);  // remove lr

   // Set up the roots register.
-  ExternalReference roots_address = ExternalReference::roots_address();
+  ExternalReference roots_address = ExternalReference::roots_address(isolate);
   __ mov(r10, Operand(roots_address));

   __ pop(ip);  // remove pc
data/vendor/v8/src/arm/disasm-arm.cc +47 -15
@@ -89,6 +89,9 @@ class Decoder {
   // Returns the length of the disassembled machine instruction in bytes.
   int InstructionDecode(byte* instruction);

+  static bool IsConstantPoolAt(byte* instr_ptr);
+  static int ConstantPoolSizeAt(byte* instr_ptr);
+
  private:
   // Bottleneck functions to print into the out_buffer.
   void PrintChar(const char ch);
@@ -899,6 +902,7 @@ void Decoder::DecodeType2(Instruction* instr) {
     case da_x: {
       if (instr->HasW()) {
         Unknown(instr);  // not used in V8
+        return;
       }
       Format(instr, "'memop'cond'b 'rd, ['rn], #-'off12");
       break;
@@ -906,6 +910,7 @@ void Decoder::DecodeType2(Instruction* instr) {
     case ia_x: {
       if (instr->HasW()) {
         Unknown(instr);  // not used in V8
+        return;
       }
       Format(instr, "'memop'cond'b 'rd, ['rn], #+'off12");
       break;
@@ -992,11 +997,15 @@ void Decoder::DecodeType3(Instruction* instr) {


 void Decoder::DecodeType4(Instruction* instr) {
-  ASSERT(instr->Bit(22) == 0);  // Privileged mode currently not supported.
-  if (instr->HasL()) {
-    Format(instr, "ldm'cond'pu 'rn'w, 'rlist");
+  if (instr->Bit(22) != 0) {
+    // Privileged mode currently not supported.
+    Unknown(instr);
   } else {
-    Format(instr, "stm'cond'pu 'rn'w, 'rlist");
+    if (instr->HasL()) {
+      Format(instr, "ldm'cond'pu 'rn'w, 'rlist");
+    } else {
+      Format(instr, "stm'cond'pu 'rn'w, 'rlist");
+    }
   }
 }

@@ -1042,6 +1051,8 @@ int Decoder::DecodeType7(Instruction* instr) {
 // vmov: Rt = Sn
 // vcvt: Dd = Sm
 // vcvt: Sd = Dm
+// Dd = vabs(Dm)
+// Dd = vneg(Dm)
 // Dd = vadd(Dn, Dm)
 // Dd = vsub(Dn, Dm)
 // Dd = vmul(Dn, Dm)
@@ -1067,6 +1078,9 @@ void Decoder::DecodeTypeVFP(Instruction* instr) {
     } else if ((instr->Opc2Value() == 0x0) && (instr->Opc3Value() == 0x3)) {
       // vabs
       Format(instr, "vabs'cond 'Dd, 'Dm");
+    } else if ((instr->Opc2Value() == 0x1) && (instr->Opc3Value() == 0x1)) {
+      // vneg
+      Format(instr, "vneg'cond 'Dd, 'Dm");
     } else if ((instr->Opc2Value() == 0x7) && (instr->Opc3Value() == 0x3)) {
       DecodeVCVTBetweenDoubleAndSingle(instr);
     } else if ((instr->Opc2Value() == 0x8) && (instr->Opc3Value() & 0x1)) {
@@ -1294,7 +1308,23 @@ void Decoder::DecodeType6CoprocessorIns(Instruction* instr) {
         break;
     }
   } else {
-    UNIMPLEMENTED();  // Not used by V8.
+    Unknown(instr);  // Not used by V8.
+  }
+}
+
+
+bool Decoder::IsConstantPoolAt(byte* instr_ptr) {
+  int instruction_bits = *(reinterpret_cast<int*>(instr_ptr));
+  return (instruction_bits & kConstantPoolMarkerMask) == kConstantPoolMarker;
+}
+
+
+int Decoder::ConstantPoolSizeAt(byte* instr_ptr) {
+  if (IsConstantPoolAt(instr_ptr)) {
+    int instruction_bits = *(reinterpret_cast<int*>(instr_ptr));
+    return instruction_bits & kConstantPoolLengthMask;
+  } else {
+    return -1;
   }
 }

@@ -1307,7 +1337,15 @@ int Decoder::InstructionDecode(byte* instr_ptr) {
                                    "%08x ",
                                    instr->InstructionBits());
   if (instr->ConditionField() == kSpecialCondition) {
-    UNIMPLEMENTED();
+    Unknown(instr);
+    return Instruction::kInstrSize;
+  }
+  int instruction_bits = *(reinterpret_cast<int*>(instr_ptr));
+  if ((instruction_bits & kConstantPoolMarkerMask) == kConstantPoolMarker) {
+    out_buffer_pos_ += OS::SNPrintF(out_buffer_ + out_buffer_pos_,
+                                    "constant pool begin (length %d)",
+                                    instruction_bits &
+                                    kConstantPoolLengthMask);
     return Instruction::kInstrSize;
   }
   switch (instr->TypeValue()) {
@@ -1359,9 +1397,8 @@ namespace disasm {


 const char* NameConverter::NameOfAddress(byte* addr) const {
-  static v8::internal::EmbeddedVector<char, 32> tmp_buffer;
-  v8::internal::OS::SNPrintF(tmp_buffer, "%p", addr);
-  return tmp_buffer.start();
+  v8::internal::OS::SNPrintF(tmp_buffer_, "%p", addr);
+  return tmp_buffer_.start();
 }


@@ -1411,12 +1448,7 @@ int Disassembler::InstructionDecode(v8::internal::Vector<char> buffer,


 int Disassembler::ConstantPoolSizeAt(byte* instruction) {
-  int instruction_bits = *(reinterpret_cast<int*>(instruction));
-  if ((instruction_bits & 0xfff00000) == 0x03000000) {
-    return instruction_bits & 0x0000ffff;
-  } else {
-    return -1;
-  }
+  return v8::internal::Decoder::ConstantPoolSizeAt(instruction);
 }

data/vendor/v8/src/arm/frames-arm.h +1 -1
@@ -136,7 +136,7 @@ class JavaScriptFrameConstants : public AllStatic {
  public:
   // FP-relative.
   static const int kLocal0Offset = StandardFrameConstants::kExpressionsOffset;
-  static const int kSavedRegistersOffset = +2 * kPointerSize;
+  static const int kLastParameterOffset = +2 * kPointerSize;
   static const int kFunctionOffset = StandardFrameConstants::kMarkerOffset;

   // Caller SP-relative.
data/vendor/v8/src/arm/full-codegen-arm.cc +724 -408
@@ -210,13 +210,18 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
     // function, receiver address, parameter count.
     // The stub will rewrite receiever and parameter count if the previous
     // stack frame was an arguments adapter frame.
-    ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
+    ArgumentsAccessStub stub(
+        is_strict_mode() ? ArgumentsAccessStub::NEW_STRICT
+                         : ArgumentsAccessStub::NEW_NON_STRICT);
     __ CallStub(&stub);
-    // Duplicate the value; move-to-slot operation might clobber registers.
-    __ mov(r3, r0);
+
+    Variable* arguments_shadow = scope()->arguments_shadow();
+    if (arguments_shadow != NULL) {
+      // Duplicate the value; move-to-slot operation might clobber registers.
+      __ mov(r3, r0);
+      Move(arguments_shadow->AsSlot(), r3, r1, r2);
+    }
     Move(arguments->AsSlot(), r0, r1, r2);
-    Slot* dot_arguments_slot = scope()->arguments_shadow()->AsSlot();
-    Move(dot_arguments_slot, r3, r1, r2);
   }

   if (FLAG_trace) {
@@ -339,23 +344,6 @@ void FullCodeGenerator::EmitReturnSequence() {
 }


-FullCodeGenerator::ConstantOperand FullCodeGenerator::GetConstantOperand(
-    Token::Value op, Expression* left, Expression* right) {
-  ASSERT(ShouldInlineSmiCase(op));
-  if (op == Token::DIV || op == Token::MOD || op == Token::MUL) {
-    // We never generate inlined constant smi operations for these.
-    return kNoConstants;
-  } else if (right->IsSmiLiteral()) {
-    return kRightConstant;
-  } else if (left->IsSmiLiteral() && !Token::IsShiftOp(op)) {
-    // Don't inline shifts with constant left hand side.
-    return kLeftConstant;
-  } else {
-    return kNoConstants;
-  }
-}
-
-
 void FullCodeGenerator::EffectContext::Plug(Slot* slot) const {
 }

@@ -574,13 +562,38 @@ void FullCodeGenerator::TestContext::Plug(bool flag) const {
 void FullCodeGenerator::DoTest(Label* if_true,
                                Label* if_false,
                                Label* fall_through) {
-  // Call the runtime to find the boolean value of the source and then
-  // translate it into control flow to the pair of labels.
-  __ push(result_register());
-  __ CallRuntime(Runtime::kToBool, 1);
-  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
-  __ cmp(r0, ip);
-  Split(eq, if_true, if_false, fall_through);
+  if (CpuFeatures::IsSupported(VFP3)) {
+    CpuFeatures::Scope scope(VFP3);
+    // Emit the inlined tests assumed by the stub.
+    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
+    __ cmp(result_register(), ip);
+    __ b(eq, if_false);
+    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
+    __ cmp(result_register(), ip);
+    __ b(eq, if_true);
+    __ LoadRoot(ip, Heap::kFalseValueRootIndex);
+    __ cmp(result_register(), ip);
+    __ b(eq, if_false);
+    STATIC_ASSERT(kSmiTag == 0);
+    __ tst(result_register(), result_register());
+    __ b(eq, if_false);
+    __ JumpIfSmi(result_register(), if_true);
+
+    // Call the ToBoolean stub for all other cases.
+    ToBooleanStub stub(result_register());
+    __ CallStub(&stub);
+    __ tst(result_register(), result_register());
+  } else {
+    // Call the runtime to find the boolean value of the source and then
+    // translate it into control flow to the pair of labels.
+    __ push(result_register());
+    __ CallRuntime(Runtime::kToBool, 1);
+    __ LoadRoot(ip, Heap::kFalseValueRootIndex);
+    __ cmp(r0, ip);
+  }
+
+  // The stub returns nonzero for true.
+  Split(ne, if_true, if_false, fall_through);
 }

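Both arms of the new DoTest above fall through to the single Split(ne, ...): the VFP3 path tests the stub's nonzero-for-true result with tst, and the fallback compares r0 against the false root, so in each case the ne condition selects the true branch. The branch order of the fast path, modeled as plain C++ (toy types; only the structure mirrors the generated code):

#include <cstdio>

// Toy stand-ins; none of this is V8 API.
enum class Tag { kUndefined, kTrue, kFalse, kSmi, kHeapObject };
struct Value { Tag tag; int smi; };

bool CallToBooleanStub(const Value&) { return true; }  // stub placeholder

bool ToBooleanFast(const Value& v) {
  if (v.tag == Tag::kUndefined) return false;          // b(eq, if_false)
  if (v.tag == Tag::kTrue) return true;                // b(eq, if_true)
  if (v.tag == Tag::kFalse) return false;              // b(eq, if_false)
  if (v.tag == Tag::kSmi && v.smi == 0) return false;  // zero word: smi 0
  if (v.tag == Tag::kSmi) return true;                 // any other smi
  return CallToBooleanStub(v);                         // heap objects
}

int main() {
  std::printf("%d\n", ToBooleanFast({Tag::kSmi, 7}));  // 1
  std::printf("%d\n", ToBooleanFast({Tag::kSmi, 0}));  // 0
  return 0;
}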
@@ -768,7 +781,9 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
            prop->key()->AsLiteral()->handle()->IsSmi());
     __ mov(r1, Operand(prop->key()->AsLiteral()->handle()));

-    Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+    Handle<Code> ic = is_strict_mode()
+        ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
+        : isolate()->builtins()->KeyedStoreIC_Initialize();
     EmitCallIC(ic, RelocInfo::CODE_TARGET);
     // Value in r0 is ignored (declarations are statements).
   }
@@ -784,10 +799,11 @@ void FullCodeGenerator::VisitDeclaration(Declaration* decl) {
 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
   // Call the runtime to declare the globals.
   // The context is the first argument.
-  __ mov(r1, Operand(pairs));
-  __ mov(r0, Operand(Smi::FromInt(is_eval() ? 1 : 0)));
-  __ Push(cp, r1, r0);
-  __ CallRuntime(Runtime::kDeclareGlobals, 3);
+  __ mov(r2, Operand(pairs));
+  __ mov(r1, Operand(Smi::FromInt(is_eval() ? 1 : 0)));
+  __ mov(r0, Operand(Smi::FromInt(strict_mode_flag())));
+  __ Push(cp, r2, r1, r0);
+  __ CallRuntime(Runtime::kDeclareGlobals, 4);
   // Return value is ignored.
 }

@@ -796,9 +812,9 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
   Comment cmnt(masm_, "[ SwitchStatement");
   Breakable nested_statement(this, stmt);
   SetStatementPosition(stmt);
+
   // Keep the switch value on the stack until a case matches.
   VisitForStackValue(stmt->tag());
-
   PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

   ZoneList<CaseClause*>* clauses = stmt->cases();
@@ -864,6 +880,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
     Comment cmnt(masm_, "[ Case body");
     CaseClause* clause = clauses->at(i);
     __ bind(clause->body_target()->entry_label());
+    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
     VisitStatements(clause->statements());
   }

@@ -887,8 +904,9 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
   __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
   __ cmp(r0, ip);
   __ b(eq, &exit);
-  __ LoadRoot(ip, Heap::kNullValueRootIndex);
-  __ cmp(r0, ip);
+  Register null_value = r5;
+  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
+  __ cmp(r0, null_value);
   __ b(eq, &exit);

   // Convert the object to a JS object.
@@ -902,12 +920,62 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
   __ bind(&done_convert);
   __ push(r0);

-  // BUG(867): Check cache validity in generated code. This is a fast
-  // case for the JSObject::IsSimpleEnum cache validity checks. If we
-  // cannot guarantee cache validity, call the runtime system to check
-  // cache validity or get the property names in a fixed array.
+  // Check cache validity in generated code. This is a fast case for
+  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
+  // guarantee cache validity, call the runtime system to check cache
+  // validity or get the property names in a fixed array.
+  Label next, call_runtime;
+  // Preload a couple of values used in the loop.
+  Register empty_fixed_array_value = r6;
+  __ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
+  Register empty_descriptor_array_value = r7;
+  __ LoadRoot(empty_descriptor_array_value,
+              Heap::kEmptyDescriptorArrayRootIndex);
+  __ mov(r1, r0);
+  __ bind(&next);
+
+  // Check that there are no elements. Register r1 contains the
+  // current JS object we've reached through the prototype chain.
+  __ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset));
+  __ cmp(r2, empty_fixed_array_value);
+  __ b(ne, &call_runtime);
+
+  // Check that instance descriptors are not empty so that we can
+  // check for an enum cache. Leave the map in r2 for the subsequent
+  // prototype load.
+  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
+  __ ldr(r3, FieldMemOperand(r2, Map::kInstanceDescriptorsOffset));
+  __ cmp(r3, empty_descriptor_array_value);
+  __ b(eq, &call_runtime);
+
+  // Check that there is an enum cache in the non-empty instance
+  // descriptors (r3). This is the case if the next enumeration
+  // index field does not contain a smi.
+  __ ldr(r3, FieldMemOperand(r3, DescriptorArray::kEnumerationIndexOffset));
+  __ JumpIfSmi(r3, &call_runtime);
+
+  // For all objects but the receiver, check that the cache is empty.
+  Label check_prototype;
+  __ cmp(r1, r0);
+  __ b(eq, &check_prototype);
+  __ ldr(r3, FieldMemOperand(r3, DescriptorArray::kEnumCacheBridgeCacheOffset));
+  __ cmp(r3, empty_fixed_array_value);
+  __ b(ne, &call_runtime);
+
+  // Load the prototype from the map and loop if non-null.
+  __ bind(&check_prototype);
+  __ ldr(r1, FieldMemOperand(r2, Map::kPrototypeOffset));
+  __ cmp(r1, null_value);
+  __ b(ne, &next);
+
+  // The enum cache is valid. Load the map of the object being
+  // iterated over and use the cache for the iteration.
+  Label use_cache;
+  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
+  __ b(&use_cache);

   // Get the set of properties to enumerate.
+  __ bind(&call_runtime);
   __ push(r0);  // Duplicate the enumerable object on the stack.
   __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

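The long block above inlines the cache validity walk that previously happened only at runtime (the JSObject::IsSimpleEnum checks): every object on the prototype chain must have no elements and non-empty instance descriptors with an enum cache, and every object except the receiver must have an empty enum cache. Its shape, modeled as plain C++ (hypothetical types, not V8 API):

// Field names paraphrase the offsets read by the generated code.
struct Object;
struct Map {
  Object* prototype;       // Map::kPrototypeOffset; null_value ends the chain
  bool descriptors_empty;  // Map::kInstanceDescriptorsOffset == empty array
  bool enum_index_is_smi;  // smi enumeration index => no enum cache
};
struct Object {
  Map* map;                // HeapObject::kMapOffset
  bool elements_empty;     // JSObject::kElementsOffset == empty fixed array
  bool enum_cache_empty;   // enum cache bridge cache == empty fixed array
};

// true  -> use_cache: iterate with the receiver map's enum cache
// false -> call_runtime: Runtime::kGetPropertyNamesFast
bool CanUseEnumCache(Object* receiver, Object* null_value) {
  for (Object* o = receiver; o != null_value; o = o->map->prototype) {
    if (!o->elements_empty) return false;
    if (o->map->descriptors_empty) return false;
    if (o->map->enum_index_is_smi) return false;
    if (o != receiver && !o->enum_cache_empty) return false;
  }
  return true;
}

int main() {
  Map null_map = {nullptr, true, true};
  Object null_object = {&null_map, true, true};
  Map receiver_map = {&null_object, false, false};
  Object receiver = {&receiver_map, true, true};
  return CanUseEnumCache(&receiver, &null_object) ? 0 : 1;  // exits 0
}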
@@ -922,6 +990,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
922
990
  __ b(ne, &fixed_array);
923
991
 
924
992
  // We got a map in register r0. Get the enumeration cache from it.
993
+ __ bind(&use_cache);
925
994
  __ ldr(r1, FieldMemOperand(r0, Map::kInstanceDescriptorsOffset));
926
995
  __ ldr(r1, FieldMemOperand(r1, DescriptorArray::kEnumerationIndexOffset));
927
996
  __ ldr(r2, FieldMemOperand(r1, DescriptorArray::kEnumCacheBridgeCacheOffset));
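The fast path added here walks the prototype chain and trusts the receiver map's enum cache only when every object on the chain passes the same three checks. A minimal C++ sketch of that walk, using hypothetical stand-in types rather than V8's real object model:

// Sketch of the enum-cache validity walk emitted above; the structs are
// toy stand-ins, not V8's real classes.
struct JSObject;

struct DescriptorArray {
  bool has_enum_cache;   // above: enumeration-index field is not a smi
  bool cache_is_empty;   // above: enum cache bridge slot == empty array
};

struct Map {
  DescriptorArray* descriptors;  // nullptr stands in for "empty"
  JSObject* prototype;           // nullptr terminates the chain
};

struct JSObject {
  bool has_elements;             // above: elements != empty fixed array
  Map* map;
};

// true  -> iterate straight out of the receiver map's enum cache
// false -> the call_runtime path (kGetPropertyNamesFast)
bool CanUseEnumCache(JSObject* receiver) {
  for (JSObject* current = receiver; current != nullptr;
       current = current->map->prototype) {
    if (current->has_elements) return false;
    DescriptorArray* descriptors = current->map->descriptors;
    if (descriptors == nullptr || !descriptors->has_enum_cache) return false;
    // Every object except the receiver must have an empty cache.
    if (current != receiver && !descriptors->cache_is_empty) return false;
  }
  return true;
}

Only the receiver may have a populated cache: a populated cache on a prototype would indicate enumerable properties further up the chain, which this fast path cannot handle.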
@@ -1010,11 +1079,17 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                         bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
- // space for nested functions that don't need literals cloning.
- if (scope()->is_function_scope() &&
-     info->num_literals() == 0 &&
-     !pretenure) {
-   FastNewClosureStub stub;
+ // space for nested functions that don't need literals cloning. If
+ // we're running with the --always-opt or the --prepare-always-opt
+ // flag, we need to use the runtime function so that the new function
+ // we are creating here gets a chance to have its code optimized and
+ // doesn't just get a copy of the existing unoptimized code.
+ if (!FLAG_always_opt &&
+     !FLAG_prepare_always_opt &&
+     !pretenure &&
+     scope()->is_function_scope() &&
+     info->num_literals() == 0) {
+   FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode);
    __ mov(r0, Operand(info));
    __ push(r0);
    __ CallStub(&stub);
@@ -1111,7 +1186,8 @@ void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
                        ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
                                                          slow));
    __ mov(r0, Operand(key_literal->handle()));
-   Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+   Handle<Code> ic =
+       isolate()->builtins()->KeyedLoadIC_Initialize();
    EmitCallIC(ic, RelocInfo::CODE_TARGET);
    __ jmp(done);
  }
@@ -1177,7 +1253,7 @@ void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
      ? RelocInfo::CODE_TARGET
      : RelocInfo::CODE_TARGET_CONTEXT;
- Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  EmitCallIC(ic, mode);
  }

@@ -1195,7 +1271,7 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var) {
  // object (receiver) in r0.
  __ ldr(r0, GlobalObjectOperand());
  __ mov(r2, Operand(var->name()));
- Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
  context()->Plug(r0);

@@ -1254,7 +1330,7 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var) {
  __ mov(r0, Operand(key_literal->handle()));

  // Call keyed load IC. It has arguments key and receiver in r0 and r1.
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+ Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  EmitCallIC(ic, RelocInfo::CODE_TARGET);
  context()->Plug(r0);
  }
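Many hunks in this file make the same mechanical change: static lookups such as Builtins::builtin(Builtins::LoadIC_Initialize) become accessors on the current isolate. Reduced to a toy C++ sketch (simplified names, not the real V8 classes), the refactor is a move from process-global state to per-instance state:

#include <cstdio>

// Before: one implicit global VM. After: each Isolate owns its own
// builtins table, so several VMs can coexist in one process.
struct Builtins {
  const char* LoadIC_Initialize() const { return "LoadIC stub"; }
};

class Isolate {
 public:
  const Builtins* builtins() const { return &builtins_; }
 private:
  Builtins builtins_;
};

int main() {
  Isolate a, b;  // two independent instances, no shared static state
  std::printf("%s %s\n", a.builtins()->LoadIC_Initialize(),
              b.builtins()->LoadIC_Initialize());
}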
@@ -1265,18 +1341,19 @@ void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
+ // r5 = materialized value (RegExp literal)
  // r4 = JS function, literals array
  // r3 = literal index
  // r2 = RegExp pattern
  // r1 = RegExp flags
- // r0 = temp + materialized value (RegExp literal)
+ // r0 = RegExp literal clone
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
- __ ldr(r0, FieldMemOperand(r4, literal_offset));
+ __ ldr(r5, FieldMemOperand(r4, literal_offset));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
- __ cmp(r0, ip);
+ __ cmp(r5, ip);
  __ b(ne, &materialized);

  // Create regexp literal using runtime function.
@@ -1286,20 +1363,27 @@ void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  __ mov(r1, Operand(expr->flags()));
  __ Push(r4, r3, r2, r1);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
+ __ mov(r5, r0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
- __ push(r0);
+ Label allocated, runtime_allocate;
+ __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
+ __ jmp(&allocated);
+
+ __ bind(&runtime_allocate);
+ __ push(r5);
  __ mov(r0, Operand(Smi::FromInt(size)));
  __ push(r0);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
+ __ pop(r5);

+ __ bind(&allocated);
  // After this, registers are used as follows:
  // r0: Newly allocated regexp.
- // r1: Materialized regexp.
+ // r5: Materialized regexp.
  // r2: temp.
- __ pop(r1);
- __ CopyFields(r0, r1, r2.bit(), size / kPointerSize);
+ __ CopyFields(r0, r5, r2.bit(), size / kPointerSize);
  context()->Plug(r0);
  }
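The new code clones the materialized regexp with an inline new-space allocation and a flat field copy, falling back to the runtime only when the bump allocation fails. A self-contained C++ sketch of that shape, with a toy allocator standing in for V8's heap:

#include <cstddef>
#include <cstdlib>
#include <cstring>

// Toy bump allocator standing in for new space.
static unsigned char new_space[1 << 16];
static std::size_t new_space_top = 0;

void* AllocateInNewSpace(std::size_t size) {       // fast path
  if (new_space_top + size > sizeof(new_space)) return nullptr;
  void* result = new_space + new_space_top;
  new_space_top += size;
  return result;
}

void* RuntimeAllocate(std::size_t size) {          // slow-path stand-in
  return std::malloc(size);
}

// Mirrors the AllocateInNewSpace / runtime_allocate / CopyFields sequence
// above: allocate, then shallow-copy every field of the materialized object.
void* CloneRegExp(const void* materialized, std::size_t size) {
  void* clone = AllocateInNewSpace(size);
  if (clone == nullptr) clone = RuntimeAllocate(size);
  std::memcpy(clone, materialized, size);          // CopyFields
  return clone;
}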

@@ -1310,7 +1394,13 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r1, Operand(expr->constant_properties()));
- __ mov(r0, Operand(Smi::FromInt(expr->fast_elements() ? 1 : 0)));
+ int flags = expr->fast_elements()
+     ? ObjectLiteral::kFastElements
+     : ObjectLiteral::kNoFlags;
+ flags |= expr->has_function()
+     ? ObjectLiteral::kHasFunction
+     : ObjectLiteral::kNoFlags;
+ __ mov(r0, Operand(Smi::FromInt(flags)));
  __ Push(r3, r2, r1, r0);
  if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
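The object-literal call now packs two booleans into one smi-encoded flags word instead of a single 0/1 value. In plain C++, with the constant values assumed (kFastElements = 1, kHasFunction = 2) rather than taken from V8's headers:

#include <cstdint>

enum : int32_t {  // assumed values mirroring ObjectLiteral's flags
  kNoFlags = 0,
  kFastElements = 1 << 0,
  kHasFunction = 1 << 1,
};

// 31-bit smi encoding on 32-bit targets: the value is shifted left by
// one, leaving tag bit 0 == 0.
int32_t SmiFromInt(int32_t value) { return value << 1; }

int32_t LiteralFlags(bool fast_elements, bool has_function) {
  int32_t flags = fast_elements ? kFastElements : kNoFlags;
  flags |= has_function ? kHasFunction : kNoFlags;
  return SmiFromInt(flags);
}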
@@ -1349,7 +1439,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  VisitForAccumulatorValue(value);
  __ mov(r2, Operand(key->handle()));
  __ ldr(r1, MemOperand(sp));
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+ Handle<Code> ic = isolate()->builtins()->StoreIC_Initialize();
  EmitCallIC(ic, RelocInfo::CODE_TARGET);
  PrepareForBailoutForId(key->id(), NO_REGISTERS);
  } else {
@@ -1365,7 +1455,9 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  VisitForStackValue(key);
  VisitForStackValue(value);
  if (property->emit_store()) {
-   __ CallRuntime(Runtime::kSetProperty, 3);
+   __ mov(r0, Operand(Smi::FromInt(NONE)));  // PropertyAttributes
+   __ push(r0);
+   __ CallRuntime(Runtime::kSetProperty, 4);
  } else {
    __ Drop(3);
  }
@@ -1386,6 +1478,13 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  }
  }

+ if (expr->has_function()) {
+   ASSERT(result_saved);
+   __ ldr(r0, MemOperand(sp));
+   __ push(r0);
+   __ CallRuntime(Runtime::kToFastProperties, 1);
+ }
+
  if (result_saved) {
    context()->PlugTOS();
  } else {
@@ -1405,11 +1504,13 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r1, Operand(expr->constant_elements()));
  __ Push(r3, r2, r1);
- if (expr->constant_elements()->map() == Heap::fixed_cow_array_map()) {
+ if (expr->constant_elements()->map() ==
+     isolate()->heap()->fixed_cow_array_map()) {
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
    __ CallStub(&stub);
-   __ IncrementCounter(&Counters::cow_arrays_created_stub, 1, r1, r2);
+   __ IncrementCounter(
+       isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
  } else if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
@@ -1521,36 +1622,29 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
      break;
  }

+ // For compound assignments we need another deoptimization point after the
+ // variable/property load.
  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy()->var());
+         PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
+         PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
+         PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
          break;
      }
    }

-   // For property compound assignments we need another deoptimization
-   // point after the property load.
-   if (property != NULL) {
-     PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
-   }
-
    Token::Value op = expr->binary_op();
-   ConstantOperand constant = ShouldInlineSmiCase(op)
-       ? GetConstantOperand(op, expr->target(), expr->value())
-       : kNoConstants;
-   ASSERT(constant == kRightConstant || constant == kNoConstants);
-   if (constant == kNoConstants) {
-     __ push(r0);  // Left operand goes on the stack.
-     VisitForAccumulatorValue(expr->value());
-   }
+   __ push(r0);  // Left operand goes on the stack.
+   VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
@@ -1562,8 +1656,7 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
                            op,
                            mode,
                            expr->target(),
-                           expr->value(),
-                           constant);
+                           expr->value());
    } else {
      EmitBinaryOp(op, mode);
    }
@@ -1600,7 +1693,7 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  Literal* key = prop->key()->AsLiteral();
  __ mov(r2, Operand(key->handle()));
  // Call load IC. It has arguments receiver and property name r0 and r2.
- Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  EmitCallIC(ic, RelocInfo::CODE_TARGET);
  }

@@ -1608,222 +1701,16 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  // Call keyed load IC. It has arguments key and receiver in r0 and r1.
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+ Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  EmitCallIC(ic, RelocInfo::CODE_TARGET);
  }


- void FullCodeGenerator::EmitConstantSmiAdd(Expression* expr,
-                                            OverwriteMode mode,
-                                            bool left_is_constant_smi,
-                                            Smi* value) {
-   Label call_stub, done;
-   // Optimistically add smi value with unknown object. If result overflows or is
-   // not a smi then we had either a smi overflow or added a smi with a tagged
-   // pointer.
-   __ mov(r1, Operand(value));
-   __ add(r2, r0, r1, SetCC);
-   __ b(vs, &call_stub);
-   JumpPatchSite patch_site(masm_);
-   patch_site.EmitJumpIfNotSmi(r2, &call_stub);
-   __ mov(r0, r2);
-   __ b(&done);
-
-   // Call the shared stub.
-   __ bind(&call_stub);
-   if (!left_is_constant_smi) {
-     __ Swap(r0, r1, r2);
-   }
-   TypeRecordingBinaryOpStub stub(Token::ADD, mode);
-   EmitCallIC(stub.GetCode(), &patch_site);
-
-   __ bind(&done);
-   context()->Plug(r0);
- }
-
-
- void FullCodeGenerator::EmitConstantSmiSub(Expression* expr,
-                                            OverwriteMode mode,
-                                            bool left_is_constant_smi,
-                                            Smi* value) {
-   Label call_stub, done;
-   // Optimistically subtract smi value and unknown object. If result overflows
-   // or is not a smi then we had either a smi overflow or subtraction between a
-   // smi and a tagged pointer.
-   __ mov(r1, Operand(value));
-   if (left_is_constant_smi) {
-     __ sub(r2, r1, r0, SetCC);
-   } else {
-     __ sub(r2, r0, r1, SetCC);
-   }
-   __ b(vs, &call_stub);
-   JumpPatchSite patch_site(masm_);
-   patch_site.EmitJumpIfNotSmi(r2, &call_stub);
-   __ mov(r0, r2);
-   __ b(&done);
-
-   // Call the shared stub.
-   __ bind(&call_stub);
-   if (!left_is_constant_smi) {
-     __ Swap(r0, r1, r2);
-   }
-   TypeRecordingBinaryOpStub stub(Token::SUB, mode);
-   EmitCallIC(stub.GetCode(), &patch_site);
-
-   __ bind(&done);
-   context()->Plug(r0);
- }
-
-
- void FullCodeGenerator::EmitConstantSmiShiftOp(Expression* expr,
-                                                Token::Value op,
-                                                OverwriteMode mode,
-                                                Smi* value) {
-   Label call_stub, smi_case, done;
-   int shift_value = value->value() & 0x1f;
-
-   JumpPatchSite patch_site(masm_);
-   patch_site.EmitJumpIfSmi(r0, &smi_case);
-
-   // Call stub.
-   __ bind(&call_stub);
-   __ mov(r1, r0);
-   __ mov(r0, Operand(value));
-   TypeRecordingBinaryOpStub stub(op, mode);
-   EmitCallIC(stub.GetCode(), &patch_site);
-   __ b(&done);
-
-   // Smi case.
-   __ bind(&smi_case);
-   switch (op) {
-     case Token::SHL:
-       if (shift_value != 0) {
-         __ mov(r1, r0);
-         if (shift_value > 1) {
-           __ mov(r1, Operand(r1, LSL, shift_value - 1));
-         }
-         // Convert int result to smi, checking that it is in int range.
-         __ SmiTag(r1, SetCC);
-         __ b(vs, &call_stub);
-         __ mov(r0, r1);  // Put result back into r0.
-       }
-       break;
-     case Token::SAR:
-       if (shift_value != 0) {
-         __ mov(r0, Operand(r0, ASR, shift_value));
-         __ bic(r0, r0, Operand(kSmiTagMask));
-       }
-       break;
-     case Token::SHR:
-       // SHR must return a positive value. When shifting by 0 or 1 we need to
-       // check that smi tagging the result will not create a negative value.
-       if (shift_value < 2) {
-         __ mov(r2, Operand(shift_value));
-         __ SmiUntag(r1, r0);
-         if (shift_value != 0) {
-           __ mov(r1, Operand(r1, LSR, shift_value));
-         }
-         __ tst(r1, Operand(0xc0000000));
-         __ b(ne, &call_stub);
-         __ SmiTag(r0, r1);  // result in r0.
-       } else {
-         __ SmiUntag(r0);
-         __ mov(r0, Operand(r0, LSR, shift_value));
-         __ SmiTag(r0);
-       }
-       break;
-     default:
-       UNREACHABLE();
-   }
-
-   __ bind(&done);
-   context()->Plug(r0);
- }
-
-
- void FullCodeGenerator::EmitConstantSmiBitOp(Expression* expr,
-                                              Token::Value op,
-                                              OverwriteMode mode,
-                                              Smi* value) {
-   Label smi_case, done;
-
-   JumpPatchSite patch_site(masm_);
-   patch_site.EmitJumpIfSmi(r0, &smi_case);
-
-   // The order of the arguments does not matter for bit-ops with a
-   // constant operand.
-   __ mov(r1, Operand(value));
-   TypeRecordingBinaryOpStub stub(op, mode);
-   EmitCallIC(stub.GetCode(), &patch_site);
-   __ jmp(&done);
-
-   // Smi case.
-   __ bind(&smi_case);
-   __ mov(r1, Operand(value));
-   switch (op) {
-     case Token::BIT_OR:
-       __ orr(r0, r0, Operand(r1));
-       break;
-     case Token::BIT_XOR:
-       __ eor(r0, r0, Operand(r1));
-       break;
-     case Token::BIT_AND:
-       __ and_(r0, r0, Operand(r1));
-       break;
-     default:
-       UNREACHABLE();
-   }
-
-   __ bind(&done);
-   context()->Plug(r0);
- }
-
-
- void FullCodeGenerator::EmitConstantSmiBinaryOp(Expression* expr,
-                                                 Token::Value op,
-                                                 OverwriteMode mode,
-                                                 bool left_is_constant_smi,
-                                                 Smi* value) {
-   switch (op) {
-     case Token::BIT_OR:
-     case Token::BIT_XOR:
-     case Token::BIT_AND:
-       EmitConstantSmiBitOp(expr, op, mode, value);
-       break;
-     case Token::SHL:
-     case Token::SAR:
-     case Token::SHR:
-       ASSERT(!left_is_constant_smi);
-       EmitConstantSmiShiftOp(expr, op, mode, value);
-       break;
-     case Token::ADD:
-       EmitConstantSmiAdd(expr, mode, left_is_constant_smi, value);
-       break;
-     case Token::SUB:
-       EmitConstantSmiSub(expr, mode, left_is_constant_smi, value);
-       break;
-     default:
-       UNREACHABLE();
-   }
- }
-
-
  void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
                                                Token::Value op,
                                                OverwriteMode mode,
                                                Expression* left_expr,
-                                               Expression* right_expr,
-                                               ConstantOperand constant) {
-   if (constant == kRightConstant) {
-     Smi* value = Smi::cast(*right_expr->AsLiteral()->handle());
-     EmitConstantSmiBinaryOp(expr, op, mode, false, value);
-     return;
-   } else if (constant == kLeftConstant) {
-     Smi* value = Smi::cast(*left_expr->AsLiteral()->handle());
-     EmitConstantSmiBinaryOp(expr, op, mode, true, value);
-     return;
-   }
-
+                                               Expression* right_expr) {
  Label done, smi_case, stub_call;

  Register scratch1 = r2;
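The deleted constant-smi helpers, and the inline smi path that remains, all exploit the smi representation: a 31-bit integer v is stored tagged as v << 1, so tagged words can be added directly, and ARM's overflow flag (the b(vs, ...) branches above) catches the out-of-range case. A C++ sketch of the optimistic add, assuming the right operand is a known constant smi as in the removed EmitConstantSmiAdd, and using the GCC/Clang overflow builtin:

#include <cassert>
#include <cstdint>

const int32_t kSmiTagMask = 1;  // low bit: 0 = smi, 1 = heap pointer

bool IsSmi(int32_t word) { return (word & kSmiTagMask) == 0; }
int32_t SmiTag(int32_t value) { return value << 1; }
int32_t SmiUntag(int32_t word) { return word >> 1; }

// Optimistic tagged add. Returns false when the stub path must run:
// either the sum overflowed (the 'vs' branch) or the left operand was
// really a heap pointer, which the smi check on the result catches
// because the right operand is a known smi with tag bit 0.
bool TrySmiAdd(int32_t left, int32_t constant_smi, int32_t* result) {
  int32_t sum;
  if (__builtin_add_overflow(left, constant_smi, &sum)) return false;
  if (!IsSmi(sum)) return false;
  *result = sum;
  return true;
}

int main() {
  int32_t r;
  assert(TrySmiAdd(SmiTag(2), SmiTag(3), &r) && SmiUntag(r) == 5);
  assert(!TrySmiAdd(SmiTag(2) | 1, SmiTag(3), &r));  // pointer-tagged left
}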
@@ -1959,7 +1846,9 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
  __ mov(r1, r0);
  __ pop(r0);  // Restore value.
  __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+ Handle<Code> ic = is_strict_mode()
+     ? isolate()->builtins()->StoreIC_Initialize_Strict()
+     : isolate()->builtins()->StoreIC_Initialize();
  EmitCallIC(ic, RelocInfo::CODE_TARGET);
  break;
  }
@@ -1980,7 +1869,9 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
    __ pop(r2);
  }
  __ pop(r0);  // Restore value.
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic = is_strict_mode()
+     ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
+     : isolate()->builtins()->KeyedStoreIC_Initialize();
  EmitCallIC(ic, RelocInfo::CODE_TARGET);
  break;
  }
@@ -2004,9 +1895,9 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
  // r2, and the global object in r1.
  __ mov(r2, Operand(var->name()));
  __ ldr(r1, GlobalObjectOperand());
- Handle<Code> ic(Builtins::builtin(is_strict()
-     ? Builtins::StoreIC_Initialize_Strict
-     : Builtins::StoreIC_Initialize));
+ Handle<Code> ic = is_strict_mode()
+     ? isolate()->builtins()->StoreIC_Initialize_Strict()
+     : isolate()->builtins()->StoreIC_Initialize();
  EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);

  } else if (op == Token::INIT_CONST) {
@@ -2075,9 +1966,10 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
  case Slot::LOOKUP:
    // Call the runtime for the assignment.
    __ push(r0);  // Value.
-   __ mov(r0, Operand(slot->var()->name()));
-   __ Push(cp, r0);  // Context and name.
-   __ CallRuntime(Runtime::kStoreContextSlot, 3);
+   __ mov(r1, Operand(slot->var()->name()));
+   __ mov(r0, Operand(Smi::FromInt(strict_mode_flag())));
+   __ Push(cp, r1, r0);  // Context, name, strict mode.
+   __ CallRuntime(Runtime::kStoreContextSlot, 4);
    break;
  }
  }
@@ -2112,7 +2004,9 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
    __ pop(r1);
  }

- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+ Handle<Code> ic = is_strict_mode()
+     ? isolate()->builtins()->StoreIC_Initialize_Strict()
+     : isolate()->builtins()->StoreIC_Initialize();
  EmitCallIC(ic, RelocInfo::CODE_TARGET);

  // If the assignment ends an initialization block, revert to fast case.
@@ -2156,7 +2050,9 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
    __ pop(r2);
  }

- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic = is_strict_mode()
+     ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
+     : isolate()->builtins()->KeyedStoreIC_Initialize();
  EmitCallIC(ic, RelocInfo::CODE_TARGET);

  // If the assignment ends an initialization block, revert to fast case.
@@ -2207,7 +2103,8 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
  SetSourcePosition(expr->position());
  // Call the IC initialization code.
  InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
- Handle<Code> ic = StubCache::ComputeCallInitialize(arg_count, in_loop);
+ Handle<Code> ic =
+     isolate()->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
  EmitCallIC(ic, mode);
  RecordJSReturnSite(expr);
  // Restore context register.
@@ -2240,7 +2137,8 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
  SetSourcePosition(expr->position());
  // Call the IC initialization code.
  InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
- Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arg_count, in_loop);
+ Handle<Code> ic =
+     isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count, in_loop);
  __ ldr(r2, MemOperand(sp, (arg_count + 1) * kPointerSize));  // Key.
  EmitCallIC(ic, mode);
  RecordJSReturnSite(expr);
@@ -2271,6 +2169,29 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
  }


+ void FullCodeGenerator::EmitResolvePossiblyDirectEval(ResolveEvalFlag flag,
+                                                       int arg_count) {
+   // Push copy of the first argument or undefined if it doesn't exist.
+   if (arg_count > 0) {
+     __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
+   } else {
+     __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
+   }
+   __ push(r1);
+
+   // Push the receiver of the enclosing function and do runtime call.
+   __ ldr(r1, MemOperand(fp, (2 + scope()->num_parameters()) * kPointerSize));
+   __ push(r1);
+   // Push the strict mode flag.
+   __ mov(r1, Operand(Smi::FromInt(strict_mode_flag())));
+   __ push(r1);
+
+   __ CallRuntime(flag == SKIP_CONTEXT_LOOKUP
+       ? Runtime::kResolvePossiblyDirectEvalNoLookup
+       : Runtime::kResolvePossiblyDirectEval, 4);
+ }
+
+
  void FullCodeGenerator::VisitCall(Call* expr) {
  #ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
@@ -2300,26 +2221,31 @@ void FullCodeGenerator::VisitCall(Call* expr) {
    VisitForStackValue(args->at(i));
  }

- // Push copy of the function - found below the arguments.
- __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
- __ push(r1);
-
- // Push copy of the first argument or undefined if it doesn't exist.
- if (arg_count > 0) {
-   __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
-   __ push(r1);
- } else {
-   __ push(r2);
+ // If we know that eval can only be shadowed by eval-introduced
+ // variables we attempt to load the global eval function directly
+ // in generated code. If we succeed, there is no need to perform a
+ // context lookup in the runtime system.
+ Label done;
+ if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
+   Label slow;
+   EmitLoadGlobalSlotCheckExtensions(var->AsSlot(),
+                                     NOT_INSIDE_TYPEOF,
+                                     &slow);
+   // Push the function and resolve eval.
+   __ push(r0);
+   EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count);
+   __ jmp(&done);
+   __ bind(&slow);
  }

- // Push the receiver of the enclosing function and do runtime call.
- __ ldr(r1,
-        MemOperand(fp, (2 + scope()->num_parameters()) * kPointerSize));
- __ push(r1);
- // Push the strict mode flag.
- __ mov(r1, Operand(Smi::FromInt(strict_mode_flag())));
+ // Push copy of the function (found below the arguments) and
+ // resolve eval.
+ __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ push(r1);
- __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 4);
+ EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count);
+ if (done.is_linked()) {
+   __ bind(&done);
+ }

  // The runtime call returns a pair of values in r0 (function) and
  // r1 (receiver). Touch up the stack with the right values.
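The restructured eval call has two routes into EmitResolvePossiblyDirectEval: a fast route when the 'eval' binding is a dynamic-global slot (so the context lookup can be skipped), and a generic route otherwise. The control flow, restated as a small C++ sketch with illustrative stand-in names rather than the real runtime entry points:

// Hypothetical stand-ins for the resolution performed by the runtime.
struct ResolvedCall { /* function ends up in r0, receiver in r1 */ };

enum ResolveEvalFlag { SKIP_CONTEXT_LOOKUP, PERFORM_CONTEXT_LOOKUP };

ResolvedCall ResolveEvalInRuntime(ResolveEvalFlag flag) {
  // Either kResolvePossiblyDirectEvalNoLookup or
  // kResolvePossiblyDirectEval; both take the same four arguments:
  // function, first arg (or undefined), receiver, strict-mode flag.
  (void)flag;
  return ResolvedCall{};
}

ResolvedCall ResolveEval(bool binding_is_dynamic_global,
                         bool global_load_succeeded) {
  if (binding_is_dynamic_global && global_load_succeeded) {
    // Fast route: global 'eval' was loaded directly in generated code.
    return ResolveEvalInRuntime(SKIP_CONTEXT_LOOKUP);
  }
  // Slow route: full context-chain lookup in the runtime.
  return ResolveEvalInRuntime(PERFORM_CONTEXT_LOOKUP);
}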
@@ -2411,7 +2337,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
  // Record source code position for IC call.
  SetSourcePosition(prop->position());

- Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+ Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  EmitCallIC(ic, RelocInfo::CODE_TARGET);
  __ ldr(r1, GlobalObjectOperand());
  __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
@@ -2425,16 +2351,6 @@ void FullCodeGenerator::VisitCall(Call* expr) {
  }
  }
  } else {
-   // Call to some other expression. If the expression is an anonymous
-   // function literal not called in a loop, mark it as one that should
-   // also use the fast code generator.
-   FunctionLiteral* lit = fun->AsFunctionLiteral();
-   if (lit != NULL &&
-       lit->name()->Equals(Heap::empty_string()) &&
-       loop_depth() == 0) {
-     lit->set_try_full_codegen(true);
-   }
-
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(fun);
    }
@@ -2479,7 +2395,8 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  __ mov(r0, Operand(arg_count));
  __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));

- Handle<Code> construct_builtin(Builtins::builtin(Builtins::JSConstructCall));
+ Handle<Code> construct_builtin =
+     isolate()->builtins()->JSConstructCall();
  __ Call(construct_builtin, RelocInfo::CONSTRUCT_CALL);
  context()->Plug(r0);
  }
@@ -2615,11 +2532,75 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

- // Just indicate false, as %_IsStringWrapperSafeForDefaultValueOf() is only
- // used in a few functions in runtime.js which should not normally be hit by
- // this compiler.
+ if (FLAG_debug_code) __ AbortIfSmi(r0);
+
+ __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
+ __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset));
+ __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
+ __ b(ne, if_true);
+
+ // Check for fast case object. Generate false result for slow case object.
+ __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset));
+ __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
+ __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
+ __ cmp(r2, ip);
+ __ b(eq, if_false);
+
+ // Look for valueOf symbol in the descriptor array, and indicate false if
+ // found. The type is not checked, so if it is a transition it is a false
+ // negative.
+ __ ldr(r4, FieldMemOperand(r1, Map::kInstanceDescriptorsOffset));
+ __ ldr(r3, FieldMemOperand(r4, FixedArray::kLengthOffset));
+ // r4: descriptor array
+ // r3: length of descriptor array
+ // Calculate the end of the descriptor array.
+ STATIC_ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiTagSize == 1);
+ STATIC_ASSERT(kPointerSize == 4);
+ __ add(r2, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
+
+ // Calculate location of the first key name.
+ __ add(r4,
+        r4,
+        Operand(FixedArray::kHeaderSize - kHeapObjectTag +
+                DescriptorArray::kFirstIndex * kPointerSize));
+ // Loop through all the keys in the descriptor array. If one of these is the
+ // symbol valueOf the result is false.
+ Label entry, loop;
+ // The use of ip to store the valueOf symbol assumes that it is not otherwise
+ // used in the loop below.
+ __ mov(ip, Operand(FACTORY->value_of_symbol()));
+ __ jmp(&entry);
+ __ bind(&loop);
+ __ ldr(r3, MemOperand(r4, 0));
+ __ cmp(r3, ip);
+ __ b(eq, if_false);
+ __ add(r4, r4, Operand(kPointerSize));
+ __ bind(&entry);
+ __ cmp(r4, Operand(r2));
+ __ b(ne, &loop);
+
+ // If a valueOf property is not found on the object, check that its
+ // prototype is the unmodified String prototype. If not, the result is false.
+ __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
+ __ tst(r2, Operand(kSmiTagMask));
+ __ b(eq, if_false);
+ __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
+ __ ldr(r3, ContextOperand(cp, Context::GLOBAL_INDEX));
+ __ ldr(r3, FieldMemOperand(r3, GlobalObject::kGlobalContextOffset));
+ __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
+ __ cmp(r2, r3);
+ __ b(ne, if_false);
+
+ // Set the bit in the map to indicate that it has been checked safe for
+ // default valueOf and set true result.
+ __ ldrb(r2, FieldMemOperand(r4, Map::kBitField2Offset));
+ __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
+ __ strb(r2, FieldMemOperand(r4, Map::kBitField2Offset));
+ __ jmp(if_true);
+
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
- __ jmp(if_false);
  context()->Plug(if_true, if_false);
  }

@@ -2875,8 +2856,9 @@ void FullCodeGenerator::EmitRandomHeapNumber(ZoneList<Expression*>* args) {
  // by computing:
  // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20).
  if (CpuFeatures::IsSupported(VFP3)) {
-   __ PrepareCallCFunction(0, r1);
-   __ CallCFunction(ExternalReference::random_uint32_function(), 0);
+   __ PrepareCallCFunction(1, r0);
+   __ mov(r0, Operand(ExternalReference::isolate_address()));
+   __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);

    CpuFeatures::Scope scope(VFP3);
    // 0x41300000 is the top half of 1.0 x 2^20 as a double.
@@ -2894,10 +2876,11 @@ void FullCodeGenerator::EmitRandomHeapNumber(ZoneList<Expression*>* args) {
    __ vstr(d7, r0, HeapNumber::kValueOffset);
    __ mov(r0, r4);
  } else {
+   __ PrepareCallCFunction(2, r0);
    __ mov(r0, Operand(r4));
-   __ PrepareCallCFunction(1, r1);
+   __ mov(r1, Operand(ExternalReference::isolate_address()));
    __ CallCFunction(
-       ExternalReference::fill_heap_number_with_random_function(), 1);
+       ExternalReference::fill_heap_number_with_random_function(isolate()), 2);
  }

  context()->Plug(r0);
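The comment's formula is easiest to see in bits: 0x41300000 in the high word makes the double 1.0 x 2^20; putting 32 random bits in the low word gives 2^20 + r * 2^-32, so subtracting 2^20 leaves a uniform value in [0, 1). A verifiable C++ rendering of the trick:

#include <cstdint>
#include <cstdio>
#include <cstring>

double RandomBitsToDouble(uint32_t random_bits) {
  // High word 0x41300000, low word random: 1.(20 zeros)(32 random bits)
  // times 2^20, exactly as the comment above describes.
  uint64_t bits = (uint64_t{0x41300000} << 32) | random_bits;
  double d;
  std::memcpy(&d, &bits, sizeof d);  // bit-cast without aliasing UB
  return d - 1048576.0;              // subtract 1.0 * 2^20
}

int main() {
  std::printf("%.10f\n", RandomBitsToDouble(0));            // 0.0
  std::printf("%.10f\n", RandomBitsToDouble(0xffffffffu));  // just below 1
}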
@@ -2952,7 +2935,8 @@ void FullCodeGenerator::EmitMathPow(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
- __ CallRuntime(Runtime::kMath_pow, 2);
+ MathPowStub stub;
+ __ CallStub(&stub);
  context()->Plug(r0);
  }

@@ -3134,37 +3118,43 @@ void FullCodeGenerator::EmitStringCompare(ZoneList<Expression*>* args) {


  void FullCodeGenerator::EmitMathSin(ZoneList<Expression*>* args) {
- // Load the argument on the stack and call the runtime.
+ // Load the argument on the stack and call the stub.
+ TranscendentalCacheStub stub(TranscendentalCache::SIN,
+                              TranscendentalCacheStub::TAGGED);
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
- __ CallRuntime(Runtime::kMath_sin, 1);
+ __ CallStub(&stub);
  context()->Plug(r0);
  }


  void FullCodeGenerator::EmitMathCos(ZoneList<Expression*>* args) {
- // Load the argument on the stack and call the runtime.
+ // Load the argument on the stack and call the stub.
+ TranscendentalCacheStub stub(TranscendentalCache::COS,
+                              TranscendentalCacheStub::TAGGED);
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
- __ CallRuntime(Runtime::kMath_cos, 1);
+ __ CallStub(&stub);
  context()->Plug(r0);
  }


- void FullCodeGenerator::EmitMathSqrt(ZoneList<Expression*>* args) {
- // Load the argument on the stack and call the runtime function.
+ void FullCodeGenerator::EmitMathLog(ZoneList<Expression*>* args) {
+ // Load the argument on the stack and call the stub.
+ TranscendentalCacheStub stub(TranscendentalCache::LOG,
+                              TranscendentalCacheStub::TAGGED);
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
- __ CallRuntime(Runtime::kMath_sqrt, 1);
+ __ CallStub(&stub);
  context()->Plug(r0);
  }


- void FullCodeGenerator::EmitMathLog(ZoneList<Expression*>* args) {
+ void FullCodeGenerator::EmitMathSqrt(ZoneList<Expression*>* args) {
  // Load the argument on the stack and call the runtime function.
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
- __ CallRuntime(Runtime::kMath_log, 1);
+ __ CallRuntime(Runtime::kMath_sqrt, 1);
  context()->Plug(r0);
  }
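sin, cos, and log now go through TranscendentalCacheStub, which consults a cache keyed on the bit pattern of the input before calling out to the slow path. A minimal sketch of that caching idea in C++ (toy table size and hash; V8's actual layout differs):

#include <cmath>
#include <cstdint>
#include <cstring>

struct Entry {
  uint64_t key;
  double value;
  bool valid;
};

static Entry sin_cache[512];

double CachedSin(double x) {
  uint64_t bits;
  std::memcpy(&bits, &x, sizeof bits);   // key on the exact bit pattern
  Entry& e = sin_cache[bits % 512];
  if (!e.valid || e.key != bits) {       // miss: compute and fill
    e.key = bits;
    e.value = std::sin(x);
    e.valid = true;
  }
  return e.value;                        // hit: no libm call at all
}

Keying on the raw bits rather than the value keeps the cache exact for NaNs and signed zeros, which compare awkwardly as doubles.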

@@ -3204,7 +3194,79 @@ void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) {
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
+ Label done;
+ Label slow_case;
+ Register object = r0;
+ Register index1 = r1;
+ Register index2 = r2;
+ Register elements = r3;
+ Register scratch1 = r4;
+ Register scratch2 = r5;
+
+ __ ldr(object, MemOperand(sp, 2 * kPointerSize));
+ // Fetch the map and check if array is in fast case.
+ // Check that object doesn't require security checks and
+ // has no indexed interceptor.
+ __ CompareObjectType(object, scratch1, scratch2, JS_ARRAY_TYPE);
+ __ b(ne, &slow_case);
+ // Map is now in scratch1.
+
+ __ ldrb(scratch2, FieldMemOperand(scratch1, Map::kBitFieldOffset));
+ __ tst(scratch2, Operand(KeyedLoadIC::kSlowCaseBitFieldMask));
+ __ b(ne, &slow_case);
+
+ // Check the object's elements are in fast case and writable.
+ __ ldr(elements, FieldMemOperand(object, JSObject::kElementsOffset));
+ __ ldr(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
+ __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
+ __ cmp(scratch1, ip);
+ __ b(ne, &slow_case);
+
+ // Check that both indices are smis.
+ __ ldr(index1, MemOperand(sp, 1 * kPointerSize));
+ __ ldr(index2, MemOperand(sp, 0));
+ __ JumpIfNotBothSmi(index1, index2, &slow_case);
+
+ // Check that both indices are valid.
+ __ ldr(scratch1, FieldMemOperand(object, JSArray::kLengthOffset));
+ __ cmp(scratch1, index1);
+ __ cmp(scratch1, index2, hi);
+ __ b(ls, &slow_case);
+
+ // Bring the address of the elements into index1 and index2.
+ __ add(scratch1, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ __ add(index1,
+        scratch1,
+        Operand(index1, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ add(index2,
+        scratch1,
+        Operand(index2, LSL, kPointerSizeLog2 - kSmiTagSize));
+
+ // Swap elements.
+ __ ldr(scratch1, MemOperand(index1, 0));
+ __ ldr(scratch2, MemOperand(index2, 0));
+ __ str(scratch1, MemOperand(index2, 0));
+ __ str(scratch2, MemOperand(index1, 0));
+
+ Label new_space;
+ __ InNewSpace(elements, scratch1, eq, &new_space);
+ // Possible optimization: do a check that both values are Smis
+ // (or them and test against Smi mask.)
+
+ __ mov(scratch1, elements);
+ __ RecordWriteHelper(elements, index1, scratch2);
+ __ RecordWriteHelper(scratch1, index2, scratch2);  // scratch1 holds elements.
+
+ __ bind(&new_space);
+ // We are done. Drop elements from the stack, and return undefined.
+ __ Drop(3);
+ __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
+ __ jmp(&done);
+
+ __ bind(&slow_case);
  __ CallRuntime(Runtime::kSwapElements, 3);
+
+ __ bind(&done);
  context()->Plug(r0);
  }
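The index arithmetic in the fast path leans on smi tagging: a smi stores v as v << 1, so on 32-bit ARM a smi index becomes a byte offset into a 4-byte-element array with a single LSL by (kPointerSizeLog2 - kSmiTagSize) = 1. A worked C++ check of that identity:

#include <cassert>
#include <cstdint>

const int kSmiTagSize = 1;
const int kPointerSizeLog2 = 2;  // 32-bit target: 4-byte pointers

int32_t SmiTag(int32_t value) { return value << kSmiTagSize; }

// Byte offset of element i, computed from the smi-tagged index exactly
// as Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize) does above.
int32_t ElementOffset(int32_t smi_index) {
  return smi_index << (kPointerSizeLog2 - kSmiTagSize);
}

int main() {
  assert(ElementOffset(SmiTag(0)) == 0);
  assert(ElementOffset(SmiTag(3)) == 3 * 4);
  assert(ElementOffset(SmiTag(100)) == 100 * 4);
}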

@@ -3216,7 +3278,7 @@ void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();

  Handle<FixedArray> jsfunction_result_caches(
-     Top::global_context()->jsfunction_result_caches());
+     isolate()->global_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort("Attempt to use undefined cache.");
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
@@ -3323,16 +3385,248 @@ void FullCodeGenerator::EmitHasCachedArrayIndex(ZoneList<Expression*>* args) {
  void FullCodeGenerator::EmitGetCachedArrayIndex(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));
+
+ if (FLAG_debug_code) {
+   __ AbortIfNotString(r0);
+ }
+
  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
  __ IndexFromHash(r0, r0);
+
  context()->Plug(r0);
  }


  void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
+ Label bailout, done, one_char_separator, long_separator,
+     non_trivial_array, not_size_one_array, loop,
+     empty_separator_loop, one_char_separator_loop,
+     one_char_separator_loop_entry, long_separator_loop;
+
+ ASSERT(args->length() == 2);
+ VisitForStackValue(args->at(1));
+ VisitForAccumulatorValue(args->at(0));
+
+ // All aliases of the same register have disjoint lifetimes.
+ Register array = r0;
+ Register elements = no_reg;  // Will be r0.
+ Register result = no_reg;  // Will be r0.
+ Register separator = r1;
+ Register array_length = r2;
+ Register result_pos = no_reg;  // Will be r2.
+ Register string_length = r3;
+ Register string = r4;
+ Register element = r5;
+ Register elements_end = r6;
+ Register scratch1 = r7;
+ Register scratch2 = r9;
+
+ // Separator operand is on the stack.
+ __ pop(separator);
+
+ // Check that the array is a JSArray.
+ __ JumpIfSmi(array, &bailout);
+ __ CompareObjectType(array, scratch1, scratch2, JS_ARRAY_TYPE);
+ __ b(ne, &bailout);
+
+ // Check that the array has fast elements.
+ __ ldrb(scratch2, FieldMemOperand(scratch1, Map::kBitField2Offset));
+ __ tst(scratch2, Operand(1 << Map::kHasFastElements));
+ __ b(eq, &bailout);
+
+ // If the array has length zero, return the empty string.
+ __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
+ __ SmiUntag(array_length, SetCC);
+ __ b(ne, &non_trivial_array);
+ __ LoadRoot(r0, Heap::kEmptyStringRootIndex);
+ __ b(&done);
+
+ __ bind(&non_trivial_array);
+
+ // Get the FixedArray containing array's elements.
+ elements = array;
+ __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
+ array = no_reg;  // End of array's live range.
+
+ // Check that all array elements are sequential ASCII strings, and
+ // accumulate the sum of their lengths, as a smi-encoded value.
+ __ mov(string_length, Operand(0));
+ __ add(element,
+        elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
+ // Loop condition: while (element < elements_end).
+ // Live values in registers:
+ //   elements: Fixed array of strings.
+ //   array_length: Length of the fixed array of strings (not smi).
+ //   separator: Separator string.
+ //   string_length: Accumulated sum of string lengths (smi).
+ //   element: Current array element.
+ //   elements_end: Array end.
+ if (FLAG_debug_code) {
+   __ cmp(array_length, Operand(0));
+   __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin");
+ }
+ __ bind(&loop);
+ __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
+ __ JumpIfSmi(string, &bailout);
+ __ ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
+ __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
+ __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
+ __ ldr(scratch1, FieldMemOperand(string, SeqAsciiString::kLengthOffset));
+ __ add(string_length, string_length, Operand(scratch1), SetCC);
+ __ b(vs, &bailout);
+ __ cmp(element, elements_end);
+ __ b(lt, &loop);
+
+ // If array_length is 1, return elements[0], a string.
+ __ cmp(array_length, Operand(1));
+ __ b(ne, &not_size_one_array);
+ __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
+ __ b(&done);
+
+ __ bind(&not_size_one_array);
+
+ // Live values in registers:
+ //   separator: Separator string.
+ //   array_length: Length of the array.
+ //   string_length: Sum of string lengths (smi).
+ //   elements: FixedArray of strings.
+
+ // Check that the separator is a flat ASCII string.
+ __ JumpIfSmi(separator, &bailout);
+ __ ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
+ __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
+ __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
+
+ // Add (separator length times array_length) - separator length to the
+ // string_length to get the length of the result string. array_length is not
+ // smi but the other values are, so the result is a smi.
+ __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
+ __ sub(string_length, string_length, Operand(scratch1));
+ __ smull(scratch2, ip, array_length, scratch1);
+ // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
+ // zero.
+ __ cmp(ip, Operand(0));
+ __ b(ne, &bailout);
+ __ tst(scratch2, Operand(0x80000000));
+ __ b(ne, &bailout);
+ __ add(string_length, string_length, Operand(scratch2), SetCC);
+ __ b(vs, &bailout);
+ __ SmiUntag(string_length);
+
+ // Get first element in the array to free up the elements register to be used
+ // for the result.
+ __ add(element,
+        elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ result = elements;  // End of live range for elements.
+ elements = no_reg;
+ // Live values in registers:
+ //   element: First array element.
+ //   separator: Separator string.
+ //   string_length: Length of result string (not smi).
+ //   array_length: Length of the array.
+ __ AllocateAsciiString(result,
+                        string_length,
+                        scratch1,
+                        scratch2,
+                        elements_end,
+                        &bailout);
+ // Prepare for looping. Set up elements_end to end of the array. Set
+ // result_pos to the position of the result where to write the first
+ // character.
+ __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
+ result_pos = array_length;  // End of live range for array_length.
+ array_length = no_reg;
+ __ add(result_pos,
+        result,
+        Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+
+ // Check the length of the separator.
+ __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
+ __ cmp(scratch1, Operand(Smi::FromInt(1)));
+ __ b(eq, &one_char_separator);
+ __ b(gt, &long_separator);
+
+ // Empty separator case.
+ __ bind(&empty_separator_loop);
+ // Live values in registers:
+ //   result_pos: the position to which we are currently copying characters.
+ //   element: Current array element.
+ //   elements_end: Array end.
+
+ // Copy next array element to the result.
+ __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
+ __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
+ __ SmiUntag(string_length);
+ __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ __ CopyBytes(string, result_pos, string_length, scratch1);
+ __ cmp(element, elements_end);
+ __ b(lt, &empty_separator_loop);  // End while (element < elements_end).
+ ASSERT(result.is(r0));
+ __ b(&done);
+
+ // One-character separator case.
+ __ bind(&one_char_separator);
+ // Replace separator with its ascii character value.
+ __ ldrb(separator, FieldMemOperand(separator, SeqAsciiString::kHeaderSize));
+ // Jump into the loop after the code that copies the separator, so the first
+ // element is not preceded by a separator.
+ __ jmp(&one_char_separator_loop_entry);
+
+ __ bind(&one_char_separator_loop);
+ // Live values in registers:
+ //   result_pos: the position to which we are currently copying characters.
+ //   element: Current array element.
+ //   elements_end: Array end.
+ //   separator: Single separator ascii char (in lower byte).
+
+ // Copy the separator character to the result.
+ __ strb(separator, MemOperand(result_pos, 1, PostIndex));
+
+ // Copy next array element to the result.
+ __ bind(&one_char_separator_loop_entry);
+ __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
+ __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
+ __ SmiUntag(string_length);
+ __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ __ CopyBytes(string, result_pos, string_length, scratch1);
+ __ cmp(element, elements_end);
+ __ b(lt, &one_char_separator_loop);  // End while (element < elements_end).
+ ASSERT(result.is(r0));
+ __ b(&done);
+
+ // Long separator case (separator is more than one character). Entry is at the
+ // label long_separator below.
+ __ bind(&long_separator_loop);
+ // Live values in registers:
+ //   result_pos: the position to which we are currently copying characters.
+ //   element: Current array element.
+ //   elements_end: Array end.
+ //   separator: Separator string.
+
+ // Copy the separator to the result.
+ __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
+ __ SmiUntag(string_length);
+ __ add(string,
+        separator,
+        Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ __ CopyBytes(string, result_pos, string_length, scratch1);
+
+ __ bind(&long_separator);
+ __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
+ __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
+ __ SmiUntag(string_length);
+ __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ __ CopyBytes(string, result_pos, string_length, scratch1);
+ __ cmp(element, elements_end);
+ __ b(lt, &long_separator_loop);  // End while (element < elements_end).
+ ASSERT(result.is(r0));
+ __ b(&done);
+
+ __ bind(&bailout);
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
+ __ bind(&done);
  context()->Plug(r0);
- return;
  }
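The result-length computation above is total = sum-of-string-lengths + separator-length * (array-length - 1), with every step checked for smi overflow; smull produces a 64-bit product so the code can require the upper 33 bits to be zero. The same guard in portable C++, under the assumption of a 31-bit positive smi range:

#include <cstdint>

const int32_t kMaxSmi = (1 << 30) - 1;  // assumed positive smi payload range

// total = sum_of_lengths + separator_length * (array_length - 1), checked
// the way the generated code checks it; false corresponds to the bailout
// path (give up and let the generic join run).
bool ComputeJoinedLength(int32_t sum_of_lengths, int32_t separator_length,
                         int32_t array_length, int32_t* total) {
  // 64-bit product, mirroring smull; high word must be zero and the low
  // word non-negative.
  int64_t product = int64_t{separator_length} * array_length;
  if (product < 0 || product > kMaxSmi) return false;
  int64_t result = int64_t{sum_of_lengths} - separator_length + product;
  if (result < 0 || result > kMaxSmi) return false;
  *total = static_cast<int32_t>(result);
  return true;
}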


@@ -3363,7 +3657,8 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  if (expr->is_jsruntime()) {
    // Call the JS runtime function.
    __ mov(r2, Operand(expr->name()));
-   Handle<Code> ic = StubCache::ComputeCallInitialize(arg_count, NOT_IN_LOOP);
+   Handle<Code> ic =
+       isolate()->stub_cache()->ComputeCallInitialize(arg_count, NOT_IN_LOOP);
    EmitCallIC(ic, RelocInfo::CODE_TARGET);
    // Restore context register.
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -3486,9 +3781,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
  UnaryOverwriteMode overwrite =
      can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
- GenericUnaryOpStub stub(Token::SUB,
-                         overwrite,
-                         NO_UNARY_FLAGS);
+ GenericUnaryOpStub stub(Token::SUB, overwrite, NO_UNARY_FLAGS);
  // GenericUnaryOpStub expects the argument to be in the
  // accumulator register r0.
  VisitForAccumulatorValue(expr->expression());
@@ -3589,7 +3882,11 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
- PrepareForBailout(expr->increment(), TOS_REG);
+ if (assign_type == VARIABLE) {
+   PrepareForBailout(expr->expression(), TOS_REG);
+ } else {
+   PrepareForBailout(expr->increment(), TOS_REG);
+ }

  // Call ToNumber only if operand is not a smi.
  Label no_conversion;
@@ -3669,7 +3966,9 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  case NAMED_PROPERTY: {
    __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
    __ pop(r1);
-   Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+   Handle<Code> ic = is_strict_mode()
+       ? isolate()->builtins()->StoreIC_Initialize_Strict()
+       : isolate()->builtins()->StoreIC_Initialize();
    EmitCallIC(ic, RelocInfo::CODE_TARGET);
    PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
    if (expr->is_postfix()) {
@@ -3684,7 +3983,9 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  case KEYED_PROPERTY: {
    __ pop(r1);  // Key.
    __ pop(r2);  // Receiver.
-   Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+   Handle<Code> ic = is_strict_mode()
+       ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
+       : isolate()->builtins()->KeyedStoreIC_Initialize();
    EmitCallIC(ic, RelocInfo::CODE_TARGET);
    PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
    if (expr->is_postfix()) {
@@ -3708,7 +4009,7 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  Comment cmnt(masm_, "Global variable");
  __ ldr(r0, GlobalObjectOperand());
  __ mov(r2, Operand(proxy->name()));
- Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  // Use a regular load, not a contextual load, to avoid a reference
  // error.
  EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -3761,72 +4062,53 @@ bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
  }
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);

- if (check->Equals(Heap::number_symbol())) {
-   __ tst(r0, Operand(kSmiTagMask));
-   __ b(eq, if_true);
+ if (check->Equals(isolate()->heap()->number_symbol())) {
+   __ JumpIfSmi(r0, if_true);
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, fall_through);
- } else if (check->Equals(Heap::string_symbol())) {
-   __ tst(r0, Operand(kSmiTagMask));
-   __ b(eq, if_false);
+ } else if (check->Equals(isolate()->heap()->string_symbol())) {
+   __ JumpIfSmi(r0, if_false);
    // Check for undetectable objects => false.
-   __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
+   __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
+   __ b(ge, if_false);
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
-   __ and_(r1, r1, Operand(1 << Map::kIsUndetectable));
-   __ cmp(r1, Operand(1 << Map::kIsUndetectable));
-   __ b(eq, if_false);
-   __ ldrb(r1, FieldMemOperand(r0, Map::kInstanceTypeOffset));
-   __ cmp(r1, Operand(FIRST_NONSTRING_TYPE));
-   Split(lt, if_true, if_false, fall_through);
- } else if (check->Equals(Heap::boolean_symbol())) {
-   __ LoadRoot(ip, Heap::kTrueValueRootIndex);
-   __ cmp(r0, ip);
+   __ tst(r1, Operand(1 << Map::kIsUndetectable));
+   Split(eq, if_true, if_false, fall_through);
+ } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
+   __ CompareRoot(r0, Heap::kTrueValueRootIndex);
    __ b(eq, if_true);
-   __ LoadRoot(ip, Heap::kFalseValueRootIndex);
-   __ cmp(r0, ip);
+   __ CompareRoot(r0, Heap::kFalseValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
- } else if (check->Equals(Heap::undefined_symbol())) {
-   __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
-   __ cmp(r0, ip);
+ } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
+   __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
    __ b(eq, if_true);
-   __ tst(r0, Operand(kSmiTagMask));
-   __ b(eq, if_false);
+   __ JumpIfSmi(r0, if_false);
    // Check for undetectable objects => true.
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
-   __ and_(r1, r1, Operand(1 << Map::kIsUndetectable));
-   __ cmp(r1, Operand(1 << Map::kIsUndetectable));
-   Split(eq, if_true, if_false, fall_through);
- } else if (check->Equals(Heap::function_symbol())) {
-   __ tst(r0, Operand(kSmiTagMask));
-   __ b(eq, if_false);
-   __ CompareObjectType(r0, r1, r0, JS_FUNCTION_TYPE);
-   __ b(eq, if_true);
-   // Regular expressions => 'function' (they are callable).
-   __ CompareInstanceType(r1, r0, JS_REGEXP_TYPE);
-   Split(eq, if_true, if_false, fall_through);
- } else if (check->Equals(Heap::object_symbol())) {
-   __ tst(r0, Operand(kSmiTagMask));
-   __ b(eq, if_false);
-   __ LoadRoot(ip, Heap::kNullValueRootIndex);
-   __ cmp(r0, ip);
+   __ tst(r1, Operand(1 << Map::kIsUndetectable));
+   Split(ne, if_true, if_false, fall_through);
+
+ } else if (check->Equals(isolate()->heap()->function_symbol())) {
+   __ JumpIfSmi(r0, if_false);
+   __ CompareObjectType(r0, r1, r0, FIRST_FUNCTION_CLASS_TYPE);
+   Split(ge, if_true, if_false, fall_through);
+
+ } else if (check->Equals(isolate()->heap()->object_symbol())) {
+   __ JumpIfSmi(r0, if_false);
+   __ CompareRoot(r0, Heap::kNullValueRootIndex);
    __ b(eq, if_true);
-   // Regular expressions => 'function', not 'object'.
-   __ CompareObjectType(r0, r1, r0, JS_REGEXP_TYPE);
-   __ b(eq, if_false);
-   // Check for undetectable objects => false.
-   __ ldrb(r0, FieldMemOperand(r1, Map::kBitFieldOffset));
-   __ and_(r0, r0, Operand(1 << Map::kIsUndetectable));
-   __ cmp(r0, Operand(1 << Map::kIsUndetectable));
-   __ b(eq, if_false);
    // Check for JS objects => true.
-   __ ldrb(r0, FieldMemOperand(r1, Map::kInstanceTypeOffset));
-   __ cmp(r0, Operand(FIRST_JS_OBJECT_TYPE));
-   __ b(lt, if_false);
-   __ cmp(r0, Operand(LAST_JS_OBJECT_TYPE));
-   Split(le, if_true, if_false, fall_through);
+   __ CompareObjectType(r0, r0, r1, FIRST_JS_OBJECT_TYPE);
+   __ b(lo, if_false);
+   __ CompareInstanceType(r0, r1, FIRST_FUNCTION_CLASS_TYPE);
+   __ b(hs, if_false);
+   // Check for undetectable objects => false.
+   __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
+   __ tst(r1, Operand(1 << Map::kIsUndetectable));
+   Split(eq, if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
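The rewritten typeof checks classify heap objects by instance-type ranges: everything at or above FIRST_FUNCTION_CLASS_TYPE (functions and, in this V8, callable regexps) answers 'function', while 'object' covers null plus non-callable, non-undetectable JS objects. A rough C++ consolidation of the separate branches above, with toy constants standing in for the real ones in objects.h:

// Toy instance-type ordering; the real constants live in V8's objects.h.
enum InstanceType {
  FIRST_NONSTRING_TYPE = 64,        // everything below this is a string
  FIRST_JS_OBJECT_TYPE = 128,
  FIRST_FUNCTION_CLASS_TYPE = 192,  // functions, regexps, ... from here up
};

const char* TypeofHeapObject(int instance_type, bool undetectable,
                             bool is_heap_number, bool is_null) {
  if (is_heap_number) return "number";
  if (is_null) return "object";                        // typeof null
  if (instance_type < FIRST_NONSTRING_TYPE)
    return undetectable ? "undefined" : "string";
  if (instance_type >= FIRST_FUNCTION_CLASS_TYPE) return "function";
  if (undetectable) return "undefined";                // undetectable objects
  if (instance_type >= FIRST_JS_OBJECT_TYPE) return "object";
  return "?";  // booleans etc. are compared by value, not instance type
}

This is an approximation: the generated code compares the literal string first and emits only the one matching branch, so no single classification function actually exists in the compiler.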
@@ -3998,11 +4280,45 @@ Register FullCodeGenerator::context_register() {
  void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
  ASSERT(mode == RelocInfo::CODE_TARGET ||
         mode == RelocInfo::CODE_TARGET_CONTEXT);
+ Counters* counters = isolate()->counters();
+ switch (ic->kind()) {
+   case Code::LOAD_IC:
+     __ IncrementCounter(counters->named_load_full(), 1, r1, r2);
+     break;
+   case Code::KEYED_LOAD_IC:
+     __ IncrementCounter(counters->keyed_load_full(), 1, r1, r2);
+     break;
+   case Code::STORE_IC:
+     __ IncrementCounter(counters->named_store_full(), 1, r1, r2);
+     break;
+   case Code::KEYED_STORE_IC:
+     __ IncrementCounter(counters->keyed_store_full(), 1, r1, r2);
+   default:
+     break;
+ }
+
  __ Call(ic, mode);
  }


  void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
+ Counters* counters = isolate()->counters();
+ switch (ic->kind()) {
+   case Code::LOAD_IC:
+     __ IncrementCounter(counters->named_load_full(), 1, r1, r2);
+     break;
+   case Code::KEYED_LOAD_IC:
+     __ IncrementCounter(counters->keyed_load_full(), 1, r1, r2);
+     break;
+   case Code::STORE_IC:
+     __ IncrementCounter(counters->named_store_full(), 1, r1, r2);
+     break;
+   case Code::KEYED_STORE_IC:
+     __ IncrementCounter(counters->keyed_store_full(), 1, r1, r2);
+   default:
+     break;
+ }
+
  __ Call(ic, RelocInfo::CODE_TARGET);
  if (patch_site != NULL && patch_site->is_bound()) {
    patch_site->EmitPatchInfo();