mustang 0.0.1 → 0.1.0

Files changed (560)
  1. data/.rspec +1 -0
  2. data/Isolate +9 -0
  3. data/README.md +6 -12
  4. data/Rakefile +30 -4
  5. data/TODO.md +9 -0
  6. data/ext/v8/extconf.rb +56 -0
  7. data/ext/v8/v8.cpp +37 -0
  8. data/ext/v8/v8_array.cpp +161 -0
  9. data/ext/v8/v8_array.h +17 -0
  10. data/ext/v8/v8_base.cpp +147 -0
  11. data/ext/v8/v8_base.h +23 -0
  12. data/ext/v8/v8_cast.cpp +151 -0
  13. data/ext/v8/v8_cast.h +64 -0
  14. data/ext/v8/v8_context.cpp +174 -0
  15. data/ext/v8/v8_context.h +12 -0
  16. data/ext/v8/v8_date.cpp +61 -0
  17. data/ext/v8/v8_date.h +16 -0
  18. data/ext/v8/v8_errors.cpp +147 -0
  19. data/ext/v8/v8_errors.h +19 -0
  20. data/ext/v8/v8_external.cpp +66 -0
  21. data/ext/v8/v8_external.h +16 -0
  22. data/ext/v8/v8_function.cpp +182 -0
  23. data/ext/v8/v8_function.h +14 -0
  24. data/ext/v8/v8_integer.cpp +70 -0
  25. data/ext/v8/v8_integer.h +16 -0
  26. data/ext/v8/v8_macros.h +30 -0
  27. data/ext/v8/v8_main.cpp +53 -0
  28. data/ext/v8/v8_main.h +13 -0
  29. data/ext/v8/v8_number.cpp +62 -0
  30. data/ext/v8/v8_number.h +16 -0
  31. data/ext/v8/v8_object.cpp +172 -0
  32. data/ext/v8/v8_object.h +17 -0
  33. data/ext/v8/v8_ref.cpp +72 -0
  34. data/ext/v8/v8_ref.h +43 -0
  35. data/ext/v8/v8_regexp.cpp +148 -0
  36. data/ext/v8/v8_regexp.h +16 -0
  37. data/ext/v8/v8_string.cpp +78 -0
  38. data/ext/v8/v8_string.h +16 -0
  39. data/ext/v8/v8_value.cpp +370 -0
  40. data/ext/v8/v8_value.h +19 -0
  41. data/gemspec.yml +2 -1
  42. data/lib/core_ext/class.rb +14 -0
  43. data/lib/core_ext/object.rb +12 -0
  44. data/lib/core_ext/symbol.rb +23 -0
  45. data/lib/mustang.rb +44 -0
  46. data/lib/mustang/context.rb +69 -0
  47. data/lib/mustang/errors.rb +36 -0
  48. data/lib/support/delegated.rb +25 -0
  49. data/lib/v8/array.rb +21 -0
  50. data/lib/v8/context.rb +13 -0
  51. data/lib/v8/date.rb +20 -0
  52. data/lib/v8/error.rb +15 -0
  53. data/lib/v8/external.rb +16 -0
  54. data/lib/v8/function.rb +11 -0
  55. data/lib/v8/integer.rb +16 -0
  56. data/lib/v8/number.rb +16 -0
  57. data/lib/v8/object.rb +66 -0
  58. data/lib/v8/regexp.rb +23 -0
  59. data/lib/v8/string.rb +27 -0
  60. data/mustang.gemspec +3 -0
  61. data/spec/core_ext/class_spec.rb +19 -0
  62. data/spec/core_ext/object_spec.rb +19 -0
  63. data/spec/core_ext/symbol_spec.rb +27 -0
  64. data/spec/fixtures/test1.js +2 -0
  65. data/spec/fixtures/test2.js +2 -0
  66. data/spec/spec_helper.rb +20 -0
  67. data/spec/v8/array_spec.rb +88 -0
  68. data/spec/v8/cast_spec.rb +151 -0
  69. data/spec/v8/context_spec.rb +78 -0
  70. data/spec/v8/data_spec.rb +39 -0
  71. data/spec/v8/date_spec.rb +45 -0
  72. data/spec/v8/empty_spec.rb +27 -0
  73. data/spec/v8/errors_spec.rb +142 -0
  74. data/spec/v8/external_spec.rb +44 -0
  75. data/spec/v8/function_spec.rb +170 -0
  76. data/spec/v8/integer_spec.rb +41 -0
  77. data/spec/v8/main_spec.rb +18 -0
  78. data/spec/v8/null_spec.rb +27 -0
  79. data/spec/v8/number_spec.rb +40 -0
  80. data/spec/v8/object_spec.rb +79 -0
  81. data/spec/v8/primitive_spec.rb +9 -0
  82. data/spec/v8/regexp_spec.rb +65 -0
  83. data/spec/v8/string_spec.rb +48 -0
  84. data/spec/v8/undefined_spec.rb +27 -0
  85. data/spec/v8/value_spec.rb +215 -0
  86. data/vendor/v8/.gitignore +2 -0
  87. data/vendor/v8/AUTHORS +3 -1
  88. data/vendor/v8/ChangeLog +117 -0
  89. data/vendor/v8/SConstruct +334 -53
  90. data/vendor/v8/include/v8-debug.h +21 -11
  91. data/vendor/v8/include/v8-preparser.h +1 -1
  92. data/vendor/v8/include/v8-profiler.h +122 -43
  93. data/vendor/v8/include/v8-testing.h +5 -0
  94. data/vendor/v8/include/v8.h +171 -17
  95. data/vendor/v8/preparser/SConscript +38 -0
  96. data/vendor/v8/preparser/preparser-process.cc +77 -114
  97. data/vendor/v8/samples/shell.cc +232 -46
  98. data/vendor/v8/src/SConscript +29 -5
  99. data/vendor/v8/src/accessors.cc +70 -211
  100. data/vendor/v8/{test/cctest/test-mips.cc → src/allocation-inl.h} +15 -18
  101. data/vendor/v8/src/allocation.cc +0 -82
  102. data/vendor/v8/src/allocation.h +9 -42
  103. data/vendor/v8/src/api.cc +1645 -1156
  104. data/vendor/v8/src/api.h +76 -12
  105. data/vendor/v8/src/apiutils.h +0 -7
  106. data/vendor/v8/src/arguments.h +15 -4
  107. data/vendor/v8/src/arm/assembler-arm-inl.h +10 -9
  108. data/vendor/v8/src/arm/assembler-arm.cc +62 -23
  109. data/vendor/v8/src/arm/assembler-arm.h +76 -11
  110. data/vendor/v8/src/arm/builtins-arm.cc +39 -33
  111. data/vendor/v8/src/arm/code-stubs-arm.cc +1182 -402
  112. data/vendor/v8/src/arm/code-stubs-arm.h +20 -54
  113. data/vendor/v8/src/arm/codegen-arm.cc +159 -106
  114. data/vendor/v8/src/arm/codegen-arm.h +6 -6
  115. data/vendor/v8/src/arm/constants-arm.h +16 -1
  116. data/vendor/v8/src/arm/cpu-arm.cc +7 -5
  117. data/vendor/v8/src/arm/debug-arm.cc +6 -4
  118. data/vendor/v8/src/arm/deoptimizer-arm.cc +51 -14
  119. data/vendor/v8/src/arm/disasm-arm.cc +47 -15
  120. data/vendor/v8/src/arm/frames-arm.h +1 -1
  121. data/vendor/v8/src/arm/full-codegen-arm.cc +724 -408
  122. data/vendor/v8/src/arm/ic-arm.cc +90 -85
  123. data/vendor/v8/src/arm/lithium-arm.cc +140 -69
  124. data/vendor/v8/src/arm/lithium-arm.h +161 -46
  125. data/vendor/v8/src/arm/lithium-codegen-arm.cc +567 -297
  126. data/vendor/v8/src/arm/lithium-codegen-arm.h +21 -9
  127. data/vendor/v8/src/arm/lithium-gap-resolver-arm.cc +2 -0
  128. data/vendor/v8/src/arm/macro-assembler-arm.cc +457 -96
  129. data/vendor/v8/src/arm/macro-assembler-arm.h +115 -18
  130. data/vendor/v8/src/arm/regexp-macro-assembler-arm.cc +20 -13
  131. data/vendor/v8/src/arm/regexp-macro-assembler-arm.h +1 -0
  132. data/vendor/v8/src/arm/simulator-arm.cc +184 -101
  133. data/vendor/v8/src/arm/simulator-arm.h +26 -21
  134. data/vendor/v8/src/arm/stub-cache-arm.cc +450 -467
  135. data/vendor/v8/src/arm/virtual-frame-arm.cc +14 -12
  136. data/vendor/v8/src/arm/virtual-frame-arm.h +11 -8
  137. data/vendor/v8/src/array.js +35 -18
  138. data/vendor/v8/src/assembler.cc +186 -92
  139. data/vendor/v8/src/assembler.h +106 -69
  140. data/vendor/v8/src/ast-inl.h +5 -0
  141. data/vendor/v8/src/ast.cc +46 -35
  142. data/vendor/v8/src/ast.h +107 -50
  143. data/vendor/v8/src/atomicops.h +2 -0
  144. data/vendor/v8/src/atomicops_internals_mips_gcc.h +169 -0
  145. data/vendor/v8/src/bootstrapper.cc +649 -399
  146. data/vendor/v8/src/bootstrapper.h +94 -27
  147. data/vendor/v8/src/builtins.cc +359 -227
  148. data/vendor/v8/src/builtins.h +157 -123
  149. data/vendor/v8/src/checks.cc +2 -2
  150. data/vendor/v8/src/checks.h +4 -0
  151. data/vendor/v8/src/code-stubs.cc +27 -17
  152. data/vendor/v8/src/code-stubs.h +38 -17
  153. data/vendor/v8/src/codegen-inl.h +5 -1
  154. data/vendor/v8/src/codegen.cc +27 -17
  155. data/vendor/v8/src/codegen.h +9 -9
  156. data/vendor/v8/src/compilation-cache.cc +92 -206
  157. data/vendor/v8/src/compilation-cache.h +205 -30
  158. data/vendor/v8/src/compiler.cc +107 -120
  159. data/vendor/v8/src/compiler.h +17 -2
  160. data/vendor/v8/src/contexts.cc +22 -15
  161. data/vendor/v8/src/contexts.h +14 -8
  162. data/vendor/v8/src/conversions.cc +86 -30
  163. data/vendor/v8/src/counters.cc +19 -4
  164. data/vendor/v8/src/counters.h +28 -16
  165. data/vendor/v8/src/cpu-profiler-inl.h +4 -3
  166. data/vendor/v8/src/cpu-profiler.cc +123 -72
  167. data/vendor/v8/src/cpu-profiler.h +33 -19
  168. data/vendor/v8/src/cpu.h +2 -0
  169. data/vendor/v8/src/d8-debug.cc +3 -3
  170. data/vendor/v8/src/d8-debug.h +7 -6
  171. data/vendor/v8/src/d8-posix.cc +2 -0
  172. data/vendor/v8/src/d8.cc +22 -12
  173. data/vendor/v8/src/d8.gyp +3 -0
  174. data/vendor/v8/src/d8.js +618 -0
  175. data/vendor/v8/src/data-flow.h +3 -3
  176. data/vendor/v8/src/dateparser.h +4 -2
  177. data/vendor/v8/src/debug-agent.cc +10 -9
  178. data/vendor/v8/src/debug-agent.h +9 -11
  179. data/vendor/v8/src/debug-debugger.js +121 -0
  180. data/vendor/v8/src/debug.cc +331 -227
  181. data/vendor/v8/src/debug.h +248 -219
  182. data/vendor/v8/src/deoptimizer.cc +173 -62
  183. data/vendor/v8/src/deoptimizer.h +119 -19
  184. data/vendor/v8/src/disasm.h +3 -0
  185. data/vendor/v8/src/disassembler.cc +10 -9
  186. data/vendor/v8/src/execution.cc +185 -129
  187. data/vendor/v8/src/execution.h +47 -78
  188. data/vendor/v8/src/extensions/experimental/break-iterator.cc +250 -0
  189. data/vendor/v8/src/extensions/experimental/break-iterator.h +89 -0
  190. data/vendor/v8/src/extensions/experimental/experimental.gyp +2 -0
  191. data/vendor/v8/src/extensions/experimental/i18n-extension.cc +22 -2
  192. data/vendor/v8/src/extensions/externalize-string-extension.cc +2 -2
  193. data/vendor/v8/src/extensions/gc-extension.cc +1 -1
  194. data/vendor/v8/src/factory.cc +261 -154
  195. data/vendor/v8/src/factory.h +162 -158
  196. data/vendor/v8/src/flag-definitions.h +17 -11
  197. data/vendor/v8/src/frame-element.cc +0 -5
  198. data/vendor/v8/src/frame-element.h +9 -13
  199. data/vendor/v8/src/frames-inl.h +7 -0
  200. data/vendor/v8/src/frames.cc +56 -46
  201. data/vendor/v8/src/frames.h +36 -25
  202. data/vendor/v8/src/full-codegen.cc +15 -24
  203. data/vendor/v8/src/full-codegen.h +13 -41
  204. data/vendor/v8/src/func-name-inferrer.cc +7 -6
  205. data/vendor/v8/src/func-name-inferrer.h +1 -1
  206. data/vendor/v8/src/gdb-jit.cc +1 -0
  207. data/vendor/v8/src/global-handles.cc +118 -56
  208. data/vendor/v8/src/global-handles.h +98 -40
  209. data/vendor/v8/src/globals.h +2 -2
  210. data/vendor/v8/src/handles-inl.h +106 -9
  211. data/vendor/v8/src/handles.cc +220 -157
  212. data/vendor/v8/src/handles.h +38 -59
  213. data/vendor/v8/src/hashmap.h +3 -3
  214. data/vendor/v8/src/heap-inl.h +141 -25
  215. data/vendor/v8/src/heap-profiler.cc +117 -63
  216. data/vendor/v8/src/heap-profiler.h +38 -21
  217. data/vendor/v8/src/heap.cc +805 -564
  218. data/vendor/v8/src/heap.h +640 -594
  219. data/vendor/v8/src/hydrogen-instructions.cc +216 -73
  220. data/vendor/v8/src/hydrogen-instructions.h +259 -124
  221. data/vendor/v8/src/hydrogen.cc +996 -1171
  222. data/vendor/v8/src/hydrogen.h +163 -144
  223. data/vendor/v8/src/ia32/assembler-ia32-inl.h +12 -11
  224. data/vendor/v8/src/ia32/assembler-ia32.cc +85 -39
  225. data/vendor/v8/src/ia32/assembler-ia32.h +82 -16
  226. data/vendor/v8/src/ia32/builtins-ia32.cc +64 -58
  227. data/vendor/v8/src/ia32/code-stubs-ia32.cc +248 -324
  228. data/vendor/v8/src/ia32/code-stubs-ia32.h +3 -44
  229. data/vendor/v8/src/ia32/codegen-ia32.cc +217 -165
  230. data/vendor/v8/src/ia32/codegen-ia32.h +3 -0
  231. data/vendor/v8/src/ia32/cpu-ia32.cc +6 -5
  232. data/vendor/v8/src/ia32/debug-ia32.cc +8 -5
  233. data/vendor/v8/src/ia32/deoptimizer-ia32.cc +124 -14
  234. data/vendor/v8/src/ia32/disasm-ia32.cc +85 -62
  235. data/vendor/v8/src/ia32/frames-ia32.h +1 -1
  236. data/vendor/v8/src/ia32/full-codegen-ia32.cc +348 -435
  237. data/vendor/v8/src/ia32/ic-ia32.cc +91 -91
  238. data/vendor/v8/src/ia32/lithium-codegen-ia32.cc +500 -255
  239. data/vendor/v8/src/ia32/lithium-codegen-ia32.h +13 -4
  240. data/vendor/v8/src/ia32/lithium-gap-resolver-ia32.cc +6 -0
  241. data/vendor/v8/src/ia32/lithium-ia32.cc +122 -45
  242. data/vendor/v8/src/ia32/lithium-ia32.h +128 -41
  243. data/vendor/v8/src/ia32/macro-assembler-ia32.cc +109 -84
  244. data/vendor/v8/src/ia32/macro-assembler-ia32.h +18 -9
  245. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.cc +26 -15
  246. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.h +1 -0
  247. data/vendor/v8/src/ia32/register-allocator-ia32.cc +30 -30
  248. data/vendor/v8/src/ia32/simulator-ia32.h +4 -4
  249. data/vendor/v8/src/ia32/stub-cache-ia32.cc +383 -400
  250. data/vendor/v8/src/ia32/virtual-frame-ia32.cc +36 -13
  251. data/vendor/v8/src/ia32/virtual-frame-ia32.h +11 -5
  252. data/vendor/v8/src/ic-inl.h +12 -2
  253. data/vendor/v8/src/ic.cc +304 -221
  254. data/vendor/v8/src/ic.h +115 -58
  255. data/vendor/v8/src/interpreter-irregexp.cc +25 -21
  256. data/vendor/v8/src/interpreter-irregexp.h +2 -1
  257. data/vendor/v8/src/isolate.cc +883 -0
  258. data/vendor/v8/src/isolate.h +1304 -0
  259. data/vendor/v8/src/json.js +10 -10
  260. data/vendor/v8/src/jsregexp.cc +111 -80
  261. data/vendor/v8/src/jsregexp.h +6 -7
  262. data/vendor/v8/src/jump-target-heavy.cc +5 -8
  263. data/vendor/v8/src/jump-target-heavy.h +0 -6
  264. data/vendor/v8/src/jump-target-inl.h +1 -1
  265. data/vendor/v8/src/jump-target-light.cc +3 -3
  266. data/vendor/v8/src/lithium-allocator-inl.h +2 -0
  267. data/vendor/v8/src/lithium-allocator.cc +42 -30
  268. data/vendor/v8/src/lithium-allocator.h +8 -22
  269. data/vendor/v8/src/lithium.cc +1 -0
  270. data/vendor/v8/src/liveedit.cc +141 -99
  271. data/vendor/v8/src/liveedit.h +7 -2
  272. data/vendor/v8/src/liveobjectlist-inl.h +90 -0
  273. data/vendor/v8/src/liveobjectlist.cc +2537 -1
  274. data/vendor/v8/src/liveobjectlist.h +245 -35
  275. data/vendor/v8/src/log-utils.cc +122 -35
  276. data/vendor/v8/src/log-utils.h +33 -36
  277. data/vendor/v8/src/log.cc +299 -241
  278. data/vendor/v8/src/log.h +177 -110
  279. data/vendor/v8/src/mark-compact.cc +612 -470
  280. data/vendor/v8/src/mark-compact.h +153 -80
  281. data/vendor/v8/src/messages.cc +16 -14
  282. data/vendor/v8/src/messages.js +30 -7
  283. data/vendor/v8/src/mips/assembler-mips-inl.h +155 -35
  284. data/vendor/v8/src/mips/assembler-mips.cc +1093 -219
  285. data/vendor/v8/src/mips/assembler-mips.h +552 -153
  286. data/vendor/v8/src/mips/builtins-mips.cc +43 -100
  287. data/vendor/v8/src/mips/code-stubs-mips.cc +752 -0
  288. data/vendor/v8/src/mips/code-stubs-mips.h +511 -0
  289. data/vendor/v8/src/mips/codegen-mips-inl.h +8 -14
  290. data/vendor/v8/src/mips/codegen-mips.cc +672 -896
  291. data/vendor/v8/src/mips/codegen-mips.h +271 -69
  292. data/vendor/v8/src/mips/constants-mips.cc +44 -20
  293. data/vendor/v8/src/mips/constants-mips.h +238 -40
  294. data/vendor/v8/src/mips/cpu-mips.cc +20 -3
  295. data/vendor/v8/src/mips/debug-mips.cc +35 -7
  296. data/vendor/v8/src/mips/deoptimizer-mips.cc +91 -0
  297. data/vendor/v8/src/mips/disasm-mips.cc +329 -93
  298. data/vendor/v8/src/mips/frames-mips.cc +2 -50
  299. data/vendor/v8/src/mips/frames-mips.h +24 -9
  300. data/vendor/v8/src/mips/full-codegen-mips.cc +473 -23
  301. data/vendor/v8/src/mips/ic-mips.cc +81 -45
  302. data/vendor/v8/src/mips/jump-target-mips.cc +11 -106
  303. data/vendor/v8/src/mips/lithium-codegen-mips.h +65 -0
  304. data/vendor/v8/src/mips/lithium-mips.h +304 -0
  305. data/vendor/v8/src/mips/macro-assembler-mips.cc +2391 -390
  306. data/vendor/v8/src/mips/macro-assembler-mips.h +718 -121
  307. data/vendor/v8/src/mips/regexp-macro-assembler-mips.cc +478 -0
  308. data/vendor/v8/src/mips/regexp-macro-assembler-mips.h +250 -0
  309. data/vendor/v8/src/mips/register-allocator-mips-inl.h +0 -3
  310. data/vendor/v8/src/mips/register-allocator-mips.h +3 -2
  311. data/vendor/v8/src/mips/simulator-mips.cc +1009 -221
  312. data/vendor/v8/src/mips/simulator-mips.h +119 -36
  313. data/vendor/v8/src/mips/stub-cache-mips.cc +331 -148
  314. data/vendor/v8/src/mips/{fast-codegen-mips.cc → virtual-frame-mips-inl.h} +11 -30
  315. data/vendor/v8/src/mips/virtual-frame-mips.cc +137 -149
  316. data/vendor/v8/src/mips/virtual-frame-mips.h +294 -312
  317. data/vendor/v8/src/mirror-debugger.js +9 -8
  318. data/vendor/v8/src/mksnapshot.cc +2 -2
  319. data/vendor/v8/src/objects-debug.cc +16 -16
  320. data/vendor/v8/src/objects-inl.h +421 -195
  321. data/vendor/v8/src/objects-printer.cc +7 -7
  322. data/vendor/v8/src/objects-visiting.cc +1 -1
  323. data/vendor/v8/src/objects-visiting.h +33 -12
  324. data/vendor/v8/src/objects.cc +935 -658
  325. data/vendor/v8/src/objects.h +234 -139
  326. data/vendor/v8/src/parser.cc +484 -439
  327. data/vendor/v8/src/parser.h +35 -14
  328. data/vendor/v8/src/platform-cygwin.cc +173 -107
  329. data/vendor/v8/src/platform-freebsd.cc +224 -72
  330. data/vendor/v8/src/platform-linux.cc +234 -95
  331. data/vendor/v8/src/platform-macos.cc +215 -82
  332. data/vendor/v8/src/platform-nullos.cc +9 -3
  333. data/vendor/v8/src/platform-openbsd.cc +22 -7
  334. data/vendor/v8/src/platform-posix.cc +30 -5
  335. data/vendor/v8/src/platform-solaris.cc +120 -38
  336. data/vendor/v8/src/platform-tls-mac.h +62 -0
  337. data/vendor/v8/src/platform-tls-win32.h +62 -0
  338. data/vendor/v8/src/platform-tls.h +50 -0
  339. data/vendor/v8/src/platform-win32.cc +195 -97
  340. data/vendor/v8/src/platform.h +72 -15
  341. data/vendor/v8/src/preparse-data.cc +2 -0
  342. data/vendor/v8/src/preparser-api.cc +8 -2
  343. data/vendor/v8/src/preparser.cc +1 -1
  344. data/vendor/v8/src/prettyprinter.cc +43 -52
  345. data/vendor/v8/src/prettyprinter.h +1 -1
  346. data/vendor/v8/src/profile-generator-inl.h +0 -28
  347. data/vendor/v8/src/profile-generator.cc +942 -685
  348. data/vendor/v8/src/profile-generator.h +210 -176
  349. data/vendor/v8/src/property.cc +6 -0
  350. data/vendor/v8/src/property.h +14 -3
  351. data/vendor/v8/src/regexp-macro-assembler-irregexp.cc +1 -1
  352. data/vendor/v8/src/regexp-macro-assembler.cc +28 -19
  353. data/vendor/v8/src/regexp-macro-assembler.h +11 -6
  354. data/vendor/v8/src/regexp-stack.cc +18 -10
  355. data/vendor/v8/src/regexp-stack.h +45 -21
  356. data/vendor/v8/src/regexp.js +3 -3
  357. data/vendor/v8/src/register-allocator-inl.h +3 -3
  358. data/vendor/v8/src/register-allocator.cc +1 -7
  359. data/vendor/v8/src/register-allocator.h +5 -15
  360. data/vendor/v8/src/rewriter.cc +2 -1
  361. data/vendor/v8/src/runtime-profiler.cc +158 -128
  362. data/vendor/v8/src/runtime-profiler.h +131 -15
  363. data/vendor/v8/src/runtime.cc +2409 -1692
  364. data/vendor/v8/src/runtime.h +93 -17
  365. data/vendor/v8/src/safepoint-table.cc +3 -0
  366. data/vendor/v8/src/safepoint-table.h +9 -3
  367. data/vendor/v8/src/scanner-base.cc +21 -28
  368. data/vendor/v8/src/scanner-base.h +22 -11
  369. data/vendor/v8/src/scanner.cc +3 -5
  370. data/vendor/v8/src/scanner.h +4 -2
  371. data/vendor/v8/src/scopeinfo.cc +11 -16
  372. data/vendor/v8/src/scopeinfo.h +26 -15
  373. data/vendor/v8/src/scopes.cc +67 -37
  374. data/vendor/v8/src/scopes.h +26 -12
  375. data/vendor/v8/src/serialize.cc +193 -154
  376. data/vendor/v8/src/serialize.h +41 -36
  377. data/vendor/v8/src/small-pointer-list.h +163 -0
  378. data/vendor/v8/src/snapshot-common.cc +1 -1
  379. data/vendor/v8/src/snapshot.h +3 -1
  380. data/vendor/v8/src/spaces-inl.h +30 -25
  381. data/vendor/v8/src/spaces.cc +263 -370
  382. data/vendor/v8/src/spaces.h +178 -166
  383. data/vendor/v8/src/string-search.cc +4 -3
  384. data/vendor/v8/src/string-search.h +21 -20
  385. data/vendor/v8/src/string-stream.cc +32 -24
  386. data/vendor/v8/src/string.js +7 -7
  387. data/vendor/v8/src/stub-cache.cc +324 -248
  388. data/vendor/v8/src/stub-cache.h +181 -155
  389. data/vendor/v8/src/token.cc +3 -3
  390. data/vendor/v8/src/token.h +3 -3
  391. data/vendor/v8/src/top.cc +218 -390
  392. data/vendor/v8/src/type-info.cc +98 -32
  393. data/vendor/v8/src/type-info.h +10 -3
  394. data/vendor/v8/src/unicode.cc +1 -1
  395. data/vendor/v8/src/unicode.h +1 -1
  396. data/vendor/v8/src/utils.h +3 -0
  397. data/vendor/v8/src/v8-counters.cc +18 -11
  398. data/vendor/v8/src/v8-counters.h +34 -13
  399. data/vendor/v8/src/v8.cc +66 -121
  400. data/vendor/v8/src/v8.h +7 -4
  401. data/vendor/v8/src/v8globals.h +18 -12
  402. data/vendor/v8/src/{memory.h → v8memory.h} +0 -0
  403. data/vendor/v8/src/v8natives.js +59 -18
  404. data/vendor/v8/src/v8threads.cc +127 -114
  405. data/vendor/v8/src/v8threads.h +42 -35
  406. data/vendor/v8/src/v8utils.h +2 -39
  407. data/vendor/v8/src/variables.h +1 -1
  408. data/vendor/v8/src/version.cc +26 -5
  409. data/vendor/v8/src/version.h +4 -0
  410. data/vendor/v8/src/virtual-frame-heavy-inl.h +2 -4
  411. data/vendor/v8/src/virtual-frame-light-inl.h +5 -4
  412. data/vendor/v8/src/vm-state-inl.h +21 -17
  413. data/vendor/v8/src/vm-state.h +7 -5
  414. data/vendor/v8/src/win32-headers.h +1 -0
  415. data/vendor/v8/src/x64/assembler-x64-inl.h +12 -11
  416. data/vendor/v8/src/x64/assembler-x64.cc +80 -40
  417. data/vendor/v8/src/x64/assembler-x64.h +67 -17
  418. data/vendor/v8/src/x64/builtins-x64.cc +34 -33
  419. data/vendor/v8/src/x64/code-stubs-x64.cc +636 -377
  420. data/vendor/v8/src/x64/code-stubs-x64.h +14 -48
  421. data/vendor/v8/src/x64/codegen-x64-inl.h +1 -1
  422. data/vendor/v8/src/x64/codegen-x64.cc +158 -136
  423. data/vendor/v8/src/x64/codegen-x64.h +4 -1
  424. data/vendor/v8/src/x64/cpu-x64.cc +7 -5
  425. data/vendor/v8/src/x64/debug-x64.cc +8 -6
  426. data/vendor/v8/src/x64/deoptimizer-x64.cc +195 -20
  427. data/vendor/v8/src/x64/disasm-x64.cc +42 -23
  428. data/vendor/v8/src/x64/frames-x64.cc +1 -1
  429. data/vendor/v8/src/x64/frames-x64.h +2 -2
  430. data/vendor/v8/src/x64/full-codegen-x64.cc +780 -218
  431. data/vendor/v8/src/x64/ic-x64.cc +77 -79
  432. data/vendor/v8/src/x64/jump-target-x64.cc +1 -1
  433. data/vendor/v8/src/x64/lithium-codegen-x64.cc +698 -181
  434. data/vendor/v8/src/x64/lithium-codegen-x64.h +31 -6
  435. data/vendor/v8/src/x64/lithium-x64.cc +136 -54
  436. data/vendor/v8/src/x64/lithium-x64.h +142 -51
  437. data/vendor/v8/src/x64/macro-assembler-x64.cc +456 -187
  438. data/vendor/v8/src/x64/macro-assembler-x64.h +166 -34
  439. data/vendor/v8/src/x64/regexp-macro-assembler-x64.cc +44 -28
  440. data/vendor/v8/src/x64/regexp-macro-assembler-x64.h +8 -4
  441. data/vendor/v8/src/x64/register-allocator-x64-inl.h +3 -3
  442. data/vendor/v8/src/x64/register-allocator-x64.cc +12 -8
  443. data/vendor/v8/src/x64/simulator-x64.h +5 -5
  444. data/vendor/v8/src/x64/stub-cache-x64.cc +299 -344
  445. data/vendor/v8/src/x64/virtual-frame-x64.cc +37 -13
  446. data/vendor/v8/src/x64/virtual-frame-x64.h +13 -7
  447. data/vendor/v8/src/zone-inl.h +49 -3
  448. data/vendor/v8/src/zone.cc +42 -41
  449. data/vendor/v8/src/zone.h +37 -34
  450. data/vendor/v8/test/benchmarks/testcfg.py +100 -0
  451. data/vendor/v8/test/cctest/SConscript +5 -4
  452. data/vendor/v8/test/cctest/cctest.h +3 -2
  453. data/vendor/v8/test/cctest/cctest.status +6 -11
  454. data/vendor/v8/test/cctest/test-accessors.cc +3 -3
  455. data/vendor/v8/test/cctest/test-alloc.cc +39 -33
  456. data/vendor/v8/test/cctest/test-api.cc +1092 -205
  457. data/vendor/v8/test/cctest/test-assembler-arm.cc +39 -25
  458. data/vendor/v8/test/cctest/test-assembler-ia32.cc +36 -37
  459. data/vendor/v8/test/cctest/test-assembler-mips.cc +1098 -40
  460. data/vendor/v8/test/cctest/test-assembler-x64.cc +32 -25
  461. data/vendor/v8/test/cctest/test-ast.cc +1 -0
  462. data/vendor/v8/test/cctest/test-circular-queue.cc +8 -5
  463. data/vendor/v8/test/cctest/test-compiler.cc +24 -24
  464. data/vendor/v8/test/cctest/test-cpu-profiler.cc +140 -5
  465. data/vendor/v8/test/cctest/test-dataflow.cc +1 -0
  466. data/vendor/v8/test/cctest/test-debug.cc +136 -77
  467. data/vendor/v8/test/cctest/test-decls.cc +1 -1
  468. data/vendor/v8/test/cctest/test-deoptimization.cc +25 -24
  469. data/vendor/v8/test/cctest/test-disasm-arm.cc +9 -4
  470. data/vendor/v8/test/cctest/test-disasm-ia32.cc +10 -8
  471. data/vendor/v8/test/cctest/test-func-name-inference.cc +10 -4
  472. data/vendor/v8/test/cctest/test-heap-profiler.cc +226 -164
  473. data/vendor/v8/test/cctest/test-heap.cc +240 -217
  474. data/vendor/v8/test/cctest/test-liveedit.cc +1 -0
  475. data/vendor/v8/test/cctest/test-log-stack-tracer.cc +18 -20
  476. data/vendor/v8/test/cctest/test-log.cc +114 -108
  477. data/vendor/v8/test/cctest/test-macro-assembler-x64.cc +247 -177
  478. data/vendor/v8/test/cctest/test-mark-compact.cc +129 -90
  479. data/vendor/v8/test/cctest/test-parsing.cc +15 -14
  480. data/vendor/v8/test/cctest/test-platform-linux.cc +1 -0
  481. data/vendor/v8/test/cctest/test-platform-tls.cc +66 -0
  482. data/vendor/v8/test/cctest/test-platform-win32.cc +1 -0
  483. data/vendor/v8/test/cctest/test-profile-generator.cc +1 -1
  484. data/vendor/v8/test/cctest/test-regexp.cc +53 -41
  485. data/vendor/v8/test/cctest/test-reloc-info.cc +18 -11
  486. data/vendor/v8/test/cctest/test-serialize.cc +44 -43
  487. data/vendor/v8/test/cctest/test-sockets.cc +8 -3
  488. data/vendor/v8/test/cctest/test-spaces.cc +47 -29
  489. data/vendor/v8/test/cctest/test-strings.cc +20 -20
  490. data/vendor/v8/test/cctest/test-thread-termination.cc +8 -3
  491. data/vendor/v8/test/cctest/test-threads.cc +5 -3
  492. data/vendor/v8/test/cctest/test-utils.cc +5 -4
  493. data/vendor/v8/test/cctest/testcfg.py +7 -3
  494. data/vendor/v8/test/es5conform/es5conform.status +2 -77
  495. data/vendor/v8/test/es5conform/testcfg.py +1 -1
  496. data/vendor/v8/test/message/testcfg.py +1 -1
  497. data/vendor/v8/test/mjsunit/accessors-on-global-object.js +3 -3
  498. data/vendor/v8/test/mjsunit/array-concat.js +43 -1
  499. data/vendor/v8/test/mjsunit/array-join.js +25 -0
  500. data/vendor/v8/test/mjsunit/bitops-info.js +7 -1
  501. data/vendor/v8/test/mjsunit/compiler/array-length.js +2 -2
  502. data/vendor/v8/test/mjsunit/compiler/global-accessors.js +47 -0
  503. data/vendor/v8/test/mjsunit/compiler/pic.js +1 -1
  504. data/vendor/v8/test/mjsunit/compiler/regress-loadfield.js +65 -0
  505. data/vendor/v8/test/mjsunit/math-sqrt.js +5 -1
  506. data/vendor/v8/test/mjsunit/mjsunit.js +59 -8
  507. data/vendor/v8/test/mjsunit/mjsunit.status +0 -12
  508. data/vendor/v8/test/mjsunit/mul-exhaustive.js +129 -11
  509. data/vendor/v8/test/mjsunit/negate-zero.js +1 -1
  510. data/vendor/v8/test/mjsunit/object-freeze.js +5 -13
  511. data/vendor/v8/test/mjsunit/object-prevent-extensions.js +9 -50
  512. data/vendor/v8/test/mjsunit/object-seal.js +4 -13
  513. data/vendor/v8/test/mjsunit/override-eval-with-non-function.js +36 -0
  514. data/vendor/v8/test/mjsunit/regress/regress-1145.js +54 -0
  515. data/vendor/v8/test/mjsunit/regress/regress-1172-bis.js +37 -0
  516. data/vendor/v8/test/mjsunit/regress/regress-1181.js +54 -0
  517. data/vendor/v8/test/mjsunit/regress/regress-1207.js +35 -0
  518. data/vendor/v8/test/mjsunit/regress/regress-1209.js +34 -0
  519. data/vendor/v8/test/mjsunit/regress/regress-1210.js +48 -0
  520. data/vendor/v8/test/mjsunit/regress/regress-1213.js +43 -0
  521. data/vendor/v8/test/mjsunit/regress/regress-1218.js +29 -0
  522. data/vendor/v8/test/mjsunit/regress/regress-1229.js +79 -0
  523. data/vendor/v8/test/mjsunit/regress/regress-1233.js +47 -0
  524. data/vendor/v8/test/mjsunit/regress/regress-1236.js +34 -0
  525. data/vendor/v8/test/mjsunit/regress/regress-1237.js +36 -0
  526. data/vendor/v8/test/mjsunit/regress/regress-1240.js +39 -0
  527. data/vendor/v8/test/mjsunit/regress/regress-1257.js +58 -0
  528. data/vendor/v8/test/mjsunit/regress/regress-1278.js +69 -0
  529. data/vendor/v8/test/mjsunit/regress/regress-create-exception.js +1 -0
  530. data/vendor/v8/test/mjsunit/regress/regress-lazy-deopt-reloc.js +52 -0
  531. data/vendor/v8/test/mjsunit/sin-cos.js +15 -10
  532. data/vendor/v8/test/mjsunit/smi-negative-zero.js +2 -2
  533. data/vendor/v8/test/mjsunit/str-to-num.js +1 -1
  534. data/vendor/v8/test/mjsunit/strict-mode.js +435 -0
  535. data/vendor/v8/test/mjsunit/testcfg.py +23 -6
  536. data/vendor/v8/test/mozilla/mozilla.status +0 -2
  537. data/vendor/v8/test/mozilla/testcfg.py +1 -1
  538. data/vendor/v8/test/preparser/empty.js +28 -0
  539. data/vendor/v8/test/preparser/functions-only.js +38 -0
  540. data/vendor/v8/test/preparser/non-alphanum.js +34 -0
  541. data/vendor/v8/test/preparser/symbols-only.js +49 -0
  542. data/vendor/v8/test/preparser/testcfg.py +90 -0
  543. data/vendor/v8/test/sputnik/testcfg.py +1 -1
  544. data/vendor/v8/test/test262/README +16 -0
  545. data/vendor/v8/test/test262/harness-adapt.js +80 -0
  546. data/vendor/v8/test/test262/test262.status +1506 -0
  547. data/vendor/v8/test/test262/testcfg.py +123 -0
  548. data/vendor/v8/tools/freebsd-tick-processor +10 -0
  549. data/vendor/v8/tools/gyp/v8.gyp +8 -33
  550. data/vendor/v8/tools/linux-tick-processor +5 -3
  551. data/vendor/v8/tools/test.py +37 -14
  552. data/vendor/v8/tools/tickprocessor.js +22 -8
  553. data/vendor/v8/tools/visual_studio/v8_base.vcproj +13 -1
  554. data/vendor/v8/tools/visual_studio/v8_base_arm.vcproj +5 -1
  555. data/vendor/v8/tools/visual_studio/v8_base_x64.vcproj +5 -1
  556. data/vendor/v8/tools/visual_studio/x64.vsprops +1 -0
  557. metadata +1495 -1341
  558. data/ext/extconf.rb +0 -22
  559. data/ext/mustang.cpp +0 -58
  560. data/vendor/v8/src/top.h +0 -608
@@ -25,6 +25,8 @@
25
25
  // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
26
  // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
27
 
28
+ #include "v8.h"
29
+
28
30
  #include "arm/lithium-codegen-arm.h"
29
31
  #include "arm/lithium-gap-resolver-arm.h"
30
32
  #include "code-stubs.h"
@@ -34,7 +36,7 @@ namespace v8 {
34
36
  namespace internal {
35
37
 
36
38
 
37
- class SafepointGenerator : public PostCallGenerator {
39
+ class SafepointGenerator : public CallWrapper {
38
40
  public:
39
41
  SafepointGenerator(LCodeGen* codegen,
40
42
  LPointerMap* pointers,
@@ -44,7 +46,24 @@ class SafepointGenerator : public PostCallGenerator {
44
46
  deoptimization_index_(deoptimization_index) { }
45
47
  virtual ~SafepointGenerator() { }
46
48
 
47
- virtual void Generate() {
49
+ virtual void BeforeCall(int call_size) {
50
+ ASSERT(call_size >= 0);
51
+ // Ensure that we have enough space after the previous safepoint position
52
+ // for the generated code there.
53
+ int call_end = codegen_->masm()->pc_offset() + call_size;
54
+ int prev_jump_end =
55
+ codegen_->LastSafepointEnd() + Deoptimizer::patch_size();
56
+ if (call_end < prev_jump_end) {
57
+ int padding_size = prev_jump_end - call_end;
58
+ ASSERT_EQ(0, padding_size % Assembler::kInstrSize);
59
+ while (padding_size > 0) {
60
+ codegen_->masm()->nop();
61
+ padding_size -= Assembler::kInstrSize;
62
+ }
63
+ }
64
+ }
65
+
66
+ virtual void AfterCall() {
48
67
  codegen_->RecordSafepoint(pointers_, deoptimization_index_);
49
68
  }
50
69
 
@@ -75,13 +94,14 @@ void LCodeGen::FinishCode(Handle<Code> code) {
75
94
  code->set_stack_slots(StackSlotCount());
76
95
  code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
77
96
  PopulateDeoptimizationData(code);
97
+ Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
78
98
  }
79
99
 
80
100
 
81
101
  void LCodeGen::Abort(const char* format, ...) {
82
102
  if (FLAG_trace_bailout) {
83
- SmartPointer<char> debug_name = graph()->debug_name()->ToCString();
84
- PrintF("Aborting LCodeGen in @\"%s\": ", *debug_name);
103
+ SmartPointer<char> name(info()->shared_info()->DebugName()->ToCString());
104
+ PrintF("Aborting LCodeGen in @\"%s\": ", *name);
85
105
  va_list arguments;
86
106
  va_start(arguments, format);
87
107
  OS::VPrint(format, arguments);
@@ -472,7 +492,7 @@ void LCodeGen::CallCode(Handle<Code> code,
472
492
  }
473
493
 
474
494
 
475
- void LCodeGen::CallRuntime(Runtime::Function* function,
495
+ void LCodeGen::CallRuntime(const Runtime::Function* function,
476
496
  int num_arguments,
477
497
  LInstruction* instr) {
478
498
  ASSERT(instr != NULL);
@@ -571,13 +591,14 @@ void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
571
591
  if (length == 0) return;
572
592
  ASSERT(FLAG_deopt);
573
593
  Handle<DeoptimizationInputData> data =
574
- Factory::NewDeoptimizationInputData(length, TENURED);
594
+ factory()->NewDeoptimizationInputData(length, TENURED);
575
595
 
576
- data->SetTranslationByteArray(*translations_.CreateByteArray());
596
+ Handle<ByteArray> translations = translations_.CreateByteArray();
597
+ data->SetTranslationByteArray(*translations);
577
598
  data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));
578
599
 
579
600
  Handle<FixedArray> literals =
580
- Factory::NewFixedArray(deoptimization_literals_.length(), TENURED);
601
+ factory()->NewFixedArray(deoptimization_literals_.length(), TENURED);
581
602
  for (int i = 0; i < deoptimization_literals_.length(); i++) {
582
603
  literals->set(i, *deoptimization_literals_[i]);
583
604
  }
@@ -739,15 +760,6 @@ void LCodeGen::DoCallStub(LCallStub* instr) {
739
760
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
740
761
  break;
741
762
  }
742
- case CodeStub::StringCharAt: {
743
- StringCharAtStub stub;
744
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
745
- break;
746
- }
747
- case CodeStub::MathPow: {
748
- Abort("MathPowStub unimplemented.");
749
- break;
750
- }
751
763
  case CodeStub::NumberToString: {
752
764
  NumberToStringStub stub;
753
765
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
@@ -765,7 +777,8 @@ void LCodeGen::DoCallStub(LCallStub* instr) {
765
777
  }
766
778
  case CodeStub::TranscendentalCache: {
767
779
  __ ldr(r0, MemOperand(sp, 0));
768
- TranscendentalCacheStub stub(instr->transcendental_type());
780
+ TranscendentalCacheStub stub(instr->transcendental_type(),
781
+ TranscendentalCacheStub::TAGGED);
769
782
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
770
783
  break;
771
784
  }
@@ -781,55 +794,91 @@ void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
781
794
 
782
795
 
783
796
  void LCodeGen::DoModI(LModI* instr) {
784
- class DeferredModI: public LDeferredCode {
785
- public:
786
- DeferredModI(LCodeGen* codegen, LModI* instr)
787
- : LDeferredCode(codegen), instr_(instr) { }
788
- virtual void Generate() {
789
- codegen()->DoDeferredBinaryOpStub(instr_, Token::MOD);
797
+ if (instr->hydrogen()->HasPowerOf2Divisor()) {
798
+ Register dividend = ToRegister(instr->InputAt(0));
799
+
800
+ int32_t divisor =
801
+ HConstant::cast(instr->hydrogen()->right())->Integer32Value();
802
+
803
+ if (divisor < 0) divisor = -divisor;
804
+
805
+ Label positive_dividend, done;
806
+ __ cmp(dividend, Operand(0));
807
+ __ b(pl, &positive_dividend);
808
+ __ rsb(dividend, dividend, Operand(0));
809
+ __ and_(dividend, dividend, Operand(divisor - 1));
810
+ __ rsb(dividend, dividend, Operand(0), SetCC);
811
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
812
+ __ b(ne, &done);
813
+ DeoptimizeIf(al, instr->environment());
790
814
  }
791
- private:
792
- LModI* instr_;
793
- };
815
+ __ bind(&positive_dividend);
816
+ __ and_(dividend, dividend, Operand(divisor - 1));
817
+ __ bind(&done);
818
+ return;
819
+ }
820
+
794
821
  // These registers hold untagged 32 bit values.
795
822
  Register left = ToRegister(instr->InputAt(0));
796
823
  Register right = ToRegister(instr->InputAt(1));
797
824
  Register result = ToRegister(instr->result());
825
+
798
826
  Register scratch = scratch0();
827
+ Register scratch2 = ToRegister(instr->TempAt(0));
828
+ DwVfpRegister dividend = ToDoubleRegister(instr->TempAt(1));
829
+ DwVfpRegister divisor = ToDoubleRegister(instr->TempAt(2));
830
+ DwVfpRegister quotient = double_scratch0();
831
+
832
+ ASSERT(result.is(left));
833
+
834
+ ASSERT(!dividend.is(divisor));
835
+ ASSERT(!dividend.is(quotient));
836
+ ASSERT(!divisor.is(quotient));
837
+ ASSERT(!scratch.is(left));
838
+ ASSERT(!scratch.is(right));
839
+ ASSERT(!scratch.is(result));
840
+
841
+ Label done, vfp_modulo, both_positive, right_negative;
799
842
 
800
- Label deoptimize, done;
801
843
  // Check for x % 0.
802
844
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
803
- __ tst(right, Operand(right));
804
- __ b(eq, &deoptimize);
845
+ __ cmp(right, Operand(0));
846
+ DeoptimizeIf(eq, instr->environment());
805
847
  }
806
848
 
807
- // Check for (0 % -x) that will produce negative zero.
808
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
809
- Label ok;
810
- __ tst(left, Operand(left));
811
- __ b(ne, &ok);
812
- __ tst(right, Operand(right));
813
- __ b(pl, &ok);
814
- __ b(al, &deoptimize);
815
- __ bind(&ok);
816
- }
849
+ // (0 % x) must yield 0 (if x is finite, which is the case here).
850
+ __ cmp(left, Operand(0));
851
+ __ b(eq, &done);
852
+ // Preload right in a vfp register.
853
+ __ vmov(divisor.low(), right);
854
+ __ b(lt, &vfp_modulo);
855
+
856
+ __ cmp(left, Operand(right));
857
+ __ b(lt, &done);
858
+
859
+ // Check for (positive) power of two on the right hand side.
860
+ __ JumpIfNotPowerOfTwoOrZeroAndNeg(right,
861
+ scratch,
862
+ &right_negative,
863
+ &both_positive);
864
+ // Perform modulo operation (scratch contains right - 1).
865
+ __ and_(result, scratch, Operand(left));
866
+ __ b(&done);
817
867
 
818
- // Try a few common cases before using the stub.
819
- Label call_stub;
868
+ __ bind(&right_negative);
869
+ // Negate right. The sign of the divisor does not matter.
870
+ __ rsb(right, right, Operand(0));
871
+
872
+ __ bind(&both_positive);
820
873
  const int kUnfolds = 3;
821
- // Skip if either side is negative.
822
- __ cmp(left, Operand(0));
823
- __ cmp(right, Operand(0), NegateCondition(mi));
824
- __ b(mi, &call_stub);
825
874
  // If the right hand side is smaller than the (nonnegative)
826
- // left hand side, it is the result. Else try a few subtractions
827
- // of the left hand side.
875
+ // left hand side, the left hand side is the result.
876
+ // Else try a few subtractions of the left hand side.
828
877
  __ mov(scratch, left);
829
878
  for (int i = 0; i < kUnfolds; i++) {
830
879
  // Check if the left hand side is less or equal than the
831
880
  // the right hand side.
832
- __ cmp(scratch, right);
881
+ __ cmp(scratch, Operand(right));
833
882
  __ mov(result, scratch, LeaveCC, lt);
834
883
  __ b(lt, &done);
835
884
  // If not, reduce the left hand side by the right hand
@@ -837,28 +886,45 @@ void LCodeGen::DoModI(LModI* instr) {
837
886
  if (i < kUnfolds - 1) __ sub(scratch, scratch, right);
838
887
  }
839
888
 
840
- // Check for power of two on the right hand side.
841
- __ JumpIfNotPowerOfTwoOrZero(right, scratch, &call_stub);
842
- // Perform modulo operation (scratch contains right - 1).
843
- __ and_(result, scratch, Operand(left));
844
-
845
- __ bind(&call_stub);
846
- // Call the stub. The numbers in r0 and r1 have
847
- // to be tagged to Smis. If that is not possible, deoptimize.
848
- DeferredModI* deferred = new DeferredModI(this, instr);
849
- __ TrySmiTag(left, &deoptimize, scratch);
850
- __ TrySmiTag(right, &deoptimize, scratch);
851
-
852
- __ b(al, deferred->entry());
853
- __ bind(deferred->exit());
854
-
855
- // If the result in r0 is a Smi, untag it, else deoptimize.
856
- __ JumpIfNotSmi(result, &deoptimize);
857
- __ SmiUntag(result);
889
+ __ bind(&vfp_modulo);
890
+ // Load the arguments in VFP registers.
891
+ // The divisor value is preloaded before. Be careful that 'right' is only live
892
+ // on entry.
893
+ __ vmov(dividend.low(), left);
894
+ // From here on don't use right as it may have been reallocated (for example
895
+ // to scratch2).
896
+ right = no_reg;
897
+
898
+ __ vcvt_f64_s32(dividend, dividend.low());
899
+ __ vcvt_f64_s32(divisor, divisor.low());
900
+
901
+ // We do not care about the sign of the divisor.
902
+ __ vabs(divisor, divisor);
903
+ // Compute the quotient and round it to a 32bit integer.
904
+ __ vdiv(quotient, dividend, divisor);
905
+ __ vcvt_s32_f64(quotient.low(), quotient);
906
+ __ vcvt_f64_s32(quotient, quotient.low());
907
+
908
+ // Compute the remainder in result.
909
+ DwVfpRegister double_scratch = dividend;
910
+ __ vmul(double_scratch, divisor, quotient);
911
+ __ vcvt_s32_f64(double_scratch.low(), double_scratch);
912
+ __ vmov(scratch, double_scratch.low());
913
+
914
+ if (!instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
915
+ __ sub(result, left, scratch);
916
+ } else {
917
+ Label ok;
918
+ // Check for -0.
919
+ __ sub(scratch2, left, scratch, SetCC);
920
+ __ b(ne, &ok);
921
+ __ cmp(left, Operand(0));
922
+ DeoptimizeIf(mi, instr->environment());
923
+ __ bind(&ok);
924
+ // Load the result and we are done.
925
+ __ mov(result, scratch2);
926
+ }
858
927
 
859
- __ b(al, &done);
860
- __ bind(&deoptimize);
861
- DeoptimizeIf(al, instr->environment());
862
928
  __ bind(&done);
863
929
  }
864
930
 
@@ -882,16 +948,16 @@ void LCodeGen::DoDivI(LDivI* instr) {
882
948
 
883
949
  // Check for x / 0.
884
950
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
885
- __ tst(right, right);
951
+ __ cmp(right, Operand(0));
886
952
  DeoptimizeIf(eq, instr->environment());
887
953
  }
888
954
 
889
955
  // Check for (0 / -x) that will produce negative zero.
890
956
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
891
957
  Label left_not_zero;
892
- __ tst(left, Operand(left));
958
+ __ cmp(left, Operand(0));
893
959
  __ b(ne, &left_not_zero);
894
- __ tst(right, Operand(right));
960
+ __ cmp(right, Operand(0));
895
961
  DeoptimizeIf(mi, instr->environment());
896
962
  __ bind(&left_not_zero);
897
963
  }
@@ -998,7 +1064,7 @@ void LCodeGen::DoMulI(LMulI* instr) {
998
1064
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
999
1065
  // Bail out if the result is supposed to be negative zero.
1000
1066
  Label done;
1001
- __ tst(left, Operand(left));
1067
+ __ cmp(left, Operand(0));
1002
1068
  __ b(ne, &done);
1003
1069
  if (instr->InputAt(1)->IsConstantOperand()) {
1004
1070
  if (ToInteger32(LConstantOperand::cast(instr->InputAt(1))) <= 0) {
@@ -1020,16 +1086,25 @@ void LCodeGen::DoBitI(LBitI* instr) {
1020
1086
  ASSERT(left->Equals(instr->result()));
1021
1087
  ASSERT(left->IsRegister());
1022
1088
  Register result = ToRegister(left);
1023
- Register right_reg = EmitLoadRegister(right, ip);
1089
+ Operand right_operand(no_reg);
1090
+
1091
+ if (right->IsStackSlot() || right->IsArgument()) {
1092
+ Register right_reg = EmitLoadRegister(right, ip);
1093
+ right_operand = Operand(right_reg);
1094
+ } else {
1095
+ ASSERT(right->IsRegister() || right->IsConstantOperand());
1096
+ right_operand = ToOperand(right);
1097
+ }
1098
+
1024
1099
  switch (instr->op()) {
1025
1100
  case Token::BIT_AND:
1026
- __ and_(result, ToRegister(left), Operand(right_reg));
1101
+ __ and_(result, ToRegister(left), right_operand);
1027
1102
  break;
1028
1103
  case Token::BIT_OR:
1029
- __ orr(result, ToRegister(left), Operand(right_reg));
1104
+ __ orr(result, ToRegister(left), right_operand);
1030
1105
  break;
1031
1106
  case Token::BIT_XOR:
1032
- __ eor(result, ToRegister(left), Operand(right_reg));
1107
+ __ eor(result, ToRegister(left), right_operand);
1033
1108
  break;
1034
1109
  default:
1035
1110
  UNREACHABLE();
@@ -1098,11 +1173,21 @@ void LCodeGen::DoShiftI(LShiftI* instr) {
1098
1173
 
1099
1174
 
1100
1175
  void LCodeGen::DoSubI(LSubI* instr) {
1101
- Register left = ToRegister(instr->InputAt(0));
1102
- Register right = EmitLoadRegister(instr->InputAt(1), ip);
1103
- ASSERT(instr->InputAt(0)->Equals(instr->result()));
1104
- __ sub(left, left, right, SetCC);
1105
- if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
1176
+ LOperand* left = instr->InputAt(0);
1177
+ LOperand* right = instr->InputAt(1);
1178
+ ASSERT(left->Equals(instr->result()));
1179
+ bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
1180
+ SBit set_cond = can_overflow ? SetCC : LeaveCC;
1181
+
1182
+ if (right->IsStackSlot() || right->IsArgument()) {
1183
+ Register right_reg = EmitLoadRegister(right, ip);
1184
+ __ sub(ToRegister(left), ToRegister(left), Operand(right_reg), set_cond);
1185
+ } else {
1186
+ ASSERT(right->IsRegister() || right->IsConstantOperand());
1187
+ __ sub(ToRegister(left), ToRegister(left), ToOperand(right), set_cond);
1188
+ }
1189
+
1190
+ if (can_overflow) {
1106
1191
  DeoptimizeIf(vs, instr->environment());
1107
1192
  }
1108
1193
  }
@@ -1135,10 +1220,10 @@ void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
1135
1220
  }
1136
1221
 
1137
1222
 
1138
- void LCodeGen::DoPixelArrayLength(LPixelArrayLength* instr) {
1223
+ void LCodeGen::DoExternalArrayLength(LExternalArrayLength* instr) {
1139
1224
  Register result = ToRegister(instr->result());
1140
1225
  Register array = ToRegister(instr->InputAt(0));
1141
- __ ldr(result, FieldMemOperand(array, PixelArray::kLengthOffset));
1226
+ __ ldr(result, FieldMemOperand(array, ExternalArray::kLengthOffset));
1142
1227
  }
1143
1228
 
1144
1229
 
@@ -1191,11 +1276,18 @@ void LCodeGen::DoAddI(LAddI* instr) {
1191
1276
  LOperand* left = instr->InputAt(0);
1192
1277
  LOperand* right = instr->InputAt(1);
1193
1278
  ASSERT(left->Equals(instr->result()));
1279
+ bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
1280
+ SBit set_cond = can_overflow ? SetCC : LeaveCC;
1194
1281
 
1195
- Register right_reg = EmitLoadRegister(right, ip);
1196
- __ add(ToRegister(left), ToRegister(left), Operand(right_reg), SetCC);
1282
+ if (right->IsStackSlot() || right->IsArgument()) {
1283
+ Register right_reg = EmitLoadRegister(right, ip);
1284
+ __ add(ToRegister(left), ToRegister(left), Operand(right_reg), set_cond);
1285
+ } else {
1286
+ ASSERT(right->IsRegister() || right->IsConstantOperand());
1287
+ __ add(ToRegister(left), ToRegister(left), ToOperand(right), set_cond);
1288
+ }
1197
1289
 
1198
- if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
1290
+ if (can_overflow) {
1199
1291
  DeoptimizeIf(vs, instr->environment());
1200
1292
  }
1201
1293
  }
@@ -1224,7 +1316,8 @@ void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
1224
1316
  __ PrepareCallCFunction(4, scratch0());
1225
1317
  __ vmov(r0, r1, left);
1226
1318
  __ vmov(r2, r3, right);
1227
- __ CallCFunction(ExternalReference::double_fp_operation(Token::MOD), 4);
1319
+ __ CallCFunction(
1320
+ ExternalReference::double_fp_operation(Token::MOD, isolate()), 4);
1228
1321
  // Move the result in the double result register.
1229
1322
  __ GetCFunctionDoubleResult(ToDoubleRegister(instr->result()));
1230
1323
 
@@ -1709,10 +1802,13 @@ void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
1709
1802
  void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
1710
1803
  Register input = ToRegister(instr->InputAt(0));
1711
1804
  Register result = ToRegister(instr->result());
1712
- Register scratch = scratch0();
1713
1805
 
1714
- __ ldr(scratch, FieldMemOperand(input, String::kHashFieldOffset));
1715
- __ IndexFromHash(scratch, result);
1806
+ if (FLAG_debug_code) {
1807
+ __ AbortIfNotString(input);
1808
+ }
1809
+
1810
+ __ ldr(result, FieldMemOperand(input, String::kHashFieldOffset));
1811
+ __ IndexFromHash(result, result);
1716
1812
  }
1717
1813
 
1718
1814
 
@@ -1860,10 +1956,9 @@ void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
1860
1956
  InstanceofStub stub(InstanceofStub::kArgsInRegisters);
1861
1957
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1862
1958
 
1863
- Label true_value, done;
1864
- __ tst(r0, r0);
1865
- __ mov(r0, Operand(Factory::false_value()), LeaveCC, ne);
1866
- __ mov(r0, Operand(Factory::true_value()), LeaveCC, eq);
1959
+ __ cmp(r0, Operand(0));
1960
+ __ mov(r0, Operand(factory()->false_value()), LeaveCC, ne);
1961
+ __ mov(r0, Operand(factory()->true_value()), LeaveCC, eq);
1867
1962
  }
1868
1963
 
1869
1964
 
@@ -1876,7 +1971,7 @@ void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
1876
1971
 
1877
1972
  InstanceofStub stub(InstanceofStub::kArgsInRegisters);
1878
1973
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1879
- __ tst(r0, Operand(r0));
1974
+ __ cmp(r0, Operand(0));
1880
1975
  EmitBranch(true_block, false_block, eq);
1881
1976
  }
1882
1977
 
@@ -1922,13 +2017,13 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
1922
2017
  // We use Factory::the_hole_value() on purpose instead of loading from the
1923
2018
  // root array to force relocation to be able to later patch with
1924
2019
  // the cached map.
1925
- __ mov(ip, Operand(Factory::the_hole_value()));
2020
+ __ mov(ip, Operand(factory()->the_hole_value()));
1926
2021
  __ cmp(map, Operand(ip));
1927
2022
  __ b(ne, &cache_miss);
1928
2023
  // We use Factory::the_hole_value() on purpose instead of loading from the
1929
2024
  // root array to force relocation to be able to later patch
1930
2025
  // with true or false.
1931
- __ mov(result, Operand(Factory::the_hole_value()));
2026
+ __ mov(result, Operand(factory()->the_hole_value()));
1932
2027
  __ b(&done);
1933
2028
 
1934
2029
  // The inlined call site cache did not match. Check null and string before
@@ -1985,11 +2080,7 @@ void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
1985
2080
  __ BlockConstPoolFor(kAdditionalDelta);
1986
2081
  __ mov(temp, Operand(delta * kPointerSize));
1987
2082
  __ StoreToSafepointRegisterSlot(temp, temp);
1988
- __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
1989
- ASSERT_EQ(kAdditionalDelta,
1990
- masm_->InstructionsGeneratedSince(&before_push_delta));
1991
- RecordSafepointWithRegisters(
1992
- instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
2083
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1993
2084
  // Put the result value into the result register slot and
1994
2085
  // restore all registers.
1995
2086
  __ StoreToSafepointRegisterSlot(result, result);
@@ -2072,7 +2163,7 @@ void LCodeGen::DoReturn(LReturn* instr) {
2072
2163
  }
2073
2164
 
2074
2165
 
2075
- void LCodeGen::DoLoadGlobal(LLoadGlobal* instr) {
2166
+ void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
2076
2167
  Register result = ToRegister(instr->result());
2077
2168
  __ mov(ip, Operand(Handle<Object>(instr->hydrogen()->cell())));
2078
2169
  __ ldr(result, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
@@ -2084,6 +2175,18 @@ void LCodeGen::DoLoadGlobal(LLoadGlobal* instr) {
2084
2175
  }
2085
2176
 
2086
2177
 
2178
+ void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
2179
+ ASSERT(ToRegister(instr->global_object()).is(r0));
2180
+ ASSERT(ToRegister(instr->result()).is(r0));
2181
+
2182
+ __ mov(r2, Operand(instr->name()));
2183
+ RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET
2184
+ : RelocInfo::CODE_TARGET_CONTEXT;
2185
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2186
+ CallCode(ic, mode, instr);
2187
+ }
2188
+
2189
+
2087
2190
  void LCodeGen::DoStoreGlobal(LStoreGlobal* instr) {
2088
2191
  Register value = ToRegister(instr->InputAt(0));
2089
2192
  Register scratch = scratch0();
@@ -2139,13 +2242,77 @@ void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
2139
2242
  }
2140
2243
 
2141
2244
 
2245
+ void LCodeGen::EmitLoadField(Register result,
2246
+ Register object,
2247
+ Handle<Map> type,
2248
+ Handle<String> name) {
2249
+ LookupResult lookup;
2250
+ type->LookupInDescriptors(NULL, *name, &lookup);
2251
+ ASSERT(lookup.IsProperty() && lookup.type() == FIELD);
2252
+ int index = lookup.GetLocalFieldIndexFromMap(*type);
2253
+ int offset = index * kPointerSize;
2254
+ if (index < 0) {
2255
+ // Negative property indices are in-object properties, indexed
2256
+ // from the end of the fixed part of the object.
2257
+ __ ldr(result, FieldMemOperand(object, offset + type->instance_size()));
2258
+ } else {
2259
+ // Non-negative property indices are in the properties array.
2260
+ __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
2261
+ __ ldr(result, FieldMemOperand(result, offset + FixedArray::kHeaderSize));
2262
+ }
2263
+ }
2264
+
2265
+
2266
+ void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
2267
+ Register object = ToRegister(instr->object());
2268
+ Register result = ToRegister(instr->result());
2269
+ Register scratch = scratch0();
2270
+ int map_count = instr->hydrogen()->types()->length();
2271
+ Handle<String> name = instr->hydrogen()->name();
2272
+ if (map_count == 0) {
2273
+ ASSERT(instr->hydrogen()->need_generic());
2274
+ __ mov(r2, Operand(name));
2275
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2276
+ CallCode(ic, RelocInfo::CODE_TARGET, instr);
2277
+ } else {
2278
+ Label done;
2279
+ __ ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
2280
+ for (int i = 0; i < map_count - 1; ++i) {
2281
+ Handle<Map> map = instr->hydrogen()->types()->at(i);
2282
+ Label next;
2283
+ __ cmp(scratch, Operand(map));
2284
+ __ b(ne, &next);
2285
+ EmitLoadField(result, object, map, name);
2286
+ __ b(&done);
2287
+ __ bind(&next);
2288
+ }
2289
+ Handle<Map> map = instr->hydrogen()->types()->last();
2290
+ __ cmp(scratch, Operand(map));
2291
+ if (instr->hydrogen()->need_generic()) {
2292
+ Label generic;
2293
+ __ b(ne, &generic);
2294
+ EmitLoadField(result, object, map, name);
2295
+ __ b(&done);
2296
+ __ bind(&generic);
2297
+ __ mov(r2, Operand(name));
2298
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2299
+ CallCode(ic, RelocInfo::CODE_TARGET, instr);
2300
+ } else {
2301
+ DeoptimizeIf(ne, instr->environment());
2302
+ EmitLoadField(result, object, map, name);
2303
+ }
2304
+ __ bind(&done);
2305
+ }
2306
+ }
2307
+
2308
+
2142
2309
  void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
2143
2310
  ASSERT(ToRegister(instr->object()).is(r0));
2144
2311
  ASSERT(ToRegister(instr->result()).is(r0));
2145
2312
 
2146
2313
  // Name is always in r2.
2147
2314
  __ mov(r2, Operand(instr->name()));
2148
- Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
2315
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2149
2316
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
2150
2317
  }
2151
2318
 
@@ -2206,7 +2373,7 @@ void LCodeGen::DoLoadElements(LLoadElements* instr) {
2206
2373
  __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
2207
2374
  __ cmp(scratch, ip);
2208
2375
  __ b(eq, &done);
2209
- __ LoadRoot(ip, Heap::kPixelArrayMapRootIndex);
2376
+ __ LoadRoot(ip, Heap::kExternalPixelArrayMapRootIndex);
2210
2377
  __ cmp(scratch, ip);
2211
2378
  __ b(eq, &done);
2212
2379
  __ LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
@@ -2217,11 +2384,12 @@ void LCodeGen::DoLoadElements(LLoadElements* instr) {
2217
2384
  }
2218
2385
 
2219
2386
 
2220
- void LCodeGen::DoLoadPixelArrayExternalPointer(
2221
- LLoadPixelArrayExternalPointer* instr) {
2387
+ void LCodeGen::DoLoadExternalArrayPointer(
2388
+ LLoadExternalArrayPointer* instr) {
2222
2389
  Register to_reg = ToRegister(instr->result());
2223
2390
  Register from_reg = ToRegister(instr->InputAt(0));
2224
- __ ldr(to_reg, FieldMemOperand(from_reg, PixelArray::kExternalPointerOffset));
2391
+ __ ldr(to_reg, FieldMemOperand(from_reg,
2392
+ ExternalArray::kExternalPointerOffset));
2225
2393
  }
2226
2394
 
2227
2395
 
@@ -2261,13 +2429,16 @@ void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
2261
2429
  }
2262
2430
 
2263
2431
 
2264
- void LCodeGen::DoLoadPixelArrayElement(LLoadPixelArrayElement* instr) {
2265
- Register external_elements = ToRegister(instr->external_pointer());
2432
+ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
2433
+ LLoadKeyedSpecializedArrayElement* instr) {
2434
+ ASSERT(instr->array_type() == kExternalPixelArray);
2435
+
2436
+ Register external_pointer = ToRegister(instr->external_pointer());
2266
2437
  Register key = ToRegister(instr->key());
2267
2438
  Register result = ToRegister(instr->result());
2268
2439
 
2269
2440
  // Load the result.
2270
- __ ldrb(result, MemOperand(external_elements, key));
2441
+ __ ldrb(result, MemOperand(external_pointer, key));
2271
2442
  }
2272
2443
 
2273
2444
 
@@ -2275,7 +2446,7 @@ void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
2275
2446
  ASSERT(ToRegister(instr->object()).is(r1));
2276
2447
  ASSERT(ToRegister(instr->key()).is(r0));
2277
2448
 
2278
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
2449
+ Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2279
2450
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
2280
2451
  }
2281
2452
 
@@ -2367,7 +2538,7 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
2367
2538
  // stack.
2368
2539
  Label invoke, loop;
2369
2540
  // length is a small non-negative integer, due to the test above.
2370
- __ tst(length, Operand(length));
2541
+ __ cmp(length, Operand(0));
2371
2542
  __ b(eq, &invoke);
2372
2543
  __ bind(&loop);
2373
2544
  __ ldr(scratch, MemOperand(elements, length, LSL, 2));
@@ -2437,7 +2608,7 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
  LInstruction* instr) {
  // Change context if needed.
  bool change_context =
- (graph()->info()->closure()->context() != function->context()) ||
+ (info()->closure()->context() != function->context()) ||
  scope()->contains_with() ||
  (scope()->num_heap_slots() > 0);
  if (change_context) {
@@ -2586,41 +2757,6 @@ void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
  }


- // Truncates a double using a specific rounding mode.
- // Clears the z flag (ne condition) if an overflow occurs.
- void LCodeGen::EmitVFPTruncate(VFPRoundingMode rounding_mode,
- SwVfpRegister result,
- DwVfpRegister double_input,
- Register scratch1,
- Register scratch2) {
- Register prev_fpscr = scratch1;
- Register scratch = scratch2;
-
- // Set custom FPCSR:
- // - Set rounding mode.
- // - Clear vfp cumulative exception flags.
- // - Make sure Flush-to-zero mode control bit is unset.
- __ vmrs(prev_fpscr);
- __ bic(scratch, prev_fpscr, Operand(kVFPExceptionMask |
- kVFPRoundingModeMask |
- kVFPFlushToZeroMask));
- __ orr(scratch, scratch, Operand(rounding_mode));
- __ vmsr(scratch);
-
- // Convert the argument to an integer.
- __ vcvt_s32_f64(result,
- double_input,
- kFPSCRRounding);
-
- // Retrieve FPSCR.
- __ vmrs(scratch);
- // Restore FPSCR.
- __ vmsr(prev_fpscr);
- // Check for vfp exceptions.
- __ tst(scratch, Operand(kVFPExceptionMask));
- }
-
-
  void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
  DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
@@ -2628,24 +2764,26 @@ void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
  Register scratch1 = scratch0();
  Register scratch2 = ToRegister(instr->TempAt(0));

- EmitVFPTruncate(kRoundToMinusInf,
- single_scratch,
- input,
- scratch1,
- scratch2);
+ __ EmitVFPTruncate(kRoundToMinusInf,
+ single_scratch,
+ input,
+ scratch1,
+ scratch2);
  DeoptimizeIf(ne, instr->environment());

  // Move the result back to general purpose register r0.
  __ vmov(result, single_scratch);

- // Test for -0.
- Label done;
- __ cmp(result, Operand(0));
- __ b(ne, &done);
- __ vmov(scratch1, input.high());
- __ tst(scratch1, Operand(HeapNumber::kSignMask));
- DeoptimizeIf(ne, instr->environment());
- __ bind(&done);
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
+ // Test for -0.
+ Label done;
+ __ cmp(result, Operand(0));
+ __ b(ne, &done);
+ __ vmov(scratch1, input.high());
+ __ tst(scratch1, Operand(HeapNumber::kSignMask));
+ DeoptimizeIf(ne, instr->environment());
+ __ bind(&done);
+ }
  }

@@ -2654,22 +2792,24 @@ void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
  Register result = ToRegister(instr->result());
  Register scratch1 = scratch0();
  Register scratch2 = result;
- EmitVFPTruncate(kRoundToNearest,
- double_scratch0().low(),
- input,
- scratch1,
- scratch2);
+ __ EmitVFPTruncate(kRoundToNearest,
+ double_scratch0().low(),
+ input,
+ scratch1,
+ scratch2);
  DeoptimizeIf(ne, instr->environment());
  __ vmov(result, double_scratch0().low());

- // Test for -0.
- Label done;
- __ cmp(result, Operand(0));
- __ b(ne, &done);
- __ vmov(scratch1, input.high());
- __ tst(scratch1, Operand(HeapNumber::kSignMask));
- DeoptimizeIf(ne, instr->environment());
- __ bind(&done);
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
+ // Test for -0.
+ Label done;
+ __ cmp(result, Operand(0));
+ __ b(ne, &done);
+ __ vmov(scratch1, input.high());
+ __ tst(scratch1, Operand(HeapNumber::kSignMask));
+ DeoptimizeIf(ne, instr->environment());
+ __ bind(&done);
+ }
  }

@@ -2680,6 +2820,22 @@ void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
  }


+ void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
+ DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
+ Register scratch = scratch0();
+ SwVfpRegister single_scratch = double_scratch0().low();
+ DoubleRegister double_scratch = double_scratch0();
+ ASSERT(ToDoubleRegister(instr->result()).is(input));
+
+ // Add +0 to convert -0 to +0.
+ __ mov(scratch, Operand(0));
+ __ vmov(single_scratch, scratch);
+ __ vcvt_f64_s32(double_scratch, single_scratch);
+ __ vadd(input, input, double_scratch);
+ __ vsqrt(input, input);
+ }
+
+
  void LCodeGen::DoPower(LPower* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
@@ -2691,14 +2847,16 @@ void LCodeGen::DoPower(LPower* instr) {
  __ PrepareCallCFunction(4, scratch);
  __ vmov(r0, r1, ToDoubleRegister(left));
  __ vmov(r2, r3, ToDoubleRegister(right));
- __ CallCFunction(ExternalReference::power_double_double_function(), 4);
+ __ CallCFunction(
+ ExternalReference::power_double_double_function(isolate()), 4);
  } else if (exponent_type.IsInteger32()) {
  ASSERT(ToRegister(right).is(r0));
  // Prepare arguments and call C function.
  __ PrepareCallCFunction(4, scratch);
  __ mov(r2, ToRegister(right));
  __ vmov(r0, r1, ToDoubleRegister(left));
- __ CallCFunction(ExternalReference::power_double_int_function(), 4);
+ __ CallCFunction(
+ ExternalReference::power_double_int_function(isolate()), 4);
  } else {
  ASSERT(exponent_type.IsTagged());
  ASSERT(instr->hydrogen()->left()->representation().IsDouble());
@@ -2731,13 +2889,38 @@ void LCodeGen::DoPower(LPower* instr) {
  __ PrepareCallCFunction(4, scratch);
  __ vmov(r0, r1, ToDoubleRegister(left));
  __ vmov(r2, r3, result_reg);
- __ CallCFunction(ExternalReference::power_double_double_function(), 4);
+ __ CallCFunction(
+ ExternalReference::power_double_double_function(isolate()), 4);
  }
  // Store the result in the result register.
  __ GetCFunctionDoubleResult(result_reg);
  }


+ void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
+ ASSERT(ToDoubleRegister(instr->result()).is(d2));
+ TranscendentalCacheStub stub(TranscendentalCache::LOG,
+ TranscendentalCacheStub::UNTAGGED);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ }
+
+
+ void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
+ ASSERT(ToDoubleRegister(instr->result()).is(d2));
+ TranscendentalCacheStub stub(TranscendentalCache::COS,
+ TranscendentalCacheStub::UNTAGGED);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ }
+
+
+ void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
+ ASSERT(ToDoubleRegister(instr->result()).is(d2));
+ TranscendentalCacheStub stub(TranscendentalCache::SIN,
+ TranscendentalCacheStub::UNTAGGED);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ }
+
+
  void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
  switch (instr->op()) {
  case kMathAbs:
@@ -2752,6 +2935,18 @@ void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
  case kMathSqrt:
  DoMathSqrt(instr);
  break;
+ case kMathPowHalf:
+ DoMathPowHalf(instr);
+ break;
+ case kMathCos:
+ DoMathCos(instr);
+ break;
+ case kMathSin:
+ DoMathSin(instr);
+ break;
+ case kMathLog:
+ DoMathLog(instr);
+ break;
  default:
  Abort("Unimplemented type of LUnaryMathOperation.");
  UNREACHABLE();
@@ -2763,7 +2958,8 @@ void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));

  int arity = instr->arity();
- Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
+ Handle<Code> ic =
+ isolate()->stub_cache()->ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
@@ -2773,7 +2969,8 @@ void LCodeGen::DoCallNamed(LCallNamed* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));

  int arity = instr->arity();
- Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
+ Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize(
+ arity, NOT_IN_LOOP);
  __ mov(r2, Operand(instr->name()));
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
  // Restore context register.
@@ -2796,7 +2993,8 @@ void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));

  int arity = instr->arity();
- Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
+ Handle<Code> ic =
+ isolate()->stub_cache()->ComputeCallInitialize(arity, NOT_IN_LOOP);
  __ mov(r2, Operand(instr->name()));
  CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2814,7 +3012,7 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r1));
  ASSERT(ToRegister(instr->result()).is(r0));

- Handle<Code> builtin(Builtins::builtin(Builtins::JSConstructCall));
+ Handle<Code> builtin = isolate()->builtins()->JSConstructCall();
  __ mov(r0, Operand(instr->arity()));
  CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
  }
@@ -2863,9 +3061,9 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {

  // Name is always in r2.
  __ mov(r2, Operand(instr->name()));
- Handle<Code> ic(Builtins::builtin(info_->is_strict()
- ? Builtins::StoreIC_Initialize_Strict
- : Builtins::StoreIC_Initialize));
+ Handle<Code> ic = info_->is_strict()
+ ? isolate()->builtins()->StoreIC_Initialize_Strict()
+ : isolate()->builtins()->StoreIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
  }

@@ -2902,12 +3100,28 @@ void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
  }


+ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
+ LStoreKeyedSpecializedArrayElement* instr) {
+ ASSERT(instr->array_type() == kExternalPixelArray);
+
+ Register external_pointer = ToRegister(instr->external_pointer());
+ Register key = ToRegister(instr->key());
+ Register value = ToRegister(instr->value());
+
+ // Clamp the value to [0..255].
+ __ Usat(value, 8, Operand(value));
+ __ strb(value, MemOperand(external_pointer, key, LSL, 0));
+ }
+
+
  void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(r2));
  ASSERT(ToRegister(instr->key()).is(r1));
  ASSERT(ToRegister(instr->value()).is(r0));

- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic = info_->is_strict()
+ ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
+ : isolate()->builtins()->KeyedStoreIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
  }

@@ -3053,6 +3267,56 @@ void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
  }


+ void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
+ class DeferredStringCharFromCode: public LDeferredCode {
+ public:
+ DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
+ : LDeferredCode(codegen), instr_(instr) { }
+ virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); }
+ private:
+ LStringCharFromCode* instr_;
+ };
+
+ DeferredStringCharFromCode* deferred =
+ new DeferredStringCharFromCode(this, instr);
+
+ ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
+ Register char_code = ToRegister(instr->char_code());
+ Register result = ToRegister(instr->result());
+ ASSERT(!char_code.is(result));
+
+ __ cmp(char_code, Operand(String::kMaxAsciiCharCode));
+ __ b(hi, deferred->entry());
+ __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex);
+ __ add(result, result, Operand(char_code, LSL, kPointerSizeLog2));
+ __ ldr(result, FieldMemOperand(result, FixedArray::kHeaderSize));
+ __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
+ __ cmp(result, ip);
+ __ b(eq, deferred->entry());
+ __ bind(deferred->exit());
+ }
+
+
+ void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
+ Register char_code = ToRegister(instr->char_code());
+ Register result = ToRegister(instr->result());
+
+ // TODO(3095996): Get rid of this. For now, we need to make the
+ // result register contain a valid pointer because it is already
+ // contained in the register pointer map.
+ __ mov(result, Operand(0));
+
+ __ PushSafepointRegisters();
+ __ SmiTag(char_code);
+ __ push(char_code);
+ __ CallRuntimeSaveDoubles(Runtime::kCharFromCode);
+ RecordSafepointWithRegisters(
+ instr->pointer_map(), 1, Safepoint::kNoDeoptimizationIndex);
+ __ StoreToSafepointRegisterSlot(r0, result);
+ __ PopSafepointRegisters();
+ }
+
+
  void LCodeGen::DoStringLength(LStringLength* instr) {
  Register string = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
@@ -3265,19 +3529,30 @@ class DeferredTaggedToI: public LDeferredCode {


  void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
- Label done;
  Register input_reg = ToRegister(instr->InputAt(0));
- Register scratch = scratch0();
- DoubleRegister dbl_scratch = d0;
- SwVfpRegister flt_scratch = s0;
- DoubleRegister dbl_tmp = ToDoubleRegister(instr->TempAt(0));
+ Register scratch1 = scratch0();
+ Register scratch2 = ToRegister(instr->TempAt(0));
+ DwVfpRegister double_scratch = double_scratch0();
+ SwVfpRegister single_scratch = double_scratch.low();
+
+ ASSERT(!scratch1.is(input_reg) && !scratch1.is(scratch2));
+ ASSERT(!scratch2.is(input_reg) && !scratch2.is(scratch1));
+
+ Label done;

  // Heap number map check.
- __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
+ __ ldr(scratch1, FieldMemOperand(input_reg, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
- __ cmp(scratch, Operand(ip));
+ __ cmp(scratch1, Operand(ip));

  if (instr->truncating()) {
+ Register scratch3 = ToRegister(instr->TempAt(1));
+ DwVfpRegister double_scratch2 = ToDoubleRegister(instr->TempAt(2));
+ ASSERT(!scratch3.is(input_reg) &&
+ !scratch3.is(scratch1) &&
+ !scratch3.is(scratch2));
+ // Performs a truncating conversion of a floating point number as used by
+ // the JS bitwise operations.
  Label heap_number;
  __ b(eq, &heap_number);
  // Check for undefined. Undefined is converted to zero for truncating
@@ -3289,36 +3564,38 @@ void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
  __ b(&done);

  __ bind(&heap_number);
- __ sub(ip, input_reg, Operand(kHeapObjectTag));
- __ vldr(dbl_tmp, ip, HeapNumber::kValueOffset);
- __ vcmp(dbl_tmp, 0.0); // Sets overflow bit in FPSCR flags if NaN.
- __ vcvt_s32_f64(flt_scratch, dbl_tmp);
- __ vmov(input_reg, flt_scratch); // 32-bit result of conversion.
- __ vmrs(pc); // Move vector status bits to normal status bits.
- // Overflow bit is set if dbl_tmp is Nan.
- __ cmn(input_reg, Operand(1), vc); // 0x7fffffff + 1 -> overflow.
- __ cmp(input_reg, Operand(1), vc); // 0x80000000 - 1 -> overflow.
- DeoptimizeIf(vs, instr->environment()); // Saturation may have occured.
+ __ sub(scratch1, input_reg, Operand(kHeapObjectTag));
+ __ vldr(double_scratch2, scratch1, HeapNumber::kValueOffset);
+
+ __ EmitECMATruncate(input_reg,
+ double_scratch2,
+ single_scratch,
+ scratch1,
+ scratch2,
+ scratch3);

  } else {
+ CpuFeatures::Scope scope(VFP3);
  // Deoptimize if we don't have a heap number.
  DeoptimizeIf(ne, instr->environment());

  __ sub(ip, input_reg, Operand(kHeapObjectTag));
- __ vldr(dbl_tmp, ip, HeapNumber::kValueOffset);
- __ vcvt_s32_f64(flt_scratch, dbl_tmp);
- __ vmov(input_reg, flt_scratch); // 32-bit result of conversion.
- // Non-truncating conversion means that we cannot lose bits, so we convert
- // back to check; note that using non-overlapping s and d regs would be
- // slightly faster.
- __ vcvt_f64_s32(dbl_scratch, flt_scratch);
- __ VFPCompareAndSetFlags(dbl_scratch, dbl_tmp);
- DeoptimizeIf(ne, instr->environment()); // Not equal or unordered.
+ __ vldr(double_scratch, ip, HeapNumber::kValueOffset);
+ __ EmitVFPTruncate(kRoundToZero,
+ single_scratch,
+ double_scratch,
+ scratch1,
+ scratch2,
+ kCheckForInexactConversion);
+ DeoptimizeIf(ne, instr->environment());
+ // Load the result.
+ __ vmov(input_reg, single_scratch);
+
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
- __ tst(input_reg, Operand(input_reg));
+ __ cmp(input_reg, Operand(0));
  __ b(ne, &done);
- __ vmov(lr, ip, dbl_tmp);
- __ tst(ip, Operand(1 << 31)); // Test sign bit.
+ __ vmov(scratch1, double_scratch.high());
+ __ tst(scratch1, Operand(HeapNumber::kSignMask));
  DeoptimizeIf(ne, instr->environment());
  }
  }
@@ -3360,50 +3637,52 @@ void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {


  void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
- LOperand* input = instr->InputAt(0);
- ASSERT(input->IsDoubleRegister());
- LOperand* result = instr->result();
- ASSERT(result->IsRegister());
-
- DoubleRegister double_input = ToDoubleRegister(input);
- Register result_reg = ToRegister(result);
- SwVfpRegister single_scratch = double_scratch0().low();
+ Register result_reg = ToRegister(instr->result());
  Register scratch1 = scratch0();
  Register scratch2 = ToRegister(instr->TempAt(0));
+ DwVfpRegister double_input = ToDoubleRegister(instr->InputAt(0));
+ DwVfpRegister double_scratch = double_scratch0();
+ SwVfpRegister single_scratch = double_scratch0().low();

- VFPRoundingMode rounding_mode = instr->truncating() ? kRoundToMinusInf
- : kRoundToNearest;
-
- EmitVFPTruncate(rounding_mode,
- single_scratch,
- double_input,
- scratch1,
- scratch2);
- // Deoptimize if we had a vfp invalid exception.
- DeoptimizeIf(ne, instr->environment());
- // Retrieve the result.
- __ vmov(result_reg, single_scratch);
+ Label done;

- if (instr->truncating() &&
- instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
- Label done;
- __ cmp(result_reg, Operand(0));
- __ b(ne, &done);
- // Check for -0.
- __ vmov(scratch1, double_input.high());
- __ tst(scratch1, Operand(HeapNumber::kSignMask));
+ if (instr->truncating()) {
+ Register scratch3 = ToRegister(instr->TempAt(1));
+ __ EmitECMATruncate(result_reg,
+ double_input,
+ single_scratch,
+ scratch1,
+ scratch2,
+ scratch3);
+ } else {
+ VFPRoundingMode rounding_mode = kRoundToMinusInf;
+ __ EmitVFPTruncate(rounding_mode,
+ single_scratch,
+ double_input,
+ scratch1,
+ scratch2,
+ kCheckForInexactConversion);
+ // Deoptimize if we had a vfp invalid exception,
+ // including inexact operation.
  DeoptimizeIf(ne, instr->environment());
-
- __ bind(&done);
+ // Retrieve the result.
+ __ vmov(result_reg, single_scratch);
  }
+ __ bind(&done);
  }


  void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
  LOperand* input = instr->InputAt(0);
- ASSERT(input->IsRegister());
  __ tst(ToRegister(input), Operand(kSmiTagMask));
- DeoptimizeIf(instr->condition(), instr->environment());
+ DeoptimizeIf(ne, instr->environment());
+ }
+
+
+ void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
+ LOperand* input = instr->InputAt(0);
+ __ tst(ToRegister(input), Operand(kSmiTagMask));
+ DeoptimizeIf(eq, instr->environment());
  }

@@ -3452,9 +3731,9 @@ void LCodeGen::DoCheckMap(LCheckMap* instr) {

  void LCodeGen::LoadHeapObject(Register result,
  Handle<HeapObject> object) {
- if (Heap::InNewSpace(*object)) {
+ if (heap()->InNewSpace(*object)) {
  Handle<JSGlobalPropertyCell> cell =
- Factory::NewJSGlobalPropertyCell(object);
+ factory()->NewJSGlobalPropertyCell(object);
  __ mov(result, Operand(cell));
  __ ldr(result, FieldMemOperand(result, JSGlobalPropertyCell::kValueOffset));
  } else {
@@ -3536,6 +3815,13 @@ void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
  }


+ void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
+ ASSERT(ToRegister(instr->InputAt(0)).is(r0));
+ __ push(r0);
+ CallRuntime(Runtime::kToFastProperties, 1, instr);
+ }
+
+
  void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
  Label materialized;
  // Registers will be used as follows:
@@ -3596,16 +3882,17 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
  // space for nested functions that don't need literals cloning.
  Handle<SharedFunctionInfo> shared_info = instr->shared_info();
  bool pretenure = instr->hydrogen()->pretenure();
- if (shared_info->num_literals() == 0 && !pretenure) {
- FastNewClosureStub stub;
+ if (!pretenure && shared_info->num_literals() == 0) {
+ FastNewClosureStub stub(
+ shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
  __ mov(r1, Operand(shared_info));
  __ push(r1);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else {
  __ mov(r2, Operand(shared_info));
  __ mov(r1, Operand(pretenure
- ? Factory::true_value()
- : Factory::false_value()));
+ ? factory()->true_value()
+ : factory()->false_value()));
  __ Push(cp, r2, r1);
  CallRuntime(Runtime::kNewClosure, 3, instr);
  }
@@ -3664,71 +3951,54 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
  Handle<String> type_name) {
  Condition final_branch_condition = kNoCondition;
  Register scratch = scratch0();
- if (type_name->Equals(Heap::number_symbol())) {
- __ tst(input, Operand(kSmiTagMask));
- __ b(eq, true_label);
+ if (type_name->Equals(heap()->number_symbol())) {
+ __ JumpIfSmi(input, true_label);
  __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
  __ cmp(input, Operand(ip));
  final_branch_condition = eq;

- } else if (type_name->Equals(Heap::string_symbol())) {
- __ tst(input, Operand(kSmiTagMask));
- __ b(eq, false_label);
- __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
+ } else if (type_name->Equals(heap()->string_symbol())) {
+ __ JumpIfSmi(input, false_label);
+ __ CompareObjectType(input, input, scratch, FIRST_NONSTRING_TYPE);
+ __ b(ge, false_label);
  __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
  __ tst(ip, Operand(1 << Map::kIsUndetectable));
- __ b(ne, false_label);
- __ CompareInstanceType(input, scratch, FIRST_NONSTRING_TYPE);
- final_branch_condition = lo;
+ final_branch_condition = eq;

- } else if (type_name->Equals(Heap::boolean_symbol())) {
- __ LoadRoot(ip, Heap::kTrueValueRootIndex);
- __ cmp(input, ip);
+ } else if (type_name->Equals(heap()->boolean_symbol())) {
+ __ CompareRoot(input, Heap::kTrueValueRootIndex);
  __ b(eq, true_label);
- __ LoadRoot(ip, Heap::kFalseValueRootIndex);
- __ cmp(input, ip);
+ __ CompareRoot(input, Heap::kFalseValueRootIndex);
  final_branch_condition = eq;

- } else if (type_name->Equals(Heap::undefined_symbol())) {
- __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
- __ cmp(input, ip);
+ } else if (type_name->Equals(heap()->undefined_symbol())) {
+ __ CompareRoot(input, Heap::kUndefinedValueRootIndex);
  __ b(eq, true_label);
- __ tst(input, Operand(kSmiTagMask));
- __ b(eq, false_label);
+ __ JumpIfSmi(input, false_label);
  // Check for undetectable objects => true.
  __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
  __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
  __ tst(ip, Operand(1 << Map::kIsUndetectable));
  final_branch_condition = ne;

- } else if (type_name->Equals(Heap::function_symbol())) {
- __ tst(input, Operand(kSmiTagMask));
- __ b(eq, false_label);
- __ CompareObjectType(input, input, scratch, JS_FUNCTION_TYPE);
- __ b(eq, true_label);
- // Regular expressions => 'function' (they are callable).
- __ CompareInstanceType(input, scratch, JS_REGEXP_TYPE);
- final_branch_condition = eq;
+ } else if (type_name->Equals(heap()->function_symbol())) {
+ __ JumpIfSmi(input, false_label);
+ __ CompareObjectType(input, input, scratch, FIRST_FUNCTION_CLASS_TYPE);
+ final_branch_condition = ge;

- } else if (type_name->Equals(Heap::object_symbol())) {
- __ tst(input, Operand(kSmiTagMask));
- __ b(eq, false_label);
- __ LoadRoot(ip, Heap::kNullValueRootIndex);
- __ cmp(input, ip);
+ } else if (type_name->Equals(heap()->object_symbol())) {
+ __ JumpIfSmi(input, false_label);
+ __ CompareRoot(input, Heap::kNullValueRootIndex);
  __ b(eq, true_label);
- // Regular expressions => 'function', not 'object'.
- __ CompareObjectType(input, input, scratch, JS_REGEXP_TYPE);
- __ b(eq, false_label);
+ __ CompareObjectType(input, input, scratch, FIRST_JS_OBJECT_TYPE);
+ __ b(lo, false_label);
+ __ CompareInstanceType(input, scratch, FIRST_FUNCTION_CLASS_TYPE);
+ __ b(hs, false_label);
  // Check for undetectable objects => false.
  __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
  __ tst(ip, Operand(1 << Map::kIsUndetectable));
- __ b(ne, false_label);
- // Check for JS objects => true.
- __ CompareInstanceType(input, scratch, FIRST_JS_OBJECT_TYPE);
- __ b(lo, false_label);
- __ CompareInstanceType(input, scratch, LAST_JS_OBJECT_TYPE);
- final_branch_condition = ls;
+ final_branch_condition = eq;

  } else {
  final_branch_condition = ne;