libv8 3.11.8.17 → 3.16.14.0

Sign up to get free protection for your applications and to get access to all the features.
Files changed (754)
  1. checksums.yaml +4 -4
  2. data/.travis.yml +1 -2
  3. data/Gemfile +1 -1
  4. data/Rakefile +6 -7
  5. data/lib/libv8/version.rb +1 -1
  6. data/vendor/v8/.gitignore +24 -3
  7. data/vendor/v8/AUTHORS +7 -0
  8. data/vendor/v8/ChangeLog +839 -0
  9. data/vendor/v8/DEPS +1 -1
  10. data/vendor/v8/Makefile.android +92 -0
  11. data/vendor/v8/OWNERS +11 -0
  12. data/vendor/v8/PRESUBMIT.py +71 -0
  13. data/vendor/v8/SConstruct +34 -39
  14. data/vendor/v8/build/android.gypi +56 -37
  15. data/vendor/v8/build/common.gypi +112 -30
  16. data/vendor/v8/build/gyp_v8 +1 -1
  17. data/vendor/v8/build/standalone.gypi +15 -11
  18. data/vendor/v8/include/v8-debug.h +9 -1
  19. data/vendor/v8/include/v8-preparser.h +4 -3
  20. data/vendor/v8/include/v8-profiler.h +25 -25
  21. data/vendor/v8/include/v8-testing.h +4 -3
  22. data/vendor/v8/include/v8.h +994 -540
  23. data/vendor/v8/preparser/preparser-process.cc +3 -3
  24. data/vendor/v8/samples/lineprocessor.cc +20 -27
  25. data/vendor/v8/samples/process.cc +18 -14
  26. data/vendor/v8/samples/shell.cc +16 -15
  27. data/vendor/v8/src/SConscript +15 -14
  28. data/vendor/v8/src/accessors.cc +169 -77
  29. data/vendor/v8/src/accessors.h +4 -0
  30. data/vendor/v8/src/allocation-inl.h +2 -2
  31. data/vendor/v8/src/allocation.h +7 -7
  32. data/vendor/v8/src/api.cc +810 -497
  33. data/vendor/v8/src/api.h +85 -60
  34. data/vendor/v8/src/arm/assembler-arm-inl.h +179 -22
  35. data/vendor/v8/src/arm/assembler-arm.cc +633 -264
  36. data/vendor/v8/src/arm/assembler-arm.h +264 -197
  37. data/vendor/v8/src/arm/builtins-arm.cc +117 -27
  38. data/vendor/v8/src/arm/code-stubs-arm.cc +1241 -700
  39. data/vendor/v8/src/arm/code-stubs-arm.h +35 -138
  40. data/vendor/v8/src/arm/codegen-arm.cc +285 -16
  41. data/vendor/v8/src/arm/codegen-arm.h +22 -0
  42. data/vendor/v8/src/arm/constants-arm.cc +5 -3
  43. data/vendor/v8/src/arm/constants-arm.h +24 -11
  44. data/vendor/v8/src/arm/debug-arm.cc +3 -3
  45. data/vendor/v8/src/arm/deoptimizer-arm.cc +382 -92
  46. data/vendor/v8/src/arm/disasm-arm.cc +61 -12
  47. data/vendor/v8/src/arm/frames-arm.h +0 -14
  48. data/vendor/v8/src/arm/full-codegen-arm.cc +332 -304
  49. data/vendor/v8/src/arm/ic-arm.cc +180 -259
  50. data/vendor/v8/src/arm/lithium-arm.cc +364 -316
  51. data/vendor/v8/src/arm/lithium-arm.h +512 -275
  52. data/vendor/v8/src/arm/lithium-codegen-arm.cc +1768 -809
  53. data/vendor/v8/src/arm/lithium-codegen-arm.h +97 -35
  54. data/vendor/v8/src/arm/lithium-gap-resolver-arm.cc +12 -5
  55. data/vendor/v8/src/arm/macro-assembler-arm.cc +439 -228
  56. data/vendor/v8/src/arm/macro-assembler-arm.h +116 -70
  57. data/vendor/v8/src/arm/regexp-macro-assembler-arm.cc +54 -44
  58. data/vendor/v8/src/arm/regexp-macro-assembler-arm.h +3 -10
  59. data/vendor/v8/src/arm/simulator-arm.cc +272 -238
  60. data/vendor/v8/src/arm/simulator-arm.h +38 -8
  61. data/vendor/v8/src/arm/stub-cache-arm.cc +522 -895
  62. data/vendor/v8/src/array.js +101 -70
  63. data/vendor/v8/src/assembler.cc +270 -19
  64. data/vendor/v8/src/assembler.h +110 -15
  65. data/vendor/v8/src/ast.cc +79 -69
  66. data/vendor/v8/src/ast.h +255 -301
  67. data/vendor/v8/src/atomicops.h +7 -1
  68. data/vendor/v8/src/atomicops_internals_tsan.h +335 -0
  69. data/vendor/v8/src/bootstrapper.cc +481 -418
  70. data/vendor/v8/src/bootstrapper.h +4 -4
  71. data/vendor/v8/src/builtins.cc +498 -311
  72. data/vendor/v8/src/builtins.h +75 -47
  73. data/vendor/v8/src/checks.cc +2 -1
  74. data/vendor/v8/src/checks.h +8 -0
  75. data/vendor/v8/src/code-stubs-hydrogen.cc +253 -0
  76. data/vendor/v8/src/code-stubs.cc +249 -84
  77. data/vendor/v8/src/code-stubs.h +501 -169
  78. data/vendor/v8/src/codegen.cc +36 -18
  79. data/vendor/v8/src/codegen.h +25 -3
  80. data/vendor/v8/src/collection.js +54 -17
  81. data/vendor/v8/src/compilation-cache.cc +24 -16
  82. data/vendor/v8/src/compilation-cache.h +15 -6
  83. data/vendor/v8/src/compiler.cc +497 -195
  84. data/vendor/v8/src/compiler.h +246 -38
  85. data/vendor/v8/src/contexts.cc +64 -24
  86. data/vendor/v8/src/contexts.h +60 -29
  87. data/vendor/v8/src/conversions-inl.h +24 -14
  88. data/vendor/v8/src/conversions.h +7 -4
  89. data/vendor/v8/src/counters.cc +21 -12
  90. data/vendor/v8/src/counters.h +44 -16
  91. data/vendor/v8/src/cpu-profiler.h +1 -1
  92. data/vendor/v8/src/d8-debug.cc +2 -2
  93. data/vendor/v8/src/d8-readline.cc +13 -2
  94. data/vendor/v8/src/d8.cc +681 -273
  95. data/vendor/v8/src/d8.gyp +4 -4
  96. data/vendor/v8/src/d8.h +38 -18
  97. data/vendor/v8/src/d8.js +0 -617
  98. data/vendor/v8/src/data-flow.h +55 -0
  99. data/vendor/v8/src/date.js +1 -42
  100. data/vendor/v8/src/dateparser-inl.h +5 -1
  101. data/vendor/v8/src/debug-agent.cc +10 -15
  102. data/vendor/v8/src/debug-debugger.js +147 -149
  103. data/vendor/v8/src/debug.cc +323 -164
  104. data/vendor/v8/src/debug.h +26 -14
  105. data/vendor/v8/src/deoptimizer.cc +765 -290
  106. data/vendor/v8/src/deoptimizer.h +130 -28
  107. data/vendor/v8/src/disassembler.cc +10 -4
  108. data/vendor/v8/src/elements-kind.cc +7 -2
  109. data/vendor/v8/src/elements-kind.h +19 -0
  110. data/vendor/v8/src/elements.cc +607 -285
  111. data/vendor/v8/src/elements.h +36 -13
  112. data/vendor/v8/src/execution.cc +52 -31
  113. data/vendor/v8/src/execution.h +4 -4
  114. data/vendor/v8/src/extensions/externalize-string-extension.cc +5 -4
  115. data/vendor/v8/src/extensions/gc-extension.cc +5 -1
  116. data/vendor/v8/src/extensions/statistics-extension.cc +153 -0
  117. data/vendor/v8/src/{inspector.h → extensions/statistics-extension.h} +12 -23
  118. data/vendor/v8/src/factory.cc +101 -134
  119. data/vendor/v8/src/factory.h +36 -31
  120. data/vendor/v8/src/flag-definitions.h +102 -25
  121. data/vendor/v8/src/flags.cc +9 -5
  122. data/vendor/v8/src/frames-inl.h +10 -0
  123. data/vendor/v8/src/frames.cc +116 -26
  124. data/vendor/v8/src/frames.h +96 -12
  125. data/vendor/v8/src/full-codegen.cc +219 -74
  126. data/vendor/v8/src/full-codegen.h +63 -21
  127. data/vendor/v8/src/func-name-inferrer.cc +8 -7
  128. data/vendor/v8/src/func-name-inferrer.h +5 -3
  129. data/vendor/v8/src/gdb-jit.cc +71 -57
  130. data/vendor/v8/src/global-handles.cc +230 -101
  131. data/vendor/v8/src/global-handles.h +26 -27
  132. data/vendor/v8/src/globals.h +17 -19
  133. data/vendor/v8/src/handles-inl.h +59 -12
  134. data/vendor/v8/src/handles.cc +180 -200
  135. data/vendor/v8/src/handles.h +80 -11
  136. data/vendor/v8/src/hashmap.h +60 -40
  137. data/vendor/v8/src/heap-inl.h +107 -45
  138. data/vendor/v8/src/heap-profiler.cc +38 -19
  139. data/vendor/v8/src/heap-profiler.h +24 -14
  140. data/vendor/v8/src/heap.cc +1123 -738
  141. data/vendor/v8/src/heap.h +385 -146
  142. data/vendor/v8/src/hydrogen-instructions.cc +700 -217
  143. data/vendor/v8/src/hydrogen-instructions.h +1158 -472
  144. data/vendor/v8/src/hydrogen.cc +3319 -1662
  145. data/vendor/v8/src/hydrogen.h +411 -170
  146. data/vendor/v8/src/ia32/assembler-ia32-inl.h +46 -16
  147. data/vendor/v8/src/ia32/assembler-ia32.cc +131 -61
  148. data/vendor/v8/src/ia32/assembler-ia32.h +115 -57
  149. data/vendor/v8/src/ia32/builtins-ia32.cc +99 -5
  150. data/vendor/v8/src/ia32/code-stubs-ia32.cc +787 -495
  151. data/vendor/v8/src/ia32/code-stubs-ia32.h +10 -100
  152. data/vendor/v8/src/ia32/codegen-ia32.cc +227 -23
  153. data/vendor/v8/src/ia32/codegen-ia32.h +14 -0
  154. data/vendor/v8/src/ia32/deoptimizer-ia32.cc +428 -87
  155. data/vendor/v8/src/ia32/disasm-ia32.cc +28 -1
  156. data/vendor/v8/src/ia32/frames-ia32.h +6 -16
  157. data/vendor/v8/src/ia32/full-codegen-ia32.cc +280 -272
  158. data/vendor/v8/src/ia32/ic-ia32.cc +150 -250
  159. data/vendor/v8/src/ia32/lithium-codegen-ia32.cc +1600 -517
  160. data/vendor/v8/src/ia32/lithium-codegen-ia32.h +90 -24
  161. data/vendor/v8/src/ia32/lithium-gap-resolver-ia32.cc +10 -6
  162. data/vendor/v8/src/ia32/lithium-gap-resolver-ia32.h +2 -2
  163. data/vendor/v8/src/ia32/lithium-ia32.cc +405 -302
  164. data/vendor/v8/src/ia32/lithium-ia32.h +526 -271
  165. data/vendor/v8/src/ia32/macro-assembler-ia32.cc +378 -119
  166. data/vendor/v8/src/ia32/macro-assembler-ia32.h +62 -28
  167. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.cc +43 -30
  168. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.h +2 -10
  169. data/vendor/v8/src/ia32/stub-cache-ia32.cc +492 -678
  170. data/vendor/v8/src/ic-inl.h +9 -4
  171. data/vendor/v8/src/ic.cc +836 -923
  172. data/vendor/v8/src/ic.h +228 -247
  173. data/vendor/v8/src/incremental-marking-inl.h +26 -30
  174. data/vendor/v8/src/incremental-marking.cc +276 -248
  175. data/vendor/v8/src/incremental-marking.h +29 -37
  176. data/vendor/v8/src/interface.cc +34 -25
  177. data/vendor/v8/src/interface.h +69 -25
  178. data/vendor/v8/src/interpreter-irregexp.cc +2 -2
  179. data/vendor/v8/src/isolate.cc +382 -76
  180. data/vendor/v8/src/isolate.h +109 -56
  181. data/vendor/v8/src/json-parser.h +217 -104
  182. data/vendor/v8/src/json-stringifier.h +745 -0
  183. data/vendor/v8/src/json.js +10 -132
  184. data/vendor/v8/src/jsregexp-inl.h +106 -0
  185. data/vendor/v8/src/jsregexp.cc +517 -285
  186. data/vendor/v8/src/jsregexp.h +145 -117
  187. data/vendor/v8/src/list-inl.h +35 -22
  188. data/vendor/v8/src/list.h +46 -19
  189. data/vendor/v8/src/lithium-allocator-inl.h +22 -2
  190. data/vendor/v8/src/lithium-allocator.cc +85 -70
  191. data/vendor/v8/src/lithium-allocator.h +21 -39
  192. data/vendor/v8/src/lithium.cc +259 -5
  193. data/vendor/v8/src/lithium.h +131 -32
  194. data/vendor/v8/src/liveedit-debugger.js +52 -3
  195. data/vendor/v8/src/liveedit.cc +393 -113
  196. data/vendor/v8/src/liveedit.h +7 -3
  197. data/vendor/v8/src/log-utils.cc +4 -2
  198. data/vendor/v8/src/log.cc +170 -140
  199. data/vendor/v8/src/log.h +62 -11
  200. data/vendor/v8/src/macro-assembler.h +17 -0
  201. data/vendor/v8/src/macros.py +2 -0
  202. data/vendor/v8/src/mark-compact-inl.h +3 -23
  203. data/vendor/v8/src/mark-compact.cc +801 -830
  204. data/vendor/v8/src/mark-compact.h +154 -47
  205. data/vendor/v8/src/marking-thread.cc +85 -0
  206. data/vendor/v8/src/{inspector.cc → marking-thread.h} +32 -24
  207. data/vendor/v8/src/math.js +12 -18
  208. data/vendor/v8/src/messages.cc +18 -8
  209. data/vendor/v8/src/messages.js +314 -261
  210. data/vendor/v8/src/mips/assembler-mips-inl.h +58 -6
  211. data/vendor/v8/src/mips/assembler-mips.cc +92 -75
  212. data/vendor/v8/src/mips/assembler-mips.h +54 -60
  213. data/vendor/v8/src/mips/builtins-mips.cc +116 -17
  214. data/vendor/v8/src/mips/code-stubs-mips.cc +919 -556
  215. data/vendor/v8/src/mips/code-stubs-mips.h +22 -131
  216. data/vendor/v8/src/mips/codegen-mips.cc +281 -6
  217. data/vendor/v8/src/mips/codegen-mips.h +22 -0
  218. data/vendor/v8/src/mips/constants-mips.cc +2 -0
  219. data/vendor/v8/src/mips/constants-mips.h +12 -2
  220. data/vendor/v8/src/mips/deoptimizer-mips.cc +286 -50
  221. data/vendor/v8/src/mips/disasm-mips.cc +13 -0
  222. data/vendor/v8/src/mips/full-codegen-mips.cc +297 -284
  223. data/vendor/v8/src/mips/ic-mips.cc +182 -263
  224. data/vendor/v8/src/mips/lithium-codegen-mips.cc +1208 -556
  225. data/vendor/v8/src/mips/lithium-codegen-mips.h +72 -19
  226. data/vendor/v8/src/mips/lithium-gap-resolver-mips.cc +9 -2
  227. data/vendor/v8/src/mips/lithium-mips.cc +290 -302
  228. data/vendor/v8/src/mips/lithium-mips.h +463 -266
  229. data/vendor/v8/src/mips/macro-assembler-mips.cc +208 -115
  230. data/vendor/v8/src/mips/macro-assembler-mips.h +67 -24
  231. data/vendor/v8/src/mips/regexp-macro-assembler-mips.cc +40 -25
  232. data/vendor/v8/src/mips/regexp-macro-assembler-mips.h +3 -9
  233. data/vendor/v8/src/mips/simulator-mips.cc +112 -40
  234. data/vendor/v8/src/mips/simulator-mips.h +5 -0
  235. data/vendor/v8/src/mips/stub-cache-mips.cc +502 -884
  236. data/vendor/v8/src/mirror-debugger.js +157 -30
  237. data/vendor/v8/src/mksnapshot.cc +88 -14
  238. data/vendor/v8/src/object-observe.js +235 -0
  239. data/vendor/v8/src/objects-debug.cc +178 -176
  240. data/vendor/v8/src/objects-inl.h +1333 -486
  241. data/vendor/v8/src/objects-printer.cc +125 -43
  242. data/vendor/v8/src/objects-visiting-inl.h +578 -6
  243. data/vendor/v8/src/objects-visiting.cc +2 -2
  244. data/vendor/v8/src/objects-visiting.h +172 -79
  245. data/vendor/v8/src/objects.cc +3533 -2885
  246. data/vendor/v8/src/objects.h +1352 -1131
  247. data/vendor/v8/src/optimizing-compiler-thread.cc +152 -0
  248. data/vendor/v8/src/optimizing-compiler-thread.h +111 -0
  249. data/vendor/v8/src/parser.cc +390 -500
  250. data/vendor/v8/src/parser.h +45 -33
  251. data/vendor/v8/src/platform-cygwin.cc +10 -21
  252. data/vendor/v8/src/platform-freebsd.cc +36 -41
  253. data/vendor/v8/src/platform-linux.cc +160 -124
  254. data/vendor/v8/src/platform-macos.cc +30 -27
  255. data/vendor/v8/src/platform-nullos.cc +17 -1
  256. data/vendor/v8/src/platform-openbsd.cc +19 -50
  257. data/vendor/v8/src/platform-posix.cc +14 -0
  258. data/vendor/v8/src/platform-solaris.cc +20 -53
  259. data/vendor/v8/src/platform-win32.cc +49 -26
  260. data/vendor/v8/src/platform.h +40 -1
  261. data/vendor/v8/src/preparser.cc +8 -5
  262. data/vendor/v8/src/preparser.h +2 -2
  263. data/vendor/v8/src/prettyprinter.cc +16 -0
  264. data/vendor/v8/src/prettyprinter.h +2 -0
  265. data/vendor/v8/src/profile-generator-inl.h +1 -0
  266. data/vendor/v8/src/profile-generator.cc +209 -147
  267. data/vendor/v8/src/profile-generator.h +15 -12
  268. data/vendor/v8/src/property-details.h +46 -31
  269. data/vendor/v8/src/property.cc +27 -46
  270. data/vendor/v8/src/property.h +163 -83
  271. data/vendor/v8/src/proxy.js +7 -2
  272. data/vendor/v8/src/regexp-macro-assembler-irregexp.cc +4 -13
  273. data/vendor/v8/src/regexp-macro-assembler-irregexp.h +1 -2
  274. data/vendor/v8/src/regexp-macro-assembler-tracer.cc +1 -11
  275. data/vendor/v8/src/regexp-macro-assembler-tracer.h +0 -1
  276. data/vendor/v8/src/regexp-macro-assembler.cc +31 -14
  277. data/vendor/v8/src/regexp-macro-assembler.h +14 -11
  278. data/vendor/v8/src/regexp-stack.cc +1 -0
  279. data/vendor/v8/src/regexp.js +9 -8
  280. data/vendor/v8/src/rewriter.cc +18 -7
  281. data/vendor/v8/src/runtime-profiler.cc +52 -43
  282. data/vendor/v8/src/runtime-profiler.h +0 -25
  283. data/vendor/v8/src/runtime.cc +2006 -2023
  284. data/vendor/v8/src/runtime.h +56 -49
  285. data/vendor/v8/src/safepoint-table.cc +12 -18
  286. data/vendor/v8/src/safepoint-table.h +11 -8
  287. data/vendor/v8/src/scanner.cc +1 -0
  288. data/vendor/v8/src/scanner.h +4 -10
  289. data/vendor/v8/src/scopeinfo.cc +35 -9
  290. data/vendor/v8/src/scopeinfo.h +64 -3
  291. data/vendor/v8/src/scopes.cc +251 -156
  292. data/vendor/v8/src/scopes.h +61 -27
  293. data/vendor/v8/src/serialize.cc +348 -396
  294. data/vendor/v8/src/serialize.h +125 -114
  295. data/vendor/v8/src/small-pointer-list.h +11 -11
  296. data/vendor/v8/src/{smart-array-pointer.h → smart-pointers.h} +64 -15
  297. data/vendor/v8/src/snapshot-common.cc +64 -15
  298. data/vendor/v8/src/snapshot-empty.cc +7 -1
  299. data/vendor/v8/src/snapshot.h +9 -2
  300. data/vendor/v8/src/spaces-inl.h +17 -0
  301. data/vendor/v8/src/spaces.cc +477 -183
  302. data/vendor/v8/src/spaces.h +238 -58
  303. data/vendor/v8/src/splay-tree-inl.h +8 -7
  304. data/vendor/v8/src/splay-tree.h +24 -10
  305. data/vendor/v8/src/store-buffer.cc +12 -5
  306. data/vendor/v8/src/store-buffer.h +2 -4
  307. data/vendor/v8/src/string-search.h +22 -6
  308. data/vendor/v8/src/string-stream.cc +11 -8
  309. data/vendor/v8/src/string.js +47 -15
  310. data/vendor/v8/src/stub-cache.cc +461 -224
  311. data/vendor/v8/src/stub-cache.h +164 -102
  312. data/vendor/v8/src/sweeper-thread.cc +105 -0
  313. data/vendor/v8/src/sweeper-thread.h +81 -0
  314. data/vendor/v8/src/token.h +1 -0
  315. data/vendor/v8/src/transitions-inl.h +220 -0
  316. data/vendor/v8/src/transitions.cc +160 -0
  317. data/vendor/v8/src/transitions.h +207 -0
  318. data/vendor/v8/src/type-info.cc +182 -181
  319. data/vendor/v8/src/type-info.h +31 -19
  320. data/vendor/v8/src/unicode-inl.h +62 -106
  321. data/vendor/v8/src/unicode.cc +57 -67
  322. data/vendor/v8/src/unicode.h +45 -91
  323. data/vendor/v8/src/uri.js +57 -29
  324. data/vendor/v8/src/utils.h +105 -5
  325. data/vendor/v8/src/v8-counters.cc +54 -11
  326. data/vendor/v8/src/v8-counters.h +134 -19
  327. data/vendor/v8/src/v8.cc +29 -29
  328. data/vendor/v8/src/v8.h +1 -0
  329. data/vendor/v8/src/v8conversions.cc +26 -22
  330. data/vendor/v8/src/v8globals.h +56 -43
  331. data/vendor/v8/src/v8natives.js +83 -30
  332. data/vendor/v8/src/v8threads.cc +42 -21
  333. data/vendor/v8/src/v8threads.h +4 -1
  334. data/vendor/v8/src/v8utils.cc +9 -93
  335. data/vendor/v8/src/v8utils.h +37 -33
  336. data/vendor/v8/src/variables.cc +6 -3
  337. data/vendor/v8/src/variables.h +6 -13
  338. data/vendor/v8/src/version.cc +2 -2
  339. data/vendor/v8/src/vm-state-inl.h +11 -0
  340. data/vendor/v8/src/x64/assembler-x64-inl.h +39 -8
  341. data/vendor/v8/src/x64/assembler-x64.cc +78 -64
  342. data/vendor/v8/src/x64/assembler-x64.h +38 -33
  343. data/vendor/v8/src/x64/builtins-x64.cc +105 -7
  344. data/vendor/v8/src/x64/code-stubs-x64.cc +790 -413
  345. data/vendor/v8/src/x64/code-stubs-x64.h +10 -106
  346. data/vendor/v8/src/x64/codegen-x64.cc +210 -8
  347. data/vendor/v8/src/x64/codegen-x64.h +20 -1
  348. data/vendor/v8/src/x64/deoptimizer-x64.cc +336 -75
  349. data/vendor/v8/src/x64/disasm-x64.cc +15 -0
  350. data/vendor/v8/src/x64/frames-x64.h +0 -14
  351. data/vendor/v8/src/x64/full-codegen-x64.cc +293 -270
  352. data/vendor/v8/src/x64/ic-x64.cc +153 -251
  353. data/vendor/v8/src/x64/lithium-codegen-x64.cc +1379 -531
  354. data/vendor/v8/src/x64/lithium-codegen-x64.h +67 -23
  355. data/vendor/v8/src/x64/lithium-gap-resolver-x64.cc +2 -2
  356. data/vendor/v8/src/x64/lithium-x64.cc +349 -289
  357. data/vendor/v8/src/x64/lithium-x64.h +460 -250
  358. data/vendor/v8/src/x64/macro-assembler-x64.cc +350 -177
  359. data/vendor/v8/src/x64/macro-assembler-x64.h +67 -49
  360. data/vendor/v8/src/x64/regexp-macro-assembler-x64.cc +46 -33
  361. data/vendor/v8/src/x64/regexp-macro-assembler-x64.h +2 -3
  362. data/vendor/v8/src/x64/stub-cache-x64.cc +484 -653
  363. data/vendor/v8/src/zone-inl.h +9 -27
  364. data/vendor/v8/src/zone.cc +5 -5
  365. data/vendor/v8/src/zone.h +53 -27
  366. data/vendor/v8/test/benchmarks/testcfg.py +5 -0
  367. data/vendor/v8/test/cctest/cctest.cc +4 -0
  368. data/vendor/v8/test/cctest/cctest.gyp +3 -1
  369. data/vendor/v8/test/cctest/cctest.h +57 -9
  370. data/vendor/v8/test/cctest/cctest.status +15 -15
  371. data/vendor/v8/test/cctest/test-accessors.cc +26 -0
  372. data/vendor/v8/test/cctest/test-alloc.cc +22 -30
  373. data/vendor/v8/test/cctest/test-api.cc +1943 -314
  374. data/vendor/v8/test/cctest/test-assembler-arm.cc +133 -13
  375. data/vendor/v8/test/cctest/test-assembler-ia32.cc +1 -1
  376. data/vendor/v8/test/cctest/test-assembler-mips.cc +12 -0
  377. data/vendor/v8/test/cctest/test-ast.cc +4 -2
  378. data/vendor/v8/test/cctest/test-compiler.cc +61 -29
  379. data/vendor/v8/test/cctest/test-dataflow.cc +2 -2
  380. data/vendor/v8/test/cctest/test-debug.cc +212 -33
  381. data/vendor/v8/test/cctest/test-decls.cc +257 -11
  382. data/vendor/v8/test/cctest/test-dictionary.cc +24 -10
  383. data/vendor/v8/test/cctest/test-disasm-arm.cc +118 -1
  384. data/vendor/v8/test/cctest/test-disasm-ia32.cc +3 -2
  385. data/vendor/v8/test/cctest/test-flags.cc +14 -1
  386. data/vendor/v8/test/cctest/test-func-name-inference.cc +7 -4
  387. data/vendor/v8/test/cctest/test-global-object.cc +51 -0
  388. data/vendor/v8/test/cctest/test-hashing.cc +32 -23
  389. data/vendor/v8/test/cctest/test-heap-profiler.cc +131 -77
  390. data/vendor/v8/test/cctest/test-heap.cc +1084 -143
  391. data/vendor/v8/test/cctest/test-list.cc +1 -1
  392. data/vendor/v8/test/cctest/test-liveedit.cc +3 -2
  393. data/vendor/v8/test/cctest/test-lockers.cc +12 -13
  394. data/vendor/v8/test/cctest/test-log.cc +10 -8
  395. data/vendor/v8/test/cctest/test-macro-assembler-x64.cc +2 -2
  396. data/vendor/v8/test/cctest/test-mark-compact.cc +44 -22
  397. data/vendor/v8/test/cctest/test-object-observe.cc +434 -0
  398. data/vendor/v8/test/cctest/test-parsing.cc +86 -39
  399. data/vendor/v8/test/cctest/test-platform-linux.cc +6 -0
  400. data/vendor/v8/test/cctest/test-platform-win32.cc +7 -0
  401. data/vendor/v8/test/cctest/test-random.cc +5 -4
  402. data/vendor/v8/test/cctest/test-regexp.cc +137 -101
  403. data/vendor/v8/test/cctest/test-serialize.cc +150 -230
  404. data/vendor/v8/test/cctest/test-sockets.cc +1 -1
  405. data/vendor/v8/test/cctest/test-spaces.cc +139 -0
  406. data/vendor/v8/test/cctest/test-strings.cc +736 -74
  407. data/vendor/v8/test/cctest/test-thread-termination.cc +10 -11
  408. data/vendor/v8/test/cctest/test-threads.cc +4 -4
  409. data/vendor/v8/test/cctest/test-utils.cc +16 -0
  410. data/vendor/v8/test/cctest/test-weakmaps.cc +7 -3
  411. data/vendor/v8/test/cctest/testcfg.py +64 -5
  412. data/vendor/v8/test/es5conform/testcfg.py +5 -0
  413. data/vendor/v8/test/message/message.status +1 -1
  414. data/vendor/v8/test/message/overwritten-builtins.out +3 -0
  415. data/vendor/v8/test/message/testcfg.py +89 -8
  416. data/vendor/v8/test/message/try-catch-finally-no-message.out +26 -26
  417. data/vendor/v8/test/mjsunit/accessor-map-sharing.js +18 -2
  418. data/vendor/v8/test/mjsunit/allocation-site-info.js +126 -0
  419. data/vendor/v8/test/mjsunit/array-bounds-check-removal.js +62 -1
  420. data/vendor/v8/test/mjsunit/array-iteration.js +1 -1
  421. data/vendor/v8/test/mjsunit/array-literal-transitions.js +2 -0
  422. data/vendor/v8/test/mjsunit/array-natives-elements.js +317 -0
  423. data/vendor/v8/test/mjsunit/array-reduce.js +8 -8
  424. data/vendor/v8/test/mjsunit/array-slice.js +12 -0
  425. data/vendor/v8/test/mjsunit/array-store-and-grow.js +4 -1
  426. data/vendor/v8/test/mjsunit/assert-opt-and-deopt.js +1 -1
  427. data/vendor/v8/test/mjsunit/bugs/bug-2337.js +53 -0
  428. data/vendor/v8/test/mjsunit/compare-known-objects-slow.js +69 -0
  429. data/vendor/v8/test/mjsunit/compiler/alloc-object-huge.js +3 -1
  430. data/vendor/v8/test/mjsunit/compiler/inline-accessors.js +368 -0
  431. data/vendor/v8/test/mjsunit/compiler/inline-arguments.js +87 -1
  432. data/vendor/v8/test/mjsunit/compiler/inline-closures.js +49 -0
  433. data/vendor/v8/test/mjsunit/compiler/inline-construct.js +55 -43
  434. data/vendor/v8/test/mjsunit/compiler/inline-literals.js +39 -0
  435. data/vendor/v8/test/mjsunit/compiler/multiply-add.js +69 -0
  436. data/vendor/v8/test/mjsunit/compiler/optimized-closures.js +57 -0
  437. data/vendor/v8/test/mjsunit/compiler/parallel-proto-change.js +44 -0
  438. data/vendor/v8/test/mjsunit/compiler/property-static.js +69 -0
  439. data/vendor/v8/test/mjsunit/compiler/proto-chain-constant.js +55 -0
  440. data/vendor/v8/test/mjsunit/compiler/proto-chain-load.js +44 -0
  441. data/vendor/v8/test/mjsunit/compiler/regress-gvn.js +3 -2
  442. data/vendor/v8/test/mjsunit/compiler/regress-or.js +6 -2
  443. data/vendor/v8/test/mjsunit/compiler/rotate.js +224 -0
  444. data/vendor/v8/test/mjsunit/compiler/uint32.js +173 -0
  445. data/vendor/v8/test/mjsunit/count-based-osr.js +2 -1
  446. data/vendor/v8/test/mjsunit/d8-os.js +3 -3
  447. data/vendor/v8/test/mjsunit/date-parse.js +3 -0
  448. data/vendor/v8/test/mjsunit/date.js +22 -0
  449. data/vendor/v8/test/mjsunit/debug-break-inline.js +1 -0
  450. data/vendor/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js +22 -12
  451. data/vendor/v8/test/mjsunit/debug-evaluate-locals-optimized.js +21 -10
  452. data/vendor/v8/test/mjsunit/debug-liveedit-compile-error.js +60 -0
  453. data/vendor/v8/test/mjsunit/debug-liveedit-double-call.js +142 -0
  454. data/vendor/v8/test/mjsunit/debug-liveedit-literals.js +94 -0
  455. data/vendor/v8/test/mjsunit/debug-liveedit-restart-frame.js +153 -0
  456. data/vendor/v8/test/mjsunit/debug-multiple-breakpoints.js +1 -1
  457. data/vendor/v8/test/mjsunit/debug-script-breakpoints-closure.js +67 -0
  458. data/vendor/v8/test/mjsunit/debug-script-breakpoints-nested.js +82 -0
  459. data/vendor/v8/test/mjsunit/debug-script.js +4 -2
  460. data/vendor/v8/test/mjsunit/debug-set-variable-value.js +308 -0
  461. data/vendor/v8/test/mjsunit/debug-stepout-scope-part1.js +190 -0
  462. data/vendor/v8/test/mjsunit/debug-stepout-scope-part2.js +83 -0
  463. data/vendor/v8/test/mjsunit/debug-stepout-scope-part3.js +80 -0
  464. data/vendor/v8/test/mjsunit/debug-stepout-scope-part4.js +80 -0
  465. data/vendor/v8/test/mjsunit/debug-stepout-scope-part5.js +77 -0
  466. data/vendor/v8/test/mjsunit/debug-stepout-scope-part6.js +79 -0
  467. data/vendor/v8/test/mjsunit/debug-stepout-scope-part7.js +79 -0
  468. data/vendor/v8/test/mjsunit/{debug-stepout-scope.js → debug-stepout-scope-part8.js} +0 -189
  469. data/vendor/v8/test/mjsunit/delete-non-configurable.js +74 -0
  470. data/vendor/v8/test/mjsunit/deopt-minus-zero.js +56 -0
  471. data/vendor/v8/test/mjsunit/elements-kind.js +6 -4
  472. data/vendor/v8/test/mjsunit/elements-length-no-holey.js +33 -0
  473. data/vendor/v8/test/mjsunit/elements-transition-hoisting.js +46 -19
  474. data/vendor/v8/test/mjsunit/error-accessors.js +54 -0
  475. data/vendor/v8/test/mjsunit/error-constructors.js +1 -14
  476. data/vendor/v8/test/mjsunit/error-tostring.js +8 -0
  477. data/vendor/v8/test/mjsunit/eval-stack-trace.js +204 -0
  478. data/vendor/v8/test/mjsunit/external-array.js +364 -1
  479. data/vendor/v8/test/mjsunit/fast-array-length.js +37 -0
  480. data/vendor/v8/test/mjsunit/fast-non-keyed.js +113 -0
  481. data/vendor/v8/test/mjsunit/fast-prototype.js +117 -0
  482. data/vendor/v8/test/mjsunit/function-call.js +14 -18
  483. data/vendor/v8/test/mjsunit/fuzz-natives-part1.js +230 -0
  484. data/vendor/v8/test/mjsunit/fuzz-natives-part2.js +229 -0
  485. data/vendor/v8/test/mjsunit/fuzz-natives-part3.js +229 -0
  486. data/vendor/v8/test/mjsunit/{fuzz-natives.js → fuzz-natives-part4.js} +12 -2
  487. data/vendor/v8/test/mjsunit/generated-transition-stub.js +218 -0
  488. data/vendor/v8/test/mjsunit/greedy.js +1 -1
  489. data/vendor/v8/test/mjsunit/harmony/block-conflicts.js +2 -1
  490. data/vendor/v8/test/mjsunit/harmony/block-let-crankshaft.js +1 -1
  491. data/vendor/v8/test/mjsunit/harmony/collections.js +69 -11
  492. data/vendor/v8/test/mjsunit/harmony/debug-blockscopes.js +2 -2
  493. data/vendor/v8/test/mjsunit/harmony/module-linking.js +180 -3
  494. data/vendor/v8/test/mjsunit/harmony/module-parsing.js +31 -0
  495. data/vendor/v8/test/mjsunit/harmony/module-recompile.js +87 -0
  496. data/vendor/v8/test/mjsunit/harmony/module-resolution.js +15 -2
  497. data/vendor/v8/test/mjsunit/harmony/object-observe.js +1056 -0
  498. data/vendor/v8/test/mjsunit/harmony/proxies-json.js +178 -0
  499. data/vendor/v8/test/mjsunit/harmony/proxies.js +25 -10
  500. data/vendor/v8/test/mjsunit/json-parser-recursive.js +33 -0
  501. data/vendor/v8/test/mjsunit/json-stringify-recursive.js +52 -0
  502. data/vendor/v8/test/mjsunit/json.js +38 -2
  503. data/vendor/v8/test/mjsunit/json2.js +153 -0
  504. data/vendor/v8/test/mjsunit/limit-locals.js +5 -4
  505. data/vendor/v8/test/mjsunit/manual-parallel-recompile.js +79 -0
  506. data/vendor/v8/test/mjsunit/math-exp-precision.js +64 -0
  507. data/vendor/v8/test/mjsunit/math-floor-negative.js +59 -0
  508. data/vendor/v8/test/mjsunit/math-floor-of-div-minus-zero.js +41 -0
  509. data/vendor/v8/test/mjsunit/math-floor-of-div-nosudiv.js +288 -0
  510. data/vendor/v8/test/mjsunit/math-floor-of-div.js +81 -9
  511. data/vendor/v8/test/mjsunit/{math-floor.js → math-floor-part1.js} +1 -72
  512. data/vendor/v8/test/mjsunit/math-floor-part2.js +76 -0
  513. data/vendor/v8/test/mjsunit/math-floor-part3.js +78 -0
  514. data/vendor/v8/test/mjsunit/math-floor-part4.js +76 -0
  515. data/vendor/v8/test/mjsunit/mirror-object.js +43 -9
  516. data/vendor/v8/test/mjsunit/mjsunit.js +1 -1
  517. data/vendor/v8/test/mjsunit/mjsunit.status +52 -27
  518. data/vendor/v8/test/mjsunit/mul-exhaustive-part1.js +491 -0
  519. data/vendor/v8/test/mjsunit/mul-exhaustive-part10.js +470 -0
  520. data/vendor/v8/test/mjsunit/mul-exhaustive-part2.js +525 -0
  521. data/vendor/v8/test/mjsunit/mul-exhaustive-part3.js +532 -0
  522. data/vendor/v8/test/mjsunit/mul-exhaustive-part4.js +509 -0
  523. data/vendor/v8/test/mjsunit/mul-exhaustive-part5.js +505 -0
  524. data/vendor/v8/test/mjsunit/mul-exhaustive-part6.js +554 -0
  525. data/vendor/v8/test/mjsunit/mul-exhaustive-part7.js +497 -0
  526. data/vendor/v8/test/mjsunit/mul-exhaustive-part8.js +526 -0
  527. data/vendor/v8/test/mjsunit/mul-exhaustive-part9.js +533 -0
  528. data/vendor/v8/test/mjsunit/new-function.js +34 -0
  529. data/vendor/v8/test/mjsunit/numops-fuzz-part1.js +1172 -0
  530. data/vendor/v8/test/mjsunit/numops-fuzz-part2.js +1178 -0
  531. data/vendor/v8/test/mjsunit/numops-fuzz-part3.js +1178 -0
  532. data/vendor/v8/test/mjsunit/numops-fuzz-part4.js +1177 -0
  533. data/vendor/v8/test/mjsunit/object-define-property.js +107 -2
  534. data/vendor/v8/test/mjsunit/override-read-only-property.js +6 -4
  535. data/vendor/v8/test/mjsunit/packed-elements.js +2 -2
  536. data/vendor/v8/test/mjsunit/parse-int-float.js +4 -4
  537. data/vendor/v8/test/mjsunit/pixel-array-rounding.js +1 -1
  538. data/vendor/v8/test/mjsunit/readonly.js +228 -0
  539. data/vendor/v8/test/mjsunit/regexp-capture-3.js +16 -18
  540. data/vendor/v8/test/mjsunit/regexp-capture.js +2 -0
  541. data/vendor/v8/test/mjsunit/regexp-global.js +122 -0
  542. data/vendor/v8/test/mjsunit/regexp-results-cache.js +78 -0
  543. data/vendor/v8/test/mjsunit/regress/regress-1117.js +12 -3
  544. data/vendor/v8/test/mjsunit/regress/regress-1118.js +1 -1
  545. data/vendor/v8/test/mjsunit/regress/regress-115100.js +36 -0
  546. data/vendor/v8/test/mjsunit/regress/regress-1199637.js +1 -3
  547. data/vendor/v8/test/mjsunit/regress/regress-121407.js +1 -1
  548. data/vendor/v8/test/mjsunit/regress/regress-131923.js +30 -0
  549. data/vendor/v8/test/mjsunit/regress/regress-131994.js +70 -0
  550. data/vendor/v8/test/mjsunit/regress/regress-133211.js +35 -0
  551. data/vendor/v8/test/mjsunit/regress/regress-133211b.js +39 -0
  552. data/vendor/v8/test/mjsunit/regress/regress-136048.js +34 -0
  553. data/vendor/v8/test/mjsunit/regress/regress-137768.js +73 -0
  554. data/vendor/v8/test/mjsunit/regress/regress-143967.js +34 -0
  555. data/vendor/v8/test/mjsunit/regress/regress-145201.js +107 -0
  556. data/vendor/v8/test/mjsunit/regress/regress-147497.js +45 -0
  557. data/vendor/v8/test/mjsunit/regress/regress-148378.js +38 -0
  558. data/vendor/v8/test/mjsunit/regress/regress-1563.js +1 -1
  559. data/vendor/v8/test/mjsunit/regress/regress-1591.js +48 -0
  560. data/vendor/v8/test/mjsunit/regress/regress-164442.js +45 -0
  561. data/vendor/v8/test/mjsunit/regress/regress-165637.js +61 -0
  562. data/vendor/v8/test/mjsunit/regress/regress-166379.js +39 -0
  563. data/vendor/v8/test/mjsunit/regress/regress-166553.js +33 -0
  564. data/vendor/v8/test/mjsunit/regress/regress-1692.js +1 -1
  565. data/vendor/v8/test/mjsunit/regress/regress-171641.js +40 -0
  566. data/vendor/v8/test/mjsunit/regress/regress-1980.js +1 -1
  567. data/vendor/v8/test/mjsunit/regress/regress-2073.js +99 -0
  568. data/vendor/v8/test/mjsunit/regress/regress-2119.js +36 -0
  569. data/vendor/v8/test/mjsunit/regress/regress-2156.js +39 -0
  570. data/vendor/v8/test/mjsunit/regress/regress-2163.js +70 -0
  571. data/vendor/v8/test/mjsunit/regress/regress-2170.js +58 -0
  572. data/vendor/v8/test/mjsunit/regress/regress-2172.js +35 -0
  573. data/vendor/v8/test/mjsunit/regress/regress-2185-2.js +145 -0
  574. data/vendor/v8/test/mjsunit/regress/regress-2185.js +38 -0
  575. data/vendor/v8/test/mjsunit/regress/regress-2186.js +49 -0
  576. data/vendor/v8/test/mjsunit/regress/regress-2193.js +58 -0
  577. data/vendor/v8/test/mjsunit/regress/regress-2219.js +32 -0
  578. data/vendor/v8/test/mjsunit/regress/regress-2225.js +65 -0
  579. data/vendor/v8/test/mjsunit/regress/regress-2226.js +36 -0
  580. data/vendor/v8/test/mjsunit/regress/regress-2234.js +41 -0
  581. data/vendor/v8/test/mjsunit/regress/regress-2243.js +31 -0
  582. data/vendor/v8/test/mjsunit/regress/regress-2249.js +33 -0
  583. data/vendor/v8/test/mjsunit/regress/regress-2250.js +68 -0
  584. data/vendor/v8/test/mjsunit/regress/regress-2261.js +113 -0
  585. data/vendor/v8/test/mjsunit/regress/regress-2263.js +30 -0
  586. data/vendor/v8/test/mjsunit/regress/regress-2284.js +32 -0
  587. data/vendor/v8/test/mjsunit/regress/regress-2285.js +32 -0
  588. data/vendor/v8/test/mjsunit/regress/regress-2286.js +32 -0
  589. data/vendor/v8/test/mjsunit/regress/regress-2289.js +34 -0
  590. data/vendor/v8/test/mjsunit/regress/regress-2291.js +36 -0
  591. data/vendor/v8/test/mjsunit/regress/regress-2294.js +70 -0
  592. data/vendor/v8/test/mjsunit/regress/regress-2296.js +40 -0
  593. data/vendor/v8/test/mjsunit/regress/regress-2315.js +40 -0
  594. data/vendor/v8/test/mjsunit/regress/regress-2318.js +66 -0
  595. data/vendor/v8/test/mjsunit/regress/regress-2322.js +36 -0
  596. data/vendor/v8/test/mjsunit/regress/regress-2326.js +54 -0
  597. data/vendor/v8/test/mjsunit/regress/regress-2336.js +53 -0
  598. data/vendor/v8/test/mjsunit/regress/regress-2339.js +59 -0
  599. data/vendor/v8/test/mjsunit/regress/regress-2346.js +123 -0
  600. data/vendor/v8/test/mjsunit/regress/regress-2373.js +29 -0
  601. data/vendor/v8/test/mjsunit/regress/regress-2374.js +33 -0
  602. data/vendor/v8/test/mjsunit/regress/regress-2398.js +41 -0
  603. data/vendor/v8/test/mjsunit/regress/regress-2410.js +36 -0
  604. data/vendor/v8/test/mjsunit/regress/regress-2416.js +75 -0
  605. data/vendor/v8/test/mjsunit/regress/regress-2419.js +37 -0
  606. data/vendor/v8/test/mjsunit/regress/regress-2433.js +36 -0
  607. data/vendor/v8/test/mjsunit/regress/regress-2437.js +156 -0
  608. data/vendor/v8/test/mjsunit/regress/regress-2438.js +52 -0
  609. data/vendor/v8/test/mjsunit/regress/regress-2443.js +129 -0
  610. data/vendor/v8/test/mjsunit/regress/regress-2444.js +120 -0
  611. data/vendor/v8/test/mjsunit/regress/regress-2489.js +50 -0
  612. data/vendor/v8/test/mjsunit/regress/regress-2499.js +40 -0
  613. data/vendor/v8/test/mjsunit/regress/regress-334.js +1 -1
  614. data/vendor/v8/test/mjsunit/regress/regress-492.js +39 -1
  615. data/vendor/v8/test/mjsunit/regress/regress-builtin-array-op.js +38 -0
  616. data/vendor/v8/test/mjsunit/regress/regress-cnlt-elements.js +43 -0
  617. data/vendor/v8/test/mjsunit/regress/regress-cnlt-enum-indices.js +45 -0
  618. data/vendor/v8/test/mjsunit/regress/regress-cntl-descriptors-enum.js +46 -0
  619. data/vendor/v8/test/mjsunit/regress/regress-convert-enum.js +60 -0
  620. data/vendor/v8/test/mjsunit/regress/regress-convert-enum2.js +46 -0
  621. data/vendor/v8/test/mjsunit/regress/regress-convert-transition.js +40 -0
  622. data/vendor/v8/test/mjsunit/regress/regress-crbug-119926.js +3 -1
  623. data/vendor/v8/test/mjsunit/regress/regress-crbug-125148.js +90 -0
  624. data/vendor/v8/test/mjsunit/regress/regress-crbug-134055.js +63 -0
  625. data/vendor/v8/test/mjsunit/regress/regress-crbug-134609.js +59 -0
  626. data/vendor/v8/test/mjsunit/regress/regress-crbug-135008.js +45 -0
  627. data/vendor/v8/test/mjsunit/regress/regress-crbug-135066.js +55 -0
  628. data/vendor/v8/test/mjsunit/regress/regress-crbug-137689.js +47 -0
  629. data/vendor/v8/test/mjsunit/regress/regress-crbug-138887.js +48 -0
  630. data/vendor/v8/test/mjsunit/regress/regress-crbug-140083.js +44 -0
  631. data/vendor/v8/test/mjsunit/regress/regress-crbug-142087.js +38 -0
  632. data/vendor/v8/test/mjsunit/regress/regress-crbug-142218.js +44 -0
  633. data/vendor/v8/test/mjsunit/regress/regress-crbug-145961.js +39 -0
  634. data/vendor/v8/test/mjsunit/regress/regress-crbug-146910.js +33 -0
  635. data/vendor/v8/test/mjsunit/regress/regress-crbug-147475.js +48 -0
  636. data/vendor/v8/test/mjsunit/regress/regress-crbug-148376.js +35 -0
  637. data/vendor/v8/test/mjsunit/regress/regress-crbug-150545.js +53 -0
  638. data/vendor/v8/test/mjsunit/regress/regress-crbug-150729.js +39 -0
  639. data/vendor/v8/test/mjsunit/regress/regress-crbug-157019.js +54 -0
  640. data/vendor/v8/test/mjsunit/regress/regress-crbug-157520.js +38 -0
  641. data/vendor/v8/test/mjsunit/regress/regress-crbug-158185.js +39 -0
  642. data/vendor/v8/test/mjsunit/regress/regress-crbug-160010.js +35 -0
  643. data/vendor/v8/test/mjsunit/regress/regress-crbug-162085.js +71 -0
  644. data/vendor/v8/test/mjsunit/regress/regress-crbug-168545.js +34 -0
  645. data/vendor/v8/test/mjsunit/regress/regress-crbug-170856.js +33 -0
  646. data/vendor/v8/test/mjsunit/regress/regress-crbug-172345.js +34 -0
  647. data/vendor/v8/test/mjsunit/regress/regress-crbug-173974.js +36 -0
  648. data/vendor/v8/test/mjsunit/regress/regress-crbug-18639.js +9 -5
  649. data/vendor/v8/test/mjsunit/regress/regress-debug-code-recompilation.js +2 -1
  650. data/vendor/v8/test/mjsunit/regress/regress-deep-proto.js +45 -0
  651. data/vendor/v8/test/mjsunit/regress/regress-delete-empty-double.js +40 -0
  652. data/vendor/v8/test/mjsunit/regress/regress-iteration-order.js +42 -0
  653. data/vendor/v8/test/mjsunit/regress/regress-json-stringify-gc.js +41 -0
  654. data/vendor/v8/test/mjsunit/regress/regress-latin-1.js +78 -0
  655. data/vendor/v8/test/mjsunit/regress/regress-load-elements.js +49 -0
  656. data/vendor/v8/test/mjsunit/regress/regress-observe-empty-double-array.js +38 -0
  657. data/vendor/v8/test/mjsunit/regress/regress-undefined-store-keyed-fast-element.js +37 -0
  658. data/vendor/v8/test/mjsunit/shift-for-integer-div.js +59 -0
  659. data/vendor/v8/test/mjsunit/stack-traces-gc.js +119 -0
  660. data/vendor/v8/test/mjsunit/stack-traces-overflow.js +122 -0
  661. data/vendor/v8/test/mjsunit/stack-traces.js +39 -1
  662. data/vendor/v8/test/mjsunit/str-to-num.js +7 -2
  663. data/vendor/v8/test/mjsunit/strict-mode.js +36 -11
  664. data/vendor/v8/test/mjsunit/string-charcodeat.js +3 -0
  665. data/vendor/v8/test/mjsunit/string-natives.js +72 -0
  666. data/vendor/v8/test/mjsunit/string-split.js +17 -0
  667. data/vendor/v8/test/mjsunit/testcfg.py +76 -6
  668. data/vendor/v8/test/mjsunit/tools/tickprocessor.js +4 -1
  669. data/vendor/v8/test/mjsunit/try-finally-continue.js +72 -0
  670. data/vendor/v8/test/mjsunit/typed-array-slice.js +61 -0
  671. data/vendor/v8/test/mjsunit/unbox-double-arrays.js +2 -0
  672. data/vendor/v8/test/mjsunit/uri.js +12 -0
  673. data/vendor/v8/test/mjsunit/with-readonly.js +4 -2
  674. data/vendor/v8/test/mozilla/mozilla.status +19 -113
  675. data/vendor/v8/test/mozilla/testcfg.py +122 -3
  676. data/vendor/v8/test/preparser/preparser.status +5 -0
  677. data/vendor/v8/test/preparser/strict-identifiers.pyt +1 -1
  678. data/vendor/v8/test/preparser/testcfg.py +101 -5
  679. data/vendor/v8/test/sputnik/sputnik.status +1 -1
  680. data/vendor/v8/test/sputnik/testcfg.py +5 -0
  681. data/vendor/v8/test/test262/README +2 -2
  682. data/vendor/v8/test/test262/test262.status +13 -36
  683. data/vendor/v8/test/test262/testcfg.py +102 -8
  684. data/vendor/v8/tools/android-build.sh +0 -0
  685. data/vendor/v8/tools/android-ll-prof.sh +69 -0
  686. data/vendor/v8/tools/android-run.py +109 -0
  687. data/vendor/v8/tools/android-sync.sh +105 -0
  688. data/vendor/v8/tools/bash-completion.sh +0 -0
  689. data/vendor/v8/tools/check-static-initializers.sh +0 -0
  690. data/vendor/v8/tools/common-includes.sh +15 -22
  691. data/vendor/v8/tools/disasm.py +4 -4
  692. data/vendor/v8/tools/fuzz-harness.sh +0 -0
  693. data/vendor/v8/tools/gen-postmortem-metadata.py +6 -8
  694. data/vendor/v8/tools/grokdump.py +404 -129
  695. data/vendor/v8/tools/gyp/v8.gyp +105 -43
  696. data/vendor/v8/tools/linux-tick-processor +5 -5
  697. data/vendor/v8/tools/ll_prof.py +75 -15
  698. data/vendor/v8/tools/merge-to-branch.sh +2 -2
  699. data/vendor/v8/tools/plot-timer-events +70 -0
  700. data/vendor/v8/tools/plot-timer-events.js +510 -0
  701. data/vendor/v8/tools/presubmit.py +1 -0
  702. data/vendor/v8/tools/push-to-trunk.sh +14 -4
  703. data/vendor/v8/tools/run-llprof.sh +69 -0
  704. data/vendor/v8/tools/run-tests.py +372 -0
  705. data/vendor/v8/tools/run-valgrind.py +1 -1
  706. data/vendor/v8/tools/status-file-converter.py +39 -0
  707. data/vendor/v8/tools/test-server.py +224 -0
  708. data/vendor/v8/tools/test-wrapper-gypbuild.py +13 -16
  709. data/vendor/v8/tools/test.py +10 -19
  710. data/vendor/v8/tools/testrunner/README +174 -0
  711. data/vendor/v8/tools/testrunner/__init__.py +26 -0
  712. data/vendor/v8/tools/testrunner/local/__init__.py +26 -0
  713. data/vendor/v8/tools/testrunner/local/commands.py +153 -0
  714. data/vendor/v8/tools/testrunner/local/execution.py +182 -0
  715. data/vendor/v8/tools/testrunner/local/old_statusfile.py +460 -0
  716. data/vendor/v8/tools/testrunner/local/progress.py +238 -0
  717. data/vendor/v8/tools/testrunner/local/statusfile.py +145 -0
  718. data/vendor/v8/tools/testrunner/local/testsuite.py +187 -0
  719. data/vendor/v8/tools/testrunner/local/utils.py +108 -0
  720. data/vendor/v8/tools/testrunner/local/verbose.py +99 -0
  721. data/vendor/v8/tools/testrunner/network/__init__.py +26 -0
  722. data/vendor/v8/tools/testrunner/network/distro.py +90 -0
  723. data/vendor/v8/tools/testrunner/network/endpoint.py +124 -0
  724. data/vendor/v8/tools/testrunner/network/network_execution.py +253 -0
  725. data/vendor/v8/tools/testrunner/network/perfdata.py +120 -0
  726. data/vendor/v8/tools/testrunner/objects/__init__.py +26 -0
  727. data/vendor/v8/tools/testrunner/objects/context.py +50 -0
  728. data/vendor/v8/tools/testrunner/objects/output.py +60 -0
  729. data/vendor/v8/tools/testrunner/objects/peer.py +80 -0
  730. data/vendor/v8/tools/testrunner/objects/testcase.py +83 -0
  731. data/vendor/v8/tools/testrunner/objects/workpacket.py +90 -0
  732. data/vendor/v8/tools/testrunner/server/__init__.py +26 -0
  733. data/vendor/v8/tools/testrunner/server/compression.py +111 -0
  734. data/vendor/v8/tools/testrunner/server/constants.py +51 -0
  735. data/vendor/v8/tools/testrunner/server/daemon.py +147 -0
  736. data/vendor/v8/tools/testrunner/server/local_handler.py +119 -0
  737. data/vendor/v8/tools/testrunner/server/main.py +245 -0
  738. data/vendor/v8/tools/testrunner/server/presence_handler.py +120 -0
  739. data/vendor/v8/tools/testrunner/server/signatures.py +63 -0
  740. data/vendor/v8/tools/testrunner/server/status_handler.py +112 -0
  741. data/vendor/v8/tools/testrunner/server/work_handler.py +150 -0
  742. data/vendor/v8/tools/tick-processor.html +168 -0
  743. data/vendor/v8/tools/tickprocessor-driver.js +5 -3
  744. data/vendor/v8/tools/tickprocessor.js +58 -15
  745. metadata +534 -30
  746. data/patches/add-freebsd9-and-freebsd10-to-gyp-GetFlavor.patch +0 -11
  747. data/patches/do-not-imply-vfp3-and-armv7.patch +0 -44
  748. data/patches/fPIC-on-x64.patch +0 -14
  749. data/vendor/v8/src/liveobjectlist-inl.h +0 -126
  750. data/vendor/v8/src/liveobjectlist.cc +0 -2631
  751. data/vendor/v8/src/liveobjectlist.h +0 -319
  752. data/vendor/v8/test/mjsunit/mul-exhaustive.js +0 -4629
  753. data/vendor/v8/test/mjsunit/numops-fuzz.js +0 -4609
  754. data/vendor/v8/test/mjsunit/regress/regress-1969.js +0 -5045
@@ -30,6 +30,7 @@
30
30
  #if defined(V8_TARGET_ARCH_IA32)
31
31
 
32
32
  #include "ia32/lithium-codegen-ia32.h"
33
+ #include "ic.h"
33
34
  #include "code-stubs.h"
34
35
  #include "deoptimizer.h"
35
36
  #include "stub-cache.h"
@@ -70,18 +71,23 @@ bool LCodeGen::GenerateCode() {
70
71
  HPhase phase("Z_Code generation", chunk());
71
72
  ASSERT(is_unused());
72
73
  status_ = GENERATING;
73
- CpuFeatures::Scope scope(SSE2);
74
-
75
- CodeStub::GenerateFPStubs();
76
74
 
77
75
  // Open a frame scope to indicate that there is a frame on the stack. The
78
76
  // MANUAL indicates that the scope shouldn't actually generate code to set up
79
77
  // the frame (that is done in GeneratePrologue).
80
78
  FrameScope frame_scope(masm_, StackFrame::MANUAL);
81
79
 
80
+ support_aligned_spilled_doubles_ = info()->IsOptimizing();
81
+
82
+ dynamic_frame_alignment_ = info()->IsOptimizing() &&
83
+ ((chunk()->num_double_slots() > 2 &&
84
+ !chunk()->graph()->is_recursive()) ||
85
+ !info()->osr_ast_id().IsNone());
86
+
82
87
  return GeneratePrologue() &&
83
88
  GenerateBody() &&
84
89
  GenerateDeferredCode() &&
90
+ GenerateJumpTable() &&
85
91
  GenerateSafepointTable();
86
92
  }
87
93
 
@@ -91,21 +97,14 @@ void LCodeGen::FinishCode(Handle<Code> code) {
91
97
  code->set_stack_slots(GetStackSlotCount());
92
98
  code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
93
99
  PopulateDeoptimizationData(code);
94
- Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
100
+ if (!info()->IsStub()) {
101
+ Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
102
+ }
95
103
  }
96
104
 
97
105
 
98
- void LCodeGen::Abort(const char* format, ...) {
99
- if (FLAG_trace_bailout) {
100
- SmartArrayPointer<char> name(
101
- info()->shared_info()->DebugName()->ToCString());
102
- PrintF("Aborting LCodeGen in @\"%s\": ", *name);
103
- va_list arguments;
104
- va_start(arguments, format);
105
- OS::VPrint(format, arguments);
106
- va_end(arguments);
107
- PrintF("\n");
108
- }
106
+ void LCodeGen::Abort(const char* reason) {
107
+ info()->set_bailout_reason(reason);
109
108
  status_ = ABORTED;
110
109
  }
111
110
 
@@ -131,61 +130,144 @@ void LCodeGen::Comment(const char* format, ...) {
131
130
  bool LCodeGen::GeneratePrologue() {
132
131
  ASSERT(is_generating());
133
132
 
133
+ if (info()->IsOptimizing()) {
134
+ ProfileEntryHookStub::MaybeCallEntryHook(masm_);
135
+
134
136
  #ifdef DEBUG
135
- if (strlen(FLAG_stop_at) > 0 &&
136
- info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
137
- __ int3();
138
- }
137
+ if (strlen(FLAG_stop_at) > 0 &&
138
+ info_->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
139
+ __ int3();
140
+ }
139
141
  #endif
140
142
 
141
- // Strict mode functions and builtins need to replace the receiver
142
- // with undefined when called as functions (without an explicit
143
- // receiver object). ecx is zero for method calls and non-zero for
144
- // function calls.
145
- if (!info_->is_classic_mode() || info_->is_native()) {
146
- Label ok;
147
- __ test(ecx, Operand(ecx));
148
- __ j(zero, &ok, Label::kNear);
149
- // +1 for return address.
150
- int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize;
151
- __ mov(Operand(esp, receiver_offset),
152
- Immediate(isolate()->factory()->undefined_value()));
153
- __ bind(&ok);
143
+ // Strict mode functions and builtins need to replace the receiver
144
+ // with undefined when called as functions (without an explicit
145
+ // receiver object). ecx is zero for method calls and non-zero for
146
+ // function calls.
147
+ if (!info_->is_classic_mode() || info_->is_native()) {
148
+ Label ok;
149
+ __ test(ecx, Operand(ecx));
150
+ __ j(zero, &ok, Label::kNear);
151
+ // +1 for return address.
152
+ int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize;
153
+ __ mov(Operand(esp, receiver_offset),
154
+ Immediate(isolate()->factory()->undefined_value()));
155
+ __ bind(&ok);
156
+ }
157
+
158
+ if (support_aligned_spilled_doubles_ && dynamic_frame_alignment_) {
159
+ // Move state of dynamic frame alignment into edx.
160
+ __ mov(edx, Immediate(kNoAlignmentPadding));
161
+
162
+ Label do_not_pad, align_loop;
163
+ STATIC_ASSERT(kDoubleSize == 2 * kPointerSize);
164
+ // Align esp + 4 to a multiple of 2 * kPointerSize.
165
+ __ test(esp, Immediate(kPointerSize));
166
+ __ j(not_zero, &do_not_pad, Label::kNear);
167
+ __ push(Immediate(0));
168
+ __ mov(ebx, esp);
169
+ __ mov(edx, Immediate(kAlignmentPaddingPushed));
170
+ // Copy arguments, receiver, and return address.
171
+ __ mov(ecx, Immediate(scope()->num_parameters() + 2));
172
+
173
+ __ bind(&align_loop);
174
+ __ mov(eax, Operand(ebx, 1 * kPointerSize));
175
+ __ mov(Operand(ebx, 0), eax);
176
+ __ add(Operand(ebx), Immediate(kPointerSize));
177
+ __ dec(ecx);
178
+ __ j(not_zero, &align_loop, Label::kNear);
179
+ __ mov(Operand(ebx, 0), Immediate(kAlignmentZapValue));
180
+ __ bind(&do_not_pad);
181
+ }
182
+ }
183
+
184
+ info()->set_prologue_offset(masm_->pc_offset());
185
+ if (NeedsEagerFrame()) {
186
+ ASSERT(!frame_is_built_);
187
+ frame_is_built_ = true;
188
+ __ push(ebp); // Caller's frame pointer.
189
+ __ mov(ebp, esp);
190
+ __ push(esi); // Callee's context.
191
+ if (info()->IsStub()) {
192
+ __ push(Immediate(Smi::FromInt(StackFrame::STUB)));
193
+ } else {
194
+ __ push(edi); // Callee's JS function.
195
+ }
154
196
  }
155
197
 
156
- __ push(ebp); // Caller's frame pointer.
157
- __ mov(ebp, esp);
158
- __ push(esi); // Callee's context.
159
- __ push(edi); // Callee's JS function.
198
+ if (info()->IsOptimizing() &&
199
+ dynamic_frame_alignment_ &&
200
+ FLAG_debug_code) {
201
+ __ test(esp, Immediate(kPointerSize));
202
+ __ Assert(zero, "frame is expected to be aligned");
203
+ }
160
204
 
161
205
  // Reserve space for the stack slots needed by the code.
162
206
  int slots = GetStackSlotCount();
207
+ ASSERT(slots != 0 || !info()->IsOptimizing());
163
208
  if (slots > 0) {
164
- if (FLAG_debug_code) {
165
- __ mov(Operand(eax), Immediate(slots));
166
- Label loop;
167
- __ bind(&loop);
168
- __ push(Immediate(kSlotsZapValue));
169
- __ dec(eax);
170
- __ j(not_zero, &loop);
209
+ if (slots == 1) {
210
+ if (dynamic_frame_alignment_) {
211
+ __ push(edx);
212
+ } else {
213
+ __ push(Immediate(kNoAlignmentPadding));
214
+ }
171
215
  } else {
172
- __ sub(Operand(esp), Immediate(slots * kPointerSize));
216
+ if (FLAG_debug_code) {
217
+ __ sub(Operand(esp), Immediate(slots * kPointerSize));
218
+ __ push(eax);
219
+ __ mov(Operand(eax), Immediate(slots));
220
+ Label loop;
221
+ __ bind(&loop);
222
+ __ mov(MemOperand(esp, eax, times_4, 0),
223
+ Immediate(kSlotsZapValue));
224
+ __ dec(eax);
225
+ __ j(not_zero, &loop);
226
+ __ pop(eax);
227
+ } else {
228
+ __ sub(Operand(esp), Immediate(slots * kPointerSize));
173
229
  #ifdef _MSC_VER
174
- // On windows, you may not access the stack more than one page below
175
- // the most recently mapped page. To make the allocated area randomly
176
- // accessible, we write to each page in turn (the value is irrelevant).
177
- const int kPageSize = 4 * KB;
178
- for (int offset = slots * kPointerSize - kPageSize;
179
- offset > 0;
180
- offset -= kPageSize) {
181
- __ mov(Operand(esp, offset), eax);
182
- }
230
+ // On windows, you may not access the stack more than one page below
231
+ // the most recently mapped page. To make the allocated area randomly
232
+ // accessible, we write to each page in turn (the value is irrelevant).
233
+ const int kPageSize = 4 * KB;
234
+ for (int offset = slots * kPointerSize - kPageSize;
235
+ offset > 0;
236
+ offset -= kPageSize) {
237
+ __ mov(Operand(esp, offset), eax);
238
+ }
183
239
  #endif
240
+ }
241
+
242
+ if (support_aligned_spilled_doubles_) {
243
+ Comment(";;; Store dynamic frame alignment tag for spilled doubles");
244
+ // Store dynamic frame alignment state in the first local.
245
+ int offset = JavaScriptFrameConstants::kDynamicAlignmentStateOffset;
246
+ if (dynamic_frame_alignment_) {
247
+ __ mov(Operand(ebp, offset), edx);
248
+ } else {
249
+ __ mov(Operand(ebp, offset), Immediate(kNoAlignmentPadding));
250
+ }
251
+ }
252
+ }
253
+
254
+ if (info()->saves_caller_doubles() && CpuFeatures::IsSupported(SSE2)) {
255
+ Comment(";;; Save clobbered callee double registers");
256
+ CpuFeatures::Scope scope(SSE2);
257
+ int count = 0;
258
+ BitVector* doubles = chunk()->allocated_double_registers();
259
+ BitVector::Iterator save_iterator(doubles);
260
+ while (!save_iterator.Done()) {
261
+ __ movdbl(MemOperand(esp, count * kDoubleSize),
262
+ XMMRegister::FromAllocationIndex(save_iterator.Current()));
263
+ save_iterator.Advance();
264
+ count++;
265
+ }
184
266
  }
185
267
  }
186
268
 
187
269
  // Possibly allocate a local context.
188
- int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
270
+ int heap_slots = info_->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
189
271
  if (heap_slots > 0) {
190
272
  Comment(";;; Allocate local context");
191
273
  // Argument to NewContext is the function, which is still in edi.
@@ -225,7 +307,7 @@ bool LCodeGen::GeneratePrologue() {
225
307
  }
226
308
 
227
309
  // Trace the call.
228
- if (FLAG_trace) {
310
+ if (FLAG_trace && info()->IsOptimizing()) {
229
311
  // We have not executed any compiled code yet, so esi still holds the
230
312
  // incoming context.
231
313
  __ CallRuntime(Runtime::kTraceEnter, 0);
@@ -247,7 +329,30 @@ bool LCodeGen::GenerateBody() {
247
329
  }
248
330
 
249
331
  if (emit_instructions) {
250
- Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
332
+ if (FLAG_code_comments) {
333
+ HValue* hydrogen = instr->hydrogen_value();
334
+ if (hydrogen != NULL) {
335
+ if (hydrogen->IsChange()) {
336
+ HValue* changed_value = HChange::cast(hydrogen)->value();
337
+ int use_id = 0;
338
+ const char* use_mnemo = "dead";
339
+ if (hydrogen->UseCount() >= 1) {
340
+ HValue* use_value = hydrogen->uses().value();
341
+ use_id = use_value->id();
342
+ use_mnemo = use_value->Mnemonic();
343
+ }
344
+ Comment(";;; @%d: %s. <of #%d %s for #%d %s>",
345
+ current_instruction_, instr->Mnemonic(),
346
+ changed_value->id(), changed_value->Mnemonic(),
347
+ use_id, use_mnemo);
348
+ } else {
349
+ Comment(";;; @%d: %s. <#%d>", current_instruction_,
350
+ instr->Mnemonic(), hydrogen->id());
351
+ }
352
+ } else {
353
+ Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
354
+ }
355
+ }
251
356
  instr->CompileToNative(this);
252
357
  }
253
358
  }
@@ -256,16 +361,111 @@ bool LCodeGen::GenerateBody() {
256
361
  }
257
362
 
258
363
 
364
+ bool LCodeGen::GenerateJumpTable() {
365
+ Label needs_frame_not_call;
366
+ Label needs_frame_is_call;
367
+ for (int i = 0; i < jump_table_.length(); i++) {
368
+ __ bind(&jump_table_[i].label);
369
+ Address entry = jump_table_[i].address;
370
+ bool is_lazy_deopt = jump_table_[i].is_lazy_deopt;
371
+ Deoptimizer::BailoutType type =
372
+ is_lazy_deopt ? Deoptimizer::LAZY : Deoptimizer::EAGER;
373
+ int id = Deoptimizer::GetDeoptimizationId(entry, type);
374
+ if (id == Deoptimizer::kNotDeoptimizationEntry) {
375
+ Comment(";;; jump table entry %d.", i);
376
+ } else {
377
+ Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id);
378
+ }
379
+ if (jump_table_[i].needs_frame) {
380
+ __ push(Immediate(ExternalReference::ForDeoptEntry(entry)));
381
+ if (is_lazy_deopt) {
382
+ if (needs_frame_is_call.is_bound()) {
383
+ __ jmp(&needs_frame_is_call);
384
+ } else {
385
+ __ bind(&needs_frame_is_call);
386
+ __ push(MemOperand(ebp, StandardFrameConstants::kContextOffset));
387
+ // This variant of deopt can only be used with stubs. Since we don't
388
+ // have a function pointer to install in the stack frame that we're
389
+ // building, install a special marker there instead.
390
+ ASSERT(info()->IsStub());
391
+ __ push(Immediate(Smi::FromInt(StackFrame::STUB)));
392
+ // Push a PC inside the function so that the deopt code can find where
393
+ // the deopt comes from. It doesn't have to be the precise return
394
+ // address of a "calling" LAZY deopt, it only has to be somewhere
395
+ // inside the code body.
396
+ Label push_approx_pc;
397
+ __ call(&push_approx_pc);
398
+ __ bind(&push_approx_pc);
399
+ // Push the continuation which was stashed were the ebp should
400
+ // be. Replace it with the saved ebp.
401
+ __ push(MemOperand(esp, 3 * kPointerSize));
402
+ __ mov(MemOperand(esp, 4 * kPointerSize), ebp);
403
+ __ lea(ebp, MemOperand(esp, 4 * kPointerSize));
404
+ __ ret(0); // Call the continuation without clobbering registers.
405
+ }
406
+ } else {
407
+ if (needs_frame_not_call.is_bound()) {
408
+ __ jmp(&needs_frame_not_call);
409
+ } else {
410
+ __ bind(&needs_frame_not_call);
411
+ __ push(MemOperand(ebp, StandardFrameConstants::kContextOffset));
412
+ // This variant of deopt can only be used with stubs. Since we don't
413
+ // have a function pointer to install in the stack frame that we're
414
+ // building, install a special marker there instead.
415
+ ASSERT(info()->IsStub());
416
+ __ push(Immediate(Smi::FromInt(StackFrame::STUB)));
417
+ // Push the continuation which was stashed were the ebp should
418
+ // be. Replace it with the saved ebp.
419
+ __ push(MemOperand(esp, 2 * kPointerSize));
420
+ __ mov(MemOperand(esp, 3 * kPointerSize), ebp);
421
+ __ lea(ebp, MemOperand(esp, 3 * kPointerSize));
422
+ __ ret(0); // Call the continuation without clobbering registers.
423
+ }
424
+ }
425
+ } else {
426
+ if (is_lazy_deopt) {
427
+ __ call(entry, RelocInfo::RUNTIME_ENTRY);
428
+ } else {
429
+ __ jmp(entry, RelocInfo::RUNTIME_ENTRY);
430
+ }
431
+ }
432
+ }
433
+ return !is_aborted();
434
+ }
435
+
436
+
259
437
  bool LCodeGen::GenerateDeferredCode() {
260
438
  ASSERT(is_generating());
261
439
  if (deferred_.length() > 0) {
262
440
  for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
263
441
  LDeferredCode* code = deferred_[i];
264
442
  __ bind(code->entry());
443
+ if (NeedsDeferredFrame()) {
444
+ Comment(";;; Deferred build frame",
445
+ code->instruction_index(),
446
+ code->instr()->Mnemonic());
447
+ ASSERT(!frame_is_built_);
448
+ ASSERT(info()->IsStub());
449
+ frame_is_built_ = true;
450
+ // Build the frame in such a way that esi isn't trashed.
451
+ __ push(ebp); // Caller's frame pointer.
452
+ __ push(Operand(ebp, StandardFrameConstants::kContextOffset));
453
+ __ push(Immediate(Smi::FromInt(StackFrame::STUB)));
454
+ __ lea(ebp, Operand(esp, 2 * kPointerSize));
455
+ }
265
456
  Comment(";;; Deferred code @%d: %s.",
266
457
  code->instruction_index(),
267
458
  code->instr()->Mnemonic());
268
459
  code->Generate();
460
+ if (NeedsDeferredFrame()) {
461
+ Comment(";;; Deferred destroy frame",
462
+ code->instruction_index(),
463
+ code->instr()->Mnemonic());
464
+ ASSERT(frame_is_built_);
465
+ frame_is_built_ = false;
466
+ __ mov(esp, ebp);
467
+ __ pop(ebp);
468
+ }
269
469
  __ jmp(code->exit());
270
470
  }
271
471
  }
@@ -279,6 +479,15 @@ bool LCodeGen::GenerateDeferredCode() {
279
479
 
280
480
  bool LCodeGen::GenerateSafepointTable() {
281
481
  ASSERT(is_done());
482
+ if (!info()->IsStub()) {
483
+ // For lazy deoptimization we need space to patch a call after every call.
484
+ // Ensure there is always space for such patching, even if the code ends
485
+ // in a call.
486
+ int target_offset = masm()->pc_offset() + Deoptimizer::patch_size();
487
+ while (masm()->pc_offset() < target_offset) {
488
+ masm()->nop();
489
+ }
490
+ }
282
491
  safepoints_.Emit(masm(), GetStackSlotCount());
283
492
  return !is_aborted();
284
493
  }
@@ -294,6 +503,11 @@ XMMRegister LCodeGen::ToDoubleRegister(int index) const {
294
503
  }
295
504
 
296
505
 
506
+ bool LCodeGen::IsX87TopOfStack(LOperand* op) const {
507
+ return op->IsDoubleRegister();
508
+ }
509
+
510
+
297
511
  Register LCodeGen::ToRegister(LOperand* op) const {
298
512
  ASSERT(op->IsRegister());
299
513
  return ToRegister(op->index());
@@ -307,24 +521,22 @@ XMMRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
307
521
 
308
522
 
309
523
  int LCodeGen::ToInteger32(LConstantOperand* op) const {
310
- Handle<Object> value = chunk_->LookupLiteral(op);
311
- ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
312
- ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
313
- value->Number());
314
- return static_cast<int32_t>(value->Number());
524
+ HConstant* constant = chunk_->LookupConstant(op);
525
+ return constant->Integer32Value();
315
526
  }
316
527
 
317
528
 
318
529
  Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
319
- Handle<Object> literal = chunk_->LookupLiteral(op);
530
+ HConstant* constant = chunk_->LookupConstant(op);
320
531
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsTagged());
321
- return literal;
532
+ return constant->handle();
322
533
  }
323
534
 
324
535
 
325
536
  double LCodeGen::ToDouble(LConstantOperand* op) const {
326
- Handle<Object> value = chunk_->LookupLiteral(op);
327
- return value->Number();
537
+ HConstant* constant = chunk_->LookupConstant(op);
538
+ ASSERT(constant->HasDoubleValue());
539
+ return constant->DoubleValue();
328
540
  }
329
541
 
330
542
 
@@ -358,7 +570,9 @@ Operand LCodeGen::HighOperand(LOperand* op) {
358
570
 
359
571
 
360
572
  void LCodeGen::WriteTranslation(LEnvironment* environment,
361
- Translation* translation) {
573
+ Translation* translation,
574
+ int* arguments_index,
575
+ int* arguments_count) {
362
576
  if (environment == NULL) return;
363
577
 
364
578
  // The translation includes one command per value in the environment.
@@ -366,8 +580,22 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
366
580
  // The output frame height does not include the parameters.
367
581
  int height = translation_size - environment->parameter_count();
368
582
 
369
- WriteTranslation(environment->outer(), translation);
370
- int closure_id = DefineDeoptimizationLiteral(environment->closure());
583
+ // Function parameters are arguments to the outermost environment. The
584
+ // arguments index points to the first element of a sequence of tagged
585
+ // values on the stack that represent the arguments. This needs to be
586
+ // kept in sync with the LArgumentsElements implementation.
587
+ *arguments_index = -environment->parameter_count();
588
+ *arguments_count = environment->parameter_count();
589
+
590
+ WriteTranslation(environment->outer(),
591
+ translation,
592
+ arguments_index,
593
+ arguments_count);
594
+ bool has_closure_id = !info()->closure().is_null() &&
595
+ *info()->closure() != *environment->closure();
596
+ int closure_id = has_closure_id
597
+ ? DefineDeoptimizationLiteral(environment->closure())
598
+ : Translation::kSelfLiteralId;
371
599
  switch (environment->frame_type()) {
372
600
  case JS_FUNCTION:
373
601
  translation->BeginJSFrame(environment->ast_id(), closure_id, height);
@@ -375,12 +603,36 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
375
603
  case JS_CONSTRUCT:
376
604
  translation->BeginConstructStubFrame(closure_id, translation_size);
377
605
  break;
606
+ case JS_GETTER:
607
+ ASSERT(translation_size == 1);
608
+ ASSERT(height == 0);
609
+ translation->BeginGetterStubFrame(closure_id);
610
+ break;
611
+ case JS_SETTER:
612
+ ASSERT(translation_size == 2);
613
+ ASSERT(height == 0);
614
+ translation->BeginSetterStubFrame(closure_id);
615
+ break;
378
616
  case ARGUMENTS_ADAPTOR:
379
617
  translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
380
618
  break;
619
+ case STUB:
620
+ translation->BeginCompiledStubFrame();
621
+ break;
381
622
  default:
382
623
  UNREACHABLE();
383
624
  }
625
+
626
+ // Inlined frames which push their arguments cause the index to be
627
+ // bumped and another stack area to be used for materialization.
628
+ if (environment->entry() != NULL &&
629
+ environment->entry()->arguments_pushed()) {
630
+ *arguments_index = *arguments_index < 0
631
+ ? GetStackSlotCount()
632
+ : *arguments_index + *arguments_count;
633
+ *arguments_count = environment->entry()->arguments_count() + 1;
634
+ }
635
+
384
636
  for (int i = 0; i < translation_size; ++i) {
385
637
  LOperand* value = environment->values()->at(i);
386
638
  // spilled_registers_ and spilled_double_registers_ are either
@@ -391,7 +643,10 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
391
643
  translation->MarkDuplicate();
392
644
  AddToTranslation(translation,
393
645
  environment->spilled_registers()[value->index()],
394
- environment->HasTaggedValueAt(i));
646
+ environment->HasTaggedValueAt(i),
647
+ environment->HasUint32ValueAt(i),
648
+ *arguments_index,
649
+ *arguments_count);
395
650
  } else if (
396
651
  value->IsDoubleRegister() &&
397
652
  environment->spilled_double_registers()[value->index()] != NULL) {
@@ -399,26 +654,39 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
399
654
  AddToTranslation(
400
655
  translation,
401
656
  environment->spilled_double_registers()[value->index()],
402
- false);
657
+ false,
658
+ false,
659
+ *arguments_index,
660
+ *arguments_count);
403
661
  }
404
662
  }
405
663
 
406
- AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
664
+ AddToTranslation(translation,
665
+ value,
666
+ environment->HasTaggedValueAt(i),
667
+ environment->HasUint32ValueAt(i),
668
+ *arguments_index,
669
+ *arguments_count);
407
670
  }
408
671
  }
409
672
 
410
673
 
411
674
  void LCodeGen::AddToTranslation(Translation* translation,
412
675
  LOperand* op,
413
- bool is_tagged) {
676
+ bool is_tagged,
677
+ bool is_uint32,
678
+ int arguments_index,
679
+ int arguments_count) {
414
680
  if (op == NULL) {
415
681
  // TODO(twuerthinger): Introduce marker operands to indicate that this value
416
682
  // is not present and must be reconstructed from the deoptimizer. Currently
417
683
  // this is only used for the arguments object.
418
- translation->StoreArgumentsObject();
684
+ translation->StoreArgumentsObject(arguments_index, arguments_count);
419
685
  } else if (op->IsStackSlot()) {
420
686
  if (is_tagged) {
421
687
  translation->StoreStackSlot(op->index());
688
+ } else if (is_uint32) {
689
+ translation->StoreUint32StackSlot(op->index());
422
690
  } else {
423
691
  translation->StoreInt32StackSlot(op->index());
424
692
  }
@@ -432,6 +700,8 @@ void LCodeGen::AddToTranslation(Translation* translation,
432
700
  Register reg = ToRegister(op);
433
701
  if (is_tagged) {
434
702
  translation->StoreRegister(reg);
703
+ } else if (is_uint32) {
704
+ translation->StoreUint32Register(reg);
435
705
  } else {
436
706
  translation->StoreInt32Register(reg);
437
707
  }
@@ -439,8 +709,8 @@ void LCodeGen::AddToTranslation(Translation* translation,
439
709
  XMMRegister reg = ToDoubleRegister(op);
440
710
  translation->StoreDoubleRegister(reg);
441
711
  } else if (op->IsConstantOperand()) {
442
- Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
443
- int src_index = DefineDeoptimizationLiteral(literal);
712
+ HConstant* constant = chunk()->LookupConstant(LConstantOperand::cast(op));
713
+ int src_index = DefineDeoptimizationLiteral(constant->handle());
444
714
  translation->StoreLiteral(src_index);
445
715
  } else {
446
716
  UNREACHABLE();
@@ -485,13 +755,12 @@ void LCodeGen::CallRuntime(const Runtime::Function* fun,
485
755
  __ CallRuntime(fun, argc);
486
756
 
487
757
  RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
758
+
759
+ ASSERT(info()->is_calling());
488
760
  }
489
761
 
490
762
 
491
- void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
492
- int argc,
493
- LInstruction* instr,
494
- LOperand* context) {
763
+ void LCodeGen::LoadContextFromDeferred(LOperand* context) {
495
764
  if (context->IsRegister()) {
496
765
  if (!ToRegister(context).is(esi)) {
497
766
  __ mov(esi, ToRegister(context));
@@ -499,16 +768,25 @@ void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
499
768
  } else if (context->IsStackSlot()) {
500
769
  __ mov(esi, ToOperand(context));
501
770
  } else if (context->IsConstantOperand()) {
502
- Handle<Object> literal =
503
- chunk_->LookupLiteral(LConstantOperand::cast(context));
504
- __ LoadHeapObject(esi, Handle<Context>::cast(literal));
771
+ HConstant* constant =
772
+ chunk_->LookupConstant(LConstantOperand::cast(context));
773
+ __ LoadHeapObject(esi, Handle<Context>::cast(constant->handle()));
505
774
  } else {
506
775
  UNREACHABLE();
507
776
  }
777
+ }
778
+
779
+ void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
780
+ int argc,
781
+ LInstruction* instr,
782
+ LOperand* context) {
783
+ LoadContextFromDeferred(context);
508
784
 
509
785
  __ CallRuntimeSaveDoubles(id);
510
786
  RecordSafepointWithRegisters(
511
787
  instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);
788
+
789
+ ASSERT(info()->is_calling());
512
790
  }
513
791
 
514
792
 
@@ -530,20 +808,22 @@ void LCodeGen::RegisterEnvironmentForDeoptimization(
530
808
 
531
809
  int frame_count = 0;
532
810
  int jsframe_count = 0;
811
+ int args_index = 0;
812
+ int args_count = 0;
533
813
  for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
534
814
  ++frame_count;
535
815
  if (e->frame_type() == JS_FUNCTION) {
536
816
  ++jsframe_count;
537
817
  }
538
818
  }
539
- Translation translation(&translations_, frame_count, jsframe_count);
540
- WriteTranslation(environment, &translation);
819
+ Translation translation(&translations_, frame_count, jsframe_count, zone());
820
+ WriteTranslation(environment, &translation, &args_index, &args_count);
541
821
  int deoptimization_index = deoptimizations_.length();
542
822
  int pc_offset = masm()->pc_offset();
543
823
  environment->Register(deoptimization_index,
544
824
  translation.index(),
545
825
  (mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
546
- deoptimizations_.Add(environment);
826
+ deoptimizations_.Add(environment, zone());
547
827
  }
548
828
  }
549
829
 
@@ -552,7 +832,11 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
552
832
  RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
553
833
  ASSERT(environment->HasBeenRegistered());
554
834
  int id = environment->deoptimization_index();
555
- Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
835
+ ASSERT(info()->IsOptimizing() || info()->IsStub());
836
+ Deoptimizer::BailoutType bailout_type = info()->IsStub()
837
+ ? Deoptimizer::LAZY
838
+ : Deoptimizer::EAGER;
839
+ Address entry = Deoptimizer::GetDeoptimizationEntry(id, bailout_type);
556
840
  if (entry == NULL) {
557
841
  Abort("bailout was not prepared");
558
842
  return;
@@ -565,37 +849,65 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
565
849
  __ push(eax);
566
850
  __ push(ebx);
567
851
  __ mov(ebx, shared);
568
- __ mov(eax, FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset));
852
+ __ mov(eax,
853
+ FieldOperand(ebx, SharedFunctionInfo::kStressDeoptCounterOffset));
569
854
  __ sub(Operand(eax), Immediate(Smi::FromInt(1)));
570
855
  __ j(not_zero, &no_deopt, Label::kNear);
571
856
  if (FLAG_trap_on_deopt) __ int3();
572
857
  __ mov(eax, Immediate(Smi::FromInt(FLAG_deopt_every_n_times)));
573
- __ mov(FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset), eax);
858
+ __ mov(FieldOperand(ebx, SharedFunctionInfo::kStressDeoptCounterOffset),
859
+ eax);
574
860
  __ pop(ebx);
575
861
  __ pop(eax);
576
862
  __ popfd();
577
863
  __ jmp(entry, RelocInfo::RUNTIME_ENTRY);
578
864
 
579
865
  __ bind(&no_deopt);
580
- __ mov(FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset), eax);
866
+ __ mov(FieldOperand(ebx, SharedFunctionInfo::kStressDeoptCounterOffset),
867
+ eax);
581
868
  __ pop(ebx);
582
869
  __ pop(eax);
583
870
  __ popfd();
584
871
  }
585
872
 
873
+ ASSERT(info()->IsStub() || frame_is_built_);
874
+ bool lazy_deopt_needed = info()->IsStub();
586
875
  if (cc == no_condition) {
587
876
  if (FLAG_trap_on_deopt) __ int3();
588
- __ jmp(entry, RelocInfo::RUNTIME_ENTRY);
877
+ if (lazy_deopt_needed) {
878
+ __ call(entry, RelocInfo::RUNTIME_ENTRY);
879
+ } else {
880
+ __ jmp(entry, RelocInfo::RUNTIME_ENTRY);
881
+ }
589
882
  } else {
883
+ Label done;
590
884
  if (FLAG_trap_on_deopt) {
591
- Label done;
592
885
  __ j(NegateCondition(cc), &done, Label::kNear);
593
886
  __ int3();
594
- __ jmp(entry, RelocInfo::RUNTIME_ENTRY);
595
- __ bind(&done);
887
+ }
888
+ if (!lazy_deopt_needed && frame_is_built_) {
889
+ if (FLAG_trap_on_deopt) {
890
+ __ jmp(entry, RelocInfo::RUNTIME_ENTRY);
891
+ } else {
892
+ __ j(cc, entry, RelocInfo::RUNTIME_ENTRY);
893
+ }
596
894
  } else {
597
- __ j(cc, entry, RelocInfo::RUNTIME_ENTRY);
895
+ // We often have several deopts to the same entry, reuse the last
896
+ // jump entry if this is the case.
897
+ if (jump_table_.is_empty() ||
898
+ jump_table_.last().address != entry ||
899
+ jump_table_.last().needs_frame != !frame_is_built_ ||
900
+ jump_table_.last().is_lazy_deopt != lazy_deopt_needed) {
901
+ JumpTableEntry table_entry(entry, !frame_is_built_, lazy_deopt_needed);
902
+ jump_table_.Add(table_entry, zone());
903
+ }
904
+ if (FLAG_trap_on_deopt) {
905
+ __ jmp(&jump_table_.last().label);
906
+ } else {
907
+ __ j(cc, &jump_table_.last().label);
908
+ }
598
909
  }
910
+ __ bind(&done);
599
911
  }
600
912
  }
601
913
 
@@ -617,13 +929,13 @@ void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
617
929
  }
618
930
  data->SetLiteralArray(*literals);
619
931
 
620
- data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
932
+ data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id().ToInt()));
621
933
  data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
622
934
 
623
935
  // Populate the deoptimization entries.
624
936
  for (int i = 0; i < length; i++) {
625
937
  LEnvironment* env = deoptimizations_[i];
626
- data->SetAstId(i, Smi::FromInt(env->ast_id()));
938
+ data->SetAstId(i, env->ast_id());
627
939
  data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
628
940
  data->SetArgumentsStackHeight(i,
629
941
  Smi::FromInt(env->arguments_stack_height()));
@@ -638,7 +950,7 @@ int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
638
950
  for (int i = 0; i < deoptimization_literals_.length(); ++i) {
639
951
  if (deoptimization_literals_[i].is_identical_to(literal)) return i;
640
952
  }
641
- deoptimization_literals_.Add(literal);
953
+ deoptimization_literals_.Add(literal, zone());
642
954
  return result;
643
955
  }
644
956
 
@@ -683,9 +995,9 @@ void LCodeGen::RecordSafepoint(
683
995
  for (int i = 0; i < operands->length(); i++) {
684
996
  LOperand* pointer = operands->at(i);
685
997
  if (pointer->IsStackSlot()) {
686
- safepoint.DefinePointerSlot(pointer->index());
998
+ safepoint.DefinePointerSlot(pointer->index(), zone());
687
999
  } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
688
- safepoint.DefinePointerRegister(ToRegister(pointer));
1000
+ safepoint.DefinePointerRegister(ToRegister(pointer), zone());
689
1001
  }
690
1002
  }
691
1003
  }
@@ -698,7 +1010,7 @@ void LCodeGen::RecordSafepoint(LPointerMap* pointers,
698
1010
 
699
1011
 
700
1012
  void LCodeGen::RecordSafepoint(Safepoint::DeoptMode mode) {
701
- LPointerMap empty_pointers(RelocInfo::kNoPosition);
1013
+ LPointerMap empty_pointers(RelocInfo::kNoPosition, zone());
702
1014
  RecordSafepoint(&empty_pointers, mode);
703
1015
  }
704
1016
 
@@ -807,7 +1119,7 @@ void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
807
1119
 
808
1120
  void LCodeGen::DoModI(LModI* instr) {
809
1121
  if (instr->hydrogen()->HasPowerOf2Divisor()) {
810
- Register dividend = ToRegister(instr->InputAt(0));
1122
+ Register dividend = ToRegister(instr->left());
811
1123
 
812
1124
  int32_t divisor =
813
1125
  HConstant::cast(instr->hydrogen()->right())->Integer32Value();
@@ -831,8 +1143,8 @@ void LCodeGen::DoModI(LModI* instr) {
831
1143
  __ bind(&done);
832
1144
  } else {
833
1145
  Label done, remainder_eq_dividend, slow, do_subtraction, both_positive;
834
- Register left_reg = ToRegister(instr->InputAt(0));
835
- Register right_reg = ToRegister(instr->InputAt(1));
1146
+ Register left_reg = ToRegister(instr->left());
1147
+ Register right_reg = ToRegister(instr->right());
836
1148
  Register result_reg = ToRegister(instr->result());
837
1149
 
838
1150
  ASSERT(left_reg.is(eax));
@@ -862,7 +1174,7 @@ void LCodeGen::DoModI(LModI* instr) {
862
1174
  __ j(less, &remainder_eq_dividend, Label::kNear);
863
1175
 
864
1176
  // Check if the divisor is a PowerOfTwo integer.
865
- Register scratch = ToRegister(instr->TempAt(0));
1177
+ Register scratch = ToRegister(instr->temp());
866
1178
  __ mov(scratch, right_reg);
867
1179
  __ sub(Operand(scratch), Immediate(1));
868
1180
  __ test(scratch, Operand(right_reg));
@@ -885,6 +1197,17 @@ void LCodeGen::DoModI(LModI* instr) {
885
1197
 
886
1198
  // Slow case, using idiv instruction.
887
1199
  __ bind(&slow);
1200
+
1201
+ // Check for (kMinInt % -1).
1202
+ if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
1203
+ Label left_not_min_int;
1204
+ __ cmp(left_reg, kMinInt);
1205
+ __ j(not_zero, &left_not_min_int, Label::kNear);
1206
+ __ cmp(right_reg, -1);
1207
+ DeoptimizeIf(zero, instr->environment());
1208
+ __ bind(&left_not_min_int);
1209
+ }
1210
+
888
1211
  // Sign extend to edx.
889
1212
  __ cdq();
890
1213
 
@@ -918,23 +1241,60 @@ void LCodeGen::DoModI(LModI* instr) {
918
1241
 
919
1242
 
920
1243
  void LCodeGen::DoDivI(LDivI* instr) {
921
- LOperand* right = instr->InputAt(1);
1244
+ if (!instr->is_flooring() && instr->hydrogen()->HasPowerOf2Divisor()) {
1245
+ Register dividend = ToRegister(instr->left());
1246
+ int32_t divisor =
1247
+ HConstant::cast(instr->hydrogen()->right())->Integer32Value();
1248
+ int32_t test_value = 0;
1249
+ int32_t power = 0;
1250
+
1251
+ if (divisor > 0) {
1252
+ test_value = divisor - 1;
1253
+ power = WhichPowerOf2(divisor);
1254
+ } else {
1255
+ // Check for (0 / -x) that will produce negative zero.
1256
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
1257
+ __ test(dividend, Operand(dividend));
1258
+ DeoptimizeIf(zero, instr->environment());
1259
+ }
1260
+ // Check for (kMinInt / -1).
1261
+ if (divisor == -1 && instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
1262
+ __ cmp(dividend, kMinInt);
1263
+ DeoptimizeIf(zero, instr->environment());
1264
+ }
1265
+ test_value = - divisor - 1;
1266
+ power = WhichPowerOf2(-divisor);
1267
+ }
1268
+
1269
+ if (test_value != 0) {
1270
+ // Deoptimize if remainder is not 0.
1271
+ __ test(dividend, Immediate(test_value));
1272
+ DeoptimizeIf(not_zero, instr->environment());
1273
+ __ sar(dividend, power);
1274
+ }
1275
+
1276
+ if (divisor < 0) __ neg(dividend);
1277
+
1278
+ return;
1279
+ }
1280
+
1281
+ LOperand* right = instr->right();
922
1282
  ASSERT(ToRegister(instr->result()).is(eax));
923
- ASSERT(ToRegister(instr->InputAt(0)).is(eax));
924
- ASSERT(!ToRegister(instr->InputAt(1)).is(eax));
925
- ASSERT(!ToRegister(instr->InputAt(1)).is(edx));
1283
+ ASSERT(ToRegister(instr->left()).is(eax));
1284
+ ASSERT(!ToRegister(instr->right()).is(eax));
1285
+ ASSERT(!ToRegister(instr->right()).is(edx));
926
1286
 
927
1287
  Register left_reg = eax;
928
1288
 
929
1289
  // Check for x / 0.
930
1290
  Register right_reg = ToRegister(right);
931
- if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
1291
+ if (instr->hydrogen_value()->CheckFlag(HValue::kCanBeDivByZero)) {
932
1292
  __ test(right_reg, ToOperand(right));
933
1293
  DeoptimizeIf(zero, instr->environment());
934
1294
  }
935
1295
 
936
1296
  // Check for (0 / -x) that will produce negative zero.
937
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
1297
+ if (instr->hydrogen_value()->CheckFlag(HValue::kBailoutOnMinusZero)) {
938
1298
  Label left_not_zero;
939
1299
  __ test(left_reg, Operand(left_reg));
940
1300
  __ j(not_zero, &left_not_zero, Label::kNear);
@@ -943,8 +1303,8 @@ void LCodeGen::DoDivI(LDivI* instr) {
943
1303
  __ bind(&left_not_zero);
944
1304
  }
945
1305
 
946
- // Check for (-kMinInt / -1).
947
- if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
1306
+ // Check for (kMinInt / -1).
1307
+ if (instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)) {
948
1308
  Label left_not_min_int;
949
1309
  __ cmp(left_reg, kMinInt);
950
1310
  __ j(not_zero, &left_not_min_int, Label::kNear);
@@ -957,18 +1317,131 @@ void LCodeGen::DoDivI(LDivI* instr) {
957
1317
  __ cdq();
958
1318
  __ idiv(right_reg);
959
1319
 
960
- // Deoptimize if remainder is not 0.
961
- __ test(edx, Operand(edx));
962
- DeoptimizeIf(not_zero, instr->environment());
1320
+ if (!instr->is_flooring()) {
1321
+ // Deoptimize if remainder is not 0.
1322
+ __ test(edx, Operand(edx));
1323
+ DeoptimizeIf(not_zero, instr->environment());
1324
+ } else {
1325
+ Label done;
1326
+ __ test(edx, edx);
1327
+ __ j(zero, &done, Label::kNear);
1328
+ __ xor_(edx, right_reg);
1329
+ __ sar(edx, 31);
1330
+ __ add(eax, edx);
1331
+ __ bind(&done);
1332
+ }
1333
+ }
1334
+
1335
+
1336
+ void LCodeGen::DoMathFloorOfDiv(LMathFloorOfDiv* instr) {
1337
+ ASSERT(instr->right()->IsConstantOperand());
1338
+
1339
+ Register dividend = ToRegister(instr->left());
1340
+ int32_t divisor = ToInteger32(LConstantOperand::cast(instr->right()));
1341
+ Register result = ToRegister(instr->result());
1342
+
1343
+ switch (divisor) {
1344
+ case 0:
1345
+ DeoptimizeIf(no_condition, instr->environment());
1346
+ return;
1347
+
1348
+ case 1:
1349
+ __ Move(result, dividend);
1350
+ return;
1351
+
1352
+ case -1:
1353
+ __ Move(result, dividend);
1354
+ __ neg(result);
1355
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
1356
+ DeoptimizeIf(zero, instr->environment());
1357
+ }
1358
+ if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
1359
+ DeoptimizeIf(overflow, instr->environment());
1360
+ }
1361
+ return;
1362
+ }
1363
+
1364
+ uint32_t divisor_abs = abs(divisor);
1365
+ if (IsPowerOf2(divisor_abs)) {
1366
+ int32_t power = WhichPowerOf2(divisor_abs);
1367
+ if (divisor < 0) {
1368
+ // Input[dividend] is clobbered.
1369
+ // The sequence is tedious because neg(dividend) might overflow.
1370
+ __ mov(result, dividend);
1371
+ __ sar(dividend, 31);
1372
+ __ neg(result);
1373
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
1374
+ DeoptimizeIf(zero, instr->environment());
1375
+ }
1376
+ __ shl(dividend, 32 - power);
1377
+ __ sar(result, power);
1378
+ __ not_(dividend);
1379
+ // Clear result.sign if dividend.sign is set.
1380
+ __ and_(result, dividend);
1381
+ } else {
1382
+ __ Move(result, dividend);
1383
+ __ sar(result, power);
1384
+ }
1385
+ } else {
1386
+ ASSERT(ToRegister(instr->left()).is(eax));
1387
+ ASSERT(ToRegister(instr->result()).is(edx));
1388
+ Register scratch = ToRegister(instr->temp());
1389
+
1390
+ // Find b which: 2^b < divisor_abs < 2^(b+1).
1391
+ unsigned b = 31 - CompilerIntrinsics::CountLeadingZeros(divisor_abs);
1392
+ unsigned shift = 32 + b; // Precision +1bit (effectively).
1393
+ double multiplier_f =
1394
+ static_cast<double>(static_cast<uint64_t>(1) << shift) / divisor_abs;
1395
+ int64_t multiplier;
1396
+ if (multiplier_f - floor(multiplier_f) < 0.5) {
1397
+ multiplier = static_cast<int64_t>(floor(multiplier_f));
1398
+ } else {
1399
+ multiplier = static_cast<int64_t>(floor(multiplier_f)) + 1;
1400
+ }
1401
+ // The multiplier is a uint32.
1402
+ ASSERT(multiplier > 0 &&
1403
+ multiplier < (static_cast<int64_t>(1) << 32));
1404
+ __ mov(scratch, dividend);
1405
+ if (divisor < 0 &&
1406
+ instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
1407
+ __ test(dividend, dividend);
1408
+ DeoptimizeIf(zero, instr->environment());
1409
+ }
1410
+ __ mov(edx, static_cast<int32_t>(multiplier));
1411
+ __ imul(edx);
1412
+ if (static_cast<int32_t>(multiplier) < 0) {
1413
+ __ add(edx, scratch);
1414
+ }
1415
+ Register reg_lo = eax;
1416
+ Register reg_byte_scratch = scratch;
1417
+ if (!reg_byte_scratch.is_byte_register()) {
1418
+ __ xchg(reg_lo, reg_byte_scratch);
1419
+ reg_lo = scratch;
1420
+ reg_byte_scratch = eax;
1421
+ }
1422
+ if (divisor < 0) {
1423
+ __ xor_(reg_byte_scratch, reg_byte_scratch);
1424
+ __ cmp(reg_lo, 0x40000000);
1425
+ __ setcc(above, reg_byte_scratch);
1426
+ __ neg(edx);
1427
+ __ sub(edx, reg_byte_scratch);
1428
+ } else {
1429
+ __ xor_(reg_byte_scratch, reg_byte_scratch);
1430
+ __ cmp(reg_lo, 0xC0000000);
1431
+ __ setcc(above_equal, reg_byte_scratch);
1432
+ __ add(edx, reg_byte_scratch);
1433
+ }
1434
+ __ sar(edx, shift - 32);
1435
+ }
963
1436
  }
964
1437
 
965
1438
 
966
1439
  void LCodeGen::DoMulI(LMulI* instr) {
967
- Register left = ToRegister(instr->InputAt(0));
968
- LOperand* right = instr->InputAt(1);
1440
+ Register left = ToRegister(instr->left());
1441
+ LOperand* right = instr->right();
969
1442
 
970
1443
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
971
- __ mov(ToRegister(instr->TempAt(0)), left);
1444
+ __ mov(ToRegister(instr->temp()), left);
972
1445
  }
973
1446
 
974
1447
  if (right->IsConstantOperand()) {
@@ -1029,12 +1502,15 @@ void LCodeGen::DoMulI(LMulI* instr) {
1029
1502
  __ test(left, Operand(left));
1030
1503
  __ j(not_zero, &done, Label::kNear);
1031
1504
  if (right->IsConstantOperand()) {
1032
- if (ToInteger32(LConstantOperand::cast(right)) <= 0) {
1505
+ if (ToInteger32(LConstantOperand::cast(right)) < 0) {
1033
1506
  DeoptimizeIf(no_condition, instr->environment());
1507
+ } else if (ToInteger32(LConstantOperand::cast(right)) == 0) {
1508
+ __ cmp(ToRegister(instr->temp()), Immediate(0));
1509
+ DeoptimizeIf(less, instr->environment());
1034
1510
  }
1035
1511
  } else {
1036
1512
  // Test the non-zero operand for negative sign.
1037
- __ or_(ToRegister(instr->TempAt(0)), ToOperand(right));
1513
+ __ or_(ToRegister(instr->temp()), ToOperand(right));
1038
1514
  DeoptimizeIf(sign, instr->environment());
1039
1515
  }
1040
1516
  __ bind(&done);
@@ -1043,8 +1519,8 @@ void LCodeGen::DoMulI(LMulI* instr) {
1043
1519
 
1044
1520
 
1045
1521
  void LCodeGen::DoBitI(LBitI* instr) {
1046
- LOperand* left = instr->InputAt(0);
1047
- LOperand* right = instr->InputAt(1);
1522
+ LOperand* left = instr->left();
1523
+ LOperand* right = instr->right();
1048
1524
  ASSERT(left->Equals(instr->result()));
1049
1525
  ASSERT(left->IsRegister());
1050
1526
 
@@ -1084,14 +1560,21 @@ void LCodeGen::DoBitI(LBitI* instr) {
1084
1560
 
1085
1561
 
1086
1562
  void LCodeGen::DoShiftI(LShiftI* instr) {
1087
- LOperand* left = instr->InputAt(0);
1088
- LOperand* right = instr->InputAt(1);
1563
+ LOperand* left = instr->left();
1564
+ LOperand* right = instr->right();
1089
1565
  ASSERT(left->Equals(instr->result()));
1090
1566
  ASSERT(left->IsRegister());
1091
1567
  if (right->IsRegister()) {
1092
1568
  ASSERT(ToRegister(right).is(ecx));
1093
1569
 
1094
1570
  switch (instr->op()) {
1571
+ case Token::ROR:
1572
+ __ ror_cl(ToRegister(left));
1573
+ if (instr->can_deopt()) {
1574
+ __ test(ToRegister(left), Immediate(0x80000000));
1575
+ DeoptimizeIf(not_zero, instr->environment());
1576
+ }
1577
+ break;
1095
1578
  case Token::SAR:
1096
1579
  __ sar_cl(ToRegister(left));
1097
1580
  break;
@@ -1113,6 +1596,14 @@ void LCodeGen::DoShiftI(LShiftI* instr) {
1113
1596
  int value = ToInteger32(LConstantOperand::cast(right));
1114
1597
  uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
1115
1598
  switch (instr->op()) {
1599
+ case Token::ROR:
1600
+ if (shift_count == 0 && instr->can_deopt()) {
1601
+ __ test(ToRegister(left), Immediate(0x80000000));
1602
+ DeoptimizeIf(not_zero, instr->environment());
1603
+ } else {
1604
+ __ ror(ToRegister(left), shift_count);
1605
+ }
1606
+ break;
1116
1607
  case Token::SAR:
1117
1608
  if (shift_count != 0) {
1118
1609
  __ sar(ToRegister(left), shift_count);
@@ -1140,8 +1631,8 @@ void LCodeGen::DoShiftI(LShiftI* instr) {
1140
1631
 
1141
1632
 
1142
1633
  void LCodeGen::DoSubI(LSubI* instr) {
1143
- LOperand* left = instr->InputAt(0);
1144
- LOperand* right = instr->InputAt(1);
1634
+ LOperand* left = instr->left();
1635
+ LOperand* right = instr->right();
1145
1636
  ASSERT(left->Equals(instr->result()));
1146
1637
 
1147
1638
  if (right->IsConstantOperand()) {
@@ -1170,12 +1661,13 @@ void LCodeGen::DoConstantD(LConstantD* instr) {
1170
1661
  if (BitCast<uint64_t, double>(v) == 0) {
1171
1662
  __ xorps(res, res);
1172
1663
  } else {
1173
- Register temp = ToRegister(instr->TempAt(0));
1664
+ Register temp = ToRegister(instr->temp());
1174
1665
  uint64_t int_val = BitCast<uint64_t, double>(v);
1175
1666
  int32_t lower = static_cast<int32_t>(int_val);
1176
1667
  int32_t upper = static_cast<int32_t>(int_val >> (kBitsPerInt));
1177
1668
  if (CpuFeatures::IsSupported(SSE4_1)) {
1178
- CpuFeatures::Scope scope(SSE4_1);
1669
+ CpuFeatures::Scope scope1(SSE2);
1670
+ CpuFeatures::Scope scope2(SSE4_1);
1179
1671
  if (lower != 0) {
1180
1672
  __ Set(temp, Immediate(lower));
1181
1673
  __ movd(res, Operand(temp));
@@ -1187,6 +1679,7 @@ void LCodeGen::DoConstantD(LConstantD* instr) {
1187
1679
  __ pinsrd(res, Operand(temp), 1);
1188
1680
  }
1189
1681
  } else {
1682
+ CpuFeatures::Scope scope(SSE2);
1190
1683
  __ Set(temp, Immediate(upper));
1191
1684
  __ movd(res, Operand(temp));
1192
1685
  __ psllq(res, 32);
@@ -1213,7 +1706,7 @@ void LCodeGen::DoConstantT(LConstantT* instr) {
1213
1706
 
1214
1707
  void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
1215
1708
  Register result = ToRegister(instr->result());
1216
- Register array = ToRegister(instr->InputAt(0));
1709
+ Register array = ToRegister(instr->value());
1217
1710
  __ mov(result, FieldOperand(array, JSArray::kLengthOffset));
1218
1711
  }
1219
1712
 
@@ -1221,14 +1714,21 @@ void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
1221
1714
  void LCodeGen::DoFixedArrayBaseLength(
1222
1715
  LFixedArrayBaseLength* instr) {
1223
1716
  Register result = ToRegister(instr->result());
1224
- Register array = ToRegister(instr->InputAt(0));
1717
+ Register array = ToRegister(instr->value());
1225
1718
  __ mov(result, FieldOperand(array, FixedArrayBase::kLengthOffset));
1226
1719
  }
1227
1720
 
1228
1721
 
1722
+ void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) {
1723
+ Register result = ToRegister(instr->result());
1724
+ Register map = ToRegister(instr->value());
1725
+ __ EnumLength(result, map);
1726
+ }
1727
+
1728
+
1229
1729
  void LCodeGen::DoElementsKind(LElementsKind* instr) {
1230
1730
  Register result = ToRegister(instr->result());
1231
- Register input = ToRegister(instr->InputAt(0));
1731
+ Register input = ToRegister(instr->value());
1232
1732
 
1233
1733
  // Load map into |result|.
1234
1734
  __ mov(result, FieldOperand(input, HeapObject::kMapOffset));
@@ -1242,9 +1742,9 @@ void LCodeGen::DoElementsKind(LElementsKind* instr) {
1242
1742
 
1243
1743
 
1244
1744
  void LCodeGen::DoValueOf(LValueOf* instr) {
1245
- Register input = ToRegister(instr->InputAt(0));
1745
+ Register input = ToRegister(instr->value());
1246
1746
  Register result = ToRegister(instr->result());
1247
- Register map = ToRegister(instr->TempAt(0));
1747
+ Register map = ToRegister(instr->temp());
1248
1748
  ASSERT(input.is(result));
1249
1749
 
1250
1750
  Label done;
@@ -1261,19 +1761,18 @@ void LCodeGen::DoValueOf(LValueOf* instr) {
1261
1761
 
1262
1762
 
1263
1763
  void LCodeGen::DoDateField(LDateField* instr) {
1264
- Register object = ToRegister(instr->InputAt(0));
1764
+ Register object = ToRegister(instr->date());
1265
1765
  Register result = ToRegister(instr->result());
1266
- Register scratch = ToRegister(instr->TempAt(0));
1766
+ Register scratch = ToRegister(instr->temp());
1267
1767
  Smi* index = instr->index();
1268
1768
  Label runtime, done;
1269
1769
  ASSERT(object.is(result));
1270
1770
  ASSERT(object.is(eax));
1271
1771
 
1272
- #ifdef DEBUG
1273
- __ AbortIfSmi(object);
1772
+ __ test(object, Immediate(kSmiTagMask));
1773
+ DeoptimizeIf(zero, instr->environment());
1274
1774
  __ CmpObjectType(object, JS_DATE_TYPE, scratch);
1275
- __ Assert(equal, "Trying to get date field from non-date.");
1276
- #endif
1775
+ DeoptimizeIf(not_equal, instr->environment());
1277
1776
 
1278
1777
  if (index->value() == 0) {
1279
1778
  __ mov(result, FieldOperand(object, JSDate::kValueOffset));
@@ -1297,8 +1796,17 @@ void LCodeGen::DoDateField(LDateField* instr) {
1297
1796
  }
1298
1797
 
1299
1798
 
1799
+ void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) {
1800
+ SeqStringSetCharGenerator::Generate(masm(),
1801
+ instr->encoding(),
1802
+ ToRegister(instr->string()),
1803
+ ToRegister(instr->index()),
1804
+ ToRegister(instr->value()));
1805
+ }
1806
+
1807
+
1300
1808
  void LCodeGen::DoBitNotI(LBitNotI* instr) {
1301
- LOperand* input = instr->InputAt(0);
1809
+ LOperand* input = instr->value();
1302
1810
  ASSERT(input->Equals(instr->result()));
1303
1811
  __ not_(ToRegister(input));
1304
1812
  }
@@ -1317,8 +1825,8 @@ void LCodeGen::DoThrow(LThrow* instr) {
1317
1825
 
1318
1826
 
1319
1827
  void LCodeGen::DoAddI(LAddI* instr) {
1320
- LOperand* left = instr->InputAt(0);
1321
- LOperand* right = instr->InputAt(1);
1828
+ LOperand* left = instr->left();
1829
+ LOperand* right = instr->right();
1322
1830
  ASSERT(left->Equals(instr->result()));
1323
1831
 
1324
1832
  if (right->IsConstantOperand()) {
@@ -1333,9 +1841,72 @@ void LCodeGen::DoAddI(LAddI* instr) {
1333
1841
  }
1334
1842
 
1335
1843
 
1844
+ void LCodeGen::DoMathMinMax(LMathMinMax* instr) {
1845
+ CpuFeatures::Scope scope(SSE2);
1846
+ LOperand* left = instr->left();
1847
+ LOperand* right = instr->right();
1848
+ ASSERT(left->Equals(instr->result()));
1849
+ HMathMinMax::Operation operation = instr->hydrogen()->operation();
1850
+ if (instr->hydrogen()->representation().IsInteger32()) {
1851
+ Label return_left;
1852
+ Condition condition = (operation == HMathMinMax::kMathMin)
1853
+ ? less_equal
1854
+ : greater_equal;
1855
+ if (right->IsConstantOperand()) {
1856
+ Operand left_op = ToOperand(left);
1857
+ Immediate right_imm = ToInteger32Immediate(right);
1858
+ __ cmp(left_op, right_imm);
1859
+ __ j(condition, &return_left, Label::kNear);
1860
+ __ mov(left_op, right_imm);
1861
+ } else {
1862
+ Register left_reg = ToRegister(left);
1863
+ Operand right_op = ToOperand(right);
1864
+ __ cmp(left_reg, right_op);
1865
+ __ j(condition, &return_left, Label::kNear);
1866
+ __ mov(left_reg, right_op);
1867
+ }
1868
+ __ bind(&return_left);
1869
+ } else {
1870
+ ASSERT(instr->hydrogen()->representation().IsDouble());
1871
+ Label check_nan_left, check_zero, return_left, return_right;
1872
+ Condition condition = (operation == HMathMinMax::kMathMin) ? below : above;
1873
+ XMMRegister left_reg = ToDoubleRegister(left);
1874
+ XMMRegister right_reg = ToDoubleRegister(right);
1875
+ __ ucomisd(left_reg, right_reg);
1876
+ __ j(parity_even, &check_nan_left, Label::kNear); // At least one NaN.
1877
+ __ j(equal, &check_zero, Label::kNear); // left == right.
1878
+ __ j(condition, &return_left, Label::kNear);
1879
+ __ jmp(&return_right, Label::kNear);
1880
+
1881
+ __ bind(&check_zero);
1882
+ XMMRegister xmm_scratch = xmm0;
1883
+ __ xorps(xmm_scratch, xmm_scratch);
1884
+ __ ucomisd(left_reg, xmm_scratch);
1885
+ __ j(not_equal, &return_left, Label::kNear); // left == right != 0.
1886
+ // At this point, both left and right are either 0 or -0.
1887
+ if (operation == HMathMinMax::kMathMin) {
1888
+ __ orpd(left_reg, right_reg);
1889
+ } else {
1890
+ // Since we operate on +0 and/or -0, addsd and andsd have the same effect.
1891
+ __ addsd(left_reg, right_reg);
1892
+ }
1893
+ __ jmp(&return_left, Label::kNear);
1894
+
1895
+ __ bind(&check_nan_left);
1896
+ __ ucomisd(left_reg, left_reg); // NaN check.
1897
+ __ j(parity_even, &return_left, Label::kNear); // left == NaN.
1898
+ __ bind(&return_right);
1899
+ __ movsd(left_reg, right_reg);
1900
+
1901
+ __ bind(&return_left);
1902
+ }
1903
+ }
1904
+
1905
+
1336
1906
  void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
1337
- XMMRegister left = ToDoubleRegister(instr->InputAt(0));
1338
- XMMRegister right = ToDoubleRegister(instr->InputAt(1));
1907
+ CpuFeatures::Scope scope(SSE2);
1908
+ XMMRegister left = ToDoubleRegister(instr->left());
1909
+ XMMRegister right = ToDoubleRegister(instr->right());
1339
1910
  XMMRegister result = ToDoubleRegister(instr->result());
1340
1911
  // Modulo uses a fixed result register.
1341
1912
  ASSERT(instr->op() == Token::MOD || left.is(result));
@@ -1344,8 +1915,8 @@ void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
1344
1915
  __ addsd(left, right);
1345
1916
  break;
1346
1917
  case Token::SUB:
1347
- __ subsd(left, right);
1348
- break;
1918
+ __ subsd(left, right);
1919
+ break;
1349
1920
  case Token::MUL:
1350
1921
  __ mulsd(left, right);
1351
1922
  break;
@@ -1418,20 +1989,21 @@ void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) {
1418
1989
  void LCodeGen::DoBranch(LBranch* instr) {
1419
1990
  int true_block = chunk_->LookupDestination(instr->true_block_id());
1420
1991
  int false_block = chunk_->LookupDestination(instr->false_block_id());
1992
+ CpuFeatures::Scope scope(SSE2);
1421
1993
 
1422
1994
  Representation r = instr->hydrogen()->value()->representation();
1423
1995
  if (r.IsInteger32()) {
1424
- Register reg = ToRegister(instr->InputAt(0));
1996
+ Register reg = ToRegister(instr->value());
1425
1997
  __ test(reg, Operand(reg));
1426
1998
  EmitBranch(true_block, false_block, not_zero);
1427
1999
  } else if (r.IsDouble()) {
1428
- XMMRegister reg = ToDoubleRegister(instr->InputAt(0));
2000
+ XMMRegister reg = ToDoubleRegister(instr->value());
1429
2001
  __ xorps(xmm0, xmm0);
1430
2002
  __ ucomisd(reg, xmm0);
1431
2003
  EmitBranch(true_block, false_block, not_equal);
1432
2004
  } else {
1433
2005
  ASSERT(r.IsTagged());
1434
- Register reg = ToRegister(instr->InputAt(0));
2006
+ Register reg = ToRegister(instr->value());
1435
2007
  HType type = instr->hydrogen()->value()->type();
1436
2008
  if (type.IsBoolean()) {
1437
2009
  __ cmp(reg, factory()->true_value());
@@ -1479,7 +2051,7 @@ void LCodeGen::DoBranch(LBranch* instr) {
1479
2051
 
1480
2052
  Register map = no_reg; // Keep the compiler happy.
1481
2053
  if (expected.NeedsMap()) {
1482
- map = ToRegister(instr->TempAt(0));
2054
+ map = ToRegister(instr->temp());
1483
2055
  ASSERT(!map.is(reg));
1484
2056
  __ mov(map, FieldOperand(reg, HeapObject::kMapOffset));
1485
2057
 
@@ -1572,11 +2144,12 @@ Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
1572
2144
 
1573
2145
 
1574
2146
  void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
1575
- LOperand* left = instr->InputAt(0);
1576
- LOperand* right = instr->InputAt(1);
2147
+ LOperand* left = instr->left();
2148
+ LOperand* right = instr->right();
1577
2149
  int false_block = chunk_->LookupDestination(instr->false_block_id());
1578
2150
  int true_block = chunk_->LookupDestination(instr->true_block_id());
1579
2151
  Condition cc = TokenToCondition(instr->op(), instr->is_double());
2152
+ CpuFeatures::Scope scope(SSE2);
1580
2153
 
1581
2154
  if (left->IsConstantOperand() && right->IsConstantOperand()) {
1582
2155
  // We can statically evaluate the comparison.
@@ -1609,8 +2182,8 @@ void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
1609
2182
 
1610
2183
 
1611
2184
  void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
1612
- Register left = ToRegister(instr->InputAt(0));
1613
- Operand right = ToOperand(instr->InputAt(1));
2185
+ Register left = ToRegister(instr->left());
2186
+ Operand right = ToOperand(instr->right());
1614
2187
  int false_block = chunk_->LookupDestination(instr->false_block_id());
1615
2188
  int true_block = chunk_->LookupDestination(instr->true_block_id());
1616
2189
 
@@ -1620,7 +2193,7 @@ void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
1620
2193
 
1621
2194
 
1622
2195
  void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
1623
- Register left = ToRegister(instr->InputAt(0));
2196
+ Register left = ToRegister(instr->left());
1624
2197
  int true_block = chunk_->LookupDestination(instr->true_block_id());
1625
2198
  int false_block = chunk_->LookupDestination(instr->false_block_id());
1626
2199
 
@@ -1630,7 +2203,7 @@ void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
1630
2203
 
1631
2204
 
1632
2205
  void LCodeGen::DoIsNilAndBranch(LIsNilAndBranch* instr) {
1633
- Register reg = ToRegister(instr->InputAt(0));
2206
+ Register reg = ToRegister(instr->value());
1634
2207
  int false_block = chunk_->LookupDestination(instr->false_block_id());
1635
2208
 
1636
2209
  // If the expression is known to be untagged or a smi, then it's definitely
@@ -1660,7 +2233,7 @@ void LCodeGen::DoIsNilAndBranch(LIsNilAndBranch* instr) {
1660
2233
  __ JumpIfSmi(reg, false_label);
1661
2234
  // Check for undetectable objects by looking in the bit field in
1662
2235
  // the map. The object has already been smi checked.
1663
- Register scratch = ToRegister(instr->TempAt(0));
2236
+ Register scratch = ToRegister(instr->temp());
1664
2237
  __ mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
1665
2238
  __ movzx_b(scratch, FieldOperand(scratch, Map::kBitFieldOffset));
1666
2239
  __ test(scratch, Immediate(1 << Map::kIsUndetectable));
@@ -1693,8 +2266,8 @@ Condition LCodeGen::EmitIsObject(Register input,
1693
2266
 
1694
2267
 
1695
2268
  void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
1696
- Register reg = ToRegister(instr->InputAt(0));
1697
- Register temp = ToRegister(instr->TempAt(0));
2269
+ Register reg = ToRegister(instr->value());
2270
+ Register temp = ToRegister(instr->temp());
1698
2271
 
1699
2272
  int true_block = chunk_->LookupDestination(instr->true_block_id());
1700
2273
  int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1719,8 +2292,8 @@ Condition LCodeGen::EmitIsString(Register input,
1719
2292
 
1720
2293
 
1721
2294
  void LCodeGen::DoIsStringAndBranch(LIsStringAndBranch* instr) {
1722
- Register reg = ToRegister(instr->InputAt(0));
1723
- Register temp = ToRegister(instr->TempAt(0));
2295
+ Register reg = ToRegister(instr->value());
2296
+ Register temp = ToRegister(instr->temp());
1724
2297
 
1725
2298
  int true_block = chunk_->LookupDestination(instr->true_block_id());
1726
2299
  int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1733,7 +2306,7 @@ void LCodeGen::DoIsStringAndBranch(LIsStringAndBranch* instr) {
1733
2306
 
1734
2307
 
1735
2308
  void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
1736
- Operand input = ToOperand(instr->InputAt(0));
2309
+ Operand input = ToOperand(instr->value());
1737
2310
 
1738
2311
  int true_block = chunk_->LookupDestination(instr->true_block_id());
1739
2312
  int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1744,8 +2317,8 @@ void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
1744
2317
 
1745
2318
 
1746
2319
  void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
1747
- Register input = ToRegister(instr->InputAt(0));
1748
- Register temp = ToRegister(instr->TempAt(0));
2320
+ Register input = ToRegister(instr->value());
2321
+ Register temp = ToRegister(instr->temp());
1749
2322
 
1750
2323
  int true_block = chunk_->LookupDestination(instr->true_block_id());
1751
2324
  int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1815,8 +2388,8 @@ static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) {
1815
2388
 
1816
2389
 
1817
2390
  void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
1818
- Register input = ToRegister(instr->InputAt(0));
1819
- Register temp = ToRegister(instr->TempAt(0));
2391
+ Register input = ToRegister(instr->value());
2392
+ Register temp = ToRegister(instr->temp());
1820
2393
 
1821
2394
  int true_block = chunk_->LookupDestination(instr->true_block_id());
1822
2395
  int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1831,12 +2404,10 @@ void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
1831
2404
 
1832
2405
 
1833
2406
  void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
1834
- Register input = ToRegister(instr->InputAt(0));
2407
+ Register input = ToRegister(instr->value());
1835
2408
  Register result = ToRegister(instr->result());
1836
2409
 
1837
- if (FLAG_debug_code) {
1838
- __ AbortIfNotString(input);
1839
- }
2410
+ __ AssertString(input);
1840
2411
 
1841
2412
  __ mov(result, FieldOperand(input, String::kHashFieldOffset));
1842
2413
  __ IndexFromHash(result, result);
@@ -1845,7 +2416,7 @@ void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
1845
2416
 
1846
2417
  void LCodeGen::DoHasCachedArrayIndexAndBranch(
1847
2418
  LHasCachedArrayIndexAndBranch* instr) {
1848
- Register input = ToRegister(instr->InputAt(0));
2419
+ Register input = ToRegister(instr->value());
1849
2420
 
1850
2421
  int true_block = chunk_->LookupDestination(instr->true_block_id());
1851
2422
  int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1869,7 +2440,7 @@ void LCodeGen::EmitClassOfTest(Label* is_true,
1869
2440
  ASSERT(!temp.is(temp2));
1870
2441
  __ JumpIfSmi(input, is_false);
1871
2442
 
1872
- if (class_name->IsEqualTo(CStrVector("Function"))) {
2443
+ if (class_name->IsOneByteEqualTo(STATIC_ASCII_VECTOR("Function"))) {
1873
2444
  // Assuming the following assertions, we can use the same compares to test
1874
2445
  // for both being a function type and being in the object type range.
1875
2446
  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
@@ -1899,7 +2470,7 @@ void LCodeGen::EmitClassOfTest(Label* is_true,
1899
2470
  __ mov(temp, FieldOperand(temp, Map::kConstructorOffset));
1900
2471
  // Objects with a non-function constructor have class 'Object'.
1901
2472
  __ CmpObjectType(temp, JS_FUNCTION_TYPE, temp2);
1902
- if (class_name->IsEqualTo(CStrVector("Object"))) {
2473
+ if (class_name->IsOneByteEqualTo(STATIC_ASCII_VECTOR("Object"))) {
1903
2474
  __ j(not_equal, is_true);
1904
2475
  } else {
1905
2476
  __ j(not_equal, is_false);
@@ -1922,9 +2493,9 @@ void LCodeGen::EmitClassOfTest(Label* is_true,
1922
2493
 
1923
2494
 
1924
2495
  void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
1925
- Register input = ToRegister(instr->InputAt(0));
1926
- Register temp = ToRegister(instr->TempAt(0));
1927
- Register temp2 = ToRegister(instr->TempAt(1));
2496
+ Register input = ToRegister(instr->value());
2497
+ Register temp = ToRegister(instr->temp());
2498
+ Register temp2 = ToRegister(instr->temp2());
1928
2499
 
1929
2500
  Handle<String> class_name = instr->hydrogen()->class_name();
1930
2501
 
@@ -1941,7 +2512,7 @@ void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
1941
2512
 
1942
2513
 
1943
2514
  void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
1944
- Register reg = ToRegister(instr->InputAt(0));
2515
+ Register reg = ToRegister(instr->value());
1945
2516
  int true_block = instr->true_block_id();
1946
2517
  int false_block = instr->false_block_id();
1947
2518
 
@@ -1984,11 +2555,11 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
1984
2555
  };
1985
2556
 
1986
2557
  DeferredInstanceOfKnownGlobal* deferred;
1987
- deferred = new DeferredInstanceOfKnownGlobal(this, instr);
2558
+ deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr);
1988
2559
 
1989
2560
  Label done, false_result;
1990
- Register object = ToRegister(instr->InputAt(1));
1991
- Register temp = ToRegister(instr->TempAt(0));
2561
+ Register object = ToRegister(instr->value());
2562
+ Register temp = ToRegister(instr->temp());
1992
2563
 
1993
2564
  // A Smi is not an instance of anything.
1994
2565
  __ JumpIfSmi(object, &false_result);
@@ -1997,7 +2568,7 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
1997
2568
  // hole value will be patched to the last map/result pair generated by the
1998
2569
  // instanceof stub.
1999
2570
  Label cache_miss;
2000
- Register map = ToRegister(instr->TempAt(0));
2571
+ Register map = ToRegister(instr->temp());
2001
2572
  __ mov(map, FieldOperand(object, HeapObject::kMapOffset));
2002
2573
  __ bind(deferred->map_check()); // Label for calculating code patching.
2003
2574
  Handle<JSGlobalPropertyCell> cache_cell =
@@ -2048,7 +2619,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
2048
2619
  // register which is pushed last by PushSafepointRegisters as top of the
2049
2620
  // stack is used to pass the offset to the location of the map check to
2050
2621
  // the stub.
2051
- Register temp = ToRegister(instr->TempAt(0));
2622
+ Register temp = ToRegister(instr->temp());
2052
2623
  ASSERT(MacroAssembler::SafepointRegisterStackIndex(temp) == 0);
2053
2624
  __ LoadHeapObject(InstanceofStub::right(), instr->function());
2054
2625
  static const int kAdditionalDelta = 13;
@@ -2088,7 +2659,7 @@ void LCodeGen::DoCmpT(LCmpT* instr) {
2088
2659
 
2089
2660
 
2090
2661
  void LCodeGen::DoReturn(LReturn* instr) {
2091
- if (FLAG_trace) {
2662
+ if (FLAG_trace && info()->IsOptimizing()) {
2092
2663
  // Preserve the return value on the stack and rely on the runtime call
2093
2664
  // to return the value in the same register. We're leaving the code
2094
2665
  // managed by the register allocator and tearing down the frame, it's
@@ -2097,9 +2668,45 @@ void LCodeGen::DoReturn(LReturn* instr) {
2097
2668
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2098
2669
  __ CallRuntime(Runtime::kTraceExit, 1);
2099
2670
  }
2100
- __ mov(esp, ebp);
2101
- __ pop(ebp);
2102
- __ Ret((GetParameterCount() + 1) * kPointerSize, ecx);
2671
+ if (info()->saves_caller_doubles() && CpuFeatures::IsSupported(SSE2)) {
2672
+ ASSERT(NeedsEagerFrame());
2673
+ CpuFeatures::Scope scope(SSE2);
2674
+ BitVector* doubles = chunk()->allocated_double_registers();
2675
+ BitVector::Iterator save_iterator(doubles);
2676
+ int count = 0;
2677
+ while (!save_iterator.Done()) {
2678
+ __ movdbl(XMMRegister::FromAllocationIndex(save_iterator.Current()),
2679
+ MemOperand(esp, count * kDoubleSize));
2680
+ save_iterator.Advance();
2681
+ count++;
2682
+ }
2683
+ }
2684
+ if (dynamic_frame_alignment_) {
2685
+ // Fetch the state of the dynamic frame alignment.
2686
+ __ mov(edx, Operand(ebp,
2687
+ JavaScriptFrameConstants::kDynamicAlignmentStateOffset));
2688
+ }
2689
+ if (NeedsEagerFrame()) {
2690
+ __ mov(esp, ebp);
2691
+ __ pop(ebp);
2692
+ }
2693
+ if (dynamic_frame_alignment_) {
2694
+ Label no_padding;
2695
+ __ cmp(edx, Immediate(kNoAlignmentPadding));
2696
+ __ j(equal, &no_padding);
2697
+ if (FLAG_debug_code) {
2698
+ __ cmp(Operand(esp, (GetParameterCount() + 2) * kPointerSize),
2699
+ Immediate(kAlignmentZapValue));
2700
+ __ Assert(equal, "expected alignment marker");
2701
+ }
2702
+ __ Ret((GetParameterCount() + 2) * kPointerSize, ecx);
2703
+ __ bind(&no_padding);
2704
+ }
2705
+ if (info()->IsStub()) {
2706
+ __ Ret();
2707
+ } else {
2708
+ __ Ret((GetParameterCount() + 1) * kPointerSize, ecx);
2709
+ }
2103
2710
  }
2104
2711
 
2105
2712
 
@@ -2198,7 +2805,7 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
2198
2805
  HType type = instr->hydrogen()->value()->type();
2199
2806
  SmiCheck check_needed =
2200
2807
  type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
2201
- Register temp = ToRegister(instr->TempAt(0));
2808
+ Register temp = ToRegister(instr->temp());
2202
2809
  int offset = Context::SlotOffset(instr->slot_index());
2203
2810
  __ RecordWriteContextSlot(context,
2204
2811
  offset,
@@ -2228,12 +2835,12 @@ void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
2228
2835
  void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
2229
2836
  Register object,
2230
2837
  Handle<Map> type,
2231
- Handle<String> name) {
2838
+ Handle<String> name,
2839
+ LEnvironment* env) {
2232
2840
  LookupResult lookup(isolate());
2233
- type->LookupInDescriptors(NULL, *name, &lookup);
2234
- ASSERT(lookup.IsFound() &&
2235
- (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION));
2236
- if (lookup.type() == FIELD) {
2841
+ type->LookupDescriptor(NULL, *name, &lookup);
2842
+ ASSERT(lookup.IsFound() || lookup.IsCacheable());
2843
+ if (lookup.IsField()) {
2237
2844
  int index = lookup.GetLocalFieldIndexFromMap(*type);
2238
2845
  int offset = index * kPointerSize;
2239
2846
  if (index < 0) {
@@ -2245,9 +2852,23 @@ void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
2245
2852
  __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset));
2246
2853
  __ mov(result, FieldOperand(result, offset + FixedArray::kHeaderSize));
2247
2854
  }
2248
- } else {
2855
+ } else if (lookup.IsConstantFunction()) {
2249
2856
  Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
2250
2857
  __ LoadHeapObject(result, function);
2858
+ } else {
2859
+ // Negative lookup.
2860
+ // Check prototypes.
2861
+ Handle<HeapObject> current(HeapObject::cast((*type)->prototype()));
2862
+ Heap* heap = type->GetHeap();
2863
+ while (*current != heap->null_value()) {
2864
+ __ LoadHeapObject(result, current);
2865
+ __ cmp(FieldOperand(result, HeapObject::kMapOffset),
2866
+ Handle<Map>(current->map()));
2867
+ DeoptimizeIf(not_equal, env);
2868
+ current =
2869
+ Handle<HeapObject>(HeapObject::cast(current->map()->prototype()));
2870
+ }
2871
+ __ mov(result, factory()->undefined_value());
2251
2872
  }
2252
2873
  }
2253
2874
 
@@ -2269,6 +2890,22 @@ void LCodeGen::EmitPushTaggedOperand(LOperand* operand) {
2269
2890
  }
2270
2891
 
2271
2892
 
2893
+ // Check for cases where EmitLoadFieldOrConstantFunction needs to walk the
2894
+ // prototype chain, which causes unbounded code generation.
2895
+ static bool CompactEmit(SmallMapList* list,
2896
+ Handle<String> name,
2897
+ int i,
2898
+ Isolate* isolate) {
2899
+ Handle<Map> map = list->at(i);
2900
+ // If the map has ElementsKind transitions, we will generate map checks
2901
+ // for each kind in __ CompareMap(..., ALLOW_ELEMENTS_TRANSITION_MAPS).
2902
+ if (map->HasElementsTransition()) return false;
2903
+ LookupResult lookup(isolate);
2904
+ map->LookupDescriptor(NULL, *name, &lookup);
2905
+ return lookup.IsField() || lookup.IsConstantFunction();
2906
+ }
2907
+
2908
+
2272
2909
  void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
2273
2910
  Register object = ToRegister(instr->object());
2274
2911
  Register result = ToRegister(instr->result());
@@ -2282,18 +2919,32 @@ void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
2282
2919
  }
2283
2920
  Handle<String> name = instr->hydrogen()->name();
2284
2921
  Label done;
2922
+ bool all_are_compact = true;
2923
+ for (int i = 0; i < map_count; ++i) {
2924
+ if (!CompactEmit(instr->hydrogen()->types(), name, i, isolate())) {
2925
+ all_are_compact = false;
2926
+ break;
2927
+ }
2928
+ }
2285
2929
  for (int i = 0; i < map_count; ++i) {
2286
2930
  bool last = (i == map_count - 1);
2287
2931
  Handle<Map> map = instr->hydrogen()->types()->at(i);
2288
- __ cmp(FieldOperand(object, HeapObject::kMapOffset), map);
2932
+ Label check_passed;
2933
+ __ CompareMap(object, map, &check_passed, ALLOW_ELEMENT_TRANSITION_MAPS);
2289
2934
  if (last && !need_generic) {
2290
2935
  DeoptimizeIf(not_equal, instr->environment());
2291
- EmitLoadFieldOrConstantFunction(result, object, map, name);
2936
+ __ bind(&check_passed);
2937
+ EmitLoadFieldOrConstantFunction(
2938
+ result, object, map, name, instr->environment());
2292
2939
  } else {
2293
2940
  Label next;
2294
- __ j(not_equal, &next, Label::kNear);
2295
- EmitLoadFieldOrConstantFunction(result, object, map, name);
2296
- __ jmp(&done, Label::kNear);
2941
+ bool compact = all_are_compact ? true :
2942
+ CompactEmit(instr->hydrogen()->types(), name, i, isolate());
2943
+ __ j(not_equal, &next, compact ? Label::kNear : Label::kFar);
2944
+ __ bind(&check_passed);
2945
+ EmitLoadFieldOrConstantFunction(
2946
+ result, object, map, name, instr->environment());
2947
+ __ jmp(&done, all_are_compact ? Label::kNear : Label::kFar);
2297
2948
  __ bind(&next);
2298
2949
  }
2299
2950
  }
@@ -2319,7 +2970,7 @@ void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
2319
2970
 
2320
2971
  void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
2321
2972
  Register function = ToRegister(instr->function());
2322
- Register temp = ToRegister(instr->TempAt(0));
2973
+ Register temp = ToRegister(instr->temp());
2323
2974
  Register result = ToRegister(instr->result());
2324
2975
 
2325
2976
  // Check that the function really is a function.
@@ -2361,7 +3012,7 @@ void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
2361
3012
 
2362
3013
  void LCodeGen::DoLoadElements(LLoadElements* instr) {
2363
3014
  Register result = ToRegister(instr->result());
2364
- Register input = ToRegister(instr->InputAt(0));
3015
+ Register input = ToRegister(instr->object());
2365
3016
  __ mov(result, FieldOperand(input, JSObject::kElementsOffset));
2366
3017
  if (FLAG_debug_code) {
2367
3018
  Label done, ok, fail;
@@ -2397,7 +3048,7 @@ void LCodeGen::DoLoadElements(LLoadElements* instr) {
2397
3048
  void LCodeGen::DoLoadExternalArrayPointer(
2398
3049
  LLoadExternalArrayPointer* instr) {
2399
3050
  Register result = ToRegister(instr->result());
2400
- Register input = ToRegister(instr->InputAt(0));
3051
+ Register input = ToRegister(instr->object());
2401
3052
  __ mov(result, FieldOperand(input,
2402
3053
  ExternalArray::kExternalPointerOffset));
2403
3054
  }
@@ -2408,44 +3059,119 @@ void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
2408
3059
  Register length = ToRegister(instr->length());
2409
3060
  Operand index = ToOperand(instr->index());
2410
3061
  Register result = ToRegister(instr->result());
2411
-
2412
- __ sub(length, index);
2413
- DeoptimizeIf(below_equal, instr->environment());
2414
-
2415
3062
  // There are two words between the frame pointer and the last argument.
2416
3063
  // Subtracting from length accounts for one of them add one more.
3064
+ __ sub(length, index);
2417
3065
  __ mov(result, Operand(arguments, length, times_4, kPointerSize));
2418
3066
  }
2419
3067
 
2420
3068
 
2421
- void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
2422
- Register result = ToRegister(instr->result());
3069
+ void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) {
3070
+ ElementsKind elements_kind = instr->elements_kind();
3071
+ LOperand* key = instr->key();
3072
+ if (!key->IsConstantOperand() &&
3073
+ ExternalArrayOpRequiresTemp(instr->hydrogen()->key()->representation(),
3074
+ elements_kind)) {
3075
+ __ SmiUntag(ToRegister(key));
3076
+ }
3077
+ Operand operand(BuildFastArrayOperand(
3078
+ instr->elements(),
3079
+ key,
3080
+ instr->hydrogen()->key()->representation(),
3081
+ elements_kind,
3082
+ 0,
3083
+ instr->additional_index()));
3084
+ if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
3085
+ if (CpuFeatures::IsSupported(SSE2)) {
3086
+ CpuFeatures::Scope scope(SSE2);
3087
+ XMMRegister result(ToDoubleRegister(instr->result()));
3088
+ __ movss(result, operand);
3089
+ __ cvtss2sd(result, result);
3090
+ } else {
3091
+ __ fld_s(operand);
3092
+ HandleX87FPReturnValue(instr);
3093
+ }
3094
+ } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
3095
+ if (CpuFeatures::IsSupported(SSE2)) {
3096
+ CpuFeatures::Scope scope(SSE2);
3097
+ __ movdbl(ToDoubleRegister(instr->result()), operand);
3098
+ } else {
3099
+ __ fld_d(operand);
3100
+ HandleX87FPReturnValue(instr);
3101
+ }
3102
+ } else {
3103
+ Register result(ToRegister(instr->result()));
3104
+ switch (elements_kind) {
3105
+ case EXTERNAL_BYTE_ELEMENTS:
3106
+ __ movsx_b(result, operand);
3107
+ break;
3108
+ case EXTERNAL_PIXEL_ELEMENTS:
3109
+ case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3110
+ __ movzx_b(result, operand);
3111
+ break;
3112
+ case EXTERNAL_SHORT_ELEMENTS:
3113
+ __ movsx_w(result, operand);
3114
+ break;
3115
+ case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3116
+ __ movzx_w(result, operand);
3117
+ break;
3118
+ case EXTERNAL_INT_ELEMENTS:
3119
+ __ mov(result, operand);
3120
+ break;
3121
+ case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3122
+ __ mov(result, operand);
3123
+ if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) {
3124
+ __ test(result, Operand(result));
3125
+ DeoptimizeIf(negative, instr->environment());
3126
+ }
3127
+ break;
3128
+ case EXTERNAL_FLOAT_ELEMENTS:
3129
+ case EXTERNAL_DOUBLE_ELEMENTS:
3130
+ case FAST_SMI_ELEMENTS:
3131
+ case FAST_ELEMENTS:
3132
+ case FAST_DOUBLE_ELEMENTS:
3133
+ case FAST_HOLEY_SMI_ELEMENTS:
3134
+ case FAST_HOLEY_ELEMENTS:
3135
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
3136
+ case DICTIONARY_ELEMENTS:
3137
+ case NON_STRICT_ARGUMENTS_ELEMENTS:
3138
+ UNREACHABLE();
3139
+ break;
3140
+ }
3141
+ }
3142
+ }
2423
3143
 
2424
- // Load the result.
2425
- __ mov(result,
2426
- BuildFastArrayOperand(instr->elements(),
2427
- instr->key(),
2428
- FAST_ELEMENTS,
2429
- FixedArray::kHeaderSize - kHeapObjectTag,
2430
- instr->additional_index()));
2431
3144
 
2432
- // Check for the hole value.
2433
- if (instr->hydrogen()->RequiresHoleCheck()) {
2434
- __ cmp(result, factory()->the_hole_value());
2435
- DeoptimizeIf(equal, instr->environment());
3145
+ void LCodeGen::HandleX87FPReturnValue(LInstruction* instr) {
3146
+ if (IsX87TopOfStack(instr->result())) {
3147
+ // Return value is already on stack. If the value has no uses, then
3148
+ // pop it off the FP stack. Otherwise, make sure that there are enough
3149
+ // copies of the value on the stack to feed all of the usages, e.g.
3150
+ // when the following instruction uses the return value in multiple
3151
+ // inputs.
3152
+ int count = instr->hydrogen_value()->UseCount();
3153
+ if (count == 0) {
3154
+ __ fstp(0);
3155
+ } else {
3156
+ count--;
3157
+ ASSERT(count <= 7);
3158
+ while (count-- > 0) {
3159
+ __ fld(0);
3160
+ }
3161
+ }
3162
+ } else {
3163
+ __ fstp_d(ToOperand(instr->result()));
2436
3164
  }
2437
3165
  }
2438
3166
 
2439
3167
 
2440
- void LCodeGen::DoLoadKeyedFastDoubleElement(
2441
- LLoadKeyedFastDoubleElement* instr) {
2442
- XMMRegister result = ToDoubleRegister(instr->result());
2443
-
3168
+ void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) {
2444
3169
  if (instr->hydrogen()->RequiresHoleCheck()) {
2445
3170
  int offset = FixedDoubleArray::kHeaderSize - kHeapObjectTag +
2446
3171
  sizeof(kHoleNanLower32);
2447
3172
  Operand hole_check_operand = BuildFastArrayOperand(
2448
3173
  instr->elements(), instr->key(),
3174
+ instr->hydrogen()->key()->representation(),
2449
3175
  FAST_DOUBLE_ELEMENTS,
2450
3176
  offset,
2451
3177
  instr->additional_index());
@@ -2456,16 +3182,61 @@ void LCodeGen::DoLoadKeyedFastDoubleElement(
2456
3182
  Operand double_load_operand = BuildFastArrayOperand(
2457
3183
  instr->elements(),
2458
3184
  instr->key(),
3185
+ instr->hydrogen()->key()->representation(),
2459
3186
  FAST_DOUBLE_ELEMENTS,
2460
3187
  FixedDoubleArray::kHeaderSize - kHeapObjectTag,
2461
3188
  instr->additional_index());
2462
- __ movdbl(result, double_load_operand);
3189
+ if (CpuFeatures::IsSupported(SSE2)) {
3190
+ CpuFeatures::Scope scope(SSE2);
3191
+ XMMRegister result = ToDoubleRegister(instr->result());
3192
+ __ movdbl(result, double_load_operand);
3193
+ } else {
3194
+ __ fld_d(double_load_operand);
3195
+ HandleX87FPReturnValue(instr);
3196
+ }
3197
+ }
3198
+
3199
+
3200
+ void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) {
3201
+ Register result = ToRegister(instr->result());
3202
+
3203
+ // Load the result.
3204
+ __ mov(result,
3205
+ BuildFastArrayOperand(instr->elements(),
3206
+ instr->key(),
3207
+ instr->hydrogen()->key()->representation(),
3208
+ FAST_ELEMENTS,
3209
+ FixedArray::kHeaderSize - kHeapObjectTag,
3210
+ instr->additional_index()));
3211
+
3212
+ // Check for the hole value.
3213
+ if (instr->hydrogen()->RequiresHoleCheck()) {
3214
+ if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) {
3215
+ __ test(result, Immediate(kSmiTagMask));
3216
+ DeoptimizeIf(not_equal, instr->environment());
3217
+ } else {
3218
+ __ cmp(result, factory()->the_hole_value());
3219
+ DeoptimizeIf(equal, instr->environment());
3220
+ }
3221
+ }
3222
+ }
3223
+
3224
+
3225
+ void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) {
3226
+ if (instr->is_external()) {
3227
+ DoLoadKeyedExternalArray(instr);
3228
+ } else if (instr->hydrogen()->representation().IsDouble()) {
3229
+ DoLoadKeyedFixedDoubleArray(instr);
3230
+ } else {
3231
+ DoLoadKeyedFixedArray(instr);
3232
+ }
2463
3233
  }
2464
3234
 
2465
3235
 
2466
3236
  Operand LCodeGen::BuildFastArrayOperand(
2467
3237
  LOperand* elements_pointer,
2468
3238
  LOperand* key,
3239
+ Representation key_representation,
2469
3240
  ElementsKind elements_kind,
2470
3241
  uint32_t offset,
2471
3242
  uint32_t additional_index) {
@@ -2478,71 +3249,17 @@ Operand LCodeGen::BuildFastArrayOperand(
2478
3249
  }
2479
3250
  return Operand(elements_pointer_reg,
2480
3251
  ((constant_value + additional_index) << shift_size)
2481
- + offset);
2482
- } else {
2483
- ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size);
2484
- return Operand(elements_pointer_reg,
2485
- ToRegister(key),
2486
- scale_factor,
2487
- offset + (additional_index << shift_size));
2488
- }
2489
- }
2490
-
2491
-
2492
- void LCodeGen::DoLoadKeyedSpecializedArrayElement(
2493
- LLoadKeyedSpecializedArrayElement* instr) {
2494
- ElementsKind elements_kind = instr->elements_kind();
2495
- Operand operand(BuildFastArrayOperand(instr->external_pointer(),
2496
- instr->key(),
2497
- elements_kind,
2498
- 0,
2499
- instr->additional_index()));
2500
- if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
2501
- XMMRegister result(ToDoubleRegister(instr->result()));
2502
- __ movss(result, operand);
2503
- __ cvtss2sd(result, result);
2504
- } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
2505
- __ movdbl(ToDoubleRegister(instr->result()), operand);
2506
- } else {
2507
- Register result(ToRegister(instr->result()));
2508
- switch (elements_kind) {
2509
- case EXTERNAL_BYTE_ELEMENTS:
2510
- __ movsx_b(result, operand);
2511
- break;
2512
- case EXTERNAL_PIXEL_ELEMENTS:
2513
- case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
2514
- __ movzx_b(result, operand);
2515
- break;
2516
- case EXTERNAL_SHORT_ELEMENTS:
2517
- __ movsx_w(result, operand);
2518
- break;
2519
- case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
2520
- __ movzx_w(result, operand);
2521
- break;
2522
- case EXTERNAL_INT_ELEMENTS:
2523
- __ mov(result, operand);
2524
- break;
2525
- case EXTERNAL_UNSIGNED_INT_ELEMENTS:
2526
- __ mov(result, operand);
2527
- __ test(result, Operand(result));
2528
- // TODO(danno): we could be more clever here, perhaps having a special
2529
- // version of the stub that detects if the overflow case actually
2530
- // happens, and generate code that returns a double rather than int.
2531
- DeoptimizeIf(negative, instr->environment());
2532
- break;
2533
- case EXTERNAL_FLOAT_ELEMENTS:
2534
- case EXTERNAL_DOUBLE_ELEMENTS:
2535
- case FAST_SMI_ELEMENTS:
2536
- case FAST_ELEMENTS:
2537
- case FAST_DOUBLE_ELEMENTS:
2538
- case FAST_HOLEY_SMI_ELEMENTS:
2539
- case FAST_HOLEY_ELEMENTS:
2540
- case FAST_HOLEY_DOUBLE_ELEMENTS:
2541
- case DICTIONARY_ELEMENTS:
2542
- case NON_STRICT_ARGUMENTS_ELEMENTS:
2543
- UNREACHABLE();
2544
- break;
3252
+ + offset);
3253
+ } else {
3254
+ // Take the tag bit into account while computing the shift size.
3255
+ if (key_representation.IsTagged() && (shift_size >= 1)) {
3256
+ shift_size -= kSmiTagSize;
2545
3257
  }
3258
+ ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size);
3259
+ return Operand(elements_pointer_reg,
3260
+ ToRegister(key),
3261
+ scale_factor,
3262
+ offset + (additional_index << shift_size));
2546
3263
  }
2547
3264
  }
2548
3265
 
@@ -2587,7 +3304,7 @@ void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
2587
3304
 
2588
3305
 
2589
3306
  void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
2590
- Operand elem = ToOperand(instr->InputAt(0));
3307
+ Operand elem = ToOperand(instr->elements());
2591
3308
  Register result = ToRegister(instr->result());
2592
3309
 
2593
3310
  Label done;
@@ -2611,7 +3328,7 @@ void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
2611
3328
  void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
2612
3329
  Register receiver = ToRegister(instr->receiver());
2613
3330
  Register function = ToRegister(instr->function());
2614
- Register scratch = ToRegister(instr->TempAt(0));
3331
+ Register scratch = ToRegister(instr->temp());
2615
3332
 
2616
3333
  // If the receiver is null or undefined, we have to pass the global
2617
3334
  // object as a receiver to normal functions. Values have to be
@@ -2624,12 +3341,12 @@ void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
2624
3341
  FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
2625
3342
  __ test_b(FieldOperand(scratch, SharedFunctionInfo::kStrictModeByteOffset),
2626
3343
  1 << SharedFunctionInfo::kStrictModeBitWithinByte);
2627
- __ j(not_equal, &receiver_ok, Label::kNear);
3344
+ __ j(not_equal, &receiver_ok); // A near jump is not sufficient here!
2628
3345
 
2629
3346
  // Do not transform the receiver to object for builtins.
2630
3347
  __ test_b(FieldOperand(scratch, SharedFunctionInfo::kNativeByteOffset),
2631
3348
  1 << SharedFunctionInfo::kNativeBitWithinByte);
2632
- __ j(not_equal, &receiver_ok, Label::kNear);
3349
+ __ j(not_equal, &receiver_ok);
2633
3350
 
2634
3351
  // Normal function. Replace undefined or null with global receiver.
2635
3352
  __ cmp(receiver, factory()->null_value());
@@ -2649,7 +3366,7 @@ void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
2649
3366
  // if it's better to use it than to explicitly fetch it from the context
2650
3367
  // here.
2651
3368
  __ mov(receiver, Operand(ebp, StandardFrameConstants::kContextOffset));
2652
- __ mov(receiver, ContextOperand(receiver, Context::GLOBAL_INDEX));
3369
+ __ mov(receiver, ContextOperand(receiver, Context::GLOBAL_OBJECT_INDEX));
2653
3370
  __ mov(receiver,
2654
3371
  FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
2655
3372
  __ bind(&receiver_ok);
@@ -2699,7 +3416,7 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
2699
3416
 
2700
3417
 
2701
3418
  void LCodeGen::DoPushArgument(LPushArgument* instr) {
2702
- LOperand* argument = instr->InputAt(0);
3419
+ LOperand* argument = instr->value();
2703
3420
  EmitPushTaggedOperand(argument);
2704
3421
  }
2705
3422
 
@@ -2711,13 +3428,18 @@ void LCodeGen::DoDrop(LDrop* instr) {
2711
3428
 
2712
3429
  void LCodeGen::DoThisFunction(LThisFunction* instr) {
2713
3430
  Register result = ToRegister(instr->result());
2714
- __ LoadHeapObject(result, instr->hydrogen()->closure());
3431
+ __ mov(result, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
2715
3432
  }
2716
3433
 
2717
3434
 
2718
3435
  void LCodeGen::DoContext(LContext* instr) {
2719
3436
  Register result = ToRegister(instr->result());
2720
- __ mov(result, Operand(ebp, StandardFrameConstants::kContextOffset));
3437
+ if (info()->IsOptimizing()) {
3438
+ __ mov(result, Operand(ebp, StandardFrameConstants::kContextOffset));
3439
+ } else {
3440
+ // If there is no frame, the context must be in esi.
3441
+ ASSERT(result.is(esi));
3442
+ }
2721
3443
  }
2722
3444
 
2723
3445
 
@@ -2730,7 +3452,7 @@ void LCodeGen::DoOuterContext(LOuterContext* instr) {
2730
3452
 
2731
3453
 
2732
3454
  void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
2733
- ASSERT(ToRegister(instr->InputAt(0)).is(esi));
3455
+ ASSERT(ToRegister(instr->context()).is(esi));
2734
3456
  __ push(esi); // The context is the first argument.
2735
3457
  __ push(Immediate(instr->hydrogen()->pairs()));
2736
3458
  __ push(Immediate(Smi::FromInt(instr->hydrogen()->flags())));
@@ -2741,7 +3463,8 @@ void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
2741
3463
  void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
2742
3464
  Register context = ToRegister(instr->context());
2743
3465
  Register result = ToRegister(instr->result());
2744
- __ mov(result, Operand(context, Context::SlotOffset(Context::GLOBAL_INDEX)));
3466
+ __ mov(result,
3467
+ Operand(context, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2745
3468
  }
2746
3469
 
2747
3470
 
@@ -2768,17 +3491,8 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
2768
3491
  __ LoadHeapObject(edi, function);
2769
3492
  }
2770
3493
 
2771
- // Change context if needed.
2772
- bool change_context =
2773
- (info()->closure()->context() != function->context()) ||
2774
- scope()->contains_with() ||
2775
- (scope()->num_heap_slots() > 0);
2776
-
2777
- if (change_context) {
2778
- __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2779
- } else {
2780
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2781
- }
3494
+ // Change context.
3495
+ __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2782
3496
 
2783
3497
  // Set eax to arguments count if adaption is not needed. Assumes that eax
2784
3498
  // is available to write to at this point.
@@ -2897,6 +3611,7 @@ void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
2897
3611
  ASSERT(instr->value()->Equals(instr->result()));
2898
3612
  Representation r = instr->hydrogen()->value()->representation();
2899
3613
 
3614
+ CpuFeatures::Scope scope(SSE2);
2900
3615
  if (r.IsDouble()) {
2901
3616
  XMMRegister scratch = xmm0;
2902
3617
  XMMRegister input_reg = ToDoubleRegister(instr->value());
@@ -2907,7 +3622,7 @@ void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
2907
3622
  EmitIntegerMathAbs(instr);
2908
3623
  } else { // Tagged case.
2909
3624
  DeferredMathAbsTaggedHeapNumber* deferred =
2910
- new DeferredMathAbsTaggedHeapNumber(this, instr);
3625
+ new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr);
2911
3626
  Register input_reg = ToRegister(instr->value());
2912
3627
  // Smi check.
2913
3628
  __ JumpIfNotSmi(input_reg, deferred->entry());
@@ -2918,6 +3633,7 @@ void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
2918
3633
 
2919
3634
 
2920
3635
  void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
3636
+ CpuFeatures::Scope scope(SSE2);
2921
3637
  XMMRegister xmm_scratch = xmm0;
2922
3638
  Register output_reg = ToRegister(instr->result());
2923
3639
  XMMRegister input_reg = ToDoubleRegister(instr->value());
@@ -2941,8 +3657,7 @@ void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
2941
3657
  __ cmp(output_reg, 0x80000000u);
2942
3658
  DeoptimizeIf(equal, instr->environment());
2943
3659
  } else {
2944
- Label negative_sign;
2945
- Label done;
3660
+ Label negative_sign, done;
2946
3661
  // Deoptimize on unordered.
2947
3662
  __ xorps(xmm_scratch, xmm_scratch); // Zero the register.
2948
3663
  __ ucomisd(input_reg, xmm_scratch);
@@ -2968,9 +3683,9 @@ void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
2968
3683
  DeoptimizeIf(equal, instr->environment());
2969
3684
  __ jmp(&done, Label::kNear);
2970
3685
 
2971
- // Non-zero negative reaches here
3686
+ // Non-zero negative reaches here.
2972
3687
  __ bind(&negative_sign);
2973
- // Truncate, then compare and compensate
3688
+ // Truncate, then compare and compensate.
2974
3689
  __ cvttsd2si(output_reg, Operand(input_reg));
2975
3690
  __ cvtsi2sd(xmm_scratch, output_reg);
2976
3691
  __ ucomisd(input_reg, xmm_scratch);
@@ -2983,6 +3698,7 @@ void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
2983
3698
  }
2984
3699
 
2985
3700
  void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
3701
+ CpuFeatures::Scope scope(SSE2);
2986
3702
  XMMRegister xmm_scratch = xmm0;
2987
3703
  Register output_reg = ToRegister(instr->result());
2988
3704
  XMMRegister input_reg = ToDoubleRegister(instr->value());
@@ -3028,6 +3744,7 @@ void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
3028
3744
 
3029
3745
 
3030
3746
  void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
3747
+ CpuFeatures::Scope scope(SSE2);
3031
3748
  XMMRegister input_reg = ToDoubleRegister(instr->value());
3032
3749
  ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
3033
3750
  __ sqrtsd(input_reg, input_reg);
@@ -3035,6 +3752,7 @@ void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
3035
3752
 
3036
3753
 
3037
3754
  void LCodeGen::DoMathPowHalf(LMathPowHalf* instr) {
3755
+ CpuFeatures::Scope scope(SSE2);
3038
3756
  XMMRegister xmm_scratch = xmm0;
3039
3757
  XMMRegister input_reg = ToDoubleRegister(instr->value());
3040
3758
  Register scratch = ToRegister(instr->temp());
@@ -3072,11 +3790,11 @@ void LCodeGen::DoPower(LPower* instr) {
3072
3790
  Representation exponent_type = instr->hydrogen()->right()->representation();
3073
3791
  // Having marked this as a call, we can use any registers.
3074
3792
  // Just make sure that the input/output registers are the expected ones.
3075
- ASSERT(!instr->InputAt(1)->IsDoubleRegister() ||
3076
- ToDoubleRegister(instr->InputAt(1)).is(xmm1));
3077
- ASSERT(!instr->InputAt(1)->IsRegister() ||
3078
- ToRegister(instr->InputAt(1)).is(eax));
3079
- ASSERT(ToDoubleRegister(instr->InputAt(0)).is(xmm2));
3793
+ ASSERT(!instr->right()->IsDoubleRegister() ||
3794
+ ToDoubleRegister(instr->right()).is(xmm1));
3795
+ ASSERT(!instr->right()->IsRegister() ||
3796
+ ToRegister(instr->right()).is(eax));
3797
+ ASSERT(ToDoubleRegister(instr->left()).is(xmm2));
3080
3798
  ASSERT(ToDoubleRegister(instr->result()).is(xmm3));
3081
3799
 
3082
3800
  if (exponent_type.IsTagged()) {
@@ -3109,21 +3827,22 @@ void LCodeGen::DoRandom(LRandom* instr) {
3109
3827
  LRandom* instr_;
3110
3828
  };
3111
3829
 
3112
- DeferredDoRandom* deferred = new DeferredDoRandom(this, instr);
3830
+ DeferredDoRandom* deferred = new(zone()) DeferredDoRandom(this, instr);
3113
3831
 
3832
+ CpuFeatures::Scope scope(SSE2);
3114
3833
  // Having marked this instruction as a call we can use any
3115
3834
  // registers.
3116
3835
  ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
3117
- ASSERT(ToRegister(instr->InputAt(0)).is(eax));
3836
+ ASSERT(ToRegister(instr->global_object()).is(eax));
3118
3837
  // Assert that the register size is indeed the size of each seed.
3119
3838
  static const int kSeedSize = sizeof(uint32_t);
3120
3839
  STATIC_ASSERT(kPointerSize == kSeedSize);
3121
3840
 
3122
- __ mov(eax, FieldOperand(eax, GlobalObject::kGlobalContextOffset));
3841
+ __ mov(eax, FieldOperand(eax, GlobalObject::kNativeContextOffset));
3123
3842
  static const int kRandomSeedOffset =
3124
3843
  FixedArray::kHeaderSize + Context::RANDOM_SEED_INDEX * kPointerSize;
3125
3844
  __ mov(ebx, FieldOperand(eax, kRandomSeedOffset));
3126
- // ebx: FixedArray of the global context's random seeds
3845
+ // ebx: FixedArray of the native context's random seeds
3127
3846
 
3128
3847
  // Load state[0].
3129
3848
  __ mov(ecx, FieldOperand(ebx, ByteArray::kHeaderSize));
@@ -3178,6 +3897,7 @@ void LCodeGen::DoDeferredRandom(LRandom* instr) {
3178
3897
 
3179
3898
 
3180
3899
  void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
3900
+ CpuFeatures::Scope scope(SSE2);
3181
3901
  ASSERT(instr->value()->Equals(instr->result()));
3182
3902
  XMMRegister input_reg = ToDoubleRegister(instr->value());
3183
3903
  Label positive, done, zero;
@@ -3208,6 +3928,17 @@ void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
3208
3928
  }
3209
3929
 
3210
3930
 
3931
+ void LCodeGen::DoMathExp(LMathExp* instr) {
3932
+ CpuFeatures::Scope scope(SSE2);
3933
+ XMMRegister input = ToDoubleRegister(instr->value());
3934
+ XMMRegister result = ToDoubleRegister(instr->result());
3935
+ Register temp1 = ToRegister(instr->temp1());
3936
+ Register temp2 = ToRegister(instr->temp2());
3937
+
3938
+ MathExpGenerator::EmitMathExp(masm(), input, result, xmm0, temp1, temp2);
3939
+ }
3940
+
3941
+
3211
3942
  void LCodeGen::DoMathTan(LUnaryMathOperation* instr) {
3212
3943
  ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
3213
3944
  TranscendentalCacheStub stub(TranscendentalCache::TAN,
@@ -3371,8 +4102,8 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
3371
4102
  if (!instr->hydrogen()->NeedsWriteBarrierForMap()) {
3372
4103
  __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition());
3373
4104
  } else {
3374
- Register temp = ToRegister(instr->TempAt(0));
3375
- Register temp_map = ToRegister(instr->TempAt(1));
4105
+ Register temp = ToRegister(instr->temp());
4106
+ Register temp_map = ToRegister(instr->temp_map());
3376
4107
  __ mov(temp_map, instr->transition());
3377
4108
  __ mov(FieldOperand(object, HeapObject::kMapOffset), temp_map);
3378
4109
  // Update the write barrier for the map field.
@@ -3393,7 +4124,7 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
3393
4124
  if (instr->is_in_object()) {
3394
4125
  __ mov(FieldOperand(object, offset), value);
3395
4126
  if (instr->hydrogen()->NeedsWriteBarrier()) {
3396
- Register temp = ToRegister(instr->TempAt(0));
4127
+ Register temp = ToRegister(instr->temp());
3397
4128
  // Update the write barrier for the object for in-object properties.
3398
4129
  __ RecordWriteField(object,
3399
4130
  offset,
@@ -3404,7 +4135,7 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
3404
4135
  check_needed);
3405
4136
  }
3406
4137
  } else {
3407
- Register temp = ToRegister(instr->TempAt(0));
4138
+ Register temp = ToRegister(instr->temp());
3408
4139
  __ mov(temp, FieldOperand(object, JSObject::kPropertiesOffset));
3409
4140
  __ mov(FieldOperand(temp, offset), value);
3410
4141
  if (instr->hydrogen()->NeedsWriteBarrier()) {
@@ -3435,10 +4166,36 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
3435
4166
  }
3436
4167
 
3437
4168
 
4169
+ void LCodeGen::DeoptIfTaggedButNotSmi(LEnvironment* environment,
4170
+ HValue* value,
4171
+ LOperand* operand) {
4172
+ if (value->representation().IsTagged() && !value->type().IsSmi()) {
4173
+ if (operand->IsRegister()) {
4174
+ __ test(ToRegister(operand), Immediate(kSmiTagMask));
4175
+ } else {
4176
+ __ test(ToOperand(operand), Immediate(kSmiTagMask));
4177
+ }
4178
+ DeoptimizeIf(not_zero, environment);
4179
+ }
4180
+ }
4181
+
4182
+
3438
4183
  void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
4184
+ DeoptIfTaggedButNotSmi(instr->environment(),
4185
+ instr->hydrogen()->length(),
4186
+ instr->length());
4187
+ DeoptIfTaggedButNotSmi(instr->environment(),
4188
+ instr->hydrogen()->index(),
4189
+ instr->index());
3439
4190
  if (instr->index()->IsConstantOperand()) {
3440
- __ cmp(ToOperand(instr->length()),
3441
- Immediate(ToInteger32(LConstantOperand::cast(instr->index()))));
4191
+ int constant_index =
4192
+ ToInteger32(LConstantOperand::cast(instr->index()));
4193
+ if (instr->hydrogen()->length()->representation().IsTagged()) {
4194
+ __ cmp(ToOperand(instr->length()),
4195
+ Immediate(Smi::FromInt(constant_index)));
4196
+ } else {
4197
+ __ cmp(ToOperand(instr->length()), Immediate(constant_index));
4198
+ }
3442
4199
  DeoptimizeIf(below_equal, instr->environment());
3443
4200
  } else {
3444
4201
  __ cmp(ToRegister(instr->index()), ToOperand(instr->length()));
@@ -3447,18 +4204,27 @@ void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
3447
4204
  }
3448
4205
 
3449
4206
 
3450
- void LCodeGen::DoStoreKeyedSpecializedArrayElement(
3451
- LStoreKeyedSpecializedArrayElement* instr) {
4207
+ void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) {
3452
4208
  ElementsKind elements_kind = instr->elements_kind();
3453
- Operand operand(BuildFastArrayOperand(instr->external_pointer(),
3454
- instr->key(),
3455
- elements_kind,
3456
- 0,
3457
- instr->additional_index()));
4209
+ LOperand* key = instr->key();
4210
+ if (!key->IsConstantOperand() &&
4211
+ ExternalArrayOpRequiresTemp(instr->hydrogen()->key()->representation(),
4212
+ elements_kind)) {
4213
+ __ SmiUntag(ToRegister(key));
4214
+ }
4215
+ Operand operand(BuildFastArrayOperand(
4216
+ instr->elements(),
4217
+ key,
4218
+ instr->hydrogen()->key()->representation(),
4219
+ elements_kind,
4220
+ 0,
4221
+ instr->additional_index()));
3458
4222
  if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
4223
+ CpuFeatures::Scope scope(SSE2);
3459
4224
  __ cvtsd2ss(xmm0, ToDoubleRegister(instr->value()));
3460
4225
  __ movss(operand, xmm0);
3461
4226
  } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
4227
+ CpuFeatures::Scope scope(SSE2);
3462
4228
  __ movdbl(operand, ToDoubleRegister(instr->value()));
3463
4229
  } else {
3464
4230
  Register value = ToRegister(instr->value());
@@ -3493,14 +4259,42 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
3493
4259
  }
3494
4260
 
3495
4261
 
3496
- void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
4262
+ void LCodeGen::DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr) {
4263
+ CpuFeatures::Scope scope(SSE2);
4264
+ XMMRegister value = ToDoubleRegister(instr->value());
4265
+
4266
+ if (instr->NeedsCanonicalization()) {
4267
+ Label have_value;
4268
+
4269
+ __ ucomisd(value, value);
4270
+ __ j(parity_odd, &have_value); // NaN.
4271
+
4272
+ ExternalReference canonical_nan_reference =
4273
+ ExternalReference::address_of_canonical_non_hole_nan();
4274
+ __ movdbl(value, Operand::StaticVariable(canonical_nan_reference));
4275
+ __ bind(&have_value);
4276
+ }
4277
+
4278
+ Operand double_store_operand = BuildFastArrayOperand(
4279
+ instr->elements(),
4280
+ instr->key(),
4281
+ instr->hydrogen()->key()->representation(),
4282
+ FAST_DOUBLE_ELEMENTS,
4283
+ FixedDoubleArray::kHeaderSize - kHeapObjectTag,
4284
+ instr->additional_index());
4285
+ __ movdbl(double_store_operand, value);
4286
+ }
4287
+
4288
+
4289
+ void LCodeGen::DoStoreKeyedFixedArray(LStoreKeyed* instr) {
3497
4290
  Register value = ToRegister(instr->value());
3498
- Register elements = ToRegister(instr->object());
4291
+ Register elements = ToRegister(instr->elements());
3499
4292
  Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
3500
4293
 
3501
4294
  Operand operand = BuildFastArrayOperand(
3502
- instr->object(),
4295
+ instr->elements(),
3503
4296
  instr->key(),
4297
+ instr->hydrogen()->key()->representation(),
3504
4298
  FAST_ELEMENTS,
3505
4299
  FixedArray::kHeaderSize - kHeapObjectTag,
3506
4300
  instr->additional_index());
@@ -3523,29 +4317,15 @@ void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
3523
4317
  }
3524
4318
 
3525
4319
 
3526
- void LCodeGen::DoStoreKeyedFastDoubleElement(
3527
- LStoreKeyedFastDoubleElement* instr) {
3528
- XMMRegister value = ToDoubleRegister(instr->value());
3529
-
3530
- if (instr->NeedsCanonicalization()) {
3531
- Label have_value;
3532
-
3533
- __ ucomisd(value, value);
3534
- __ j(parity_odd, &have_value); // NaN.
3535
-
3536
- ExternalReference canonical_nan_reference =
3537
- ExternalReference::address_of_canonical_non_hole_nan();
3538
- __ movdbl(value, Operand::StaticVariable(canonical_nan_reference));
3539
- __ bind(&have_value);
4320
+ void LCodeGen::DoStoreKeyed(LStoreKeyed* instr) {
4321
+ // By cases...external, fast-double, fast
4322
+ if (instr->is_external()) {
4323
+ DoStoreKeyedExternalArray(instr);
4324
+ } else if (instr->hydrogen()->value()->representation().IsDouble()) {
4325
+ DoStoreKeyedFixedDoubleArray(instr);
4326
+ } else {
4327
+ DoStoreKeyedFixedArray(instr);
3540
4328
  }
3541
-
3542
- Operand double_store_operand = BuildFastArrayOperand(
3543
- instr->elements(),
3544
- instr->key(),
3545
- FAST_DOUBLE_ELEMENTS,
3546
- FixedDoubleArray::kHeaderSize - kHeapObjectTag,
3547
- instr->additional_index());
3548
- __ movdbl(double_store_operand, value);
3549
4329
  }
3550
4330
 
3551
4331
 
@@ -3562,29 +4342,58 @@ void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
3562
4342
  }
3563
4343
 
3564
4344
 
4345
+ void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) {
4346
+ Register object = ToRegister(instr->object());
4347
+ Register temp = ToRegister(instr->temp());
4348
+ __ TestJSArrayForAllocationSiteInfo(object, temp);
4349
+ DeoptimizeIf(equal, instr->environment());
4350
+ }
4351
+
4352
+
3565
4353
  void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
3566
4354
  Register object_reg = ToRegister(instr->object());
3567
- Register new_map_reg = ToRegister(instr->new_map_reg());
3568
4355
 
3569
4356
  Handle<Map> from_map = instr->original_map();
3570
4357
  Handle<Map> to_map = instr->transitioned_map();
3571
- ElementsKind from_kind = from_map->elements_kind();
3572
- ElementsKind to_kind = to_map->elements_kind();
4358
+ ElementsKind from_kind = instr->from_kind();
4359
+ ElementsKind to_kind = instr->to_kind();
3573
4360
 
3574
4361
  Label not_applicable;
4362
+ bool is_simple_map_transition =
4363
+ IsSimpleMapChangeTransition(from_kind, to_kind);
4364
+ Label::Distance branch_distance =
4365
+ is_simple_map_transition ? Label::kNear : Label::kFar;
3575
4366
  __ cmp(FieldOperand(object_reg, HeapObject::kMapOffset), from_map);
3576
- __ j(not_equal, &not_applicable);
3577
- __ mov(new_map_reg, to_map);
3578
- if (IsSimpleMapChangeTransition(from_kind, to_kind)) {
3579
- Register object_reg = ToRegister(instr->object());
3580
- __ mov(FieldOperand(object_reg, HeapObject::kMapOffset), new_map_reg);
4367
+ __ j(not_equal, &not_applicable, branch_distance);
4368
+ if (is_simple_map_transition) {
4369
+ Register new_map_reg = ToRegister(instr->new_map_temp());
4370
+ Handle<Map> map = instr->hydrogen()->transitioned_map();
4371
+ __ mov(FieldOperand(object_reg, HeapObject::kMapOffset),
4372
+ Immediate(map));
3581
4373
  // Write barrier.
3582
- ASSERT_NE(instr->temp_reg(), NULL);
3583
- __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg,
3584
- ToRegister(instr->temp_reg()), kDontSaveFPRegs);
4374
+ ASSERT_NE(instr->temp(), NULL);
4375
+ __ RecordWriteForMap(object_reg, to_map, new_map_reg,
4376
+ ToRegister(instr->temp()),
4377
+ kDontSaveFPRegs);
4378
+ } else if (FLAG_compiled_transitions) {
4379
+ PushSafepointRegistersScope scope(this);
4380
+ if (!object_reg.is(eax)) {
4381
+ __ push(object_reg);
4382
+ }
4383
+ LoadContextFromDeferred(instr->context());
4384
+ if (!object_reg.is(eax)) {
4385
+ __ pop(eax);
4386
+ }
4387
+ __ mov(ebx, to_map);
4388
+ TransitionElementsKindStub stub(from_kind, to_kind);
4389
+ __ CallStub(&stub);
4390
+ RecordSafepointWithRegisters(
4391
+ instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
3585
4392
  } else if (IsFastSmiElementsKind(from_kind) &&
3586
4393
  IsFastDoubleElementsKind(to_kind)) {
3587
- Register fixed_object_reg = ToRegister(instr->temp_reg());
4394
+ Register new_map_reg = ToRegister(instr->new_map_temp());
4395
+ __ mov(new_map_reg, to_map);
4396
+ Register fixed_object_reg = ToRegister(instr->temp());
3588
4397
  ASSERT(fixed_object_reg.is(edx));
3589
4398
  ASSERT(new_map_reg.is(ebx));
3590
4399
  __ mov(fixed_object_reg, object_reg);
@@ -3592,7 +4401,9 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
3592
4401
  RelocInfo::CODE_TARGET, instr);
3593
4402
  } else if (IsFastDoubleElementsKind(from_kind) &&
3594
4403
  IsFastObjectElementsKind(to_kind)) {
3595
- Register fixed_object_reg = ToRegister(instr->temp_reg());
4404
+ Register new_map_reg = ToRegister(instr->new_map_temp());
4405
+ __ mov(new_map_reg, to_map);
4406
+ Register fixed_object_reg = ToRegister(instr->temp());
3596
4407
  ASSERT(fixed_object_reg.is(edx));
3597
4408
  ASSERT(new_map_reg.is(ebx));
3598
4409
  __ mov(fixed_object_reg, object_reg);
@@ -3617,7 +4428,7 @@ void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
3617
4428
  };
3618
4429
 
3619
4430
  DeferredStringCharCodeAt* deferred =
3620
- new DeferredStringCharCodeAt(this, instr);
4431
+ new(zone()) DeferredStringCharCodeAt(this, instr);
3621
4432
 
3622
4433
  StringCharLoadGenerator::Generate(masm(),
3623
4434
  factory(),
@@ -3653,9 +4464,7 @@ void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
3653
4464
  }
3654
4465
  CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2,
3655
4466
  instr, instr->context());
3656
- if (FLAG_debug_code) {
3657
- __ AbortIfNotSmi(eax);
3658
- }
4467
+ __ AssertSmi(eax);
3659
4468
  __ SmiUntag(eax);
3660
4469
  __ StoreToSafepointRegisterSlot(result, eax);
3661
4470
  }
@@ -3673,14 +4482,14 @@ void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
3673
4482
  };
3674
4483
 
3675
4484
  DeferredStringCharFromCode* deferred =
3676
- new DeferredStringCharFromCode(this, instr);
4485
+ new(zone()) DeferredStringCharFromCode(this, instr);
3677
4486
 
3678
4487
  ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
3679
4488
  Register char_code = ToRegister(instr->char_code());
3680
4489
  Register result = ToRegister(instr->result());
3681
4490
  ASSERT(!char_code.is(result));
3682
4491
 
3683
- __ cmp(char_code, String::kMaxAsciiCharCode);
4492
+ __ cmp(char_code, String::kMaxOneByteCharCode);
3684
4493
  __ j(above, deferred->entry());
3685
4494
  __ Set(result, Immediate(factory()->single_character_string_cache()));
3686
4495
  __ mov(result, FieldOperand(result,
@@ -3725,11 +4534,28 @@ void LCodeGen::DoStringAdd(LStringAdd* instr) {
3725
4534
 
3726
4535
 
3727
4536
  void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
3728
- LOperand* input = instr->InputAt(0);
3729
- ASSERT(input->IsRegister() || input->IsStackSlot());
4537
+ if (CpuFeatures::IsSupported(SSE2)) {
4538
+ CpuFeatures::Scope scope(SSE2);
4539
+ LOperand* input = instr->value();
4540
+ ASSERT(input->IsRegister() || input->IsStackSlot());
4541
+ LOperand* output = instr->result();
4542
+ ASSERT(output->IsDoubleRegister());
4543
+ __ cvtsi2sd(ToDoubleRegister(output), ToOperand(input));
4544
+ } else {
4545
+ UNREACHABLE();
4546
+ }
4547
+ }
4548
+
4549
+
4550
+ void LCodeGen::DoUint32ToDouble(LUint32ToDouble* instr) {
4551
+ CpuFeatures::Scope scope(SSE2);
4552
+ LOperand* input = instr->value();
3730
4553
  LOperand* output = instr->result();
3731
- ASSERT(output->IsDoubleRegister());
3732
- __ cvtsi2sd(ToDoubleRegister(output), ToOperand(input));
4554
+ LOperand* temp = instr->temp();
4555
+
4556
+ __ LoadUint32(ToDoubleRegister(output),
4557
+ ToRegister(input),
4558
+ ToDoubleRegister(temp));
3733
4559
  }
3734
4560
 
3735
4561
 
@@ -3738,38 +4564,91 @@ void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
3738
4564
  public:
3739
4565
  DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
3740
4566
  : LDeferredCode(codegen), instr_(instr) { }
3741
- virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
4567
+ virtual void Generate() {
4568
+ codegen()->DoDeferredNumberTagI(instr_, instr_->value(), SIGNED_INT32);
4569
+ }
3742
4570
  virtual LInstruction* instr() { return instr_; }
3743
4571
  private:
3744
4572
  LNumberTagI* instr_;
3745
4573
  };
3746
4574
 
3747
- LOperand* input = instr->InputAt(0);
4575
+ LOperand* input = instr->value();
3748
4576
  ASSERT(input->IsRegister() && input->Equals(instr->result()));
3749
4577
  Register reg = ToRegister(input);
3750
4578
 
3751
- DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
4579
+ DeferredNumberTagI* deferred = new(zone()) DeferredNumberTagI(this, instr);
3752
4580
  __ SmiTag(reg);
3753
4581
  __ j(overflow, deferred->entry());
3754
4582
  __ bind(deferred->exit());
3755
4583
  }
3756
4584
 
3757
4585
 
3758
- void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
4586
+ void LCodeGen::DoNumberTagU(LNumberTagU* instr) {
4587
+ class DeferredNumberTagU: public LDeferredCode {
4588
+ public:
4589
+ DeferredNumberTagU(LCodeGen* codegen, LNumberTagU* instr)
4590
+ : LDeferredCode(codegen), instr_(instr) { }
4591
+ virtual void Generate() {
4592
+ codegen()->DoDeferredNumberTagI(instr_, instr_->value(), UNSIGNED_INT32);
4593
+ }
4594
+ virtual LInstruction* instr() { return instr_; }
4595
+ private:
4596
+ LNumberTagU* instr_;
4597
+ };
4598
+
4599
+ LOperand* input = instr->value();
4600
+ ASSERT(input->IsRegister() && input->Equals(instr->result()));
4601
+ Register reg = ToRegister(input);
4602
+
4603
+ DeferredNumberTagU* deferred = new(zone()) DeferredNumberTagU(this, instr);
4604
+ __ cmp(reg, Immediate(Smi::kMaxValue));
4605
+ __ j(above, deferred->entry());
4606
+ __ SmiTag(reg);
4607
+ __ bind(deferred->exit());
4608
+ }
4609
+
4610
+
4611
+ void LCodeGen::DoDeferredNumberTagI(LInstruction* instr,
4612
+ LOperand* value,
4613
+ IntegerSignedness signedness) {
3759
4614
  Label slow;
3760
- Register reg = ToRegister(instr->InputAt(0));
4615
+ Register reg = ToRegister(value);
3761
4616
  Register tmp = reg.is(eax) ? ecx : eax;
3762
4617
 
3763
4618
  // Preserve the value of all registers.
3764
4619
  PushSafepointRegistersScope scope(this);
3765
4620
 
3766
- // There was overflow, so bits 30 and 31 of the original integer
3767
- // disagree. Try to allocate a heap number in new space and store
3768
- // the value in there. If that fails, call the runtime system.
3769
4621
  Label done;
3770
- __ SmiUntag(reg);
3771
- __ xor_(reg, 0x80000000);
3772
- __ cvtsi2sd(xmm0, Operand(reg));
4622
+
4623
+ if (signedness == SIGNED_INT32) {
4624
+ // There was overflow, so bits 30 and 31 of the original integer
4625
+ // disagree. Try to allocate a heap number in new space and store
4626
+ // the value in there. If that fails, call the runtime system.
4627
+ __ SmiUntag(reg);
4628
+ __ xor_(reg, 0x80000000);
4629
+ if (CpuFeatures::IsSupported(SSE2)) {
4630
+ CpuFeatures::Scope feature_scope(SSE2);
4631
+ __ cvtsi2sd(xmm0, Operand(reg));
4632
+ } else {
4633
+ __ push(reg);
4634
+ __ fild_s(Operand(esp, 0));
4635
+ __ pop(reg);
4636
+ }
4637
+ } else {
4638
+ if (CpuFeatures::IsSupported(SSE2)) {
4639
+ CpuFeatures::Scope feature_scope(SSE2);
4640
+ __ LoadUint32(xmm0, reg, xmm1);
4641
+ } else {
4642
+ // There's no fild variant for unsigned values, so zero-extend to a 64-bit
4643
+ // int manually.
4644
+ __ push(Immediate(0));
4645
+ __ push(reg);
4646
+ __ fild_d(Operand(esp, 0));
4647
+ __ pop(reg);
4648
+ __ pop(reg);
4649
+ }
4650
+ }
4651
+
3773
4652
  if (FLAG_inline_new) {
3774
4653
  __ AllocateHeapNumber(reg, tmp, no_reg, &slow);
3775
4654
  __ jmp(&done, Label::kNear);
@@ -3796,7 +4675,12 @@ void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
3796
4675
  // Done. Put the value in xmm0 into the value of the allocated heap
3797
4676
  // number.
3798
4677
  __ bind(&done);
3799
- __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), xmm0);
4678
+ if (CpuFeatures::IsSupported(SSE2)) {
4679
+ CpuFeatures::Scope feature_scope(SSE2);
4680
+ __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), xmm0);
4681
+ } else {
4682
+ __ fstp_d(FieldOperand(reg, HeapNumber::kValueOffset));
4683
+ }
3800
4684
  __ StoreToSafepointRegisterSlot(reg, reg);
3801
4685
  }
3802
4686
 
@@ -3812,18 +4696,83 @@ void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
3812
4696
  LNumberTagD* instr_;
3813
4697
  };
3814
4698
 
3815
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
3816
4699
  Register reg = ToRegister(instr->result());
3817
- Register tmp = ToRegister(instr->TempAt(0));
3818
4700
 
3819
- DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
4701
+ bool convert_hole = false;
4702
+ HValue* change_input = instr->hydrogen()->value();
4703
+ if (change_input->IsLoadKeyed()) {
4704
+ HLoadKeyed* load = HLoadKeyed::cast(change_input);
4705
+ convert_hole = load->UsesMustHandleHole();
4706
+ }
4707
+
4708
+ Label no_special_nan_handling;
4709
+ Label done;
4710
+ if (convert_hole) {
4711
+ bool use_sse2 = CpuFeatures::IsSupported(SSE2);
4712
+ if (use_sse2) {
4713
+ CpuFeatures::Scope scope(SSE2);
4714
+ XMMRegister input_reg = ToDoubleRegister(instr->value());
4715
+ __ ucomisd(input_reg, input_reg);
4716
+ } else {
4717
+ if (!IsX87TopOfStack(instr->value())) {
4718
+ __ fld_d(ToOperand(instr->value()));
4719
+ }
4720
+ __ fld(0);
4721
+ __ fld(0);
4722
+ __ FCmp();
4723
+ }
4724
+
4725
+ __ j(parity_odd, &no_special_nan_handling);
4726
+ __ sub(esp, Immediate(kDoubleSize));
4727
+ if (use_sse2) {
4728
+ CpuFeatures::Scope scope(SSE2);
4729
+ XMMRegister input_reg = ToDoubleRegister(instr->value());
4730
+ __ movdbl(MemOperand(esp, 0), input_reg);
4731
+ } else {
4732
+ __ fld(0);
4733
+ __ fstp_d(MemOperand(esp, 0));
4734
+ }
4735
+ __ cmp(MemOperand(esp, sizeof(kHoleNanLower32)),
4736
+ Immediate(kHoleNanUpper32));
4737
+ Label canonicalize;
4738
+ __ j(not_equal, &canonicalize);
4739
+ __ add(esp, Immediate(kDoubleSize));
4740
+ __ mov(reg, factory()->the_hole_value());
4741
+ __ jmp(&done);
4742
+ __ bind(&canonicalize);
4743
+ __ add(esp, Immediate(kDoubleSize));
4744
+ ExternalReference nan =
4745
+ ExternalReference::address_of_canonical_non_hole_nan();
4746
+ if (use_sse2) {
4747
+ CpuFeatures::Scope scope(SSE2);
4748
+ XMMRegister input_reg = ToDoubleRegister(instr->value());
4749
+ __ movdbl(input_reg, Operand::StaticVariable(nan));
4750
+ } else {
4751
+ __ fstp(0);
4752
+ __ fld_d(Operand::StaticVariable(nan));
4753
+ }
4754
+ }
4755
+
4756
+ __ bind(&no_special_nan_handling);
4757
+ DeferredNumberTagD* deferred = new(zone()) DeferredNumberTagD(this, instr);
3820
4758
  if (FLAG_inline_new) {
4759
+ Register tmp = ToRegister(instr->temp());
3821
4760
  __ AllocateHeapNumber(reg, tmp, no_reg, deferred->entry());
3822
4761
  } else {
3823
4762
  __ jmp(deferred->entry());
3824
4763
  }
3825
4764
  __ bind(deferred->exit());
3826
- __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), input_reg);
4765
+ if (CpuFeatures::IsSupported(SSE2)) {
4766
+ CpuFeatures::Scope scope(SSE2);
4767
+ XMMRegister input_reg = ToDoubleRegister(instr->value());
4768
+ __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), input_reg);
4769
+ } else {
4770
+ if (!IsX87TopOfStack(instr->value())) {
4771
+ __ fld_d(ToOperand(instr->value()));
4772
+ }
4773
+ __ fstp_d(FieldOperand(reg, HeapNumber::kValueOffset));
4774
+ }
4775
+ __ bind(&done);
3827
4776
  }
3828
4777
 
3829
4778
 
@@ -3849,7 +4798,7 @@ void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
3849
4798
 
3850
4799
 
3851
4800
  void LCodeGen::DoSmiTag(LSmiTag* instr) {
3852
- LOperand* input = instr->InputAt(0);
4801
+ LOperand* input = instr->value();
3853
4802
  ASSERT(input->IsRegister() && input->Equals(instr->result()));
3854
4803
  ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
3855
4804
  __ SmiTag(ToRegister(input));
@@ -3857,11 +4806,13 @@ void LCodeGen::DoSmiTag(LSmiTag* instr) {
3857
4806
 
3858
4807
 
3859
4808
  void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
3860
- LOperand* input = instr->InputAt(0);
4809
+ LOperand* input = instr->value();
3861
4810
  ASSERT(input->IsRegister() && input->Equals(instr->result()));
3862
4811
  if (instr->needs_check()) {
3863
4812
  __ test(ToRegister(input), Immediate(kSmiTagMask));
3864
4813
  DeoptimizeIf(not_zero, instr->environment());
4814
+ } else {
4815
+ __ AssertSmi(ToRegister(input));
3865
4816
  }
3866
4817
  __ SmiUntag(ToRegister(input));
3867
4818
  }
@@ -3872,44 +4823,59 @@ void LCodeGen::EmitNumberUntagD(Register input_reg,
3872
4823
  XMMRegister result_reg,
3873
4824
  bool deoptimize_on_undefined,
3874
4825
  bool deoptimize_on_minus_zero,
3875
- LEnvironment* env) {
4826
+ LEnvironment* env,
4827
+ NumberUntagDMode mode) {
3876
4828
  Label load_smi, done;
3877
4829
 
3878
- // Smi check.
3879
- __ JumpIfSmi(input_reg, &load_smi, Label::kNear);
4830
+ if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) {
4831
+ // Smi check.
4832
+ __ JumpIfSmi(input_reg, &load_smi, Label::kNear);
3880
4833
 
3881
- // Heap number map check.
3882
- __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
3883
- factory()->heap_number_map());
3884
- if (deoptimize_on_undefined) {
3885
- DeoptimizeIf(not_equal, env);
3886
- } else {
3887
- Label heap_number;
3888
- __ j(equal, &heap_number, Label::kNear);
4834
+ // Heap number map check.
4835
+ __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
4836
+ factory()->heap_number_map());
4837
+ if (deoptimize_on_undefined) {
4838
+ DeoptimizeIf(not_equal, env);
4839
+ } else {
4840
+ Label heap_number;
4841
+ __ j(equal, &heap_number, Label::kNear);
3889
4842
 
3890
- __ cmp(input_reg, factory()->undefined_value());
3891
- DeoptimizeIf(not_equal, env);
4843
+ __ cmp(input_reg, factory()->undefined_value());
4844
+ DeoptimizeIf(not_equal, env);
3892
4845
 
3893
- // Convert undefined to NaN.
3894
- ExternalReference nan =
3895
- ExternalReference::address_of_canonical_non_hole_nan();
3896
- __ movdbl(result_reg, Operand::StaticVariable(nan));
3897
- __ jmp(&done, Label::kNear);
4846
+ // Convert undefined to NaN.
4847
+ ExternalReference nan =
4848
+ ExternalReference::address_of_canonical_non_hole_nan();
4849
+ __ movdbl(result_reg, Operand::StaticVariable(nan));
4850
+ __ jmp(&done, Label::kNear);
3898
4851
 
3899
- __ bind(&heap_number);
3900
- }
3901
- // Heap number to XMM conversion.
3902
- __ movdbl(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
3903
- if (deoptimize_on_minus_zero) {
3904
- XMMRegister xmm_scratch = xmm0;
3905
- __ xorps(xmm_scratch, xmm_scratch);
3906
- __ ucomisd(result_reg, xmm_scratch);
3907
- __ j(not_zero, &done, Label::kNear);
3908
- __ movmskpd(temp_reg, result_reg);
3909
- __ test_b(temp_reg, 1);
3910
- DeoptimizeIf(not_zero, env);
4852
+ __ bind(&heap_number);
4853
+ }
4854
+ // Heap number to XMM conversion.
4855
+ __ movdbl(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
4856
+ if (deoptimize_on_minus_zero) {
4857
+ XMMRegister xmm_scratch = xmm0;
4858
+ __ xorps(xmm_scratch, xmm_scratch);
4859
+ __ ucomisd(result_reg, xmm_scratch);
4860
+ __ j(not_zero, &done, Label::kNear);
4861
+ __ movmskpd(temp_reg, result_reg);
4862
+ __ test_b(temp_reg, 1);
4863
+ DeoptimizeIf(not_zero, env);
4864
+ }
4865
+ __ jmp(&done, Label::kNear);
4866
+ } else if (mode == NUMBER_CANDIDATE_IS_SMI_OR_HOLE) {
4867
+ __ test(input_reg, Immediate(kSmiTagMask));
4868
+ DeoptimizeIf(not_equal, env);
4869
+ } else if (mode == NUMBER_CANDIDATE_IS_SMI_CONVERT_HOLE) {
4870
+ __ test(input_reg, Immediate(kSmiTagMask));
4871
+ __ j(zero, &load_smi);
4872
+ ExternalReference hole_nan_reference =
4873
+ ExternalReference::address_of_the_hole_nan();
4874
+ __ movdbl(result_reg, Operand::StaticVariable(hole_nan_reference));
4875
+ __ jmp(&done, Label::kNear);
4876
+ } else {
4877
+ ASSERT(mode == NUMBER_CANDIDATE_IS_SMI);
3911
4878
  }
3912
- __ jmp(&done, Label::kNear);
3913
4879
 
3914
4880
  // Smi to XMM conversion
3915
4881
  __ bind(&load_smi);
@@ -3922,7 +4888,7 @@ void LCodeGen::EmitNumberUntagD(Register input_reg,
3922
4888
 
3923
4889
  void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
3924
4890
  Label done, heap_number;
3925
- Register input_reg = ToRegister(instr->InputAt(0));
4891
+ Register input_reg = ToRegister(instr->value());
3926
4892
 
3927
4893
  // Heap number map check.
3928
4894
  __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
@@ -3933,6 +4899,7 @@ void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
3933
4899
  // Check for undefined. Undefined is converted to zero for truncating
3934
4900
  // conversions.
3935
4901
  __ cmp(input_reg, factory()->undefined_value());
4902
+ __ RecordComment("Deferred TaggedToI: cannot truncate");
3936
4903
  DeoptimizeIf(not_equal, instr->environment());
3937
4904
  __ mov(input_reg, 0);
3938
4905
  __ jmp(&done, Label::kNear);
@@ -3953,6 +4920,7 @@ void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
3953
4920
  __ j(less, &convert, Label::kNear);
3954
4921
  // Pop FPU stack before deoptimizing.
3955
4922
  __ fstp(0);
4923
+ __ RecordComment("Deferred TaggedToI: exponent too big");
3956
4924
  DeoptimizeIf(no_condition, instr->environment());
3957
4925
 
3958
4926
  // Reserve space for 64 bit answer.
@@ -3963,7 +4931,8 @@ void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
3963
4931
  __ mov(input_reg, Operand(esp, 0)); // Low word of answer is the result.
3964
4932
  __ add(Operand(esp), Immediate(kDoubleSize));
3965
4933
  } else {
3966
- XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0));
4934
+ CpuFeatures::Scope scope(SSE2);
4935
+ XMMRegister xmm_temp = ToDoubleRegister(instr->temp());
3967
4936
  __ movdbl(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
3968
4937
  __ cvttsd2si(input_reg, Operand(xmm0));
3969
4938
  __ cmp(input_reg, 0x80000000u);
@@ -3976,24 +4945,31 @@ void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
3976
4945
  DeoptimizeIf(not_equal, instr->environment());
3977
4946
  DeoptimizeIf(parity_even, instr->environment()); // NaN.
3978
4947
  }
3979
- } else {
4948
+ } else if (CpuFeatures::IsSupported(SSE2)) {
4949
+ CpuFeatures::Scope scope(SSE2);
3980
4950
  // Deoptimize if we don't have a heap number.
4951
+ __ RecordComment("Deferred TaggedToI: not a heap number");
3981
4952
  DeoptimizeIf(not_equal, instr->environment());
3982
4953
 
3983
- XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0));
4954
+ XMMRegister xmm_temp = ToDoubleRegister(instr->temp());
3984
4955
  __ movdbl(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
3985
4956
  __ cvttsd2si(input_reg, Operand(xmm0));
3986
4957
  __ cvtsi2sd(xmm_temp, Operand(input_reg));
3987
4958
  __ ucomisd(xmm0, xmm_temp);
4959
+ __ RecordComment("Deferred TaggedToI: lost precision");
3988
4960
  DeoptimizeIf(not_equal, instr->environment());
4961
+ __ RecordComment("Deferred TaggedToI: NaN");
3989
4962
  DeoptimizeIf(parity_even, instr->environment()); // NaN.
3990
4963
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3991
4964
  __ test(input_reg, Operand(input_reg));
3992
4965
  __ j(not_zero, &done);
3993
4966
  __ movmskpd(input_reg, xmm0);
3994
4967
  __ and_(input_reg, 1);
4968
+ __ RecordComment("Deferred TaggedToI: minus zero");
3995
4969
  DeoptimizeIf(not_zero, instr->environment());
3996
4970
  }
4971
+ } else {
4972
+ UNREACHABLE();
3997
4973
  }
3998
4974
  __ bind(&done);
3999
4975
  }
@@ -4010,13 +4986,13 @@ void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
4010
4986
  LTaggedToI* instr_;
4011
4987
  };
4012
4988
 
4013
- LOperand* input = instr->InputAt(0);
4989
+ LOperand* input = instr->value();
4014
4990
  ASSERT(input->IsRegister());
4015
4991
  ASSERT(input->Equals(instr->result()));
4016
4992
 
4017
4993
  Register input_reg = ToRegister(input);
4018
4994
 
4019
- DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);
4995
+ DeferredTaggedToI* deferred = new(zone()) DeferredTaggedToI(this, instr);
4020
4996
 
4021
4997
  // Smi check.
4022
4998
  __ JumpIfNotSmi(input_reg, deferred->entry());
@@ -4029,34 +5005,58 @@ void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
4029
5005
 
4030
5006
 
4031
5007
  void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
4032
- LOperand* input = instr->InputAt(0);
5008
+ LOperand* input = instr->value();
4033
5009
  ASSERT(input->IsRegister());
4034
- LOperand* temp = instr->TempAt(0);
5010
+ LOperand* temp = instr->temp();
4035
5011
  ASSERT(temp == NULL || temp->IsRegister());
4036
5012
  LOperand* result = instr->result();
4037
5013
  ASSERT(result->IsDoubleRegister());
4038
5014
 
4039
- Register input_reg = ToRegister(input);
4040
- XMMRegister result_reg = ToDoubleRegister(result);
4041
-
4042
- bool deoptimize_on_minus_zero =
4043
- instr->hydrogen()->deoptimize_on_minus_zero();
4044
- Register temp_reg = deoptimize_on_minus_zero ? ToRegister(temp) : no_reg;
5015
+ if (CpuFeatures::IsSupported(SSE2)) {
5016
+ CpuFeatures::Scope scope(SSE2);
5017
+ Register input_reg = ToRegister(input);
5018
+ XMMRegister result_reg = ToDoubleRegister(result);
5019
+
5020
+ bool deoptimize_on_minus_zero =
5021
+ instr->hydrogen()->deoptimize_on_minus_zero();
5022
+ Register temp_reg = deoptimize_on_minus_zero ? ToRegister(temp) : no_reg;
5023
+
5024
+ NumberUntagDMode mode = NUMBER_CANDIDATE_IS_ANY_TAGGED;
5025
+ HValue* value = instr->hydrogen()->value();
5026
+ if (value->type().IsSmi()) {
5027
+ if (value->IsLoadKeyed()) {
5028
+ HLoadKeyed* load = HLoadKeyed::cast(value);
5029
+ if (load->UsesMustHandleHole()) {
5030
+ if (load->hole_mode() == ALLOW_RETURN_HOLE) {
5031
+ mode = NUMBER_CANDIDATE_IS_SMI_CONVERT_HOLE;
5032
+ } else {
5033
+ mode = NUMBER_CANDIDATE_IS_SMI_OR_HOLE;
5034
+ }
5035
+ } else {
5036
+ mode = NUMBER_CANDIDATE_IS_SMI;
5037
+ }
5038
+ }
5039
+ }
4045
5040
 
4046
- EmitNumberUntagD(input_reg,
4047
- temp_reg,
4048
- result_reg,
4049
- instr->hydrogen()->deoptimize_on_undefined(),
4050
- deoptimize_on_minus_zero,
4051
- instr->environment());
5041
+ EmitNumberUntagD(input_reg,
5042
+ temp_reg,
5043
+ result_reg,
5044
+ instr->hydrogen()->deoptimize_on_undefined(),
5045
+ deoptimize_on_minus_zero,
5046
+ instr->environment(),
5047
+ mode);
5048
+ } else {
5049
+ UNIMPLEMENTED();
5050
+ }
4052
5051
  }
4053
5052
 
4054
5053
 
4055
5054
  void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
4056
- LOperand* input = instr->InputAt(0);
5055
+ LOperand* input = instr->value();
4057
5056
  ASSERT(input->IsDoubleRegister());
4058
5057
  LOperand* result = instr->result();
4059
5058
  ASSERT(result->IsRegister());
5059
+ CpuFeatures::Scope scope(SSE2);
4060
5060
 
4061
5061
  XMMRegister input_reg = ToDoubleRegister(input);
4062
5062
  Register result_reg = ToRegister(result);
@@ -4091,7 +5091,7 @@ void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
4091
5091
  __ bind(&done);
4092
5092
  } else {
4093
5093
  Label done;
4094
- Register temp_reg = ToRegister(instr->TempAt(0));
5094
+ Register temp_reg = ToRegister(instr->temp());
4095
5095
  XMMRegister xmm_scratch = xmm0;
4096
5096
 
4097
5097
  // If cvttsd2si succeeded, we're done. Otherwise, we attempt
@@ -4170,22 +5170,22 @@ void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
4170
5170
 
4171
5171
 
4172
5172
  void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
4173
- LOperand* input = instr->InputAt(0);
5173
+ LOperand* input = instr->value();
4174
5174
  __ test(ToOperand(input), Immediate(kSmiTagMask));
4175
5175
  DeoptimizeIf(not_zero, instr->environment());
4176
5176
  }
4177
5177
 
4178
5178
 
4179
5179
  void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
4180
- LOperand* input = instr->InputAt(0);
5180
+ LOperand* input = instr->value();
4181
5181
  __ test(ToOperand(input), Immediate(kSmiTagMask));
4182
5182
  DeoptimizeIf(zero, instr->environment());
4183
5183
  }
4184
5184
 
4185
5185
 
4186
5186
  void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
4187
- Register input = ToRegister(instr->InputAt(0));
4188
- Register temp = ToRegister(instr->TempAt(0));
5187
+ Register input = ToRegister(instr->value());
5188
+ Register temp = ToRegister(instr->temp());
4189
5189
 
4190
5190
  __ mov(temp, FieldOperand(input, HeapObject::kMapOffset));
4191
5191
 
@@ -4230,7 +5230,7 @@ void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
4230
5230
 
4231
5231
  void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
4232
5232
  Handle<JSFunction> target = instr->hydrogen()->target();
4233
- if (isolate()->heap()->InNewSpace(*target)) {
5233
+ if (instr->hydrogen()->target_in_new_space()) {
4234
5234
  Register reg = ToRegister(instr->value());
4235
5235
  Handle<JSGlobalPropertyCell> cell =
4236
5236
  isolate()->factory()->NewJSGlobalPropertyCell(target);
@@ -4246,16 +5246,16 @@ void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
4246
5246
  void LCodeGen::DoCheckMapCommon(Register reg,
4247
5247
  Handle<Map> map,
4248
5248
  CompareMapMode mode,
4249
- LEnvironment* env) {
5249
+ LInstruction* instr) {
4250
5250
  Label success;
4251
5251
  __ CompareMap(reg, map, &success, mode);
4252
- DeoptimizeIf(not_equal, env);
5252
+ DeoptimizeIf(not_equal, instr->environment());
4253
5253
  __ bind(&success);
4254
5254
  }
4255
5255
 
4256
5256
 
4257
5257
  void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
4258
- LOperand* input = instr->InputAt(0);
5258
+ LOperand* input = instr->value();
4259
5259
  ASSERT(input->IsRegister());
4260
5260
  Register reg = ToRegister(input);
4261
5261
 
@@ -4267,12 +5267,13 @@ void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
4267
5267
  __ j(equal, &success);
4268
5268
  }
4269
5269
  Handle<Map> map = map_set->last();
4270
- DoCheckMapCommon(reg, map, REQUIRE_EXACT_MAP, instr->environment());
5270
+ DoCheckMapCommon(reg, map, REQUIRE_EXACT_MAP, instr);
4271
5271
  __ bind(&success);
4272
5272
  }
4273
5273
 
4274
5274
 
4275
5275
  void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
5276
+ CpuFeatures::Scope scope(SSE2);
4276
5277
  XMMRegister value_reg = ToDoubleRegister(instr->unclamped());
4277
5278
  Register result_reg = ToRegister(instr->result());
4278
5279
  __ ClampDoubleToUint8(value_reg, xmm0, result_reg);
@@ -4287,6 +5288,8 @@ void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) {
4287
5288
 
4288
5289
 
4289
5290
  void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
5291
+ CpuFeatures::Scope scope(SSE2);
5292
+
4290
5293
  ASSERT(instr->unclamped()->Equals(instr->result()));
4291
5294
  Register input_reg = ToRegister(instr->unclamped());
4292
5295
  Label is_smi, done, heap_number;
@@ -4321,28 +5324,18 @@ void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
4321
5324
 
4322
5325
 
4323
5326
  void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
4324
- Register reg = ToRegister(instr->TempAt(0));
4325
-
4326
- Handle<JSObject> holder = instr->holder();
4327
- Handle<JSObject> current_prototype = instr->prototype();
5327
+ ASSERT(instr->temp()->Equals(instr->result()));
5328
+ Register reg = ToRegister(instr->temp());
4328
5329
 
4329
- // Load prototype object.
4330
- __ LoadHeapObject(reg, current_prototype);
5330
+ ZoneList<Handle<JSObject> >* prototypes = instr->prototypes();
5331
+ ZoneList<Handle<Map> >* maps = instr->maps();
4331
5332
 
4332
- // Check prototype maps up to the holder.
4333
- while (!current_prototype.is_identical_to(holder)) {
4334
- DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
4335
- ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
5333
+ ASSERT(prototypes->length() == maps->length());
4336
5334
 
4337
- current_prototype =
4338
- Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
4339
- // Load next prototype object.
4340
- __ LoadHeapObject(reg, current_prototype);
5335
+ for (int i = 0; i < prototypes->length(); i++) {
5336
+ __ LoadHeapObject(reg, prototypes->at(i));
5337
+ DoCheckMapCommon(reg, maps->at(i), ALLOW_ELEMENT_TRANSITION_MAPS, instr);
4341
5338
  }
4342
-
4343
- // Check the holder map.
4344
- DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
4345
- ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
4346
5339
  }
4347
5340
 
4348
5341
 
@@ -4357,10 +5350,11 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
4357
5350
  LAllocateObject* instr_;
4358
5351
  };
4359
5352
 
4360
- DeferredAllocateObject* deferred = new DeferredAllocateObject(this, instr);
5353
+ DeferredAllocateObject* deferred =
5354
+ new(zone()) DeferredAllocateObject(this, instr);
4361
5355
 
4362
5356
  Register result = ToRegister(instr->result());
4363
- Register scratch = ToRegister(instr->TempAt(0));
5357
+ Register scratch = ToRegister(instr->temp());
4364
5358
  Handle<JSFunction> constructor = instr->hydrogen()->constructor();
4365
5359
  Handle<Map> initial_map(constructor->initial_map());
4366
5360
  int instance_size = initial_map->instance_size();
@@ -4393,7 +5387,7 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
4393
5387
  __ mov(map, FieldOperand(scratch, JSFunction::kPrototypeOrInitialMapOffset));
4394
5388
 
4395
5389
  if (FLAG_debug_code) {
4396
- __ AbortIfSmi(map);
5390
+ __ AssertNotSmi(map);
4397
5391
  __ cmpb(FieldOperand(map, Map::kInstanceSizeOffset),
4398
5392
  instance_size >> kPointerSizeLog2);
4399
5393
  __ Assert(equal, "Unexpected instance size");
@@ -4443,11 +5437,67 @@ void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
4443
5437
  }
4444
5438
 
4445
5439
 
5440
+ void LCodeGen::DoAllocate(LAllocate* instr) {
5441
+ class DeferredAllocate: public LDeferredCode {
5442
+ public:
5443
+ DeferredAllocate(LCodeGen* codegen, LAllocate* instr)
5444
+ : LDeferredCode(codegen), instr_(instr) { }
5445
+ virtual void Generate() { codegen()->DoDeferredAllocate(instr_); }
5446
+ virtual LInstruction* instr() { return instr_; }
5447
+ private:
5448
+ LAllocate* instr_;
5449
+ };
5450
+
5451
+ DeferredAllocate* deferred =
5452
+ new(zone()) DeferredAllocate(this, instr);
5453
+
5454
+ Register size = ToRegister(instr->size());
5455
+ Register result = ToRegister(instr->result());
5456
+ Register temp = ToRegister(instr->temp());
5457
+
5458
+ HAllocate* original_instr = instr->hydrogen();
5459
+ if (original_instr->size()->IsConstant()) {
5460
+ UNREACHABLE();
5461
+ } else {
5462
+ // Allocate memory for the object.
5463
+ AllocationFlags flags = TAG_OBJECT;
5464
+ if (original_instr->MustAllocateDoubleAligned()) {
5465
+ flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
5466
+ }
5467
+ __ AllocateInNewSpace(size, result, temp, no_reg,
5468
+ deferred->entry(), flags);
5469
+ }
5470
+
5471
+ __ bind(deferred->exit());
5472
+ }
5473
+
5474
+
5475
+ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
5476
+ Register size = ToRegister(instr->size());
5477
+ Register result = ToRegister(instr->result());
5478
+
5479
+ __ SmiTag(size);
5480
+ PushSafepointRegistersScope scope(this);
5481
+ // TODO(3095996): Get rid of this. For now, we need to make the
5482
+ // result register contain a valid pointer because it is already
5483
+ // contained in the register pointer map.
5484
+ if (!size.is(result)) {
5485
+ __ StoreToSafepointRegisterSlot(result, size);
5486
+ }
5487
+ __ push(size);
5488
+ CallRuntimeFromDeferred(
5489
+ Runtime::kAllocateInNewSpace, 1, instr, instr->context());
5490
+ __ StoreToSafepointRegisterSlot(result, eax);
5491
+ }
5492
+
5493
+
4446
5494
  void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
4447
5495
  ASSERT(ToRegister(instr->context()).is(esi));
4448
- Heap* heap = isolate()->heap();
5496
+ Handle<FixedArray> literals(instr->environment()->closure()->literals());
4449
5497
  ElementsKind boilerplate_elements_kind =
4450
5498
  instr->hydrogen()->boilerplate_elements_kind();
5499
+ AllocationSiteMode allocation_site_mode =
5500
+ instr->hydrogen()->allocation_site_mode();
4451
5501
 
4452
5502
  // Deopt if the array literal boilerplate ElementsKind is of a type different
4453
5503
  // than the expected one. The check isn't necessary if the boilerplate has
@@ -4466,12 +5516,11 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
4466
5516
  }
4467
5517
 
4468
5518
  // Set up the parameters to the stub/runtime call.
4469
- __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
4470
- __ push(FieldOperand(eax, JSFunction::kLiteralsOffset));
5519
+ __ PushHeapObject(literals);
4471
5520
  __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
4472
5521
  // Boilerplate already exists, constant elements are never accessed.
4473
5522
  // Pass an empty fixed array.
4474
- __ push(Immediate(Handle<FixedArray>(heap->empty_fixed_array())));
5523
+ __ push(Immediate(isolate()->factory()->empty_fixed_array()));
4475
5524
 
4476
5525
  // Pick the right runtime function or stub to call.
4477
5526
  int length = instr->hydrogen()->length();
@@ -4479,7 +5528,7 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
4479
5528
  ASSERT(instr->hydrogen()->depth() == 1);
4480
5529
  FastCloneShallowArrayStub::Mode mode =
4481
5530
  FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
4482
- FastCloneShallowArrayStub stub(mode, length);
5531
+ FastCloneShallowArrayStub stub(mode, DONT_TRACK_ALLOCATION_SITE, length);
4483
5532
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4484
5533
  } else if (instr->hydrogen()->depth() > 1) {
4485
5534
  CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
@@ -4488,9 +5537,9 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
4488
5537
  } else {
4489
5538
  FastCloneShallowArrayStub::Mode mode =
4490
5539
  boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS
4491
- ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
4492
- : FastCloneShallowArrayStub::CLONE_ELEMENTS;
4493
- FastCloneShallowArrayStub stub(mode, length);
5540
+ ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
5541
+ : FastCloneShallowArrayStub::CLONE_ELEMENTS;
5542
+ FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
4494
5543
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4495
5544
  }
4496
5545
  }
@@ -4499,10 +5548,14 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
4499
5548
  void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
4500
5549
  Register result,
4501
5550
  Register source,
4502
- int* offset) {
5551
+ int* offset,
5552
+ AllocationSiteMode mode) {
4503
5553
  ASSERT(!source.is(ecx));
4504
5554
  ASSERT(!result.is(ecx));
4505
5555
 
5556
+ bool create_allocation_site_info = mode == TRACK_ALLOCATION_SITE &&
5557
+ object->map()->CanTrackAllocationSite();
5558
+
4506
5559
  if (FLAG_debug_code) {
4507
5560
  __ LoadHeapObject(ecx, object);
4508
5561
  __ cmp(source, ecx);
@@ -4525,8 +5578,13 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
4525
5578
  // this object and its backing store.
4526
5579
  int object_offset = *offset;
4527
5580
  int object_size = object->map()->instance_size();
4528
- int elements_offset = *offset + object_size;
4529
5581
  int elements_size = has_elements ? elements->Size() : 0;
5582
+ int elements_offset = *offset + object_size;
5583
+ if (create_allocation_site_info) {
5584
+ elements_offset += AllocationSiteInfo::kSize;
5585
+ *offset += AllocationSiteInfo::kSize;
5586
+ }
5587
+
4530
5588
  *offset += object_size + elements_size;
4531
5589
 
4532
5590
  // Copy object header.
@@ -4551,7 +5609,8 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
4551
5609
  __ lea(ecx, Operand(result, *offset));
4552
5610
  __ mov(FieldOperand(result, total_offset), ecx);
4553
5611
  __ LoadHeapObject(source, value_object);
4554
- EmitDeepCopy(value_object, result, source, offset);
5612
+ EmitDeepCopy(value_object, result, source, offset,
5613
+ DONT_TRACK_ALLOCATION_SITE);
4555
5614
  } else if (value->IsHeapObject()) {
4556
5615
  __ LoadHeapObject(ecx, Handle<HeapObject>::cast(value));
4557
5616
  __ mov(FieldOperand(result, total_offset), ecx);
@@ -4560,6 +5619,14 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
4560
5619
  }
4561
5620
  }
4562
5621
 
5622
+ // Build Allocation Site Info if desired
5623
+ if (create_allocation_site_info) {
5624
+ __ mov(FieldOperand(result, object_size),
5625
+ Immediate(Handle<Map>(isolate()->heap()->
5626
+ allocation_site_info_map())));
5627
+ __ mov(FieldOperand(result, object_size + kPointerSize), source);
5628
+ }
5629
+
4563
5630
  if (has_elements) {
4564
5631
  // Copy elements backing store header.
4565
5632
  __ LoadHeapObject(source, elements);
@@ -4575,8 +5642,8 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
4575
5642
  Handle<FixedDoubleArray>::cast(elements);
4576
5643
  for (int i = 0; i < elements_length; i++) {
4577
5644
  int64_t value = double_array->get_representation(i);
4578
- int32_t value_low = value & 0xFFFFFFFF;
4579
- int32_t value_high = value >> 32;
5645
+ int32_t value_low = static_cast<int32_t>(value & 0xFFFFFFFF);
5646
+ int32_t value_high = static_cast<int32_t>(value >> 32);
4580
5647
  int total_offset =
4581
5648
  elements_offset + FixedDoubleArray::OffsetOfElementAt(i);
4582
5649
  __ mov(FieldOperand(result, total_offset), Immediate(value_low));
@@ -4592,7 +5659,8 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
4592
5659
  __ lea(ecx, Operand(result, *offset));
4593
5660
  __ mov(FieldOperand(result, total_offset), ecx);
4594
5661
  __ LoadHeapObject(source, value_object);
4595
- EmitDeepCopy(value_object, result, source, offset);
5662
+ EmitDeepCopy(value_object, result, source, offset,
5663
+ DONT_TRACK_ALLOCATION_SITE);
4596
5664
  } else if (value->IsHeapObject()) {
4597
5665
  __ LoadHeapObject(ecx, Handle<HeapObject>::cast(value));
4598
5666
  __ mov(FieldOperand(result, total_offset), ecx);
@@ -4642,7 +5710,8 @@ void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
4642
5710
  __ bind(&allocated);
4643
5711
  int offset = 0;
4644
5712
  __ LoadHeapObject(ebx, instr->hydrogen()->boilerplate());
4645
- EmitDeepCopy(instr->hydrogen()->boilerplate(), eax, ebx, &offset);
5713
+ EmitDeepCopy(instr->hydrogen()->boilerplate(), eax, ebx, &offset,
5714
+ instr->hydrogen()->allocation_site_mode());
4646
5715
  ASSERT_EQ(size, offset);
4647
5716
  }
4648
5717
 
@@ -4680,7 +5749,7 @@ void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
4680
5749
 
4681
5750
 
4682
5751
  void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
4683
- ASSERT(ToRegister(instr->InputAt(0)).is(eax));
5752
+ ASSERT(ToRegister(instr->value()).is(eax));
4684
5753
  __ push(eax);
4685
5754
  CallRuntime(Runtime::kToFastProperties, 1, instr);
4686
5755
  }
@@ -4690,15 +5759,13 @@ void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
4690
5759
  ASSERT(ToRegister(instr->context()).is(esi));
4691
5760
  Label materialized;
4692
5761
  // Registers will be used as follows:
4693
- // edi = JS function.
4694
5762
  // ecx = literals array.
4695
5763
  // ebx = regexp literal.
4696
5764
  // eax = regexp literal clone.
4697
5765
  // esi = context.
4698
- __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
4699
- __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
4700
- int literal_offset = FixedArray::kHeaderSize +
4701
- instr->hydrogen()->literal_index() * kPointerSize;
5766
+ int literal_offset =
5767
+ FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index());
5768
+ __ LoadHeapObject(ecx, instr->hydrogen()->literals());
4702
5769
  __ mov(ebx, FieldOperand(ecx, literal_offset));
4703
5770
  __ cmp(ebx, factory()->undefined_value());
4704
5771
  __ j(not_equal, &materialized, Label::kNear);
@@ -4762,14 +5829,14 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
4762
5829
 
4763
5830
 
4764
5831
  void LCodeGen::DoTypeof(LTypeof* instr) {
4765
- LOperand* input = instr->InputAt(1);
5832
+ LOperand* input = instr->value();
4766
5833
  EmitPushTaggedOperand(input);
4767
5834
  CallRuntime(Runtime::kTypeof, 1, instr);
4768
5835
  }
4769
5836
 
4770
5837
 
4771
5838
  void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
4772
- Register input = ToRegister(instr->InputAt(0));
5839
+ Register input = ToRegister(instr->value());
4773
5840
  int true_block = chunk_->LookupDestination(instr->true_block_id());
4774
5841
  int false_block = chunk_->LookupDestination(instr->false_block_id());
4775
5842
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
@@ -4853,7 +5920,7 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
4853
5920
 
4854
5921
 
4855
5922
  void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
4856
- Register temp = ToRegister(instr->TempAt(0));
5923
+ Register temp = ToRegister(instr->temp());
4857
5924
  int true_block = chunk_->LookupDestination(instr->true_block_id());
4858
5925
  int false_block = chunk_->LookupDestination(instr->false_block_id());
4859
5926
 
@@ -4881,13 +5948,15 @@ void LCodeGen::EmitIsConstructCall(Register temp) {
4881
5948
 
4882
5949
 
4883
5950
  void LCodeGen::EnsureSpaceForLazyDeopt() {
4884
- // Ensure that we have enough space after the previous lazy-bailout
4885
- // instruction for patching the code here.
4886
- int current_pc = masm()->pc_offset();
4887
- int patch_size = Deoptimizer::patch_size();
4888
- if (current_pc < last_lazy_deopt_pc_ + patch_size) {
4889
- int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc;
4890
- __ Nop(padding_size);
5951
+ if (!info()->IsStub()) {
5952
+ // Ensure that we have enough space after the previous lazy-bailout
5953
+ // instruction for patching the code here.
5954
+ int current_pc = masm()->pc_offset();
5955
+ int patch_size = Deoptimizer::patch_size();
5956
+ if (current_pc < last_lazy_deopt_pc_ + patch_size) {
5957
+ int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc;
5958
+ __ Nop(padding_size);
5959
+ }
4891
5960
  }
4892
5961
  last_lazy_deopt_pc_ = masm()->pc_offset();
4893
5962
  }
@@ -4907,6 +5976,11 @@ void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
4907
5976
  }
4908
5977
 
4909
5978
 
5979
+ void LCodeGen::DoDummyUse(LDummyUse* instr) {
5980
+ // Nothing to see here, move on!
5981
+ }
5982
+
5983
+
4910
5984
  void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
4911
5985
  LOperand* obj = instr->object();
4912
5986
  LOperand* key = instr->key();
@@ -4972,7 +6046,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
4972
6046
  ASSERT(instr->hydrogen()->is_backwards_branch());
4973
6047
  // Perform stack overflow check if this goto needs it before jumping.
4974
6048
  DeferredStackCheck* deferred_stack_check =
4975
- new DeferredStackCheck(this, instr);
6049
+ new(zone()) DeferredStackCheck(this, instr);
4976
6050
  ExternalReference stack_limit =
4977
6051
  ExternalReference::address_of_stack_limit(isolate());
4978
6052
  __ cmp(esp, Operand::StaticVariable(stack_limit));
@@ -5054,11 +6128,20 @@ void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
5054
6128
  void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
5055
6129
  Register map = ToRegister(instr->map());
5056
6130
  Register result = ToRegister(instr->result());
6131
+ Label load_cache, done;
6132
+ __ EnumLength(result, map);
6133
+ __ cmp(result, Immediate(Smi::FromInt(0)));
6134
+ __ j(not_equal, &load_cache);
6135
+ __ mov(result, isolate()->factory()->empty_fixed_array());
6136
+ __ jmp(&done);
6137
+
6138
+ __ bind(&load_cache);
5057
6139
  __ LoadInstanceDescriptors(map, result);
5058
6140
  __ mov(result,
5059
- FieldOperand(result, DescriptorArray::kEnumerationIndexOffset));
6141
+ FieldOperand(result, DescriptorArray::kEnumCacheOffset));
5060
6142
  __ mov(result,
5061
6143
  FieldOperand(result, FixedArray::SizeFor(instr->idx())));
6144
+ __ bind(&done);
5062
6145
  __ test(result, result);
5063
6146
  DeoptimizeIf(equal, instr->environment());
5064
6147
  }