libv8 3.11.8.17 → 3.16.14.0

Sign up to get free protection for your applications and to get access to all the features.
Files changed (754)
  1. checksums.yaml +4 -4
  2. data/.travis.yml +1 -2
  3. data/Gemfile +1 -1
  4. data/Rakefile +6 -7
  5. data/lib/libv8/version.rb +1 -1
  6. data/vendor/v8/.gitignore +24 -3
  7. data/vendor/v8/AUTHORS +7 -0
  8. data/vendor/v8/ChangeLog +839 -0
  9. data/vendor/v8/DEPS +1 -1
  10. data/vendor/v8/Makefile.android +92 -0
  11. data/vendor/v8/OWNERS +11 -0
  12. data/vendor/v8/PRESUBMIT.py +71 -0
  13. data/vendor/v8/SConstruct +34 -39
  14. data/vendor/v8/build/android.gypi +56 -37
  15. data/vendor/v8/build/common.gypi +112 -30
  16. data/vendor/v8/build/gyp_v8 +1 -1
  17. data/vendor/v8/build/standalone.gypi +15 -11
  18. data/vendor/v8/include/v8-debug.h +9 -1
  19. data/vendor/v8/include/v8-preparser.h +4 -3
  20. data/vendor/v8/include/v8-profiler.h +25 -25
  21. data/vendor/v8/include/v8-testing.h +4 -3
  22. data/vendor/v8/include/v8.h +994 -540
  23. data/vendor/v8/preparser/preparser-process.cc +3 -3
  24. data/vendor/v8/samples/lineprocessor.cc +20 -27
  25. data/vendor/v8/samples/process.cc +18 -14
  26. data/vendor/v8/samples/shell.cc +16 -15
  27. data/vendor/v8/src/SConscript +15 -14
  28. data/vendor/v8/src/accessors.cc +169 -77
  29. data/vendor/v8/src/accessors.h +4 -0
  30. data/vendor/v8/src/allocation-inl.h +2 -2
  31. data/vendor/v8/src/allocation.h +7 -7
  32. data/vendor/v8/src/api.cc +810 -497
  33. data/vendor/v8/src/api.h +85 -60
  34. data/vendor/v8/src/arm/assembler-arm-inl.h +179 -22
  35. data/vendor/v8/src/arm/assembler-arm.cc +633 -264
  36. data/vendor/v8/src/arm/assembler-arm.h +264 -197
  37. data/vendor/v8/src/arm/builtins-arm.cc +117 -27
  38. data/vendor/v8/src/arm/code-stubs-arm.cc +1241 -700
  39. data/vendor/v8/src/arm/code-stubs-arm.h +35 -138
  40. data/vendor/v8/src/arm/codegen-arm.cc +285 -16
  41. data/vendor/v8/src/arm/codegen-arm.h +22 -0
  42. data/vendor/v8/src/arm/constants-arm.cc +5 -3
  43. data/vendor/v8/src/arm/constants-arm.h +24 -11
  44. data/vendor/v8/src/arm/debug-arm.cc +3 -3
  45. data/vendor/v8/src/arm/deoptimizer-arm.cc +382 -92
  46. data/vendor/v8/src/arm/disasm-arm.cc +61 -12
  47. data/vendor/v8/src/arm/frames-arm.h +0 -14
  48. data/vendor/v8/src/arm/full-codegen-arm.cc +332 -304
  49. data/vendor/v8/src/arm/ic-arm.cc +180 -259
  50. data/vendor/v8/src/arm/lithium-arm.cc +364 -316
  51. data/vendor/v8/src/arm/lithium-arm.h +512 -275
  52. data/vendor/v8/src/arm/lithium-codegen-arm.cc +1768 -809
  53. data/vendor/v8/src/arm/lithium-codegen-arm.h +97 -35
  54. data/vendor/v8/src/arm/lithium-gap-resolver-arm.cc +12 -5
  55. data/vendor/v8/src/arm/macro-assembler-arm.cc +439 -228
  56. data/vendor/v8/src/arm/macro-assembler-arm.h +116 -70
  57. data/vendor/v8/src/arm/regexp-macro-assembler-arm.cc +54 -44
  58. data/vendor/v8/src/arm/regexp-macro-assembler-arm.h +3 -10
  59. data/vendor/v8/src/arm/simulator-arm.cc +272 -238
  60. data/vendor/v8/src/arm/simulator-arm.h +38 -8
  61. data/vendor/v8/src/arm/stub-cache-arm.cc +522 -895
  62. data/vendor/v8/src/array.js +101 -70
  63. data/vendor/v8/src/assembler.cc +270 -19
  64. data/vendor/v8/src/assembler.h +110 -15
  65. data/vendor/v8/src/ast.cc +79 -69
  66. data/vendor/v8/src/ast.h +255 -301
  67. data/vendor/v8/src/atomicops.h +7 -1
  68. data/vendor/v8/src/atomicops_internals_tsan.h +335 -0
  69. data/vendor/v8/src/bootstrapper.cc +481 -418
  70. data/vendor/v8/src/bootstrapper.h +4 -4
  71. data/vendor/v8/src/builtins.cc +498 -311
  72. data/vendor/v8/src/builtins.h +75 -47
  73. data/vendor/v8/src/checks.cc +2 -1
  74. data/vendor/v8/src/checks.h +8 -0
  75. data/vendor/v8/src/code-stubs-hydrogen.cc +253 -0
  76. data/vendor/v8/src/code-stubs.cc +249 -84
  77. data/vendor/v8/src/code-stubs.h +501 -169
  78. data/vendor/v8/src/codegen.cc +36 -18
  79. data/vendor/v8/src/codegen.h +25 -3
  80. data/vendor/v8/src/collection.js +54 -17
  81. data/vendor/v8/src/compilation-cache.cc +24 -16
  82. data/vendor/v8/src/compilation-cache.h +15 -6
  83. data/vendor/v8/src/compiler.cc +497 -195
  84. data/vendor/v8/src/compiler.h +246 -38
  85. data/vendor/v8/src/contexts.cc +64 -24
  86. data/vendor/v8/src/contexts.h +60 -29
  87. data/vendor/v8/src/conversions-inl.h +24 -14
  88. data/vendor/v8/src/conversions.h +7 -4
  89. data/vendor/v8/src/counters.cc +21 -12
  90. data/vendor/v8/src/counters.h +44 -16
  91. data/vendor/v8/src/cpu-profiler.h +1 -1
  92. data/vendor/v8/src/d8-debug.cc +2 -2
  93. data/vendor/v8/src/d8-readline.cc +13 -2
  94. data/vendor/v8/src/d8.cc +681 -273
  95. data/vendor/v8/src/d8.gyp +4 -4
  96. data/vendor/v8/src/d8.h +38 -18
  97. data/vendor/v8/src/d8.js +0 -617
  98. data/vendor/v8/src/data-flow.h +55 -0
  99. data/vendor/v8/src/date.js +1 -42
  100. data/vendor/v8/src/dateparser-inl.h +5 -1
  101. data/vendor/v8/src/debug-agent.cc +10 -15
  102. data/vendor/v8/src/debug-debugger.js +147 -149
  103. data/vendor/v8/src/debug.cc +323 -164
  104. data/vendor/v8/src/debug.h +26 -14
  105. data/vendor/v8/src/deoptimizer.cc +765 -290
  106. data/vendor/v8/src/deoptimizer.h +130 -28
  107. data/vendor/v8/src/disassembler.cc +10 -4
  108. data/vendor/v8/src/elements-kind.cc +7 -2
  109. data/vendor/v8/src/elements-kind.h +19 -0
  110. data/vendor/v8/src/elements.cc +607 -285
  111. data/vendor/v8/src/elements.h +36 -13
  112. data/vendor/v8/src/execution.cc +52 -31
  113. data/vendor/v8/src/execution.h +4 -4
  114. data/vendor/v8/src/extensions/externalize-string-extension.cc +5 -4
  115. data/vendor/v8/src/extensions/gc-extension.cc +5 -1
  116. data/vendor/v8/src/extensions/statistics-extension.cc +153 -0
  117. data/vendor/v8/src/{inspector.h → extensions/statistics-extension.h} +12 -23
  118. data/vendor/v8/src/factory.cc +101 -134
  119. data/vendor/v8/src/factory.h +36 -31
  120. data/vendor/v8/src/flag-definitions.h +102 -25
  121. data/vendor/v8/src/flags.cc +9 -5
  122. data/vendor/v8/src/frames-inl.h +10 -0
  123. data/vendor/v8/src/frames.cc +116 -26
  124. data/vendor/v8/src/frames.h +96 -12
  125. data/vendor/v8/src/full-codegen.cc +219 -74
  126. data/vendor/v8/src/full-codegen.h +63 -21
  127. data/vendor/v8/src/func-name-inferrer.cc +8 -7
  128. data/vendor/v8/src/func-name-inferrer.h +5 -3
  129. data/vendor/v8/src/gdb-jit.cc +71 -57
  130. data/vendor/v8/src/global-handles.cc +230 -101
  131. data/vendor/v8/src/global-handles.h +26 -27
  132. data/vendor/v8/src/globals.h +17 -19
  133. data/vendor/v8/src/handles-inl.h +59 -12
  134. data/vendor/v8/src/handles.cc +180 -200
  135. data/vendor/v8/src/handles.h +80 -11
  136. data/vendor/v8/src/hashmap.h +60 -40
  137. data/vendor/v8/src/heap-inl.h +107 -45
  138. data/vendor/v8/src/heap-profiler.cc +38 -19
  139. data/vendor/v8/src/heap-profiler.h +24 -14
  140. data/vendor/v8/src/heap.cc +1123 -738
  141. data/vendor/v8/src/heap.h +385 -146
  142. data/vendor/v8/src/hydrogen-instructions.cc +700 -217
  143. data/vendor/v8/src/hydrogen-instructions.h +1158 -472
  144. data/vendor/v8/src/hydrogen.cc +3319 -1662
  145. data/vendor/v8/src/hydrogen.h +411 -170
  146. data/vendor/v8/src/ia32/assembler-ia32-inl.h +46 -16
  147. data/vendor/v8/src/ia32/assembler-ia32.cc +131 -61
  148. data/vendor/v8/src/ia32/assembler-ia32.h +115 -57
  149. data/vendor/v8/src/ia32/builtins-ia32.cc +99 -5
  150. data/vendor/v8/src/ia32/code-stubs-ia32.cc +787 -495
  151. data/vendor/v8/src/ia32/code-stubs-ia32.h +10 -100
  152. data/vendor/v8/src/ia32/codegen-ia32.cc +227 -23
  153. data/vendor/v8/src/ia32/codegen-ia32.h +14 -0
  154. data/vendor/v8/src/ia32/deoptimizer-ia32.cc +428 -87
  155. data/vendor/v8/src/ia32/disasm-ia32.cc +28 -1
  156. data/vendor/v8/src/ia32/frames-ia32.h +6 -16
  157. data/vendor/v8/src/ia32/full-codegen-ia32.cc +280 -272
  158. data/vendor/v8/src/ia32/ic-ia32.cc +150 -250
  159. data/vendor/v8/src/ia32/lithium-codegen-ia32.cc +1600 -517
  160. data/vendor/v8/src/ia32/lithium-codegen-ia32.h +90 -24
  161. data/vendor/v8/src/ia32/lithium-gap-resolver-ia32.cc +10 -6
  162. data/vendor/v8/src/ia32/lithium-gap-resolver-ia32.h +2 -2
  163. data/vendor/v8/src/ia32/lithium-ia32.cc +405 -302
  164. data/vendor/v8/src/ia32/lithium-ia32.h +526 -271
  165. data/vendor/v8/src/ia32/macro-assembler-ia32.cc +378 -119
  166. data/vendor/v8/src/ia32/macro-assembler-ia32.h +62 -28
  167. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.cc +43 -30
  168. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.h +2 -10
  169. data/vendor/v8/src/ia32/stub-cache-ia32.cc +492 -678
  170. data/vendor/v8/src/ic-inl.h +9 -4
  171. data/vendor/v8/src/ic.cc +836 -923
  172. data/vendor/v8/src/ic.h +228 -247
  173. data/vendor/v8/src/incremental-marking-inl.h +26 -30
  174. data/vendor/v8/src/incremental-marking.cc +276 -248
  175. data/vendor/v8/src/incremental-marking.h +29 -37
  176. data/vendor/v8/src/interface.cc +34 -25
  177. data/vendor/v8/src/interface.h +69 -25
  178. data/vendor/v8/src/interpreter-irregexp.cc +2 -2
  179. data/vendor/v8/src/isolate.cc +382 -76
  180. data/vendor/v8/src/isolate.h +109 -56
  181. data/vendor/v8/src/json-parser.h +217 -104
  182. data/vendor/v8/src/json-stringifier.h +745 -0
  183. data/vendor/v8/src/json.js +10 -132
  184. data/vendor/v8/src/jsregexp-inl.h +106 -0
  185. data/vendor/v8/src/jsregexp.cc +517 -285
  186. data/vendor/v8/src/jsregexp.h +145 -117
  187. data/vendor/v8/src/list-inl.h +35 -22
  188. data/vendor/v8/src/list.h +46 -19
  189. data/vendor/v8/src/lithium-allocator-inl.h +22 -2
  190. data/vendor/v8/src/lithium-allocator.cc +85 -70
  191. data/vendor/v8/src/lithium-allocator.h +21 -39
  192. data/vendor/v8/src/lithium.cc +259 -5
  193. data/vendor/v8/src/lithium.h +131 -32
  194. data/vendor/v8/src/liveedit-debugger.js +52 -3
  195. data/vendor/v8/src/liveedit.cc +393 -113
  196. data/vendor/v8/src/liveedit.h +7 -3
  197. data/vendor/v8/src/log-utils.cc +4 -2
  198. data/vendor/v8/src/log.cc +170 -140
  199. data/vendor/v8/src/log.h +62 -11
  200. data/vendor/v8/src/macro-assembler.h +17 -0
  201. data/vendor/v8/src/macros.py +2 -0
  202. data/vendor/v8/src/mark-compact-inl.h +3 -23
  203. data/vendor/v8/src/mark-compact.cc +801 -830
  204. data/vendor/v8/src/mark-compact.h +154 -47
  205. data/vendor/v8/src/marking-thread.cc +85 -0
  206. data/vendor/v8/src/{inspector.cc → marking-thread.h} +32 -24
  207. data/vendor/v8/src/math.js +12 -18
  208. data/vendor/v8/src/messages.cc +18 -8
  209. data/vendor/v8/src/messages.js +314 -261
  210. data/vendor/v8/src/mips/assembler-mips-inl.h +58 -6
  211. data/vendor/v8/src/mips/assembler-mips.cc +92 -75
  212. data/vendor/v8/src/mips/assembler-mips.h +54 -60
  213. data/vendor/v8/src/mips/builtins-mips.cc +116 -17
  214. data/vendor/v8/src/mips/code-stubs-mips.cc +919 -556
  215. data/vendor/v8/src/mips/code-stubs-mips.h +22 -131
  216. data/vendor/v8/src/mips/codegen-mips.cc +281 -6
  217. data/vendor/v8/src/mips/codegen-mips.h +22 -0
  218. data/vendor/v8/src/mips/constants-mips.cc +2 -0
  219. data/vendor/v8/src/mips/constants-mips.h +12 -2
  220. data/vendor/v8/src/mips/deoptimizer-mips.cc +286 -50
  221. data/vendor/v8/src/mips/disasm-mips.cc +13 -0
  222. data/vendor/v8/src/mips/full-codegen-mips.cc +297 -284
  223. data/vendor/v8/src/mips/ic-mips.cc +182 -263
  224. data/vendor/v8/src/mips/lithium-codegen-mips.cc +1208 -556
  225. data/vendor/v8/src/mips/lithium-codegen-mips.h +72 -19
  226. data/vendor/v8/src/mips/lithium-gap-resolver-mips.cc +9 -2
  227. data/vendor/v8/src/mips/lithium-mips.cc +290 -302
  228. data/vendor/v8/src/mips/lithium-mips.h +463 -266
  229. data/vendor/v8/src/mips/macro-assembler-mips.cc +208 -115
  230. data/vendor/v8/src/mips/macro-assembler-mips.h +67 -24
  231. data/vendor/v8/src/mips/regexp-macro-assembler-mips.cc +40 -25
  232. data/vendor/v8/src/mips/regexp-macro-assembler-mips.h +3 -9
  233. data/vendor/v8/src/mips/simulator-mips.cc +112 -40
  234. data/vendor/v8/src/mips/simulator-mips.h +5 -0
  235. data/vendor/v8/src/mips/stub-cache-mips.cc +502 -884
  236. data/vendor/v8/src/mirror-debugger.js +157 -30
  237. data/vendor/v8/src/mksnapshot.cc +88 -14
  238. data/vendor/v8/src/object-observe.js +235 -0
  239. data/vendor/v8/src/objects-debug.cc +178 -176
  240. data/vendor/v8/src/objects-inl.h +1333 -486
  241. data/vendor/v8/src/objects-printer.cc +125 -43
  242. data/vendor/v8/src/objects-visiting-inl.h +578 -6
  243. data/vendor/v8/src/objects-visiting.cc +2 -2
  244. data/vendor/v8/src/objects-visiting.h +172 -79
  245. data/vendor/v8/src/objects.cc +3533 -2885
  246. data/vendor/v8/src/objects.h +1352 -1131
  247. data/vendor/v8/src/optimizing-compiler-thread.cc +152 -0
  248. data/vendor/v8/src/optimizing-compiler-thread.h +111 -0
  249. data/vendor/v8/src/parser.cc +390 -500
  250. data/vendor/v8/src/parser.h +45 -33
  251. data/vendor/v8/src/platform-cygwin.cc +10 -21
  252. data/vendor/v8/src/platform-freebsd.cc +36 -41
  253. data/vendor/v8/src/platform-linux.cc +160 -124
  254. data/vendor/v8/src/platform-macos.cc +30 -27
  255. data/vendor/v8/src/platform-nullos.cc +17 -1
  256. data/vendor/v8/src/platform-openbsd.cc +19 -50
  257. data/vendor/v8/src/platform-posix.cc +14 -0
  258. data/vendor/v8/src/platform-solaris.cc +20 -53
  259. data/vendor/v8/src/platform-win32.cc +49 -26
  260. data/vendor/v8/src/platform.h +40 -1
  261. data/vendor/v8/src/preparser.cc +8 -5
  262. data/vendor/v8/src/preparser.h +2 -2
  263. data/vendor/v8/src/prettyprinter.cc +16 -0
  264. data/vendor/v8/src/prettyprinter.h +2 -0
  265. data/vendor/v8/src/profile-generator-inl.h +1 -0
  266. data/vendor/v8/src/profile-generator.cc +209 -147
  267. data/vendor/v8/src/profile-generator.h +15 -12
  268. data/vendor/v8/src/property-details.h +46 -31
  269. data/vendor/v8/src/property.cc +27 -46
  270. data/vendor/v8/src/property.h +163 -83
  271. data/vendor/v8/src/proxy.js +7 -2
  272. data/vendor/v8/src/regexp-macro-assembler-irregexp.cc +4 -13
  273. data/vendor/v8/src/regexp-macro-assembler-irregexp.h +1 -2
  274. data/vendor/v8/src/regexp-macro-assembler-tracer.cc +1 -11
  275. data/vendor/v8/src/regexp-macro-assembler-tracer.h +0 -1
  276. data/vendor/v8/src/regexp-macro-assembler.cc +31 -14
  277. data/vendor/v8/src/regexp-macro-assembler.h +14 -11
  278. data/vendor/v8/src/regexp-stack.cc +1 -0
  279. data/vendor/v8/src/regexp.js +9 -8
  280. data/vendor/v8/src/rewriter.cc +18 -7
  281. data/vendor/v8/src/runtime-profiler.cc +52 -43
  282. data/vendor/v8/src/runtime-profiler.h +0 -25
  283. data/vendor/v8/src/runtime.cc +2006 -2023
  284. data/vendor/v8/src/runtime.h +56 -49
  285. data/vendor/v8/src/safepoint-table.cc +12 -18
  286. data/vendor/v8/src/safepoint-table.h +11 -8
  287. data/vendor/v8/src/scanner.cc +1 -0
  288. data/vendor/v8/src/scanner.h +4 -10
  289. data/vendor/v8/src/scopeinfo.cc +35 -9
  290. data/vendor/v8/src/scopeinfo.h +64 -3
  291. data/vendor/v8/src/scopes.cc +251 -156
  292. data/vendor/v8/src/scopes.h +61 -27
  293. data/vendor/v8/src/serialize.cc +348 -396
  294. data/vendor/v8/src/serialize.h +125 -114
  295. data/vendor/v8/src/small-pointer-list.h +11 -11
  296. data/vendor/v8/src/{smart-array-pointer.h → smart-pointers.h} +64 -15
  297. data/vendor/v8/src/snapshot-common.cc +64 -15
  298. data/vendor/v8/src/snapshot-empty.cc +7 -1
  299. data/vendor/v8/src/snapshot.h +9 -2
  300. data/vendor/v8/src/spaces-inl.h +17 -0
  301. data/vendor/v8/src/spaces.cc +477 -183
  302. data/vendor/v8/src/spaces.h +238 -58
  303. data/vendor/v8/src/splay-tree-inl.h +8 -7
  304. data/vendor/v8/src/splay-tree.h +24 -10
  305. data/vendor/v8/src/store-buffer.cc +12 -5
  306. data/vendor/v8/src/store-buffer.h +2 -4
  307. data/vendor/v8/src/string-search.h +22 -6
  308. data/vendor/v8/src/string-stream.cc +11 -8
  309. data/vendor/v8/src/string.js +47 -15
  310. data/vendor/v8/src/stub-cache.cc +461 -224
  311. data/vendor/v8/src/stub-cache.h +164 -102
  312. data/vendor/v8/src/sweeper-thread.cc +105 -0
  313. data/vendor/v8/src/sweeper-thread.h +81 -0
  314. data/vendor/v8/src/token.h +1 -0
  315. data/vendor/v8/src/transitions-inl.h +220 -0
  316. data/vendor/v8/src/transitions.cc +160 -0
  317. data/vendor/v8/src/transitions.h +207 -0
  318. data/vendor/v8/src/type-info.cc +182 -181
  319. data/vendor/v8/src/type-info.h +31 -19
  320. data/vendor/v8/src/unicode-inl.h +62 -106
  321. data/vendor/v8/src/unicode.cc +57 -67
  322. data/vendor/v8/src/unicode.h +45 -91
  323. data/vendor/v8/src/uri.js +57 -29
  324. data/vendor/v8/src/utils.h +105 -5
  325. data/vendor/v8/src/v8-counters.cc +54 -11
  326. data/vendor/v8/src/v8-counters.h +134 -19
  327. data/vendor/v8/src/v8.cc +29 -29
  328. data/vendor/v8/src/v8.h +1 -0
  329. data/vendor/v8/src/v8conversions.cc +26 -22
  330. data/vendor/v8/src/v8globals.h +56 -43
  331. data/vendor/v8/src/v8natives.js +83 -30
  332. data/vendor/v8/src/v8threads.cc +42 -21
  333. data/vendor/v8/src/v8threads.h +4 -1
  334. data/vendor/v8/src/v8utils.cc +9 -93
  335. data/vendor/v8/src/v8utils.h +37 -33
  336. data/vendor/v8/src/variables.cc +6 -3
  337. data/vendor/v8/src/variables.h +6 -13
  338. data/vendor/v8/src/version.cc +2 -2
  339. data/vendor/v8/src/vm-state-inl.h +11 -0
  340. data/vendor/v8/src/x64/assembler-x64-inl.h +39 -8
  341. data/vendor/v8/src/x64/assembler-x64.cc +78 -64
  342. data/vendor/v8/src/x64/assembler-x64.h +38 -33
  343. data/vendor/v8/src/x64/builtins-x64.cc +105 -7
  344. data/vendor/v8/src/x64/code-stubs-x64.cc +790 -413
  345. data/vendor/v8/src/x64/code-stubs-x64.h +10 -106
  346. data/vendor/v8/src/x64/codegen-x64.cc +210 -8
  347. data/vendor/v8/src/x64/codegen-x64.h +20 -1
  348. data/vendor/v8/src/x64/deoptimizer-x64.cc +336 -75
  349. data/vendor/v8/src/x64/disasm-x64.cc +15 -0
  350. data/vendor/v8/src/x64/frames-x64.h +0 -14
  351. data/vendor/v8/src/x64/full-codegen-x64.cc +293 -270
  352. data/vendor/v8/src/x64/ic-x64.cc +153 -251
  353. data/vendor/v8/src/x64/lithium-codegen-x64.cc +1379 -531
  354. data/vendor/v8/src/x64/lithium-codegen-x64.h +67 -23
  355. data/vendor/v8/src/x64/lithium-gap-resolver-x64.cc +2 -2
  356. data/vendor/v8/src/x64/lithium-x64.cc +349 -289
  357. data/vendor/v8/src/x64/lithium-x64.h +460 -250
  358. data/vendor/v8/src/x64/macro-assembler-x64.cc +350 -177
  359. data/vendor/v8/src/x64/macro-assembler-x64.h +67 -49
  360. data/vendor/v8/src/x64/regexp-macro-assembler-x64.cc +46 -33
  361. data/vendor/v8/src/x64/regexp-macro-assembler-x64.h +2 -3
  362. data/vendor/v8/src/x64/stub-cache-x64.cc +484 -653
  363. data/vendor/v8/src/zone-inl.h +9 -27
  364. data/vendor/v8/src/zone.cc +5 -5
  365. data/vendor/v8/src/zone.h +53 -27
  366. data/vendor/v8/test/benchmarks/testcfg.py +5 -0
  367. data/vendor/v8/test/cctest/cctest.cc +4 -0
  368. data/vendor/v8/test/cctest/cctest.gyp +3 -1
  369. data/vendor/v8/test/cctest/cctest.h +57 -9
  370. data/vendor/v8/test/cctest/cctest.status +15 -15
  371. data/vendor/v8/test/cctest/test-accessors.cc +26 -0
  372. data/vendor/v8/test/cctest/test-alloc.cc +22 -30
  373. data/vendor/v8/test/cctest/test-api.cc +1943 -314
  374. data/vendor/v8/test/cctest/test-assembler-arm.cc +133 -13
  375. data/vendor/v8/test/cctest/test-assembler-ia32.cc +1 -1
  376. data/vendor/v8/test/cctest/test-assembler-mips.cc +12 -0
  377. data/vendor/v8/test/cctest/test-ast.cc +4 -2
  378. data/vendor/v8/test/cctest/test-compiler.cc +61 -29
  379. data/vendor/v8/test/cctest/test-dataflow.cc +2 -2
  380. data/vendor/v8/test/cctest/test-debug.cc +212 -33
  381. data/vendor/v8/test/cctest/test-decls.cc +257 -11
  382. data/vendor/v8/test/cctest/test-dictionary.cc +24 -10
  383. data/vendor/v8/test/cctest/test-disasm-arm.cc +118 -1
  384. data/vendor/v8/test/cctest/test-disasm-ia32.cc +3 -2
  385. data/vendor/v8/test/cctest/test-flags.cc +14 -1
  386. data/vendor/v8/test/cctest/test-func-name-inference.cc +7 -4
  387. data/vendor/v8/test/cctest/test-global-object.cc +51 -0
  388. data/vendor/v8/test/cctest/test-hashing.cc +32 -23
  389. data/vendor/v8/test/cctest/test-heap-profiler.cc +131 -77
  390. data/vendor/v8/test/cctest/test-heap.cc +1084 -143
  391. data/vendor/v8/test/cctest/test-list.cc +1 -1
  392. data/vendor/v8/test/cctest/test-liveedit.cc +3 -2
  393. data/vendor/v8/test/cctest/test-lockers.cc +12 -13
  394. data/vendor/v8/test/cctest/test-log.cc +10 -8
  395. data/vendor/v8/test/cctest/test-macro-assembler-x64.cc +2 -2
  396. data/vendor/v8/test/cctest/test-mark-compact.cc +44 -22
  397. data/vendor/v8/test/cctest/test-object-observe.cc +434 -0
  398. data/vendor/v8/test/cctest/test-parsing.cc +86 -39
  399. data/vendor/v8/test/cctest/test-platform-linux.cc +6 -0
  400. data/vendor/v8/test/cctest/test-platform-win32.cc +7 -0
  401. data/vendor/v8/test/cctest/test-random.cc +5 -4
  402. data/vendor/v8/test/cctest/test-regexp.cc +137 -101
  403. data/vendor/v8/test/cctest/test-serialize.cc +150 -230
  404. data/vendor/v8/test/cctest/test-sockets.cc +1 -1
  405. data/vendor/v8/test/cctest/test-spaces.cc +139 -0
  406. data/vendor/v8/test/cctest/test-strings.cc +736 -74
  407. data/vendor/v8/test/cctest/test-thread-termination.cc +10 -11
  408. data/vendor/v8/test/cctest/test-threads.cc +4 -4
  409. data/vendor/v8/test/cctest/test-utils.cc +16 -0
  410. data/vendor/v8/test/cctest/test-weakmaps.cc +7 -3
  411. data/vendor/v8/test/cctest/testcfg.py +64 -5
  412. data/vendor/v8/test/es5conform/testcfg.py +5 -0
  413. data/vendor/v8/test/message/message.status +1 -1
  414. data/vendor/v8/test/message/overwritten-builtins.out +3 -0
  415. data/vendor/v8/test/message/testcfg.py +89 -8
  416. data/vendor/v8/test/message/try-catch-finally-no-message.out +26 -26
  417. data/vendor/v8/test/mjsunit/accessor-map-sharing.js +18 -2
  418. data/vendor/v8/test/mjsunit/allocation-site-info.js +126 -0
  419. data/vendor/v8/test/mjsunit/array-bounds-check-removal.js +62 -1
  420. data/vendor/v8/test/mjsunit/array-iteration.js +1 -1
  421. data/vendor/v8/test/mjsunit/array-literal-transitions.js +2 -0
  422. data/vendor/v8/test/mjsunit/array-natives-elements.js +317 -0
  423. data/vendor/v8/test/mjsunit/array-reduce.js +8 -8
  424. data/vendor/v8/test/mjsunit/array-slice.js +12 -0
  425. data/vendor/v8/test/mjsunit/array-store-and-grow.js +4 -1
  426. data/vendor/v8/test/mjsunit/assert-opt-and-deopt.js +1 -1
  427. data/vendor/v8/test/mjsunit/bugs/bug-2337.js +53 -0
  428. data/vendor/v8/test/mjsunit/compare-known-objects-slow.js +69 -0
  429. data/vendor/v8/test/mjsunit/compiler/alloc-object-huge.js +3 -1
  430. data/vendor/v8/test/mjsunit/compiler/inline-accessors.js +368 -0
  431. data/vendor/v8/test/mjsunit/compiler/inline-arguments.js +87 -1
  432. data/vendor/v8/test/mjsunit/compiler/inline-closures.js +49 -0
  433. data/vendor/v8/test/mjsunit/compiler/inline-construct.js +55 -43
  434. data/vendor/v8/test/mjsunit/compiler/inline-literals.js +39 -0
  435. data/vendor/v8/test/mjsunit/compiler/multiply-add.js +69 -0
  436. data/vendor/v8/test/mjsunit/compiler/optimized-closures.js +57 -0
  437. data/vendor/v8/test/mjsunit/compiler/parallel-proto-change.js +44 -0
  438. data/vendor/v8/test/mjsunit/compiler/property-static.js +69 -0
  439. data/vendor/v8/test/mjsunit/compiler/proto-chain-constant.js +55 -0
  440. data/vendor/v8/test/mjsunit/compiler/proto-chain-load.js +44 -0
  441. data/vendor/v8/test/mjsunit/compiler/regress-gvn.js +3 -2
  442. data/vendor/v8/test/mjsunit/compiler/regress-or.js +6 -2
  443. data/vendor/v8/test/mjsunit/compiler/rotate.js +224 -0
  444. data/vendor/v8/test/mjsunit/compiler/uint32.js +173 -0
  445. data/vendor/v8/test/mjsunit/count-based-osr.js +2 -1
  446. data/vendor/v8/test/mjsunit/d8-os.js +3 -3
  447. data/vendor/v8/test/mjsunit/date-parse.js +3 -0
  448. data/vendor/v8/test/mjsunit/date.js +22 -0
  449. data/vendor/v8/test/mjsunit/debug-break-inline.js +1 -0
  450. data/vendor/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js +22 -12
  451. data/vendor/v8/test/mjsunit/debug-evaluate-locals-optimized.js +21 -10
  452. data/vendor/v8/test/mjsunit/debug-liveedit-compile-error.js +60 -0
  453. data/vendor/v8/test/mjsunit/debug-liveedit-double-call.js +142 -0
  454. data/vendor/v8/test/mjsunit/debug-liveedit-literals.js +94 -0
  455. data/vendor/v8/test/mjsunit/debug-liveedit-restart-frame.js +153 -0
  456. data/vendor/v8/test/mjsunit/debug-multiple-breakpoints.js +1 -1
  457. data/vendor/v8/test/mjsunit/debug-script-breakpoints-closure.js +67 -0
  458. data/vendor/v8/test/mjsunit/debug-script-breakpoints-nested.js +82 -0
  459. data/vendor/v8/test/mjsunit/debug-script.js +4 -2
  460. data/vendor/v8/test/mjsunit/debug-set-variable-value.js +308 -0
  461. data/vendor/v8/test/mjsunit/debug-stepout-scope-part1.js +190 -0
  462. data/vendor/v8/test/mjsunit/debug-stepout-scope-part2.js +83 -0
  463. data/vendor/v8/test/mjsunit/debug-stepout-scope-part3.js +80 -0
  464. data/vendor/v8/test/mjsunit/debug-stepout-scope-part4.js +80 -0
  465. data/vendor/v8/test/mjsunit/debug-stepout-scope-part5.js +77 -0
  466. data/vendor/v8/test/mjsunit/debug-stepout-scope-part6.js +79 -0
  467. data/vendor/v8/test/mjsunit/debug-stepout-scope-part7.js +79 -0
  468. data/vendor/v8/test/mjsunit/{debug-stepout-scope.js → debug-stepout-scope-part8.js} +0 -189
  469. data/vendor/v8/test/mjsunit/delete-non-configurable.js +74 -0
  470. data/vendor/v8/test/mjsunit/deopt-minus-zero.js +56 -0
  471. data/vendor/v8/test/mjsunit/elements-kind.js +6 -4
  472. data/vendor/v8/test/mjsunit/elements-length-no-holey.js +33 -0
  473. data/vendor/v8/test/mjsunit/elements-transition-hoisting.js +46 -19
  474. data/vendor/v8/test/mjsunit/error-accessors.js +54 -0
  475. data/vendor/v8/test/mjsunit/error-constructors.js +1 -14
  476. data/vendor/v8/test/mjsunit/error-tostring.js +8 -0
  477. data/vendor/v8/test/mjsunit/eval-stack-trace.js +204 -0
  478. data/vendor/v8/test/mjsunit/external-array.js +364 -1
  479. data/vendor/v8/test/mjsunit/fast-array-length.js +37 -0
  480. data/vendor/v8/test/mjsunit/fast-non-keyed.js +113 -0
  481. data/vendor/v8/test/mjsunit/fast-prototype.js +117 -0
  482. data/vendor/v8/test/mjsunit/function-call.js +14 -18
  483. data/vendor/v8/test/mjsunit/fuzz-natives-part1.js +230 -0
  484. data/vendor/v8/test/mjsunit/fuzz-natives-part2.js +229 -0
  485. data/vendor/v8/test/mjsunit/fuzz-natives-part3.js +229 -0
  486. data/vendor/v8/test/mjsunit/{fuzz-natives.js → fuzz-natives-part4.js} +12 -2
  487. data/vendor/v8/test/mjsunit/generated-transition-stub.js +218 -0
  488. data/vendor/v8/test/mjsunit/greedy.js +1 -1
  489. data/vendor/v8/test/mjsunit/harmony/block-conflicts.js +2 -1
  490. data/vendor/v8/test/mjsunit/harmony/block-let-crankshaft.js +1 -1
  491. data/vendor/v8/test/mjsunit/harmony/collections.js +69 -11
  492. data/vendor/v8/test/mjsunit/harmony/debug-blockscopes.js +2 -2
  493. data/vendor/v8/test/mjsunit/harmony/module-linking.js +180 -3
  494. data/vendor/v8/test/mjsunit/harmony/module-parsing.js +31 -0
  495. data/vendor/v8/test/mjsunit/harmony/module-recompile.js +87 -0
  496. data/vendor/v8/test/mjsunit/harmony/module-resolution.js +15 -2
  497. data/vendor/v8/test/mjsunit/harmony/object-observe.js +1056 -0
  498. data/vendor/v8/test/mjsunit/harmony/proxies-json.js +178 -0
  499. data/vendor/v8/test/mjsunit/harmony/proxies.js +25 -10
  500. data/vendor/v8/test/mjsunit/json-parser-recursive.js +33 -0
  501. data/vendor/v8/test/mjsunit/json-stringify-recursive.js +52 -0
  502. data/vendor/v8/test/mjsunit/json.js +38 -2
  503. data/vendor/v8/test/mjsunit/json2.js +153 -0
  504. data/vendor/v8/test/mjsunit/limit-locals.js +5 -4
  505. data/vendor/v8/test/mjsunit/manual-parallel-recompile.js +79 -0
  506. data/vendor/v8/test/mjsunit/math-exp-precision.js +64 -0
  507. data/vendor/v8/test/mjsunit/math-floor-negative.js +59 -0
  508. data/vendor/v8/test/mjsunit/math-floor-of-div-minus-zero.js +41 -0
  509. data/vendor/v8/test/mjsunit/math-floor-of-div-nosudiv.js +288 -0
  510. data/vendor/v8/test/mjsunit/math-floor-of-div.js +81 -9
  511. data/vendor/v8/test/mjsunit/{math-floor.js → math-floor-part1.js} +1 -72
  512. data/vendor/v8/test/mjsunit/math-floor-part2.js +76 -0
  513. data/vendor/v8/test/mjsunit/math-floor-part3.js +78 -0
  514. data/vendor/v8/test/mjsunit/math-floor-part4.js +76 -0
  515. data/vendor/v8/test/mjsunit/mirror-object.js +43 -9
  516. data/vendor/v8/test/mjsunit/mjsunit.js +1 -1
  517. data/vendor/v8/test/mjsunit/mjsunit.status +52 -27
  518. data/vendor/v8/test/mjsunit/mul-exhaustive-part1.js +491 -0
  519. data/vendor/v8/test/mjsunit/mul-exhaustive-part10.js +470 -0
  520. data/vendor/v8/test/mjsunit/mul-exhaustive-part2.js +525 -0
  521. data/vendor/v8/test/mjsunit/mul-exhaustive-part3.js +532 -0
  522. data/vendor/v8/test/mjsunit/mul-exhaustive-part4.js +509 -0
  523. data/vendor/v8/test/mjsunit/mul-exhaustive-part5.js +505 -0
  524. data/vendor/v8/test/mjsunit/mul-exhaustive-part6.js +554 -0
  525. data/vendor/v8/test/mjsunit/mul-exhaustive-part7.js +497 -0
  526. data/vendor/v8/test/mjsunit/mul-exhaustive-part8.js +526 -0
  527. data/vendor/v8/test/mjsunit/mul-exhaustive-part9.js +533 -0
  528. data/vendor/v8/test/mjsunit/new-function.js +34 -0
  529. data/vendor/v8/test/mjsunit/numops-fuzz-part1.js +1172 -0
  530. data/vendor/v8/test/mjsunit/numops-fuzz-part2.js +1178 -0
  531. data/vendor/v8/test/mjsunit/numops-fuzz-part3.js +1178 -0
  532. data/vendor/v8/test/mjsunit/numops-fuzz-part4.js +1177 -0
  533. data/vendor/v8/test/mjsunit/object-define-property.js +107 -2
  534. data/vendor/v8/test/mjsunit/override-read-only-property.js +6 -4
  535. data/vendor/v8/test/mjsunit/packed-elements.js +2 -2
  536. data/vendor/v8/test/mjsunit/parse-int-float.js +4 -4
  537. data/vendor/v8/test/mjsunit/pixel-array-rounding.js +1 -1
  538. data/vendor/v8/test/mjsunit/readonly.js +228 -0
  539. data/vendor/v8/test/mjsunit/regexp-capture-3.js +16 -18
  540. data/vendor/v8/test/mjsunit/regexp-capture.js +2 -0
  541. data/vendor/v8/test/mjsunit/regexp-global.js +122 -0
  542. data/vendor/v8/test/mjsunit/regexp-results-cache.js +78 -0
  543. data/vendor/v8/test/mjsunit/regress/regress-1117.js +12 -3
  544. data/vendor/v8/test/mjsunit/regress/regress-1118.js +1 -1
  545. data/vendor/v8/test/mjsunit/regress/regress-115100.js +36 -0
  546. data/vendor/v8/test/mjsunit/regress/regress-1199637.js +1 -3
  547. data/vendor/v8/test/mjsunit/regress/regress-121407.js +1 -1
  548. data/vendor/v8/test/mjsunit/regress/regress-131923.js +30 -0
  549. data/vendor/v8/test/mjsunit/regress/regress-131994.js +70 -0
  550. data/vendor/v8/test/mjsunit/regress/regress-133211.js +35 -0
  551. data/vendor/v8/test/mjsunit/regress/regress-133211b.js +39 -0
  552. data/vendor/v8/test/mjsunit/regress/regress-136048.js +34 -0
  553. data/vendor/v8/test/mjsunit/regress/regress-137768.js +73 -0
  554. data/vendor/v8/test/mjsunit/regress/regress-143967.js +34 -0
  555. data/vendor/v8/test/mjsunit/regress/regress-145201.js +107 -0
  556. data/vendor/v8/test/mjsunit/regress/regress-147497.js +45 -0
  557. data/vendor/v8/test/mjsunit/regress/regress-148378.js +38 -0
  558. data/vendor/v8/test/mjsunit/regress/regress-1563.js +1 -1
  559. data/vendor/v8/test/mjsunit/regress/regress-1591.js +48 -0
  560. data/vendor/v8/test/mjsunit/regress/regress-164442.js +45 -0
  561. data/vendor/v8/test/mjsunit/regress/regress-165637.js +61 -0
  562. data/vendor/v8/test/mjsunit/regress/regress-166379.js +39 -0
  563. data/vendor/v8/test/mjsunit/regress/regress-166553.js +33 -0
  564. data/vendor/v8/test/mjsunit/regress/regress-1692.js +1 -1
  565. data/vendor/v8/test/mjsunit/regress/regress-171641.js +40 -0
  566. data/vendor/v8/test/mjsunit/regress/regress-1980.js +1 -1
  567. data/vendor/v8/test/mjsunit/regress/regress-2073.js +99 -0
  568. data/vendor/v8/test/mjsunit/regress/regress-2119.js +36 -0
  569. data/vendor/v8/test/mjsunit/regress/regress-2156.js +39 -0
  570. data/vendor/v8/test/mjsunit/regress/regress-2163.js +70 -0
  571. data/vendor/v8/test/mjsunit/regress/regress-2170.js +58 -0
  572. data/vendor/v8/test/mjsunit/regress/regress-2172.js +35 -0
  573. data/vendor/v8/test/mjsunit/regress/regress-2185-2.js +145 -0
  574. data/vendor/v8/test/mjsunit/regress/regress-2185.js +38 -0
  575. data/vendor/v8/test/mjsunit/regress/regress-2186.js +49 -0
  576. data/vendor/v8/test/mjsunit/regress/regress-2193.js +58 -0
  577. data/vendor/v8/test/mjsunit/regress/regress-2219.js +32 -0
  578. data/vendor/v8/test/mjsunit/regress/regress-2225.js +65 -0
  579. data/vendor/v8/test/mjsunit/regress/regress-2226.js +36 -0
  580. data/vendor/v8/test/mjsunit/regress/regress-2234.js +41 -0
  581. data/vendor/v8/test/mjsunit/regress/regress-2243.js +31 -0
  582. data/vendor/v8/test/mjsunit/regress/regress-2249.js +33 -0
  583. data/vendor/v8/test/mjsunit/regress/regress-2250.js +68 -0
  584. data/vendor/v8/test/mjsunit/regress/regress-2261.js +113 -0
  585. data/vendor/v8/test/mjsunit/regress/regress-2263.js +30 -0
  586. data/vendor/v8/test/mjsunit/regress/regress-2284.js +32 -0
  587. data/vendor/v8/test/mjsunit/regress/regress-2285.js +32 -0
  588. data/vendor/v8/test/mjsunit/regress/regress-2286.js +32 -0
  589. data/vendor/v8/test/mjsunit/regress/regress-2289.js +34 -0
  590. data/vendor/v8/test/mjsunit/regress/regress-2291.js +36 -0
  591. data/vendor/v8/test/mjsunit/regress/regress-2294.js +70 -0
  592. data/vendor/v8/test/mjsunit/regress/regress-2296.js +40 -0
  593. data/vendor/v8/test/mjsunit/regress/regress-2315.js +40 -0
  594. data/vendor/v8/test/mjsunit/regress/regress-2318.js +66 -0
  595. data/vendor/v8/test/mjsunit/regress/regress-2322.js +36 -0
  596. data/vendor/v8/test/mjsunit/regress/regress-2326.js +54 -0
  597. data/vendor/v8/test/mjsunit/regress/regress-2336.js +53 -0
  598. data/vendor/v8/test/mjsunit/regress/regress-2339.js +59 -0
  599. data/vendor/v8/test/mjsunit/regress/regress-2346.js +123 -0
  600. data/vendor/v8/test/mjsunit/regress/regress-2373.js +29 -0
  601. data/vendor/v8/test/mjsunit/regress/regress-2374.js +33 -0
  602. data/vendor/v8/test/mjsunit/regress/regress-2398.js +41 -0
  603. data/vendor/v8/test/mjsunit/regress/regress-2410.js +36 -0
  604. data/vendor/v8/test/mjsunit/regress/regress-2416.js +75 -0
  605. data/vendor/v8/test/mjsunit/regress/regress-2419.js +37 -0
  606. data/vendor/v8/test/mjsunit/regress/regress-2433.js +36 -0
  607. data/vendor/v8/test/mjsunit/regress/regress-2437.js +156 -0
  608. data/vendor/v8/test/mjsunit/regress/regress-2438.js +52 -0
  609. data/vendor/v8/test/mjsunit/regress/regress-2443.js +129 -0
  610. data/vendor/v8/test/mjsunit/regress/regress-2444.js +120 -0
  611. data/vendor/v8/test/mjsunit/regress/regress-2489.js +50 -0
  612. data/vendor/v8/test/mjsunit/regress/regress-2499.js +40 -0
  613. data/vendor/v8/test/mjsunit/regress/regress-334.js +1 -1
  614. data/vendor/v8/test/mjsunit/regress/regress-492.js +39 -1
  615. data/vendor/v8/test/mjsunit/regress/regress-builtin-array-op.js +38 -0
  616. data/vendor/v8/test/mjsunit/regress/regress-cnlt-elements.js +43 -0
  617. data/vendor/v8/test/mjsunit/regress/regress-cnlt-enum-indices.js +45 -0
  618. data/vendor/v8/test/mjsunit/regress/regress-cntl-descriptors-enum.js +46 -0
  619. data/vendor/v8/test/mjsunit/regress/regress-convert-enum.js +60 -0
  620. data/vendor/v8/test/mjsunit/regress/regress-convert-enum2.js +46 -0
  621. data/vendor/v8/test/mjsunit/regress/regress-convert-transition.js +40 -0
  622. data/vendor/v8/test/mjsunit/regress/regress-crbug-119926.js +3 -1
  623. data/vendor/v8/test/mjsunit/regress/regress-crbug-125148.js +90 -0
  624. data/vendor/v8/test/mjsunit/regress/regress-crbug-134055.js +63 -0
  625. data/vendor/v8/test/mjsunit/regress/regress-crbug-134609.js +59 -0
  626. data/vendor/v8/test/mjsunit/regress/regress-crbug-135008.js +45 -0
  627. data/vendor/v8/test/mjsunit/regress/regress-crbug-135066.js +55 -0
  628. data/vendor/v8/test/mjsunit/regress/regress-crbug-137689.js +47 -0
  629. data/vendor/v8/test/mjsunit/regress/regress-crbug-138887.js +48 -0
  630. data/vendor/v8/test/mjsunit/regress/regress-crbug-140083.js +44 -0
  631. data/vendor/v8/test/mjsunit/regress/regress-crbug-142087.js +38 -0
  632. data/vendor/v8/test/mjsunit/regress/regress-crbug-142218.js +44 -0
  633. data/vendor/v8/test/mjsunit/regress/regress-crbug-145961.js +39 -0
  634. data/vendor/v8/test/mjsunit/regress/regress-crbug-146910.js +33 -0
  635. data/vendor/v8/test/mjsunit/regress/regress-crbug-147475.js +48 -0
  636. data/vendor/v8/test/mjsunit/regress/regress-crbug-148376.js +35 -0
  637. data/vendor/v8/test/mjsunit/regress/regress-crbug-150545.js +53 -0
  638. data/vendor/v8/test/mjsunit/regress/regress-crbug-150729.js +39 -0
  639. data/vendor/v8/test/mjsunit/regress/regress-crbug-157019.js +54 -0
  640. data/vendor/v8/test/mjsunit/regress/regress-crbug-157520.js +38 -0
  641. data/vendor/v8/test/mjsunit/regress/regress-crbug-158185.js +39 -0
  642. data/vendor/v8/test/mjsunit/regress/regress-crbug-160010.js +35 -0
  643. data/vendor/v8/test/mjsunit/regress/regress-crbug-162085.js +71 -0
  644. data/vendor/v8/test/mjsunit/regress/regress-crbug-168545.js +34 -0
  645. data/vendor/v8/test/mjsunit/regress/regress-crbug-170856.js +33 -0
  646. data/vendor/v8/test/mjsunit/regress/regress-crbug-172345.js +34 -0
  647. data/vendor/v8/test/mjsunit/regress/regress-crbug-173974.js +36 -0
  648. data/vendor/v8/test/mjsunit/regress/regress-crbug-18639.js +9 -5
  649. data/vendor/v8/test/mjsunit/regress/regress-debug-code-recompilation.js +2 -1
  650. data/vendor/v8/test/mjsunit/regress/regress-deep-proto.js +45 -0
  651. data/vendor/v8/test/mjsunit/regress/regress-delete-empty-double.js +40 -0
  652. data/vendor/v8/test/mjsunit/regress/regress-iteration-order.js +42 -0
  653. data/vendor/v8/test/mjsunit/regress/regress-json-stringify-gc.js +41 -0
  654. data/vendor/v8/test/mjsunit/regress/regress-latin-1.js +78 -0
  655. data/vendor/v8/test/mjsunit/regress/regress-load-elements.js +49 -0
  656. data/vendor/v8/test/mjsunit/regress/regress-observe-empty-double-array.js +38 -0
  657. data/vendor/v8/test/mjsunit/regress/regress-undefined-store-keyed-fast-element.js +37 -0
  658. data/vendor/v8/test/mjsunit/shift-for-integer-div.js +59 -0
  659. data/vendor/v8/test/mjsunit/stack-traces-gc.js +119 -0
  660. data/vendor/v8/test/mjsunit/stack-traces-overflow.js +122 -0
  661. data/vendor/v8/test/mjsunit/stack-traces.js +39 -1
  662. data/vendor/v8/test/mjsunit/str-to-num.js +7 -2
  663. data/vendor/v8/test/mjsunit/strict-mode.js +36 -11
  664. data/vendor/v8/test/mjsunit/string-charcodeat.js +3 -0
  665. data/vendor/v8/test/mjsunit/string-natives.js +72 -0
  666. data/vendor/v8/test/mjsunit/string-split.js +17 -0
  667. data/vendor/v8/test/mjsunit/testcfg.py +76 -6
  668. data/vendor/v8/test/mjsunit/tools/tickprocessor.js +4 -1
  669. data/vendor/v8/test/mjsunit/try-finally-continue.js +72 -0
  670. data/vendor/v8/test/mjsunit/typed-array-slice.js +61 -0
  671. data/vendor/v8/test/mjsunit/unbox-double-arrays.js +2 -0
  672. data/vendor/v8/test/mjsunit/uri.js +12 -0
  673. data/vendor/v8/test/mjsunit/with-readonly.js +4 -2
  674. data/vendor/v8/test/mozilla/mozilla.status +19 -113
  675. data/vendor/v8/test/mozilla/testcfg.py +122 -3
  676. data/vendor/v8/test/preparser/preparser.status +5 -0
  677. data/vendor/v8/test/preparser/strict-identifiers.pyt +1 -1
  678. data/vendor/v8/test/preparser/testcfg.py +101 -5
  679. data/vendor/v8/test/sputnik/sputnik.status +1 -1
  680. data/vendor/v8/test/sputnik/testcfg.py +5 -0
  681. data/vendor/v8/test/test262/README +2 -2
  682. data/vendor/v8/test/test262/test262.status +13 -36
  683. data/vendor/v8/test/test262/testcfg.py +102 -8
  684. data/vendor/v8/tools/android-build.sh +0 -0
  685. data/vendor/v8/tools/android-ll-prof.sh +69 -0
  686. data/vendor/v8/tools/android-run.py +109 -0
  687. data/vendor/v8/tools/android-sync.sh +105 -0
  688. data/vendor/v8/tools/bash-completion.sh +0 -0
  689. data/vendor/v8/tools/check-static-initializers.sh +0 -0
  690. data/vendor/v8/tools/common-includes.sh +15 -22
  691. data/vendor/v8/tools/disasm.py +4 -4
  692. data/vendor/v8/tools/fuzz-harness.sh +0 -0
  693. data/vendor/v8/tools/gen-postmortem-metadata.py +6 -8
  694. data/vendor/v8/tools/grokdump.py +404 -129
  695. data/vendor/v8/tools/gyp/v8.gyp +105 -43
  696. data/vendor/v8/tools/linux-tick-processor +5 -5
  697. data/vendor/v8/tools/ll_prof.py +75 -15
  698. data/vendor/v8/tools/merge-to-branch.sh +2 -2
  699. data/vendor/v8/tools/plot-timer-events +70 -0
  700. data/vendor/v8/tools/plot-timer-events.js +510 -0
  701. data/vendor/v8/tools/presubmit.py +1 -0
  702. data/vendor/v8/tools/push-to-trunk.sh +14 -4
  703. data/vendor/v8/tools/run-llprof.sh +69 -0
  704. data/vendor/v8/tools/run-tests.py +372 -0
  705. data/vendor/v8/tools/run-valgrind.py +1 -1
  706. data/vendor/v8/tools/status-file-converter.py +39 -0
  707. data/vendor/v8/tools/test-server.py +224 -0
  708. data/vendor/v8/tools/test-wrapper-gypbuild.py +13 -16
  709. data/vendor/v8/tools/test.py +10 -19
  710. data/vendor/v8/tools/testrunner/README +174 -0
  711. data/vendor/v8/tools/testrunner/__init__.py +26 -0
  712. data/vendor/v8/tools/testrunner/local/__init__.py +26 -0
  713. data/vendor/v8/tools/testrunner/local/commands.py +153 -0
  714. data/vendor/v8/tools/testrunner/local/execution.py +182 -0
  715. data/vendor/v8/tools/testrunner/local/old_statusfile.py +460 -0
  716. data/vendor/v8/tools/testrunner/local/progress.py +238 -0
  717. data/vendor/v8/tools/testrunner/local/statusfile.py +145 -0
  718. data/vendor/v8/tools/testrunner/local/testsuite.py +187 -0
  719. data/vendor/v8/tools/testrunner/local/utils.py +108 -0
  720. data/vendor/v8/tools/testrunner/local/verbose.py +99 -0
  721. data/vendor/v8/tools/testrunner/network/__init__.py +26 -0
  722. data/vendor/v8/tools/testrunner/network/distro.py +90 -0
  723. data/vendor/v8/tools/testrunner/network/endpoint.py +124 -0
  724. data/vendor/v8/tools/testrunner/network/network_execution.py +253 -0
  725. data/vendor/v8/tools/testrunner/network/perfdata.py +120 -0
  726. data/vendor/v8/tools/testrunner/objects/__init__.py +26 -0
  727. data/vendor/v8/tools/testrunner/objects/context.py +50 -0
  728. data/vendor/v8/tools/testrunner/objects/output.py +60 -0
  729. data/vendor/v8/tools/testrunner/objects/peer.py +80 -0
  730. data/vendor/v8/tools/testrunner/objects/testcase.py +83 -0
  731. data/vendor/v8/tools/testrunner/objects/workpacket.py +90 -0
  732. data/vendor/v8/tools/testrunner/server/__init__.py +26 -0
  733. data/vendor/v8/tools/testrunner/server/compression.py +111 -0
  734. data/vendor/v8/tools/testrunner/server/constants.py +51 -0
  735. data/vendor/v8/tools/testrunner/server/daemon.py +147 -0
  736. data/vendor/v8/tools/testrunner/server/local_handler.py +119 -0
  737. data/vendor/v8/tools/testrunner/server/main.py +245 -0
  738. data/vendor/v8/tools/testrunner/server/presence_handler.py +120 -0
  739. data/vendor/v8/tools/testrunner/server/signatures.py +63 -0
  740. data/vendor/v8/tools/testrunner/server/status_handler.py +112 -0
  741. data/vendor/v8/tools/testrunner/server/work_handler.py +150 -0
  742. data/vendor/v8/tools/tick-processor.html +168 -0
  743. data/vendor/v8/tools/tickprocessor-driver.js +5 -3
  744. data/vendor/v8/tools/tickprocessor.js +58 -15
  745. metadata +534 -30
  746. data/patches/add-freebsd9-and-freebsd10-to-gyp-GetFlavor.patch +0 -11
  747. data/patches/do-not-imply-vfp3-and-armv7.patch +0 -44
  748. data/patches/fPIC-on-x64.patch +0 -14
  749. data/vendor/v8/src/liveobjectlist-inl.h +0 -126
  750. data/vendor/v8/src/liveobjectlist.cc +0 -2631
  751. data/vendor/v8/src/liveobjectlist.h +0 -319
  752. data/vendor/v8/test/mjsunit/mul-exhaustive.js +0 -4629
  753. data/vendor/v8/test/mjsunit/numops-fuzz.js +0 -4609
  754. data/vendor/v8/test/mjsunit/regress/regress-1969.js +0 -5045
@@ -1,4 +1,4 @@
1
- // Copyright 2012 the V8 project authors. All rights reserved.
1
+ // Copyright 2013 the V8 project authors. All rights reserved.
2
2
  // Redistribution and use in source and binary forms, with or without
3
3
  // modification, are permitted provided that the following conditions are
4
4
  // met:
@@ -92,17 +92,8 @@ void LCodeGen::FinishCode(Handle<Code> code) {
92
92
  }
93
93
 
94
94
 
95
- void LCodeGen::Abort(const char* format, ...) {
96
- if (FLAG_trace_bailout) {
97
- SmartArrayPointer<char> name(
98
- info()->shared_info()->DebugName()->ToCString());
99
- PrintF("Aborting LCodeGen in @\"%s\": ", *name);
100
- va_list arguments;
101
- va_start(arguments, format);
102
- OS::VPrint(format, arguments);
103
- va_end(arguments);
104
- PrintF("\n");
105
- }
95
+ void LChunkBuilder::Abort(const char* reason) {
96
+ info()->set_bailout_reason(reason);
106
97
  status_ = ABORTED;
107
98
  }
108
99
 
@@ -128,44 +119,61 @@ void LCodeGen::Comment(const char* format, ...) {
128
119
  bool LCodeGen::GeneratePrologue() {
129
120
  ASSERT(is_generating());
130
121
 
122
+ if (info()->IsOptimizing()) {
123
+ ProfileEntryHookStub::MaybeCallEntryHook(masm_);
124
+
131
125
  #ifdef DEBUG
132
- if (strlen(FLAG_stop_at) > 0 &&
133
- info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
134
- __ int3();
135
- }
126
+ if (strlen(FLAG_stop_at) > 0 &&
127
+ info_->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
128
+ __ int3();
129
+ }
136
130
  #endif
137
131
 
138
- // Strict mode functions need to replace the receiver with undefined
139
- // when called as functions (without an explicit receiver
140
- // object). rcx is zero for method calls and non-zero for function
141
- // calls.
142
- if (!info_->is_classic_mode() || info_->is_native()) {
143
- Label ok;
144
- __ testq(rcx, rcx);
145
- __ j(zero, &ok, Label::kNear);
146
- // +1 for return address.
147
- int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize;
148
- __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
149
- __ movq(Operand(rsp, receiver_offset), kScratchRegister);
150
- __ bind(&ok);
132
+ // Strict mode functions need to replace the receiver with undefined
133
+ // when called as functions (without an explicit receiver
134
+ // object). rcx is zero for method calls and non-zero for function
135
+ // calls.
136
+ if (!info_->is_classic_mode() || info_->is_native()) {
137
+ Label ok;
138
+ __ testq(rcx, rcx);
139
+ __ j(zero, &ok, Label::kNear);
140
+ // +1 for return address.
141
+ int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize;
142
+ __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
143
+ __ movq(Operand(rsp, receiver_offset), kScratchRegister);
144
+ __ bind(&ok);
145
+ }
151
146
  }
152
147
 
153
- __ push(rbp); // Caller's frame pointer.
154
- __ movq(rbp, rsp);
155
- __ push(rsi); // Callee's context.
156
- __ push(rdi); // Callee's JS function.
148
+ info()->set_prologue_offset(masm_->pc_offset());
149
+ if (NeedsEagerFrame()) {
150
+ ASSERT(!frame_is_built_);
151
+ frame_is_built_ = true;
152
+ __ push(rbp); // Caller's frame pointer.
153
+ __ movq(rbp, rsp);
154
+ __ push(rsi); // Callee's context.
155
+ if (info()->IsStub()) {
156
+ __ Push(Smi::FromInt(StackFrame::STUB));
157
+ } else {
158
+ __ push(rdi); // Callee's JS function.
159
+ }
160
+ }
157
161
 
158
162
  // Reserve space for the stack slots needed by the code.
159
163
  int slots = GetStackSlotCount();
160
164
  if (slots > 0) {
161
165
  if (FLAG_debug_code) {
166
+ __ subq(rsp, Immediate(slots * kPointerSize));
167
+ __ push(rax);
162
168
  __ Set(rax, slots);
163
- __ movq(kScratchRegister, kSlotsZapValue, RelocInfo::NONE);
169
+ __ movq(kScratchRegister, kSlotsZapValue, RelocInfo::NONE64);
164
170
  Label loop;
165
171
  __ bind(&loop);
166
- __ push(kScratchRegister);
172
+ __ movq(MemOperand(rsp, rax, times_pointer_size, 0),
173
+ kScratchRegister);
167
174
  __ decl(rax);
168
175
  __ j(not_zero, &loop);
176
+ __ pop(rax);
169
177
  } else {
170
178
  __ subq(rsp, Immediate(slots * kPointerSize));
171
179
  #ifdef _MSC_VER
@@ -180,10 +188,23 @@ bool LCodeGen::GeneratePrologue() {
180
188
  }
181
189
  #endif
182
190
  }
191
+
192
+ if (info()->saves_caller_doubles()) {
193
+ Comment(";;; Save clobbered callee double registers");
194
+ int count = 0;
195
+ BitVector* doubles = chunk()->allocated_double_registers();
196
+ BitVector::Iterator save_iterator(doubles);
197
+ while (!save_iterator.Done()) {
198
+ __ movsd(MemOperand(rsp, count * kDoubleSize),
199
+ XMMRegister::FromAllocationIndex(save_iterator.Current()));
200
+ save_iterator.Advance();
201
+ count++;
202
+ }
203
+ }
183
204
  }
184
205
 
185
206
  // Possibly allocate a local context.
186
- int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
207
+ int heap_slots = info_->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
187
208
  if (heap_slots > 0) {
188
209
  Comment(";;; Allocate local context");
189
210
  // Argument to NewContext is the function, which is still in rdi.
@@ -219,7 +240,7 @@ bool LCodeGen::GeneratePrologue() {
219
240
  }
220
241
 
221
242
  // Trace the call.
222
- if (FLAG_trace) {
243
+ if (FLAG_trace && info()->IsOptimizing()) {
223
244
  __ CallRuntime(Runtime::kTraceEnter, 0);
224
245
  }
225
246
  return !is_aborted();
@@ -239,7 +260,30 @@ bool LCodeGen::GenerateBody() {
239
260
  }
240
261
 
241
262
  if (emit_instructions) {
242
- Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
263
+ if (FLAG_code_comments) {
264
+ HValue* hydrogen = instr->hydrogen_value();
265
+ if (hydrogen != NULL) {
266
+ if (hydrogen->IsChange()) {
267
+ HValue* changed_value = HChange::cast(hydrogen)->value();
268
+ int use_id = 0;
269
+ const char* use_mnemo = "dead";
270
+ if (hydrogen->UseCount() >= 1) {
271
+ HValue* use_value = hydrogen->uses().value();
272
+ use_id = use_value->id();
273
+ use_mnemo = use_value->Mnemonic();
274
+ }
275
+ Comment(";;; @%d: %s. <of #%d %s for #%d %s>",
276
+ current_instruction_, instr->Mnemonic(),
277
+ changed_value->id(), changed_value->Mnemonic(),
278
+ use_id, use_mnemo);
279
+ } else {
280
+ Comment(";;; @%d: %s. <#%d>", current_instruction_,
281
+ instr->Mnemonic(), hydrogen->id());
282
+ }
283
+ } else {
284
+ Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
285
+ }
286
+ }
243
287
  instr->CompileToNative(this);
244
288
  }
245
289
  }
@@ -249,9 +293,64 @@ bool LCodeGen::GenerateBody() {
249
293
 
250
294
 
251
295
  bool LCodeGen::GenerateJumpTable() {
296
+ Label needs_frame_not_call;
297
+ Label needs_frame_is_call;
252
298
  for (int i = 0; i < jump_table_.length(); i++) {
253
299
  __ bind(&jump_table_[i].label);
254
- __ Jump(jump_table_[i].address, RelocInfo::RUNTIME_ENTRY);
300
+ Address entry = jump_table_[i].address;
301
+ bool is_lazy_deopt = jump_table_[i].is_lazy_deopt;
302
+ Deoptimizer::BailoutType type =
303
+ is_lazy_deopt ? Deoptimizer::LAZY : Deoptimizer::EAGER;
304
+ int id = Deoptimizer::GetDeoptimizationId(entry, type);
305
+ if (id == Deoptimizer::kNotDeoptimizationEntry) {
306
+ Comment(";;; jump table entry %d.", i);
307
+ } else {
308
+ Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id);
309
+ }
310
+ if (jump_table_[i].needs_frame) {
311
+ __ movq(kScratchRegister, ExternalReference::ForDeoptEntry(entry));
312
+ if (is_lazy_deopt) {
313
+ if (needs_frame_is_call.is_bound()) {
314
+ __ jmp(&needs_frame_is_call);
315
+ } else {
316
+ __ bind(&needs_frame_is_call);
317
+ __ push(rbp);
318
+ __ movq(rbp, rsp);
319
+ __ push(rsi);
320
+ // This variant of deopt can only be used with stubs. Since we don't
321
+ // have a function pointer to install in the stack frame that we're
322
+ // building, install a special marker there instead.
323
+ ASSERT(info()->IsStub());
324
+ __ Move(rsi, Smi::FromInt(StackFrame::STUB));
325
+ __ push(rsi);
326
+ __ movq(rsi, MemOperand(rsp, kPointerSize));
327
+ __ call(kScratchRegister);
328
+ }
329
+ } else {
330
+ if (needs_frame_not_call.is_bound()) {
331
+ __ jmp(&needs_frame_not_call);
332
+ } else {
333
+ __ bind(&needs_frame_not_call);
334
+ __ push(rbp);
335
+ __ movq(rbp, rsp);
336
+ __ push(rsi);
337
+ // This variant of deopt can only be used with stubs. Since we don't
338
+ // have a function pointer to install in the stack frame that we're
339
+ // building, install a special marker there instead.
340
+ ASSERT(info()->IsStub());
341
+ __ Move(rsi, Smi::FromInt(StackFrame::STUB));
342
+ __ push(rsi);
343
+ __ movq(rsi, MemOperand(rsp, kPointerSize));
344
+ __ jmp(kScratchRegister);
345
+ }
346
+ }
347
+ } else {
348
+ if (is_lazy_deopt) {
349
+ __ Call(entry, RelocInfo::RUNTIME_ENTRY);
350
+ } else {
351
+ __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
352
+ }
353
+ }
255
354
  }
256
355
  return !is_aborted();
257
356
  }
@@ -263,10 +362,32 @@ bool LCodeGen::GenerateDeferredCode() {
263
362
  for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
264
363
  LDeferredCode* code = deferred_[i];
265
364
  __ bind(code->entry());
365
+ if (NeedsDeferredFrame()) {
366
+ Comment(";;; Deferred build frame",
367
+ code->instruction_index(),
368
+ code->instr()->Mnemonic());
369
+ ASSERT(!frame_is_built_);
370
+ ASSERT(info()->IsStub());
371
+ frame_is_built_ = true;
372
+ // Build the frame in such a way that esi isn't trashed.
373
+ __ push(rbp); // Caller's frame pointer.
374
+ __ push(Operand(rbp, StandardFrameConstants::kContextOffset));
375
+ __ Push(Smi::FromInt(StackFrame::STUB));
376
+ __ lea(rbp, Operand(rsp, 2 * kPointerSize));
377
+ }
266
378
  Comment(";;; Deferred code @%d: %s.",
267
379
  code->instruction_index(),
268
380
  code->instr()->Mnemonic());
269
381
  code->Generate();
382
+ if (NeedsDeferredFrame()) {
383
+ Comment(";;; Deferred destroy frame",
384
+ code->instruction_index(),
385
+ code->instr()->Mnemonic());
386
+ ASSERT(frame_is_built_);
387
+ frame_is_built_ = false;
388
+ __ movq(rsp, rbp);
389
+ __ pop(rbp);
390
+ }
270
391
  __ jmp(code->exit());
271
392
  }
272
393
  }
@@ -320,24 +441,22 @@ bool LCodeGen::IsTaggedConstant(LConstantOperand* op) const {
320
441
 
321
442
 
322
443
  int LCodeGen::ToInteger32(LConstantOperand* op) const {
323
- Handle<Object> value = chunk_->LookupLiteral(op);
324
- ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
325
- ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
326
- value->Number());
327
- return static_cast<int32_t>(value->Number());
444
+ HConstant* constant = chunk_->LookupConstant(op);
445
+ return constant->Integer32Value();
328
446
  }
329
447
 
330
448
 
331
449
  double LCodeGen::ToDouble(LConstantOperand* op) const {
332
- Handle<Object> value = chunk_->LookupLiteral(op);
333
- return value->Number();
450
+ HConstant* constant = chunk_->LookupConstant(op);
451
+ ASSERT(constant->HasDoubleValue());
452
+ return constant->DoubleValue();
334
453
  }
335
454
 
336
455
 
337
456
  Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
338
- Handle<Object> literal = chunk_->LookupLiteral(op);
457
+ HConstant* constant = chunk_->LookupConstant(op);
339
458
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsTagged());
340
- return literal;
459
+ return constant->handle();
341
460
  }
342
461
 
343
462
 
@@ -358,7 +477,9 @@ Operand LCodeGen::ToOperand(LOperand* op) const {
358
477
 
359
478
 
360
479
  void LCodeGen::WriteTranslation(LEnvironment* environment,
361
- Translation* translation) {
480
+ Translation* translation,
481
+ int* arguments_index,
482
+ int* arguments_count) {
362
483
  if (environment == NULL) return;
363
484
 
364
485
  // The translation includes one command per value in the environment.
@@ -366,8 +487,23 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
366
487
  // The output frame height does not include the parameters.
367
488
  int height = translation_size - environment->parameter_count();
368
489
 
369
- WriteTranslation(environment->outer(), translation);
370
- int closure_id = DefineDeoptimizationLiteral(environment->closure());
490
+ // Function parameters are arguments to the outermost environment. The
491
+ // arguments index points to the first element of a sequence of tagged
492
+ // values on the stack that represent the arguments. This needs to be
493
+ // kept in sync with the LArgumentsElements implementation.
494
+ *arguments_index = -environment->parameter_count();
495
+ *arguments_count = environment->parameter_count();
496
+
497
+ WriteTranslation(environment->outer(),
498
+ translation,
499
+ arguments_index,
500
+ arguments_count);
501
+ bool has_closure_id = !info()->closure().is_null() &&
502
+ *info()->closure() != *environment->closure();
503
+ int closure_id = has_closure_id
504
+ ? DefineDeoptimizationLiteral(environment->closure())
505
+ : Translation::kSelfLiteralId;
506
+
371
507
  switch (environment->frame_type()) {
372
508
  case JS_FUNCTION:
373
509
  translation->BeginJSFrame(environment->ast_id(), closure_id, height);
@@ -375,12 +511,34 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
375
511
  case JS_CONSTRUCT:
376
512
  translation->BeginConstructStubFrame(closure_id, translation_size);
377
513
  break;
514
+ case JS_GETTER:
515
+ ASSERT(translation_size == 1);
516
+ ASSERT(height == 0);
517
+ translation->BeginGetterStubFrame(closure_id);
518
+ break;
519
+ case JS_SETTER:
520
+ ASSERT(translation_size == 2);
521
+ ASSERT(height == 0);
522
+ translation->BeginSetterStubFrame(closure_id);
523
+ break;
378
524
  case ARGUMENTS_ADAPTOR:
379
525
  translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
380
526
  break;
381
- default:
382
- UNREACHABLE();
527
+ case STUB:
528
+ translation->BeginCompiledStubFrame();
529
+ break;
530
+ }
531
+
532
+ // Inlined frames which push their arguments cause the index to be
533
+ // bumped and a new stack area to be used for materialization.
534
+ if (environment->entry() != NULL &&
535
+ environment->entry()->arguments_pushed()) {
536
+ *arguments_index = *arguments_index < 0
537
+ ? GetStackSlotCount()
538
+ : *arguments_index + *arguments_count;
539
+ *arguments_count = environment->entry()->arguments_count() + 1;
383
540
  }
541
+
384
542
  for (int i = 0; i < translation_size; ++i) {
385
543
  LOperand* value = environment->values()->at(i);
386
544
  // spilled_registers_ and spilled_double_registers_ are either
@@ -391,7 +549,10 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
391
549
  translation->MarkDuplicate();
392
550
  AddToTranslation(translation,
393
551
  environment->spilled_registers()[value->index()],
394
- environment->HasTaggedValueAt(i));
552
+ environment->HasTaggedValueAt(i),
553
+ environment->HasUint32ValueAt(i),
554
+ *arguments_index,
555
+ *arguments_count);
395
556
  } else if (
396
557
  value->IsDoubleRegister() &&
397
558
  environment->spilled_double_registers()[value->index()] != NULL) {
@@ -399,26 +560,39 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
399
560
  AddToTranslation(
400
561
  translation,
401
562
  environment->spilled_double_registers()[value->index()],
402
- false);
563
+ false,
564
+ false,
565
+ *arguments_index,
566
+ *arguments_count);
403
567
  }
404
568
  }
405
569
 
406
- AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
570
+ AddToTranslation(translation,
571
+ value,
572
+ environment->HasTaggedValueAt(i),
573
+ environment->HasUint32ValueAt(i),
574
+ *arguments_index,
575
+ *arguments_count);
407
576
  }
408
577
  }
409
578
 
410
579
 
411
580
  void LCodeGen::AddToTranslation(Translation* translation,
412
581
  LOperand* op,
413
- bool is_tagged) {
582
+ bool is_tagged,
583
+ bool is_uint32,
584
+ int arguments_index,
585
+ int arguments_count) {
414
586
  if (op == NULL) {
415
587
  // TODO(twuerthinger): Introduce marker operands to indicate that this value
416
588
  // is not present and must be reconstructed from the deoptimizer. Currently
417
589
  // this is only used for the arguments object.
418
- translation->StoreArgumentsObject();
590
+ translation->StoreArgumentsObject(arguments_index, arguments_count);
419
591
  } else if (op->IsStackSlot()) {
420
592
  if (is_tagged) {
421
593
  translation->StoreStackSlot(op->index());
594
+ } else if (is_uint32) {
595
+ translation->StoreUint32StackSlot(op->index());
422
596
  } else {
423
597
  translation->StoreInt32StackSlot(op->index());
424
598
  }
@@ -432,6 +606,8 @@ void LCodeGen::AddToTranslation(Translation* translation,
432
606
  Register reg = ToRegister(op);
433
607
  if (is_tagged) {
434
608
  translation->StoreRegister(reg);
609
+ } else if (is_uint32) {
610
+ translation->StoreUint32Register(reg);
435
611
  } else {
436
612
  translation->StoreInt32Register(reg);
437
613
  }
@@ -439,8 +615,8 @@ void LCodeGen::AddToTranslation(Translation* translation,
439
615
  XMMRegister reg = ToDoubleRegister(op);
440
616
  translation->StoreDoubleRegister(reg);
441
617
  } else if (op->IsConstantOperand()) {
442
- Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
443
- int src_index = DefineDeoptimizationLiteral(literal);
618
+ HConstant* constant = chunk()->LookupConstant(LConstantOperand::cast(op));
619
+ int src_index = DefineDeoptimizationLiteral(constant->handle());
444
620
  translation->StoreLiteral(src_index);
445
621
  } else {
446
622
  UNREACHABLE();
@@ -517,20 +693,22 @@ void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
517
693
 
518
694
  int frame_count = 0;
519
695
  int jsframe_count = 0;
696
+ int args_index = 0;
697
+ int args_count = 0;
520
698
  for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
521
699
  ++frame_count;
522
700
  if (e->frame_type() == JS_FUNCTION) {
523
701
  ++jsframe_count;
524
702
  }
525
703
  }
526
- Translation translation(&translations_, frame_count, jsframe_count);
527
- WriteTranslation(environment, &translation);
704
+ Translation translation(&translations_, frame_count, jsframe_count, zone());
705
+ WriteTranslation(environment, &translation, &args_index, &args_count);
528
706
  int deoptimization_index = deoptimizations_.length();
529
707
  int pc_offset = masm()->pc_offset();
530
708
  environment->Register(deoptimization_index,
531
709
  translation.index(),
532
710
  (mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
533
- deoptimizations_.Add(environment);
711
+ deoptimizations_.Add(environment, environment->zone());
534
712
  }
535
713
  }
536
714
 
@@ -539,20 +717,33 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
539
717
  RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
540
718
  ASSERT(environment->HasBeenRegistered());
541
719
  int id = environment->deoptimization_index();
542
- Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
720
+ ASSERT(info()->IsOptimizing() || info()->IsStub());
721
+ Deoptimizer::BailoutType bailout_type = info()->IsStub()
722
+ ? Deoptimizer::LAZY
723
+ : Deoptimizer::EAGER;
724
+ Address entry = Deoptimizer::GetDeoptimizationEntry(id, bailout_type);
543
725
  if (entry == NULL) {
544
726
  Abort("bailout was not prepared");
545
727
  return;
546
728
  }
547
729
 
730
+ ASSERT(info()->IsStub() || frame_is_built_);
731
+ bool lazy_deopt = info()->IsStub();
548
732
  if (cc == no_condition) {
549
- __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
733
+ if (lazy_deopt) {
734
+ __ Call(entry, RelocInfo::RUNTIME_ENTRY);
735
+ } else {
736
+ __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
737
+ }
550
738
  } else {
551
739
  // We often have several deopts to the same entry, reuse the last
552
740
  // jump entry if this is the case.
553
741
  if (jump_table_.is_empty() ||
554
- jump_table_.last().address != entry) {
555
- jump_table_.Add(JumpTableEntry(entry));
742
+ jump_table_.last().address != entry ||
743
+ jump_table_.last().needs_frame != !frame_is_built_ ||
744
+ jump_table_.last().is_lazy_deopt != lazy_deopt) {
745
+ JumpTableEntry table_entry(entry, !frame_is_built_, lazy_deopt);
746
+ jump_table_.Add(table_entry, zone());
556
747
  }
557
748
  __ j(cc, &jump_table_.last().label);
558
749
  }
@@ -576,13 +767,13 @@ void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
576
767
  }
577
768
  data->SetLiteralArray(*literals);
578
769
 
579
- data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
770
+ data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id().ToInt()));
580
771
  data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
581
772
 
582
773
  // Populate the deoptimization entries.
583
774
  for (int i = 0; i < length; i++) {
584
775
  LEnvironment* env = deoptimizations_[i];
585
- data->SetAstId(i, Smi::FromInt(env->ast_id()));
776
+ data->SetAstId(i, env->ast_id());
586
777
  data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
587
778
  data->SetArgumentsStackHeight(i,
588
779
  Smi::FromInt(env->arguments_stack_height()));
@@ -597,7 +788,7 @@ int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
597
788
  for (int i = 0; i < deoptimization_literals_.length(); ++i) {
598
789
  if (deoptimization_literals_[i].is_identical_to(literal)) return i;
599
790
  }
600
- deoptimization_literals_.Add(literal);
791
+ deoptimization_literals_.Add(literal, zone());
601
792
  return result;
602
793
  }
603
794
 
@@ -644,14 +835,14 @@ void LCodeGen::RecordSafepoint(
644
835
  for (int i = 0; i < operands->length(); i++) {
645
836
  LOperand* pointer = operands->at(i);
646
837
  if (pointer->IsStackSlot()) {
647
- safepoint.DefinePointerSlot(pointer->index());
838
+ safepoint.DefinePointerSlot(pointer->index(), zone());
648
839
  } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
649
- safepoint.DefinePointerRegister(ToRegister(pointer));
840
+ safepoint.DefinePointerRegister(ToRegister(pointer), zone());
650
841
  }
651
842
  }
652
843
  if (kind & Safepoint::kWithRegisters) {
653
844
  // Register rsi always contains a pointer to the context.
654
- safepoint.DefinePointerRegister(rsi);
845
+ safepoint.DefinePointerRegister(rsi, zone());
655
846
  }
656
847
  }
657
848
 
@@ -663,7 +854,7 @@ void LCodeGen::RecordSafepoint(LPointerMap* pointers,
663
854
 
664
855
 
665
856
  void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) {
666
- LPointerMap empty_pointers(RelocInfo::kNoPosition);
857
+ LPointerMap empty_pointers(RelocInfo::kNoPosition, zone());
667
858
  RecordSafepoint(&empty_pointers, deopt_mode);
668
859
  }
669
860
 
@@ -771,7 +962,7 @@ void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
771
962
 
772
963
  void LCodeGen::DoModI(LModI* instr) {
773
964
  if (instr->hydrogen()->HasPowerOf2Divisor()) {
774
- Register dividend = ToRegister(instr->InputAt(0));
965
+ Register dividend = ToRegister(instr->left());
775
966
 
776
967
  int32_t divisor =
777
968
  HConstant::cast(instr->hydrogen()->right())->Integer32Value();
@@ -795,8 +986,8 @@ void LCodeGen::DoModI(LModI* instr) {
795
986
  __ bind(&done);
796
987
  } else {
797
988
  Label done, remainder_eq_dividend, slow, do_subtraction, both_positive;
798
- Register left_reg = ToRegister(instr->InputAt(0));
799
- Register right_reg = ToRegister(instr->InputAt(1));
989
+ Register left_reg = ToRegister(instr->left());
990
+ Register right_reg = ToRegister(instr->right());
800
991
  Register result_reg = ToRegister(instr->result());
801
992
 
802
993
  ASSERT(left_reg.is(rax));
@@ -826,7 +1017,7 @@ void LCodeGen::DoModI(LModI* instr) {
826
1017
  __ j(less, &remainder_eq_dividend, Label::kNear);
827
1018
 
828
1019
  // Check if the divisor is a PowerOfTwo integer.
829
- Register scratch = ToRegister(instr->TempAt(0));
1020
+ Register scratch = ToRegister(instr->temp());
830
1021
  __ movl(scratch, right_reg);
831
1022
  __ subl(scratch, Immediate(1));
832
1023
  __ testl(scratch, right_reg);
@@ -849,6 +1040,17 @@ void LCodeGen::DoModI(LModI* instr) {
849
1040
 
850
1041
  // Slow case, using idiv instruction.
851
1042
  __ bind(&slow);
1043
+
1044
+ // Check for (kMinInt % -1).
1045
+ if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
1046
+ Label left_not_min_int;
1047
+ __ cmpl(left_reg, Immediate(kMinInt));
1048
+ __ j(not_zero, &left_not_min_int, Label::kNear);
1049
+ __ cmpl(right_reg, Immediate(-1));
1050
+ DeoptimizeIf(zero, instr->environment());
1051
+ __ bind(&left_not_min_int);
1052
+ }
1053
+
852
1054
  // Sign extend eax to edx.
853
1055
  // (We are using only the low 32 bits of the values.)
854
1056
  __ cdq();
@@ -882,24 +1084,144 @@ void LCodeGen::DoModI(LModI* instr) {
882
1084
  }
883
1085
 
884
1086
 
1087
+ void LCodeGen::DoMathFloorOfDiv(LMathFloorOfDiv* instr) {
1088
+ ASSERT(instr->right()->IsConstantOperand());
1089
+
1090
+ const Register dividend = ToRegister(instr->left());
1091
+ int32_t divisor = ToInteger32(LConstantOperand::cast(instr->right()));
1092
+ const Register result = ToRegister(instr->result());
1093
+
1094
+ switch (divisor) {
1095
+ case 0:
1096
+ DeoptimizeIf(no_condition, instr->environment());
1097
+ return;
1098
+
1099
+ case 1:
1100
+ if (!result.is(dividend)) {
1101
+ __ movl(result, dividend);
1102
+ }
1103
+ return;
1104
+
1105
+ case -1:
1106
+ if (!result.is(dividend)) {
1107
+ __ movl(result, dividend);
1108
+ }
1109
+ __ negl(result);
1110
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
1111
+ DeoptimizeIf(zero, instr->environment());
1112
+ }
1113
+ if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
1114
+ DeoptimizeIf(overflow, instr->environment());
1115
+ }
1116
+ return;
1117
+ }
1118
+
1119
+ uint32_t divisor_abs = abs(divisor);
1120
+ if (IsPowerOf2(divisor_abs)) {
1121
+ int32_t power = WhichPowerOf2(divisor_abs);
1122
+ if (divisor < 0) {
1123
+ __ movsxlq(result, dividend);
1124
+ __ neg(result);
1125
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
1126
+ DeoptimizeIf(zero, instr->environment());
1127
+ }
1128
+ __ sar(result, Immediate(power));
1129
+ } else {
1130
+ if (!result.is(dividend)) {
1131
+ __ movl(result, dividend);
1132
+ }
1133
+ __ sarl(result, Immediate(power));
1134
+ }
1135
+ } else {
1136
+ Register reg1 = ToRegister(instr->temp());
1137
+ Register reg2 = ToRegister(instr->result());
1138
+
1139
+ // Find b which: 2^b < divisor_abs < 2^(b+1).
1140
+ unsigned b = 31 - CompilerIntrinsics::CountLeadingZeros(divisor_abs);
1141
+ unsigned shift = 32 + b; // Precision +1bit (effectively).
1142
+ double multiplier_f =
1143
+ static_cast<double>(static_cast<uint64_t>(1) << shift) / divisor_abs;
1144
+ int64_t multiplier;
1145
+ if (multiplier_f - floor(multiplier_f) < 0.5) {
1146
+ multiplier = static_cast<int64_t>(floor(multiplier_f));
1147
+ } else {
1148
+ multiplier = static_cast<int64_t>(floor(multiplier_f)) + 1;
1149
+ }
1150
+ // The multiplier is a uint32.
1151
+ ASSERT(multiplier > 0 &&
1152
+ multiplier < (static_cast<int64_t>(1) << 32));
1153
+ // The multiply is int64, so sign-extend to r64.
1154
+ __ movsxlq(reg1, dividend);
1155
+ if (divisor < 0 &&
1156
+ instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
1157
+ __ neg(reg1);
1158
+ DeoptimizeIf(zero, instr->environment());
1159
+ }
1160
+ __ movq(reg2, multiplier, RelocInfo::NONE64);
1161
+ // Result just fit in r64, because it's int32 * uint32.
1162
+ __ imul(reg2, reg1);
1163
+
1164
+ __ addq(reg2, Immediate(1 << 30));
1165
+ __ sar(reg2, Immediate(shift));
1166
+ }
1167
+ }
1168
+
1169
+
885
1170
  void LCodeGen::DoDivI(LDivI* instr) {
886
- LOperand* right = instr->InputAt(1);
1171
+ if (!instr->is_flooring() && instr->hydrogen()->HasPowerOf2Divisor()) {
1172
+ Register dividend = ToRegister(instr->left());
1173
+ int32_t divisor =
1174
+ HConstant::cast(instr->hydrogen()->right())->Integer32Value();
1175
+ int32_t test_value = 0;
1176
+ int32_t power = 0;
1177
+
1178
+ if (divisor > 0) {
1179
+ test_value = divisor - 1;
1180
+ power = WhichPowerOf2(divisor);
1181
+ } else {
1182
+ // Check for (0 / -x) that will produce negative zero.
1183
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
1184
+ __ testl(dividend, dividend);
1185
+ DeoptimizeIf(zero, instr->environment());
1186
+ }
1187
+ // Check for (kMinInt / -1).
1188
+ if (divisor == -1 && instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
1189
+ __ cmpl(dividend, Immediate(kMinInt));
1190
+ DeoptimizeIf(zero, instr->environment());
1191
+ }
1192
+ test_value = - divisor - 1;
1193
+ power = WhichPowerOf2(-divisor);
1194
+ }
1195
+
1196
+ if (test_value != 0) {
1197
+ // Deoptimize if remainder is not 0.
1198
+ __ testl(dividend, Immediate(test_value));
1199
+ DeoptimizeIf(not_zero, instr->environment());
1200
+ __ sarl(dividend, Immediate(power));
1201
+ }
1202
+
1203
+ if (divisor < 0) __ negl(dividend);
1204
+
1205
+ return;
1206
+ }
1207
+
1208
+ LOperand* right = instr->right();
887
1209
  ASSERT(ToRegister(instr->result()).is(rax));
888
- ASSERT(ToRegister(instr->InputAt(0)).is(rax));
889
- ASSERT(!ToRegister(instr->InputAt(1)).is(rax));
890
- ASSERT(!ToRegister(instr->InputAt(1)).is(rdx));
1210
+ ASSERT(ToRegister(instr->left()).is(rax));
1211
+ ASSERT(!ToRegister(instr->right()).is(rax));
1212
+ ASSERT(!ToRegister(instr->right()).is(rdx));
891
1213
 
892
1214
  Register left_reg = rax;
893
1215
 
894
1216
  // Check for x / 0.
895
1217
  Register right_reg = ToRegister(right);
896
- if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
1218
+ if (instr->hydrogen_value()->CheckFlag(HValue::kCanBeDivByZero)) {
897
1219
  __ testl(right_reg, right_reg);
898
1220
  DeoptimizeIf(zero, instr->environment());
899
1221
  }
900
1222
 
901
1223
  // Check for (0 / -x) that will produce negative zero.
902
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
1224
+ if (instr->hydrogen_value()->CheckFlag(HValue::kBailoutOnMinusZero)) {
903
1225
  Label left_not_zero;
904
1226
  __ testl(left_reg, left_reg);
905
1227
  __ j(not_zero, &left_not_zero, Label::kNear);
@@ -908,8 +1230,8 @@ void LCodeGen::DoDivI(LDivI* instr) {
908
1230
  __ bind(&left_not_zero);
909
1231
  }
910
1232
 
911
- // Check for (-kMinInt / -1).
912
- if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
1233
+ // Check for (kMinInt / -1).
1234
+ if (instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)) {
913
1235
  Label left_not_min_int;
914
1236
  __ cmpl(left_reg, Immediate(kMinInt));
915
1237
  __ j(not_zero, &left_not_min_int, Label::kNear);
@@ -922,15 +1244,25 @@ void LCodeGen::DoDivI(LDivI* instr) {
922
1244
  __ cdq();
923
1245
  __ idivl(right_reg);
924
1246
 
925
- // Deoptimize if remainder is not 0.
926
- __ testl(rdx, rdx);
927
- DeoptimizeIf(not_zero, instr->environment());
1247
+ if (!instr->is_flooring()) {
1248
+ // Deoptimize if remainder is not 0.
1249
+ __ testl(rdx, rdx);
1250
+ DeoptimizeIf(not_zero, instr->environment());
1251
+ } else {
1252
+ Label done;
1253
+ __ testl(rdx, rdx);
1254
+ __ j(zero, &done, Label::kNear);
1255
+ __ xorl(rdx, right_reg);
1256
+ __ sarl(rdx, Immediate(31));
1257
+ __ addl(rax, rdx);
1258
+ __ bind(&done);
1259
+ }
928
1260
  }
929
1261
 
930
1262
 
931
1263
  void LCodeGen::DoMulI(LMulI* instr) {
932
- Register left = ToRegister(instr->InputAt(0));
933
- LOperand* right = instr->InputAt(1);
1264
+ Register left = ToRegister(instr->left());
1265
+ LOperand* right = instr->right();
934
1266
 
935
1267
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
936
1268
  __ movl(kScratchRegister, left);
@@ -995,8 +1327,11 @@ void LCodeGen::DoMulI(LMulI* instr) {
995
1327
  __ testl(left, left);
996
1328
  __ j(not_zero, &done, Label::kNear);
997
1329
  if (right->IsConstantOperand()) {
998
- if (ToInteger32(LConstantOperand::cast(right)) <= 0) {
1330
+ if (ToInteger32(LConstantOperand::cast(right)) < 0) {
999
1331
  DeoptimizeIf(no_condition, instr->environment());
1332
+ } else if (ToInteger32(LConstantOperand::cast(right)) == 0) {
1333
+ __ cmpl(kScratchRegister, Immediate(0));
1334
+ DeoptimizeIf(less, instr->environment());
1000
1335
  }
1001
1336
  } else if (right->IsStackSlot()) {
1002
1337
  __ orl(kScratchRegister, ToOperand(right));
@@ -1012,8 +1347,8 @@ void LCodeGen::DoMulI(LMulI* instr) {
1012
1347
 
1013
1348
 
1014
1349
  void LCodeGen::DoBitI(LBitI* instr) {
1015
- LOperand* left = instr->InputAt(0);
1016
- LOperand* right = instr->InputAt(1);
1350
+ LOperand* left = instr->left();
1351
+ LOperand* right = instr->right();
1017
1352
  ASSERT(left->Equals(instr->result()));
1018
1353
  ASSERT(left->IsRegister());
1019
1354
 
@@ -1069,14 +1404,17 @@ void LCodeGen::DoBitI(LBitI* instr) {
1069
1404
 
1070
1405
 
1071
1406
  void LCodeGen::DoShiftI(LShiftI* instr) {
1072
- LOperand* left = instr->InputAt(0);
1073
- LOperand* right = instr->InputAt(1);
1407
+ LOperand* left = instr->left();
1408
+ LOperand* right = instr->right();
1074
1409
  ASSERT(left->Equals(instr->result()));
1075
1410
  ASSERT(left->IsRegister());
1076
1411
  if (right->IsRegister()) {
1077
1412
  ASSERT(ToRegister(right).is(rcx));
1078
1413
 
1079
1414
  switch (instr->op()) {
1415
+ case Token::ROR:
1416
+ __ rorl_cl(ToRegister(left));
1417
+ break;
1080
1418
  case Token::SAR:
1081
1419
  __ sarl_cl(ToRegister(left));
1082
1420
  break;
@@ -1098,6 +1436,11 @@ void LCodeGen::DoShiftI(LShiftI* instr) {
1098
1436
  int value = ToInteger32(LConstantOperand::cast(right));
1099
1437
  uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
1100
1438
  switch (instr->op()) {
1439
+ case Token::ROR:
1440
+ if (shift_count != 0) {
1441
+ __ rorl(ToRegister(left), Immediate(shift_count));
1442
+ }
1443
+ break;
1101
1444
  case Token::SAR:
1102
1445
  if (shift_count != 0) {
1103
1446
  __ sarl(ToRegister(left), Immediate(shift_count));
@@ -1125,8 +1468,8 @@ void LCodeGen::DoShiftI(LShiftI* instr) {
1125
1468
 
1126
1469
 
1127
1470
  void LCodeGen::DoSubI(LSubI* instr) {
1128
- LOperand* left = instr->InputAt(0);
1129
- LOperand* right = instr->InputAt(1);
1471
+ LOperand* left = instr->left();
1472
+ LOperand* right = instr->right();
1130
1473
  ASSERT(left->Equals(instr->result()));
1131
1474
 
1132
1475
  if (right->IsConstantOperand()) {
@@ -1160,7 +1503,7 @@ void LCodeGen::DoConstantD(LConstantD* instr) {
1160
1503
  if (int_val == 0) {
1161
1504
  __ xorps(res, res);
1162
1505
  } else {
1163
- Register tmp = ToRegister(instr->TempAt(0));
1506
+ Register tmp = ToRegister(instr->temp());
1164
1507
  __ Set(tmp, int_val);
1165
1508
  __ movq(res, tmp);
1166
1509
  }
@@ -1180,21 +1523,28 @@ void LCodeGen::DoConstantT(LConstantT* instr) {
1180
1523
 
1181
1524
  void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
1182
1525
  Register result = ToRegister(instr->result());
1183
- Register array = ToRegister(instr->InputAt(0));
1526
+ Register array = ToRegister(instr->value());
1184
1527
  __ movq(result, FieldOperand(array, JSArray::kLengthOffset));
1185
1528
  }
1186
1529
 
1187
1530
 
1188
1531
  void LCodeGen::DoFixedArrayBaseLength(LFixedArrayBaseLength* instr) {
1189
1532
  Register result = ToRegister(instr->result());
1190
- Register array = ToRegister(instr->InputAt(0));
1533
+ Register array = ToRegister(instr->value());
1191
1534
  __ movq(result, FieldOperand(array, FixedArrayBase::kLengthOffset));
1192
1535
  }
1193
1536
 
1194
1537
 
1538
+ void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) {
1539
+ Register result = ToRegister(instr->result());
1540
+ Register map = ToRegister(instr->value());
1541
+ __ EnumLength(result, map);
1542
+ }
1543
+
1544
+
1195
1545
  void LCodeGen::DoElementsKind(LElementsKind* instr) {
1196
1546
  Register result = ToRegister(instr->result());
1197
- Register input = ToRegister(instr->InputAt(0));
1547
+ Register input = ToRegister(instr->value());
1198
1548
 
1199
1549
  // Load map into |result|.
1200
1550
  __ movq(result, FieldOperand(input, HeapObject::kMapOffset));
@@ -1207,7 +1557,7 @@ void LCodeGen::DoElementsKind(LElementsKind* instr) {
1207
1557
 
1208
1558
 
1209
1559
  void LCodeGen::DoValueOf(LValueOf* instr) {
1210
- Register input = ToRegister(instr->InputAt(0));
1560
+ Register input = ToRegister(instr->value());
1211
1561
  Register result = ToRegister(instr->result());
1212
1562
  ASSERT(input.is(result));
1213
1563
  Label done;
@@ -1224,18 +1574,17 @@ void LCodeGen::DoValueOf(LValueOf* instr) {
1224
1574
 
1225
1575
 
1226
1576
  void LCodeGen::DoDateField(LDateField* instr) {
1227
- Register object = ToRegister(instr->InputAt(0));
1577
+ Register object = ToRegister(instr->date());
1228
1578
  Register result = ToRegister(instr->result());
1229
1579
  Smi* index = instr->index();
1230
- Label runtime, done;
1580
+ Label runtime, done, not_date_object;
1231
1581
  ASSERT(object.is(result));
1232
1582
  ASSERT(object.is(rax));
1233
1583
 
1234
- #ifdef DEBUG
1235
- __ AbortIfSmi(object);
1584
+ Condition cc = masm()->CheckSmi(object);
1585
+ DeoptimizeIf(cc, instr->environment());
1236
1586
  __ CmpObjectType(object, JS_DATE_TYPE, kScratchRegister);
1237
- __ Assert(equal, "Trying to get date field from non-date.");
1238
- #endif
1587
+ DeoptimizeIf(not_equal, instr->environment());
1239
1588
 
1240
1589
  if (index->value() == 0) {
1241
1590
  __ movq(result, FieldOperand(object, JSDate::kValueOffset));
@@ -1254,10 +1603,10 @@ void LCodeGen::DoDateField(LDateField* instr) {
1254
1603
  __ PrepareCallCFunction(2);
1255
1604
  #ifdef _WIN64
1256
1605
  __ movq(rcx, object);
1257
- __ movq(rdx, index, RelocInfo::NONE);
1606
+ __ movq(rdx, index, RelocInfo::NONE64);
1258
1607
  #else
1259
1608
  __ movq(rdi, object);
1260
- __ movq(rsi, index, RelocInfo::NONE);
1609
+ __ movq(rsi, index, RelocInfo::NONE64);
1261
1610
  #endif
1262
1611
  __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
1263
1612
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -1266,15 +1615,24 @@ void LCodeGen::DoDateField(LDateField* instr) {
1266
1615
  }
1267
1616
 
1268
1617
 
1618
+ void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) {
1619
+ SeqStringSetCharGenerator::Generate(masm(),
1620
+ instr->encoding(),
1621
+ ToRegister(instr->string()),
1622
+ ToRegister(instr->index()),
1623
+ ToRegister(instr->value()));
1624
+ }
1625
+
1626
+
1269
1627
  void LCodeGen::DoBitNotI(LBitNotI* instr) {
1270
- LOperand* input = instr->InputAt(0);
1628
+ LOperand* input = instr->value();
1271
1629
  ASSERT(input->Equals(instr->result()));
1272
1630
  __ not_(ToRegister(input));
1273
1631
  }
1274
1632
 
1275
1633
 
1276
1634
  void LCodeGen::DoThrow(LThrow* instr) {
1277
- __ push(ToRegister(instr->InputAt(0)));
1635
+ __ push(ToRegister(instr->value()));
1278
1636
  CallRuntime(Runtime::kThrow, 1, instr);
1279
1637
 
1280
1638
  if (FLAG_debug_code) {
@@ -1285,8 +1643,8 @@ void LCodeGen::DoThrow(LThrow* instr) {
1285
1643
 
1286
1644
 
1287
1645
  void LCodeGen::DoAddI(LAddI* instr) {
1288
- LOperand* left = instr->InputAt(0);
1289
- LOperand* right = instr->InputAt(1);
1646
+ LOperand* left = instr->left();
1647
+ LOperand* right = instr->right();
1290
1648
  ASSERT(left->Equals(instr->result()));
1291
1649
 
1292
1650
  if (right->IsConstantOperand()) {
@@ -1304,9 +1662,75 @@ void LCodeGen::DoAddI(LAddI* instr) {
1304
1662
  }
1305
1663
 
1306
1664
 
1665
+ void LCodeGen::DoMathMinMax(LMathMinMax* instr) {
1666
+ LOperand* left = instr->left();
1667
+ LOperand* right = instr->right();
1668
+ ASSERT(left->Equals(instr->result()));
1669
+ HMathMinMax::Operation operation = instr->hydrogen()->operation();
1670
+ if (instr->hydrogen()->representation().IsInteger32()) {
1671
+ Label return_left;
1672
+ Condition condition = (operation == HMathMinMax::kMathMin)
1673
+ ? less_equal
1674
+ : greater_equal;
1675
+ Register left_reg = ToRegister(left);
1676
+ if (right->IsConstantOperand()) {
1677
+ Immediate right_imm =
1678
+ Immediate(ToInteger32(LConstantOperand::cast(right)));
1679
+ __ cmpl(left_reg, right_imm);
1680
+ __ j(condition, &return_left, Label::kNear);
1681
+ __ movq(left_reg, right_imm);
1682
+ } else if (right->IsRegister()) {
1683
+ Register right_reg = ToRegister(right);
1684
+ __ cmpl(left_reg, right_reg);
1685
+ __ j(condition, &return_left, Label::kNear);
1686
+ __ movq(left_reg, right_reg);
1687
+ } else {
1688
+ Operand right_op = ToOperand(right);
1689
+ __ cmpl(left_reg, right_op);
1690
+ __ j(condition, &return_left, Label::kNear);
1691
+ __ movq(left_reg, right_op);
1692
+ }
1693
+ __ bind(&return_left);
1694
+ } else {
1695
+ ASSERT(instr->hydrogen()->representation().IsDouble());
1696
+ Label check_nan_left, check_zero, return_left, return_right;
1697
+ Condition condition = (operation == HMathMinMax::kMathMin) ? below : above;
1698
+ XMMRegister left_reg = ToDoubleRegister(left);
1699
+ XMMRegister right_reg = ToDoubleRegister(right);
1700
+ __ ucomisd(left_reg, right_reg);
1701
+ __ j(parity_even, &check_nan_left, Label::kNear); // At least one NaN.
1702
+ __ j(equal, &check_zero, Label::kNear); // left == right.
1703
+ __ j(condition, &return_left, Label::kNear);
1704
+ __ jmp(&return_right, Label::kNear);
1705
+
1706
+ __ bind(&check_zero);
1707
+ XMMRegister xmm_scratch = xmm0;
1708
+ __ xorps(xmm_scratch, xmm_scratch);
1709
+ __ ucomisd(left_reg, xmm_scratch);
1710
+ __ j(not_equal, &return_left, Label::kNear); // left == right != 0.
1711
+ // At this point, both left and right are either 0 or -0.
1712
+ if (operation == HMathMinMax::kMathMin) {
1713
+ __ orpd(left_reg, right_reg);
1714
+ } else {
1715
+ // Since we operate on +0 and/or -0, addsd and andsd have the same effect.
1716
+ __ addsd(left_reg, right_reg);
1717
+ }
1718
+ __ jmp(&return_left, Label::kNear);
1719
+
1720
+ __ bind(&check_nan_left);
1721
+ __ ucomisd(left_reg, left_reg); // NaN check.
1722
+ __ j(parity_even, &return_left, Label::kNear);
1723
+ __ bind(&return_right);
1724
+ __ movsd(left_reg, right_reg);
1725
+
1726
+ __ bind(&return_left);
1727
+ }
1728
+ }
1729
+
1730
+
1307
1731
  void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
1308
- XMMRegister left = ToDoubleRegister(instr->InputAt(0));
1309
- XMMRegister right = ToDoubleRegister(instr->InputAt(1));
1732
+ XMMRegister left = ToDoubleRegister(instr->left());
1733
+ XMMRegister right = ToDoubleRegister(instr->right());
1310
1734
  XMMRegister result = ToDoubleRegister(instr->result());
1311
1735
  // All operations except MOD are computed in-place.
1312
1736
  ASSERT(instr->op() == Token::MOD || left.is(result));
@@ -1322,6 +1746,7 @@ void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
1322
1746
  break;
1323
1747
  case Token::DIV:
1324
1748
  __ divsd(left, right);
1749
+ __ movaps(left, left);
1325
1750
  break;
1326
1751
  case Token::MOD:
1327
1752
  __ PrepareCallCFunction(2);
@@ -1340,8 +1765,8 @@ void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
1340
1765
 
1341
1766
 
1342
1767
  void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
1343
- ASSERT(ToRegister(instr->InputAt(0)).is(rdx));
1344
- ASSERT(ToRegister(instr->InputAt(1)).is(rax));
1768
+ ASSERT(ToRegister(instr->left()).is(rdx));
1769
+ ASSERT(ToRegister(instr->right()).is(rax));
1345
1770
  ASSERT(ToRegister(instr->result()).is(rax));
1346
1771
 
1347
1772
  BinaryOpStub stub(instr->op(), NO_OVERWRITE);
@@ -1385,17 +1810,17 @@ void LCodeGen::DoBranch(LBranch* instr) {
1385
1810
 
1386
1811
  Representation r = instr->hydrogen()->value()->representation();
1387
1812
  if (r.IsInteger32()) {
1388
- Register reg = ToRegister(instr->InputAt(0));
1813
+ Register reg = ToRegister(instr->value());
1389
1814
  __ testl(reg, reg);
1390
1815
  EmitBranch(true_block, false_block, not_zero);
1391
1816
  } else if (r.IsDouble()) {
1392
- XMMRegister reg = ToDoubleRegister(instr->InputAt(0));
1817
+ XMMRegister reg = ToDoubleRegister(instr->value());
1393
1818
  __ xorps(xmm0, xmm0);
1394
1819
  __ ucomisd(reg, xmm0);
1395
1820
  EmitBranch(true_block, false_block, not_equal);
1396
1821
  } else {
1397
1822
  ASSERT(r.IsTagged());
1398
- Register reg = ToRegister(instr->InputAt(0));
1823
+ Register reg = ToRegister(instr->value());
1399
1824
  HType type = instr->hydrogen()->value()->type();
1400
1825
  if (type.IsBoolean()) {
1401
1826
  __ CompareRoot(reg, Heap::kTrueValueRootIndex);
@@ -1532,8 +1957,8 @@ inline Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
1532
1957
 
1533
1958
 
1534
1959
  void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
1535
- LOperand* left = instr->InputAt(0);
1536
- LOperand* right = instr->InputAt(1);
1960
+ LOperand* left = instr->left();
1961
+ LOperand* right = instr->right();
1537
1962
  int false_block = chunk_->LookupDestination(instr->false_block_id());
1538
1963
  int true_block = chunk_->LookupDestination(instr->true_block_id());
1539
1964
  Condition cc = TokenToCondition(instr->op(), instr->is_double());
@@ -1580,8 +2005,8 @@ void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
1580
2005
 
1581
2006
 
1582
2007
  void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
1583
- Register left = ToRegister(instr->InputAt(0));
1584
- Register right = ToRegister(instr->InputAt(1));
2008
+ Register left = ToRegister(instr->left());
2009
+ Register right = ToRegister(instr->right());
1585
2010
  int false_block = chunk_->LookupDestination(instr->false_block_id());
1586
2011
  int true_block = chunk_->LookupDestination(instr->true_block_id());
1587
2012
 
@@ -1591,7 +2016,7 @@ void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
1591
2016
 
1592
2017
 
1593
2018
  void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
1594
- Register left = ToRegister(instr->InputAt(0));
2019
+ Register left = ToRegister(instr->left());
1595
2020
  int true_block = chunk_->LookupDestination(instr->true_block_id());
1596
2021
  int false_block = chunk_->LookupDestination(instr->false_block_id());
1597
2022
 
@@ -1601,7 +2026,7 @@ void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
1601
2026
 
1602
2027
 
1603
2028
  void LCodeGen::DoIsNilAndBranch(LIsNilAndBranch* instr) {
1604
- Register reg = ToRegister(instr->InputAt(0));
2029
+ Register reg = ToRegister(instr->value());
1605
2030
  int false_block = chunk_->LookupDestination(instr->false_block_id());
1606
2031
 
1607
2032
  // If the expression is known to be untagged or a smi, then it's definitely
@@ -1631,7 +2056,7 @@ void LCodeGen::DoIsNilAndBranch(LIsNilAndBranch* instr) {
1631
2056
  __ JumpIfSmi(reg, false_label);
1632
2057
  // Check for undetectable objects by looking in the bit field in
1633
2058
  // the map. The object has already been smi checked.
1634
- Register scratch = ToRegister(instr->TempAt(0));
2059
+ Register scratch = ToRegister(instr->temp());
1635
2060
  __ movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
1636
2061
  __ testb(FieldOperand(scratch, Map::kBitFieldOffset),
1637
2062
  Immediate(1 << Map::kIsUndetectable));
@@ -1666,7 +2091,7 @@ Condition LCodeGen::EmitIsObject(Register input,
1666
2091
 
1667
2092
 
1668
2093
  void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
1669
- Register reg = ToRegister(instr->InputAt(0));
2094
+ Register reg = ToRegister(instr->value());
1670
2095
 
1671
2096
  int true_block = chunk_->LookupDestination(instr->true_block_id());
1672
2097
  int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1690,8 +2115,8 @@ Condition LCodeGen::EmitIsString(Register input,
1690
2115
 
1691
2116
 
1692
2117
  void LCodeGen::DoIsStringAndBranch(LIsStringAndBranch* instr) {
1693
- Register reg = ToRegister(instr->InputAt(0));
1694
- Register temp = ToRegister(instr->TempAt(0));
2118
+ Register reg = ToRegister(instr->value());
2119
+ Register temp = ToRegister(instr->temp());
1695
2120
 
1696
2121
  int true_block = chunk_->LookupDestination(instr->true_block_id());
1697
2122
  int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1708,11 +2133,11 @@ void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
1708
2133
  int false_block = chunk_->LookupDestination(instr->false_block_id());
1709
2134
 
1710
2135
  Condition is_smi;
1711
- if (instr->InputAt(0)->IsRegister()) {
1712
- Register input = ToRegister(instr->InputAt(0));
2136
+ if (instr->value()->IsRegister()) {
2137
+ Register input = ToRegister(instr->value());
1713
2138
  is_smi = masm()->CheckSmi(input);
1714
2139
  } else {
1715
- Operand input = ToOperand(instr->InputAt(0));
2140
+ Operand input = ToOperand(instr->value());
1716
2141
  is_smi = masm()->CheckSmi(input);
1717
2142
  }
1718
2143
  EmitBranch(true_block, false_block, is_smi);
@@ -1720,8 +2145,8 @@ void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
1720
2145
 
1721
2146
 
1722
2147
  void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
1723
- Register input = ToRegister(instr->InputAt(0));
1724
- Register temp = ToRegister(instr->TempAt(0));
2148
+ Register input = ToRegister(instr->value());
2149
+ Register temp = ToRegister(instr->temp());
1725
2150
 
1726
2151
  int true_block = chunk_->LookupDestination(instr->true_block_id());
1727
2152
  int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1770,7 +2195,7 @@ static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) {
1770
2195
 
1771
2196
 
1772
2197
  void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
1773
- Register input = ToRegister(instr->InputAt(0));
2198
+ Register input = ToRegister(instr->value());
1774
2199
 
1775
2200
  int true_block = chunk_->LookupDestination(instr->true_block_id());
1776
2201
  int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1785,12 +2210,10 @@ void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
1785
2210
 
1786
2211
 
1787
2212
  void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
1788
- Register input = ToRegister(instr->InputAt(0));
2213
+ Register input = ToRegister(instr->value());
1789
2214
  Register result = ToRegister(instr->result());
1790
2215
 
1791
- if (FLAG_debug_code) {
1792
- __ AbortIfNotString(input);
1793
- }
2216
+ __ AssertString(input);
1794
2217
 
1795
2218
  __ movl(result, FieldOperand(input, String::kHashFieldOffset));
1796
2219
  ASSERT(String::kHashShift >= kSmiTagSize);
@@ -1800,7 +2223,7 @@ void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
1800
2223
 
1801
2224
  void LCodeGen::DoHasCachedArrayIndexAndBranch(
1802
2225
  LHasCachedArrayIndexAndBranch* instr) {
1803
- Register input = ToRegister(instr->InputAt(0));
2226
+ Register input = ToRegister(instr->value());
1804
2227
 
1805
2228
  int true_block = chunk_->LookupDestination(instr->true_block_id());
1806
2229
  int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1825,7 +2248,7 @@ void LCodeGen::EmitClassOfTest(Label* is_true,
1825
2248
 
1826
2249
  __ JumpIfSmi(input, is_false);
1827
2250
 
1828
- if (class_name->IsEqualTo(CStrVector("Function"))) {
2251
+ if (class_name->IsOneByteEqualTo(STATIC_ASCII_VECTOR("Function"))) {
1829
2252
  // Assuming the following assertions, we can use the same compares to test
1830
2253
  // for both being a function type and being in the object type range.
1831
2254
  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
@@ -1856,7 +2279,7 @@ void LCodeGen::EmitClassOfTest(Label* is_true,
1856
2279
 
1857
2280
  // Objects with a non-function constructor have class 'Object'.
1858
2281
  __ CmpObjectType(temp, JS_FUNCTION_TYPE, kScratchRegister);
1859
- if (class_name->IsEqualTo(CStrVector("Object"))) {
2282
+ if (class_name->IsOneByteEqualTo(STATIC_ASCII_VECTOR("Object"))) {
1860
2283
  __ j(not_equal, is_true);
1861
2284
  } else {
1862
2285
  __ j(not_equal, is_false);
@@ -1880,9 +2303,9 @@ void LCodeGen::EmitClassOfTest(Label* is_true,
1880
2303
 
1881
2304
 
1882
2305
  void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
1883
- Register input = ToRegister(instr->InputAt(0));
1884
- Register temp = ToRegister(instr->TempAt(0));
1885
- Register temp2 = ToRegister(instr->TempAt(1));
2306
+ Register input = ToRegister(instr->value());
2307
+ Register temp = ToRegister(instr->temp());
2308
+ Register temp2 = ToRegister(instr->temp2());
1886
2309
  Handle<String> class_name = instr->hydrogen()->class_name();
1887
2310
 
1888
2311
  int true_block = chunk_->LookupDestination(instr->true_block_id());
@@ -1898,7 +2321,7 @@ void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
1898
2321
 
1899
2322
 
1900
2323
  void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
1901
- Register reg = ToRegister(instr->InputAt(0));
2324
+ Register reg = ToRegister(instr->value());
1902
2325
  int true_block = instr->true_block_id();
1903
2326
  int false_block = instr->false_block_id();
1904
2327
 
@@ -1909,8 +2332,8 @@ void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
1909
2332
 
1910
2333
  void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
1911
2334
  InstanceofStub stub(InstanceofStub::kNoFlags);
1912
- __ push(ToRegister(instr->InputAt(0)));
1913
- __ push(ToRegister(instr->InputAt(1)));
2335
+ __ push(ToRegister(instr->left()));
2336
+ __ push(ToRegister(instr->right()));
1914
2337
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1915
2338
  Label true_value, done;
1916
2339
  __ testq(rax, rax);
@@ -1941,10 +2364,10 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
1941
2364
 
1942
2365
 
1943
2366
  DeferredInstanceOfKnownGlobal* deferred;
1944
- deferred = new DeferredInstanceOfKnownGlobal(this, instr);
2367
+ deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr);
1945
2368
 
1946
2369
  Label done, false_result;
1947
- Register object = ToRegister(instr->InputAt(0));
2370
+ Register object = ToRegister(instr->value());
1948
2371
 
1949
2372
  // A Smi is not an instance of anything.
1950
2373
  __ JumpIfSmi(object, &false_result);
@@ -1954,7 +2377,7 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
1954
2377
  // instanceof stub.
1955
2378
  Label cache_miss;
1956
2379
  // Use a temp register to avoid memory operands with variable lengths.
1957
- Register map = ToRegister(instr->TempAt(0));
2380
+ Register map = ToRegister(instr->temp());
1958
2381
  __ movq(map, FieldOperand(object, HeapObject::kMapOffset));
1959
2382
  __ bind(deferred->map_check()); // Label for calculating code patching.
1960
2383
  Handle<JSGlobalPropertyCell> cache_cell =
@@ -1997,7 +2420,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
1997
2420
  InstanceofStub::kNoFlags | InstanceofStub::kCallSiteInlineCheck);
1998
2421
  InstanceofStub stub(flags);
1999
2422
 
2000
- __ push(ToRegister(instr->InputAt(0)));
2423
+ __ push(ToRegister(instr->value()));
2001
2424
  __ PushHeapObject(instr->function());
2002
2425
 
2003
2426
  static const int kAdditionalDelta = 10;
@@ -2053,15 +2476,33 @@ void LCodeGen::DoCmpT(LCmpT* instr) {
2053
2476
 
2054
2477
 
2055
2478
  void LCodeGen::DoReturn(LReturn* instr) {
2056
- if (FLAG_trace) {
2479
+ if (FLAG_trace && info()->IsOptimizing()) {
2057
2480
  // Preserve the return value on the stack and rely on the runtime
2058
2481
  // call to return the value in the same register.
2059
2482
  __ push(rax);
2060
2483
  __ CallRuntime(Runtime::kTraceExit, 1);
2061
2484
  }
2062
- __ movq(rsp, rbp);
2063
- __ pop(rbp);
2064
- __ Ret((GetParameterCount() + 1) * kPointerSize, rcx);
2485
+ if (info()->saves_caller_doubles()) {
2486
+ ASSERT(NeedsEagerFrame());
2487
+ BitVector* doubles = chunk()->allocated_double_registers();
2488
+ BitVector::Iterator save_iterator(doubles);
2489
+ int count = 0;
2490
+ while (!save_iterator.Done()) {
2491
+ __ movsd(XMMRegister::FromAllocationIndex(save_iterator.Current()),
2492
+ MemOperand(rsp, count * kDoubleSize));
2493
+ save_iterator.Advance();
2494
+ count++;
2495
+ }
2496
+ }
2497
+ if (NeedsEagerFrame()) {
2498
+ __ movq(rsp, rbp);
2499
+ __ pop(rbp);
2500
+ }
2501
+ if (info()->IsStub()) {
2502
+ __ Ret(0, r10);
2503
+ } else {
2504
+ __ Ret((GetParameterCount() + 1) * kPointerSize, rcx);
2505
+ }
2065
2506
  }
2066
2507
 
2067
2508
 
@@ -2097,7 +2538,7 @@ void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
2097
2538
  // it as no longer deleted. We deoptimize in that case.
2098
2539
  if (instr->hydrogen()->RequiresHoleCheck()) {
2099
2540
  // We have a temp because CompareRoot might clobber kScratchRegister.
2100
- Register cell = ToRegister(instr->TempAt(0));
2541
+ Register cell = ToRegister(instr->temp());
2101
2542
  ASSERT(!value.is(cell));
2102
2543
  __ movq(cell, cell_handle, RelocInfo::GLOBAL_PROPERTY_CELL);
2103
2544
  __ CompareRoot(Operand(cell, 0), Heap::kTheHoleValueRootIndex);
@@ -2165,7 +2606,7 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
2165
2606
  SmiCheck check_needed =
2166
2607
  type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
2167
2608
  int offset = Context::SlotOffset(instr->slot_index());
2168
- Register scratch = ToRegister(instr->TempAt(0));
2609
+ Register scratch = ToRegister(instr->temp());
2169
2610
  __ RecordWriteContextSlot(context,
2170
2611
  offset,
2171
2612
  value,
@@ -2180,7 +2621,7 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
2180
2621
 
2181
2622
 
2182
2623
  void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
2183
- Register object = ToRegister(instr->InputAt(0));
2624
+ Register object = ToRegister(instr->object());
2184
2625
  Register result = ToRegister(instr->result());
2185
2626
  if (instr->hydrogen()->is_in_object()) {
2186
2627
  __ movq(result, FieldOperand(object, instr->hydrogen()->offset()));
@@ -2194,12 +2635,12 @@ void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
2194
2635
  void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
2195
2636
  Register object,
2196
2637
  Handle<Map> type,
2197
- Handle<String> name) {
2638
+ Handle<String> name,
2639
+ LEnvironment* env) {
2198
2640
  LookupResult lookup(isolate());
2199
- type->LookupInDescriptors(NULL, *name, &lookup);
2200
- ASSERT(lookup.IsFound() &&
2201
- (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION));
2202
- if (lookup.type() == FIELD) {
2641
+ type->LookupDescriptor(NULL, *name, &lookup);
2642
+ ASSERT(lookup.IsFound() || lookup.IsCacheable());
2643
+ if (lookup.IsField()) {
2203
2644
  int index = lookup.GetLocalFieldIndexFromMap(*type);
2204
2645
  int offset = index * kPointerSize;
2205
2646
  if (index < 0) {
@@ -2211,13 +2652,43 @@ void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
2211
2652
  __ movq(result, FieldOperand(object, JSObject::kPropertiesOffset));
2212
2653
  __ movq(result, FieldOperand(result, offset + FixedArray::kHeaderSize));
2213
2654
  }
2214
- } else {
2655
+ } else if (lookup.IsConstantFunction()) {
2215
2656
  Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
2216
2657
  __ LoadHeapObject(result, function);
2658
+ } else {
2659
+ // Negative lookup.
2660
+ // Check prototypes.
2661
+ Handle<HeapObject> current(HeapObject::cast((*type)->prototype()));
2662
+ Heap* heap = type->GetHeap();
2663
+ while (*current != heap->null_value()) {
2664
+ __ LoadHeapObject(result, current);
2665
+ __ Cmp(FieldOperand(result, HeapObject::kMapOffset),
2666
+ Handle<Map>(current->map()));
2667
+ DeoptimizeIf(not_equal, env);
2668
+ current =
2669
+ Handle<HeapObject>(HeapObject::cast(current->map()->prototype()));
2670
+ }
2671
+ __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
2217
2672
  }
2218
2673
  }
2219
2674
 
2220
2675
 
2676
+ // Check for cases where EmitLoadFieldOrConstantFunction needs to walk the
2677
+ // prototype chain, which causes unbounded code generation.
2678
+ static bool CompactEmit(SmallMapList* list,
2679
+ Handle<String> name,
2680
+ int i,
2681
+ Isolate* isolate) {
2682
+ Handle<Map> map = list->at(i);
2683
+ // If the map has ElementsKind transitions, we will generate map checks
2684
+ // for each kind in __ CompareMap(..., ALLOW_ELEMENTS_TRANSITION_MAPS).
2685
+ if (map->HasElementsTransition()) return false;
2686
+ LookupResult lookup(isolate);
2687
+ map->LookupDescriptor(NULL, *name, &lookup);
2688
+ return lookup.IsField() || lookup.IsConstantFunction();
2689
+ }
2690
+
2691
+
2221
2692
  void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
2222
2693
  Register object = ToRegister(instr->object());
2223
2694
  Register result = ToRegister(instr->result());
@@ -2231,18 +2702,32 @@ void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
2231
2702
  }
2232
2703
  Handle<String> name = instr->hydrogen()->name();
2233
2704
  Label done;
2705
+ bool all_are_compact = true;
2706
+ for (int i = 0; i < map_count; ++i) {
2707
+ if (!CompactEmit(instr->hydrogen()->types(), name, i, isolate())) {
2708
+ all_are_compact = false;
2709
+ break;
2710
+ }
2711
+ }
2234
2712
  for (int i = 0; i < map_count; ++i) {
2235
2713
  bool last = (i == map_count - 1);
2236
2714
  Handle<Map> map = instr->hydrogen()->types()->at(i);
2237
- __ Cmp(FieldOperand(object, HeapObject::kMapOffset), map);
2715
+ Label check_passed;
2716
+ __ CompareMap(object, map, &check_passed, ALLOW_ELEMENT_TRANSITION_MAPS);
2238
2717
  if (last && !need_generic) {
2239
2718
  DeoptimizeIf(not_equal, instr->environment());
2240
- EmitLoadFieldOrConstantFunction(result, object, map, name);
2719
+ __ bind(&check_passed);
2720
+ EmitLoadFieldOrConstantFunction(
2721
+ result, object, map, name, instr->environment());
2241
2722
  } else {
2242
2723
  Label next;
2243
- __ j(not_equal, &next, Label::kNear);
2244
- EmitLoadFieldOrConstantFunction(result, object, map, name);
2245
- __ jmp(&done, Label::kNear);
2724
+ bool compact = all_are_compact ? true :
2725
+ CompactEmit(instr->hydrogen()->types(), name, i, isolate());
2726
+ __ j(not_equal, &next, compact ? Label::kNear : Label::kFar);
2727
+ __ bind(&check_passed);
2728
+ EmitLoadFieldOrConstantFunction(
2729
+ result, object, map, name, instr->environment());
2730
+ __ jmp(&done, all_are_compact ? Label::kNear : Label::kFar);
2246
2731
  __ bind(&next);
2247
2732
  }
2248
2733
  }
@@ -2308,7 +2793,7 @@ void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
2308
2793
 
2309
2794
  void LCodeGen::DoLoadElements(LLoadElements* instr) {
2310
2795
  Register result = ToRegister(instr->result());
2311
- Register input = ToRegister(instr->InputAt(0));
2796
+ Register input = ToRegister(instr->object());
2312
2797
  __ movq(result, FieldOperand(input, JSObject::kElementsOffset));
2313
2798
  if (FLAG_debug_code) {
2314
2799
  Label done, ok, fail;
@@ -2344,7 +2829,7 @@ void LCodeGen::DoLoadElements(LLoadElements* instr) {
2344
2829
  void LCodeGen::DoLoadExternalArrayPointer(
2345
2830
  LLoadExternalArrayPointer* instr) {
2346
2831
  Register result = ToRegister(instr->result());
2347
- Register input = ToRegister(instr->InputAt(0));
2832
+ Register input = ToRegister(instr->object());
2348
2833
  __ movq(result, FieldOperand(input,
2349
2834
  ExternalPixelArray::kExternalPointerOffset));
2350
2835
  }
@@ -2354,55 +2839,107 @@ void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
2354
2839
  Register arguments = ToRegister(instr->arguments());
2355
2840
  Register length = ToRegister(instr->length());
2356
2841
  Register result = ToRegister(instr->result());
2357
-
2842
+ // There are two words between the frame pointer and the last argument.
2843
+ // Subtracting from length accounts for one of them add one more.
2358
2844
  if (instr->index()->IsRegister()) {
2359
2845
  __ subl(length, ToRegister(instr->index()));
2360
2846
  } else {
2361
2847
  __ subl(length, ToOperand(instr->index()));
2362
2848
  }
2363
- DeoptimizeIf(below_equal, instr->environment());
2364
-
2365
- // There are two words between the frame pointer and the last argument.
2366
- // Subtracting from length accounts for one of them add one more.
2367
2849
  __ movq(result, Operand(arguments, length, times_pointer_size, kPointerSize));
2368
2850
  }
2369
2851
 
2370
2852
 
2371
- void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
2372
- Register result = ToRegister(instr->result());
2373
-
2374
- if (instr->hydrogen()->IsDehoisted() && !instr->key()->IsConstantOperand()) {
2375
- // Sign extend key because it could be a 32 bit negative value
2376
- // and the dehoisted address computation happens in 64 bits.
2377
- Register key_reg = ToRegister(instr->key());
2378
- __ movsxlq(key_reg, key_reg);
2853
+ void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) {
2854
+ ElementsKind elements_kind = instr->elements_kind();
2855
+ LOperand* key = instr->key();
2856
+ if (!key->IsConstantOperand()) {
2857
+ Register key_reg = ToRegister(key);
2858
+ // Even though the HLoad/StoreKeyed (in this case) instructions force
2859
+ // the input representation for the key to be an integer, the input
2860
+ // gets replaced during bound check elimination with the index argument
2861
+ // to the bounds check, which can be tagged, so that case must be
2862
+ // handled here, too.
2863
+ if (instr->hydrogen()->key()->representation().IsTagged()) {
2864
+ __ SmiToInteger64(key_reg, key_reg);
2865
+ } else if (instr->hydrogen()->IsDehoisted()) {
2866
+ // Sign extend key because it could be a 32 bit negative value
2867
+ // and the dehoisted address computation happens in 64 bits
2868
+ __ movsxlq(key_reg, key_reg);
2869
+ }
2379
2870
  }
2871
+ Operand operand(BuildFastArrayOperand(
2872
+ instr->elements(),
2873
+ key,
2874
+ elements_kind,
2875
+ 0,
2876
+ instr->additional_index()));
2380
2877
 
2381
- // Load the result.
2382
- __ movq(result,
2383
- BuildFastArrayOperand(instr->elements(),
2384
- instr->key(),
2385
- FAST_ELEMENTS,
2386
- FixedArray::kHeaderSize - kHeapObjectTag,
2387
- instr->additional_index()));
2388
-
2389
- // Check for the hole value.
2390
- if (instr->hydrogen()->RequiresHoleCheck()) {
2391
- __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
2392
- DeoptimizeIf(equal, instr->environment());
2878
+ if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
2879
+ XMMRegister result(ToDoubleRegister(instr->result()));
2880
+ __ movss(result, operand);
2881
+ __ cvtss2sd(result, result);
2882
+ } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
2883
+ __ movsd(ToDoubleRegister(instr->result()), operand);
2884
+ } else {
2885
+ Register result(ToRegister(instr->result()));
2886
+ switch (elements_kind) {
2887
+ case EXTERNAL_BYTE_ELEMENTS:
2888
+ __ movsxbq(result, operand);
2889
+ break;
2890
+ case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
2891
+ case EXTERNAL_PIXEL_ELEMENTS:
2892
+ __ movzxbq(result, operand);
2893
+ break;
2894
+ case EXTERNAL_SHORT_ELEMENTS:
2895
+ __ movsxwq(result, operand);
2896
+ break;
2897
+ case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
2898
+ __ movzxwq(result, operand);
2899
+ break;
2900
+ case EXTERNAL_INT_ELEMENTS:
2901
+ __ movsxlq(result, operand);
2902
+ break;
2903
+ case EXTERNAL_UNSIGNED_INT_ELEMENTS:
2904
+ __ movl(result, operand);
2905
+ if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) {
2906
+ __ testl(result, result);
2907
+ DeoptimizeIf(negative, instr->environment());
2908
+ }
2909
+ break;
2910
+ case EXTERNAL_FLOAT_ELEMENTS:
2911
+ case EXTERNAL_DOUBLE_ELEMENTS:
2912
+ case FAST_ELEMENTS:
2913
+ case FAST_SMI_ELEMENTS:
2914
+ case FAST_DOUBLE_ELEMENTS:
2915
+ case FAST_HOLEY_ELEMENTS:
2916
+ case FAST_HOLEY_SMI_ELEMENTS:
2917
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
2918
+ case DICTIONARY_ELEMENTS:
2919
+ case NON_STRICT_ARGUMENTS_ELEMENTS:
2920
+ UNREACHABLE();
2921
+ break;
2922
+ }
2393
2923
  }
2394
2924
  }
2395
2925
 
2396
2926
 
2397
- void LCodeGen::DoLoadKeyedFastDoubleElement(
2398
- LLoadKeyedFastDoubleElement* instr) {
2927
+ void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) {
2399
2928
  XMMRegister result(ToDoubleRegister(instr->result()));
2400
-
2401
- if (instr->hydrogen()->IsDehoisted() && !instr->key()->IsConstantOperand()) {
2402
- // Sign extend key because it could be a 32 bit negative value
2403
- // and the dehoisted address computation happens in 64 bits
2404
- Register key_reg = ToRegister(instr->key());
2405
- __ movsxlq(key_reg, key_reg);
2929
+ LOperand* key = instr->key();
2930
+ if (!key->IsConstantOperand()) {
2931
+ Register key_reg = ToRegister(key);
2932
+ // Even though the HLoad/StoreKeyed instructions force the input
2933
+ // representation for the key to be an integer, the input gets replaced
2934
+ // during bound check elimination with the index argument to the bounds
2935
+ // check, which can be tagged, so that case must be handled here, too.
2936
+ if (instr->hydrogen()->key()->representation().IsTagged()) {
2937
+ __ SmiToInteger64(key_reg, key_reg);
2938
+ } else if (instr->hydrogen()->IsDehoisted()) {
2939
+ // Sign extend key because it could be a 32 bit negative value
2940
+ // and the dehoisted address computation happens in 64 bits
2941
+ __ movsxlq(key_reg, key_reg);
2942
+ }
2406
2943
  }
2407
2944
 
2408
2945
  if (instr->hydrogen()->RequiresHoleCheck()) {
@@ -2410,7 +2947,7 @@ void LCodeGen::DoLoadKeyedFastDoubleElement(
2410
2947
  sizeof(kHoleNanLower32);
2411
2948
  Operand hole_check_operand = BuildFastArrayOperand(
2412
2949
  instr->elements(),
2413
- instr->key(),
2950
+ key,
2414
2951
  FAST_DOUBLE_ELEMENTS,
2415
2952
  offset,
2416
2953
  instr->additional_index());
@@ -2420,7 +2957,7 @@ void LCodeGen::DoLoadKeyedFastDoubleElement(
2420
2957
 
2421
2958
  Operand double_load_operand = BuildFastArrayOperand(
2422
2959
  instr->elements(),
2423
- instr->key(),
2960
+ key,
2424
2961
  FAST_DOUBLE_ELEMENTS,
2425
2962
  FixedDoubleArray::kHeaderSize - kHeapObjectTag,
2426
2963
  instr->additional_index());
@@ -2428,6 +2965,57 @@ void LCodeGen::DoLoadKeyedFastDoubleElement(
2428
2965
  }
2429
2966
 
2430
2967
 
2968
+ void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) {
2969
+ Register result = ToRegister(instr->result());
2970
+ LOperand* key = instr->key();
2971
+ if (!key->IsConstantOperand()) {
2972
+ Register key_reg = ToRegister(key);
2973
+ // Even though the HLoad/StoreKeyedFastElement instructions force
2974
+ // the input representation for the key to be an integer, the input
2975
+ // gets replaced during bound check elimination with the index
2976
+ // argument to the bounds check, which can be tagged, so that
2977
+ // case must be handled here, too.
2978
+ if (instr->hydrogen()->key()->representation().IsTagged()) {
2979
+ __ SmiToInteger64(key_reg, key_reg);
2980
+ } else if (instr->hydrogen()->IsDehoisted()) {
2981
+ // Sign extend key because it could be a 32 bit negative value
2982
+ // and the dehoisted address computation happens in 64 bits
2983
+ __ movsxlq(key_reg, key_reg);
2984
+ }
2985
+ }
2986
+
2987
+ // Load the result.
2988
+ __ movq(result,
2989
+ BuildFastArrayOperand(instr->elements(),
2990
+ key,
2991
+ FAST_ELEMENTS,
2992
+ FixedArray::kHeaderSize - kHeapObjectTag,
2993
+ instr->additional_index()));
2994
+
2995
+ // Check for the hole value.
2996
+ if (instr->hydrogen()->RequiresHoleCheck()) {
2997
+ if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) {
2998
+ Condition smi = __ CheckSmi(result);
2999
+ DeoptimizeIf(NegateCondition(smi), instr->environment());
3000
+ } else {
3001
+ __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
3002
+ DeoptimizeIf(equal, instr->environment());
3003
+ }
3004
+ }
3005
+ }
3006
+
3007
+
3008
+ void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) {
3009
+ if (instr->is_external()) {
3010
+ DoLoadKeyedExternalArray(instr);
3011
+ } else if (instr->hydrogen()->representation().IsDouble()) {
3012
+ DoLoadKeyedFixedDoubleArray(instr);
3013
+ } else {
3014
+ DoLoadKeyedFixedArray(instr);
3015
+ }
3016
+ }
3017
+
3018
+
2431
3019
  Operand LCodeGen::BuildFastArrayOperand(
2432
3020
  LOperand* elements_pointer,
2433
3021
  LOperand* key,
@@ -2454,71 +3042,6 @@ Operand LCodeGen::BuildFastArrayOperand(
2454
3042
  }
2455
3043
 
2456
3044
 
2457
- void LCodeGen::DoLoadKeyedSpecializedArrayElement(
2458
- LLoadKeyedSpecializedArrayElement* instr) {
2459
- ElementsKind elements_kind = instr->elements_kind();
2460
- Operand operand(BuildFastArrayOperand(instr->external_pointer(),
2461
- instr->key(),
2462
- elements_kind,
2463
- 0,
2464
- instr->additional_index()));
2465
- if (instr->hydrogen()->IsDehoisted() && !instr->key()->IsConstantOperand()) {
2466
- // Sign extend key because it could be a 32 bit negative value
2467
- // and the dehoisted address computation happens in 64 bits
2468
- Register key_reg = ToRegister(instr->key());
2469
- __ movsxlq(key_reg, key_reg);
2470
- }
2471
-
2472
- if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
2473
- XMMRegister result(ToDoubleRegister(instr->result()));
2474
- __ movss(result, operand);
2475
- __ cvtss2sd(result, result);
2476
- } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
2477
- __ movsd(ToDoubleRegister(instr->result()), operand);
2478
- } else {
2479
- Register result(ToRegister(instr->result()));
2480
- switch (elements_kind) {
2481
- case EXTERNAL_BYTE_ELEMENTS:
2482
- __ movsxbq(result, operand);
2483
- break;
2484
- case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
2485
- case EXTERNAL_PIXEL_ELEMENTS:
2486
- __ movzxbq(result, operand);
2487
- break;
2488
- case EXTERNAL_SHORT_ELEMENTS:
2489
- __ movsxwq(result, operand);
2490
- break;
2491
- case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
2492
- __ movzxwq(result, operand);
2493
- break;
2494
- case EXTERNAL_INT_ELEMENTS:
2495
- __ movsxlq(result, operand);
2496
- break;
2497
- case EXTERNAL_UNSIGNED_INT_ELEMENTS:
2498
- __ movl(result, operand);
2499
- __ testl(result, result);
2500
- // TODO(danno): we could be more clever here, perhaps having a special
2501
- // version of the stub that detects if the overflow case actually
2502
- // happens, and generate code that returns a double rather than int.
2503
- DeoptimizeIf(negative, instr->environment());
2504
- break;
2505
- case EXTERNAL_FLOAT_ELEMENTS:
2506
- case EXTERNAL_DOUBLE_ELEMENTS:
2507
- case FAST_ELEMENTS:
2508
- case FAST_SMI_ELEMENTS:
2509
- case FAST_DOUBLE_ELEMENTS:
2510
- case FAST_HOLEY_ELEMENTS:
2511
- case FAST_HOLEY_SMI_ELEMENTS:
2512
- case FAST_HOLEY_DOUBLE_ELEMENTS:
2513
- case DICTIONARY_ELEMENTS:
2514
- case NON_STRICT_ARGUMENTS_ELEMENTS:
2515
- UNREACHABLE();
2516
- break;
2517
- }
2518
- }
2519
- }
2520
-
2521
-
2522
3045
  void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
2523
3046
  ASSERT(ToRegister(instr->object()).is(rdx));
2524
3047
  ASSERT(ToRegister(instr->key()).is(rax));
@@ -2562,10 +3085,10 @@ void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
2562
3085
  Label done;
2563
3086
 
2564
3087
  // If no arguments adaptor frame the number of arguments is fixed.
2565
- if (instr->InputAt(0)->IsRegister()) {
2566
- __ cmpq(rbp, ToRegister(instr->InputAt(0)));
3088
+ if (instr->elements()->IsRegister()) {
3089
+ __ cmpq(rbp, ToRegister(instr->elements()));
2567
3090
  } else {
2568
- __ cmpq(rbp, ToOperand(instr->InputAt(0)));
3091
+ __ cmpq(rbp, ToOperand(instr->elements()));
2569
3092
  }
2570
3093
  __ movl(result, Immediate(scope()->num_parameters()));
2571
3094
  __ j(equal, &done, Label::kNear);
@@ -2622,7 +3145,7 @@ void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
2622
3145
  // TODO(kmillikin): We have a hydrogen value for the global object. See
2623
3146
  // if it's better to use it than to explicitly fetch it from the context
2624
3147
  // here.
2625
- __ movq(receiver, ContextOperand(rsi, Context::GLOBAL_INDEX));
3148
+ __ movq(receiver, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
2626
3149
  __ movq(receiver,
2627
3150
  FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
2628
3151
  __ bind(&receiver_ok);
@@ -2673,7 +3196,7 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
2673
3196
 
2674
3197
 
2675
3198
  void LCodeGen::DoPushArgument(LPushArgument* instr) {
2676
- LOperand* argument = instr->InputAt(0);
3199
+ LOperand* argument = instr->value();
2677
3200
  EmitPushTaggedOperand(argument);
2678
3201
  }
2679
3202
 
@@ -2685,7 +3208,7 @@ void LCodeGen::DoDrop(LDrop* instr) {
2685
3208
 
2686
3209
  void LCodeGen::DoThisFunction(LThisFunction* instr) {
2687
3210
  Register result = ToRegister(instr->result());
2688
- __ LoadHeapObject(result, instr->hydrogen()->closure());
3211
+ __ movq(result, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
2689
3212
  }
2690
3213
 
2691
3214
 
@@ -2740,14 +3263,8 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
2740
3263
  __ LoadHeapObject(rdi, function);
2741
3264
  }
2742
3265
 
2743
- // Change context if needed.
2744
- bool change_context =
2745
- (info()->closure()->context() != function->context()) ||
2746
- scope()->contains_with() ||
2747
- (scope()->num_heap_slots() > 0);
2748
- if (change_context) {
2749
- __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
2750
- }
3266
+ // Change context.
3267
+ __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
2751
3268
 
2752
3269
  // Set rax to arguments count if adaption is not needed. Assumes that rax
2753
3270
  // is available to write to at this point.
@@ -2789,7 +3306,7 @@ void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
2789
3306
 
2790
3307
 
2791
3308
  void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
2792
- Register input_reg = ToRegister(instr->InputAt(0));
3309
+ Register input_reg = ToRegister(instr->value());
2793
3310
  __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
2794
3311
  Heap::kHeapNumberMapRootIndex);
2795
3312
  DeoptimizeIf(not_equal, instr->environment());
@@ -2841,7 +3358,7 @@ void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
2841
3358
 
2842
3359
 
2843
3360
  void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
2844
- Register input_reg = ToRegister(instr->InputAt(0));
3361
+ Register input_reg = ToRegister(instr->value());
2845
3362
  __ testl(input_reg, input_reg);
2846
3363
  Label is_positive;
2847
3364
  __ j(not_sign, &is_positive);
@@ -2866,12 +3383,12 @@ void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
2866
3383
  LUnaryMathOperation* instr_;
2867
3384
  };
2868
3385
 
2869
- ASSERT(instr->InputAt(0)->Equals(instr->result()));
3386
+ ASSERT(instr->value()->Equals(instr->result()));
2870
3387
  Representation r = instr->hydrogen()->value()->representation();
2871
3388
 
2872
3389
  if (r.IsDouble()) {
2873
3390
  XMMRegister scratch = xmm0;
2874
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
3391
+ XMMRegister input_reg = ToDoubleRegister(instr->value());
2875
3392
  __ xorps(scratch, scratch);
2876
3393
  __ subsd(scratch, input_reg);
2877
3394
  __ andpd(input_reg, scratch);
@@ -2879,8 +3396,8 @@ void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
2879
3396
  EmitIntegerMathAbs(instr);
2880
3397
  } else { // Tagged case.
2881
3398
  DeferredMathAbsTaggedHeapNumber* deferred =
2882
- new DeferredMathAbsTaggedHeapNumber(this, instr);
2883
- Register input_reg = ToRegister(instr->InputAt(0));
3399
+ new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr);
3400
+ Register input_reg = ToRegister(instr->value());
2884
3401
  // Smi check.
2885
3402
  __ JumpIfNotSmi(input_reg, deferred->entry());
2886
3403
  __ SmiToInteger32(input_reg, input_reg);
@@ -2894,8 +3411,7 @@ void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
2894
3411
  void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
2895
3412
  XMMRegister xmm_scratch = xmm0;
2896
3413
  Register output_reg = ToRegister(instr->result());
2897
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2898
- Label done;
3414
+ XMMRegister input_reg = ToDoubleRegister(instr->value());
2899
3415
 
2900
3416
  if (CpuFeatures::IsSupported(SSE4_1)) {
2901
3417
  CpuFeatures::Scope scope(SSE4_1);
@@ -2910,10 +3426,13 @@ void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
2910
3426
  __ cmpl(output_reg, Immediate(0x80000000));
2911
3427
  DeoptimizeIf(equal, instr->environment());
2912
3428
  } else {
3429
+ Label negative_sign, done;
2913
3430
  // Deoptimize on negative inputs.
2914
3431
  __ xorps(xmm_scratch, xmm_scratch); // Zero the register.
2915
3432
  __ ucomisd(input_reg, xmm_scratch);
2916
- DeoptimizeIf(below, instr->environment());
3433
+ DeoptimizeIf(parity_even, instr->environment());
3434
+ __ j(below, &negative_sign, Label::kNear);
3435
+
2917
3436
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
2918
3437
  // Check for negative zero.
2919
3438
  Label positive_sign;
@@ -2928,23 +3447,34 @@ void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
2928
3447
 
2929
3448
  // Use truncating instruction (OK because input is positive).
2930
3449
  __ cvttsd2si(output_reg, input_reg);
2931
-
2932
3450
  // Overflow is signalled with minint.
2933
3451
  __ cmpl(output_reg, Immediate(0x80000000));
2934
3452
  DeoptimizeIf(equal, instr->environment());
3453
+ __ jmp(&done, Label::kNear);
3454
+
3455
+ // Non-zero negative reaches here.
3456
+ __ bind(&negative_sign);
3457
+ // Truncate, then compare and compensate.
3458
+ __ cvttsd2si(output_reg, input_reg);
3459
+ __ cvtlsi2sd(xmm_scratch, output_reg);
3460
+ __ ucomisd(input_reg, xmm_scratch);
3461
+ __ j(equal, &done, Label::kNear);
3462
+ __ subl(output_reg, Immediate(1));
3463
+ DeoptimizeIf(overflow, instr->environment());
3464
+
3465
+ __ bind(&done);
2935
3466
  }
2936
- __ bind(&done);
2937
3467
  }
2938
3468
 
2939
3469
 
2940
3470
  void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
2941
3471
  const XMMRegister xmm_scratch = xmm0;
2942
3472
  Register output_reg = ToRegister(instr->result());
2943
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
3473
+ XMMRegister input_reg = ToDoubleRegister(instr->value());
2944
3474
 
2945
3475
  Label done;
2946
3476
  // xmm_scratch = 0.5
2947
- __ movq(kScratchRegister, V8_INT64_C(0x3FE0000000000000), RelocInfo::NONE);
3477
+ __ movq(kScratchRegister, V8_INT64_C(0x3FE0000000000000), RelocInfo::NONE64);
2948
3478
  __ movq(xmm_scratch, kScratchRegister);
2949
3479
  Label below_half;
2950
3480
  __ ucomisd(xmm_scratch, input_reg);
@@ -2973,7 +3503,9 @@ void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
2973
3503
  // Bailout if below -0.5, otherwise round to (positive) zero, even
2974
3504
  // if negative.
2975
3505
  // xmm_scrach = -0.5
2976
- __ movq(kScratchRegister, V8_INT64_C(0xBFE0000000000000), RelocInfo::NONE);
3506
+ __ movq(kScratchRegister,
3507
+ V8_INT64_C(0xBFE0000000000000),
3508
+ RelocInfo::NONE64);
2977
3509
  __ movq(xmm_scratch, kScratchRegister);
2978
3510
  __ ucomisd(input_reg, xmm_scratch);
2979
3511
  DeoptimizeIf(below, instr->environment());
@@ -2985,7 +3517,7 @@ void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
2985
3517
 
2986
3518
 
2987
3519
  void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
2988
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
3520
+ XMMRegister input_reg = ToDoubleRegister(instr->value());
2989
3521
  ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
2990
3522
  __ sqrtsd(input_reg, input_reg);
2991
3523
  }
@@ -2993,7 +3525,7 @@ void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
2993
3525
 
2994
3526
  void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
2995
3527
  XMMRegister xmm_scratch = xmm0;
2996
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
3528
+ XMMRegister input_reg = ToDoubleRegister(instr->value());
2997
3529
  ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
2998
3530
 
2999
3531
  // Note that according to ECMA-262 15.8.2.13:
@@ -3002,7 +3534,7 @@ void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
3002
3534
  Label done, sqrt;
3003
3535
  // Check base for -Infinity. According to IEEE-754, double-precision
3004
3536
  // -Infinity has the highest 12 bits set and the lowest 52 bits cleared.
3005
- __ movq(kScratchRegister, V8_INT64_C(0xFFF0000000000000), RelocInfo::NONE);
3537
+ __ movq(kScratchRegister, V8_INT64_C(0xFFF0000000000000), RelocInfo::NONE64);
3006
3538
  __ movq(xmm_scratch, kScratchRegister);
3007
3539
  __ ucomisd(xmm_scratch, input_reg);
3008
3540
  // Comparing -Infinity with NaN results in "unordered", which sets the
@@ -3034,11 +3566,11 @@ void LCodeGen::DoPower(LPower* instr) {
3034
3566
  #else
3035
3567
  Register exponent = rdi;
3036
3568
  #endif
3037
- ASSERT(!instr->InputAt(1)->IsRegister() ||
3038
- ToRegister(instr->InputAt(1)).is(exponent));
3039
- ASSERT(!instr->InputAt(1)->IsDoubleRegister() ||
3040
- ToDoubleRegister(instr->InputAt(1)).is(xmm1));
3041
- ASSERT(ToDoubleRegister(instr->InputAt(0)).is(xmm2));
3569
+ ASSERT(!instr->right()->IsRegister() ||
3570
+ ToRegister(instr->right()).is(exponent));
3571
+ ASSERT(!instr->right()->IsDoubleRegister() ||
3572
+ ToDoubleRegister(instr->right()).is(xmm1));
3573
+ ASSERT(ToDoubleRegister(instr->left()).is(xmm2));
3042
3574
  ASSERT(ToDoubleRegister(instr->result()).is(xmm3));
3043
3575
 
3044
3576
  if (exponent_type.IsTagged()) {
@@ -3071,7 +3603,7 @@ void LCodeGen::DoRandom(LRandom* instr) {
3071
3603
  LRandom* instr_;
3072
3604
  };
3073
3605
 
3074
- DeferredDoRandom* deferred = new DeferredDoRandom(this, instr);
3606
+ DeferredDoRandom* deferred = new(zone()) DeferredDoRandom(this, instr);
3075
3607
 
3076
3608
  // Having marked this instruction as a call we can use any
3077
3609
  // registers.
@@ -3080,10 +3612,10 @@ void LCodeGen::DoRandom(LRandom* instr) {
3080
3612
  // Choose the right register for the first argument depending on
3081
3613
  // calling convention.
3082
3614
  #ifdef _WIN64
3083
- ASSERT(ToRegister(instr->InputAt(0)).is(rcx));
3615
+ ASSERT(ToRegister(instr->global_object()).is(rcx));
3084
3616
  Register global_object = rcx;
3085
3617
  #else
3086
- ASSERT(ToRegister(instr->InputAt(0)).is(rdi));
3618
+ ASSERT(ToRegister(instr->global_object()).is(rdi));
3087
3619
  Register global_object = rdi;
3088
3620
  #endif
3089
3621
 
@@ -3091,11 +3623,11 @@ void LCodeGen::DoRandom(LRandom* instr) {
3091
3623
  STATIC_ASSERT(kPointerSize == 2 * kSeedSize);
3092
3624
 
3093
3625
  __ movq(global_object,
3094
- FieldOperand(global_object, GlobalObject::kGlobalContextOffset));
3626
+ FieldOperand(global_object, GlobalObject::kNativeContextOffset));
3095
3627
  static const int kRandomSeedOffset =
3096
3628
  FixedArray::kHeaderSize + Context::RANDOM_SEED_INDEX * kPointerSize;
3097
3629
  __ movq(rbx, FieldOperand(global_object, kRandomSeedOffset));
3098
- // rbx: FixedArray of the global context's random seeds
3630
+ // rbx: FixedArray of the native context's random seeds
3099
3631
 
3100
3632
  // Load state[0].
3101
3633
  __ movl(rax, FieldOperand(rbx, ByteArray::kHeaderSize));
@@ -3107,8 +3639,7 @@ void LCodeGen::DoRandom(LRandom* instr) {
3107
3639
 
3108
3640
  // state[0] = 18273 * (state[0] & 0xFFFF) + (state[0] >> 16)
3109
3641
  // Only operate on the lower 32 bit of rax.
3110
- __ movl(rdx, rax);
3111
- __ andl(rdx, Immediate(0xFFFF));
3642
+ __ movzxwl(rdx, rax);
3112
3643
  __ imull(rdx, rdx, Immediate(18273));
3113
3644
  __ shrl(rax, Immediate(16));
3114
3645
  __ addl(rax, rdx);
@@ -3116,8 +3647,7 @@ void LCodeGen::DoRandom(LRandom* instr) {
3116
3647
  __ movl(FieldOperand(rbx, ByteArray::kHeaderSize), rax);
3117
3648
 
3118
3649
  // state[1] = 36969 * (state[1] & 0xFFFF) + (state[1] >> 16)
3119
- __ movl(rdx, rcx);
3120
- __ andl(rdx, Immediate(0xFFFF));
3650
+ __ movzxwl(rdx, rcx);
3121
3651
  __ imull(rdx, rdx, Immediate(36969));
3122
3652
  __ shrl(rcx, Immediate(16));
3123
3653
  __ addl(rcx, rdx);
@@ -3133,10 +3663,10 @@ void LCodeGen::DoRandom(LRandom* instr) {
3133
3663
  // Convert 32 random bits in rax to 0.(32 random bits) in a double
3134
3664
  // by computing:
3135
3665
  // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
3136
- __ movl(rcx, Immediate(0x49800000)); // 1.0 x 2^20 as single.
3137
- __ movd(xmm2, rcx);
3666
+ __ movq(rcx, V8_INT64_C(0x4130000000000000),
3667
+ RelocInfo::NONE64); // 1.0 x 2^20 as double
3668
+ __ movq(xmm2, rcx);
3138
3669
  __ movd(xmm1, rax);
3139
- __ cvtss2sd(xmm2, xmm2);
3140
3670
  __ xorps(xmm1, xmm2);
3141
3671
  __ subsd(xmm1, xmm2);
3142
3672
  }
@@ -3150,6 +3680,16 @@ void LCodeGen::DoDeferredRandom(LRandom* instr) {
3150
3680
  }
3151
3681
 
3152
3682
 
3683
+ void LCodeGen::DoMathExp(LMathExp* instr) {
3684
+ XMMRegister input = ToDoubleRegister(instr->value());
3685
+ XMMRegister result = ToDoubleRegister(instr->result());
3686
+ Register temp1 = ToRegister(instr->temp1());
3687
+ Register temp2 = ToRegister(instr->temp2());
3688
+
3689
+ MathExpGenerator::EmitMathExp(masm(), input, result, xmm0, temp1, temp2);
3690
+ }
3691
+
3692
+
3153
3693
  void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
3154
3694
  ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
3155
3695
  TranscendentalCacheStub stub(TranscendentalCache::LOG,
@@ -3298,7 +3838,7 @@ void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
3298
3838
 
3299
3839
 
3300
3840
  void LCodeGen::DoCallNew(LCallNew* instr) {
3301
- ASSERT(ToRegister(instr->InputAt(0)).is(rdi));
3841
+ ASSERT(ToRegister(instr->constructor()).is(rdi));
3302
3842
  ASSERT(ToRegister(instr->result()).is(rax));
3303
3843
 
3304
3844
  CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
@@ -3322,7 +3862,7 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
3322
3862
  __ Move(FieldOperand(object, HeapObject::kMapOffset),
3323
3863
  instr->transition());
3324
3864
  } else {
3325
- Register temp = ToRegister(instr->TempAt(0));
3865
+ Register temp = ToRegister(instr->temp());
3326
3866
  __ Move(kScratchRegister, instr->transition());
3327
3867
  __ movq(FieldOperand(object, HeapObject::kMapOffset), kScratchRegister);
3328
3868
  // Update the write barrier for the map field.
@@ -3343,7 +3883,7 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
3343
3883
  if (instr->is_in_object()) {
3344
3884
  __ movq(FieldOperand(object, offset), value);
3345
3885
  if (instr->hydrogen()->NeedsWriteBarrier()) {
3346
- Register temp = ToRegister(instr->TempAt(0));
3886
+ Register temp = ToRegister(instr->temp());
3347
3887
  // Update the write barrier for the object for in-object properties.
3348
3888
  __ RecordWriteField(object,
3349
3889
  offset,
@@ -3354,7 +3894,7 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
3354
3894
  check_needed);
3355
3895
  }
3356
3896
  } else {
3357
- Register temp = ToRegister(instr->TempAt(0));
3897
+ Register temp = ToRegister(instr->temp());
3358
3898
  __ movq(temp, FieldOperand(object, JSObject::kPropertiesOffset));
3359
3899
  __ movq(FieldOperand(temp, offset), value);
3360
3900
  if (instr->hydrogen()->NeedsWriteBarrier()) {
@@ -3384,21 +3924,90 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
3384
3924
  }
3385
3925
 
3386
3926
 
3387
- void LCodeGen::DoStoreKeyedSpecializedArrayElement(
3388
- LStoreKeyedSpecializedArrayElement* instr) {
3389
- ElementsKind elements_kind = instr->elements_kind();
3390
- Operand operand(BuildFastArrayOperand(instr->external_pointer(),
3391
- instr->key(),
3392
- elements_kind,
3393
- 0,
3394
- instr->additional_index()));
3927
+ void LCodeGen::DeoptIfTaggedButNotSmi(LEnvironment* environment,
3928
+ HValue* value,
3929
+ LOperand* operand) {
3930
+ if (value->representation().IsTagged() && !value->type().IsSmi()) {
3931
+ Condition cc;
3932
+ if (operand->IsRegister()) {
3933
+ cc = masm()->CheckSmi(ToRegister(operand));
3934
+ } else {
3935
+ cc = masm()->CheckSmi(ToOperand(operand));
3936
+ }
3937
+ DeoptimizeIf(NegateCondition(cc), environment);
3938
+ }
3939
+ }
3940
+
3941
+
3942
+ void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
3943
+ DeoptIfTaggedButNotSmi(instr->environment(),
3944
+ instr->hydrogen()->length(),
3945
+ instr->length());
3946
+ DeoptIfTaggedButNotSmi(instr->environment(),
3947
+ instr->hydrogen()->index(),
3948
+ instr->index());
3949
+ if (instr->length()->IsRegister()) {
3950
+ Register reg = ToRegister(instr->length());
3951
+ if (!instr->hydrogen()->length()->representation().IsTagged()) {
3952
+ __ AssertZeroExtended(reg);
3953
+ }
3954
+ if (instr->index()->IsConstantOperand()) {
3955
+ int constant_index =
3956
+ ToInteger32(LConstantOperand::cast(instr->index()));
3957
+ if (instr->hydrogen()->length()->representation().IsTagged()) {
3958
+ __ Cmp(reg, Smi::FromInt(constant_index));
3959
+ } else {
3960
+ __ cmpq(reg, Immediate(constant_index));
3961
+ }
3962
+ } else {
3963
+ Register reg2 = ToRegister(instr->index());
3964
+ if (!instr->hydrogen()->index()->representation().IsTagged()) {
3965
+ __ AssertZeroExtended(reg2);
3966
+ }
3967
+ __ cmpq(reg, reg2);
3968
+ }
3969
+ } else {
3970
+ Operand length = ToOperand(instr->length());
3971
+ if (instr->index()->IsConstantOperand()) {
3972
+ int constant_index =
3973
+ ToInteger32(LConstantOperand::cast(instr->index()));
3974
+ if (instr->hydrogen()->length()->representation().IsTagged()) {
3975
+ __ Cmp(length, Smi::FromInt(constant_index));
3976
+ } else {
3977
+ __ cmpq(length, Immediate(constant_index));
3978
+ }
3979
+ } else {
3980
+ __ cmpq(length, ToRegister(instr->index()));
3981
+ }
3982
+ }
3983
+ DeoptimizeIf(below_equal, instr->environment());
3984
+ }
3985
+
3395
3986
 
3396
- if (instr->hydrogen()->IsDehoisted() && !instr->key()->IsConstantOperand()) {
3397
- // Sign extend key because it could be a 32 bit negative value
3398
- // and the dehoisted address computation happens in 64 bits
3399
- Register key_reg = ToRegister(instr->key());
3400
- __ movsxlq(key_reg, key_reg);
3987
+ void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) {
3988
+ ElementsKind elements_kind = instr->elements_kind();
3989
+ LOperand* key = instr->key();
3990
+ if (!key->IsConstantOperand()) {
3991
+ Register key_reg = ToRegister(key);
3992
+ // Even though the HLoad/StoreKeyedFastElement instructions force
3993
+ // the input representation for the key to be an integer, the input
3994
+ // gets replaced during bound check elimination with the index
3995
+ // argument to the bounds check, which can be tagged, so that case
3996
+ // must be handled here, too.
3997
+ if (instr->hydrogen()->key()->representation().IsTagged()) {
3998
+ __ SmiToInteger64(key_reg, key_reg);
3999
+ } else if (instr->hydrogen()->IsDehoisted()) {
4000
+ // Sign extend key because it could be a 32 bit negative value
4001
+ // and the dehoisted address computation happens in 64 bits
4002
+ __ movsxlq(key_reg, key_reg);
4003
+ }
3401
4004
  }
4005
+ Operand operand(BuildFastArrayOperand(
4006
+ instr->elements(),
4007
+ key,
4008
+ elements_kind,
4009
+ 0,
4010
+ instr->additional_index()));
3402
4011
 
3403
4012
  if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
3404
4013
  XMMRegister value(ToDoubleRegister(instr->value()));
@@ -3439,106 +4048,108 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
3439
4048
  }
3440
4049
 
3441
4050
 
3442
- void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
3443
- if (instr->length()->IsRegister()) {
3444
- Register reg = ToRegister(instr->length());
3445
- if (FLAG_debug_code) {
3446
- __ AbortIfNotZeroExtended(reg);
3447
- }
3448
- if (instr->index()->IsConstantOperand()) {
3449
- __ cmpq(reg,
3450
- Immediate(ToInteger32(LConstantOperand::cast(instr->index()))));
3451
- } else {
3452
- Register reg2 = ToRegister(instr->index());
3453
- if (FLAG_debug_code) {
3454
- __ AbortIfNotZeroExtended(reg2);
3455
- }
3456
- __ cmpq(reg, reg2);
3457
- }
3458
- } else {
3459
- if (instr->index()->IsConstantOperand()) {
3460
- __ cmpq(ToOperand(instr->length()),
3461
- Immediate(ToInteger32(LConstantOperand::cast(instr->index()))));
3462
- } else {
3463
- __ cmpq(ToOperand(instr->length()), ToRegister(instr->index()));
4051
+ void LCodeGen::DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr) {
4052
+ XMMRegister value = ToDoubleRegister(instr->value());
4053
+ LOperand* key = instr->key();
4054
+ if (!key->IsConstantOperand()) {
4055
+ Register key_reg = ToRegister(key);
4056
+ // Even though the HLoad/StoreKeyedFastElement instructions force
4057
+ // the input representation for the key to be an integer, the
4058
+ // input gets replaced during bound check elimination with the index
4059
+ // argument to the bounds check, which can be tagged, so that case
4060
+ // must be handled here, too.
4061
+ if (instr->hydrogen()->key()->representation().IsTagged()) {
4062
+ __ SmiToInteger64(key_reg, key_reg);
4063
+ } else if (instr->hydrogen()->IsDehoisted()) {
4064
+ // Sign extend key because it could be a 32 bit negative value
4065
+ // and the dehoisted address computation happens in 64 bits
4066
+ __ movsxlq(key_reg, key_reg);
3464
4067
  }
3465
4068
  }
3466
- DeoptimizeIf(below_equal, instr->environment());
4069
+
4070
+ if (instr->NeedsCanonicalization()) {
4071
+ Label have_value;
4072
+
4073
+ __ ucomisd(value, value);
4074
+ __ j(parity_odd, &have_value); // NaN.
4075
+
4076
+ __ Set(kScratchRegister, BitCast<uint64_t>(
4077
+ FixedDoubleArray::canonical_not_the_hole_nan_as_double()));
4078
+ __ movq(value, kScratchRegister);
4079
+
4080
+ __ bind(&have_value);
4081
+ }
4082
+
4083
+ Operand double_store_operand = BuildFastArrayOperand(
4084
+ instr->elements(),
4085
+ key,
4086
+ FAST_DOUBLE_ELEMENTS,
4087
+ FixedDoubleArray::kHeaderSize - kHeapObjectTag,
4088
+ instr->additional_index());
4089
+
4090
+ __ movsd(double_store_operand, value);
3467
4091
  }
3468
4092
 
3469
4093
 
3470
- void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
4094
+ void LCodeGen::DoStoreKeyedFixedArray(LStoreKeyed* instr) {
3471
4095
  Register value = ToRegister(instr->value());
3472
- Register elements = ToRegister(instr->object());
3473
- Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
4096
+ Register elements = ToRegister(instr->elements());
4097
+ LOperand* key = instr->key();
4098
+ if (!key->IsConstantOperand()) {
4099
+ Register key_reg = ToRegister(key);
4100
+ // Even though the HLoad/StoreKeyedFastElement instructions force
4101
+ // the input representation for the key to be an integer, the
4102
+ // input gets replaced during bound check elimination with the index
4103
+ // argument to the bounds check, which can be tagged, so that case
4104
+ // must be handled here, too.
4105
+ if (instr->hydrogen()->key()->representation().IsTagged()) {
4106
+ __ SmiToInteger64(key_reg, key_reg);
4107
+ } else if (instr->hydrogen()->IsDehoisted()) {
4108
+ // Sign extend key because it could be a 32 bit negative value
4109
+ // and the dehoisted address computation happens in 64 bits
4110
+ __ movsxlq(key_reg, key_reg);
4111
+ }
4112
+ }
3474
4113
 
3475
4114
  Operand operand =
3476
- BuildFastArrayOperand(instr->object(),
3477
- instr->key(),
4115
+ BuildFastArrayOperand(instr->elements(),
4116
+ key,
3478
4117
  FAST_ELEMENTS,
3479
4118
  FixedArray::kHeaderSize - kHeapObjectTag,
3480
4119
  instr->additional_index());
3481
4120
 
3482
- if (instr->hydrogen()->IsDehoisted() && !instr->key()->IsConstantOperand()) {
3483
- // Sign extend key because it could be a 32 bit negative value
3484
- // and the dehoisted address computation happens in 64 bits
3485
- Register key_reg = ToRegister(instr->key());
3486
- __ movsxlq(key_reg, key_reg);
3487
- }
3488
-
3489
- __ movq(operand, value);
3490
-
3491
4121
  if (instr->hydrogen()->NeedsWriteBarrier()) {
3492
4122
  ASSERT(!instr->key()->IsConstantOperand());
3493
4123
  HType type = instr->hydrogen()->value()->type();
3494
4124
  SmiCheck check_needed =
3495
4125
  type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
3496
4126
  // Compute address of modified element and store it into key register.
3497
- __ lea(key, operand);
4127
+ Register key_reg(ToRegister(key));
4128
+ __ lea(key_reg, operand);
4129
+ __ movq(Operand(key_reg, 0), value);
3498
4130
  __ RecordWrite(elements,
3499
- key,
4131
+ key_reg,
3500
4132
  value,
3501
4133
  kSaveFPRegs,
3502
4134
  EMIT_REMEMBERED_SET,
3503
4135
  check_needed);
4136
+ } else {
4137
+ __ movq(operand, value);
3504
4138
  }
3505
4139
  }
3506
4140
 
3507
4141
 
3508
- void LCodeGen::DoStoreKeyedFastDoubleElement(
3509
- LStoreKeyedFastDoubleElement* instr) {
3510
- XMMRegister value = ToDoubleRegister(instr->value());
3511
-
3512
- if (instr->NeedsCanonicalization()) {
3513
- Label have_value;
3514
-
3515
- __ ucomisd(value, value);
3516
- __ j(parity_odd, &have_value); // NaN.
3517
-
3518
- __ Set(kScratchRegister, BitCast<uint64_t>(
3519
- FixedDoubleArray::canonical_not_the_hole_nan_as_double()));
3520
- __ movq(value, kScratchRegister);
3521
-
3522
- __ bind(&have_value);
3523
- }
3524
-
3525
- Operand double_store_operand = BuildFastArrayOperand(
3526
- instr->elements(),
3527
- instr->key(),
3528
- FAST_DOUBLE_ELEMENTS,
3529
- FixedDoubleArray::kHeaderSize - kHeapObjectTag,
3530
- instr->additional_index());
3531
-
3532
- if (instr->hydrogen()->IsDehoisted() && !instr->key()->IsConstantOperand()) {
3533
- // Sign extend key because it could be a 32 bit negative value
3534
- // and the dehoisted address computation happens in 64 bits
3535
- Register key_reg = ToRegister(instr->key());
3536
- __ movsxlq(key_reg, key_reg);
4142
+ void LCodeGen::DoStoreKeyed(LStoreKeyed* instr) {
4143
+ if (instr->is_external()) {
4144
+ DoStoreKeyedExternalArray(instr);
4145
+ } else if (instr->hydrogen()->value()->representation().IsDouble()) {
4146
+ DoStoreKeyedFixedDoubleArray(instr);
4147
+ } else {
4148
+ DoStoreKeyedFixedArray(instr);
3537
4149
  }
3538
-
3539
- __ movsd(double_store_operand, value);
3540
4150
  }
3541
4151
 
4152
+
3542
4153
  void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
3543
4154
  ASSERT(ToRegister(instr->object()).is(rdx));
3544
4155
  ASSERT(ToRegister(instr->key()).is(rcx));
@@ -3553,36 +4164,50 @@ void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
3553
4164
 
3554
4165
  void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
3555
4166
  Register object_reg = ToRegister(instr->object());
3556
- Register new_map_reg = ToRegister(instr->new_map_reg());
3557
4167
 
3558
4168
  Handle<Map> from_map = instr->original_map();
3559
4169
  Handle<Map> to_map = instr->transitioned_map();
3560
- ElementsKind from_kind = from_map->elements_kind();
3561
- ElementsKind to_kind = to_map->elements_kind();
4170
+ ElementsKind from_kind = instr->from_kind();
4171
+ ElementsKind to_kind = instr->to_kind();
3562
4172
 
3563
4173
  Label not_applicable;
3564
4174
  __ Cmp(FieldOperand(object_reg, HeapObject::kMapOffset), from_map);
3565
4175
  __ j(not_equal, &not_applicable);
3566
- __ movq(new_map_reg, to_map, RelocInfo::EMBEDDED_OBJECT);
3567
4176
  if (IsSimpleMapChangeTransition(from_kind, to_kind)) {
4177
+ Register new_map_reg = ToRegister(instr->new_map_temp());
4178
+ __ movq(new_map_reg, to_map, RelocInfo::EMBEDDED_OBJECT);
3568
4179
  __ movq(FieldOperand(object_reg, HeapObject::kMapOffset), new_map_reg);
3569
4180
  // Write barrier.
3570
- ASSERT_NE(instr->temp_reg(), NULL);
4181
+ ASSERT_NE(instr->temp(), NULL);
3571
4182
  __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg,
3572
- ToRegister(instr->temp_reg()), kDontSaveFPRegs);
4183
+ ToRegister(instr->temp()), kDontSaveFPRegs);
4184
+ } else if (FLAG_compiled_transitions) {
4185
+ PushSafepointRegistersScope scope(this);
4186
+ if (!object_reg.is(rax)) {
4187
+ __ movq(rax, object_reg);
4188
+ }
4189
+ __ Move(rbx, to_map);
4190
+ TransitionElementsKindStub stub(from_kind, to_kind);
4191
+ __ CallStub(&stub);
4192
+ RecordSafepointWithRegisters(
4193
+ instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
3573
4194
  } else if (IsFastSmiElementsKind(from_kind) &&
3574
- IsFastDoubleElementsKind(to_kind)) {
3575
- Register fixed_object_reg = ToRegister(instr->temp_reg());
4195
+ IsFastDoubleElementsKind(to_kind)) {
4196
+ Register fixed_object_reg = ToRegister(instr->temp());
3576
4197
  ASSERT(fixed_object_reg.is(rdx));
4198
+ Register new_map_reg = ToRegister(instr->new_map_temp());
3577
4199
  ASSERT(new_map_reg.is(rbx));
4200
+ __ movq(new_map_reg, to_map, RelocInfo::EMBEDDED_OBJECT);
3578
4201
  __ movq(fixed_object_reg, object_reg);
3579
4202
  CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(),
3580
4203
  RelocInfo::CODE_TARGET, instr);
3581
4204
  } else if (IsFastDoubleElementsKind(from_kind) &&
3582
4205
  IsFastObjectElementsKind(to_kind)) {
3583
- Register fixed_object_reg = ToRegister(instr->temp_reg());
4206
+ Register fixed_object_reg = ToRegister(instr->temp());
3584
4207
  ASSERT(fixed_object_reg.is(rdx));
4208
+ Register new_map_reg = ToRegister(instr->new_map_temp());
3585
4209
  ASSERT(new_map_reg.is(rbx));
4210
+ __ movq(new_map_reg, to_map, RelocInfo::EMBEDDED_OBJECT);
3586
4211
  __ movq(fixed_object_reg, object_reg);
3587
4212
  CallCode(isolate()->builtins()->TransitionElementsDoubleToObject(),
3588
4213
  RelocInfo::CODE_TARGET, instr);
@@ -3593,6 +4218,14 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
3593
4218
  }
3594
4219
 
3595
4220
 
4221
+ void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) {
4222
+ Register object = ToRegister(instr->object());
4223
+ Register temp = ToRegister(instr->temp());
4224
+ __ TestJSArrayForAllocationSiteInfo(object, temp);
4225
+ DeoptimizeIf(equal, instr->environment());
4226
+ }
4227
+
4228
+
3596
4229
  void LCodeGen::DoStringAdd(LStringAdd* instr) {
3597
4230
  EmitPushTaggedOperand(instr->left());
3598
4231
  EmitPushTaggedOperand(instr->right());
@@ -3613,7 +4246,7 @@ void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
3613
4246
  };
3614
4247
 
3615
4248
  DeferredStringCharCodeAt* deferred =
3616
- new DeferredStringCharCodeAt(this, instr);
4249
+ new(zone()) DeferredStringCharCodeAt(this, instr);
3617
4250
 
3618
4251
  StringCharLoadGenerator::Generate(masm(),
3619
4252
  ToRegister(instr->string()),
@@ -3647,9 +4280,7 @@ void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
3647
4280
  __ push(index);
3648
4281
  }
3649
4282
  CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr);
3650
- if (FLAG_debug_code) {
3651
- __ AbortIfNotSmi(rax);
3652
- }
4283
+ __ AssertSmi(rax);
3653
4284
  __ SmiToInteger32(rax, rax);
3654
4285
  __ StoreToSafepointRegisterSlot(result, rax);
3655
4286
  }
@@ -3667,14 +4298,14 @@ void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
3667
4298
  };
3668
4299
 
3669
4300
  DeferredStringCharFromCode* deferred =
3670
- new DeferredStringCharFromCode(this, instr);
4301
+ new(zone()) DeferredStringCharFromCode(this, instr);
3671
4302
 
3672
4303
  ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
3673
4304
  Register char_code = ToRegister(instr->char_code());
3674
4305
  Register result = ToRegister(instr->result());
3675
4306
  ASSERT(!char_code.is(result));
3676
4307
 
3677
- __ cmpl(char_code, Immediate(String::kMaxAsciiCharCode));
4308
+ __ cmpl(char_code, Immediate(String::kMaxOneByteCharCode));
3678
4309
  __ j(above, deferred->entry());
3679
4310
  __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex);
3680
4311
  __ movq(result, FieldOperand(result,
@@ -3711,7 +4342,7 @@ void LCodeGen::DoStringLength(LStringLength* instr) {
3711
4342
 
3712
4343
 
3713
4344
  void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
3714
- LOperand* input = instr->InputAt(0);
4345
+ LOperand* input = instr->value();
3715
4346
  ASSERT(input->IsRegister() || input->IsStackSlot());
3716
4347
  LOperand* output = instr->result();
3717
4348
  ASSERT(output->IsDoubleRegister());
@@ -3723,8 +4354,19 @@ void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
3723
4354
  }
3724
4355
 
3725
4356
 
4357
+ void LCodeGen::DoUint32ToDouble(LUint32ToDouble* instr) {
4358
+ LOperand* input = instr->value();
4359
+ LOperand* output = instr->result();
4360
+ LOperand* temp = instr->temp();
4361
+
4362
+ __ LoadUint32(ToDoubleRegister(output),
4363
+ ToRegister(input),
4364
+ ToDoubleRegister(temp));
4365
+ }
4366
+
4367
+
3726
4368
  void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
3727
- LOperand* input = instr->InputAt(0);
4369
+ LOperand* input = instr->value();
3728
4370
  ASSERT(input->IsRegister() && input->Equals(instr->result()));
3729
4371
  Register reg = ToRegister(input);
3730
4372
 
@@ -3732,6 +4374,69 @@ void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
3732
4374
  }
3733
4375
 
3734
4376
 
4377
+ void LCodeGen::DoNumberTagU(LNumberTagU* instr) {
4378
+ class DeferredNumberTagU: public LDeferredCode {
4379
+ public:
4380
+ DeferredNumberTagU(LCodeGen* codegen, LNumberTagU* instr)
4381
+ : LDeferredCode(codegen), instr_(instr) { }
4382
+ virtual void Generate() {
4383
+ codegen()->DoDeferredNumberTagU(instr_);
4384
+ }
4385
+ virtual LInstruction* instr() { return instr_; }
4386
+ private:
4387
+ LNumberTagU* instr_;
4388
+ };
4389
+
4390
+ LOperand* input = instr->value();
4391
+ ASSERT(input->IsRegister() && input->Equals(instr->result()));
4392
+ Register reg = ToRegister(input);
4393
+
4394
+ DeferredNumberTagU* deferred = new(zone()) DeferredNumberTagU(this, instr);
4395
+ __ cmpl(reg, Immediate(Smi::kMaxValue));
4396
+ __ j(above, deferred->entry());
4397
+ __ Integer32ToSmi(reg, reg);
4398
+ __ bind(deferred->exit());
4399
+ }
4400
+
4401
+
4402
+ void LCodeGen::DoDeferredNumberTagU(LNumberTagU* instr) {
4403
+ Label slow;
4404
+ Register reg = ToRegister(instr->value());
4405
+ Register tmp = reg.is(rax) ? rcx : rax;
4406
+
4407
+ // Preserve the value of all registers.
4408
+ PushSafepointRegistersScope scope(this);
4409
+
4410
+ Label done;
4411
+ // Load value into xmm1 which will be preserved across potential call to
4412
+ // runtime (MacroAssembler::EnterExitFrameEpilogue preserves only allocatable
4413
+ // XMM registers on x64).
4414
+ __ LoadUint32(xmm1, reg, xmm0);
4415
+
4416
+ if (FLAG_inline_new) {
4417
+ __ AllocateHeapNumber(reg, tmp, &slow);
4418
+ __ jmp(&done, Label::kNear);
4419
+ }
4420
+
4421
+ // Slow case: Call the runtime system to do the number allocation.
4422
+ __ bind(&slow);
4423
+
4424
+ // Put a valid pointer value in the stack slot where the result
4425
+ // register is stored, as this register is in the pointer map, but contains an
4426
+ // integer value.
4427
+ __ StoreToSafepointRegisterSlot(reg, Immediate(0));
4428
+
4429
+ CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
4430
+ if (!reg.is(rax)) __ movq(reg, rax);
4431
+
4432
+ // Done. Put the value in xmm1 into the value of the allocated heap
4433
+ // number.
4434
+ __ bind(&done);
4435
+ __ movsd(FieldOperand(reg, HeapNumber::kValueOffset), xmm1);
4436
+ __ StoreToSafepointRegisterSlot(reg, reg);
4437
+ }
4438
+
4439
+
3735
4440
  void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
3736
4441
  class DeferredNumberTagD: public LDeferredCode {
3737
4442
  public:
@@ -3743,11 +4448,41 @@ void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
3743
4448
  LNumberTagD* instr_;
3744
4449
  };
3745
4450
 
3746
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
4451
+ XMMRegister input_reg = ToDoubleRegister(instr->value());
3747
4452
  Register reg = ToRegister(instr->result());
3748
- Register tmp = ToRegister(instr->TempAt(0));
4453
+ Register tmp = ToRegister(instr->temp());
3749
4454
 
3750
- DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
4455
+ bool convert_hole = false;
4456
+ HValue* change_input = instr->hydrogen()->value();
4457
+ if (change_input->IsLoadKeyed()) {
4458
+ HLoadKeyed* load = HLoadKeyed::cast(change_input);
4459
+ convert_hole = load->UsesMustHandleHole();
4460
+ }
4461
+
4462
+ Label no_special_nan_handling;
4463
+ Label done;
4464
+ if (convert_hole) {
4465
+ XMMRegister input_reg = ToDoubleRegister(instr->value());
4466
+ __ ucomisd(input_reg, input_reg);
4467
+ __ j(parity_odd, &no_special_nan_handling);
4468
+ __ subq(rsp, Immediate(kDoubleSize));
4469
+ __ movsd(MemOperand(rsp, 0), input_reg);
4470
+ __ cmpl(MemOperand(rsp, sizeof(kHoleNanLower32)),
4471
+ Immediate(kHoleNanUpper32));
4472
+ Label canonicalize;
4473
+ __ j(not_equal, &canonicalize);
4474
+ __ addq(rsp, Immediate(kDoubleSize));
4475
+ __ Move(reg, factory()->the_hole_value());
4476
+ __ jmp(&done);
4477
+ __ bind(&canonicalize);
4478
+ __ addq(rsp, Immediate(kDoubleSize));
4479
+ __ Set(kScratchRegister, BitCast<uint64_t>(
4480
+ FixedDoubleArray::canonical_not_the_hole_nan_as_double()));
4481
+ __ movq(input_reg, kScratchRegister);
4482
+ }
4483
+
4484
+ __ bind(&no_special_nan_handling);
4485
+ DeferredNumberTagD* deferred = new(zone()) DeferredNumberTagD(this, instr);
3751
4486
  if (FLAG_inline_new) {
3752
4487
  __ AllocateHeapNumber(reg, tmp, deferred->entry());
3753
4488
  } else {
@@ -3755,6 +4490,8 @@ void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
3755
4490
  }
3756
4491
  __ bind(deferred->exit());
3757
4492
  __ movsd(FieldOperand(reg, HeapNumber::kValueOffset), input_reg);
4493
+
4494
+ __ bind(&done);
3758
4495
  }
3759
4496
 
3760
4497
 
@@ -3776,19 +4513,21 @@ void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
3776
4513
 
3777
4514
 
3778
4515
  void LCodeGen::DoSmiTag(LSmiTag* instr) {
3779
- ASSERT(instr->InputAt(0)->Equals(instr->result()));
3780
- Register input = ToRegister(instr->InputAt(0));
4516
+ ASSERT(instr->value()->Equals(instr->result()));
4517
+ Register input = ToRegister(instr->value());
3781
4518
  ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
3782
4519
  __ Integer32ToSmi(input, input);
3783
4520
  }
3784
4521
 
3785
4522
 
3786
4523
  void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
3787
- ASSERT(instr->InputAt(0)->Equals(instr->result()));
3788
- Register input = ToRegister(instr->InputAt(0));
4524
+ ASSERT(instr->value()->Equals(instr->result()));
4525
+ Register input = ToRegister(instr->value());
3789
4526
  if (instr->needs_check()) {
3790
4527
  Condition is_smi = __ CheckSmi(input);
3791
4528
  DeoptimizeIf(NegateCondition(is_smi), instr->environment());
4529
+ } else {
4530
+ __ AssertSmi(input);
3792
4531
  }
3793
4532
  __ SmiToInteger32(input, input);
3794
4533
  }
@@ -3798,43 +4537,58 @@ void LCodeGen::EmitNumberUntagD(Register input_reg,
3798
4537
  XMMRegister result_reg,
3799
4538
  bool deoptimize_on_undefined,
3800
4539
  bool deoptimize_on_minus_zero,
3801
- LEnvironment* env) {
4540
+ LEnvironment* env,
4541
+ NumberUntagDMode mode) {
3802
4542
  Label load_smi, done;
3803
4543
 
3804
- // Smi check.
3805
- __ JumpIfSmi(input_reg, &load_smi, Label::kNear);
4544
+ if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) {
4545
+ // Smi check.
4546
+ __ JumpIfSmi(input_reg, &load_smi, Label::kNear);
3806
4547
 
3807
- // Heap number map check.
3808
- __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
3809
- Heap::kHeapNumberMapRootIndex);
3810
- if (deoptimize_on_undefined) {
3811
- DeoptimizeIf(not_equal, env);
3812
- } else {
3813
- Label heap_number;
3814
- __ j(equal, &heap_number, Label::kNear);
4548
+ // Heap number map check.
4549
+ __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
4550
+ Heap::kHeapNumberMapRootIndex);
4551
+ if (deoptimize_on_undefined) {
4552
+ DeoptimizeIf(not_equal, env);
4553
+ } else {
4554
+ Label heap_number;
4555
+ __ j(equal, &heap_number, Label::kNear);
3815
4556
 
3816
- __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
3817
- DeoptimizeIf(not_equal, env);
4557
+ __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
4558
+ DeoptimizeIf(not_equal, env);
3818
4559
 
3819
- // Convert undefined to NaN. Compute NaN as 0/0.
3820
- __ xorps(result_reg, result_reg);
3821
- __ divsd(result_reg, result_reg);
3822
- __ jmp(&done, Label::kNear);
4560
+ // Convert undefined to NaN. Compute NaN as 0/0.
4561
+ __ xorps(result_reg, result_reg);
4562
+ __ divsd(result_reg, result_reg);
4563
+ __ jmp(&done, Label::kNear);
3823
4564
 
3824
- __ bind(&heap_number);
3825
- }
3826
- // Heap number to XMM conversion.
3827
- __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
3828
- if (deoptimize_on_minus_zero) {
3829
- XMMRegister xmm_scratch = xmm0;
3830
- __ xorps(xmm_scratch, xmm_scratch);
3831
- __ ucomisd(xmm_scratch, result_reg);
3832
- __ j(not_equal, &done, Label::kNear);
3833
- __ movmskpd(kScratchRegister, result_reg);
3834
- __ testq(kScratchRegister, Immediate(1));
3835
- DeoptimizeIf(not_zero, env);
4565
+ __ bind(&heap_number);
4566
+ }
4567
+ // Heap number to XMM conversion.
4568
+ __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
4569
+ if (deoptimize_on_minus_zero) {
4570
+ XMMRegister xmm_scratch = xmm0;
4571
+ __ xorps(xmm_scratch, xmm_scratch);
4572
+ __ ucomisd(xmm_scratch, result_reg);
4573
+ __ j(not_equal, &done, Label::kNear);
4574
+ __ movmskpd(kScratchRegister, result_reg);
4575
+ __ testq(kScratchRegister, Immediate(1));
4576
+ DeoptimizeIf(not_zero, env);
4577
+ }
4578
+ __ jmp(&done, Label::kNear);
4579
+ } else if (mode == NUMBER_CANDIDATE_IS_SMI_OR_HOLE) {
4580
+ __ testq(input_reg, Immediate(kSmiTagMask));
4581
+ DeoptimizeIf(not_equal, env);
4582
+ } else if (mode == NUMBER_CANDIDATE_IS_SMI_CONVERT_HOLE) {
4583
+ __ testq(input_reg, Immediate(kSmiTagMask));
4584
+ __ j(zero, &load_smi);
4585
+ __ Set(kScratchRegister, BitCast<uint64_t>(
4586
+ FixedDoubleArray::hole_nan_as_double()));
4587
+ __ movq(result_reg, kScratchRegister);
4588
+ __ jmp(&done, Label::kNear);
4589
+ } else {
4590
+ ASSERT(mode == NUMBER_CANDIDATE_IS_SMI);
3836
4591
  }
3837
- __ jmp(&done, Label::kNear);
3838
4592
 
3839
4593
  // Smi to XMM conversion
3840
4594
  __ bind(&load_smi);
@@ -3846,7 +4600,7 @@ void LCodeGen::EmitNumberUntagD(Register input_reg,
3846
4600
 
3847
4601
  void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
3848
4602
  Label done, heap_number;
3849
- Register input_reg = ToRegister(instr->InputAt(0));
4603
+ Register input_reg = ToRegister(instr->value());
3850
4604
 
3851
4605
  // Heap number map check.
3852
4606
  __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
@@ -3872,7 +4626,7 @@ void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
3872
4626
  // Deoptimize if we don't have a heap number.
3873
4627
  DeoptimizeIf(not_equal, instr->environment());
3874
4628
 
3875
- XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0));
4629
+ XMMRegister xmm_temp = ToDoubleRegister(instr->temp());
3876
4630
  __ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
3877
4631
  __ cvttsd2si(input_reg, xmm0);
3878
4632
  __ cvtlsi2sd(xmm_temp, input_reg);
@@ -3902,12 +4656,12 @@ void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
3902
4656
  LTaggedToI* instr_;
3903
4657
  };
3904
4658
 
3905
- LOperand* input = instr->InputAt(0);
4659
+ LOperand* input = instr->value();
3906
4660
  ASSERT(input->IsRegister());
3907
4661
  ASSERT(input->Equals(instr->result()));
3908
4662
 
3909
4663
  Register input_reg = ToRegister(input);
3910
- DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);
4664
+ DeferredTaggedToI* deferred = new(zone()) DeferredTaggedToI(this, instr);
3911
4665
  __ JumpIfNotSmi(input_reg, deferred->entry());
3912
4666
  __ SmiToInteger32(input_reg, input_reg);
3913
4667
  __ bind(deferred->exit());
@@ -3915,7 +4669,7 @@ void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
3915
4669
 
3916
4670
 
3917
4671
  void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
3918
- LOperand* input = instr->InputAt(0);
4672
+ LOperand* input = instr->value();
3919
4673
  ASSERT(input->IsRegister());
3920
4674
  LOperand* result = instr->result();
3921
4675
  ASSERT(result->IsDoubleRegister());
@@ -3923,15 +4677,33 @@ void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
3923
4677
  Register input_reg = ToRegister(input);
3924
4678
  XMMRegister result_reg = ToDoubleRegister(result);
3925
4679
 
4680
+ NumberUntagDMode mode = NUMBER_CANDIDATE_IS_ANY_TAGGED;
4681
+ HValue* value = instr->hydrogen()->value();
4682
+ if (value->type().IsSmi()) {
4683
+ if (value->IsLoadKeyed()) {
4684
+ HLoadKeyed* load = HLoadKeyed::cast(value);
4685
+ if (load->UsesMustHandleHole()) {
4686
+ if (load->hole_mode() == ALLOW_RETURN_HOLE) {
4687
+ mode = NUMBER_CANDIDATE_IS_SMI_CONVERT_HOLE;
4688
+ } else {
4689
+ mode = NUMBER_CANDIDATE_IS_SMI_OR_HOLE;
4690
+ }
4691
+ } else {
4692
+ mode = NUMBER_CANDIDATE_IS_SMI;
4693
+ }
4694
+ }
4695
+ }
4696
+
3926
4697
  EmitNumberUntagD(input_reg, result_reg,
3927
4698
  instr->hydrogen()->deoptimize_on_undefined(),
3928
4699
  instr->hydrogen()->deoptimize_on_minus_zero(),
3929
- instr->environment());
4700
+ instr->environment(),
4701
+ mode);
3930
4702
  }
3931
4703
 
3932
4704
 
3933
4705
  void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
3934
- LOperand* input = instr->InputAt(0);
4706
+ LOperand* input = instr->value();
3935
4707
  ASSERT(input->IsDoubleRegister());
3936
4708
  LOperand* result = instr->result();
3937
4709
  ASSERT(result->IsRegister());
@@ -3943,7 +4715,9 @@ void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
3943
4715
  // Performs a truncating conversion of a floating point number as used by
3944
4716
  // the JS bitwise operations.
3945
4717
  __ cvttsd2siq(result_reg, input_reg);
3946
- __ movq(kScratchRegister, V8_INT64_C(0x8000000000000000), RelocInfo::NONE);
4718
+ __ movq(kScratchRegister,
4719
+ V8_INT64_C(0x8000000000000000),
4720
+ RelocInfo::NONE64);
3947
4721
  __ cmpq(result_reg, kScratchRegister);
3948
4722
  DeoptimizeIf(equal, instr->environment());
3949
4723
  } else {
@@ -3971,21 +4745,21 @@ void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
3971
4745
 
3972
4746
 
3973
4747
  void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
3974
- LOperand* input = instr->InputAt(0);
4748
+ LOperand* input = instr->value();
3975
4749
  Condition cc = masm()->CheckSmi(ToRegister(input));
3976
4750
  DeoptimizeIf(NegateCondition(cc), instr->environment());
3977
4751
  }
3978
4752
 
3979
4753
 
3980
4754
  void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
3981
- LOperand* input = instr->InputAt(0);
4755
+ LOperand* input = instr->value();
3982
4756
  Condition cc = masm()->CheckSmi(ToRegister(input));
3983
4757
  DeoptimizeIf(cc, instr->environment());
3984
4758
  }
3985
4759
 
3986
4760
 
3987
4761
  void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
3988
- Register input = ToRegister(instr->InputAt(0));
4762
+ Register input = ToRegister(instr->value());
3989
4763
 
3990
4764
  __ movq(kScratchRegister, FieldOperand(input, HeapObject::kMapOffset));
3991
4765
 
@@ -4048,16 +4822,16 @@ void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
4048
4822
  void LCodeGen::DoCheckMapCommon(Register reg,
4049
4823
  Handle<Map> map,
4050
4824
  CompareMapMode mode,
4051
- LEnvironment* env) {
4825
+ LInstruction* instr) {
4052
4826
  Label success;
4053
4827
  __ CompareMap(reg, map, &success, mode);
4054
- DeoptimizeIf(not_equal, env);
4828
+ DeoptimizeIf(not_equal, instr->environment());
4055
4829
  __ bind(&success);
4056
4830
  }
4057
4831
 
4058
4832
 
4059
4833
  void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
4060
- LOperand* input = instr->InputAt(0);
4834
+ LOperand* input = instr->value();
4061
4835
  ASSERT(input->IsRegister());
4062
4836
  Register reg = ToRegister(input);
4063
4837
 
@@ -4069,7 +4843,7 @@ void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
4069
4843
  __ j(equal, &success);
4070
4844
  }
4071
4845
  Handle<Map> map = map_set->last();
4072
- DoCheckMapCommon(reg, map, REQUIRE_EXACT_MAP, instr->environment());
4846
+ DoCheckMapCommon(reg, map, REQUIRE_EXACT_MAP, instr);
4073
4847
  __ bind(&success);
4074
4848
  }
4075
4849
 
@@ -4077,8 +4851,7 @@ void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
4077
4851
  void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
4078
4852
  XMMRegister value_reg = ToDoubleRegister(instr->unclamped());
4079
4853
  Register result_reg = ToRegister(instr->result());
4080
- Register temp_reg = ToRegister(instr->TempAt(0));
4081
- __ ClampDoubleToUint8(value_reg, xmm0, result_reg, temp_reg);
4854
+ __ ClampDoubleToUint8(value_reg, xmm0, result_reg);
4082
4855
  }
4083
4856
 
4084
4857
 
@@ -4092,8 +4865,7 @@ void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) {
4092
4865
  void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
4093
4866
  ASSERT(instr->unclamped()->Equals(instr->result()));
4094
4867
  Register input_reg = ToRegister(instr->unclamped());
4095
- Register temp_reg = ToRegister(instr->TempAt(0));
4096
- XMMRegister temp_xmm_reg = ToDoubleRegister(instr->TempAt(1));
4868
+ XMMRegister temp_xmm_reg = ToDoubleRegister(instr->temp_xmm());
4097
4869
  Label is_smi, done, heap_number;
4098
4870
 
4099
4871
  __ JumpIfSmi(input_reg, &is_smi);
@@ -4113,7 +4885,7 @@ void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
4113
4885
  // Heap number
4114
4886
  __ bind(&heap_number);
4115
4887
  __ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
4116
- __ ClampDoubleToUint8(xmm0, temp_xmm_reg, input_reg, temp_reg);
4888
+ __ ClampDoubleToUint8(xmm0, temp_xmm_reg, input_reg);
4117
4889
  __ jmp(&done, Label::kNear);
4118
4890
 
4119
4891
  // smi
@@ -4126,27 +4898,18 @@ void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
4126
4898
 
4127
4899
 
4128
4900
  void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
4129
- Register reg = ToRegister(instr->TempAt(0));
4901
+ ASSERT(instr->temp()->Equals(instr->result()));
4902
+ Register reg = ToRegister(instr->temp());
4130
4903
 
4131
- Handle<JSObject> holder = instr->holder();
4132
- Handle<JSObject> current_prototype = instr->prototype();
4904
+ ZoneList<Handle<JSObject> >* prototypes = instr->prototypes();
4905
+ ZoneList<Handle<Map> >* maps = instr->maps();
4133
4906
 
4134
- // Load prototype object.
4135
- __ LoadHeapObject(reg, current_prototype);
4907
+ ASSERT(prototypes->length() == maps->length());
4136
4908
 
4137
- // Check prototype maps up to the holder.
4138
- while (!current_prototype.is_identical_to(holder)) {
4139
- DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
4140
- ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
4141
- current_prototype =
4142
- Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
4143
- // Load next prototype object.
4144
- __ LoadHeapObject(reg, current_prototype);
4909
+ for (int i = 0; i < prototypes->length(); i++) {
4910
+ __ LoadHeapObject(reg, prototypes->at(i));
4911
+ DoCheckMapCommon(reg, maps->at(i), ALLOW_ELEMENT_TRANSITION_MAPS, instr);
4145
4912
  }
4146
-
4147
- // Check the holder map.
4148
- DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
4149
- ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
4150
4913
  }
4151
4914
 
4152
4915
 
@@ -4161,10 +4924,11 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
4161
4924
  LAllocateObject* instr_;
4162
4925
  };
4163
4926
 
4164
- DeferredAllocateObject* deferred = new DeferredAllocateObject(this, instr);
4927
+ DeferredAllocateObject* deferred =
4928
+ new(zone()) DeferredAllocateObject(this, instr);
4165
4929
 
4166
4930
  Register result = ToRegister(instr->result());
4167
- Register scratch = ToRegister(instr->TempAt(0));
4931
+ Register scratch = ToRegister(instr->temp());
4168
4932
  Handle<JSFunction> constructor = instr->hydrogen()->constructor();
4169
4933
  Handle<Map> initial_map(constructor->initial_map());
4170
4934
  int instance_size = initial_map->instance_size();
@@ -4197,7 +4961,7 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
4197
4961
  __ movq(map, FieldOperand(scratch, JSFunction::kPrototypeOrInitialMapOffset));
4198
4962
 
4199
4963
  if (FLAG_debug_code) {
4200
- __ AbortIfSmi(map);
4964
+ __ AssertNotSmi(map);
4201
4965
  __ cmpb(FieldOperand(map, Map::kInstanceSizeOffset),
4202
4966
  Immediate(instance_size >> kPointerSizeLog2));
4203
4967
  __ Assert(equal, "Unexpected instance size");
@@ -4246,10 +5010,64 @@ void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
4246
5010
  }
4247
5011
 
4248
5012
 
5013
+ void LCodeGen::DoAllocate(LAllocate* instr) {
5014
+ class DeferredAllocate: public LDeferredCode {
5015
+ public:
5016
+ DeferredAllocate(LCodeGen* codegen, LAllocate* instr)
5017
+ : LDeferredCode(codegen), instr_(instr) { }
5018
+ virtual void Generate() { codegen()->DoDeferredAllocate(instr_); }
5019
+ virtual LInstruction* instr() { return instr_; }
5020
+ private:
5021
+ LAllocate* instr_;
5022
+ };
5023
+
5024
+ DeferredAllocate* deferred =
5025
+ new(zone()) DeferredAllocate(this, instr);
5026
+
5027
+ Register size = ToRegister(instr->size());
5028
+ Register result = ToRegister(instr->result());
5029
+ Register temp = ToRegister(instr->temp());
5030
+
5031
+ HAllocate* original_instr = instr->hydrogen();
5032
+ if (original_instr->size()->IsConstant()) {
5033
+ UNREACHABLE();
5034
+ } else {
5035
+ // Allocate memory for the object.
5036
+ AllocationFlags flags = TAG_OBJECT;
5037
+ if (original_instr->MustAllocateDoubleAligned()) {
5038
+ flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
5039
+ }
5040
+ __ AllocateInNewSpace(size, result, temp, no_reg,
5041
+ deferred->entry(), flags);
5042
+ }
5043
+
5044
+ __ bind(deferred->exit());
5045
+ }
5046
+
5047
+
5048
+ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
5049
+ Register size = ToRegister(instr->size());
5050
+ Register result = ToRegister(instr->result());
5051
+
5052
+ // TODO(3095996): Get rid of this. For now, we need to make the
5053
+ // result register contain a valid pointer because it is already
5054
+ // contained in the register pointer map.
5055
+ __ Set(result, 0);
5056
+
5057
+ PushSafepointRegistersScope scope(this);
5058
+ __ Integer32ToSmi(size, size);
5059
+ __ push(size);
5060
+ CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr);
5061
+ __ StoreToSafepointRegisterSlot(result, rax);
5062
+ }
5063
+
5064
+
4249
5065
  void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
4250
- Heap* heap = isolate()->heap();
5066
+ Handle<FixedArray> literals(instr->environment()->closure()->literals());
4251
5067
  ElementsKind boilerplate_elements_kind =
4252
5068
  instr->hydrogen()->boilerplate_elements_kind();
5069
+ AllocationSiteMode allocation_site_mode =
5070
+ instr->hydrogen()->allocation_site_mode();
4253
5071
 
4254
5072
  // Deopt if the array literal boilerplate ElementsKind is of a type different
4255
5073
  // than the expected one. The check isn't necessary if the boilerplate has
@@ -4268,12 +5086,11 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
4268
5086
  }
4269
5087
 
4270
5088
  // Set up the parameters to the stub/runtime call.
4271
- __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
4272
- __ push(FieldOperand(rax, JSFunction::kLiteralsOffset));
5089
+ __ PushHeapObject(literals);
4273
5090
  __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
4274
5091
  // Boilerplate already exists, constant elements are never accessed.
4275
5092
  // Pass an empty fixed array.
4276
- __ Push(Handle<FixedArray>(heap->empty_fixed_array()));
5093
+ __ Push(isolate()->factory()->empty_fixed_array());
4277
5094
 
4278
5095
  // Pick the right runtime function or stub to call.
4279
5096
  int length = instr->hydrogen()->length();
@@ -4281,7 +5098,7 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
4281
5098
  ASSERT(instr->hydrogen()->depth() == 1);
4282
5099
  FastCloneShallowArrayStub::Mode mode =
4283
5100
  FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
4284
- FastCloneShallowArrayStub stub(mode, length);
5101
+ FastCloneShallowArrayStub stub(mode, DONT_TRACK_ALLOCATION_SITE, length);
4285
5102
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4286
5103
  } else if (instr->hydrogen()->depth() > 1) {
4287
5104
  CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
@@ -4290,9 +5107,9 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
4290
5107
  } else {
4291
5108
  FastCloneShallowArrayStub::Mode mode =
4292
5109
  boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS
4293
- ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
4294
- : FastCloneShallowArrayStub::CLONE_ELEMENTS;
4295
- FastCloneShallowArrayStub stub(mode, length);
5110
+ ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
5111
+ : FastCloneShallowArrayStub::CLONE_ELEMENTS;
5112
+ FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
4296
5113
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4297
5114
  }
4298
5115
  }
@@ -4301,10 +5118,14 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
4301
5118
  void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
4302
5119
  Register result,
4303
5120
  Register source,
4304
- int* offset) {
5121
+ int* offset,
5122
+ AllocationSiteMode mode) {
4305
5123
  ASSERT(!source.is(rcx));
4306
5124
  ASSERT(!result.is(rcx));
4307
5125
 
5126
+ bool create_allocation_site_info = mode == TRACK_ALLOCATION_SITE &&
5127
+ object->map()->CanTrackAllocationSite();
5128
+
4308
5129
  // Only elements backing stores for non-COW arrays need to be copied.
4309
5130
  Handle<FixedArrayBase> elements(object->elements());
4310
5131
  bool has_elements = elements->length() > 0 &&
@@ -4314,8 +5135,13 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
4314
5135
  // this object and its backing store.
4315
5136
  int object_offset = *offset;
4316
5137
  int object_size = object->map()->instance_size();
4317
- int elements_offset = *offset + object_size;
4318
5138
  int elements_size = has_elements ? elements->Size() : 0;
5139
+ int elements_offset = *offset + object_size;
5140
+ if (create_allocation_site_info) {
5141
+ elements_offset += AllocationSiteInfo::kSize;
5142
+ *offset += AllocationSiteInfo::kSize;
5143
+ }
5144
+
4319
5145
  *offset += object_size + elements_size;
4320
5146
 
4321
5147
  // Copy object header.
@@ -4340,16 +5166,24 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
4340
5166
  __ lea(rcx, Operand(result, *offset));
4341
5167
  __ movq(FieldOperand(result, total_offset), rcx);
4342
5168
  __ LoadHeapObject(source, value_object);
4343
- EmitDeepCopy(value_object, result, source, offset);
5169
+ EmitDeepCopy(value_object, result, source, offset,
5170
+ DONT_TRACK_ALLOCATION_SITE);
4344
5171
  } else if (value->IsHeapObject()) {
4345
5172
  __ LoadHeapObject(rcx, Handle<HeapObject>::cast(value));
4346
5173
  __ movq(FieldOperand(result, total_offset), rcx);
4347
5174
  } else {
4348
- __ movq(rcx, value, RelocInfo::NONE);
5175
+ __ movq(rcx, value, RelocInfo::NONE64);
4349
5176
  __ movq(FieldOperand(result, total_offset), rcx);
4350
5177
  }
4351
5178
  }
4352
5179
 
5180
+ // Build Allocation Site Info if desired
5181
+ if (create_allocation_site_info) {
5182
+ __ LoadRoot(kScratchRegister, Heap::kAllocationSiteInfoMapRootIndex);
5183
+ __ movq(FieldOperand(result, object_size), kScratchRegister);
5184
+ __ movq(FieldOperand(result, object_size + kPointerSize), source);
5185
+ }
5186
+
4353
5187
  if (has_elements) {
4354
5188
  // Copy elements backing store header.
4355
5189
  __ LoadHeapObject(source, elements);
@@ -4367,7 +5201,7 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
4367
5201
  int64_t value = double_array->get_representation(i);
4368
5202
  int total_offset =
4369
5203
  elements_offset + FixedDoubleArray::OffsetOfElementAt(i);
4370
- __ movq(rcx, value, RelocInfo::NONE);
5204
+ __ movq(rcx, value, RelocInfo::NONE64);
4371
5205
  __ movq(FieldOperand(result, total_offset), rcx);
4372
5206
  }
4373
5207
  } else if (elements->IsFixedArray()) {
@@ -4380,12 +5214,13 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
4380
5214
  __ lea(rcx, Operand(result, *offset));
4381
5215
  __ movq(FieldOperand(result, total_offset), rcx);
4382
5216
  __ LoadHeapObject(source, value_object);
4383
- EmitDeepCopy(value_object, result, source, offset);
5217
+ EmitDeepCopy(value_object, result, source, offset,
5218
+ DONT_TRACK_ALLOCATION_SITE);
4384
5219
  } else if (value->IsHeapObject()) {
4385
5220
  __ LoadHeapObject(rcx, Handle<HeapObject>::cast(value));
4386
5221
  __ movq(FieldOperand(result, total_offset), rcx);
4387
5222
  } else {
4388
- __ movq(rcx, value, RelocInfo::NONE);
5223
+ __ movq(rcx, value, RelocInfo::NONE64);
4389
5224
  __ movq(FieldOperand(result, total_offset), rcx);
4390
5225
  }
4391
5226
  }
@@ -4430,7 +5265,8 @@ void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
4430
5265
  __ bind(&allocated);
4431
5266
  int offset = 0;
4432
5267
  __ LoadHeapObject(rbx, instr->hydrogen()->boilerplate());
4433
- EmitDeepCopy(instr->hydrogen()->boilerplate(), rax, rbx, &offset);
5268
+ EmitDeepCopy(instr->hydrogen()->boilerplate(), rax, rbx, &offset,
5269
+ instr->hydrogen()->allocation_site_mode());
4434
5270
  ASSERT_EQ(size, offset);
4435
5271
  }
4436
5272
 
@@ -4467,7 +5303,7 @@ void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
4467
5303
 
4468
5304
 
4469
5305
  void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
4470
- ASSERT(ToRegister(instr->InputAt(0)).is(rax));
5306
+ ASSERT(ToRegister(instr->value()).is(rax));
4471
5307
  __ push(rax);
4472
5308
  CallRuntime(Runtime::kToFastProperties, 1, instr);
4473
5309
  }
@@ -4476,14 +5312,12 @@ void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
4476
5312
  void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
4477
5313
  Label materialized;
4478
5314
  // Registers will be used as follows:
4479
- // rdi = JS function.
4480
5315
  // rcx = literals array.
4481
5316
  // rbx = regexp literal.
4482
5317
  // rax = regexp literal clone.
4483
- __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
4484
- __ movq(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
4485
- int literal_offset = FixedArray::kHeaderSize +
4486
- instr->hydrogen()->literal_index() * kPointerSize;
5318
+ int literal_offset =
5319
+ FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index());
5320
+ __ LoadHeapObject(rcx, instr->hydrogen()->literals());
4487
5321
  __ movq(rbx, FieldOperand(rcx, literal_offset));
4488
5322
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
4489
5323
  __ j(not_equal, &materialized, Label::kNear);
@@ -4546,7 +5380,7 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
4546
5380
 
4547
5381
 
4548
5382
  void LCodeGen::DoTypeof(LTypeof* instr) {
4549
- LOperand* input = instr->InputAt(0);
5383
+ LOperand* input = instr->value();
4550
5384
  EmitPushTaggedOperand(input);
4551
5385
  CallRuntime(Runtime::kTypeof, 1, instr);
4552
5386
  }
@@ -4570,7 +5404,7 @@ void LCodeGen::EmitPushTaggedOperand(LOperand* operand) {
4570
5404
 
4571
5405
 
4572
5406
  void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
4573
- Register input = ToRegister(instr->InputAt(0));
5407
+ Register input = ToRegister(instr->value());
4574
5408
  int true_block = chunk_->LookupDestination(instr->true_block_id());
4575
5409
  int false_block = chunk_->LookupDestination(instr->false_block_id());
4576
5410
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
@@ -4656,7 +5490,7 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
4656
5490
 
4657
5491
 
4658
5492
  void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
4659
- Register temp = ToRegister(instr->TempAt(0));
5493
+ Register temp = ToRegister(instr->temp());
4660
5494
  int true_block = chunk_->LookupDestination(instr->true_block_id());
4661
5495
  int false_block = chunk_->LookupDestination(instr->false_block_id());
4662
5496
 
@@ -4684,6 +5518,7 @@ void LCodeGen::EmitIsConstructCall(Register temp) {
4684
5518
 
4685
5519
 
4686
5520
  void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) {
5521
+ if (info()->IsStub()) return;
4687
5522
  // Ensure that we have enough space after the previous lazy-bailout
4688
5523
  // instruction for patching the code here.
4689
5524
  int current_pc = masm()->pc_offset();
@@ -4709,6 +5544,11 @@ void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
4709
5544
  }
4710
5545
 
4711
5546
 
5547
+ void LCodeGen::DoDummyUse(LDummyUse* instr) {
5548
+ // Nothing to see here, move on!
5549
+ }
5550
+
5551
+
4712
5552
  void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
4713
5553
  LOperand* obj = instr->object();
4714
5554
  LOperand* key = instr->key();
@@ -4783,7 +5623,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
4783
5623
  ASSERT(instr->hydrogen()->is_backwards_branch());
4784
5624
  // Perform stack overflow check if this goto needs it before jumping.
4785
5625
  DeferredStackCheck* deferred_stack_check =
4786
- new DeferredStackCheck(this, instr);
5626
+ new(zone()) DeferredStackCheck(this, instr);
4787
5627
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
4788
5628
  __ j(below, deferred_stack_check->entry());
4789
5629
  EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
@@ -4852,11 +5692,19 @@ void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
4852
5692
  void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
4853
5693
  Register map = ToRegister(instr->map());
4854
5694
  Register result = ToRegister(instr->result());
5695
+ Label load_cache, done;
5696
+ __ EnumLength(result, map);
5697
+ __ Cmp(result, Smi::FromInt(0));
5698
+ __ j(not_equal, &load_cache);
5699
+ __ LoadRoot(result, Heap::kEmptyFixedArrayRootIndex);
5700
+ __ jmp(&done);
5701
+ __ bind(&load_cache);
4855
5702
  __ LoadInstanceDescriptors(map, result);
4856
5703
  __ movq(result,
4857
- FieldOperand(result, DescriptorArray::kEnumerationIndexOffset));
5704
+ FieldOperand(result, DescriptorArray::kEnumCacheOffset));
4858
5705
  __ movq(result,
4859
5706
  FieldOperand(result, FixedArray::SizeFor(instr->idx())));
5707
+ __ bind(&done);
4860
5708
  Condition cc = masm()->CheckSmi(result);
4861
5709
  DeoptimizeIf(cc, instr->environment());
4862
5710
  }