libv8 3.11.8.17 → 3.16.14.0

Files changed (754)
  1. checksums.yaml +4 -4
  2. data/.travis.yml +1 -2
  3. data/Gemfile +1 -1
  4. data/Rakefile +6 -7
  5. data/lib/libv8/version.rb +1 -1
  6. data/vendor/v8/.gitignore +24 -3
  7. data/vendor/v8/AUTHORS +7 -0
  8. data/vendor/v8/ChangeLog +839 -0
  9. data/vendor/v8/DEPS +1 -1
  10. data/vendor/v8/Makefile.android +92 -0
  11. data/vendor/v8/OWNERS +11 -0
  12. data/vendor/v8/PRESUBMIT.py +71 -0
  13. data/vendor/v8/SConstruct +34 -39
  14. data/vendor/v8/build/android.gypi +56 -37
  15. data/vendor/v8/build/common.gypi +112 -30
  16. data/vendor/v8/build/gyp_v8 +1 -1
  17. data/vendor/v8/build/standalone.gypi +15 -11
  18. data/vendor/v8/include/v8-debug.h +9 -1
  19. data/vendor/v8/include/v8-preparser.h +4 -3
  20. data/vendor/v8/include/v8-profiler.h +25 -25
  21. data/vendor/v8/include/v8-testing.h +4 -3
  22. data/vendor/v8/include/v8.h +994 -540
  23. data/vendor/v8/preparser/preparser-process.cc +3 -3
  24. data/vendor/v8/samples/lineprocessor.cc +20 -27
  25. data/vendor/v8/samples/process.cc +18 -14
  26. data/vendor/v8/samples/shell.cc +16 -15
  27. data/vendor/v8/src/SConscript +15 -14
  28. data/vendor/v8/src/accessors.cc +169 -77
  29. data/vendor/v8/src/accessors.h +4 -0
  30. data/vendor/v8/src/allocation-inl.h +2 -2
  31. data/vendor/v8/src/allocation.h +7 -7
  32. data/vendor/v8/src/api.cc +810 -497
  33. data/vendor/v8/src/api.h +85 -60
  34. data/vendor/v8/src/arm/assembler-arm-inl.h +179 -22
  35. data/vendor/v8/src/arm/assembler-arm.cc +633 -264
  36. data/vendor/v8/src/arm/assembler-arm.h +264 -197
  37. data/vendor/v8/src/arm/builtins-arm.cc +117 -27
  38. data/vendor/v8/src/arm/code-stubs-arm.cc +1241 -700
  39. data/vendor/v8/src/arm/code-stubs-arm.h +35 -138
  40. data/vendor/v8/src/arm/codegen-arm.cc +285 -16
  41. data/vendor/v8/src/arm/codegen-arm.h +22 -0
  42. data/vendor/v8/src/arm/constants-arm.cc +5 -3
  43. data/vendor/v8/src/arm/constants-arm.h +24 -11
  44. data/vendor/v8/src/arm/debug-arm.cc +3 -3
  45. data/vendor/v8/src/arm/deoptimizer-arm.cc +382 -92
  46. data/vendor/v8/src/arm/disasm-arm.cc +61 -12
  47. data/vendor/v8/src/arm/frames-arm.h +0 -14
  48. data/vendor/v8/src/arm/full-codegen-arm.cc +332 -304
  49. data/vendor/v8/src/arm/ic-arm.cc +180 -259
  50. data/vendor/v8/src/arm/lithium-arm.cc +364 -316
  51. data/vendor/v8/src/arm/lithium-arm.h +512 -275
  52. data/vendor/v8/src/arm/lithium-codegen-arm.cc +1768 -809
  53. data/vendor/v8/src/arm/lithium-codegen-arm.h +97 -35
  54. data/vendor/v8/src/arm/lithium-gap-resolver-arm.cc +12 -5
  55. data/vendor/v8/src/arm/macro-assembler-arm.cc +439 -228
  56. data/vendor/v8/src/arm/macro-assembler-arm.h +116 -70
  57. data/vendor/v8/src/arm/regexp-macro-assembler-arm.cc +54 -44
  58. data/vendor/v8/src/arm/regexp-macro-assembler-arm.h +3 -10
  59. data/vendor/v8/src/arm/simulator-arm.cc +272 -238
  60. data/vendor/v8/src/arm/simulator-arm.h +38 -8
  61. data/vendor/v8/src/arm/stub-cache-arm.cc +522 -895
  62. data/vendor/v8/src/array.js +101 -70
  63. data/vendor/v8/src/assembler.cc +270 -19
  64. data/vendor/v8/src/assembler.h +110 -15
  65. data/vendor/v8/src/ast.cc +79 -69
  66. data/vendor/v8/src/ast.h +255 -301
  67. data/vendor/v8/src/atomicops.h +7 -1
  68. data/vendor/v8/src/atomicops_internals_tsan.h +335 -0
  69. data/vendor/v8/src/bootstrapper.cc +481 -418
  70. data/vendor/v8/src/bootstrapper.h +4 -4
  71. data/vendor/v8/src/builtins.cc +498 -311
  72. data/vendor/v8/src/builtins.h +75 -47
  73. data/vendor/v8/src/checks.cc +2 -1
  74. data/vendor/v8/src/checks.h +8 -0
  75. data/vendor/v8/src/code-stubs-hydrogen.cc +253 -0
  76. data/vendor/v8/src/code-stubs.cc +249 -84
  77. data/vendor/v8/src/code-stubs.h +501 -169
  78. data/vendor/v8/src/codegen.cc +36 -18
  79. data/vendor/v8/src/codegen.h +25 -3
  80. data/vendor/v8/src/collection.js +54 -17
  81. data/vendor/v8/src/compilation-cache.cc +24 -16
  82. data/vendor/v8/src/compilation-cache.h +15 -6
  83. data/vendor/v8/src/compiler.cc +497 -195
  84. data/vendor/v8/src/compiler.h +246 -38
  85. data/vendor/v8/src/contexts.cc +64 -24
  86. data/vendor/v8/src/contexts.h +60 -29
  87. data/vendor/v8/src/conversions-inl.h +24 -14
  88. data/vendor/v8/src/conversions.h +7 -4
  89. data/vendor/v8/src/counters.cc +21 -12
  90. data/vendor/v8/src/counters.h +44 -16
  91. data/vendor/v8/src/cpu-profiler.h +1 -1
  92. data/vendor/v8/src/d8-debug.cc +2 -2
  93. data/vendor/v8/src/d8-readline.cc +13 -2
  94. data/vendor/v8/src/d8.cc +681 -273
  95. data/vendor/v8/src/d8.gyp +4 -4
  96. data/vendor/v8/src/d8.h +38 -18
  97. data/vendor/v8/src/d8.js +0 -617
  98. data/vendor/v8/src/data-flow.h +55 -0
  99. data/vendor/v8/src/date.js +1 -42
  100. data/vendor/v8/src/dateparser-inl.h +5 -1
  101. data/vendor/v8/src/debug-agent.cc +10 -15
  102. data/vendor/v8/src/debug-debugger.js +147 -149
  103. data/vendor/v8/src/debug.cc +323 -164
  104. data/vendor/v8/src/debug.h +26 -14
  105. data/vendor/v8/src/deoptimizer.cc +765 -290
  106. data/vendor/v8/src/deoptimizer.h +130 -28
  107. data/vendor/v8/src/disassembler.cc +10 -4
  108. data/vendor/v8/src/elements-kind.cc +7 -2
  109. data/vendor/v8/src/elements-kind.h +19 -0
  110. data/vendor/v8/src/elements.cc +607 -285
  111. data/vendor/v8/src/elements.h +36 -13
  112. data/vendor/v8/src/execution.cc +52 -31
  113. data/vendor/v8/src/execution.h +4 -4
  114. data/vendor/v8/src/extensions/externalize-string-extension.cc +5 -4
  115. data/vendor/v8/src/extensions/gc-extension.cc +5 -1
  116. data/vendor/v8/src/extensions/statistics-extension.cc +153 -0
  117. data/vendor/v8/src/{inspector.h → extensions/statistics-extension.h} +12 -23
  118. data/vendor/v8/src/factory.cc +101 -134
  119. data/vendor/v8/src/factory.h +36 -31
  120. data/vendor/v8/src/flag-definitions.h +102 -25
  121. data/vendor/v8/src/flags.cc +9 -5
  122. data/vendor/v8/src/frames-inl.h +10 -0
  123. data/vendor/v8/src/frames.cc +116 -26
  124. data/vendor/v8/src/frames.h +96 -12
  125. data/vendor/v8/src/full-codegen.cc +219 -74
  126. data/vendor/v8/src/full-codegen.h +63 -21
  127. data/vendor/v8/src/func-name-inferrer.cc +8 -7
  128. data/vendor/v8/src/func-name-inferrer.h +5 -3
  129. data/vendor/v8/src/gdb-jit.cc +71 -57
  130. data/vendor/v8/src/global-handles.cc +230 -101
  131. data/vendor/v8/src/global-handles.h +26 -27
  132. data/vendor/v8/src/globals.h +17 -19
  133. data/vendor/v8/src/handles-inl.h +59 -12
  134. data/vendor/v8/src/handles.cc +180 -200
  135. data/vendor/v8/src/handles.h +80 -11
  136. data/vendor/v8/src/hashmap.h +60 -40
  137. data/vendor/v8/src/heap-inl.h +107 -45
  138. data/vendor/v8/src/heap-profiler.cc +38 -19
  139. data/vendor/v8/src/heap-profiler.h +24 -14
  140. data/vendor/v8/src/heap.cc +1123 -738
  141. data/vendor/v8/src/heap.h +385 -146
  142. data/vendor/v8/src/hydrogen-instructions.cc +700 -217
  143. data/vendor/v8/src/hydrogen-instructions.h +1158 -472
  144. data/vendor/v8/src/hydrogen.cc +3319 -1662
  145. data/vendor/v8/src/hydrogen.h +411 -170
  146. data/vendor/v8/src/ia32/assembler-ia32-inl.h +46 -16
  147. data/vendor/v8/src/ia32/assembler-ia32.cc +131 -61
  148. data/vendor/v8/src/ia32/assembler-ia32.h +115 -57
  149. data/vendor/v8/src/ia32/builtins-ia32.cc +99 -5
  150. data/vendor/v8/src/ia32/code-stubs-ia32.cc +787 -495
  151. data/vendor/v8/src/ia32/code-stubs-ia32.h +10 -100
  152. data/vendor/v8/src/ia32/codegen-ia32.cc +227 -23
  153. data/vendor/v8/src/ia32/codegen-ia32.h +14 -0
  154. data/vendor/v8/src/ia32/deoptimizer-ia32.cc +428 -87
  155. data/vendor/v8/src/ia32/disasm-ia32.cc +28 -1
  156. data/vendor/v8/src/ia32/frames-ia32.h +6 -16
  157. data/vendor/v8/src/ia32/full-codegen-ia32.cc +280 -272
  158. data/vendor/v8/src/ia32/ic-ia32.cc +150 -250
  159. data/vendor/v8/src/ia32/lithium-codegen-ia32.cc +1600 -517
  160. data/vendor/v8/src/ia32/lithium-codegen-ia32.h +90 -24
  161. data/vendor/v8/src/ia32/lithium-gap-resolver-ia32.cc +10 -6
  162. data/vendor/v8/src/ia32/lithium-gap-resolver-ia32.h +2 -2
  163. data/vendor/v8/src/ia32/lithium-ia32.cc +405 -302
  164. data/vendor/v8/src/ia32/lithium-ia32.h +526 -271
  165. data/vendor/v8/src/ia32/macro-assembler-ia32.cc +378 -119
  166. data/vendor/v8/src/ia32/macro-assembler-ia32.h +62 -28
  167. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.cc +43 -30
  168. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.h +2 -10
  169. data/vendor/v8/src/ia32/stub-cache-ia32.cc +492 -678
  170. data/vendor/v8/src/ic-inl.h +9 -4
  171. data/vendor/v8/src/ic.cc +836 -923
  172. data/vendor/v8/src/ic.h +228 -247
  173. data/vendor/v8/src/incremental-marking-inl.h +26 -30
  174. data/vendor/v8/src/incremental-marking.cc +276 -248
  175. data/vendor/v8/src/incremental-marking.h +29 -37
  176. data/vendor/v8/src/interface.cc +34 -25
  177. data/vendor/v8/src/interface.h +69 -25
  178. data/vendor/v8/src/interpreter-irregexp.cc +2 -2
  179. data/vendor/v8/src/isolate.cc +382 -76
  180. data/vendor/v8/src/isolate.h +109 -56
  181. data/vendor/v8/src/json-parser.h +217 -104
  182. data/vendor/v8/src/json-stringifier.h +745 -0
  183. data/vendor/v8/src/json.js +10 -132
  184. data/vendor/v8/src/jsregexp-inl.h +106 -0
  185. data/vendor/v8/src/jsregexp.cc +517 -285
  186. data/vendor/v8/src/jsregexp.h +145 -117
  187. data/vendor/v8/src/list-inl.h +35 -22
  188. data/vendor/v8/src/list.h +46 -19
  189. data/vendor/v8/src/lithium-allocator-inl.h +22 -2
  190. data/vendor/v8/src/lithium-allocator.cc +85 -70
  191. data/vendor/v8/src/lithium-allocator.h +21 -39
  192. data/vendor/v8/src/lithium.cc +259 -5
  193. data/vendor/v8/src/lithium.h +131 -32
  194. data/vendor/v8/src/liveedit-debugger.js +52 -3
  195. data/vendor/v8/src/liveedit.cc +393 -113
  196. data/vendor/v8/src/liveedit.h +7 -3
  197. data/vendor/v8/src/log-utils.cc +4 -2
  198. data/vendor/v8/src/log.cc +170 -140
  199. data/vendor/v8/src/log.h +62 -11
  200. data/vendor/v8/src/macro-assembler.h +17 -0
  201. data/vendor/v8/src/macros.py +2 -0
  202. data/vendor/v8/src/mark-compact-inl.h +3 -23
  203. data/vendor/v8/src/mark-compact.cc +801 -830
  204. data/vendor/v8/src/mark-compact.h +154 -47
  205. data/vendor/v8/src/marking-thread.cc +85 -0
  206. data/vendor/v8/src/{inspector.cc → marking-thread.h} +32 -24
  207. data/vendor/v8/src/math.js +12 -18
  208. data/vendor/v8/src/messages.cc +18 -8
  209. data/vendor/v8/src/messages.js +314 -261
  210. data/vendor/v8/src/mips/assembler-mips-inl.h +58 -6
  211. data/vendor/v8/src/mips/assembler-mips.cc +92 -75
  212. data/vendor/v8/src/mips/assembler-mips.h +54 -60
  213. data/vendor/v8/src/mips/builtins-mips.cc +116 -17
  214. data/vendor/v8/src/mips/code-stubs-mips.cc +919 -556
  215. data/vendor/v8/src/mips/code-stubs-mips.h +22 -131
  216. data/vendor/v8/src/mips/codegen-mips.cc +281 -6
  217. data/vendor/v8/src/mips/codegen-mips.h +22 -0
  218. data/vendor/v8/src/mips/constants-mips.cc +2 -0
  219. data/vendor/v8/src/mips/constants-mips.h +12 -2
  220. data/vendor/v8/src/mips/deoptimizer-mips.cc +286 -50
  221. data/vendor/v8/src/mips/disasm-mips.cc +13 -0
  222. data/vendor/v8/src/mips/full-codegen-mips.cc +297 -284
  223. data/vendor/v8/src/mips/ic-mips.cc +182 -263
  224. data/vendor/v8/src/mips/lithium-codegen-mips.cc +1208 -556
  225. data/vendor/v8/src/mips/lithium-codegen-mips.h +72 -19
  226. data/vendor/v8/src/mips/lithium-gap-resolver-mips.cc +9 -2
  227. data/vendor/v8/src/mips/lithium-mips.cc +290 -302
  228. data/vendor/v8/src/mips/lithium-mips.h +463 -266
  229. data/vendor/v8/src/mips/macro-assembler-mips.cc +208 -115
  230. data/vendor/v8/src/mips/macro-assembler-mips.h +67 -24
  231. data/vendor/v8/src/mips/regexp-macro-assembler-mips.cc +40 -25
  232. data/vendor/v8/src/mips/regexp-macro-assembler-mips.h +3 -9
  233. data/vendor/v8/src/mips/simulator-mips.cc +112 -40
  234. data/vendor/v8/src/mips/simulator-mips.h +5 -0
  235. data/vendor/v8/src/mips/stub-cache-mips.cc +502 -884
  236. data/vendor/v8/src/mirror-debugger.js +157 -30
  237. data/vendor/v8/src/mksnapshot.cc +88 -14
  238. data/vendor/v8/src/object-observe.js +235 -0
  239. data/vendor/v8/src/objects-debug.cc +178 -176
  240. data/vendor/v8/src/objects-inl.h +1333 -486
  241. data/vendor/v8/src/objects-printer.cc +125 -43
  242. data/vendor/v8/src/objects-visiting-inl.h +578 -6
  243. data/vendor/v8/src/objects-visiting.cc +2 -2
  244. data/vendor/v8/src/objects-visiting.h +172 -79
  245. data/vendor/v8/src/objects.cc +3533 -2885
  246. data/vendor/v8/src/objects.h +1352 -1131
  247. data/vendor/v8/src/optimizing-compiler-thread.cc +152 -0
  248. data/vendor/v8/src/optimizing-compiler-thread.h +111 -0
  249. data/vendor/v8/src/parser.cc +390 -500
  250. data/vendor/v8/src/parser.h +45 -33
  251. data/vendor/v8/src/platform-cygwin.cc +10 -21
  252. data/vendor/v8/src/platform-freebsd.cc +36 -41
  253. data/vendor/v8/src/platform-linux.cc +160 -124
  254. data/vendor/v8/src/platform-macos.cc +30 -27
  255. data/vendor/v8/src/platform-nullos.cc +17 -1
  256. data/vendor/v8/src/platform-openbsd.cc +19 -50
  257. data/vendor/v8/src/platform-posix.cc +14 -0
  258. data/vendor/v8/src/platform-solaris.cc +20 -53
  259. data/vendor/v8/src/platform-win32.cc +49 -26
  260. data/vendor/v8/src/platform.h +40 -1
  261. data/vendor/v8/src/preparser.cc +8 -5
  262. data/vendor/v8/src/preparser.h +2 -2
  263. data/vendor/v8/src/prettyprinter.cc +16 -0
  264. data/vendor/v8/src/prettyprinter.h +2 -0
  265. data/vendor/v8/src/profile-generator-inl.h +1 -0
  266. data/vendor/v8/src/profile-generator.cc +209 -147
  267. data/vendor/v8/src/profile-generator.h +15 -12
  268. data/vendor/v8/src/property-details.h +46 -31
  269. data/vendor/v8/src/property.cc +27 -46
  270. data/vendor/v8/src/property.h +163 -83
  271. data/vendor/v8/src/proxy.js +7 -2
  272. data/vendor/v8/src/regexp-macro-assembler-irregexp.cc +4 -13
  273. data/vendor/v8/src/regexp-macro-assembler-irregexp.h +1 -2
  274. data/vendor/v8/src/regexp-macro-assembler-tracer.cc +1 -11
  275. data/vendor/v8/src/regexp-macro-assembler-tracer.h +0 -1
  276. data/vendor/v8/src/regexp-macro-assembler.cc +31 -14
  277. data/vendor/v8/src/regexp-macro-assembler.h +14 -11
  278. data/vendor/v8/src/regexp-stack.cc +1 -0
  279. data/vendor/v8/src/regexp.js +9 -8
  280. data/vendor/v8/src/rewriter.cc +18 -7
  281. data/vendor/v8/src/runtime-profiler.cc +52 -43
  282. data/vendor/v8/src/runtime-profiler.h +0 -25
  283. data/vendor/v8/src/runtime.cc +2006 -2023
  284. data/vendor/v8/src/runtime.h +56 -49
  285. data/vendor/v8/src/safepoint-table.cc +12 -18
  286. data/vendor/v8/src/safepoint-table.h +11 -8
  287. data/vendor/v8/src/scanner.cc +1 -0
  288. data/vendor/v8/src/scanner.h +4 -10
  289. data/vendor/v8/src/scopeinfo.cc +35 -9
  290. data/vendor/v8/src/scopeinfo.h +64 -3
  291. data/vendor/v8/src/scopes.cc +251 -156
  292. data/vendor/v8/src/scopes.h +61 -27
  293. data/vendor/v8/src/serialize.cc +348 -396
  294. data/vendor/v8/src/serialize.h +125 -114
  295. data/vendor/v8/src/small-pointer-list.h +11 -11
  296. data/vendor/v8/src/{smart-array-pointer.h → smart-pointers.h} +64 -15
  297. data/vendor/v8/src/snapshot-common.cc +64 -15
  298. data/vendor/v8/src/snapshot-empty.cc +7 -1
  299. data/vendor/v8/src/snapshot.h +9 -2
  300. data/vendor/v8/src/spaces-inl.h +17 -0
  301. data/vendor/v8/src/spaces.cc +477 -183
  302. data/vendor/v8/src/spaces.h +238 -58
  303. data/vendor/v8/src/splay-tree-inl.h +8 -7
  304. data/vendor/v8/src/splay-tree.h +24 -10
  305. data/vendor/v8/src/store-buffer.cc +12 -5
  306. data/vendor/v8/src/store-buffer.h +2 -4
  307. data/vendor/v8/src/string-search.h +22 -6
  308. data/vendor/v8/src/string-stream.cc +11 -8
  309. data/vendor/v8/src/string.js +47 -15
  310. data/vendor/v8/src/stub-cache.cc +461 -224
  311. data/vendor/v8/src/stub-cache.h +164 -102
  312. data/vendor/v8/src/sweeper-thread.cc +105 -0
  313. data/vendor/v8/src/sweeper-thread.h +81 -0
  314. data/vendor/v8/src/token.h +1 -0
  315. data/vendor/v8/src/transitions-inl.h +220 -0
  316. data/vendor/v8/src/transitions.cc +160 -0
  317. data/vendor/v8/src/transitions.h +207 -0
  318. data/vendor/v8/src/type-info.cc +182 -181
  319. data/vendor/v8/src/type-info.h +31 -19
  320. data/vendor/v8/src/unicode-inl.h +62 -106
  321. data/vendor/v8/src/unicode.cc +57 -67
  322. data/vendor/v8/src/unicode.h +45 -91
  323. data/vendor/v8/src/uri.js +57 -29
  324. data/vendor/v8/src/utils.h +105 -5
  325. data/vendor/v8/src/v8-counters.cc +54 -11
  326. data/vendor/v8/src/v8-counters.h +134 -19
  327. data/vendor/v8/src/v8.cc +29 -29
  328. data/vendor/v8/src/v8.h +1 -0
  329. data/vendor/v8/src/v8conversions.cc +26 -22
  330. data/vendor/v8/src/v8globals.h +56 -43
  331. data/vendor/v8/src/v8natives.js +83 -30
  332. data/vendor/v8/src/v8threads.cc +42 -21
  333. data/vendor/v8/src/v8threads.h +4 -1
  334. data/vendor/v8/src/v8utils.cc +9 -93
  335. data/vendor/v8/src/v8utils.h +37 -33
  336. data/vendor/v8/src/variables.cc +6 -3
  337. data/vendor/v8/src/variables.h +6 -13
  338. data/vendor/v8/src/version.cc +2 -2
  339. data/vendor/v8/src/vm-state-inl.h +11 -0
  340. data/vendor/v8/src/x64/assembler-x64-inl.h +39 -8
  341. data/vendor/v8/src/x64/assembler-x64.cc +78 -64
  342. data/vendor/v8/src/x64/assembler-x64.h +38 -33
  343. data/vendor/v8/src/x64/builtins-x64.cc +105 -7
  344. data/vendor/v8/src/x64/code-stubs-x64.cc +790 -413
  345. data/vendor/v8/src/x64/code-stubs-x64.h +10 -106
  346. data/vendor/v8/src/x64/codegen-x64.cc +210 -8
  347. data/vendor/v8/src/x64/codegen-x64.h +20 -1
  348. data/vendor/v8/src/x64/deoptimizer-x64.cc +336 -75
  349. data/vendor/v8/src/x64/disasm-x64.cc +15 -0
  350. data/vendor/v8/src/x64/frames-x64.h +0 -14
  351. data/vendor/v8/src/x64/full-codegen-x64.cc +293 -270
  352. data/vendor/v8/src/x64/ic-x64.cc +153 -251
  353. data/vendor/v8/src/x64/lithium-codegen-x64.cc +1379 -531
  354. data/vendor/v8/src/x64/lithium-codegen-x64.h +67 -23
  355. data/vendor/v8/src/x64/lithium-gap-resolver-x64.cc +2 -2
  356. data/vendor/v8/src/x64/lithium-x64.cc +349 -289
  357. data/vendor/v8/src/x64/lithium-x64.h +460 -250
  358. data/vendor/v8/src/x64/macro-assembler-x64.cc +350 -177
  359. data/vendor/v8/src/x64/macro-assembler-x64.h +67 -49
  360. data/vendor/v8/src/x64/regexp-macro-assembler-x64.cc +46 -33
  361. data/vendor/v8/src/x64/regexp-macro-assembler-x64.h +2 -3
  362. data/vendor/v8/src/x64/stub-cache-x64.cc +484 -653
  363. data/vendor/v8/src/zone-inl.h +9 -27
  364. data/vendor/v8/src/zone.cc +5 -5
  365. data/vendor/v8/src/zone.h +53 -27
  366. data/vendor/v8/test/benchmarks/testcfg.py +5 -0
  367. data/vendor/v8/test/cctest/cctest.cc +4 -0
  368. data/vendor/v8/test/cctest/cctest.gyp +3 -1
  369. data/vendor/v8/test/cctest/cctest.h +57 -9
  370. data/vendor/v8/test/cctest/cctest.status +15 -15
  371. data/vendor/v8/test/cctest/test-accessors.cc +26 -0
  372. data/vendor/v8/test/cctest/test-alloc.cc +22 -30
  373. data/vendor/v8/test/cctest/test-api.cc +1943 -314
  374. data/vendor/v8/test/cctest/test-assembler-arm.cc +133 -13
  375. data/vendor/v8/test/cctest/test-assembler-ia32.cc +1 -1
  376. data/vendor/v8/test/cctest/test-assembler-mips.cc +12 -0
  377. data/vendor/v8/test/cctest/test-ast.cc +4 -2
  378. data/vendor/v8/test/cctest/test-compiler.cc +61 -29
  379. data/vendor/v8/test/cctest/test-dataflow.cc +2 -2
  380. data/vendor/v8/test/cctest/test-debug.cc +212 -33
  381. data/vendor/v8/test/cctest/test-decls.cc +257 -11
  382. data/vendor/v8/test/cctest/test-dictionary.cc +24 -10
  383. data/vendor/v8/test/cctest/test-disasm-arm.cc +118 -1
  384. data/vendor/v8/test/cctest/test-disasm-ia32.cc +3 -2
  385. data/vendor/v8/test/cctest/test-flags.cc +14 -1
  386. data/vendor/v8/test/cctest/test-func-name-inference.cc +7 -4
  387. data/vendor/v8/test/cctest/test-global-object.cc +51 -0
  388. data/vendor/v8/test/cctest/test-hashing.cc +32 -23
  389. data/vendor/v8/test/cctest/test-heap-profiler.cc +131 -77
  390. data/vendor/v8/test/cctest/test-heap.cc +1084 -143
  391. data/vendor/v8/test/cctest/test-list.cc +1 -1
  392. data/vendor/v8/test/cctest/test-liveedit.cc +3 -2
  393. data/vendor/v8/test/cctest/test-lockers.cc +12 -13
  394. data/vendor/v8/test/cctest/test-log.cc +10 -8
  395. data/vendor/v8/test/cctest/test-macro-assembler-x64.cc +2 -2
  396. data/vendor/v8/test/cctest/test-mark-compact.cc +44 -22
  397. data/vendor/v8/test/cctest/test-object-observe.cc +434 -0
  398. data/vendor/v8/test/cctest/test-parsing.cc +86 -39
  399. data/vendor/v8/test/cctest/test-platform-linux.cc +6 -0
  400. data/vendor/v8/test/cctest/test-platform-win32.cc +7 -0
  401. data/vendor/v8/test/cctest/test-random.cc +5 -4
  402. data/vendor/v8/test/cctest/test-regexp.cc +137 -101
  403. data/vendor/v8/test/cctest/test-serialize.cc +150 -230
  404. data/vendor/v8/test/cctest/test-sockets.cc +1 -1
  405. data/vendor/v8/test/cctest/test-spaces.cc +139 -0
  406. data/vendor/v8/test/cctest/test-strings.cc +736 -74
  407. data/vendor/v8/test/cctest/test-thread-termination.cc +10 -11
  408. data/vendor/v8/test/cctest/test-threads.cc +4 -4
  409. data/vendor/v8/test/cctest/test-utils.cc +16 -0
  410. data/vendor/v8/test/cctest/test-weakmaps.cc +7 -3
  411. data/vendor/v8/test/cctest/testcfg.py +64 -5
  412. data/vendor/v8/test/es5conform/testcfg.py +5 -0
  413. data/vendor/v8/test/message/message.status +1 -1
  414. data/vendor/v8/test/message/overwritten-builtins.out +3 -0
  415. data/vendor/v8/test/message/testcfg.py +89 -8
  416. data/vendor/v8/test/message/try-catch-finally-no-message.out +26 -26
  417. data/vendor/v8/test/mjsunit/accessor-map-sharing.js +18 -2
  418. data/vendor/v8/test/mjsunit/allocation-site-info.js +126 -0
  419. data/vendor/v8/test/mjsunit/array-bounds-check-removal.js +62 -1
  420. data/vendor/v8/test/mjsunit/array-iteration.js +1 -1
  421. data/vendor/v8/test/mjsunit/array-literal-transitions.js +2 -0
  422. data/vendor/v8/test/mjsunit/array-natives-elements.js +317 -0
  423. data/vendor/v8/test/mjsunit/array-reduce.js +8 -8
  424. data/vendor/v8/test/mjsunit/array-slice.js +12 -0
  425. data/vendor/v8/test/mjsunit/array-store-and-grow.js +4 -1
  426. data/vendor/v8/test/mjsunit/assert-opt-and-deopt.js +1 -1
  427. data/vendor/v8/test/mjsunit/bugs/bug-2337.js +53 -0
  428. data/vendor/v8/test/mjsunit/compare-known-objects-slow.js +69 -0
  429. data/vendor/v8/test/mjsunit/compiler/alloc-object-huge.js +3 -1
  430. data/vendor/v8/test/mjsunit/compiler/inline-accessors.js +368 -0
  431. data/vendor/v8/test/mjsunit/compiler/inline-arguments.js +87 -1
  432. data/vendor/v8/test/mjsunit/compiler/inline-closures.js +49 -0
  433. data/vendor/v8/test/mjsunit/compiler/inline-construct.js +55 -43
  434. data/vendor/v8/test/mjsunit/compiler/inline-literals.js +39 -0
  435. data/vendor/v8/test/mjsunit/compiler/multiply-add.js +69 -0
  436. data/vendor/v8/test/mjsunit/compiler/optimized-closures.js +57 -0
  437. data/vendor/v8/test/mjsunit/compiler/parallel-proto-change.js +44 -0
  438. data/vendor/v8/test/mjsunit/compiler/property-static.js +69 -0
  439. data/vendor/v8/test/mjsunit/compiler/proto-chain-constant.js +55 -0
  440. data/vendor/v8/test/mjsunit/compiler/proto-chain-load.js +44 -0
  441. data/vendor/v8/test/mjsunit/compiler/regress-gvn.js +3 -2
  442. data/vendor/v8/test/mjsunit/compiler/regress-or.js +6 -2
  443. data/vendor/v8/test/mjsunit/compiler/rotate.js +224 -0
  444. data/vendor/v8/test/mjsunit/compiler/uint32.js +173 -0
  445. data/vendor/v8/test/mjsunit/count-based-osr.js +2 -1
  446. data/vendor/v8/test/mjsunit/d8-os.js +3 -3
  447. data/vendor/v8/test/mjsunit/date-parse.js +3 -0
  448. data/vendor/v8/test/mjsunit/date.js +22 -0
  449. data/vendor/v8/test/mjsunit/debug-break-inline.js +1 -0
  450. data/vendor/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js +22 -12
  451. data/vendor/v8/test/mjsunit/debug-evaluate-locals-optimized.js +21 -10
  452. data/vendor/v8/test/mjsunit/debug-liveedit-compile-error.js +60 -0
  453. data/vendor/v8/test/mjsunit/debug-liveedit-double-call.js +142 -0
  454. data/vendor/v8/test/mjsunit/debug-liveedit-literals.js +94 -0
  455. data/vendor/v8/test/mjsunit/debug-liveedit-restart-frame.js +153 -0
  456. data/vendor/v8/test/mjsunit/debug-multiple-breakpoints.js +1 -1
  457. data/vendor/v8/test/mjsunit/debug-script-breakpoints-closure.js +67 -0
  458. data/vendor/v8/test/mjsunit/debug-script-breakpoints-nested.js +82 -0
  459. data/vendor/v8/test/mjsunit/debug-script.js +4 -2
  460. data/vendor/v8/test/mjsunit/debug-set-variable-value.js +308 -0
  461. data/vendor/v8/test/mjsunit/debug-stepout-scope-part1.js +190 -0
  462. data/vendor/v8/test/mjsunit/debug-stepout-scope-part2.js +83 -0
  463. data/vendor/v8/test/mjsunit/debug-stepout-scope-part3.js +80 -0
  464. data/vendor/v8/test/mjsunit/debug-stepout-scope-part4.js +80 -0
  465. data/vendor/v8/test/mjsunit/debug-stepout-scope-part5.js +77 -0
  466. data/vendor/v8/test/mjsunit/debug-stepout-scope-part6.js +79 -0
  467. data/vendor/v8/test/mjsunit/debug-stepout-scope-part7.js +79 -0
  468. data/vendor/v8/test/mjsunit/{debug-stepout-scope.js → debug-stepout-scope-part8.js} +0 -189
  469. data/vendor/v8/test/mjsunit/delete-non-configurable.js +74 -0
  470. data/vendor/v8/test/mjsunit/deopt-minus-zero.js +56 -0
  471. data/vendor/v8/test/mjsunit/elements-kind.js +6 -4
  472. data/vendor/v8/test/mjsunit/elements-length-no-holey.js +33 -0
  473. data/vendor/v8/test/mjsunit/elements-transition-hoisting.js +46 -19
  474. data/vendor/v8/test/mjsunit/error-accessors.js +54 -0
  475. data/vendor/v8/test/mjsunit/error-constructors.js +1 -14
  476. data/vendor/v8/test/mjsunit/error-tostring.js +8 -0
  477. data/vendor/v8/test/mjsunit/eval-stack-trace.js +204 -0
  478. data/vendor/v8/test/mjsunit/external-array.js +364 -1
  479. data/vendor/v8/test/mjsunit/fast-array-length.js +37 -0
  480. data/vendor/v8/test/mjsunit/fast-non-keyed.js +113 -0
  481. data/vendor/v8/test/mjsunit/fast-prototype.js +117 -0
  482. data/vendor/v8/test/mjsunit/function-call.js +14 -18
  483. data/vendor/v8/test/mjsunit/fuzz-natives-part1.js +230 -0
  484. data/vendor/v8/test/mjsunit/fuzz-natives-part2.js +229 -0
  485. data/vendor/v8/test/mjsunit/fuzz-natives-part3.js +229 -0
  486. data/vendor/v8/test/mjsunit/{fuzz-natives.js → fuzz-natives-part4.js} +12 -2
  487. data/vendor/v8/test/mjsunit/generated-transition-stub.js +218 -0
  488. data/vendor/v8/test/mjsunit/greedy.js +1 -1
  489. data/vendor/v8/test/mjsunit/harmony/block-conflicts.js +2 -1
  490. data/vendor/v8/test/mjsunit/harmony/block-let-crankshaft.js +1 -1
  491. data/vendor/v8/test/mjsunit/harmony/collections.js +69 -11
  492. data/vendor/v8/test/mjsunit/harmony/debug-blockscopes.js +2 -2
  493. data/vendor/v8/test/mjsunit/harmony/module-linking.js +180 -3
  494. data/vendor/v8/test/mjsunit/harmony/module-parsing.js +31 -0
  495. data/vendor/v8/test/mjsunit/harmony/module-recompile.js +87 -0
  496. data/vendor/v8/test/mjsunit/harmony/module-resolution.js +15 -2
  497. data/vendor/v8/test/mjsunit/harmony/object-observe.js +1056 -0
  498. data/vendor/v8/test/mjsunit/harmony/proxies-json.js +178 -0
  499. data/vendor/v8/test/mjsunit/harmony/proxies.js +25 -10
  500. data/vendor/v8/test/mjsunit/json-parser-recursive.js +33 -0
  501. data/vendor/v8/test/mjsunit/json-stringify-recursive.js +52 -0
  502. data/vendor/v8/test/mjsunit/json.js +38 -2
  503. data/vendor/v8/test/mjsunit/json2.js +153 -0
  504. data/vendor/v8/test/mjsunit/limit-locals.js +5 -4
  505. data/vendor/v8/test/mjsunit/manual-parallel-recompile.js +79 -0
  506. data/vendor/v8/test/mjsunit/math-exp-precision.js +64 -0
  507. data/vendor/v8/test/mjsunit/math-floor-negative.js +59 -0
  508. data/vendor/v8/test/mjsunit/math-floor-of-div-minus-zero.js +41 -0
  509. data/vendor/v8/test/mjsunit/math-floor-of-div-nosudiv.js +288 -0
  510. data/vendor/v8/test/mjsunit/math-floor-of-div.js +81 -9
  511. data/vendor/v8/test/mjsunit/{math-floor.js → math-floor-part1.js} +1 -72
  512. data/vendor/v8/test/mjsunit/math-floor-part2.js +76 -0
  513. data/vendor/v8/test/mjsunit/math-floor-part3.js +78 -0
  514. data/vendor/v8/test/mjsunit/math-floor-part4.js +76 -0
  515. data/vendor/v8/test/mjsunit/mirror-object.js +43 -9
  516. data/vendor/v8/test/mjsunit/mjsunit.js +1 -1
  517. data/vendor/v8/test/mjsunit/mjsunit.status +52 -27
  518. data/vendor/v8/test/mjsunit/mul-exhaustive-part1.js +491 -0
  519. data/vendor/v8/test/mjsunit/mul-exhaustive-part10.js +470 -0
  520. data/vendor/v8/test/mjsunit/mul-exhaustive-part2.js +525 -0
  521. data/vendor/v8/test/mjsunit/mul-exhaustive-part3.js +532 -0
  522. data/vendor/v8/test/mjsunit/mul-exhaustive-part4.js +509 -0
  523. data/vendor/v8/test/mjsunit/mul-exhaustive-part5.js +505 -0
  524. data/vendor/v8/test/mjsunit/mul-exhaustive-part6.js +554 -0
  525. data/vendor/v8/test/mjsunit/mul-exhaustive-part7.js +497 -0
  526. data/vendor/v8/test/mjsunit/mul-exhaustive-part8.js +526 -0
  527. data/vendor/v8/test/mjsunit/mul-exhaustive-part9.js +533 -0
  528. data/vendor/v8/test/mjsunit/new-function.js +34 -0
  529. data/vendor/v8/test/mjsunit/numops-fuzz-part1.js +1172 -0
  530. data/vendor/v8/test/mjsunit/numops-fuzz-part2.js +1178 -0
  531. data/vendor/v8/test/mjsunit/numops-fuzz-part3.js +1178 -0
  532. data/vendor/v8/test/mjsunit/numops-fuzz-part4.js +1177 -0
  533. data/vendor/v8/test/mjsunit/object-define-property.js +107 -2
  534. data/vendor/v8/test/mjsunit/override-read-only-property.js +6 -4
  535. data/vendor/v8/test/mjsunit/packed-elements.js +2 -2
  536. data/vendor/v8/test/mjsunit/parse-int-float.js +4 -4
  537. data/vendor/v8/test/mjsunit/pixel-array-rounding.js +1 -1
  538. data/vendor/v8/test/mjsunit/readonly.js +228 -0
  539. data/vendor/v8/test/mjsunit/regexp-capture-3.js +16 -18
  540. data/vendor/v8/test/mjsunit/regexp-capture.js +2 -0
  541. data/vendor/v8/test/mjsunit/regexp-global.js +122 -0
  542. data/vendor/v8/test/mjsunit/regexp-results-cache.js +78 -0
  543. data/vendor/v8/test/mjsunit/regress/regress-1117.js +12 -3
  544. data/vendor/v8/test/mjsunit/regress/regress-1118.js +1 -1
  545. data/vendor/v8/test/mjsunit/regress/regress-115100.js +36 -0
  546. data/vendor/v8/test/mjsunit/regress/regress-1199637.js +1 -3
  547. data/vendor/v8/test/mjsunit/regress/regress-121407.js +1 -1
  548. data/vendor/v8/test/mjsunit/regress/regress-131923.js +30 -0
  549. data/vendor/v8/test/mjsunit/regress/regress-131994.js +70 -0
  550. data/vendor/v8/test/mjsunit/regress/regress-133211.js +35 -0
  551. data/vendor/v8/test/mjsunit/regress/regress-133211b.js +39 -0
  552. data/vendor/v8/test/mjsunit/regress/regress-136048.js +34 -0
  553. data/vendor/v8/test/mjsunit/regress/regress-137768.js +73 -0
  554. data/vendor/v8/test/mjsunit/regress/regress-143967.js +34 -0
  555. data/vendor/v8/test/mjsunit/regress/regress-145201.js +107 -0
  556. data/vendor/v8/test/mjsunit/regress/regress-147497.js +45 -0
  557. data/vendor/v8/test/mjsunit/regress/regress-148378.js +38 -0
  558. data/vendor/v8/test/mjsunit/regress/regress-1563.js +1 -1
  559. data/vendor/v8/test/mjsunit/regress/regress-1591.js +48 -0
  560. data/vendor/v8/test/mjsunit/regress/regress-164442.js +45 -0
  561. data/vendor/v8/test/mjsunit/regress/regress-165637.js +61 -0
  562. data/vendor/v8/test/mjsunit/regress/regress-166379.js +39 -0
  563. data/vendor/v8/test/mjsunit/regress/regress-166553.js +33 -0
  564. data/vendor/v8/test/mjsunit/regress/regress-1692.js +1 -1
  565. data/vendor/v8/test/mjsunit/regress/regress-171641.js +40 -0
  566. data/vendor/v8/test/mjsunit/regress/regress-1980.js +1 -1
  567. data/vendor/v8/test/mjsunit/regress/regress-2073.js +99 -0
  568. data/vendor/v8/test/mjsunit/regress/regress-2119.js +36 -0
  569. data/vendor/v8/test/mjsunit/regress/regress-2156.js +39 -0
  570. data/vendor/v8/test/mjsunit/regress/regress-2163.js +70 -0
  571. data/vendor/v8/test/mjsunit/regress/regress-2170.js +58 -0
  572. data/vendor/v8/test/mjsunit/regress/regress-2172.js +35 -0
  573. data/vendor/v8/test/mjsunit/regress/regress-2185-2.js +145 -0
  574. data/vendor/v8/test/mjsunit/regress/regress-2185.js +38 -0
  575. data/vendor/v8/test/mjsunit/regress/regress-2186.js +49 -0
  576. data/vendor/v8/test/mjsunit/regress/regress-2193.js +58 -0
  577. data/vendor/v8/test/mjsunit/regress/regress-2219.js +32 -0
  578. data/vendor/v8/test/mjsunit/regress/regress-2225.js +65 -0
  579. data/vendor/v8/test/mjsunit/regress/regress-2226.js +36 -0
  580. data/vendor/v8/test/mjsunit/regress/regress-2234.js +41 -0
  581. data/vendor/v8/test/mjsunit/regress/regress-2243.js +31 -0
  582. data/vendor/v8/test/mjsunit/regress/regress-2249.js +33 -0
  583. data/vendor/v8/test/mjsunit/regress/regress-2250.js +68 -0
  584. data/vendor/v8/test/mjsunit/regress/regress-2261.js +113 -0
  585. data/vendor/v8/test/mjsunit/regress/regress-2263.js +30 -0
  586. data/vendor/v8/test/mjsunit/regress/regress-2284.js +32 -0
  587. data/vendor/v8/test/mjsunit/regress/regress-2285.js +32 -0
  588. data/vendor/v8/test/mjsunit/regress/regress-2286.js +32 -0
  589. data/vendor/v8/test/mjsunit/regress/regress-2289.js +34 -0
  590. data/vendor/v8/test/mjsunit/regress/regress-2291.js +36 -0
  591. data/vendor/v8/test/mjsunit/regress/regress-2294.js +70 -0
  592. data/vendor/v8/test/mjsunit/regress/regress-2296.js +40 -0
  593. data/vendor/v8/test/mjsunit/regress/regress-2315.js +40 -0
  594. data/vendor/v8/test/mjsunit/regress/regress-2318.js +66 -0
  595. data/vendor/v8/test/mjsunit/regress/regress-2322.js +36 -0
  596. data/vendor/v8/test/mjsunit/regress/regress-2326.js +54 -0
  597. data/vendor/v8/test/mjsunit/regress/regress-2336.js +53 -0
  598. data/vendor/v8/test/mjsunit/regress/regress-2339.js +59 -0
  599. data/vendor/v8/test/mjsunit/regress/regress-2346.js +123 -0
  600. data/vendor/v8/test/mjsunit/regress/regress-2373.js +29 -0
  601. data/vendor/v8/test/mjsunit/regress/regress-2374.js +33 -0
  602. data/vendor/v8/test/mjsunit/regress/regress-2398.js +41 -0
  603. data/vendor/v8/test/mjsunit/regress/regress-2410.js +36 -0
  604. data/vendor/v8/test/mjsunit/regress/regress-2416.js +75 -0
  605. data/vendor/v8/test/mjsunit/regress/regress-2419.js +37 -0
  606. data/vendor/v8/test/mjsunit/regress/regress-2433.js +36 -0
  607. data/vendor/v8/test/mjsunit/regress/regress-2437.js +156 -0
  608. data/vendor/v8/test/mjsunit/regress/regress-2438.js +52 -0
  609. data/vendor/v8/test/mjsunit/regress/regress-2443.js +129 -0
  610. data/vendor/v8/test/mjsunit/regress/regress-2444.js +120 -0
  611. data/vendor/v8/test/mjsunit/regress/regress-2489.js +50 -0
  612. data/vendor/v8/test/mjsunit/regress/regress-2499.js +40 -0
  613. data/vendor/v8/test/mjsunit/regress/regress-334.js +1 -1
  614. data/vendor/v8/test/mjsunit/regress/regress-492.js +39 -1
  615. data/vendor/v8/test/mjsunit/regress/regress-builtin-array-op.js +38 -0
  616. data/vendor/v8/test/mjsunit/regress/regress-cnlt-elements.js +43 -0
  617. data/vendor/v8/test/mjsunit/regress/regress-cnlt-enum-indices.js +45 -0
  618. data/vendor/v8/test/mjsunit/regress/regress-cntl-descriptors-enum.js +46 -0
  619. data/vendor/v8/test/mjsunit/regress/regress-convert-enum.js +60 -0
  620. data/vendor/v8/test/mjsunit/regress/regress-convert-enum2.js +46 -0
  621. data/vendor/v8/test/mjsunit/regress/regress-convert-transition.js +40 -0
  622. data/vendor/v8/test/mjsunit/regress/regress-crbug-119926.js +3 -1
  623. data/vendor/v8/test/mjsunit/regress/regress-crbug-125148.js +90 -0
  624. data/vendor/v8/test/mjsunit/regress/regress-crbug-134055.js +63 -0
  625. data/vendor/v8/test/mjsunit/regress/regress-crbug-134609.js +59 -0
  626. data/vendor/v8/test/mjsunit/regress/regress-crbug-135008.js +45 -0
  627. data/vendor/v8/test/mjsunit/regress/regress-crbug-135066.js +55 -0
  628. data/vendor/v8/test/mjsunit/regress/regress-crbug-137689.js +47 -0
  629. data/vendor/v8/test/mjsunit/regress/regress-crbug-138887.js +48 -0
  630. data/vendor/v8/test/mjsunit/regress/regress-crbug-140083.js +44 -0
  631. data/vendor/v8/test/mjsunit/regress/regress-crbug-142087.js +38 -0
  632. data/vendor/v8/test/mjsunit/regress/regress-crbug-142218.js +44 -0
  633. data/vendor/v8/test/mjsunit/regress/regress-crbug-145961.js +39 -0
  634. data/vendor/v8/test/mjsunit/regress/regress-crbug-146910.js +33 -0
  635. data/vendor/v8/test/mjsunit/regress/regress-crbug-147475.js +48 -0
  636. data/vendor/v8/test/mjsunit/regress/regress-crbug-148376.js +35 -0
  637. data/vendor/v8/test/mjsunit/regress/regress-crbug-150545.js +53 -0
  638. data/vendor/v8/test/mjsunit/regress/regress-crbug-150729.js +39 -0
  639. data/vendor/v8/test/mjsunit/regress/regress-crbug-157019.js +54 -0
  640. data/vendor/v8/test/mjsunit/regress/regress-crbug-157520.js +38 -0
  641. data/vendor/v8/test/mjsunit/regress/regress-crbug-158185.js +39 -0
  642. data/vendor/v8/test/mjsunit/regress/regress-crbug-160010.js +35 -0
  643. data/vendor/v8/test/mjsunit/regress/regress-crbug-162085.js +71 -0
  644. data/vendor/v8/test/mjsunit/regress/regress-crbug-168545.js +34 -0
  645. data/vendor/v8/test/mjsunit/regress/regress-crbug-170856.js +33 -0
  646. data/vendor/v8/test/mjsunit/regress/regress-crbug-172345.js +34 -0
  647. data/vendor/v8/test/mjsunit/regress/regress-crbug-173974.js +36 -0
  648. data/vendor/v8/test/mjsunit/regress/regress-crbug-18639.js +9 -5
  649. data/vendor/v8/test/mjsunit/regress/regress-debug-code-recompilation.js +2 -1
  650. data/vendor/v8/test/mjsunit/regress/regress-deep-proto.js +45 -0
  651. data/vendor/v8/test/mjsunit/regress/regress-delete-empty-double.js +40 -0
  652. data/vendor/v8/test/mjsunit/regress/regress-iteration-order.js +42 -0
  653. data/vendor/v8/test/mjsunit/regress/regress-json-stringify-gc.js +41 -0
  654. data/vendor/v8/test/mjsunit/regress/regress-latin-1.js +78 -0
  655. data/vendor/v8/test/mjsunit/regress/regress-load-elements.js +49 -0
  656. data/vendor/v8/test/mjsunit/regress/regress-observe-empty-double-array.js +38 -0
  657. data/vendor/v8/test/mjsunit/regress/regress-undefined-store-keyed-fast-element.js +37 -0
  658. data/vendor/v8/test/mjsunit/shift-for-integer-div.js +59 -0
  659. data/vendor/v8/test/mjsunit/stack-traces-gc.js +119 -0
  660. data/vendor/v8/test/mjsunit/stack-traces-overflow.js +122 -0
  661. data/vendor/v8/test/mjsunit/stack-traces.js +39 -1
  662. data/vendor/v8/test/mjsunit/str-to-num.js +7 -2
  663. data/vendor/v8/test/mjsunit/strict-mode.js +36 -11
  664. data/vendor/v8/test/mjsunit/string-charcodeat.js +3 -0
  665. data/vendor/v8/test/mjsunit/string-natives.js +72 -0
  666. data/vendor/v8/test/mjsunit/string-split.js +17 -0
  667. data/vendor/v8/test/mjsunit/testcfg.py +76 -6
  668. data/vendor/v8/test/mjsunit/tools/tickprocessor.js +4 -1
  669. data/vendor/v8/test/mjsunit/try-finally-continue.js +72 -0
  670. data/vendor/v8/test/mjsunit/typed-array-slice.js +61 -0
  671. data/vendor/v8/test/mjsunit/unbox-double-arrays.js +2 -0
  672. data/vendor/v8/test/mjsunit/uri.js +12 -0
  673. data/vendor/v8/test/mjsunit/with-readonly.js +4 -2
  674. data/vendor/v8/test/mozilla/mozilla.status +19 -113
  675. data/vendor/v8/test/mozilla/testcfg.py +122 -3
  676. data/vendor/v8/test/preparser/preparser.status +5 -0
  677. data/vendor/v8/test/preparser/strict-identifiers.pyt +1 -1
  678. data/vendor/v8/test/preparser/testcfg.py +101 -5
  679. data/vendor/v8/test/sputnik/sputnik.status +1 -1
  680. data/vendor/v8/test/sputnik/testcfg.py +5 -0
  681. data/vendor/v8/test/test262/README +2 -2
  682. data/vendor/v8/test/test262/test262.status +13 -36
  683. data/vendor/v8/test/test262/testcfg.py +102 -8
  684. data/vendor/v8/tools/android-build.sh +0 -0
  685. data/vendor/v8/tools/android-ll-prof.sh +69 -0
  686. data/vendor/v8/tools/android-run.py +109 -0
  687. data/vendor/v8/tools/android-sync.sh +105 -0
  688. data/vendor/v8/tools/bash-completion.sh +0 -0
  689. data/vendor/v8/tools/check-static-initializers.sh +0 -0
  690. data/vendor/v8/tools/common-includes.sh +15 -22
  691. data/vendor/v8/tools/disasm.py +4 -4
  692. data/vendor/v8/tools/fuzz-harness.sh +0 -0
  693. data/vendor/v8/tools/gen-postmortem-metadata.py +6 -8
  694. data/vendor/v8/tools/grokdump.py +404 -129
  695. data/vendor/v8/tools/gyp/v8.gyp +105 -43
  696. data/vendor/v8/tools/linux-tick-processor +5 -5
  697. data/vendor/v8/tools/ll_prof.py +75 -15
  698. data/vendor/v8/tools/merge-to-branch.sh +2 -2
  699. data/vendor/v8/tools/plot-timer-events +70 -0
  700. data/vendor/v8/tools/plot-timer-events.js +510 -0
  701. data/vendor/v8/tools/presubmit.py +1 -0
  702. data/vendor/v8/tools/push-to-trunk.sh +14 -4
  703. data/vendor/v8/tools/run-llprof.sh +69 -0
  704. data/vendor/v8/tools/run-tests.py +372 -0
  705. data/vendor/v8/tools/run-valgrind.py +1 -1
  706. data/vendor/v8/tools/status-file-converter.py +39 -0
  707. data/vendor/v8/tools/test-server.py +224 -0
  708. data/vendor/v8/tools/test-wrapper-gypbuild.py +13 -16
  709. data/vendor/v8/tools/test.py +10 -19
  710. data/vendor/v8/tools/testrunner/README +174 -0
  711. data/vendor/v8/tools/testrunner/__init__.py +26 -0
  712. data/vendor/v8/tools/testrunner/local/__init__.py +26 -0
  713. data/vendor/v8/tools/testrunner/local/commands.py +153 -0
  714. data/vendor/v8/tools/testrunner/local/execution.py +182 -0
  715. data/vendor/v8/tools/testrunner/local/old_statusfile.py +460 -0
  716. data/vendor/v8/tools/testrunner/local/progress.py +238 -0
  717. data/vendor/v8/tools/testrunner/local/statusfile.py +145 -0
  718. data/vendor/v8/tools/testrunner/local/testsuite.py +187 -0
  719. data/vendor/v8/tools/testrunner/local/utils.py +108 -0
  720. data/vendor/v8/tools/testrunner/local/verbose.py +99 -0
  721. data/vendor/v8/tools/testrunner/network/__init__.py +26 -0
  722. data/vendor/v8/tools/testrunner/network/distro.py +90 -0
  723. data/vendor/v8/tools/testrunner/network/endpoint.py +124 -0
  724. data/vendor/v8/tools/testrunner/network/network_execution.py +253 -0
  725. data/vendor/v8/tools/testrunner/network/perfdata.py +120 -0
  726. data/vendor/v8/tools/testrunner/objects/__init__.py +26 -0
  727. data/vendor/v8/tools/testrunner/objects/context.py +50 -0
  728. data/vendor/v8/tools/testrunner/objects/output.py +60 -0
  729. data/vendor/v8/tools/testrunner/objects/peer.py +80 -0
  730. data/vendor/v8/tools/testrunner/objects/testcase.py +83 -0
  731. data/vendor/v8/tools/testrunner/objects/workpacket.py +90 -0
  732. data/vendor/v8/tools/testrunner/server/__init__.py +26 -0
  733. data/vendor/v8/tools/testrunner/server/compression.py +111 -0
  734. data/vendor/v8/tools/testrunner/server/constants.py +51 -0
  735. data/vendor/v8/tools/testrunner/server/daemon.py +147 -0
  736. data/vendor/v8/tools/testrunner/server/local_handler.py +119 -0
  737. data/vendor/v8/tools/testrunner/server/main.py +245 -0
  738. data/vendor/v8/tools/testrunner/server/presence_handler.py +120 -0
  739. data/vendor/v8/tools/testrunner/server/signatures.py +63 -0
  740. data/vendor/v8/tools/testrunner/server/status_handler.py +112 -0
  741. data/vendor/v8/tools/testrunner/server/work_handler.py +150 -0
  742. data/vendor/v8/tools/tick-processor.html +168 -0
  743. data/vendor/v8/tools/tickprocessor-driver.js +5 -3
  744. data/vendor/v8/tools/tickprocessor.js +58 -15
  745. metadata +534 -30
  746. data/patches/add-freebsd9-and-freebsd10-to-gyp-GetFlavor.patch +0 -11
  747. data/patches/do-not-imply-vfp3-and-armv7.patch +0 -44
  748. data/patches/fPIC-on-x64.patch +0 -14
  749. data/vendor/v8/src/liveobjectlist-inl.h +0 -126
  750. data/vendor/v8/src/liveobjectlist.cc +0 -2631
  751. data/vendor/v8/src/liveobjectlist.h +0 -319
  752. data/vendor/v8/test/mjsunit/mul-exhaustive.js +0 -4629
  753. data/vendor/v8/test/mjsunit/numops-fuzz.js +0 -4609
  754. data/vendor/v8/test/mjsunit/regress/regress-1969.js +0 -5045
--- a/data/vendor/v8/src/x64/builtins-x64.cc
+++ b/data/vendor/v8/src/x64/builtins-x64.cc
@@ -73,6 +73,45 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm,
 }
 
 
+static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
+  __ movq(kScratchRegister,
+          FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
+  __ movq(kScratchRegister,
+          FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset));
+  __ lea(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
+  __ jmp(kScratchRegister);
+}
+
+
+void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
+  GenerateTailCallToSharedCode(masm);
+}
+
+
+void Builtins::Generate_ParallelRecompile(MacroAssembler* masm) {
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+
+    // Push a copy of the function onto the stack.
+    __ push(rdi);
+    // Push call kind information.
+    __ push(rcx);
+
+    __ push(rdi);  // Function is also the parameter to the runtime call.
+    __ CallRuntime(Runtime::kParallelRecompile, 1);
+
+    // Restore call kind information.
+    __ pop(rcx);
+    // Restore receiver.
+    __ pop(rdi);
+
+    // Tear down internal frame.
+  }
+
+  GenerateTailCallToSharedCode(masm);
+}
+
+
 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                            bool is_api_function,
                                            bool count_constructions) {
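Both new builtins funnel into GenerateTailCallToSharedCode, which follows the function's SharedFunctionInfo to its unoptimized code and jumps straight to it. A minimal standalone sketch of that control flow, using hypothetical stand-in types rather than V8's real JSFunction/SharedFunctionInfo classes:

#include <cstdio>

// Stand-ins for V8 heap objects (hypothetical, for illustration only).
struct SharedFunctionInfo { void (*code)(); };
struct JSFunction { SharedFunctionInfo* shared; };

// Analogue of GenerateTailCallToSharedCode: follow function -> shared ->
// code and transfer control. The real stub emits a jmp, not a call, so the
// builtin adds no frame of its own.
void TailCallToSharedCode(JSFunction* function) {
  function->shared->code();
}

void UnoptimizedCode() { std::puts("running unoptimized code"); }

int main() {
  SharedFunctionInfo shared = { UnoptimizedCode };
  JSFunction function = { &shared };
  TailCallToSharedCode(&function);
  return 0;
}

This is also why Generate_ParallelRecompile tears down its internal frame before the tail call: nothing of the builtin should remain on the stack when the shared code starts executing.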
@@ -567,6 +606,65 @@ void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
 }
 
 
+static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
+  // For now, we are relying on the fact that make_code_young doesn't do any
+  // garbage collection which allows us to save/restore the registers without
+  // worrying about which of them contain pointers. We also don't build an
+  // internal frame to make the code faster, since we shouldn't have to do
+  // stack crawls in MakeCodeYoung. This seems a bit fragile.
+
+  // Re-execute the code that was patched back to the young age when
+  // the stub returns.
+  __ subq(Operand(rsp, 0), Immediate(5));
+  __ Pushad();
+#ifdef _WIN64
+  __ movq(rcx, Operand(rsp, kNumSafepointRegisters * kPointerSize));
+#else
+  __ movq(rdi, Operand(rsp, kNumSafepointRegisters * kPointerSize));
+#endif
+  {  // NOLINT
+    FrameScope scope(masm, StackFrame::MANUAL);
+    __ PrepareCallCFunction(1);
+    __ CallCFunction(
+        ExternalReference::get_make_code_young_function(masm->isolate()), 1);
+  }
+  __ Popad();
+  __ ret(0);
+}
+
+
+#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
+void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
+    MacroAssembler* masm) {                                  \
+  GenerateMakeCodeYoungAgainCommon(masm);                    \
+}                                                            \
+void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
+    MacroAssembler* masm) {                                  \
+  GenerateMakeCodeYoungAgainCommon(masm);                    \
+}
+CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
+#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
+
+
+void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
+  // Enter an internal frame.
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+
+    // Preserve registers across notification, this is important for compiled
+    // stubs that tail call the runtime on deopts passing their parameters in
+    // registers.
+    __ Pushad();
+    __ CallRuntime(Runtime::kNotifyStubFailure, 0);
+    __ Popad();
+    // Tear down internal frame.
+  }
+
+  __ pop(MemOperand(rsp, 0));  // Ignore state offset
+  __ ret(0);  // Return to IC Miss stub, continuation still on stack.
+}
+
+
 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                              Deoptimizer::BailoutType type) {
   // Enter an internal frame.
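DEFINE_CODE_AGE_BUILTIN_GENERATOR together with CODE_AGE_LIST is an X-macro: the list macro invokes the generator once per code age, stamping out an even-marking and an odd-marking builtin for each age. A self-contained sketch of the same expansion pattern (the list and age names below are illustrative stand-ins, not V8's actual CODE_AGE_LIST):

#include <cstdio>

// Hypothetical age list; each V(...) entry names one generated pair.
#define AGE_LIST(V)  \
  V(Quadragenarian)  \
  V(Quinquagenarian)

// Generator macro: stamped out once per list entry via token pasting.
#define DEFINE_AGE_FUNCTIONS(C)                                        \
  void Make##C##CodeYoungAgainEvenMarking() { std::puts("even " #C); } \
  void Make##C##CodeYoungAgainOddMarking()  { std::puts("odd " #C); }
AGE_LIST(DEFINE_AGE_FUNCTIONS)
#undef DEFINE_AGE_FUNCTIONS

int main() {
  MakeQuadragenarianCodeYoungAgainEvenMarking();
  MakeQuinquagenarianCodeYoungAgainOddMarking();
  return 0;
}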
@@ -581,17 +679,17 @@ static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
   }
 
   // Get the full codegen state from the stack and untag it.
-  __ SmiToInteger32(rcx, Operand(rsp, 1 * kPointerSize));
+  __ SmiToInteger32(r10, Operand(rsp, 1 * kPointerSize));
 
   // Switch on the state.
   Label not_no_registers, not_tos_rax;
-  __ cmpq(rcx, Immediate(FullCodeGenerator::NO_REGISTERS));
+  __ cmpq(r10, Immediate(FullCodeGenerator::NO_REGISTERS));
   __ j(not_equal, &not_no_registers, Label::kNear);
   __ ret(1 * kPointerSize);  // Remove state.
 
   __ bind(&not_no_registers);
   __ movq(rax, Operand(rsp, 2 * kPointerSize));
-  __ cmpq(rcx, Immediate(FullCodeGenerator::TOS_REG));
+  __ cmpq(r10, Immediate(FullCodeGenerator::TOS_REG));
   __ j(not_equal, &not_tos_rax, Label::kNear);
   __ ret(2 * kPointerSize);  // Remove state, rax.
 
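The rcx-to-r10 change only swaps the scratch register holding the codegen state so that rcx stays untouched; the dispatch itself is unchanged. In rough C++ terms, the stub decides how many stack slots to drop on return (the enum values are stand-ins for FullCodeGenerator's real states):

#include <cstdio>

// Stand-ins for FullCodeGenerator::NO_REGISTERS / TOS_REG.
enum State { NO_REGISTERS, TOS_REG };

// The stub returns with ret(n * kPointerSize): one slot for the state word
// alone, two when the top-of-stack value reloaded into rax must also be
// dropped.
int SlotsToRemove(State state) {
  switch (state) {
    case NO_REGISTERS: return 1;  // remove the state word only
    case TOS_REG:      return 2;  // remove state word plus saved TOS value
  }
  return -1;  // unreachable; the real stub aborts here
}

int main() {
  std::printf("%d %d\n", SlotsToRemove(NO_REGISTERS), SlotsToRemove(TOS_REG));
  return 0;
}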
 
@@ -711,9 +809,9 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
   // receiver.
   __ bind(&use_global_receiver);
   const int kGlobalIndex =
-      Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
+      Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
   __ movq(rbx, FieldOperand(rsi, kGlobalIndex));
-  __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
+  __ movq(rbx, FieldOperand(rbx, GlobalObject::kNativeContextOffset));
   __ movq(rbx, FieldOperand(rbx, kGlobalIndex));
   __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
 
@@ -896,9 +994,9 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
   // Use the current global receiver object as the receiver.
   __ bind(&use_global_receiver);
   const int kGlobalOffset =
-      Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
+      Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
   __ movq(rbx, FieldOperand(rsi, kGlobalOffset));
-  __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
+  __ movq(rbx, FieldOperand(rbx, GlobalObject::kNativeContextOffset));
   __ movq(rbx, FieldOperand(rbx, kGlobalOffset));
   __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
 
--- a/data/vendor/v8/src/x64/code-stubs-x64.cc
+++ b/data/vendor/v8/src/x64/code-stubs-x64.cc
@@ -32,10 +32,36 @@
 #include "bootstrapper.h"
 #include "code-stubs.h"
 #include "regexp-macro-assembler.h"
+#include "stub-cache.h"
+#include "runtime.h"
 
 namespace v8 {
 namespace internal {
 
+
+void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { rdx, rax };
+  descriptor->register_param_count_ = 2;
+  descriptor->register_params_ = registers;
+  descriptor->stack_parameter_count_ = NULL;
+  descriptor->deoptimization_handler_ =
+      FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure);
+}
+
+
+void TransitionElementsKindStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { rax, rbx };
+  descriptor->register_param_count_ = 2;
+  descriptor->register_params_ = registers;
+  descriptor->deoptimization_handler_ =
+      Runtime::FunctionForId(Runtime::kTransitionElementsKind)->entry;
+}
+
+
 #define __ ACCESS_MASM(masm)
 
 void ToNumberStub::Generate(MacroAssembler* masm) {
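The two InitializeInterfaceDescriptor overrides are the new Hydrogen-stub plumbing: each records which registers carry the stub's parameters and which handler to deoptimize to on a miss. A self-contained sketch of the descriptor-filling pattern (field names mirror the diff, but Register and the register codes here are simplified stand-ins):

#include <cstdio>

using Register = int;                  // stand-in for V8's Register type
constexpr Register rdx = 2, rax = 0;   // illustrative x64 register codes

struct CodeStubInterfaceDescriptor {
  int register_param_count_ = 0;
  Register* register_params_ = nullptr;
  void* stack_parameter_count_ = nullptr;       // NULL: no stack parameters
  const void* deoptimization_handler_ = nullptr;
};

// Mirrors KeyedLoadFastElementStub::InitializeInterfaceDescriptor above:
// two register parameters, and a miss handler to fall back to.
void InitKeyedLoadDescriptor(CodeStubInterfaceDescriptor* d,
                             const void* miss_handler) {
  static Register registers[] = { rdx, rax };   // receiver, key
  d->register_param_count_ = 2;
  d->register_params_ = registers;
  d->stack_parameter_count_ = nullptr;
  d->deoptimization_handler_ = miss_handler;
}

int main() {
  CodeStubInterfaceDescriptor d;
  InitKeyedLoadDescriptor(&d, nullptr);
  std::printf("params in registers: %d\n", d.register_param_count_);
  return 0;
}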
@@ -62,9 +88,13 @@ void ToNumberStub::Generate(MacroAssembler* masm) {
 void FastNewClosureStub::Generate(MacroAssembler* masm) {
   // Create a new closure from the given function info in new
   // space. Set the context to the current context in rsi.
+  Counters* counters = masm->isolate()->counters();
+
   Label gc;
   __ AllocateInNewSpace(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT);
 
+  __ IncrementCounter(counters->fast_new_closure_total(), 1);
+
   // Get the function info from the stack.
   __ movq(rdx, Operand(rsp, 1 * kPointerSize));
 
@@ -72,36 +102,113 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
72
102
  ? Context::FUNCTION_MAP_INDEX
73
103
  : Context::STRICT_MODE_FUNCTION_MAP_INDEX;
74
104
 
75
- // Compute the function map in the current global context and set that
105
+ // Compute the function map in the current native context and set that
76
106
  // as the map of the allocated object.
77
- __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
78
- __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset));
79
- __ movq(rcx, Operand(rcx, Context::SlotOffset(map_index)));
80
- __ movq(FieldOperand(rax, JSObject::kMapOffset), rcx);
107
+ __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
108
+ __ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset));
109
+ __ movq(rbx, Operand(rcx, Context::SlotOffset(map_index)));
110
+ __ movq(FieldOperand(rax, JSObject::kMapOffset), rbx);
81
111
 
82
112
  // Initialize the rest of the function. We don't have to update the
83
113
  // write barrier because the allocated object is in new space.
84
114
  __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
85
- __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex);
115
+ __ LoadRoot(r8, Heap::kTheHoleValueRootIndex);
86
116
  __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
87
117
  __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), rbx);
88
118
  __ movq(FieldOperand(rax, JSObject::kElementsOffset), rbx);
- __ movq(FieldOperand(rax, JSFunction::kPrototypeOrInitialMapOffset), rcx);
+ __ movq(FieldOperand(rax, JSFunction::kPrototypeOrInitialMapOffset), r8);
  __ movq(FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset), rdx);
  __ movq(FieldOperand(rax, JSFunction::kContextOffset), rsi);
  __ movq(FieldOperand(rax, JSFunction::kLiteralsOffset), rbx);
- __ movq(FieldOperand(rax, JSFunction::kNextFunctionLinkOffset), rdi);

  // Initialize the code pointer in the function to be the one
  // found in the shared function info object.
+ // But first check if there is an optimized version for our context.
+ Label check_optimized;
+ Label install_unoptimized;
+ if (FLAG_cache_optimized_code) {
+ __ movq(rbx,
+ FieldOperand(rdx, SharedFunctionInfo::kOptimizedCodeMapOffset));
+ __ testq(rbx, rbx);
+ __ j(not_zero, &check_optimized, Label::kNear);
+ }
+ __ bind(&install_unoptimized);
+ __ movq(FieldOperand(rax, JSFunction::kNextFunctionLinkOffset),
+ rdi); // Initialize with undefined.
  __ movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
  __ lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
  __ movq(FieldOperand(rax, JSFunction::kCodeEntryOffset), rdx);

+ // Return and remove the on-stack parameter.
+ __ ret(1 * kPointerSize);
+
+ __ bind(&check_optimized);
+
+ __ IncrementCounter(counters->fast_new_closure_try_optimized(), 1);
+
+ // rcx holds native context, rbx points to fixed array of 3-element entries
+ // (native context, optimized code, literals).
+ // The optimized code map must never be empty, so check the first entry.
+ Label install_optimized;
+ // Speculatively move code object into rdx.
+ __ movq(rdx, FieldOperand(rbx, FixedArray::kHeaderSize + kPointerSize));
+ __ cmpq(rcx, FieldOperand(rbx, FixedArray::kHeaderSize));
+ __ j(equal, &install_optimized);
+
+ // Iterate through the rest of the map backwards. rdx holds an index.
+ Label loop;
+ Label restore;
+ __ movq(rdx, FieldOperand(rbx, FixedArray::kLengthOffset));
+ __ SmiToInteger32(rdx, rdx);
+ __ bind(&loop);
+ // Do not double check first entry.
+ __ cmpq(rdx, Immediate(SharedFunctionInfo::kEntryLength));
+ __ j(equal, &restore);
+ __ subq(rdx, Immediate(SharedFunctionInfo::kEntryLength)); // Skip an entry.
+ __ cmpq(rcx, FieldOperand(rbx,
+ rdx,
+ times_pointer_size,
+ FixedArray::kHeaderSize));
+ __ j(not_equal, &loop, Label::kNear);
+ // Hit: fetch the optimized code.
+ __ movq(rdx, FieldOperand(rbx,
+ rdx,
+ times_pointer_size,
+ FixedArray::kHeaderSize + 1 * kPointerSize));
+
+ __ bind(&install_optimized);
+ __ IncrementCounter(counters->fast_new_closure_install_optimized(), 1);
+
+ // TODO(fschneider): Idea: store proper code pointers in the map and either
+ // unmangle them on marking or do nothing as the whole map is discarded on
+ // major GC anyway.
+ __ lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
+ __ movq(FieldOperand(rax, JSFunction::kCodeEntryOffset), rdx);
+
+ // Now link a function into a list of optimized functions.
+ __ movq(rdx, ContextOperand(rcx, Context::OPTIMIZED_FUNCTIONS_LIST));
+
+ __ movq(FieldOperand(rax, JSFunction::kNextFunctionLinkOffset), rdx);
+ // No need for write barrier as JSFunction (rax) is in the new space.
+
+ __ movq(ContextOperand(rcx, Context::OPTIMIZED_FUNCTIONS_LIST), rax);
+ // Store JSFunction (rax) into rdx before issuing write barrier as
+ // it clobbers all the registers passed.
+ __ movq(rdx, rax);
+ __ RecordWriteContextSlot(
+ rcx,
+ Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST),
+ rdx,
+ rbx,
+ kDontSaveFPRegs);

  // Return and remove the on-stack parameter.
  __ ret(1 * kPointerSize);

+ __ bind(&restore);
+ __ movq(rdx, Operand(rsp, 1 * kPointerSize));
+ __ jmp(&install_unoptimized);
+
  // Create a new closure through the slower runtime call.
  __ bind(&gc);
  __ pop(rcx); // Temporarily remove return address.
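The block added above scans the SharedFunctionInfo's optimized code map: a flat FixedArray of (native context, optimized code, literals) triples, where the first entry is tried before a backwards scan of the rest. A minimal standalone C++ sketch of that lookup, using a hypothetical flat-array layout in place of the real heap objects:

    #include <cstddef>
    #include <vector>

    // Flat model of the optimized code map: repeated
    // [native context, optimized code, literals] triples in one array.
    static const std::size_t kEntryLength = 3;

    const void* FindOptimizedCode(const std::vector<const void*>& map,
                                  const void* native_context) {
      if (map.empty()) return nullptr;              // no map: use unoptimized code
      if (map[0] == native_context) return map[1];  // first entry is tried first
      // Iterate through the rest of the map backwards, one triple at a time.
      for (std::size_t i = map.size(); i > kEntryLength; ) {
        i -= kEntryLength;                          // skip an entry
        if (map[i] == native_context) return map[i + 1];  // hit: fetch the code
      }
      return nullptr;                               // miss: install unoptimized code
    }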
@@ -136,8 +243,8 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
  __ movq(Operand(rax, Context::SlotOffset(Context::EXTENSION_INDEX)), rbx);

  // Copy the global object from the previous context.
- __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
- __ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_INDEX)), rbx);
+ __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ __ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)), rbx);

  // Initialize the rest of the slots to undefined.
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
@@ -178,9 +285,9 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
  __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
  __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));

- // If this block context is nested in the global context we get a smi
+ // If this block context is nested in the native context we get a smi
  // sentinel instead of a function. The block context should get the
- // canonical empty function of the global context as its closure which
+ // canonical empty function of the native context as its closure which
  // we still have to look up.
  Label after_sentinel;
  __ JumpIfNotSmi(rcx, &after_sentinel, Label::kNear);
@@ -190,7 +297,7 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
  __ Assert(equal, message);
  }
  __ movq(rcx, GlobalObjectOperand());
- __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset));
+ __ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset));
  __ movq(rcx, ContextOperand(rcx, Context::CLOSURE_INDEX));
  __ bind(&after_sentinel);

@@ -200,8 +307,8 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
  __ movq(ContextOperand(rax, Context::EXTENSION_INDEX), rbx);

  // Copy the global object from the previous context.
- __ movq(rbx, ContextOperand(rsi, Context::GLOBAL_INDEX));
- __ movq(ContextOperand(rax, Context::GLOBAL_INDEX), rbx);
+ __ movq(rbx, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
+ __ movq(ContextOperand(rax, Context::GLOBAL_OBJECT_INDEX), rbx);

  // Initialize the rest of the slots to the hole value.
  __ LoadRoot(rbx, Heap::kTheHoleValueRootIndex);
@@ -223,6 +330,7 @@ static void GenerateFastCloneShallowArrayCommon(
  MacroAssembler* masm,
  int length,
  FastCloneShallowArrayStub::Mode mode,
+ AllocationSiteMode allocation_site_mode,
  Label* fail) {
  // Registers on entry:
  //
@@ -236,11 +344,26 @@ static void GenerateFastCloneShallowArrayCommon(
  ? FixedDoubleArray::SizeFor(length)
  : FixedArray::SizeFor(length);
  }
- int size = JSArray::kSize + elements_size;
+ int size = JSArray::kSize;
+ int allocation_info_start = size;
+ if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
+ size += AllocationSiteInfo::kSize;
+ }
+ size += elements_size;

  // Allocate both the JS array and the elements array in one big
  // allocation. This avoids multiple limit checks.
- __ AllocateInNewSpace(size, rax, rbx, rdx, fail, TAG_OBJECT);
+ AllocationFlags flags = TAG_OBJECT;
+ if (mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS) {
+ flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags);
+ }
+ __ AllocateInNewSpace(size, rax, rbx, rdx, fail, flags);
+
+ if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
+ __ LoadRoot(kScratchRegister, Heap::kAllocationSiteInfoMapRootIndex);
+ __ movq(FieldOperand(rax, allocation_info_start), kScratchRegister);
+ __ movq(FieldOperand(rax, allocation_info_start + kPointerSize), rcx);
+ }

  // Copy the JS array part.
  for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
@@ -254,7 +377,11 @@ static void GenerateFastCloneShallowArrayCommon(
  // Get hold of the elements array of the boilerplate and set up the
  // elements pointer in the resulting object.
  __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
- __ lea(rdx, Operand(rax, JSArray::kSize));
+ if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
+ __ lea(rdx, Operand(rax, JSArray::kSize + AllocationSiteInfo::kSize));
+ } else {
+ __ lea(rdx, Operand(rax, JSArray::kSize));
+ }
  __ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx);

  // Copy the elements array.
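When allocation-site tracking is on, the hunk above places an AllocationSiteInfo record between the JSArray header and the elements store, and the elements pointer must skip it. A small C++ sketch of the size/offset arithmetic, with made-up sizes standing in for JSArray::kSize and AllocationSiteInfo::kSize:

    #include <cstdio>

    int main() {
      // Hypothetical sizes; the real constants live in the V8 object layout.
      const int kPointerSize = 8;
      const int kJSArraySize = 4 * kPointerSize;             // assumed header size
      const int kAllocationSiteInfoSize = 2 * kPointerSize;  // map + payload
      const int elements_size = 16 * kPointerSize;           // example FixedArray

      bool track_allocation_site = true;
      int size = kJSArraySize;
      int allocation_info_start = size;  // site info sits right after the array
      if (track_allocation_site) size += kAllocationSiteInfoSize;
      size += elements_size;             // elements go last

      // The elements pointer must skip the site info when it is present.
      int elements_start =
          kJSArraySize + (track_allocation_site ? kAllocationSiteInfoSize : 0);
      std::printf("alloc %d bytes, info at %d, elements at %d\n",
                  size, allocation_info_start, elements_start);
    }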
@@ -307,16 +434,18 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
  __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
  factory->fixed_cow_array_map());
  __ j(not_equal, &check_fast_elements);
- GenerateFastCloneShallowArrayCommon(masm, 0,
- COPY_ON_WRITE_ELEMENTS, &slow_case);
+ GenerateFastCloneShallowArrayCommon(masm, 0, COPY_ON_WRITE_ELEMENTS,
+ allocation_site_mode_,
+ &slow_case);
  __ ret(3 * kPointerSize);

  __ bind(&check_fast_elements);
  __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
  factory->fixed_array_map());
  __ j(not_equal, &double_elements);
- GenerateFastCloneShallowArrayCommon(masm, length_,
- CLONE_ELEMENTS, &slow_case);
+ GenerateFastCloneShallowArrayCommon(masm, length_, CLONE_ELEMENTS,
+ allocation_site_mode_,
+ &slow_case);
  __ ret(3 * kPointerSize);

  __ bind(&double_elements);
@@ -346,7 +475,9 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
  __ pop(rcx);
  }

- GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case);
+ GenerateFastCloneShallowArrayCommon(masm, length_, mode,
+ allocation_site_mode_,
+ &slow_case);
  __ ret(3 * kPointerSize);

  __ bind(&slow_case);
@@ -556,6 +687,10 @@ void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) {

  class FloatingPointHelper : public AllStatic {
  public:
+ enum ConvertUndefined {
+ CONVERT_UNDEFINED_TO_ZERO,
+ BAILOUT_ON_UNDEFINED
+ };
  // Load the operands from rdx and rax into xmm0 and xmm1, as doubles.
  // If the operands are not both numbers, jump to not_numbers.
  // Leaves rdx and rax unchanged. SmiOperands assumes both are smis.
@@ -591,7 +726,8 @@ class FloatingPointHelper : public AllStatic {
  Register scratch2,
  Register scratch3,
  Label* on_success,
- Label* on_not_smis);
+ Label* on_not_smis,
+ ConvertUndefined convert_undefined);
  };

@@ -916,16 +1052,15 @@ void UnaryOpStub::PrintName(StringStream* stream) {
  }

+ void BinaryOpStub::Initialize() {}
+
+
  void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ pop(rcx); // Save return address.
  __ push(rdx);
  __ push(rax);
  // Left and right arguments are now on top.
- // Push this stub's key. Although the operation and the type info are
- // encoded into the key, the encoding is opaque, so push them too.
  __ Push(Smi::FromInt(MinorKey()));
- __ Push(Smi::FromInt(op_));
- __ Push(Smi::FromInt(operands_type_));

  __ push(rcx); // Push return address.

@@ -934,80 +1069,27 @@ void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ TailCallExternalReference(
  ExternalReference(IC_Utility(IC::kBinaryOp_Patch),
  masm->isolate()),
- 5,
+ 3,
  1);
  }


- void BinaryOpStub::Generate(MacroAssembler* masm) {
- // Explicitly allow generation of nested stubs. It is safe here because
- // generation code does not use any raw pointers.
- AllowStubCallsScope allow_stub_calls(masm, true);
-
- switch (operands_type_) {
- case BinaryOpIC::UNINITIALIZED:
- GenerateTypeTransition(masm);
- break;
- case BinaryOpIC::SMI:
- GenerateSmiStub(masm);
- break;
- case BinaryOpIC::INT32:
- UNREACHABLE();
- // The int32 case is identical to the Smi case. We avoid creating this
- // ic state on x64.
- break;
- case BinaryOpIC::HEAP_NUMBER:
- GenerateHeapNumberStub(masm);
- break;
- case BinaryOpIC::ODDBALL:
- GenerateOddballStub(masm);
- break;
- case BinaryOpIC::BOTH_STRING:
- GenerateBothStringStub(masm);
- break;
- case BinaryOpIC::STRING:
- GenerateStringStub(masm);
- break;
- case BinaryOpIC::GENERIC:
- GenerateGeneric(masm);
- break;
- default:
- UNREACHABLE();
- }
- }
-
-
- void BinaryOpStub::PrintName(StringStream* stream) {
- const char* op_name = Token::Name(op_);
- const char* overwrite_name;
- switch (mode_) {
- case NO_OVERWRITE: overwrite_name = "Alloc"; break;
- case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
- case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
- default: overwrite_name = "UnknownOverwrite"; break;
- }
- stream->Add("BinaryOpStub_%s_%s_%s",
- op_name,
- overwrite_name,
- BinaryOpIC::GetName(operands_type_));
- }
-
-
- void BinaryOpStub::GenerateSmiCode(
+ static void BinaryOpStub_GenerateSmiCode(
  MacroAssembler* masm,
  Label* slow,
- SmiCodeGenerateHeapNumberResults allow_heapnumber_results) {
+ BinaryOpStub::SmiCodeGenerateHeapNumberResults allow_heapnumber_results,
+ Token::Value op) {

  // Arguments to BinaryOpStub are in rdx and rax.
- Register left = rdx;
- Register right = rax;
+ const Register left = rdx;
+ const Register right = rax;

  // We only generate heapnumber answers for overflowing calculations
  // for the four basic arithmetic operations and logical right shift by 0.
  bool generate_inline_heapnumber_results =
- (allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS) &&
- (op_ == Token::ADD || op_ == Token::SUB ||
- op_ == Token::MUL || op_ == Token::DIV || op_ == Token::SHR);
+ (allow_heapnumber_results == BinaryOpStub::ALLOW_HEAPNUMBER_RESULTS) &&
+ (op == Token::ADD || op == Token::SUB ||
+ op == Token::MUL || op == Token::DIV || op == Token::SHR);

  // Smi check of both operands. If op is BIT_OR, the check is delayed
  // until after the OR operation.
@@ -1015,7 +1097,7 @@ void BinaryOpStub::GenerateSmiCode(
  Label use_fp_on_smis;
  Label fail;

- if (op_ != Token::BIT_OR) {
+ if (op != Token::BIT_OR) {
  Comment smi_check_comment(masm, "-- Smi check arguments");
  __ JumpIfNotBothSmi(left, right, &not_smis);
  }
@@ -1024,7 +1106,7 @@ void BinaryOpStub::GenerateSmiCode(
  __ bind(&smi_values);
  // Perform the operation.
  Comment perform_smi(masm, "-- Perform smi operation");
- switch (op_) {
+ switch (op) {
  case Token::ADD:
  ASSERT(right.is(rax));
  __ SmiAdd(right, right, left, &use_fp_on_smis); // ADD is commutative.
@@ -1042,20 +1124,16 @@ void BinaryOpStub::GenerateSmiCode(

  case Token::DIV:
  // SmiDiv will not accept left in rdx or right in rax.
- left = rcx;
- right = rbx;
  __ movq(rbx, rax);
  __ movq(rcx, rdx);
- __ SmiDiv(rax, left, right, &use_fp_on_smis);
+ __ SmiDiv(rax, rcx, rbx, &use_fp_on_smis);
  break;

  case Token::MOD:
  // SmiMod will not accept left in rdx or right in rax.
- left = rcx;
- right = rbx;
  __ movq(rbx, rax);
  __ movq(rcx, rdx);
- __ SmiMod(rax, left, right, &use_fp_on_smis);
+ __ SmiMod(rax, rcx, rbx, &use_fp_on_smis);
  break;

  case Token::BIT_OR: {
@@ -1100,7 +1178,7 @@ void BinaryOpStub::GenerateSmiCode(
  // operations on known smis (e.g., if the result of the operation
  // overflowed the smi range).
  __ bind(&use_fp_on_smis);
- if (op_ == Token::DIV || op_ == Token::MOD) {
+ if (op == Token::DIV || op == Token::MOD) {
  // Restore left and right to rdx and rax.
  __ movq(rdx, rcx);
  __ movq(rax, rbx);
@@ -1109,12 +1187,12 @@ void BinaryOpStub::GenerateSmiCode(
  if (generate_inline_heapnumber_results) {
  __ AllocateHeapNumber(rcx, rbx, slow);
  Comment perform_float(masm, "-- Perform float operation on smis");
- if (op_ == Token::SHR) {
+ if (op == Token::SHR) {
  __ SmiToInteger32(left, left);
  __ cvtqsi2sd(xmm0, left);
  } else {
  FloatingPointHelper::LoadSSE2SmiOperands(masm);
- switch (op_) {
+ switch (op) {
  case Token::ADD: __ addsd(xmm0, xmm1); break;
  case Token::SUB: __ subsd(xmm0, xmm1); break;
  case Token::MUL: __ mulsd(xmm0, xmm1); break;
@@ -1137,31 +1215,50 @@ void BinaryOpStub::GenerateSmiCode(
  // values that could be smi.
  __ bind(&not_smis);
  Comment done_comment(masm, "-- Enter non-smi code");
+ FloatingPointHelper::ConvertUndefined convert_undefined =
+ FloatingPointHelper::BAILOUT_ON_UNDEFINED;
+ // This list must be in sync with BinaryOpPatch() behavior in ic.cc.
+ if (op == Token::BIT_AND ||
+ op == Token::BIT_OR ||
+ op == Token::BIT_XOR ||
+ op == Token::SAR ||
+ op == Token::SHL ||
+ op == Token::SHR) {
+ convert_undefined = FloatingPointHelper::CONVERT_UNDEFINED_TO_ZERO;
+ }
  FloatingPointHelper::NumbersToSmis(masm, left, right, rbx, rdi, rcx,
- &smi_values, &fail);
+ &smi_values, &fail, convert_undefined);
  __ jmp(&smi_values);
  __ bind(&fail);
  }


- void BinaryOpStub::GenerateFloatingPointCode(MacroAssembler* masm,
- Label* allocation_failure,
- Label* non_numeric_failure) {
- switch (op_) {
+ static void BinaryOpStub_GenerateHeapResultAllocation(MacroAssembler* masm,
+ Label* alloc_failure,
+ OverwriteMode mode);
+
+
+ static void BinaryOpStub_GenerateFloatingPointCode(MacroAssembler* masm,
+ Label* allocation_failure,
+ Label* non_numeric_failure,
+ Token::Value op,
+ OverwriteMode mode) {
+ switch (op) {
  case Token::ADD:
  case Token::SUB:
  case Token::MUL:
  case Token::DIV: {
  FloatingPointHelper::LoadSSE2UnknownOperands(masm, non_numeric_failure);

- switch (op_) {
+ switch (op) {
  case Token::ADD: __ addsd(xmm0, xmm1); break;
  case Token::SUB: __ subsd(xmm0, xmm1); break;
  case Token::MUL: __ mulsd(xmm0, xmm1); break;
  case Token::DIV: __ divsd(xmm0, xmm1); break;
  default: UNREACHABLE();
  }
- GenerateHeapResultAllocation(masm, allocation_failure);
+ BinaryOpStub_GenerateHeapResultAllocation(
+ masm, allocation_failure, mode);
  __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0);
  __ ret(0);
  break;
@@ -1182,7 +1279,7 @@ void BinaryOpStub::GenerateFloatingPointCode(MacroAssembler* masm,
  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  FloatingPointHelper::LoadAsIntegers(masm, non_numeric_failure,
  heap_number_map);
- switch (op_) {
+ switch (op) {
  case Token::BIT_OR: __ orl(rax, rcx); break;
  case Token::BIT_AND: __ andl(rax, rcx); break;
  case Token::BIT_XOR: __ xorl(rax, rcx); break;
1206
1303
  // Logical shift right can produce an unsigned int32 that is not
1207
1304
  // an int32, and so is not in the smi range. Allocate a heap number
1208
1305
  // in that case.
1209
- if (op_ == Token::SHR) {
1306
+ if (op == Token::SHR) {
1210
1307
  __ bind(&non_smi_shr_result);
1211
1308
  Label allocation_failed;
1212
1309
  __ movl(rbx, rax); // rbx holds result value (uint32 value as int64).
@@ -1220,11 +1317,9 @@ void BinaryOpStub::GenerateFloatingPointCode(MacroAssembler* masm,
1220
1317
  &allocation_failed,
1221
1318
  TAG_OBJECT);
1222
1319
  // Set the map.
1223
- if (FLAG_debug_code) {
1224
- __ AbortIfNotRootValue(heap_number_map,
1225
- Heap::kHeapNumberMapRootIndex,
1226
- "HeapNumberMap register clobbered.");
1227
- }
1320
+ __ AssertRootValue(heap_number_map,
1321
+ Heap::kHeapNumberMapRootIndex,
1322
+ "HeapNumberMap register clobbered.");
1228
1323
  __ movq(FieldOperand(rax, HeapObject::kMapOffset),
1229
1324
  heap_number_map);
1230
1325
  __ cvtqsi2sd(xmm0, rbx);
@@ -1245,12 +1340,12 @@ void BinaryOpStub::GenerateFloatingPointCode(MacroAssembler* masm,
1245
1340
  // No fall-through from this generated code.
1246
1341
  if (FLAG_debug_code) {
1247
1342
  __ Abort("Unexpected fall-through in "
1248
- "BinaryStub::GenerateFloatingPointCode.");
1343
+ "BinaryStub_GenerateFloatingPointCode.");
1249
1344
  }
1250
1345
  }
1251
1346
 
1252
1347
 
1253
- void BinaryOpStub::GenerateStringAddCode(MacroAssembler* masm) {
1348
+ void BinaryOpStub::GenerateAddStrings(MacroAssembler* masm) {
1254
1349
  ASSERT(op_ == Token::ADD);
1255
1350
  Label left_not_string, call_runtime;
1256
1351
 
@@ -1281,58 +1376,17 @@ void BinaryOpStub::GenerateStringAddCode(MacroAssembler* masm) {
  }


- void BinaryOpStub::GenerateCallRuntimeCode(MacroAssembler* masm) {
- GenerateRegisterArgsPush(masm);
- switch (op_) {
- case Token::ADD:
- __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
- break;
- case Token::SUB:
- __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
- break;
- case Token::MUL:
- __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
- break;
- case Token::DIV:
- __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
- break;
- case Token::MOD:
- __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
- break;
- case Token::BIT_OR:
- __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
- break;
- case Token::BIT_AND:
- __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
- break;
- case Token::BIT_XOR:
- __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
- break;
- case Token::SAR:
- __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
- break;
- case Token::SHL:
- __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
- break;
- case Token::SHR:
- __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
- break;
- default:
- UNREACHABLE();
- }
- }
-
-
  void BinaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
  Label call_runtime;
  if (result_type_ == BinaryOpIC::UNINITIALIZED ||
  result_type_ == BinaryOpIC::SMI) {
  // Only allow smi results.
- GenerateSmiCode(masm, NULL, NO_HEAPNUMBER_RESULTS);
+ BinaryOpStub_GenerateSmiCode(masm, NULL, NO_HEAPNUMBER_RESULTS, op_);
  } else {
  // Allow heap number result and don't make a transition if a heap number
  // cannot be allocated.
- GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);
+ BinaryOpStub_GenerateSmiCode(
+ masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS, op_);
  }

  // Code falls through if the result is not returned as either a smi or heap
@@ -1341,24 +1395,22 @@ void BinaryOpStub::GenerateSmiStub(MacroAssembler* masm) {

  if (call_runtime.is_linked()) {
  __ bind(&call_runtime);
- GenerateCallRuntimeCode(masm);
+ GenerateRegisterArgsPush(masm);
+ GenerateCallRuntime(masm);
  }
  }


- void BinaryOpStub::GenerateStringStub(MacroAssembler* masm) {
- ASSERT(operands_type_ == BinaryOpIC::STRING);
- ASSERT(op_ == Token::ADD);
- GenerateStringAddCode(masm);
- // Try to add arguments as strings, otherwise, transition to the generic
- // BinaryOpIC type.
- GenerateTypeTransition(masm);
+ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
+ // The int32 case is identical to the Smi case. We avoid creating this
+ // ic state on x64.
+ UNREACHABLE();
  }


  void BinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) {
  Label call_runtime;
- ASSERT(operands_type_ == BinaryOpIC::BOTH_STRING);
+ ASSERT(left_type_ == BinaryOpIC::STRING && right_type_ == BinaryOpIC::STRING);
  ASSERT(op_ == Token::ADD);
  // If both arguments are strings, call the string add stub.
  // Otherwise, do a transition.
@@ -1392,7 +1444,7 @@ void BinaryOpStub::GenerateOddballStub(MacroAssembler* masm) {
  if (op_ == Token::ADD) {
  // Handle string addition here, because it is the only operation
  // that does not do a ToNumber conversion on the operands.
- GenerateStringAddCode(masm);
+ GenerateAddStrings(masm);
  }

  // Convert oddball arguments to numbers.
@@ -1419,39 +1471,79 @@ void BinaryOpStub::GenerateOddballStub(MacroAssembler* masm) {
  }


+ static void BinaryOpStub_CheckSmiInput(MacroAssembler* masm,
+ Register input,
+ Label* fail) {
+ Label ok;
+ __ JumpIfSmi(input, &ok, Label::kNear);
+ Register heap_number_map = r8;
+ Register scratch1 = r9;
+ Register scratch2 = r10;
+ // HeapNumbers containing 32bit integer values are also allowed.
+ __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
+ __ cmpq(FieldOperand(input, HeapObject::kMapOffset), heap_number_map);
+ __ j(not_equal, fail);
+ __ movsd(xmm0, FieldOperand(input, HeapNumber::kValueOffset));
+ // Convert, convert back, and compare the two doubles' bits.
+ __ cvttsd2siq(scratch2, xmm0);
+ __ cvtlsi2sd(xmm1, scratch2);
+ __ movq(scratch1, xmm0);
+ __ movq(scratch2, xmm1);
+ __ cmpq(scratch1, scratch2);
+ __ j(not_equal, fail);
+ __ bind(&ok);
+ }
+
+
  void BinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
  Label gc_required, not_number;
- GenerateFloatingPointCode(masm, &gc_required, &not_number);
+
+ // It could be that only SMIs have been seen at either the left
+ // or the right operand. For precise type feedback, patch the IC
+ // again if this changes.
+ if (left_type_ == BinaryOpIC::SMI) {
+ BinaryOpStub_CheckSmiInput(masm, rdx, &not_number);
+ }
+ if (right_type_ == BinaryOpIC::SMI) {
+ BinaryOpStub_CheckSmiInput(masm, rax, &not_number);
+ }
+
+ BinaryOpStub_GenerateFloatingPointCode(
+ masm, &gc_required, &not_number, op_, mode_);

  __ bind(&not_number);
  GenerateTypeTransition(masm);

  __ bind(&gc_required);
- GenerateCallRuntimeCode(masm);
+ GenerateRegisterArgsPush(masm);
+ GenerateCallRuntime(masm);
  }


  void BinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
  Label call_runtime, call_string_add_or_runtime;

- GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);
+ BinaryOpStub_GenerateSmiCode(
+ masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS, op_);

- GenerateFloatingPointCode(masm, &call_runtime, &call_string_add_or_runtime);
+ BinaryOpStub_GenerateFloatingPointCode(
+ masm, &call_runtime, &call_string_add_or_runtime, op_, mode_);

  __ bind(&call_string_add_or_runtime);
  if (op_ == Token::ADD) {
- GenerateStringAddCode(masm);
+ GenerateAddStrings(masm);
  }

  __ bind(&call_runtime);
- GenerateCallRuntimeCode(masm);
+ GenerateRegisterArgsPush(masm);
+ GenerateCallRuntime(masm);
  }


- void BinaryOpStub::GenerateHeapResultAllocation(MacroAssembler* masm,
- Label* alloc_failure) {
+ static void BinaryOpStub_GenerateHeapResultAllocation(MacroAssembler* masm,
+ Label* alloc_failure,
+ OverwriteMode mode) {
  Label skip_allocation;
- OverwriteMode mode = mode_;
  switch (mode) {
  case OVERWRITE_LEFT: {
  // If the argument in rdx is already an object, we skip the
  // If the argument in rdx is already an object, we skip the
@@ -1947,17 +2039,21 @@ void FloatingPointHelper::NumbersToSmis(MacroAssembler* masm,
1947
2039
  Register scratch2,
1948
2040
  Register scratch3,
1949
2041
  Label* on_success,
1950
- Label* on_not_smis) {
2042
+ Label* on_not_smis,
2043
+ ConvertUndefined convert_undefined) {
1951
2044
  Register heap_number_map = scratch3;
1952
2045
  Register smi_result = scratch1;
1953
- Label done;
2046
+ Label done, maybe_undefined_first, maybe_undefined_second, first_done;
1954
2047
 
1955
2048
  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
1956
2049
 
1957
2050
  Label first_smi;
1958
2051
  __ JumpIfSmi(first, &first_smi, Label::kNear);
1959
2052
  __ cmpq(FieldOperand(first, HeapObject::kMapOffset), heap_number_map);
1960
- __ j(not_equal, on_not_smis);
2053
+ __ j(not_equal,
2054
+ (convert_undefined == CONVERT_UNDEFINED_TO_ZERO)
2055
+ ? &maybe_undefined_first
2056
+ : on_not_smis);
1961
2057
  // Convert HeapNumber to smi if possible.
1962
2058
  __ movsd(xmm0, FieldOperand(first, HeapNumber::kValueOffset));
1963
2059
  __ movq(scratch2, xmm0);
@@ -1970,14 +2066,15 @@ void FloatingPointHelper::NumbersToSmis(MacroAssembler* masm,
1970
2066
  __ j(not_equal, on_not_smis);
1971
2067
  __ Integer32ToSmi(first, smi_result);
1972
2068
 
2069
+ __ bind(&first_done);
1973
2070
  __ JumpIfSmi(second, (on_success != NULL) ? on_success : &done);
1974
2071
  __ bind(&first_smi);
1975
- if (FLAG_debug_code) {
1976
- // Second should be non-smi if we get here.
1977
- __ AbortIfSmi(second);
1978
- }
2072
+ __ AssertNotSmi(second);
1979
2073
  __ cmpq(FieldOperand(second, HeapObject::kMapOffset), heap_number_map);
1980
- __ j(not_equal, on_not_smis);
2074
+ __ j(not_equal,
2075
+ (convert_undefined == CONVERT_UNDEFINED_TO_ZERO)
2076
+ ? &maybe_undefined_second
2077
+ : on_not_smis);
1981
2078
  // Convert second to smi, if possible.
1982
2079
  __ movsd(xmm0, FieldOperand(second, HeapNumber::kValueOffset));
1983
2080
  __ movq(scratch2, xmm0);
@@ -1990,8 +2087,25 @@ void FloatingPointHelper::NumbersToSmis(MacroAssembler* masm,
1990
2087
  if (on_success != NULL) {
1991
2088
  __ jmp(on_success);
1992
2089
  } else {
1993
- __ bind(&done);
2090
+ __ jmp(&done);
2091
+ }
2092
+
2093
+ __ bind(&maybe_undefined_first);
2094
+ __ CompareRoot(first, Heap::kUndefinedValueRootIndex);
2095
+ __ j(not_equal, on_not_smis);
2096
+ __ xor_(first, first);
2097
+ __ jmp(&first_done);
2098
+
2099
+ __ bind(&maybe_undefined_second);
2100
+ __ CompareRoot(second, Heap::kUndefinedValueRootIndex);
2101
+ __ j(not_equal, on_not_smis);
2102
+ __ xor_(second, second);
2103
+ if (on_success != NULL) {
2104
+ __ jmp(on_success);
1994
2105
  }
2106
+ // Else: fall through.
2107
+
2108
+ __ bind(&done);
1995
2109
  }
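The maybe_undefined paths added above implement CONVERT_UNDEFINED_TO_ZERO: for the bitwise and shift operators, ECMAScript's ToInt32 sends undefined (via NaN) to 0, so the stub can zero the register instead of bailing out, while +, -, *, / must keep the NaN and therefore use BAILOUT_ON_UNDEFINED. A sketch of the underlying conversion rule:

    #include <cmath>
    #include <cstdint>

    // ECMAScript ToInt32: NaN (which is what undefined converts to),
    // +/-0 and +/-Infinity all map to 0; finite values truncate toward zero.
    int32_t ToInt32ForBitwiseOp(double d) {
      if (!std::isfinite(d)) return 0;  // undefined -> NaN -> 0
      // Sketch only: assumes |d| < 2^31; the full spec wraps modulo 2^32.
      return static_cast<int32_t>(d);
    }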


@@ -2072,7 +2186,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
  Label continue_sqrt, continue_rsqrt, not_plus_half;
  // Test for 0.5.
  // Load double_scratch with 0.5.
- __ movq(scratch, V8_UINT64_C(0x3FE0000000000000), RelocInfo::NONE);
+ __ movq(scratch, V8_UINT64_C(0x3FE0000000000000), RelocInfo::NONE64);
  __ movq(double_scratch, scratch);
  // Already ruled out NaNs for exponent.
  __ ucomisd(double_scratch, double_exponent);
@@ -2082,7 +2196,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
  // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
  // According to IEEE-754, double-precision -Infinity has the highest
  // 12 bits set and the lowest 52 bits cleared.
- __ movq(scratch, V8_UINT64_C(0xFFF0000000000000), RelocInfo::NONE);
+ __ movq(scratch, V8_UINT64_C(0xFFF0000000000000), RelocInfo::NONE64);
  __ movq(double_scratch, scratch);
  __ ucomisd(double_scratch, double_base);
  // Comparing -Infinity with NaN results in "unordered", which sets the
@@ -2114,7 +2228,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
  // case of Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
  // According to IEEE-754, double-precision -Infinity has the highest
  // 12 bits set and the lowest 52 bits cleared.
- __ movq(scratch, V8_UINT64_C(0xFFF0000000000000), RelocInfo::NONE);
+ __ movq(scratch, V8_UINT64_C(0xFFF0000000000000), RelocInfo::NONE64);
  __ movq(double_scratch, scratch);
  __ ucomisd(double_scratch, double_base);
  // Comparing -Infinity with NaN results in "unordered", which sets the
@@ -2157,7 +2271,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
  // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
  __ f2xm1(); // 2^(X-rnd(X)) - 1, rnd(X)
  __ fld1(); // 1, 2^(X-rnd(X)) - 1, rnd(X)
- __ faddp(1); // 1, 2^(X-rnd(X)), rnd(X)
+ __ faddp(1); // 2^(X-rnd(X)), rnd(X)
  // FSCALE calculates st(0) * 2^st(1)
  __ fscale(); // 2^X, rnd(X)
  __ fstp(1);
@@ -2185,21 +2299,28 @@ void MathPowStub::Generate(MacroAssembler* masm) {
  __ movsd(double_scratch2, double_result); // Load double_exponent with 1.

  // Get absolute value of exponent.
- Label no_neg, while_true, no_multiply;
+ Label no_neg, while_true, while_false;
  __ testl(scratch, scratch);
  __ j(positive, &no_neg, Label::kNear);
  __ negl(scratch);
  __ bind(&no_neg);

- __ bind(&while_true);
+ __ j(zero, &while_false, Label::kNear);
  __ shrl(scratch, Immediate(1));
- __ j(not_carry, &no_multiply, Label::kNear);
- __ mulsd(double_result, double_scratch);
- __ bind(&no_multiply);
+ // Above condition means CF==0 && ZF==0. This means that the
+ // bit that has been shifted out is 0 and the result is not 0.
+ __ j(above, &while_true, Label::kNear);
+ __ movsd(double_result, double_scratch);
+ __ j(zero, &while_false, Label::kNear);

+ __ bind(&while_true);
+ __ shrl(scratch, Immediate(1));
  __ mulsd(double_scratch, double_scratch);
+ __ j(above, &while_true, Label::kNear);
+ __ mulsd(double_result, double_scratch);
  __ j(not_zero, &while_true);

+ __ bind(&while_false);
  // If the exponent is negative, return 1/result.
  __ testl(exponent, exponent);
  __ j(greater, &done);
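The rewritten loop above is exponentiation by squaring on the absolute value of the integer exponent; the extra `j(above, ...)` tests let it skip multiplications while the remaining exponent bits are zero. The classic form of the same algorithm in C++, without that micro-optimization:

    // Exponentiation by squaring over the exponent's absolute value.
    double PowInt(double base, int exponent) {
      // Negate via unsigned arithmetic so INT_MIN is handled without UB.
      unsigned n = exponent < 0 ? 0u - static_cast<unsigned>(exponent)
                                : static_cast<unsigned>(exponent);
      double result = 1.0;    // double_result in the stub
      double scratch = base;  // double_scratch in the stub
      while (n != 0) {
        if (n & 1u) result *= scratch;  // low bit set: fold factor into result
        scratch *= scratch;             // square for the next bit
        n >>= 1u;
      }
      return exponent < 0 ? 1.0 / result : result;  // "return 1/result"
    }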
@@ -2252,6 +2373,150 @@ void MathPowStub::Generate(MacroAssembler* masm) {
  }

+ void ArrayLengthStub::Generate(MacroAssembler* masm) {
+ Label miss;
+ Register receiver;
+ if (kind() == Code::KEYED_LOAD_IC) {
+ // ----------- S t a t e -------------
+ // -- rax : key
+ // -- rdx : receiver
+ // -- rsp[0] : return address
+ // -----------------------------------
+ __ Cmp(rax, masm->isolate()->factory()->length_symbol());
+ receiver = rdx;
+ } else {
+ ASSERT(kind() == Code::LOAD_IC);
+ // ----------- S t a t e -------------
+ // -- rax : receiver
+ // -- rcx : name
+ // -- rsp[0] : return address
+ // -----------------------------------
+ receiver = rax;
+ }
+
+ StubCompiler::GenerateLoadArrayLength(masm, receiver, r8, &miss);
+ __ bind(&miss);
+ StubCompiler::GenerateLoadMiss(masm, kind());
+ }
+
+
+ void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
+ Label miss;
+ Register receiver;
+ if (kind() == Code::KEYED_LOAD_IC) {
+ // ----------- S t a t e -------------
+ // -- rax : key
+ // -- rdx : receiver
+ // -- rsp[0] : return address
+ // -----------------------------------
+ __ Cmp(rax, masm->isolate()->factory()->prototype_symbol());
+ receiver = rdx;
+ } else {
+ ASSERT(kind() == Code::LOAD_IC);
+ // ----------- S t a t e -------------
+ // -- rax : receiver
+ // -- rcx : name
+ // -- rsp[0] : return address
+ // -----------------------------------
+ receiver = rax;
+ }
+
+ StubCompiler::GenerateLoadFunctionPrototype(masm, receiver, r8, r9, &miss);
+ __ bind(&miss);
+ StubCompiler::GenerateLoadMiss(masm, kind());
+ }
+
+
+ void StringLengthStub::Generate(MacroAssembler* masm) {
+ Label miss;
+ Register receiver;
+ if (kind() == Code::KEYED_LOAD_IC) {
+ // ----------- S t a t e -------------
+ // -- rax : key
+ // -- rdx : receiver
+ // -- rsp[0] : return address
+ // -----------------------------------
+ __ Cmp(rax, masm->isolate()->factory()->length_symbol());
+ receiver = rdx;
+ } else {
+ ASSERT(kind() == Code::LOAD_IC);
+ // ----------- S t a t e -------------
+ // -- rax : receiver
+ // -- rcx : name
+ // -- rsp[0] : return address
+ // -----------------------------------
+ receiver = rax;
+ }
+
+ StubCompiler::GenerateLoadStringLength(masm, receiver, r8, r9, &miss,
+ support_wrapper_);
+ __ bind(&miss);
+ StubCompiler::GenerateLoadMiss(masm, kind());
+ }
+
+
+ void StoreArrayLengthStub::Generate(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- rax : value
+ // -- rcx : key
+ // -- rdx : receiver
+ // -- rsp[0] : return address
+ // -----------------------------------
+ //
+ // This accepts as a receiver anything JSArray::SetElementsLength accepts
+ // (currently anything except for external arrays which means anything with
+ // elements of FixedArray type). Value must be a number, but only smis are
+ // accepted as the most common case.
+
+ Label miss;
+
+ Register receiver = rdx;
+ Register value = rax;
+ Register scratch = rbx;
+ if (kind() == Code::KEYED_STORE_IC) {
+ __ Cmp(rcx, masm->isolate()->factory()->length_symbol());
+ }
+
+ // Check that the receiver isn't a smi.
+ __ JumpIfSmi(receiver, &miss);
+
+ // Check that the object is a JS array.
+ __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
+ __ j(not_equal, &miss);
+
+ // Check that elements are FixedArray.
+ // We rely on StoreIC_ArrayLength below to deal with all types of
+ // fast elements (including COW).
+ __ movq(scratch, FieldOperand(receiver, JSArray::kElementsOffset));
+ __ CmpObjectType(scratch, FIXED_ARRAY_TYPE, scratch);
+ __ j(not_equal, &miss);
+
+ // Check that the array has fast properties, otherwise the length
+ // property might have been redefined.
+ __ movq(scratch, FieldOperand(receiver, JSArray::kPropertiesOffset));
+ __ CompareRoot(FieldOperand(scratch, FixedArray::kMapOffset),
+ Heap::kHashTableMapRootIndex);
+ __ j(equal, &miss);
+
+ // Check that value is a smi.
+ __ JumpIfNotSmi(value, &miss);
+
+ // Prepare tail call to StoreIC_ArrayLength.
+ __ pop(scratch);
+ __ push(receiver);
+ __ push(value);
+ __ push(scratch); // return address
+
+ ExternalReference ref =
+ ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength), masm->isolate());
+ __ TailCallExternalReference(ref, 2, 1);
+
+ __ bind(&miss);
+
+ StubCompiler::GenerateStoreMiss(masm, kind());
+ }
+
+
  void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  // The key is in rdx and the parameter count is in rax.

@@ -2379,10 +2644,10 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {

  // rax = address of new object(s) (tagged)
  // rcx = argument count (untagged)
- // Get the arguments boilerplate from the current (global) context into rdi.
+ // Get the arguments boilerplate from the current native context into rdi.
  Label has_mapped_parameters, copy;
- __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
- __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset));
+ __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ __ movq(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset));
  __ testq(rbx, rbx);
  __ j(not_zero, &has_mapped_parameters, Label::kNear);

@@ -2525,7 +2790,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
  __ bind(&runtime);
  __ Integer32ToSmi(rcx, rcx);
  __ movq(Operand(rsp, 1 * kPointerSize), rcx); // Patch argument count.
- __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
+ __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
  }


@@ -2595,9 +2860,9 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
  // Do the allocation of both objects in one go.
  __ AllocateInNewSpace(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT);

- // Get the arguments boilerplate from the current (global) context.
- __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
- __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset));
+ // Get the arguments boilerplate from the current native context.
+ __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ __ movq(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset));
  const int offset =
  Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX);
  __ movq(rdi, Operand(rdi, offset));
@@ -2714,7 +2979,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Calculate number of capture registers (number_of_captures + 1) * 2.
  __ leal(rdx, Operand(rdx, rdx, times_1, 2));
  // Check that the static offsets vector buffer is large enough.
- __ cmpl(rdx, Immediate(OffsetsVector::kStaticOffsetsVectorSize));
+ __ cmpl(rdx, Immediate(Isolate::kJSRegexpStaticOffsetsVectorSize));
  __ j(above, &runtime);

  // rax: RegExp data (FixedArray)
@@ -2943,8 +3208,8 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
  // r15: original subject string
  __ testb(rcx, rcx); // Last use of rcx as encoding of subject string.
  __ j(zero, &setup_two_byte, Label::kNear);
- __ lea(arg4, FieldOperand(rdi, r14, times_1, SeqAsciiString::kHeaderSize));
- __ lea(arg3, FieldOperand(rdi, rbx, times_1, SeqAsciiString::kHeaderSize));
+ __ lea(arg4, FieldOperand(rdi, r14, times_1, SeqOneByteString::kHeaderSize));
+ __ lea(arg3, FieldOperand(rdi, rbx, times_1, SeqOneByteString::kHeaderSize));
  __ jmp(&setup_rest, Label::kNear);
  __ bind(&setup_two_byte);
  __ lea(arg4, FieldOperand(rdi, r14, times_2, SeqTwoByteString::kHeaderSize));
@@ -3084,7 +3349,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
  }
  __ movq(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
  // Move the pointer so that offset-wise, it looks like a sequential string.
- STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqAsciiString::kHeaderSize);
+ STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
  __ subq(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  STATIC_ASSERT(kTwoByteStringTag == 0);
  __ testb(rbx, Immediate(kStringEncodingMask));
@@ -3126,8 +3391,8 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
  // r8: Number of array elements as smi.

  // Set JSArray map to global.regexp_result_map().
- __ movq(rdx, ContextOperand(rsi, Context::GLOBAL_INDEX));
- __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalContextOffset));
+ __ movq(rdx, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
+ __ movq(rdx, FieldOperand(rdx, GlobalObject::kNativeContextOffset));
  __ movq(rdx, ContextOperand(rdx, Context::REGEXP_RESULT_MAP_INDEX));
  __ movq(FieldOperand(rax, HeapObject::kMapOffset), rdx);

@@ -3158,14 +3423,14 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
  // Set length.
  __ Integer32ToSmi(rdx, rbx);
  __ movq(FieldOperand(rcx, FixedArray::kLengthOffset), rdx);
- // Fill contents of fixed-array with the-hole.
- __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
+ // Fill contents of fixed-array with undefined.
+ __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
  __ lea(rcx, FieldOperand(rcx, FixedArray::kHeaderSize));
- // Fill fixed array elements with hole.
+ // Fill fixed array elements with undefined.
  // rax: JSArray.
  // rbx: Number of elements in array that remain to be filled, as int32.
  // rcx: Start of elements in FixedArray.
- // rdx: the hole.
+ // rdx: undefined.
  Label loop;
  __ testl(rbx, rbx);
  __ bind(&loop);
@@ -3299,30 +3564,59 @@ static int NegativeComparisonResult(Condition cc) {
  }


- void CompareStub::Generate(MacroAssembler* masm) {
- ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
+ static void CheckInputType(MacroAssembler* masm,
+ Register input,
+ CompareIC::State expected,
+ Label* fail) {
+ Label ok;
+ if (expected == CompareIC::SMI) {
+ __ JumpIfNotSmi(input, fail);
+ } else if (expected == CompareIC::HEAP_NUMBER) {
+ __ JumpIfSmi(input, &ok);
+ __ CompareMap(input, masm->isolate()->factory()->heap_number_map(), NULL);
+ __ j(not_equal, fail);
+ }
+ // We could be strict about symbol/string here, but as long as
+ // hydrogen doesn't care, the stub doesn't have to care either.
+ __ bind(&ok);
+ }
+
+
+ static void BranchIfNonSymbol(MacroAssembler* masm,
+ Label* label,
+ Register object,
+ Register scratch) {
+ __ JumpIfSmi(object, label);
+ __ movq(scratch, FieldOperand(object, HeapObject::kMapOffset));
+ __ movzxbq(scratch,
+ FieldOperand(scratch, Map::kInstanceTypeOffset));
+ // Ensure that no non-strings have the symbol bit set.
+ STATIC_ASSERT(LAST_TYPE < kNotStringTag + kIsSymbolMask);
+ STATIC_ASSERT(kSymbolTag != 0);
+ __ testb(scratch, Immediate(kIsSymbolMask));
+ __ j(zero, label);
+ }
+

+ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
  Label check_unequal_objects, done;
+ Condition cc = GetCondition();
  Factory* factory = masm->isolate()->factory();

- // Compare two smis if required.
- if (include_smi_compare_) {
- Label non_smi, smi_done;
- __ JumpIfNotBothSmi(rax, rdx, &non_smi);
- __ subq(rdx, rax);
- __ j(no_overflow, &smi_done);
- __ not_(rdx); // Correct sign in case of overflow. rdx cannot be 0 here.
- __ bind(&smi_done);
- __ movq(rax, rdx);
- __ ret(0);
- __ bind(&non_smi);
- } else if (FLAG_debug_code) {
- Label ok;
- __ JumpIfNotSmi(rdx, &ok);
- __ JumpIfNotSmi(rax, &ok);
- __ Abort("CompareStub: smi operands");
- __ bind(&ok);
- }
+ Label miss;
+ CheckInputType(masm, rdx, left_, &miss);
+ CheckInputType(masm, rax, right_, &miss);
+
+ // Compare two smis.
+ Label non_smi, smi_done;
+ __ JumpIfNotBothSmi(rax, rdx, &non_smi);
+ __ subq(rdx, rax);
+ __ j(no_overflow, &smi_done);
+ __ not_(rdx); // Correct sign in case of overflow. rdx cannot be 0 here.
+ __ bind(&smi_done);
+ __ movq(rax, rdx);
+ __ ret(0);
+ __ bind(&non_smi);

  // The compare stub returns a positive, negative, or zero 64-bit integer
  // value in rax, corresponding to result of comparing the two inputs.
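The smi fast path above compares by subtraction: if `left - right` overflows, the result's sign is wrong, and bitwise NOT both fixes the sign and keeps the value nonzero. In C++, assuming the GCC/Clang `__builtin_sub_overflow` intrinsic for the overflow check:

    #include <cstdint>

    // left - right, with the stub's NOT trick on signed overflow.
    int64_t CompareSmis(int64_t left, int64_t right) {
      int64_t diff;
      if (__builtin_sub_overflow(left, right, &diff)) {  // overflow: wrong sign
        diff = ~diff;  // flips the sign bit; cannot be 0 after an overflow
      }
      return diff;     // negative, zero, or positive, like rax in the stub
    }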
@@ -3335,66 +3629,58 @@ void CompareStub::Generate(MacroAssembler* masm) {
  __ cmpq(rax, rdx);
  __ j(not_equal, &not_identical, Label::kNear);

- if (cc_ != equal) {
+ if (cc != equal) {
  // Check for undefined. undefined OP undefined is false even though
  // undefined == undefined.
  Label check_for_nan;
  __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, &check_for_nan, Label::kNear);
- __ Set(rax, NegativeComparisonResult(cc_));
+ __ Set(rax, NegativeComparisonResult(cc));
  __ ret(0);
  __ bind(&check_for_nan);
  }

  // Test for NaN. Sadly, we can't just compare to FACTORY->nan_value(),
  // so we do the second best thing - test it ourselves.
- // Note: if cc_ != equal, never_nan_nan_ is not used.
- // We cannot set rax to EQUAL until just before return because
- // rax must be unchanged on jump to not_identical.
- if (never_nan_nan_ && (cc_ == equal)) {
- __ Set(rax, EQUAL);
- __ ret(0);
- } else {
- Label heap_number;
- // If it's not a heap number, then return equal for (in)equality operator.
- __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
- factory->heap_number_map());
- __ j(equal, &heap_number, Label::kNear);
- if (cc_ != equal) {
- // Call runtime on identical objects. Otherwise return equal.
- __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
- __ j(above_equal, &not_identical, Label::kNear);
- }
- __ Set(rax, EQUAL);
- __ ret(0);
+ Label heap_number;
+ // If it's not a heap number, then return equal for (in)equality operator.
+ __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
+ factory->heap_number_map());
+ __ j(equal, &heap_number, Label::kNear);
+ if (cc != equal) {
+ // Call runtime on identical objects. Otherwise return equal.
+ __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
+ __ j(above_equal, &not_identical, Label::kNear);
+ }
+ __ Set(rax, EQUAL);
+ __ ret(0);

- __ bind(&heap_number);
- // It is a heap number, so return equal if it's not NaN.
- // For NaN, return 1 for every condition except greater and
- // greater-equal. Return -1 for them, so the comparison yields
- // false for all conditions except not-equal.
- __ Set(rax, EQUAL);
- __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
- __ ucomisd(xmm0, xmm0);
- __ setcc(parity_even, rax);
- // rax is 0 for equal non-NaN heapnumbers, 1 for NaNs.
- if (cc_ == greater_equal || cc_ == greater) {
- __ neg(rax);
- }
- __ ret(0);
+ __ bind(&heap_number);
+ // It is a heap number, so return equal if it's not NaN.
+ // For NaN, return 1 for every condition except greater and
+ // greater-equal. Return -1 for them, so the comparison yields
+ // false for all conditions except not-equal.
+ __ Set(rax, EQUAL);
+ __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
+ __ ucomisd(xmm0, xmm0);
+ __ setcc(parity_even, rax);
+ // rax is 0 for equal non-NaN heapnumbers, 1 for NaNs.
+ if (cc == greater_equal || cc == greater) {
+ __ neg(rax);
  }
+ __ ret(0);

  __ bind(&not_identical);
  }

- if (cc_ == equal) { // Both strict and non-strict.
+ if (cc == equal) { // Both strict and non-strict.
  Label slow; // Fallthrough label.

  // If we're doing a strict equality comparison, we don't have to do
  // type conversion, so we generate code to do fast comparison for objects
  // and oddballs. Non-smi numbers and strings still go through the usual
  // slow-case code.
- if (strict_) {
+ if (strict()) {
  // If either is a Smi (we know that not both are), then they can only
  // be equal if the other is a HeapNumber. If so, use the slow case.
  {
@@ -3446,40 +3732,38 @@ void CompareStub::Generate(MacroAssembler* masm) {
  }

  // Generate the number comparison code.
- if (include_number_compare_) {
- Label non_number_comparison;
- Label unordered;
- FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison);
- __ xorl(rax, rax);
- __ xorl(rcx, rcx);
- __ ucomisd(xmm0, xmm1);
-
- // Don't base result on EFLAGS when a NaN is involved.
- __ j(parity_even, &unordered, Label::kNear);
- // Return a result of -1, 0, or 1, based on EFLAGS.
- __ setcc(above, rax);
- __ setcc(below, rcx);
- __ subq(rax, rcx);
- __ ret(0);
+ Label non_number_comparison;
+ Label unordered;
+ FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison);
+ __ xorl(rax, rax);
+ __ xorl(rcx, rcx);
+ __ ucomisd(xmm0, xmm1);

- // If one of the numbers was NaN, then the result is always false.
- // The cc is never not-equal.
- __ bind(&unordered);
- ASSERT(cc_ != not_equal);
- if (cc_ == less || cc_ == less_equal) {
- __ Set(rax, 1);
- } else {
- __ Set(rax, -1);
- }
- __ ret(0);
+ // Don't base result on EFLAGS when a NaN is involved.
+ __ j(parity_even, &unordered, Label::kNear);
+ // Return a result of -1, 0, or 1, based on EFLAGS.
+ __ setcc(above, rax);
+ __ setcc(below, rcx);
+ __ subq(rax, rcx);
+ __ ret(0);

- // The number comparison code did not provide a valid result.
- __ bind(&non_number_comparison);
+ // If one of the numbers was NaN, then the result is always false.
+ // The cc is never not-equal.
+ __ bind(&unordered);
+ ASSERT(cc != not_equal);
+ if (cc == less || cc == less_equal) {
+ __ Set(rax, 1);
+ } else {
+ __ Set(rax, -1);
  }
+ __ ret(0);
+
+ // The number comparison code did not provide a valid result.
+ __ bind(&non_number_comparison);

  // Fast negative check for symbol-to-symbol equality.
  Label check_for_strings;
- if (cc_ == equal) {
+ if (cc == equal) {
  BranchIfNonSymbol(masm, &check_for_strings, rax, kScratchRegister);
  BranchIfNonSymbol(masm, &check_for_strings, rdx, kScratchRegister);

@@ -3495,7 +3779,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
  rdx, rax, rcx, rbx, &check_unequal_objects);

  // Inline comparison of ASCII strings.
- if (cc_ == equal) {
+ if (cc == equal) {
  StringCompareStub::GenerateFlatAsciiStringEquals(masm,
  rdx,
  rax,
@@ -3516,7 +3800,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
  #endif

  __ bind(&check_unequal_objects);
- if (cc_ == equal && !strict_) {
+ if (cc == equal && !strict()) {
  // Not strict equality. Objects are unequal if
  // they are both JSObjects and not undetectable,
  // and their pointers are different.
@@ -3556,11 +3840,11 @@ void CompareStub::Generate(MacroAssembler* masm) {

  // Figure out which native to call and set up the arguments.
  Builtins::JavaScript builtin;
- if (cc_ == equal) {
- builtin = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
+ if (cc == equal) {
+ builtin = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
  } else {
  builtin = Builtins::COMPARE;
- __ Push(Smi::FromInt(NegativeComparisonResult(cc_)));
+ __ Push(Smi::FromInt(NegativeComparisonResult(cc)));
  }

  // Restore return address on the stack.
@@ -3569,22 +3853,9 @@ void CompareStub::Generate(MacroAssembler* masm) {
  // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ InvokeBuiltin(builtin, JUMP_FUNCTION);
- }

-
- void CompareStub::BranchIfNonSymbol(MacroAssembler* masm,
- Label* label,
- Register object,
- Register scratch) {
- __ JumpIfSmi(object, label);
- __ movq(scratch, FieldOperand(object, HeapObject::kMapOffset));
- __ movzxbq(scratch,
- FieldOperand(scratch, Map::kInstanceTypeOffset));
- // Ensure that no non-strings have the symbol bit set.
- STATIC_ASSERT(LAST_TYPE < kNotStringTag + kIsSymbolMask);
- STATIC_ASSERT(kSymbolTag != 0);
- __ testb(scratch, Immediate(kIsSymbolMask));
- __ j(zero, label);
+ __ bind(&miss);
+ GenerateMiss(masm);
  }

@@ -3806,6 +4077,19 @@ void CEntryStub::GenerateAheadOfTime() {
  }

+ static void JumpIfOOM(MacroAssembler* masm,
+ Register value,
+ Register scratch,
+ Label* oom_label) {
+ __ movq(scratch, value);
+ STATIC_ASSERT(Failure::OUT_OF_MEMORY_EXCEPTION == 3);
+ STATIC_ASSERT(kFailureTag == 3);
+ __ and_(scratch, Immediate(0xf));
+ __ cmpq(scratch, Immediate(0xf));
+ __ j(equal, oom_label);
+ }
+
+
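JumpIfOOM relies on the failure encoding pinned by the two STATIC_ASSERTs above: the low two bits of a failure word are kFailureTag (3) and the next two bits carry the failure type, so OUT_OF_MEMORY_EXCEPTION (type 3) sets all four low bits. As a one-line C++ predicate:

    #include <cstdint>

    // True iff the four low tag/type bits mark an out-of-memory failure.
    bool IsOutOfMemoryFailure(uintptr_t value) {
      return (value & 0xf) == 0xf;  // and_(scratch, 0xf); cmpq(scratch, 0xf)
    }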
3809
4093
  void CEntryStub::GenerateCore(MacroAssembler* masm,
3810
4094
  Label* throw_normal_exception,
3811
4095
  Label* throw_termination_exception,
@@ -3841,8 +4125,7 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
3841
4125
  __ movq(rdi, rax);
3842
4126
  #endif
3843
4127
  __ movq(kScratchRegister,
3844
- FUNCTION_ADDR(Runtime::PerformGC),
3845
- RelocInfo::RUNTIME_ENTRY);
4128
+ ExternalReference::perform_gc_function(masm->isolate()));
3846
4129
  __ call(kScratchRegister);
3847
4130
  }
3848
4131
 
@@ -3920,9 +4203,7 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
3920
4203
  __ j(zero, &retry, Label::kNear);
3921
4204
 
3922
4205
  // Special handling of out of memory exceptions.
3923
- __ movq(kScratchRegister, Failure::OutOfMemoryException(), RelocInfo::NONE);
3924
- __ cmpq(rax, kScratchRegister);
3925
- __ j(equal, throw_out_of_memory_exception);
4206
+ JumpIfOOM(masm, rax, kScratchRegister, throw_out_of_memory_exception);
3926
4207
 
3927
4208
  // Retrieve the pending exception and clear the variable.
3928
4209
  ExternalReference pending_exception_address(
@@ -4000,7 +4281,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
 
   // Do full GC and retry runtime call one final time.
   Failure* failure = Failure::InternalError();
-  __ movq(rax, failure, RelocInfo::NONE);
+  __ movq(rax, failure, RelocInfo::NONE64);
   GenerateCore(masm,
                &throw_normal_exception,
                &throw_termination_exception,
@@ -4019,7 +4300,10 @@ void CEntryStub::Generate(MacroAssembler* masm) {
   // Set pending exception and rax to out of memory exception.
   ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
                                       isolate);
-  __ movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE);
+  Label already_have_failure;
+  JumpIfOOM(masm, rax, kScratchRegister, &already_have_failure);
+  __ movq(rax, Failure::OutOfMemoryException(0x1), RelocInfo::NONE64);
+  __ bind(&already_have_failure);
   __ Store(pending_exception, rax);
   // Fall through to the next label.
 
@@ -4047,7 +4331,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   // Cannot use smi-register for loading yet.
   __ movq(kScratchRegister,
           reinterpret_cast<uint64_t>(Smi::FromInt(marker)),
-          RelocInfo::NONE);
+          RelocInfo::NONE64);
   __ push(kScratchRegister);  // context slot
   __ push(kScratchRegister);  // function slot
   // Save callee-saved registers (X64/Win64 calling conventions).
@@ -4102,7 +4386,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
                                       isolate);
   __ Store(pending_exception, rax);
-  __ movq(rax, Failure::Exception(), RelocInfo::NONE);
+  __ movq(rax, Failure::Exception(), RelocInfo::NONE64);
   __ jmp(&exit);
 
   // Invoke: Link this frame into the handler chain. There's only one
@@ -4342,44 +4626,6 @@ Register InstanceofStub::left() { return no_reg; }
 Register InstanceofStub::right() { return no_reg; }
 
 
-int CompareStub::MinorKey() {
-  // Encode the three parameters in a unique 16 bit value. To avoid duplicate
-  // stubs the never NaN NaN condition is only taken into account if the
-  // condition is equals.
-  ASSERT(static_cast<unsigned>(cc_) < (1 << 12));
-  ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
-  return ConditionField::encode(static_cast<unsigned>(cc_))
-         | RegisterField::encode(false)    // lhs_ and rhs_ are not used
-         | StrictField::encode(strict_)
-         | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false)
-         | IncludeNumberCompareField::encode(include_number_compare_)
-         | IncludeSmiCompareField::encode(include_smi_compare_);
-}
-
-
-// Unfortunately you have to run without snapshots to see most of these
-// names in the profile since most compare stubs end up in the snapshot.
-void CompareStub::PrintName(StringStream* stream) {
-  ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
-  const char* cc_name;
-  switch (cc_) {
-    case less: cc_name = "LT"; break;
-    case greater: cc_name = "GT"; break;
-    case less_equal: cc_name = "LE"; break;
-    case greater_equal: cc_name = "GE"; break;
-    case equal: cc_name = "EQ"; break;
-    case not_equal: cc_name = "NE"; break;
-    default: cc_name = "UnknownCondition"; break;
-  }
-  bool is_equality = cc_ == equal || cc_ == not_equal;
-  stream->Add("CompareStub_%s", cc_name);
-  if (strict_ && is_equality) stream->Add("_STRICT");
-  if (never_nan_nan_ && is_equality) stream->Add("_NO_NAN");
-  if (!include_number_compare_) stream->Add("_NO_NUMBER");
-  if (!include_smi_compare_) stream->Add("_NO_SMI");
-}
-
-
 // -------------------------------------------------------------------------
 // StringCharCodeAtGenerator
 
@@ -4480,7 +4726,7 @@ void StringCharCodeAtGenerator::GenerateSlow(
 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
   // Fast case of Heap::LookupSingleCharacterStringFromCode.
   __ JumpIfNotSmi(code_, &slow_case_);
-  __ SmiCompare(code_, Smi::FromInt(String::kMaxAsciiCharCode));
+  __ SmiCompare(code_, Smi::FromInt(String::kMaxOneByteCharCode));
   __ j(above, &slow_case_);
 
   __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
@@ -4617,8 +4863,8 @@ void StringAddStub::Generate(MacroAssembler* masm) {
                                                     &call_runtime);
 
     // Get the two characters forming the sub string.
-    __ movzxbq(rbx, FieldOperand(rax, SeqAsciiString::kHeaderSize));
-    __ movzxbq(rcx, FieldOperand(rdx, SeqAsciiString::kHeaderSize));
+    __ movzxbq(rbx, FieldOperand(rax, SeqOneByteString::kHeaderSize));
+    __ movzxbq(rcx, FieldOperand(rdx, SeqOneByteString::kHeaderSize));
 
     // Try to lookup two character string in symbol table. If it is not found
     // just allocate a new one.
@@ -4634,11 +4880,11 @@ void StringAddStub::Generate(MacroAssembler* masm) {
     // rbx - first byte: first character
     // rbx - second byte: *maybe* second character
     // Make sure that the second byte of rbx contains the second character.
-    __ movzxbq(rcx, FieldOperand(rdx, SeqAsciiString::kHeaderSize));
+    __ movzxbq(rcx, FieldOperand(rdx, SeqOneByteString::kHeaderSize));
     __ shll(rcx, Immediate(kBitsPerByte));
     __ orl(rbx, rcx);
     // Write both characters to the new string.
-    __ movw(FieldOperand(rax, SeqAsciiString::kHeaderSize), rbx);
+    __ movw(FieldOperand(rax, SeqOneByteString::kHeaderSize), rbx);
     __ IncrementCounter(counters->string_add_native(), 1);
     __ ret(2 * kPointerSize);
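
The shll/orl pair above packs the two one-byte characters into a single 16-bit probe key, first character in the low byte. A sketch of that packing (plain C++, not V8 code):

#include <cassert>
#include <cstdint>

// Second character shifted up by kBitsPerByte (8) and OR'ed in, as the
// generated shll/orl does with rbx and rcx.
uint32_t PackTwoChars(uint8_t first, uint8_t second) {
  return static_cast<uint32_t>(first) | (static_cast<uint32_t>(second) << 8);
}

int main() {
  // "ab": 'a' (0x61) lands in the low byte, 'b' (0x62) above it.
  assert(PackTwoChars('a', 'b') == 0x6261);
  return 0;
}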
 
@@ -4661,7 +4907,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
   Label non_ascii, allocated, ascii_data;
   __ movl(rcx, r8);
   __ and_(rcx, r9);
-  STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
+  STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
   STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
   __ testl(rcx, Immediate(kStringEncodingMask));
   __ j(zero, &non_ascii);
@@ -4687,9 +4933,9 @@ void StringAddStub::Generate(MacroAssembler* masm) {
   __ testb(rcx, Immediate(kAsciiDataHintMask));
   __ j(not_zero, &ascii_data);
   __ xor_(r8, r9);
-  STATIC_ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0);
-  __ andb(r8, Immediate(kAsciiStringTag | kAsciiDataHintTag));
-  __ cmpb(r8, Immediate(kAsciiStringTag | kAsciiDataHintTag));
+  STATIC_ASSERT(kOneByteStringTag != 0 && kAsciiDataHintTag != 0);
+  __ andb(r8, Immediate(kOneByteStringTag | kAsciiDataHintTag));
+  __ cmpb(r8, Immediate(kOneByteStringTag | kAsciiDataHintTag));
   __ j(equal, &ascii_data);
   // Allocate a two byte cons string.
   __ AllocateTwoByteConsString(rcx, rdi, no_reg, &call_runtime);
@@ -4720,8 +4966,8 @@ void StringAddStub::Generate(MacroAssembler* masm) {
   __ movq(rcx, FieldOperand(rax, ExternalString::kResourceDataOffset));
   __ jmp(&first_prepared, Label::kNear);
   __ bind(&first_is_sequential);
-  STATIC_ASSERT(SeqAsciiString::kHeaderSize == SeqTwoByteString::kHeaderSize);
-  __ lea(rcx, FieldOperand(rax, SeqAsciiString::kHeaderSize));
+  STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
+  __ lea(rcx, FieldOperand(rax, SeqOneByteString::kHeaderSize));
   __ bind(&first_prepared);
 
   // Check whether both strings have same encoding.
@@ -4741,8 +4987,8 @@ void StringAddStub::Generate(MacroAssembler* masm) {
   __ movq(rdx, FieldOperand(rdx, ExternalString::kResourceDataOffset));
   __ jmp(&second_prepared, Label::kNear);
   __ bind(&second_is_sequential);
-  STATIC_ASSERT(SeqAsciiString::kHeaderSize == SeqTwoByteString::kHeaderSize);
-  __ lea(rdx, FieldOperand(rdx, SeqAsciiString::kHeaderSize));
+  STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
+  __ lea(rdx, FieldOperand(rdx, SeqOneByteString::kHeaderSize));
   __ bind(&second_prepared);
 
   Label non_ascii_string_add_flat_result;
@@ -4758,7 +5004,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
   __ AllocateAsciiString(rax, rbx, rdi, r8, r9, &call_runtime);
   // rax: result string
   // Locate first character of result.
-  __ lea(rbx, FieldOperand(rax, SeqAsciiString::kHeaderSize));
+  __ lea(rbx, FieldOperand(rax, SeqOneByteString::kHeaderSize));
   // rcx: first char of first string
   // rbx: first character of result
   // r14: length of first string
@@ -5031,7 +5277,7 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
         temp, temp, &next_probe[i]);
 
     // Check if the two characters match.
-    __ movl(temp, FieldOperand(candidate, SeqAsciiString::kHeaderSize));
+    __ movl(temp, FieldOperand(candidate, SeqOneByteString::kHeaderSize));
     __ andl(temp, Immediate(0x0000ffff));
     __ cmpl(chars, temp);
     __ j(equal, &found_in_symbol_table);
@@ -5209,7 +5455,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
   // string's encoding is wrong because we always have to recheck encoding of
   // the newly created string's parent anyways due to externalized strings.
   Label two_byte_slice, set_slice_header;
-  STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
+  STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
   STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
   __ testb(rbx, Immediate(kStringEncodingMask));
   __ j(zero, &two_byte_slice, Label::kNear);
@@ -5249,11 +5495,11 @@ void SubStringStub::Generate(MacroAssembler* masm) {
   __ j(not_zero, &runtime);
   __ movq(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
   // Move the pointer so that offset-wise, it looks like a sequential string.
-  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqAsciiString::kHeaderSize);
+  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
   __ subq(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
 
   __ bind(&sequential_string);
-  STATIC_ASSERT((kAsciiStringTag & kStringEncodingMask) != 0);
+  STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
   __ testb(rbx, Immediate(kStringEncodingMask));
   __ j(zero, &two_byte_sequential);
 
@@ -5266,10 +5512,10 @@ void SubStringStub::Generate(MacroAssembler* masm) {
   {  // Locate character of sub string start.
     SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_1);
     __ lea(rsi, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
-                        SeqAsciiString::kHeaderSize - kHeapObjectTag));
+                        SeqOneByteString::kHeaderSize - kHeapObjectTag));
   }
   // Locate first character of result.
-  __ lea(rdi, FieldOperand(rax, SeqAsciiString::kHeaderSize));
+  __ lea(rdi, FieldOperand(rax, SeqOneByteString::kHeaderSize));
 
   // rax: result string
   // rcx: result length
@@ -5291,7 +5537,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
   {  // Locate character of sub string start.
     SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_2);
     __ lea(rsi, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
-                        SeqAsciiString::kHeaderSize - kHeapObjectTag));
+                        SeqOneByteString::kHeaderSize - kHeapObjectTag));
   }
   // Locate first character of result.
   __ lea(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize));
@@ -5396,16 +5642,32 @@ void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
   // Compare lengths (precomputed).
   __ bind(&compare_lengths);
   __ SmiTest(length_difference);
+#ifndef ENABLE_LATIN_1
   __ j(not_zero, &result_not_equal, Label::kNear);
+#else
+  Label length_not_equal;
+  __ j(not_zero, &length_not_equal, Label::kNear);
+#endif
 
   // Result is EQUAL.
   __ Move(rax, Smi::FromInt(EQUAL));
   __ ret(0);
 
   Label result_greater;
+#ifdef ENABLE_LATIN_1
+  Label result_less;
+  __ bind(&length_not_equal);
+  __ j(greater, &result_greater, Label::kNear);
+  __ jmp(&result_less, Label::kNear);
+#endif
   __ bind(&result_not_equal);
   // Unequal comparison of left to right, either character or length.
+#ifndef ENABLE_LATIN_1
   __ j(greater, &result_greater, Label::kNear);
+#else
+  __ j(above, &result_greater, Label::kNear);
+  __ bind(&result_less);
+#endif
 
   // Result is LESS.
   __ Move(rax, Smi::FromInt(LESS));
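
The ENABLE_LATIN_1 split separates two kinds of "greater": a length difference is a signed quantity, but a character difference must be judged unsigned, since one-byte characters 0x80-0xFF have to sort above ASCII. That is why the character path switches from j(greater, ...) to j(above, ...). A small sketch of the distinction:

#include <cassert>

// Compare bytes unsigned, as j(above, ...) does; a signed comparison
// would make Latin-1 characters >= 0x80 sort below ASCII.
int CompareBytes(unsigned char a, unsigned char b) {
  if (a == b) return 0;
  return a < b ? -1 : 1;
}

int main() {
  // 0xE9 ('é' in Latin-1) must compare greater than 'e' (0x65), even
  // though as a signed char 0xE9 would be negative.
  assert(CompareBytes(0xE9, 'e') == 1);
  return 0;
}
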
@@ -5431,9 +5693,9 @@ void StringCompareStub::GenerateAsciiCharsCompareLoop(
   // doesn't need an additional compare.
   __ SmiToInteger32(length, length);
   __ lea(left,
-         FieldOperand(left, length, times_1, SeqAsciiString::kHeaderSize));
+         FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
   __ lea(right,
-         FieldOperand(right, length, times_1, SeqAsciiString::kHeaderSize));
+         FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
   __ neg(length);
   Register index = length;  // index = -length;
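
The loop set up here uses the usual negative-index idiom: both pointers are biased past the character data, and the index runs from -length up to zero, so the increment itself doubles as the termination test. Roughly, in C++ (a sketch of the shape, not the stub itself):

#include <cassert>
#include <cstddef>

// Both pointers are advanced past the data (the biased lea above); a
// negative index then walks forward until it reaches zero.
bool FlatCharsEqual(const char* left, const char* right, size_t length) {
  left += length;
  right += length;
  ptrdiff_t index = -static_cast<ptrdiff_t>(length);  // __ neg(length)
  while (index != 0) {
    if (left[index] != right[index]) return false;
    ++index;  // keep looping while index < 0
  }
  return true;
}

int main() {
  assert(FlatCharsEqual("abc", "abc", 3));
  assert(!FlatCharsEqual("abc", "abd", 3));
  return 0;
}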
 
@@ -5489,7 +5751,7 @@ void StringCompareStub::Generate(MacroAssembler* masm) {
 
 
 void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
-  ASSERT(state_ == CompareIC::SMIS);
+  ASSERT(state_ == CompareIC::SMI);
   Label miss;
   __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear);
 
@@ -5501,7 +5763,7 @@ void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
     __ subq(rdx, rax);
     __ j(no_overflow, &done, Label::kNear);
     // Correct sign of result in case of overflow.
-    __ SmiNot(rdx, rdx);
+    __ not_(rdx);
     __ bind(&done);
     __ movq(rax, rdx);
   }
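
The switch from SmiNot to a raw not_ keeps the overflow fix-up on plain bits: the stub returns left minus right as the comparison result, and when that subtraction overflows, the sign bit is exactly inverted, so a bitwise NOT restores a usable sign while the value stays nonzero. The same trick on untagged 64-bit integers (the stub works on tagged smis, so this is only the shape of the idea):

#include <cassert>
#include <cstdint>

int64_t CompareViaSubtract(int64_t left, int64_t right) {
  // Subtract in unsigned arithmetic so wraparound is well defined.
  uint64_t diff = static_cast<uint64_t>(left) - static_cast<uint64_t>(right);
  // Signed overflow occurred iff the operands' signs differ and the
  // result's sign differs from left's (the j(no_overflow, ...) test).
  bool overflow = ((left ^ right) & (left ^ static_cast<int64_t>(diff))) < 0;
  if (overflow) diff = ~diff;  // flip the wrong sign; result stays nonzero
  return static_cast<int64_t>(diff);
}

int main() {
  assert(CompareViaSubtract(5, 3) > 0);
  assert(CompareViaSubtract(INT64_MIN, 1) < 0);  // overflows without the fix
  return 0;
}
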
@@ -5513,23 +5775,41 @@ void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
 
 
 void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
-  ASSERT(state_ == CompareIC::HEAP_NUMBERS);
+  ASSERT(state_ == CompareIC::HEAP_NUMBER);
 
   Label generic_stub;
   Label unordered, maybe_undefined1, maybe_undefined2;
   Label miss;
-  Condition either_smi = masm->CheckEitherSmi(rax, rdx);
-  __ j(either_smi, &generic_stub, Label::kNear);
 
-  __ CmpObjectType(rax, HEAP_NUMBER_TYPE, rcx);
+  if (left_ == CompareIC::SMI) {
+    __ JumpIfNotSmi(rdx, &miss);
+  }
+  if (right_ == CompareIC::SMI) {
+    __ JumpIfNotSmi(rax, &miss);
+  }
+
+  // Load left and right operand.
+  Label done, left, left_smi, right_smi;
+  __ JumpIfSmi(rax, &right_smi, Label::kNear);
+  __ CompareMap(rax, masm->isolate()->factory()->heap_number_map(), NULL);
   __ j(not_equal, &maybe_undefined1, Label::kNear);
-  __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
+  __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
+  __ jmp(&left, Label::kNear);
+  __ bind(&right_smi);
+  __ SmiToInteger32(rcx, rax);  // Can't clobber rax yet.
+  __ cvtlsi2sd(xmm1, rcx);
+
+  __ bind(&left);
+  __ JumpIfSmi(rdx, &left_smi, Label::kNear);
+  __ CompareMap(rdx, masm->isolate()->factory()->heap_number_map(), NULL);
   __ j(not_equal, &maybe_undefined2, Label::kNear);
-
-  // Load left and right operand
   __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
-  __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
+  __ jmp(&done);
+  __ bind(&left_smi);
+  __ SmiToInteger32(rcx, rdx);  // Can't clobber rdx yet.
+  __ cvtlsi2sd(xmm0, rcx);
 
+  __ bind(&done);
   // Compare operands
   __ ucomisd(xmm0, xmm1);
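
The rewritten operand loading is what lets this stub accept a smi on either side instead of punting to the generic stub: each operand is either converted from its integer value (SmiToInteger32 + cvtlsi2sd) or read out of the heap number (movsd), and anything else falls through to the maybe_undefined/miss paths. The dispatch, sketched with stand-in types rather than V8's tagged pointers:

#include <cassert>
#include <cstdint>

struct TaggedValue {         // hypothetical stand-in for a tagged value
  bool is_smi;
  int32_t smi_value;         // valid when is_smi
  bool is_heap_number;
  double heap_number_value;  // valid when is_heap_number
};

bool LoadAsDouble(const TaggedValue& v, double* out) {
  if (v.is_smi) {            // JumpIfSmi -> SmiToInteger32 -> cvtlsi2sd
    *out = static_cast<double>(v.smi_value);
    return true;
  }
  if (v.is_heap_number) {    // CompareMap(heap_number_map) -> movsd
    *out = v.heap_number_value;
    return true;
  }
  return false;              // anything else: maybe_undefined / miss
}

int main() {
  TaggedValue smi = {true, 7, false, 0.0};
  double d = 0.0;
  assert(LoadAsDouble(smi, &d) && d == 7.0);
  return 0;
}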
 
@@ -5545,14 +5825,16 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
   __ ret(0);
 
   __ bind(&unordered);
-  CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
   __ bind(&generic_stub);
+  ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC,
+                     CompareIC::GENERIC);
   __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
 
   __ bind(&maybe_undefined1);
   if (Token::IsOrderedRelationalCompareOp(op_)) {
     __ Cmp(rax, masm->isolate()->factory()->undefined_value());
     __ j(not_equal, &miss);
+    __ JumpIfSmi(rdx, &unordered);
     __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
     __ j(not_equal, &maybe_undefined2, Label::kNear);
     __ jmp(&unordered);
@@ -5570,7 +5852,7 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
 
 
 void ICCompareStub::GenerateSymbols(MacroAssembler* masm) {
-  ASSERT(state_ == CompareIC::SYMBOLS);
+  ASSERT(state_ == CompareIC::SYMBOL);
   ASSERT(GetCondition() == equal);
 
   // Registers containing left and right operands respectively.
@@ -5613,7 +5895,7 @@ void ICCompareStub::GenerateSymbols(MacroAssembler* masm) {
 
 
 void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
-  ASSERT(state_ == CompareIC::STRINGS);
+  ASSERT(state_ == CompareIC::STRING);
   Label miss;
 
   bool equality = Token::IsEqualityOp(op_);
@@ -5699,7 +5981,7 @@ void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
 
 
 void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
-  ASSERT(state_ == CompareIC::OBJECTS);
+  ASSERT(state_ == CompareIC::OBJECT);
   Label miss;
   Condition either_smi = masm->CheckEitherSmi(rdx, rax);
   __ j(either_smi, &miss, Label::kNear);
@@ -5845,8 +6127,7 @@ void StringDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
   ASSERT(!name.is(r0));
   ASSERT(!name.is(r1));
 
-  // Assert that name contains a string.
-  if (FLAG_debug_code) __ AbortIfNotString(name);
+  __ AssertString(name);
 
   __ SmiToInteger32(r0, FieldOperand(elements, kCapacityOffset));
   __ decl(r0);
@@ -6014,6 +6295,8 @@ struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
   { REG(r11), REG(rax), REG(r15), EMIT_REMEMBERED_SET},
   // StoreArrayLiteralElementStub::Generate
   { REG(rbx), REG(rax), REG(rcx), EMIT_REMEMBERED_SET},
+  // FastNewClosureStub::Generate
+  { REG(rcx), REG(rdx), REG(rbx), EMIT_REMEMBERED_SET},
   // Null termination.
   { REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
 };
@@ -6058,6 +6341,11 @@ void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() {
 }
 
 
+bool CodeStub::CanUseFPRegisters() {
+  return true;  // Always have SSE2 on x64.
+}
+
+
 // Takes the input in 3 registers: address_ value_ and object_. A pointer to
 // the value has just been written into the object, now this stub makes sure
 // we keep the GC informed. The word in the object where the value has been
@@ -6154,13 +6442,8 @@ void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm, Mode mode) {
   ASSERT(!address.is(arg1));
   __ Move(address, regs_.address());
   __ Move(arg1, regs_.object());
-  if (mode == INCREMENTAL_COMPACTION) {
-    // TODO(gc) Can we just set address arg2 in the beginning?
-    __ Move(arg2, address);
-  } else {
-    ASSERT(mode == INCREMENTAL);
-    __ movq(arg2, Operand(address, 0));
-  }
+  // TODO(gc) Can we just set address arg2 in the beginning?
+  __ Move(arg2, address);
   __ LoadAddress(arg3, ExternalReference::isolate_address());
   int argument_count = 3;
 
@@ -6190,6 +6473,17 @@ void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
   Label need_incremental;
   Label need_incremental_pop_object;
 
+  __ movq(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
+  __ and_(regs_.scratch0(), regs_.object());
+  __ movq(regs_.scratch1(),
+          Operand(regs_.scratch0(),
+                  MemoryChunk::kWriteBarrierCounterOffset));
+  __ subq(regs_.scratch1(), Immediate(1));
+  __ movq(Operand(regs_.scratch0(),
+                  MemoryChunk::kWriteBarrierCounterOffset),
+          regs_.scratch1());
+  __ j(negative, &need_incremental);
+
   // Let's look at the color of the object: If it is not black we don't have
   // to inform the incremental marker.
   __ JumpIfBlack(regs_.object(),
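
The new prologue is a cheap filter in front of the color checks: pages are power-of-two aligned, so masking the object address with ~Page::kPageAlignmentMask yields the page's MemoryChunk header, where a per-page write barrier counter is decremented; only once it goes negative does the stub fall into the incremental-marking work. A sketch with an assumed page size and a stand-in header (the real constants and field offset come from V8's spaces.h):

#include <cassert>
#include <cstdint>

const uintptr_t kAssumedPageSize = 4096;  // illustrative, not V8's value
const uintptr_t kPageAlignmentMask = kAssumedPageSize - 1;

struct MemoryChunkHeader {                // stand-in for MemoryChunk
  intptr_t write_barrier_counter;
};

// Mask the address down to its page header and decrement the counter;
// mirrors the and_/subq/j(negative, &need_incremental) sequence above.
bool CounterExhausted(uintptr_t object_address) {
  MemoryChunkHeader* chunk = reinterpret_cast<MemoryChunkHeader*>(
      object_address & ~kPageAlignmentMask);
  return --chunk->write_barrier_counter < 0;
}

int main() {
  alignas(4096) static unsigned char page[4096] = {};
  reinterpret_cast<MemoryChunkHeader*>(page)->write_barrier_counter = 2;
  uintptr_t object = reinterpret_cast<uintptr_t>(page) + 128;
  assert(!CounterExhausted(object));  // 2 -> 1
  assert(!CounterExhausted(object));  // 1 -> 0
  assert(CounterExhausted(object));   // 0 -> -1: take the slow path
  return 0;
}
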
@@ -6337,6 +6631,89 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
   __ ret(0);
 }
 
+
+void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
+  ASSERT(!Serializer::enabled());
+  CEntryStub ces(1, kSaveFPRegs);
+  __ Call(ces.GetCode(), RelocInfo::CODE_TARGET);
+  int parameter_count_offset =
+      StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
+  __ movq(rbx, MemOperand(rbp, parameter_count_offset));
+  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
+  __ pop(rcx);
+  __ lea(rsp, MemOperand(rsp, rbx, times_pointer_size,
+                         extra_expression_stack_count_ * kPointerSize));
+  __ jmp(rcx);  // Return to IC Miss stub, continuation still on stack.
+}
+
+
+void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
+  if (entry_hook_ != NULL) {
+    ProfileEntryHookStub stub;
+    masm->CallStub(&stub);
+  }
+}
+
+
+void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
+  // Save volatile registers.
+  // Live registers at this point are the same as at the start of any
+  // JS function:
+  //   o rdi: the JS function object being called (i.e. ourselves)
+  //   o rsi: our context
+  //   o rbp: our caller's frame pointer
+  //   o rsp: stack pointer (pointing to return address)
+  //   o rcx: rcx is zero for method calls and non-zero for function calls.
+#ifdef _WIN64
+  const int kNumSavedRegisters = 1;
+
+  __ push(rcx);
+#else
+  const int kNumSavedRegisters = 3;
+
+  __ push(rcx);
+  __ push(rdi);
+  __ push(rsi);
+#endif
+
+  // Calculate the original stack pointer and store it in the second arg.
+#ifdef _WIN64
+  __ lea(rdx, Operand(rsp, kNumSavedRegisters * kPointerSize));
+#else
+  __ lea(rsi, Operand(rsp, kNumSavedRegisters * kPointerSize));
+#endif
+
+  // Calculate the function address to the first arg.
+#ifdef _WIN64
+  __ movq(rcx, Operand(rdx, 0));
+  __ subq(rcx, Immediate(Assembler::kShortCallInstructionLength));
+#else
+  __ movq(rdi, Operand(rsi, 0));
+  __ subq(rdi, Immediate(Assembler::kShortCallInstructionLength));
+#endif
+
+  // Call the entry hook function.
+  __ movq(rax, &entry_hook_, RelocInfo::NONE64);
+  __ movq(rax, Operand(rax, 0));
+
+  AllowExternalCallThatCantCauseGC scope(masm);
+
+  const int kArgumentCount = 2;
+  __ PrepareCallCFunction(kArgumentCount);
+  __ CallCFunction(rax, kArgumentCount);
+
+  // Restore volatile regs.
+#ifdef _WIN64
+  __ pop(rcx);
+#else
+  __ pop(rsi);
+  __ pop(rdi);
+  __ pop(rcx);
+#endif
+
+  __ Ret();
+}
+
 #undef __
 
 } } // namespace v8::internal
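
ProfileEntryHookStub services the embedder-installed function entry hook: the generated code computes the callee address (the return address minus Assembler::kShortCallInstructionLength) and the stack slot holding that return address, then calls out through entry_hook_. The callback has roughly this shape; the typedef and the V8::SetFunctionEntryHook registration reflect the 3.16-era include/v8.h as I read it, and should be checked against the tree:

#include <cstdint>
#include <cstdio>

// Entered function's address plus the location of the return address,
// matching the FunctionEntryHook typedef in include/v8.h of this era.
typedef void (*FunctionEntryHook)(uintptr_t function,
                                  uintptr_t return_addr_location);

static void MyEntryHook(uintptr_t function, uintptr_t return_addr_location) {
  std::printf("enter %p (return slot %p)\n",
              reinterpret_cast<void*>(function),
              reinterpret_cast<void*>(return_addr_location));
}

int main() {
  // Registration would be v8::V8::SetFunctionEntryHook(MyEntryHook), done
  // before code generation, since MaybeCallEntryHook only instruments
  // stubs while entry_hook_ is non-NULL. Here we just exercise the shape.
  FunctionEntryHook hook = MyEntryHook;
  hook(0x1000, 0x2000);
  return 0;
}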