libv8 3.11.8.17 → 3.16.14.0

Files changed (754)
  1. checksums.yaml +4 -4
  2. data/.travis.yml +1 -2
  3. data/Gemfile +1 -1
  4. data/Rakefile +6 -7
  5. data/lib/libv8/version.rb +1 -1
  6. data/vendor/v8/.gitignore +24 -3
  7. data/vendor/v8/AUTHORS +7 -0
  8. data/vendor/v8/ChangeLog +839 -0
  9. data/vendor/v8/DEPS +1 -1
  10. data/vendor/v8/Makefile.android +92 -0
  11. data/vendor/v8/OWNERS +11 -0
  12. data/vendor/v8/PRESUBMIT.py +71 -0
  13. data/vendor/v8/SConstruct +34 -39
  14. data/vendor/v8/build/android.gypi +56 -37
  15. data/vendor/v8/build/common.gypi +112 -30
  16. data/vendor/v8/build/gyp_v8 +1 -1
  17. data/vendor/v8/build/standalone.gypi +15 -11
  18. data/vendor/v8/include/v8-debug.h +9 -1
  19. data/vendor/v8/include/v8-preparser.h +4 -3
  20. data/vendor/v8/include/v8-profiler.h +25 -25
  21. data/vendor/v8/include/v8-testing.h +4 -3
  22. data/vendor/v8/include/v8.h +994 -540
  23. data/vendor/v8/preparser/preparser-process.cc +3 -3
  24. data/vendor/v8/samples/lineprocessor.cc +20 -27
  25. data/vendor/v8/samples/process.cc +18 -14
  26. data/vendor/v8/samples/shell.cc +16 -15
  27. data/vendor/v8/src/SConscript +15 -14
  28. data/vendor/v8/src/accessors.cc +169 -77
  29. data/vendor/v8/src/accessors.h +4 -0
  30. data/vendor/v8/src/allocation-inl.h +2 -2
  31. data/vendor/v8/src/allocation.h +7 -7
  32. data/vendor/v8/src/api.cc +810 -497
  33. data/vendor/v8/src/api.h +85 -60
  34. data/vendor/v8/src/arm/assembler-arm-inl.h +179 -22
  35. data/vendor/v8/src/arm/assembler-arm.cc +633 -264
  36. data/vendor/v8/src/arm/assembler-arm.h +264 -197
  37. data/vendor/v8/src/arm/builtins-arm.cc +117 -27
  38. data/vendor/v8/src/arm/code-stubs-arm.cc +1241 -700
  39. data/vendor/v8/src/arm/code-stubs-arm.h +35 -138
  40. data/vendor/v8/src/arm/codegen-arm.cc +285 -16
  41. data/vendor/v8/src/arm/codegen-arm.h +22 -0
  42. data/vendor/v8/src/arm/constants-arm.cc +5 -3
  43. data/vendor/v8/src/arm/constants-arm.h +24 -11
  44. data/vendor/v8/src/arm/debug-arm.cc +3 -3
  45. data/vendor/v8/src/arm/deoptimizer-arm.cc +382 -92
  46. data/vendor/v8/src/arm/disasm-arm.cc +61 -12
  47. data/vendor/v8/src/arm/frames-arm.h +0 -14
  48. data/vendor/v8/src/arm/full-codegen-arm.cc +332 -304
  49. data/vendor/v8/src/arm/ic-arm.cc +180 -259
  50. data/vendor/v8/src/arm/lithium-arm.cc +364 -316
  51. data/vendor/v8/src/arm/lithium-arm.h +512 -275
  52. data/vendor/v8/src/arm/lithium-codegen-arm.cc +1768 -809
  53. data/vendor/v8/src/arm/lithium-codegen-arm.h +97 -35
  54. data/vendor/v8/src/arm/lithium-gap-resolver-arm.cc +12 -5
  55. data/vendor/v8/src/arm/macro-assembler-arm.cc +439 -228
  56. data/vendor/v8/src/arm/macro-assembler-arm.h +116 -70
  57. data/vendor/v8/src/arm/regexp-macro-assembler-arm.cc +54 -44
  58. data/vendor/v8/src/arm/regexp-macro-assembler-arm.h +3 -10
  59. data/vendor/v8/src/arm/simulator-arm.cc +272 -238
  60. data/vendor/v8/src/arm/simulator-arm.h +38 -8
  61. data/vendor/v8/src/arm/stub-cache-arm.cc +522 -895
  62. data/vendor/v8/src/array.js +101 -70
  63. data/vendor/v8/src/assembler.cc +270 -19
  64. data/vendor/v8/src/assembler.h +110 -15
  65. data/vendor/v8/src/ast.cc +79 -69
  66. data/vendor/v8/src/ast.h +255 -301
  67. data/vendor/v8/src/atomicops.h +7 -1
  68. data/vendor/v8/src/atomicops_internals_tsan.h +335 -0
  69. data/vendor/v8/src/bootstrapper.cc +481 -418
  70. data/vendor/v8/src/bootstrapper.h +4 -4
  71. data/vendor/v8/src/builtins.cc +498 -311
  72. data/vendor/v8/src/builtins.h +75 -47
  73. data/vendor/v8/src/checks.cc +2 -1
  74. data/vendor/v8/src/checks.h +8 -0
  75. data/vendor/v8/src/code-stubs-hydrogen.cc +253 -0
  76. data/vendor/v8/src/code-stubs.cc +249 -84
  77. data/vendor/v8/src/code-stubs.h +501 -169
  78. data/vendor/v8/src/codegen.cc +36 -18
  79. data/vendor/v8/src/codegen.h +25 -3
  80. data/vendor/v8/src/collection.js +54 -17
  81. data/vendor/v8/src/compilation-cache.cc +24 -16
  82. data/vendor/v8/src/compilation-cache.h +15 -6
  83. data/vendor/v8/src/compiler.cc +497 -195
  84. data/vendor/v8/src/compiler.h +246 -38
  85. data/vendor/v8/src/contexts.cc +64 -24
  86. data/vendor/v8/src/contexts.h +60 -29
  87. data/vendor/v8/src/conversions-inl.h +24 -14
  88. data/vendor/v8/src/conversions.h +7 -4
  89. data/vendor/v8/src/counters.cc +21 -12
  90. data/vendor/v8/src/counters.h +44 -16
  91. data/vendor/v8/src/cpu-profiler.h +1 -1
  92. data/vendor/v8/src/d8-debug.cc +2 -2
  93. data/vendor/v8/src/d8-readline.cc +13 -2
  94. data/vendor/v8/src/d8.cc +681 -273
  95. data/vendor/v8/src/d8.gyp +4 -4
  96. data/vendor/v8/src/d8.h +38 -18
  97. data/vendor/v8/src/d8.js +0 -617
  98. data/vendor/v8/src/data-flow.h +55 -0
  99. data/vendor/v8/src/date.js +1 -42
  100. data/vendor/v8/src/dateparser-inl.h +5 -1
  101. data/vendor/v8/src/debug-agent.cc +10 -15
  102. data/vendor/v8/src/debug-debugger.js +147 -149
  103. data/vendor/v8/src/debug.cc +323 -164
  104. data/vendor/v8/src/debug.h +26 -14
  105. data/vendor/v8/src/deoptimizer.cc +765 -290
  106. data/vendor/v8/src/deoptimizer.h +130 -28
  107. data/vendor/v8/src/disassembler.cc +10 -4
  108. data/vendor/v8/src/elements-kind.cc +7 -2
  109. data/vendor/v8/src/elements-kind.h +19 -0
  110. data/vendor/v8/src/elements.cc +607 -285
  111. data/vendor/v8/src/elements.h +36 -13
  112. data/vendor/v8/src/execution.cc +52 -31
  113. data/vendor/v8/src/execution.h +4 -4
  114. data/vendor/v8/src/extensions/externalize-string-extension.cc +5 -4
  115. data/vendor/v8/src/extensions/gc-extension.cc +5 -1
  116. data/vendor/v8/src/extensions/statistics-extension.cc +153 -0
  117. data/vendor/v8/src/{inspector.h → extensions/statistics-extension.h} +12 -23
  118. data/vendor/v8/src/factory.cc +101 -134
  119. data/vendor/v8/src/factory.h +36 -31
  120. data/vendor/v8/src/flag-definitions.h +102 -25
  121. data/vendor/v8/src/flags.cc +9 -5
  122. data/vendor/v8/src/frames-inl.h +10 -0
  123. data/vendor/v8/src/frames.cc +116 -26
  124. data/vendor/v8/src/frames.h +96 -12
  125. data/vendor/v8/src/full-codegen.cc +219 -74
  126. data/vendor/v8/src/full-codegen.h +63 -21
  127. data/vendor/v8/src/func-name-inferrer.cc +8 -7
  128. data/vendor/v8/src/func-name-inferrer.h +5 -3
  129. data/vendor/v8/src/gdb-jit.cc +71 -57
  130. data/vendor/v8/src/global-handles.cc +230 -101
  131. data/vendor/v8/src/global-handles.h +26 -27
  132. data/vendor/v8/src/globals.h +17 -19
  133. data/vendor/v8/src/handles-inl.h +59 -12
  134. data/vendor/v8/src/handles.cc +180 -200
  135. data/vendor/v8/src/handles.h +80 -11
  136. data/vendor/v8/src/hashmap.h +60 -40
  137. data/vendor/v8/src/heap-inl.h +107 -45
  138. data/vendor/v8/src/heap-profiler.cc +38 -19
  139. data/vendor/v8/src/heap-profiler.h +24 -14
  140. data/vendor/v8/src/heap.cc +1123 -738
  141. data/vendor/v8/src/heap.h +385 -146
  142. data/vendor/v8/src/hydrogen-instructions.cc +700 -217
  143. data/vendor/v8/src/hydrogen-instructions.h +1158 -472
  144. data/vendor/v8/src/hydrogen.cc +3319 -1662
  145. data/vendor/v8/src/hydrogen.h +411 -170
  146. data/vendor/v8/src/ia32/assembler-ia32-inl.h +46 -16
  147. data/vendor/v8/src/ia32/assembler-ia32.cc +131 -61
  148. data/vendor/v8/src/ia32/assembler-ia32.h +115 -57
  149. data/vendor/v8/src/ia32/builtins-ia32.cc +99 -5
  150. data/vendor/v8/src/ia32/code-stubs-ia32.cc +787 -495
  151. data/vendor/v8/src/ia32/code-stubs-ia32.h +10 -100
  152. data/vendor/v8/src/ia32/codegen-ia32.cc +227 -23
  153. data/vendor/v8/src/ia32/codegen-ia32.h +14 -0
  154. data/vendor/v8/src/ia32/deoptimizer-ia32.cc +428 -87
  155. data/vendor/v8/src/ia32/disasm-ia32.cc +28 -1
  156. data/vendor/v8/src/ia32/frames-ia32.h +6 -16
  157. data/vendor/v8/src/ia32/full-codegen-ia32.cc +280 -272
  158. data/vendor/v8/src/ia32/ic-ia32.cc +150 -250
  159. data/vendor/v8/src/ia32/lithium-codegen-ia32.cc +1600 -517
  160. data/vendor/v8/src/ia32/lithium-codegen-ia32.h +90 -24
  161. data/vendor/v8/src/ia32/lithium-gap-resolver-ia32.cc +10 -6
  162. data/vendor/v8/src/ia32/lithium-gap-resolver-ia32.h +2 -2
  163. data/vendor/v8/src/ia32/lithium-ia32.cc +405 -302
  164. data/vendor/v8/src/ia32/lithium-ia32.h +526 -271
  165. data/vendor/v8/src/ia32/macro-assembler-ia32.cc +378 -119
  166. data/vendor/v8/src/ia32/macro-assembler-ia32.h +62 -28
  167. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.cc +43 -30
  168. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.h +2 -10
  169. data/vendor/v8/src/ia32/stub-cache-ia32.cc +492 -678
  170. data/vendor/v8/src/ic-inl.h +9 -4
  171. data/vendor/v8/src/ic.cc +836 -923
  172. data/vendor/v8/src/ic.h +228 -247
  173. data/vendor/v8/src/incremental-marking-inl.h +26 -30
  174. data/vendor/v8/src/incremental-marking.cc +276 -248
  175. data/vendor/v8/src/incremental-marking.h +29 -37
  176. data/vendor/v8/src/interface.cc +34 -25
  177. data/vendor/v8/src/interface.h +69 -25
  178. data/vendor/v8/src/interpreter-irregexp.cc +2 -2
  179. data/vendor/v8/src/isolate.cc +382 -76
  180. data/vendor/v8/src/isolate.h +109 -56
  181. data/vendor/v8/src/json-parser.h +217 -104
  182. data/vendor/v8/src/json-stringifier.h +745 -0
  183. data/vendor/v8/src/json.js +10 -132
  184. data/vendor/v8/src/jsregexp-inl.h +106 -0
  185. data/vendor/v8/src/jsregexp.cc +517 -285
  186. data/vendor/v8/src/jsregexp.h +145 -117
  187. data/vendor/v8/src/list-inl.h +35 -22
  188. data/vendor/v8/src/list.h +46 -19
  189. data/vendor/v8/src/lithium-allocator-inl.h +22 -2
  190. data/vendor/v8/src/lithium-allocator.cc +85 -70
  191. data/vendor/v8/src/lithium-allocator.h +21 -39
  192. data/vendor/v8/src/lithium.cc +259 -5
  193. data/vendor/v8/src/lithium.h +131 -32
  194. data/vendor/v8/src/liveedit-debugger.js +52 -3
  195. data/vendor/v8/src/liveedit.cc +393 -113
  196. data/vendor/v8/src/liveedit.h +7 -3
  197. data/vendor/v8/src/log-utils.cc +4 -2
  198. data/vendor/v8/src/log.cc +170 -140
  199. data/vendor/v8/src/log.h +62 -11
  200. data/vendor/v8/src/macro-assembler.h +17 -0
  201. data/vendor/v8/src/macros.py +2 -0
  202. data/vendor/v8/src/mark-compact-inl.h +3 -23
  203. data/vendor/v8/src/mark-compact.cc +801 -830
  204. data/vendor/v8/src/mark-compact.h +154 -47
  205. data/vendor/v8/src/marking-thread.cc +85 -0
  206. data/vendor/v8/src/{inspector.cc → marking-thread.h} +32 -24
  207. data/vendor/v8/src/math.js +12 -18
  208. data/vendor/v8/src/messages.cc +18 -8
  209. data/vendor/v8/src/messages.js +314 -261
  210. data/vendor/v8/src/mips/assembler-mips-inl.h +58 -6
  211. data/vendor/v8/src/mips/assembler-mips.cc +92 -75
  212. data/vendor/v8/src/mips/assembler-mips.h +54 -60
  213. data/vendor/v8/src/mips/builtins-mips.cc +116 -17
  214. data/vendor/v8/src/mips/code-stubs-mips.cc +919 -556
  215. data/vendor/v8/src/mips/code-stubs-mips.h +22 -131
  216. data/vendor/v8/src/mips/codegen-mips.cc +281 -6
  217. data/vendor/v8/src/mips/codegen-mips.h +22 -0
  218. data/vendor/v8/src/mips/constants-mips.cc +2 -0
  219. data/vendor/v8/src/mips/constants-mips.h +12 -2
  220. data/vendor/v8/src/mips/deoptimizer-mips.cc +286 -50
  221. data/vendor/v8/src/mips/disasm-mips.cc +13 -0
  222. data/vendor/v8/src/mips/full-codegen-mips.cc +297 -284
  223. data/vendor/v8/src/mips/ic-mips.cc +182 -263
  224. data/vendor/v8/src/mips/lithium-codegen-mips.cc +1208 -556
  225. data/vendor/v8/src/mips/lithium-codegen-mips.h +72 -19
  226. data/vendor/v8/src/mips/lithium-gap-resolver-mips.cc +9 -2
  227. data/vendor/v8/src/mips/lithium-mips.cc +290 -302
  228. data/vendor/v8/src/mips/lithium-mips.h +463 -266
  229. data/vendor/v8/src/mips/macro-assembler-mips.cc +208 -115
  230. data/vendor/v8/src/mips/macro-assembler-mips.h +67 -24
  231. data/vendor/v8/src/mips/regexp-macro-assembler-mips.cc +40 -25
  232. data/vendor/v8/src/mips/regexp-macro-assembler-mips.h +3 -9
  233. data/vendor/v8/src/mips/simulator-mips.cc +112 -40
  234. data/vendor/v8/src/mips/simulator-mips.h +5 -0
  235. data/vendor/v8/src/mips/stub-cache-mips.cc +502 -884
  236. data/vendor/v8/src/mirror-debugger.js +157 -30
  237. data/vendor/v8/src/mksnapshot.cc +88 -14
  238. data/vendor/v8/src/object-observe.js +235 -0
  239. data/vendor/v8/src/objects-debug.cc +178 -176
  240. data/vendor/v8/src/objects-inl.h +1333 -486
  241. data/vendor/v8/src/objects-printer.cc +125 -43
  242. data/vendor/v8/src/objects-visiting-inl.h +578 -6
  243. data/vendor/v8/src/objects-visiting.cc +2 -2
  244. data/vendor/v8/src/objects-visiting.h +172 -79
  245. data/vendor/v8/src/objects.cc +3533 -2885
  246. data/vendor/v8/src/objects.h +1352 -1131
  247. data/vendor/v8/src/optimizing-compiler-thread.cc +152 -0
  248. data/vendor/v8/src/optimizing-compiler-thread.h +111 -0
  249. data/vendor/v8/src/parser.cc +390 -500
  250. data/vendor/v8/src/parser.h +45 -33
  251. data/vendor/v8/src/platform-cygwin.cc +10 -21
  252. data/vendor/v8/src/platform-freebsd.cc +36 -41
  253. data/vendor/v8/src/platform-linux.cc +160 -124
  254. data/vendor/v8/src/platform-macos.cc +30 -27
  255. data/vendor/v8/src/platform-nullos.cc +17 -1
  256. data/vendor/v8/src/platform-openbsd.cc +19 -50
  257. data/vendor/v8/src/platform-posix.cc +14 -0
  258. data/vendor/v8/src/platform-solaris.cc +20 -53
  259. data/vendor/v8/src/platform-win32.cc +49 -26
  260. data/vendor/v8/src/platform.h +40 -1
  261. data/vendor/v8/src/preparser.cc +8 -5
  262. data/vendor/v8/src/preparser.h +2 -2
  263. data/vendor/v8/src/prettyprinter.cc +16 -0
  264. data/vendor/v8/src/prettyprinter.h +2 -0
  265. data/vendor/v8/src/profile-generator-inl.h +1 -0
  266. data/vendor/v8/src/profile-generator.cc +209 -147
  267. data/vendor/v8/src/profile-generator.h +15 -12
  268. data/vendor/v8/src/property-details.h +46 -31
  269. data/vendor/v8/src/property.cc +27 -46
  270. data/vendor/v8/src/property.h +163 -83
  271. data/vendor/v8/src/proxy.js +7 -2
  272. data/vendor/v8/src/regexp-macro-assembler-irregexp.cc +4 -13
  273. data/vendor/v8/src/regexp-macro-assembler-irregexp.h +1 -2
  274. data/vendor/v8/src/regexp-macro-assembler-tracer.cc +1 -11
  275. data/vendor/v8/src/regexp-macro-assembler-tracer.h +0 -1
  276. data/vendor/v8/src/regexp-macro-assembler.cc +31 -14
  277. data/vendor/v8/src/regexp-macro-assembler.h +14 -11
  278. data/vendor/v8/src/regexp-stack.cc +1 -0
  279. data/vendor/v8/src/regexp.js +9 -8
  280. data/vendor/v8/src/rewriter.cc +18 -7
  281. data/vendor/v8/src/runtime-profiler.cc +52 -43
  282. data/vendor/v8/src/runtime-profiler.h +0 -25
  283. data/vendor/v8/src/runtime.cc +2006 -2023
  284. data/vendor/v8/src/runtime.h +56 -49
  285. data/vendor/v8/src/safepoint-table.cc +12 -18
  286. data/vendor/v8/src/safepoint-table.h +11 -8
  287. data/vendor/v8/src/scanner.cc +1 -0
  288. data/vendor/v8/src/scanner.h +4 -10
  289. data/vendor/v8/src/scopeinfo.cc +35 -9
  290. data/vendor/v8/src/scopeinfo.h +64 -3
  291. data/vendor/v8/src/scopes.cc +251 -156
  292. data/vendor/v8/src/scopes.h +61 -27
  293. data/vendor/v8/src/serialize.cc +348 -396
  294. data/vendor/v8/src/serialize.h +125 -114
  295. data/vendor/v8/src/small-pointer-list.h +11 -11
  296. data/vendor/v8/src/{smart-array-pointer.h → smart-pointers.h} +64 -15
  297. data/vendor/v8/src/snapshot-common.cc +64 -15
  298. data/vendor/v8/src/snapshot-empty.cc +7 -1
  299. data/vendor/v8/src/snapshot.h +9 -2
  300. data/vendor/v8/src/spaces-inl.h +17 -0
  301. data/vendor/v8/src/spaces.cc +477 -183
  302. data/vendor/v8/src/spaces.h +238 -58
  303. data/vendor/v8/src/splay-tree-inl.h +8 -7
  304. data/vendor/v8/src/splay-tree.h +24 -10
  305. data/vendor/v8/src/store-buffer.cc +12 -5
  306. data/vendor/v8/src/store-buffer.h +2 -4
  307. data/vendor/v8/src/string-search.h +22 -6
  308. data/vendor/v8/src/string-stream.cc +11 -8
  309. data/vendor/v8/src/string.js +47 -15
  310. data/vendor/v8/src/stub-cache.cc +461 -224
  311. data/vendor/v8/src/stub-cache.h +164 -102
  312. data/vendor/v8/src/sweeper-thread.cc +105 -0
  313. data/vendor/v8/src/sweeper-thread.h +81 -0
  314. data/vendor/v8/src/token.h +1 -0
  315. data/vendor/v8/src/transitions-inl.h +220 -0
  316. data/vendor/v8/src/transitions.cc +160 -0
  317. data/vendor/v8/src/transitions.h +207 -0
  318. data/vendor/v8/src/type-info.cc +182 -181
  319. data/vendor/v8/src/type-info.h +31 -19
  320. data/vendor/v8/src/unicode-inl.h +62 -106
  321. data/vendor/v8/src/unicode.cc +57 -67
  322. data/vendor/v8/src/unicode.h +45 -91
  323. data/vendor/v8/src/uri.js +57 -29
  324. data/vendor/v8/src/utils.h +105 -5
  325. data/vendor/v8/src/v8-counters.cc +54 -11
  326. data/vendor/v8/src/v8-counters.h +134 -19
  327. data/vendor/v8/src/v8.cc +29 -29
  328. data/vendor/v8/src/v8.h +1 -0
  329. data/vendor/v8/src/v8conversions.cc +26 -22
  330. data/vendor/v8/src/v8globals.h +56 -43
  331. data/vendor/v8/src/v8natives.js +83 -30
  332. data/vendor/v8/src/v8threads.cc +42 -21
  333. data/vendor/v8/src/v8threads.h +4 -1
  334. data/vendor/v8/src/v8utils.cc +9 -93
  335. data/vendor/v8/src/v8utils.h +37 -33
  336. data/vendor/v8/src/variables.cc +6 -3
  337. data/vendor/v8/src/variables.h +6 -13
  338. data/vendor/v8/src/version.cc +2 -2
  339. data/vendor/v8/src/vm-state-inl.h +11 -0
  340. data/vendor/v8/src/x64/assembler-x64-inl.h +39 -8
  341. data/vendor/v8/src/x64/assembler-x64.cc +78 -64
  342. data/vendor/v8/src/x64/assembler-x64.h +38 -33
  343. data/vendor/v8/src/x64/builtins-x64.cc +105 -7
  344. data/vendor/v8/src/x64/code-stubs-x64.cc +790 -413
  345. data/vendor/v8/src/x64/code-stubs-x64.h +10 -106
  346. data/vendor/v8/src/x64/codegen-x64.cc +210 -8
  347. data/vendor/v8/src/x64/codegen-x64.h +20 -1
  348. data/vendor/v8/src/x64/deoptimizer-x64.cc +336 -75
  349. data/vendor/v8/src/x64/disasm-x64.cc +15 -0
  350. data/vendor/v8/src/x64/frames-x64.h +0 -14
  351. data/vendor/v8/src/x64/full-codegen-x64.cc +293 -270
  352. data/vendor/v8/src/x64/ic-x64.cc +153 -251
  353. data/vendor/v8/src/x64/lithium-codegen-x64.cc +1379 -531
  354. data/vendor/v8/src/x64/lithium-codegen-x64.h +67 -23
  355. data/vendor/v8/src/x64/lithium-gap-resolver-x64.cc +2 -2
  356. data/vendor/v8/src/x64/lithium-x64.cc +349 -289
  357. data/vendor/v8/src/x64/lithium-x64.h +460 -250
  358. data/vendor/v8/src/x64/macro-assembler-x64.cc +350 -177
  359. data/vendor/v8/src/x64/macro-assembler-x64.h +67 -49
  360. data/vendor/v8/src/x64/regexp-macro-assembler-x64.cc +46 -33
  361. data/vendor/v8/src/x64/regexp-macro-assembler-x64.h +2 -3
  362. data/vendor/v8/src/x64/stub-cache-x64.cc +484 -653
  363. data/vendor/v8/src/zone-inl.h +9 -27
  364. data/vendor/v8/src/zone.cc +5 -5
  365. data/vendor/v8/src/zone.h +53 -27
  366. data/vendor/v8/test/benchmarks/testcfg.py +5 -0
  367. data/vendor/v8/test/cctest/cctest.cc +4 -0
  368. data/vendor/v8/test/cctest/cctest.gyp +3 -1
  369. data/vendor/v8/test/cctest/cctest.h +57 -9
  370. data/vendor/v8/test/cctest/cctest.status +15 -15
  371. data/vendor/v8/test/cctest/test-accessors.cc +26 -0
  372. data/vendor/v8/test/cctest/test-alloc.cc +22 -30
  373. data/vendor/v8/test/cctest/test-api.cc +1943 -314
  374. data/vendor/v8/test/cctest/test-assembler-arm.cc +133 -13
  375. data/vendor/v8/test/cctest/test-assembler-ia32.cc +1 -1
  376. data/vendor/v8/test/cctest/test-assembler-mips.cc +12 -0
  377. data/vendor/v8/test/cctest/test-ast.cc +4 -2
  378. data/vendor/v8/test/cctest/test-compiler.cc +61 -29
  379. data/vendor/v8/test/cctest/test-dataflow.cc +2 -2
  380. data/vendor/v8/test/cctest/test-debug.cc +212 -33
  381. data/vendor/v8/test/cctest/test-decls.cc +257 -11
  382. data/vendor/v8/test/cctest/test-dictionary.cc +24 -10
  383. data/vendor/v8/test/cctest/test-disasm-arm.cc +118 -1
  384. data/vendor/v8/test/cctest/test-disasm-ia32.cc +3 -2
  385. data/vendor/v8/test/cctest/test-flags.cc +14 -1
  386. data/vendor/v8/test/cctest/test-func-name-inference.cc +7 -4
  387. data/vendor/v8/test/cctest/test-global-object.cc +51 -0
  388. data/vendor/v8/test/cctest/test-hashing.cc +32 -23
  389. data/vendor/v8/test/cctest/test-heap-profiler.cc +131 -77
  390. data/vendor/v8/test/cctest/test-heap.cc +1084 -143
  391. data/vendor/v8/test/cctest/test-list.cc +1 -1
  392. data/vendor/v8/test/cctest/test-liveedit.cc +3 -2
  393. data/vendor/v8/test/cctest/test-lockers.cc +12 -13
  394. data/vendor/v8/test/cctest/test-log.cc +10 -8
  395. data/vendor/v8/test/cctest/test-macro-assembler-x64.cc +2 -2
  396. data/vendor/v8/test/cctest/test-mark-compact.cc +44 -22
  397. data/vendor/v8/test/cctest/test-object-observe.cc +434 -0
  398. data/vendor/v8/test/cctest/test-parsing.cc +86 -39
  399. data/vendor/v8/test/cctest/test-platform-linux.cc +6 -0
  400. data/vendor/v8/test/cctest/test-platform-win32.cc +7 -0
  401. data/vendor/v8/test/cctest/test-random.cc +5 -4
  402. data/vendor/v8/test/cctest/test-regexp.cc +137 -101
  403. data/vendor/v8/test/cctest/test-serialize.cc +150 -230
  404. data/vendor/v8/test/cctest/test-sockets.cc +1 -1
  405. data/vendor/v8/test/cctest/test-spaces.cc +139 -0
  406. data/vendor/v8/test/cctest/test-strings.cc +736 -74
  407. data/vendor/v8/test/cctest/test-thread-termination.cc +10 -11
  408. data/vendor/v8/test/cctest/test-threads.cc +4 -4
  409. data/vendor/v8/test/cctest/test-utils.cc +16 -0
  410. data/vendor/v8/test/cctest/test-weakmaps.cc +7 -3
  411. data/vendor/v8/test/cctest/testcfg.py +64 -5
  412. data/vendor/v8/test/es5conform/testcfg.py +5 -0
  413. data/vendor/v8/test/message/message.status +1 -1
  414. data/vendor/v8/test/message/overwritten-builtins.out +3 -0
  415. data/vendor/v8/test/message/testcfg.py +89 -8
  416. data/vendor/v8/test/message/try-catch-finally-no-message.out +26 -26
  417. data/vendor/v8/test/mjsunit/accessor-map-sharing.js +18 -2
  418. data/vendor/v8/test/mjsunit/allocation-site-info.js +126 -0
  419. data/vendor/v8/test/mjsunit/array-bounds-check-removal.js +62 -1
  420. data/vendor/v8/test/mjsunit/array-iteration.js +1 -1
  421. data/vendor/v8/test/mjsunit/array-literal-transitions.js +2 -0
  422. data/vendor/v8/test/mjsunit/array-natives-elements.js +317 -0
  423. data/vendor/v8/test/mjsunit/array-reduce.js +8 -8
  424. data/vendor/v8/test/mjsunit/array-slice.js +12 -0
  425. data/vendor/v8/test/mjsunit/array-store-and-grow.js +4 -1
  426. data/vendor/v8/test/mjsunit/assert-opt-and-deopt.js +1 -1
  427. data/vendor/v8/test/mjsunit/bugs/bug-2337.js +53 -0
  428. data/vendor/v8/test/mjsunit/compare-known-objects-slow.js +69 -0
  429. data/vendor/v8/test/mjsunit/compiler/alloc-object-huge.js +3 -1
  430. data/vendor/v8/test/mjsunit/compiler/inline-accessors.js +368 -0
  431. data/vendor/v8/test/mjsunit/compiler/inline-arguments.js +87 -1
  432. data/vendor/v8/test/mjsunit/compiler/inline-closures.js +49 -0
  433. data/vendor/v8/test/mjsunit/compiler/inline-construct.js +55 -43
  434. data/vendor/v8/test/mjsunit/compiler/inline-literals.js +39 -0
  435. data/vendor/v8/test/mjsunit/compiler/multiply-add.js +69 -0
  436. data/vendor/v8/test/mjsunit/compiler/optimized-closures.js +57 -0
  437. data/vendor/v8/test/mjsunit/compiler/parallel-proto-change.js +44 -0
  438. data/vendor/v8/test/mjsunit/compiler/property-static.js +69 -0
  439. data/vendor/v8/test/mjsunit/compiler/proto-chain-constant.js +55 -0
  440. data/vendor/v8/test/mjsunit/compiler/proto-chain-load.js +44 -0
  441. data/vendor/v8/test/mjsunit/compiler/regress-gvn.js +3 -2
  442. data/vendor/v8/test/mjsunit/compiler/regress-or.js +6 -2
  443. data/vendor/v8/test/mjsunit/compiler/rotate.js +224 -0
  444. data/vendor/v8/test/mjsunit/compiler/uint32.js +173 -0
  445. data/vendor/v8/test/mjsunit/count-based-osr.js +2 -1
  446. data/vendor/v8/test/mjsunit/d8-os.js +3 -3
  447. data/vendor/v8/test/mjsunit/date-parse.js +3 -0
  448. data/vendor/v8/test/mjsunit/date.js +22 -0
  449. data/vendor/v8/test/mjsunit/debug-break-inline.js +1 -0
  450. data/vendor/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js +22 -12
  451. data/vendor/v8/test/mjsunit/debug-evaluate-locals-optimized.js +21 -10
  452. data/vendor/v8/test/mjsunit/debug-liveedit-compile-error.js +60 -0
  453. data/vendor/v8/test/mjsunit/debug-liveedit-double-call.js +142 -0
  454. data/vendor/v8/test/mjsunit/debug-liveedit-literals.js +94 -0
  455. data/vendor/v8/test/mjsunit/debug-liveedit-restart-frame.js +153 -0
  456. data/vendor/v8/test/mjsunit/debug-multiple-breakpoints.js +1 -1
  457. data/vendor/v8/test/mjsunit/debug-script-breakpoints-closure.js +67 -0
  458. data/vendor/v8/test/mjsunit/debug-script-breakpoints-nested.js +82 -0
  459. data/vendor/v8/test/mjsunit/debug-script.js +4 -2
  460. data/vendor/v8/test/mjsunit/debug-set-variable-value.js +308 -0
  461. data/vendor/v8/test/mjsunit/debug-stepout-scope-part1.js +190 -0
  462. data/vendor/v8/test/mjsunit/debug-stepout-scope-part2.js +83 -0
  463. data/vendor/v8/test/mjsunit/debug-stepout-scope-part3.js +80 -0
  464. data/vendor/v8/test/mjsunit/debug-stepout-scope-part4.js +80 -0
  465. data/vendor/v8/test/mjsunit/debug-stepout-scope-part5.js +77 -0
  466. data/vendor/v8/test/mjsunit/debug-stepout-scope-part6.js +79 -0
  467. data/vendor/v8/test/mjsunit/debug-stepout-scope-part7.js +79 -0
  468. data/vendor/v8/test/mjsunit/{debug-stepout-scope.js → debug-stepout-scope-part8.js} +0 -189
  469. data/vendor/v8/test/mjsunit/delete-non-configurable.js +74 -0
  470. data/vendor/v8/test/mjsunit/deopt-minus-zero.js +56 -0
  471. data/vendor/v8/test/mjsunit/elements-kind.js +6 -4
  472. data/vendor/v8/test/mjsunit/elements-length-no-holey.js +33 -0
  473. data/vendor/v8/test/mjsunit/elements-transition-hoisting.js +46 -19
  474. data/vendor/v8/test/mjsunit/error-accessors.js +54 -0
  475. data/vendor/v8/test/mjsunit/error-constructors.js +1 -14
  476. data/vendor/v8/test/mjsunit/error-tostring.js +8 -0
  477. data/vendor/v8/test/mjsunit/eval-stack-trace.js +204 -0
  478. data/vendor/v8/test/mjsunit/external-array.js +364 -1
  479. data/vendor/v8/test/mjsunit/fast-array-length.js +37 -0
  480. data/vendor/v8/test/mjsunit/fast-non-keyed.js +113 -0
  481. data/vendor/v8/test/mjsunit/fast-prototype.js +117 -0
  482. data/vendor/v8/test/mjsunit/function-call.js +14 -18
  483. data/vendor/v8/test/mjsunit/fuzz-natives-part1.js +230 -0
  484. data/vendor/v8/test/mjsunit/fuzz-natives-part2.js +229 -0
  485. data/vendor/v8/test/mjsunit/fuzz-natives-part3.js +229 -0
  486. data/vendor/v8/test/mjsunit/{fuzz-natives.js → fuzz-natives-part4.js} +12 -2
  487. data/vendor/v8/test/mjsunit/generated-transition-stub.js +218 -0
  488. data/vendor/v8/test/mjsunit/greedy.js +1 -1
  489. data/vendor/v8/test/mjsunit/harmony/block-conflicts.js +2 -1
  490. data/vendor/v8/test/mjsunit/harmony/block-let-crankshaft.js +1 -1
  491. data/vendor/v8/test/mjsunit/harmony/collections.js +69 -11
  492. data/vendor/v8/test/mjsunit/harmony/debug-blockscopes.js +2 -2
  493. data/vendor/v8/test/mjsunit/harmony/module-linking.js +180 -3
  494. data/vendor/v8/test/mjsunit/harmony/module-parsing.js +31 -0
  495. data/vendor/v8/test/mjsunit/harmony/module-recompile.js +87 -0
  496. data/vendor/v8/test/mjsunit/harmony/module-resolution.js +15 -2
  497. data/vendor/v8/test/mjsunit/harmony/object-observe.js +1056 -0
  498. data/vendor/v8/test/mjsunit/harmony/proxies-json.js +178 -0
  499. data/vendor/v8/test/mjsunit/harmony/proxies.js +25 -10
  500. data/vendor/v8/test/mjsunit/json-parser-recursive.js +33 -0
  501. data/vendor/v8/test/mjsunit/json-stringify-recursive.js +52 -0
  502. data/vendor/v8/test/mjsunit/json.js +38 -2
  503. data/vendor/v8/test/mjsunit/json2.js +153 -0
  504. data/vendor/v8/test/mjsunit/limit-locals.js +5 -4
  505. data/vendor/v8/test/mjsunit/manual-parallel-recompile.js +79 -0
  506. data/vendor/v8/test/mjsunit/math-exp-precision.js +64 -0
  507. data/vendor/v8/test/mjsunit/math-floor-negative.js +59 -0
  508. data/vendor/v8/test/mjsunit/math-floor-of-div-minus-zero.js +41 -0
  509. data/vendor/v8/test/mjsunit/math-floor-of-div-nosudiv.js +288 -0
  510. data/vendor/v8/test/mjsunit/math-floor-of-div.js +81 -9
  511. data/vendor/v8/test/mjsunit/{math-floor.js → math-floor-part1.js} +1 -72
  512. data/vendor/v8/test/mjsunit/math-floor-part2.js +76 -0
  513. data/vendor/v8/test/mjsunit/math-floor-part3.js +78 -0
  514. data/vendor/v8/test/mjsunit/math-floor-part4.js +76 -0
  515. data/vendor/v8/test/mjsunit/mirror-object.js +43 -9
  516. data/vendor/v8/test/mjsunit/mjsunit.js +1 -1
  517. data/vendor/v8/test/mjsunit/mjsunit.status +52 -27
  518. data/vendor/v8/test/mjsunit/mul-exhaustive-part1.js +491 -0
  519. data/vendor/v8/test/mjsunit/mul-exhaustive-part10.js +470 -0
  520. data/vendor/v8/test/mjsunit/mul-exhaustive-part2.js +525 -0
  521. data/vendor/v8/test/mjsunit/mul-exhaustive-part3.js +532 -0
  522. data/vendor/v8/test/mjsunit/mul-exhaustive-part4.js +509 -0
  523. data/vendor/v8/test/mjsunit/mul-exhaustive-part5.js +505 -0
  524. data/vendor/v8/test/mjsunit/mul-exhaustive-part6.js +554 -0
  525. data/vendor/v8/test/mjsunit/mul-exhaustive-part7.js +497 -0
  526. data/vendor/v8/test/mjsunit/mul-exhaustive-part8.js +526 -0
  527. data/vendor/v8/test/mjsunit/mul-exhaustive-part9.js +533 -0
  528. data/vendor/v8/test/mjsunit/new-function.js +34 -0
  529. data/vendor/v8/test/mjsunit/numops-fuzz-part1.js +1172 -0
  530. data/vendor/v8/test/mjsunit/numops-fuzz-part2.js +1178 -0
  531. data/vendor/v8/test/mjsunit/numops-fuzz-part3.js +1178 -0
  532. data/vendor/v8/test/mjsunit/numops-fuzz-part4.js +1177 -0
  533. data/vendor/v8/test/mjsunit/object-define-property.js +107 -2
  534. data/vendor/v8/test/mjsunit/override-read-only-property.js +6 -4
  535. data/vendor/v8/test/mjsunit/packed-elements.js +2 -2
  536. data/vendor/v8/test/mjsunit/parse-int-float.js +4 -4
  537. data/vendor/v8/test/mjsunit/pixel-array-rounding.js +1 -1
  538. data/vendor/v8/test/mjsunit/readonly.js +228 -0
  539. data/vendor/v8/test/mjsunit/regexp-capture-3.js +16 -18
  540. data/vendor/v8/test/mjsunit/regexp-capture.js +2 -0
  541. data/vendor/v8/test/mjsunit/regexp-global.js +122 -0
  542. data/vendor/v8/test/mjsunit/regexp-results-cache.js +78 -0
  543. data/vendor/v8/test/mjsunit/regress/regress-1117.js +12 -3
  544. data/vendor/v8/test/mjsunit/regress/regress-1118.js +1 -1
  545. data/vendor/v8/test/mjsunit/regress/regress-115100.js +36 -0
  546. data/vendor/v8/test/mjsunit/regress/regress-1199637.js +1 -3
  547. data/vendor/v8/test/mjsunit/regress/regress-121407.js +1 -1
  548. data/vendor/v8/test/mjsunit/regress/regress-131923.js +30 -0
  549. data/vendor/v8/test/mjsunit/regress/regress-131994.js +70 -0
  550. data/vendor/v8/test/mjsunit/regress/regress-133211.js +35 -0
  551. data/vendor/v8/test/mjsunit/regress/regress-133211b.js +39 -0
  552. data/vendor/v8/test/mjsunit/regress/regress-136048.js +34 -0
  553. data/vendor/v8/test/mjsunit/regress/regress-137768.js +73 -0
  554. data/vendor/v8/test/mjsunit/regress/regress-143967.js +34 -0
  555. data/vendor/v8/test/mjsunit/regress/regress-145201.js +107 -0
  556. data/vendor/v8/test/mjsunit/regress/regress-147497.js +45 -0
  557. data/vendor/v8/test/mjsunit/regress/regress-148378.js +38 -0
  558. data/vendor/v8/test/mjsunit/regress/regress-1563.js +1 -1
  559. data/vendor/v8/test/mjsunit/regress/regress-1591.js +48 -0
  560. data/vendor/v8/test/mjsunit/regress/regress-164442.js +45 -0
  561. data/vendor/v8/test/mjsunit/regress/regress-165637.js +61 -0
  562. data/vendor/v8/test/mjsunit/regress/regress-166379.js +39 -0
  563. data/vendor/v8/test/mjsunit/regress/regress-166553.js +33 -0
  564. data/vendor/v8/test/mjsunit/regress/regress-1692.js +1 -1
  565. data/vendor/v8/test/mjsunit/regress/regress-171641.js +40 -0
  566. data/vendor/v8/test/mjsunit/regress/regress-1980.js +1 -1
  567. data/vendor/v8/test/mjsunit/regress/regress-2073.js +99 -0
  568. data/vendor/v8/test/mjsunit/regress/regress-2119.js +36 -0
  569. data/vendor/v8/test/mjsunit/regress/regress-2156.js +39 -0
  570. data/vendor/v8/test/mjsunit/regress/regress-2163.js +70 -0
  571. data/vendor/v8/test/mjsunit/regress/regress-2170.js +58 -0
  572. data/vendor/v8/test/mjsunit/regress/regress-2172.js +35 -0
  573. data/vendor/v8/test/mjsunit/regress/regress-2185-2.js +145 -0
  574. data/vendor/v8/test/mjsunit/regress/regress-2185.js +38 -0
  575. data/vendor/v8/test/mjsunit/regress/regress-2186.js +49 -0
  576. data/vendor/v8/test/mjsunit/regress/regress-2193.js +58 -0
  577. data/vendor/v8/test/mjsunit/regress/regress-2219.js +32 -0
  578. data/vendor/v8/test/mjsunit/regress/regress-2225.js +65 -0
  579. data/vendor/v8/test/mjsunit/regress/regress-2226.js +36 -0
  580. data/vendor/v8/test/mjsunit/regress/regress-2234.js +41 -0
  581. data/vendor/v8/test/mjsunit/regress/regress-2243.js +31 -0
  582. data/vendor/v8/test/mjsunit/regress/regress-2249.js +33 -0
  583. data/vendor/v8/test/mjsunit/regress/regress-2250.js +68 -0
  584. data/vendor/v8/test/mjsunit/regress/regress-2261.js +113 -0
  585. data/vendor/v8/test/mjsunit/regress/regress-2263.js +30 -0
  586. data/vendor/v8/test/mjsunit/regress/regress-2284.js +32 -0
  587. data/vendor/v8/test/mjsunit/regress/regress-2285.js +32 -0
  588. data/vendor/v8/test/mjsunit/regress/regress-2286.js +32 -0
  589. data/vendor/v8/test/mjsunit/regress/regress-2289.js +34 -0
  590. data/vendor/v8/test/mjsunit/regress/regress-2291.js +36 -0
  591. data/vendor/v8/test/mjsunit/regress/regress-2294.js +70 -0
  592. data/vendor/v8/test/mjsunit/regress/regress-2296.js +40 -0
  593. data/vendor/v8/test/mjsunit/regress/regress-2315.js +40 -0
  594. data/vendor/v8/test/mjsunit/regress/regress-2318.js +66 -0
  595. data/vendor/v8/test/mjsunit/regress/regress-2322.js +36 -0
  596. data/vendor/v8/test/mjsunit/regress/regress-2326.js +54 -0
  597. data/vendor/v8/test/mjsunit/regress/regress-2336.js +53 -0
  598. data/vendor/v8/test/mjsunit/regress/regress-2339.js +59 -0
  599. data/vendor/v8/test/mjsunit/regress/regress-2346.js +123 -0
  600. data/vendor/v8/test/mjsunit/regress/regress-2373.js +29 -0
  601. data/vendor/v8/test/mjsunit/regress/regress-2374.js +33 -0
  602. data/vendor/v8/test/mjsunit/regress/regress-2398.js +41 -0
  603. data/vendor/v8/test/mjsunit/regress/regress-2410.js +36 -0
  604. data/vendor/v8/test/mjsunit/regress/regress-2416.js +75 -0
  605. data/vendor/v8/test/mjsunit/regress/regress-2419.js +37 -0
  606. data/vendor/v8/test/mjsunit/regress/regress-2433.js +36 -0
  607. data/vendor/v8/test/mjsunit/regress/regress-2437.js +156 -0
  608. data/vendor/v8/test/mjsunit/regress/regress-2438.js +52 -0
  609. data/vendor/v8/test/mjsunit/regress/regress-2443.js +129 -0
  610. data/vendor/v8/test/mjsunit/regress/regress-2444.js +120 -0
  611. data/vendor/v8/test/mjsunit/regress/regress-2489.js +50 -0
  612. data/vendor/v8/test/mjsunit/regress/regress-2499.js +40 -0
  613. data/vendor/v8/test/mjsunit/regress/regress-334.js +1 -1
  614. data/vendor/v8/test/mjsunit/regress/regress-492.js +39 -1
  615. data/vendor/v8/test/mjsunit/regress/regress-builtin-array-op.js +38 -0
  616. data/vendor/v8/test/mjsunit/regress/regress-cnlt-elements.js +43 -0
  617. data/vendor/v8/test/mjsunit/regress/regress-cnlt-enum-indices.js +45 -0
  618. data/vendor/v8/test/mjsunit/regress/regress-cntl-descriptors-enum.js +46 -0
  619. data/vendor/v8/test/mjsunit/regress/regress-convert-enum.js +60 -0
  620. data/vendor/v8/test/mjsunit/regress/regress-convert-enum2.js +46 -0
  621. data/vendor/v8/test/mjsunit/regress/regress-convert-transition.js +40 -0
  622. data/vendor/v8/test/mjsunit/regress/regress-crbug-119926.js +3 -1
  623. data/vendor/v8/test/mjsunit/regress/regress-crbug-125148.js +90 -0
  624. data/vendor/v8/test/mjsunit/regress/regress-crbug-134055.js +63 -0
  625. data/vendor/v8/test/mjsunit/regress/regress-crbug-134609.js +59 -0
  626. data/vendor/v8/test/mjsunit/regress/regress-crbug-135008.js +45 -0
  627. data/vendor/v8/test/mjsunit/regress/regress-crbug-135066.js +55 -0
  628. data/vendor/v8/test/mjsunit/regress/regress-crbug-137689.js +47 -0
  629. data/vendor/v8/test/mjsunit/regress/regress-crbug-138887.js +48 -0
  630. data/vendor/v8/test/mjsunit/regress/regress-crbug-140083.js +44 -0
  631. data/vendor/v8/test/mjsunit/regress/regress-crbug-142087.js +38 -0
  632. data/vendor/v8/test/mjsunit/regress/regress-crbug-142218.js +44 -0
  633. data/vendor/v8/test/mjsunit/regress/regress-crbug-145961.js +39 -0
  634. data/vendor/v8/test/mjsunit/regress/regress-crbug-146910.js +33 -0
  635. data/vendor/v8/test/mjsunit/regress/regress-crbug-147475.js +48 -0
  636. data/vendor/v8/test/mjsunit/regress/regress-crbug-148376.js +35 -0
  637. data/vendor/v8/test/mjsunit/regress/regress-crbug-150545.js +53 -0
  638. data/vendor/v8/test/mjsunit/regress/regress-crbug-150729.js +39 -0
  639. data/vendor/v8/test/mjsunit/regress/regress-crbug-157019.js +54 -0
  640. data/vendor/v8/test/mjsunit/regress/regress-crbug-157520.js +38 -0
  641. data/vendor/v8/test/mjsunit/regress/regress-crbug-158185.js +39 -0
  642. data/vendor/v8/test/mjsunit/regress/regress-crbug-160010.js +35 -0
  643. data/vendor/v8/test/mjsunit/regress/regress-crbug-162085.js +71 -0
  644. data/vendor/v8/test/mjsunit/regress/regress-crbug-168545.js +34 -0
  645. data/vendor/v8/test/mjsunit/regress/regress-crbug-170856.js +33 -0
  646. data/vendor/v8/test/mjsunit/regress/regress-crbug-172345.js +34 -0
  647. data/vendor/v8/test/mjsunit/regress/regress-crbug-173974.js +36 -0
  648. data/vendor/v8/test/mjsunit/regress/regress-crbug-18639.js +9 -5
  649. data/vendor/v8/test/mjsunit/regress/regress-debug-code-recompilation.js +2 -1
  650. data/vendor/v8/test/mjsunit/regress/regress-deep-proto.js +45 -0
  651. data/vendor/v8/test/mjsunit/regress/regress-delete-empty-double.js +40 -0
  652. data/vendor/v8/test/mjsunit/regress/regress-iteration-order.js +42 -0
  653. data/vendor/v8/test/mjsunit/regress/regress-json-stringify-gc.js +41 -0
  654. data/vendor/v8/test/mjsunit/regress/regress-latin-1.js +78 -0
  655. data/vendor/v8/test/mjsunit/regress/regress-load-elements.js +49 -0
  656. data/vendor/v8/test/mjsunit/regress/regress-observe-empty-double-array.js +38 -0
  657. data/vendor/v8/test/mjsunit/regress/regress-undefined-store-keyed-fast-element.js +37 -0
  658. data/vendor/v8/test/mjsunit/shift-for-integer-div.js +59 -0
  659. data/vendor/v8/test/mjsunit/stack-traces-gc.js +119 -0
  660. data/vendor/v8/test/mjsunit/stack-traces-overflow.js +122 -0
  661. data/vendor/v8/test/mjsunit/stack-traces.js +39 -1
  662. data/vendor/v8/test/mjsunit/str-to-num.js +7 -2
  663. data/vendor/v8/test/mjsunit/strict-mode.js +36 -11
  664. data/vendor/v8/test/mjsunit/string-charcodeat.js +3 -0
  665. data/vendor/v8/test/mjsunit/string-natives.js +72 -0
  666. data/vendor/v8/test/mjsunit/string-split.js +17 -0
  667. data/vendor/v8/test/mjsunit/testcfg.py +76 -6
  668. data/vendor/v8/test/mjsunit/tools/tickprocessor.js +4 -1
  669. data/vendor/v8/test/mjsunit/try-finally-continue.js +72 -0
  670. data/vendor/v8/test/mjsunit/typed-array-slice.js +61 -0
  671. data/vendor/v8/test/mjsunit/unbox-double-arrays.js +2 -0
  672. data/vendor/v8/test/mjsunit/uri.js +12 -0
  673. data/vendor/v8/test/mjsunit/with-readonly.js +4 -2
  674. data/vendor/v8/test/mozilla/mozilla.status +19 -113
  675. data/vendor/v8/test/mozilla/testcfg.py +122 -3
  676. data/vendor/v8/test/preparser/preparser.status +5 -0
  677. data/vendor/v8/test/preparser/strict-identifiers.pyt +1 -1
  678. data/vendor/v8/test/preparser/testcfg.py +101 -5
  679. data/vendor/v8/test/sputnik/sputnik.status +1 -1
  680. data/vendor/v8/test/sputnik/testcfg.py +5 -0
  681. data/vendor/v8/test/test262/README +2 -2
  682. data/vendor/v8/test/test262/test262.status +13 -36
  683. data/vendor/v8/test/test262/testcfg.py +102 -8
  684. data/vendor/v8/tools/android-build.sh +0 -0
  685. data/vendor/v8/tools/android-ll-prof.sh +69 -0
  686. data/vendor/v8/tools/android-run.py +109 -0
  687. data/vendor/v8/tools/android-sync.sh +105 -0
  688. data/vendor/v8/tools/bash-completion.sh +0 -0
  689. data/vendor/v8/tools/check-static-initializers.sh +0 -0
  690. data/vendor/v8/tools/common-includes.sh +15 -22
  691. data/vendor/v8/tools/disasm.py +4 -4
  692. data/vendor/v8/tools/fuzz-harness.sh +0 -0
  693. data/vendor/v8/tools/gen-postmortem-metadata.py +6 -8
  694. data/vendor/v8/tools/grokdump.py +404 -129
  695. data/vendor/v8/tools/gyp/v8.gyp +105 -43
  696. data/vendor/v8/tools/linux-tick-processor +5 -5
  697. data/vendor/v8/tools/ll_prof.py +75 -15
  698. data/vendor/v8/tools/merge-to-branch.sh +2 -2
  699. data/vendor/v8/tools/plot-timer-events +70 -0
  700. data/vendor/v8/tools/plot-timer-events.js +510 -0
  701. data/vendor/v8/tools/presubmit.py +1 -0
  702. data/vendor/v8/tools/push-to-trunk.sh +14 -4
  703. data/vendor/v8/tools/run-llprof.sh +69 -0
  704. data/vendor/v8/tools/run-tests.py +372 -0
  705. data/vendor/v8/tools/run-valgrind.py +1 -1
  706. data/vendor/v8/tools/status-file-converter.py +39 -0
  707. data/vendor/v8/tools/test-server.py +224 -0
  708. data/vendor/v8/tools/test-wrapper-gypbuild.py +13 -16
  709. data/vendor/v8/tools/test.py +10 -19
  710. data/vendor/v8/tools/testrunner/README +174 -0
  711. data/vendor/v8/tools/testrunner/__init__.py +26 -0
  712. data/vendor/v8/tools/testrunner/local/__init__.py +26 -0
  713. data/vendor/v8/tools/testrunner/local/commands.py +153 -0
  714. data/vendor/v8/tools/testrunner/local/execution.py +182 -0
  715. data/vendor/v8/tools/testrunner/local/old_statusfile.py +460 -0
  716. data/vendor/v8/tools/testrunner/local/progress.py +238 -0
  717. data/vendor/v8/tools/testrunner/local/statusfile.py +145 -0
  718. data/vendor/v8/tools/testrunner/local/testsuite.py +187 -0
  719. data/vendor/v8/tools/testrunner/local/utils.py +108 -0
  720. data/vendor/v8/tools/testrunner/local/verbose.py +99 -0
  721. data/vendor/v8/tools/testrunner/network/__init__.py +26 -0
  722. data/vendor/v8/tools/testrunner/network/distro.py +90 -0
  723. data/vendor/v8/tools/testrunner/network/endpoint.py +124 -0
  724. data/vendor/v8/tools/testrunner/network/network_execution.py +253 -0
  725. data/vendor/v8/tools/testrunner/network/perfdata.py +120 -0
  726. data/vendor/v8/tools/testrunner/objects/__init__.py +26 -0
  727. data/vendor/v8/tools/testrunner/objects/context.py +50 -0
  728. data/vendor/v8/tools/testrunner/objects/output.py +60 -0
  729. data/vendor/v8/tools/testrunner/objects/peer.py +80 -0
  730. data/vendor/v8/tools/testrunner/objects/testcase.py +83 -0
  731. data/vendor/v8/tools/testrunner/objects/workpacket.py +90 -0
  732. data/vendor/v8/tools/testrunner/server/__init__.py +26 -0
  733. data/vendor/v8/tools/testrunner/server/compression.py +111 -0
  734. data/vendor/v8/tools/testrunner/server/constants.py +51 -0
  735. data/vendor/v8/tools/testrunner/server/daemon.py +147 -0
  736. data/vendor/v8/tools/testrunner/server/local_handler.py +119 -0
  737. data/vendor/v8/tools/testrunner/server/main.py +245 -0
  738. data/vendor/v8/tools/testrunner/server/presence_handler.py +120 -0
  739. data/vendor/v8/tools/testrunner/server/signatures.py +63 -0
  740. data/vendor/v8/tools/testrunner/server/status_handler.py +112 -0
  741. data/vendor/v8/tools/testrunner/server/work_handler.py +150 -0
  742. data/vendor/v8/tools/tick-processor.html +168 -0
  743. data/vendor/v8/tools/tickprocessor-driver.js +5 -3
  744. data/vendor/v8/tools/tickprocessor.js +58 -15
  745. metadata +534 -30
  746. data/patches/add-freebsd9-and-freebsd10-to-gyp-GetFlavor.patch +0 -11
  747. data/patches/do-not-imply-vfp3-and-armv7.patch +0 -44
  748. data/patches/fPIC-on-x64.patch +0 -14
  749. data/vendor/v8/src/liveobjectlist-inl.h +0 -126
  750. data/vendor/v8/src/liveobjectlist.cc +0 -2631
  751. data/vendor/v8/src/liveobjectlist.h +0 -319
  752. data/vendor/v8/test/mjsunit/mul-exhaustive.js +0 -4629
  753. data/vendor/v8/test/mjsunit/numops-fuzz.js +0 -4609
  754. data/vendor/v8/test/mjsunit/regress/regress-1969.js +0 -5045
@@ -184,7 +184,9 @@ class Simulator {
   // architecture specification and is off by a 8 from the currently executing
   // instruction.
   void set_register(int reg, int32_t value);
+  void set_dw_register(int dreg, const int* dbl);
   int32_t get_register(int reg) const;
+  double get_double_from_register_pair(int reg);
   // Same for FPURegisters.
   void set_fpu_register(int fpureg, int32_t value);
   void set_fpu_register_float(int fpureg, float value);
@@ -214,6 +216,8 @@ class Simulator {
   // generated RegExp code with 7 parameters. This is a convenience function,
   // which sets up the simulator state and grabs the result on return.
   int32_t Call(byte* entry, int argument_count, ...);
+  // Alternative: call a 2-argument double function.
+  double CallFP(byte* entry, double d0, double d1);
 
   // Push an address onto the JS stack.
   uintptr_t PushAddress(uintptr_t address);
@@ -353,6 +357,7 @@ class Simulator {
   void GetFpArgs(double* x, int32_t* y);
   void SetFpResult(const double& result);
 
+  void CallInternal(byte* entry);
 
   // Architecture state.
   // Registers.
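
A note on the simulator hunk above: the new set_dw_register and get_double_from_register_pair accessors let the MIPS simulator treat a pair of 32-bit general-purpose registers as one 64-bit double. The stand-alone C++ sketch below only illustrates that bit-level reinterpretation; it is not code from this gem or from V8, and the little-endian word order is an assumption.

#include <cstdint>
#include <cstdio>
#include <cstring>

// Illustrative only: view two 32-bit words as the raw bits of one IEEE-754
// double, the way a register pair might be read back by a simulator.
// Assumes a little-endian layout (low word first).
double DoubleFromRegisterPair(int32_t lo, int32_t hi) {
  int32_t pair[2] = {lo, hi};
  double result;
  std::memcpy(&result, pair, sizeof(result));  // reinterpret bits, no numeric conversion
  return result;
}

// The opposite direction: split a double into the two words a register pair
// would hold.
void RegisterPairFromDouble(double value, int32_t* lo, int32_t* hi) {
  int32_t pair[2];
  std::memcpy(pair, &value, sizeof(value));
  *lo = pair[0];
  *hi = pair[1];
}

int main() {
  int32_t lo = 0;
  int32_t hi = 0;
  RegisterPairFromDouble(1.5, &lo, &hi);
  std::printf("1.5 -> lo=0x%08x hi=0x%08x -> %g\n",
              static_cast<uint32_t>(lo), static_cast<uint32_t>(hi),
              DoubleFromRegisterPair(lo, hi));
  return 0;
}

Round-tripping 1.5 this way prints lo=0x00000000 hi=0x3ff80000 on a little-endian host and recovers the same double.
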
@@ -270,11 +270,12 @@ void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                        int index,
                                                        Register prototype) {
   // Load the global or builtins object from the current context.
-  __ lw(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
-  // Load the global context from the global or builtins object.
   __ lw(prototype,
-        FieldMemOperand(prototype, GlobalObject::kGlobalContextOffset));
-  // Load the function from the global context.
+        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+  // Load the native context from the global or builtins object.
+  __ lw(prototype,
+        FieldMemOperand(prototype, GlobalObject::kNativeContextOffset));
+  // Load the function from the native context.
   __ lw(prototype, MemOperand(prototype, Context::SlotOffset(index)));
   // Load the initial map. The global functions all have initial maps.
   __ lw(prototype,
@@ -291,13 +292,14 @@ void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
     Label* miss) {
   Isolate* isolate = masm->isolate();
   // Check we're still in the same context.
-  __ lw(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
+  __ lw(prototype,
+        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
   ASSERT(!prototype.is(at));
-  __ li(at, isolate->global());
+  __ li(at, isolate->global_object());
   __ Branch(miss, ne, prototype, Operand(at));
   // Get the global function with the given index.
   Handle<JSFunction> function(
-      JSFunction::cast(isolate->global_context()->get(index)));
+      JSFunction::cast(isolate->native_context()->get(index)));
   // Load its initial map. The global functions all have initial maps.
   __ li(prototype, Handle<Map>(function->initial_map()));
   // Load the prototype from the initial map.
@@ -312,18 +314,23 @@ void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                             Register dst,
                                             Register src,
                                             Handle<JSObject> holder,
-                                            int index) {
-  // Adjust for the number of properties stored in the holder.
-  index -= holder->map()->inobject_properties();
-  if (index < 0) {
-    // Get the property straight out of the holder.
-    int offset = holder->map()->instance_size() + (index * kPointerSize);
+                                            PropertyIndex index) {
+  if (index.is_header_index()) {
+    int offset = index.header_index() * kPointerSize;
     __ lw(dst, FieldMemOperand(src, offset));
   } else {
-    // Calculate the offset into the properties array.
-    int offset = index * kPointerSize + FixedArray::kHeaderSize;
-    __ lw(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
-    __ lw(dst, FieldMemOperand(dst, offset));
+    // Adjust for the number of properties stored in the holder.
+    int slot = index.field_index() - holder->map()->inobject_properties();
+    if (slot < 0) {
+      // Get the property straight out of the holder.
+      int offset = holder->map()->instance_size() + (slot * kPointerSize);
+      __ lw(dst, FieldMemOperand(src, offset));
+    } else {
+      // Calculate the offset into the properties array.
+      int offset = slot * kPointerSize + FixedArray::kHeaderSize;
+      __ lw(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
+      __ lw(dst, FieldMemOperand(dst, offset));
+    }
   }
 }
 
@@ -422,21 +429,59 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
422
429
  Handle<JSObject> object,
423
430
  int index,
424
431
  Handle<Map> transition,
432
+ Handle<String> name,
425
433
  Register receiver_reg,
426
434
  Register name_reg,
427
- Register scratch,
435
+ Register scratch1,
436
+ Register scratch2,
428
437
  Label* miss_label) {
429
438
  // a0 : value.
430
439
  Label exit;
440
+
441
+ LookupResult lookup(masm->isolate());
442
+ object->Lookup(*name, &lookup);
443
+ if (lookup.IsFound() && (lookup.IsReadOnly() || !lookup.IsCacheable())) {
444
+ // In sloppy mode, we could just return the value and be done. However, we
445
+ // might be in strict mode, where we have to throw. Since we cannot tell,
446
+ // go into slow case unconditionally.
447
+ __ jmp(miss_label);
448
+ return;
449
+ }
450
+
431
451
  // Check that the map of the object hasn't changed.
432
452
  CompareMapMode mode = transition.is_null() ? ALLOW_ELEMENT_TRANSITION_MAPS
433
453
  : REQUIRE_EXACT_MAP;
434
- __ CheckMap(receiver_reg, scratch, Handle<Map>(object->map()), miss_label,
454
+ __ CheckMap(receiver_reg, scratch1, Handle<Map>(object->map()), miss_label,
435
455
  DO_SMI_CHECK, mode);
436
456
 
437
457
  // Perform global security token check if needed.
438
458
  if (object->IsJSGlobalProxy()) {
439
- __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
459
+ __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label);
460
+ }
461
+
462
+ // Check that we are allowed to write this.
463
+ if (!transition.is_null() && object->GetPrototype()->IsJSObject()) {
464
+ JSObject* holder;
465
+ if (lookup.IsFound()) {
466
+ holder = lookup.holder();
467
+ } else {
468
+ // Find the top object.
469
+ holder = *object;
470
+ do {
471
+ holder = JSObject::cast(holder->GetPrototype());
472
+ } while (holder->GetPrototype()->IsJSObject());
473
+ }
474
+ // We need an extra register, push
475
+ __ push(name_reg);
476
+ Label miss_pop, done_check;
477
+ CheckPrototypes(object, receiver_reg, Handle<JSObject>(holder), name_reg,
478
+ scratch1, scratch2, name, &miss_pop);
479
+ __ jmp(&done_check);
480
+ __ bind(&miss_pop);
481
+ __ pop(name_reg);
482
+ __ jmp(miss_label);
483
+ __ bind(&done_check);
484
+ __ pop(name_reg);
440
485
  }
441
486
 
442
487
  // Stub never generated for non-global objects that require access
@@ -459,14 +504,14 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
459
504
 
460
505
  if (!transition.is_null()) {
461
506
  // Update the map of the object.
462
- __ li(scratch, Operand(transition));
463
- __ sw(scratch, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
507
+ __ li(scratch1, Operand(transition));
508
+ __ sw(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
464
509
 
465
510
  // Update the write barrier for the map field and pass the now unused
466
511
  // name_reg as scratch register.
467
512
  __ RecordWriteField(receiver_reg,
468
513
  HeapObject::kMapOffset,
469
- scratch,
514
+ scratch1,
470
515
  name_reg,
471
516
  kRAHasNotBeenSaved,
472
517
  kDontSaveFPRegs,
@@ -485,7 +530,7 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
485
530
  __ sw(a0, FieldMemOperand(receiver_reg, offset));
486
531
 
487
532
  // Skip updating write barrier if storing a smi.
488
- __ JumpIfSmi(a0, &exit, scratch);
533
+ __ JumpIfSmi(a0, &exit, scratch1);
489
534
 
490
535
  // Update the write barrier for the array address.
491
536
  // Pass the now unused name_reg as a scratch register.
@@ -493,15 +538,16 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
493
538
  __ RecordWriteField(receiver_reg,
494
539
  offset,
495
540
  name_reg,
496
- scratch,
541
+ scratch1,
497
542
  kRAHasNotBeenSaved,
498
543
  kDontSaveFPRegs);
499
544
  } else {
500
545
  // Write to the properties array.
501
546
  int offset = index * kPointerSize + FixedArray::kHeaderSize;
502
547
  // Get the properties array.
503
- __ lw(scratch, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
504
- __ sw(a0, FieldMemOperand(scratch, offset));
548
+ __ lw(scratch1,
549
+ FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
550
+ __ sw(a0, FieldMemOperand(scratch1, offset));
505
551
 
506
552
  // Skip updating write barrier if storing a smi.
507
553
  __ JumpIfSmi(a0, &exit);
@@ -509,7 +555,7 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
509
555
  // Update the write barrier for the array address.
510
556
  // Ok to clobber receiver_reg and name_reg, since we return.
511
557
  __ mov(name_reg, a0);
512
- __ RecordWriteField(scratch,
558
+ __ RecordWriteField(scratch1,
513
559
  offset,
514
560
  name_reg,
515
561
  receiver_reg,
@@ -533,6 +579,15 @@ void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
533
579
  }
534
580
 
535
581
 
582
+ void StubCompiler::GenerateStoreMiss(MacroAssembler* masm, Code::Kind kind) {
583
+ ASSERT(kind == Code::STORE_IC || kind == Code::KEYED_STORE_IC);
584
+ Handle<Code> code = (kind == Code::STORE_IC)
585
+ ? masm->isolate()->builtins()->StoreIC_Miss()
586
+ : masm->isolate()->builtins()->KeyedStoreIC_Miss();
587
+ __ Jump(code, RelocInfo::CODE_TARGET);
588
+ }
589
+
590
+
536
591
  static void GenerateCallFunction(MacroAssembler* masm,
537
592
  Handle<Object> object,
538
593
  const ParameterCount& arguments,
@@ -1008,46 +1063,6 @@ static void StoreIntAsFloat(MacroAssembler* masm,
1008
1063
  }
1009
1064
 
1010
1065
 
1011
- // Convert unsigned integer with specified number of leading zeroes in binary
1012
- // representation to IEEE 754 double.
1013
- // Integer to convert is passed in register hiword.
1014
- // Resulting double is returned in registers hiword:loword.
1015
- // This functions does not work correctly for 0.
1016
- static void GenerateUInt2Double(MacroAssembler* masm,
1017
- Register hiword,
1018
- Register loword,
1019
- Register scratch,
1020
- int leading_zeroes) {
1021
- const int meaningful_bits = kBitsPerInt - leading_zeroes - 1;
1022
- const int biased_exponent = HeapNumber::kExponentBias + meaningful_bits;
1023
-
1024
- const int mantissa_shift_for_hi_word =
1025
- meaningful_bits - HeapNumber::kMantissaBitsInTopWord;
1026
-
1027
- const int mantissa_shift_for_lo_word =
1028
- kBitsPerInt - mantissa_shift_for_hi_word;
1029
-
1030
- __ li(scratch, biased_exponent << HeapNumber::kExponentShift);
1031
- if (mantissa_shift_for_hi_word > 0) {
1032
- __ sll(loword, hiword, mantissa_shift_for_lo_word);
1033
- __ srl(hiword, hiword, mantissa_shift_for_hi_word);
1034
- __ or_(hiword, scratch, hiword);
1035
- } else {
1036
- __ mov(loword, zero_reg);
1037
- __ sll(hiword, hiword, mantissa_shift_for_hi_word);
1038
- __ or_(hiword, scratch, hiword);
1039
- }
1040
-
1041
- // If least significant bit of biased exponent was not 1 it was corrupted
1042
- // by most significant bit of mantissa so we should fix that.
1043
- if (!(biased_exponent & 1)) {
1044
- __ li(scratch, 1 << HeapNumber::kExponentShift);
1045
- __ nor(scratch, scratch, scratch);
1046
- __ and_(hiword, hiword, scratch);
1047
- }
1048
- }
1049
-
1050
-
1051
1066
  #undef __
1052
1067
  #define __ ACCESS_MASM(masm())
1053
1068
 
@@ -1159,7 +1174,7 @@ void StubCompiler::GenerateLoadField(Handle<JSObject> object,
1159
1174
  Register scratch1,
1160
1175
  Register scratch2,
1161
1176
  Register scratch3,
1162
- int index,
1177
+ PropertyIndex index,
1163
1178
  Handle<String> name,
1164
1179
  Label* miss) {
1165
1180
  // Check that the receiver isn't a smi.
@@ -1195,6 +1210,44 @@ void StubCompiler::GenerateLoadConstant(Handle<JSObject> object,
  }
 
 
+ void StubCompiler::GenerateDictionaryLoadCallback(Register receiver,
+ Register name_reg,
+ Register scratch1,
+ Register scratch2,
+ Register scratch3,
+ Handle<AccessorInfo> callback,
+ Handle<String> name,
+ Label* miss) {
+ ASSERT(!receiver.is(scratch1));
+ ASSERT(!receiver.is(scratch2));
+ ASSERT(!receiver.is(scratch3));
+
+ // Load the properties dictionary.
+ Register dictionary = scratch1;
+ __ lw(dictionary, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
+
+ // Probe the dictionary.
+ Label probe_done;
+ StringDictionaryLookupStub::GeneratePositiveLookup(masm(),
+ miss,
+ &probe_done,
+ dictionary,
+ name_reg,
+ scratch2,
+ scratch3);
+ __ bind(&probe_done);
+
+ // If probing finds an entry in the dictionary, scratch3 contains the
+ // pointer into the dictionary. Check that the value is the callback.
+ Register pointer = scratch3;
+ const int kElementsStartOffset = StringDictionary::kHeaderSize +
+ StringDictionary::kElementsStartIndex * kPointerSize;
+ const int kValueOffset = kElementsStartOffset + kPointerSize;
+ __ lw(scratch2, FieldMemOperand(pointer, kValueOffset));
+ __ Branch(miss, ne, scratch2, Operand(callback));
+ }
+
+
  void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
  Handle<JSObject> holder,
  Register receiver,
@@ -1202,6 +1255,7 @@ void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
  Register scratch1,
  Register scratch2,
  Register scratch3,
+ Register scratch4,
  Handle<AccessorInfo> callback,
  Handle<String> name,
  Label* miss) {
@@ -1212,6 +1266,11 @@ void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
  Register reg = CheckPrototypes(object, receiver, holder, scratch1,
  scratch2, scratch3, name, miss);
 
+ if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
+ GenerateDictionaryLoadCallback(
+ reg, name_reg, scratch2, scratch3, scratch4, callback, name, miss);
+ }
+
  // Build AccessorInfo::args_ list on the stack and push property name below
  // the exit frame to make GC aware of them and store pointers to them.
  __ push(receiver);
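Conceptually, the dictionary path added above is only taken when the holder has slow (dictionary) properties and is not a global object: it probes the holder's property dictionary for the name and bails out to the miss label unless the stored value is exactly the AccessorInfo the stub was compiled for. A rough, self-contained C++ sketch of that check, with an ordinary hash map standing in for V8's StringDictionary and all names being illustrative:

    #include <string>
    #include <unordered_map>

    struct AccessorInfo { /* getter/setter metadata */ };

    // Returns true only if `name` is present and still bound to `expected`,
    // which is the precondition for running the compiled callback stub.
    bool DictionaryLoadCallbackSketch(
        const std::unordered_map<std::string, const AccessorInfo*>& properties,
        const std::string& name,
        const AccessorInfo* expected) {
      auto it = properties.find(name);           // positive lookup ("probe")
      if (it == properties.end()) return false;  // not found: miss
      return it->second == expected;             // value slot must hold the callback
    }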
@@ -1279,12 +1338,13 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
  // later.
  bool compile_followup_inline = false;
  if (lookup->IsFound() && lookup->IsCacheable()) {
- if (lookup->type() == FIELD) {
+ if (lookup->IsField()) {
  compile_followup_inline = true;
  } else if (lookup->type() == CALLBACKS &&
  lookup->GetCallbackObject()->IsAccessorInfo()) {
- compile_followup_inline =
- AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL;
+ AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
+ compile_followup_inline = callback->getter() != NULL &&
+ callback->IsCompatibleReceiver(*object);
  }
  }
 
@@ -1351,7 +1411,7 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
1351
1411
  miss);
1352
1412
  }
1353
1413
 
1354
- if (lookup->type() == FIELD) {
1414
+ if (lookup->IsField()) {
1355
1415
  // We found FIELD property in prototype chain of interceptor's holder.
1356
1416
  // Retrieve a field from field's holder.
1357
1417
  GenerateFastPropertyLoad(masm(), v0, holder_reg,
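The two hunks above tighten when the interceptor stub will inline the follow-up load: a field lookup always qualifies, while a CALLBACKS lookup now also requires an AccessorInfo whose getter is non-null and compatible with this receiver. The decision, reduced to a small illustrative C++ predicate in which every type and member is a stand-in rather than a V8 definition:

    struct AccessorInfoSketch {
      void* getter;                            // null when there is no native getter
      bool (*compatible)(const void* receiver);
    };

    enum class LookupKind { kField, kCallbacks, kOther };

    bool CompileFollowupInline(LookupKind kind,
                               const AccessorInfoSketch* info,
                               const void* receiver) {
      if (kind == LookupKind::kField) return true;
      if (kind == LookupKind::kCallbacks && info != nullptr) {
        return info->getter != nullptr &&
               info->compatible != nullptr &&
               info->compatible(receiver);
      }
      return false;
    }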
@@ -1463,7 +1523,7 @@ void CallStubCompiler::GenerateMissBranch() {
 
  Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
  Handle<JSObject> holder,
- int index,
+ PropertyIndex index,
  Handle<String> name) {
  // ----------- S t a t e -------------
  // -- a2 : name
@@ -1491,7 +1551,7 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
  GenerateMissBranch();
 
  // Return the generated code.
- return GetCode(FIELD, name);
+ return GetCode(Code::FIELD, name);
  }
 
 
@@ -1537,7 +1597,7 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
1537
1597
  } else {
1538
1598
  Label call_builtin;
1539
1599
  if (argc == 1) { // Otherwise fall through to call the builtin.
1540
- Label attempt_to_grow_elements;
1600
+ Label attempt_to_grow_elements, with_write_barrier, check_double;
1541
1601
 
1542
1602
  Register elements = t2;
1543
1603
  Register end_elements = t1;
@@ -1548,7 +1608,7 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
1548
1608
  __ CheckMap(elements,
1549
1609
  v0,
1550
1610
  Heap::kFixedArrayMapRootIndex,
1551
- &call_builtin,
1611
+ &check_double,
1552
1612
  DONT_DO_SMI_CHECK);
1553
1613
 
1554
1614
  // Get the array's length into v0 and calculate new length.
@@ -1564,7 +1624,6 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
1564
1624
  __ Branch(&attempt_to_grow_elements, gt, v0, Operand(t0));
1565
1625
 
1566
1626
  // Check if value is a smi.
1567
- Label with_write_barrier;
1568
1627
  __ lw(t0, MemOperand(sp, (argc - 1) * kPointerSize));
1569
1628
  __ JumpIfNotSmi(t0, &with_write_barrier);
1570
1629
 
@@ -1585,6 +1644,39 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
  __ Drop(argc + 1);
  __ Ret();
 
+ __ bind(&check_double);
+
+ // Check that the elements are in fast mode and writable.
+ __ CheckMap(elements,
+ a0,
+ Heap::kFixedDoubleArrayMapRootIndex,
+ &call_builtin,
+ DONT_DO_SMI_CHECK);
+
+ // Get the array's length into r0 and calculate new length.
+ __ lw(a0, FieldMemOperand(receiver, JSArray::kLengthOffset));
+ STATIC_ASSERT(kSmiTagSize == 1);
+ STATIC_ASSERT(kSmiTag == 0);
+ __ Addu(a0, a0, Operand(Smi::FromInt(argc)));
+
+ // Get the elements' length.
+ __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
+
+ // Check if we could survive without allocation.
+ __ Branch(&call_builtin, gt, a0, Operand(t0));
+
+ __ lw(t0, MemOperand(sp, (argc - 1) * kPointerSize));
+ __ StoreNumberToDoubleElements(
+ t0, a0, elements, a3, t1, a2, t5,
+ &call_builtin, argc * kDoubleSize);
+
+ // Save new length.
+ __ sw(a0, FieldMemOperand(receiver, JSArray::kLengthOffset));
+
+ // Check for a smi.
+ __ Drop(argc + 1);
+ __ Ret();
+
  __ bind(&with_write_barrier);
 
  __ lw(a3, FieldMemOperand(receiver, HeapObject::kMapOffset));
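The new check_double block gives Array.prototype.push a fast path for double-only arrays: if the backing store is a fixed-double array with spare capacity and the pushed value is a number, the value is stored as a raw double and the length is bumped; anything else falls through to the generic builtin. A simplified C++ sketch of that policy, with a std::vector's capacity standing in for the elements store; this is not V8 code, only the shape of the decision:

    #include <vector>

    // Returns true if the push was handled on the fast path, false to fall back.
    bool FastDoublePushSketch(std::vector<double>& elements,
                              std::size_t& length,        // the JSArray length
                              double value,
                              bool value_is_number) {
      if (!value_is_number) return false;                 // needs the builtin
      if (elements.size() + 1 > elements.capacity()) {
        return false;                                     // would have to grow
      }
      elements.push_back(value);                          // store as raw double
      length = elements.size();                           // save new length
      return true;
    }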
@@ -1596,8 +1688,12 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
1596
1688
  // In case of fast smi-only, convert to fast object, otherwise bail out.
1597
1689
  __ bind(&not_fast_object);
1598
1690
  __ CheckFastSmiElements(a3, t3, &call_builtin);
1691
+
1692
+ __ lw(t3, FieldMemOperand(t0, HeapObject::kMapOffset));
1693
+ __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
1694
+ __ Branch(&call_builtin, eq, t3, Operand(at));
1599
1695
  // edx: receiver
1600
- // r3: map
1696
+ // a3: map
1601
1697
  Label try_holey_map;
1602
1698
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
1603
1699
  FAST_ELEMENTS,
@@ -1606,7 +1702,9 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
1606
1702
  &try_holey_map);
1607
1703
  __ mov(a2, receiver);
1608
1704
  ElementsTransitionGenerator::
1609
- GenerateMapChangeElementsTransition(masm());
1705
+ GenerateMapChangeElementsTransition(masm(),
1706
+ DONT_TRACK_ALLOCATION_SITE,
1707
+ NULL);
1610
1708
  __ jmp(&fast_object);
1611
1709
 
1612
1710
  __ bind(&try_holey_map);
@@ -1617,7 +1715,9 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
1617
1715
  &call_builtin);
1618
1716
  __ mov(a2, receiver);
1619
1717
  ElementsTransitionGenerator::
1620
- GenerateMapChangeElementsTransition(masm());
1718
+ GenerateMapChangeElementsTransition(masm(),
1719
+ DONT_TRACK_ALLOCATION_SITE,
1720
+ NULL);
1621
1721
  __ bind(&fast_object);
1622
1722
  } else {
1623
1723
  __ CheckFastObjectElements(a3, a3, &call_builtin);
@@ -2038,7 +2138,7 @@ Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
  GenerateMissBranch();
 
  // Return the generated code.
- return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
+ return cell.is_null() ? GetCode(function) : GetCode(Code::NORMAL, name);
  }
 
 
@@ -2172,7 +2272,7 @@ Handle<Code> CallStubCompiler::CompileMathFloorCall(
  GenerateMissBranch();
 
  // Return the generated code.
- return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
+ return cell.is_null() ? GetCode(function) : GetCode(Code::NORMAL, name);
  }
 
 
@@ -2273,7 +2373,7 @@ Handle<Code> CallStubCompiler::CompileMathAbsCall(
  GenerateMissBranch();
 
  // Return the generated code.
- return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
+ return cell.is_null() ? GetCode(function) : GetCode(Code::NORMAL, name);
  }
 
 
@@ -2330,25 +2430,16 @@ Handle<Code> CallStubCompiler::CompileFastApiCall(
2330
2430
  }
2331
2431
 
2332
2432
 
2333
- Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object,
2334
- Handle<JSObject> holder,
2335
- Handle<JSFunction> function,
2336
- Handle<String> name,
2337
- CheckType check) {
2433
+ void CallStubCompiler::CompileHandlerFrontend(Handle<Object> object,
2434
+ Handle<JSObject> holder,
2435
+ Handle<String> name,
2436
+ CheckType check,
2437
+ Label* success) {
2338
2438
  // ----------- S t a t e -------------
2339
2439
  // -- a2 : name
2340
2440
  // -- ra : return address
2341
2441
  // -----------------------------------
2342
- if (HasCustomCallGenerator(function)) {
2343
- Handle<Code> code = CompileCustomCall(object, holder,
2344
- Handle<JSGlobalPropertyCell>::null(),
2345
- function, name);
2346
- // A null handle means bail out to the regular compiler code below.
2347
- if (!code.is_null()) return code;
2348
- }
2349
-
2350
2442
  Label miss;
2351
-
2352
2443
  GenerateNameCheck(name, &miss);
2353
2444
 
2354
2445
  // Get the receiver from the stack.
@@ -2381,77 +2472,87 @@ Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object,
2381
2472
  break;
2382
2473
 
2383
2474
  case STRING_CHECK:
2384
- if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
2385
- // Check that the object is a two-byte string or a symbol.
2386
- __ GetObjectType(a1, a3, a3);
2387
- __ Branch(&miss, Ugreater_equal, a3, Operand(FIRST_NONSTRING_TYPE));
2388
- // Check that the maps starting from the prototype haven't changed.
2389
- GenerateDirectLoadGlobalFunctionPrototype(
2390
- masm(), Context::STRING_FUNCTION_INDEX, a0, &miss);
2391
- CheckPrototypes(
2392
- Handle<JSObject>(JSObject::cast(object->GetPrototype())),
2393
- a0, holder, a3, a1, t0, name, &miss);
2394
- } else {
2395
- // Calling non-strict non-builtins with a value as the receiver
2396
- // requires boxing.
2397
- __ jmp(&miss);
2398
- }
2475
+ // Check that the object is a two-byte string or a symbol.
2476
+ __ GetObjectType(a1, a3, a3);
2477
+ __ Branch(&miss, Ugreater_equal, a3, Operand(FIRST_NONSTRING_TYPE));
2478
+ // Check that the maps starting from the prototype haven't changed.
2479
+ GenerateDirectLoadGlobalFunctionPrototype(
2480
+ masm(), Context::STRING_FUNCTION_INDEX, a0, &miss);
2481
+ CheckPrototypes(
2482
+ Handle<JSObject>(JSObject::cast(object->GetPrototype())),
2483
+ a0, holder, a3, a1, t0, name, &miss);
2399
2484
  break;
2400
2485
 
2401
- case NUMBER_CHECK:
2402
- if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
2486
+ case NUMBER_CHECK: {
2403
2487
  Label fast;
2404
- // Check that the object is a smi or a heap number.
2405
- __ JumpIfSmi(a1, &fast);
2406
- __ GetObjectType(a1, a0, a0);
2407
- __ Branch(&miss, ne, a0, Operand(HEAP_NUMBER_TYPE));
2408
- __ bind(&fast);
2409
- // Check that the maps starting from the prototype haven't changed.
2410
- GenerateDirectLoadGlobalFunctionPrototype(
2411
- masm(), Context::NUMBER_FUNCTION_INDEX, a0, &miss);
2412
- CheckPrototypes(
2413
- Handle<JSObject>(JSObject::cast(object->GetPrototype())),
2414
- a0, holder, a3, a1, t0, name, &miss);
2415
- } else {
2416
- // Calling non-strict non-builtins with a value as the receiver
2417
- // requires boxing.
2418
- __ jmp(&miss);
2419
- }
2488
+ // Check that the object is a smi or a heap number.
2489
+ __ JumpIfSmi(a1, &fast);
2490
+ __ GetObjectType(a1, a0, a0);
2491
+ __ Branch(&miss, ne, a0, Operand(HEAP_NUMBER_TYPE));
2492
+ __ bind(&fast);
2493
+ // Check that the maps starting from the prototype haven't changed.
2494
+ GenerateDirectLoadGlobalFunctionPrototype(
2495
+ masm(), Context::NUMBER_FUNCTION_INDEX, a0, &miss);
2496
+ CheckPrototypes(
2497
+ Handle<JSObject>(JSObject::cast(object->GetPrototype())),
2498
+ a0, holder, a3, a1, t0, name, &miss);
2420
2499
  break;
2421
-
2422
- case BOOLEAN_CHECK:
2423
- if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
2424
- Label fast;
2425
- // Check that the object is a boolean.
2426
- __ LoadRoot(t0, Heap::kTrueValueRootIndex);
2427
- __ Branch(&fast, eq, a1, Operand(t0));
2428
- __ LoadRoot(t0, Heap::kFalseValueRootIndex);
2429
- __ Branch(&miss, ne, a1, Operand(t0));
2430
- __ bind(&fast);
2431
- // Check that the maps starting from the prototype haven't changed.
2432
- GenerateDirectLoadGlobalFunctionPrototype(
2433
- masm(), Context::BOOLEAN_FUNCTION_INDEX, a0, &miss);
2434
- CheckPrototypes(
2435
- Handle<JSObject>(JSObject::cast(object->GetPrototype())),
2436
- a0, holder, a3, a1, t0, name, &miss);
2437
- } else {
2438
- // Calling non-strict non-builtins with a value as the receiver
2439
- // requires boxing.
2440
- __ jmp(&miss);
2441
- }
2500
+ }
2501
+ case BOOLEAN_CHECK: {
2502
+ Label fast;
2503
+ // Check that the object is a boolean.
2504
+ __ LoadRoot(t0, Heap::kTrueValueRootIndex);
2505
+ __ Branch(&fast, eq, a1, Operand(t0));
2506
+ __ LoadRoot(t0, Heap::kFalseValueRootIndex);
2507
+ __ Branch(&miss, ne, a1, Operand(t0));
2508
+ __ bind(&fast);
2509
+ // Check that the maps starting from the prototype haven't changed.
2510
+ GenerateDirectLoadGlobalFunctionPrototype(
2511
+ masm(), Context::BOOLEAN_FUNCTION_INDEX, a0, &miss);
2512
+ CheckPrototypes(
2513
+ Handle<JSObject>(JSObject::cast(object->GetPrototype())),
2514
+ a0, holder, a3, a1, t0, name, &miss);
2442
2515
  break;
2443
2516
  }
2517
+ }
2518
+
2519
+ __ jmp(success);
2520
+
2521
+ // Handle call cache miss.
2522
+ __ bind(&miss);
2523
+
2524
+ GenerateMissBranch();
2525
+ }
2526
+
2444
2527
 
2528
+ void CallStubCompiler::CompileHandlerBackend(Handle<JSFunction> function) {
2445
2529
  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
2446
2530
  ? CALL_AS_FUNCTION
2447
2531
  : CALL_AS_METHOD;
2448
2532
  __ InvokeFunction(
2449
2533
  function, arguments(), JUMP_FUNCTION, NullCallWrapper(), call_kind);
2534
+ }
2450
2535
 
2451
- // Handle call cache miss.
2452
- __ bind(&miss);
2453
2536
 
2454
- GenerateMissBranch();
2537
+ Handle<Code> CallStubCompiler::CompileCallConstant(
2538
+ Handle<Object> object,
2539
+ Handle<JSObject> holder,
2540
+ Handle<String> name,
2541
+ CheckType check,
2542
+ Handle<JSFunction> function) {
2543
+ if (HasCustomCallGenerator(function)) {
2544
+ Handle<Code> code = CompileCustomCall(object, holder,
2545
+ Handle<JSGlobalPropertyCell>::null(),
2546
+ function, name);
2547
+ // A null handle means bail out to the regular compiler code below.
2548
+ if (!code.is_null()) return code;
2549
+ }
2550
+
2551
+ Label success;
2552
+
2553
+ CompileHandlerFrontend(object, holder, name, check, &success);
2554
+ __ bind(&success);
2555
+ CompileHandlerBackend(function);
2455
2556
 
2456
2557
  // Return the generated code.
2457
2558
  return GetCode(function);
@@ -2494,7 +2595,7 @@ Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object,
2494
2595
  GenerateMissBranch();
2495
2596
 
2496
2597
  // Return the generated code.
2497
- return GetCode(INTERCEPTOR, name);
2598
+ return GetCode(Code::INTERCEPTOR, name);
2498
2599
  }
2499
2600
 
2500
2601
 
@@ -2553,7 +2654,7 @@ Handle<Code> CallStubCompiler::CompileCallGlobal(
2553
2654
  GenerateMissBranch();
2554
2655
 
2555
2656
  // Return the generated code.
2556
- return GetCode(NORMAL, name);
2657
+ return GetCode(Code::NORMAL, name);
2557
2658
  }
2558
2659
 
2559
2660
 
@@ -2570,21 +2671,30 @@ Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object,
2570
2671
  Label miss;
2571
2672
 
2572
2673
  // Name register might be clobbered.
2573
- GenerateStoreField(masm(), object, index, transition, a1, a2, a3, &miss);
2674
+ GenerateStoreField(masm(),
2675
+ object,
2676
+ index,
2677
+ transition,
2678
+ name,
2679
+ a1, a2, a3, t0,
2680
+ &miss);
2574
2681
  __ bind(&miss);
2575
2682
  __ li(a2, Operand(Handle<String>(name))); // Restore name.
2576
2683
  Handle<Code> ic = masm()->isolate()->builtins()->Builtins::StoreIC_Miss();
2577
2684
  __ Jump(ic, RelocInfo::CODE_TARGET);
2578
2685
 
2579
2686
  // Return the generated code.
2580
- return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name);
2687
+ return GetCode(transition.is_null()
2688
+ ? Code::FIELD
2689
+ : Code::MAP_TRANSITION, name);
2581
2690
  }
2582
2691
 
2583
2692
 
2584
2693
  Handle<Code> StoreStubCompiler::CompileStoreCallback(
2585
- Handle<JSObject> object,
2586
- Handle<AccessorInfo> callback,
2587
- Handle<String> name) {
2694
+ Handle<String> name,
2695
+ Handle<JSObject> receiver,
2696
+ Handle<JSObject> holder,
2697
+ Handle<AccessorInfo> callback) {
2588
2698
  // ----------- S t a t e -------------
2589
2699
  // -- a0 : value
2590
2700
  // -- a1 : receiver
@@ -2592,19 +2702,13 @@ Handle<Code> StoreStubCompiler::CompileStoreCallback(
2592
2702
  // -- ra : return address
2593
2703
  // -----------------------------------
2594
2704
  Label miss;
2595
-
2596
- // Check that the map of the object hasn't changed.
2597
- __ CheckMap(a1, a3, Handle<Map>(object->map()), &miss,
2598
- DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
2599
-
2600
- // Perform global security token check if needed.
2601
- if (object->IsJSGlobalProxy()) {
2602
- __ CheckAccessGlobalProxy(a1, a3, &miss);
2603
- }
2705
+ // Check that the maps haven't changed.
2706
+ __ JumpIfSmi(a1, &miss, a3);
2707
+ CheckPrototypes(receiver, a1, holder, a3, t0, t1, name, &miss);
2604
2708
 
2605
2709
  // Stub never generated for non-global objects that require access
2606
2710
  // checks.
2607
- ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2711
+ ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
2608
2712
 
2609
2713
  __ push(a1); // Receiver.
2610
2714
  __ li(a3, Operand(callback)); // Callback info.
@@ -2622,7 +2726,81 @@ Handle<Code> StoreStubCompiler::CompileStoreCallback(
2622
2726
  __ Jump(ic, RelocInfo::CODE_TARGET);
2623
2727
 
2624
2728
  // Return the generated code.
2625
- return GetCode(CALLBACKS, name);
2729
+ return GetCode(Code::CALLBACKS, name);
2730
+ }
2731
+
2732
+
2733
+ #undef __
2734
+ #define __ ACCESS_MASM(masm)
2735
+
2736
+
2737
+ void StoreStubCompiler::GenerateStoreViaSetter(
2738
+ MacroAssembler* masm,
2739
+ Handle<JSFunction> setter) {
2740
+ // ----------- S t a t e -------------
2741
+ // -- a0 : value
2742
+ // -- a1 : receiver
2743
+ // -- a2 : name
2744
+ // -- ra : return address
2745
+ // -----------------------------------
2746
+ {
2747
+ FrameScope scope(masm, StackFrame::INTERNAL);
2748
+
2749
+ // Save value register, so we can restore it later.
2750
+ __ push(a0);
2751
+
2752
+ if (!setter.is_null()) {
2753
+ // Call the JavaScript setter with receiver and value on the stack.
2754
+ __ push(a1);
2755
+ __ push(a0);
2756
+ ParameterCount actual(1);
2757
+ __ InvokeFunction(setter, actual, CALL_FUNCTION, NullCallWrapper(),
2758
+ CALL_AS_METHOD);
2759
+ } else {
2760
+ // If we generate a global code snippet for deoptimization only, remember
2761
+ // the place to continue after deoptimization.
2762
+ masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
2763
+ }
2764
+
2765
+ // We have to return the passed value, not the return value of the setter.
2766
+ __ pop(v0);
2767
+
2768
+ // Restore context register.
2769
+ __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2770
+ }
2771
+ __ Ret();
2772
+ }
2773
+
2774
+
2775
+ #undef __
2776
+ #define __ ACCESS_MASM(masm())
2777
+
2778
+
2779
+ Handle<Code> StoreStubCompiler::CompileStoreViaSetter(
2780
+ Handle<String> name,
2781
+ Handle<JSObject> receiver,
2782
+ Handle<JSObject> holder,
2783
+ Handle<JSFunction> setter) {
2784
+ // ----------- S t a t e -------------
2785
+ // -- a0 : value
2786
+ // -- a1 : receiver
2787
+ // -- a2 : name
2788
+ // -- ra : return address
2789
+ // -----------------------------------
2790
+ Label miss;
2791
+
2792
+ // Check that the maps haven't changed.
2793
+ __ JumpIfSmi(a1, &miss);
2794
+ CheckPrototypes(receiver, a1, holder, a3, t0, t1, name, &miss);
2795
+
2796
+ GenerateStoreViaSetter(masm(), setter);
2797
+
2798
+ __ bind(&miss);
2799
+ Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2800
+ __ Jump(ic, RelocInfo::CODE_TARGET);
2801
+
2802
+ // Return the generated code.
2803
+ return GetCode(Code::CALLBACKS, name);
2626
2804
  }
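GenerateStoreViaSetter above has one subtlety worth calling out: an assignment expression evaluates to the assigned value, so the stub saves the incoming value, invokes the JavaScript setter with receiver and value, and then restores the saved value as the result rather than using whatever the setter returned. The same idea in ordinary C++ terms, with a callable standing in for the JSFunction; purely illustrative:

    #include <functional>

    struct Obj { double field = 0; };

    // Mirrors the stub: run the setter, but the expression's result is `value`.
    double StoreViaSetterSketch(Obj& receiver, double value,
                                const std::function<double(Obj&, double)>& setter) {
      double saved = value;                   // save the value register
      if (setter) setter(receiver, value);    // call the JavaScript setter
      return saved;                           // return the passed value, not the setter's result
    }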
2627
2805
 
2628
2806
 
@@ -2667,7 +2845,7 @@ Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
2667
2845
  __ Jump(ic, RelocInfo::CODE_TARGET);
2668
2846
 
2669
2847
  // Return the generated code.
2670
- return GetCode(INTERCEPTOR, name);
2848
+ return GetCode(Code::INTERCEPTOR, name);
2671
2849
  }
2672
2850
 
2673
2851
 
@@ -2712,13 +2890,15 @@ Handle<Code> StoreStubCompiler::CompileStoreGlobal(
2712
2890
  __ Jump(ic, RelocInfo::CODE_TARGET);
2713
2891
 
2714
2892
  // Return the generated code.
2715
- return GetCode(NORMAL, name);
2893
+ return GetCode(Code::NORMAL, name);
2716
2894
  }
2717
2895
 
2718
2896
 
2719
- Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name,
2720
- Handle<JSObject> object,
2721
- Handle<JSObject> last) {
2897
+ Handle<Code> LoadStubCompiler::CompileLoadNonexistent(
2898
+ Handle<JSObject> object,
2899
+ Handle<JSObject> last,
2900
+ Handle<String> name,
2901
+ Handle<GlobalObject> global) {
2722
2902
  // ----------- S t a t e -------------
2723
2903
  // -- a0 : receiver
2724
2904
  // -- ra : return address
@@ -2728,14 +2908,22 @@ Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name,
2728
2908
  // Check that the receiver is not a smi.
2729
2909
  __ JumpIfSmi(a0, &miss);
2730
2910
 
2911
+ Register scratch = a1;
2912
+
2731
2913
  // Check the maps of the full prototype chain.
2732
- CheckPrototypes(object, a0, last, a3, a1, t0, name, &miss);
2914
+ Register result =
2915
+ CheckPrototypes(object, a0, last, a3, scratch, t0, name, &miss);
2733
2916
 
2734
2917
  // If the last object in the prototype chain is a global object,
2735
2918
  // check that the global property cell is empty.
2736
- if (last->IsGlobalObject()) {
2737
- GenerateCheckPropertyCell(
2738
- masm(), Handle<GlobalObject>::cast(last), name, a1, &miss);
2919
+ if (!global.is_null()) {
2920
+ GenerateCheckPropertyCell(masm(), global, name, scratch, &miss);
2921
+ }
2922
+
2923
+ if (!last->HasFastProperties()) {
2924
+ __ lw(scratch, FieldMemOperand(result, HeapObject::kMapOffset));
2925
+ __ lw(scratch, FieldMemOperand(scratch, Map::kPrototypeOffset));
2926
+ __ Branch(&miss, ne, scratch, Operand(isolate()->factory()->null_value()));
2739
2927
  }
2740
2928
 
2741
2929
  // Return undefined if maps of the full prototype chain is still the same.
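The extra code above extends the "nonexistent property" stub to holders in dictionary (slow-properties) mode: besides the unchanged prototype-chain maps and, when a global object is involved, an empty property cell, the last object's map must still have a null prototype before the stub may answer undefined. A condensed sketch of the conditions being re-validated, written with plain booleans rather than V8 types:

    // Returns true when the compiled negative lookup may still answer "undefined";
    // any failed check corresponds to a jump to the miss label.
    bool NonexistentStillValidSketch(bool prototype_maps_unchanged,
                                     bool has_global,
                                     bool global_cell_is_empty,
                                     bool last_has_fast_properties,
                                     bool last_map_prototype_is_null) {
      if (!prototype_maps_unchanged) return false;
      if (has_global && !global_cell_is_empty) return false;
      if (!last_has_fast_properties && !last_map_prototype_is_null) return false;
      return true;
    }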
@@ -2746,93 +2934,91 @@ Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name,
2746
2934
  GenerateLoadMiss(masm(), Code::LOAD_IC);
2747
2935
 
2748
2936
  // Return the generated code.
2749
- return GetCode(NONEXISTENT, factory()->empty_string());
2937
+ return GetCode(Code::NONEXISTENT, factory()->empty_string());
2750
2938
  }
2751
2939
 
2752
2940
 
2753
- Handle<Code> LoadStubCompiler::CompileLoadField(Handle<JSObject> object,
2754
- Handle<JSObject> holder,
2755
- int index,
2756
- Handle<String> name) {
2757
- // ----------- S t a t e -------------
2758
- // -- a0 : receiver
2759
- // -- a2 : name
2760
- // -- ra : return address
2761
- // -----------------------------------
2762
- Label miss;
2763
-
2764
- __ mov(v0, a0);
2941
+ Register* LoadStubCompiler::registers() {
2942
+ // receiver, name, scratch1, scratch2, scratch3, scratch4.
2943
+ static Register registers[] = { a0, a2, a3, a1, t0, t1 };
2944
+ return registers;
2945
+ }
2765
2946
 
2766
- GenerateLoadField(object, holder, v0, a3, a1, t0, index, name, &miss);
2767
- __ bind(&miss);
2768
- GenerateLoadMiss(masm(), Code::LOAD_IC);
2769
2947
 
2770
- // Return the generated code.
2771
- return GetCode(FIELD, name);
2948
+ Register* KeyedLoadStubCompiler::registers() {
2949
+ // receiver, name, scratch1, scratch2, scratch3, scratch4.
2950
+ static Register registers[] = { a1, a0, a2, a3, t0, t1 };
2951
+ return registers;
2772
2952
  }
2773
2953
 
2774
2954
 
2775
- Handle<Code> LoadStubCompiler::CompileLoadCallback(
2776
- Handle<String> name,
2777
- Handle<JSObject> object,
2778
- Handle<JSObject> holder,
2779
- Handle<AccessorInfo> callback) {
2780
- // ----------- S t a t e -------------
2781
- // -- a0 : receiver
2782
- // -- a2 : name
2783
- // -- ra : return address
2784
- // -----------------------------------
2785
- Label miss;
2786
- GenerateLoadCallback(object, holder, a0, a2, a3, a1, t0, callback, name,
2787
- &miss);
2788
- __ bind(&miss);
2789
- GenerateLoadMiss(masm(), Code::LOAD_IC);
2790
-
2791
- // Return the generated code.
2792
- return GetCode(CALLBACKS, name);
2955
+ void KeyedLoadStubCompiler::GenerateNameCheck(Handle<String> name,
2956
+ Register name_reg,
2957
+ Label* miss) {
2958
+ __ Branch(miss, ne, name_reg, Operand(name));
2793
2959
  }
2794
2960
 
2795
2961
 
2796
- Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object,
2797
- Handle<JSObject> holder,
2798
- Handle<JSFunction> value,
2799
- Handle<String> name) {
2962
+ #undef __
2963
+ #define __ ACCESS_MASM(masm)
2964
+
2965
+
2966
+ void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
2967
+ Handle<JSFunction> getter) {
2800
2968
  // ----------- S t a t e -------------
2801
2969
  // -- a0 : receiver
2802
2970
  // -- a2 : name
2803
2971
  // -- ra : return address
2804
2972
  // -----------------------------------
2805
- Label miss;
2973
+ {
2974
+ FrameScope scope(masm, StackFrame::INTERNAL);
2806
2975
 
2807
- GenerateLoadConstant(object, holder, a0, a3, a1, t0, value, name, &miss);
2808
- __ bind(&miss);
2809
- GenerateLoadMiss(masm(), Code::LOAD_IC);
2976
+ if (!getter.is_null()) {
2977
+ // Call the JavaScript getter with the receiver on the stack.
2978
+ __ push(a0);
2979
+ ParameterCount actual(0);
2980
+ __ InvokeFunction(getter, actual, CALL_FUNCTION, NullCallWrapper(),
2981
+ CALL_AS_METHOD);
2982
+ } else {
2983
+ // If we generate a global code snippet for deoptimization only, remember
2984
+ // the place to continue after deoptimization.
2985
+ masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
2986
+ }
2810
2987
 
2811
- // Return the generated code.
2812
- return GetCode(CONSTANT_FUNCTION, name);
2988
+ // Restore context register.
2989
+ __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2990
+ }
2991
+ __ Ret();
2813
2992
  }
2814
2993
 
2815
2994
 
2816
- Handle<Code> LoadStubCompiler::CompileLoadInterceptor(Handle<JSObject> object,
2817
- Handle<JSObject> holder,
2818
- Handle<String> name) {
2995
+ #undef __
2996
+ #define __ ACCESS_MASM(masm())
2997
+
2998
+
2999
+ Handle<Code> LoadStubCompiler::CompileLoadViaGetter(
3000
+ Handle<JSObject> receiver,
3001
+ Handle<JSObject> holder,
3002
+ Handle<String> name,
3003
+ Handle<JSFunction> getter) {
2819
3004
  // ----------- S t a t e -------------
2820
3005
  // -- a0 : receiver
2821
3006
  // -- a2 : name
2822
3007
  // -- ra : return address
2823
- // -- [sp] : receiver
2824
3008
  // -----------------------------------
2825
3009
  Label miss;
2826
3010
 
2827
- LookupResult lookup(isolate());
2828
- LookupPostInterceptor(holder, name, &lookup);
2829
- GenerateLoadInterceptor(object, holder, &lookup, a0, a2, a3, a1, t0, name,
2830
- &miss);
3011
+ // Check that the maps haven't changed.
3012
+ __ JumpIfSmi(a0, &miss);
3013
+ CheckPrototypes(receiver, a0, holder, a3, t0, a1, name, &miss);
3014
+
3015
+ GenerateLoadViaGetter(masm(), getter);
3016
+
2831
3017
  __ bind(&miss);
2832
3018
  GenerateLoadMiss(masm(), Code::LOAD_IC);
2833
3019
 
2834
3020
  // Return the generated code.
2835
- return GetCode(INTERCEPTOR, name);
3021
+ return GetCode(Code::CALLBACKS, name);
2836
3022
  }
2837
3023
 
2838
3024
 
@@ -2873,204 +3059,48 @@ Handle<Code> LoadStubCompiler::CompileLoadGlobal(
2873
3059
  GenerateLoadMiss(masm(), Code::LOAD_IC);
2874
3060
 
2875
3061
  // Return the generated code.
2876
- return GetCode(NORMAL, name);
3062
+ return GetCode(Code::NORMAL, name);
2877
3063
  }
2878
3064
 
2879
3065
 
2880
- Handle<Code> KeyedLoadStubCompiler::CompileLoadField(Handle<String> name,
2881
- Handle<JSObject> receiver,
2882
- Handle<JSObject> holder,
2883
- int index) {
3066
+ Handle<Code> KeyedLoadStubCompiler::CompileLoadElement(
3067
+ Handle<Map> receiver_map) {
2884
3068
  // ----------- S t a t e -------------
2885
3069
  // -- ra : return address
2886
3070
  // -- a0 : key
2887
3071
  // -- a1 : receiver
2888
3072
  // -----------------------------------
2889
- Label miss;
2890
-
2891
- // Check the key is the cached one.
2892
- __ Branch(&miss, ne, a0, Operand(name));
3073
+ ElementsKind elements_kind = receiver_map->elements_kind();
3074
+ if (receiver_map->has_fast_elements() ||
3075
+ receiver_map->has_external_array_elements()) {
3076
+ Handle<Code> stub = KeyedLoadFastElementStub(
3077
+ receiver_map->instance_type() == JS_ARRAY_TYPE,
3078
+ elements_kind).GetCode();
3079
+ __ DispatchMap(a1, a2, receiver_map, stub, DO_SMI_CHECK);
3080
+ } else {
3081
+ Handle<Code> stub =
3082
+ KeyedLoadDictionaryElementStub().GetCode();
3083
+ __ DispatchMap(a1, a2, receiver_map, stub, DO_SMI_CHECK);
3084
+ }
2893
3085
 
2894
- GenerateLoadField(receiver, holder, a1, a2, a3, t0, index, name, &miss);
2895
- __ bind(&miss);
2896
- GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3086
+ Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
3087
+ __ Jump(ic, RelocInfo::CODE_TARGET);
2897
3088
 
2898
- return GetCode(FIELD, name);
3089
+ // Return the generated code.
3090
+ return GetCode(Code::NORMAL, factory()->empty_string());
2899
3091
  }
2900
3092
 
2901
3093
 
2902
- Handle<Code> KeyedLoadStubCompiler::CompileLoadCallback(
2903
- Handle<String> name,
2904
- Handle<JSObject> receiver,
2905
- Handle<JSObject> holder,
2906
- Handle<AccessorInfo> callback) {
3094
+ Handle<Code> KeyedLoadStubCompiler::CompileLoadPolymorphic(
3095
+ MapHandleList* receiver_maps,
3096
+ CodeHandleList* handler_ics) {
2907
3097
  // ----------- S t a t e -------------
2908
3098
  // -- ra : return address
2909
3099
  // -- a0 : key
2910
3100
  // -- a1 : receiver
2911
3101
  // -----------------------------------
2912
3102
  Label miss;
2913
-
2914
- // Check the key is the cached one.
2915
- __ Branch(&miss, ne, a0, Operand(name));
2916
-
2917
- GenerateLoadCallback(receiver, holder, a1, a0, a2, a3, t0, callback, name,
2918
- &miss);
2919
- __ bind(&miss);
2920
- GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2921
-
2922
- return GetCode(CALLBACKS, name);
2923
- }
2924
-
2925
-
2926
- Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant(
2927
- Handle<String> name,
2928
- Handle<JSObject> receiver,
2929
- Handle<JSObject> holder,
2930
- Handle<JSFunction> value) {
2931
- // ----------- S t a t e -------------
2932
- // -- ra : return address
2933
- // -- a0 : key
2934
- // -- a1 : receiver
2935
- // -----------------------------------
2936
- Label miss;
2937
-
2938
- // Check the key is the cached one.
2939
- __ Branch(&miss, ne, a0, Operand(name));
2940
-
2941
- GenerateLoadConstant(receiver, holder, a1, a2, a3, t0, value, name, &miss);
2942
- __ bind(&miss);
2943
- GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2944
-
2945
- // Return the generated code.
2946
- return GetCode(CONSTANT_FUNCTION, name);
2947
- }
2948
-
2949
-
2950
- Handle<Code> KeyedLoadStubCompiler::CompileLoadInterceptor(
2951
- Handle<JSObject> receiver,
2952
- Handle<JSObject> holder,
2953
- Handle<String> name) {
2954
- // ----------- S t a t e -------------
2955
- // -- ra : return address
2956
- // -- a0 : key
2957
- // -- a1 : receiver
2958
- // -----------------------------------
2959
- Label miss;
2960
-
2961
- // Check the key is the cached one.
2962
- __ Branch(&miss, ne, a0, Operand(name));
2963
-
2964
- LookupResult lookup(isolate());
2965
- LookupPostInterceptor(holder, name, &lookup);
2966
- GenerateLoadInterceptor(receiver, holder, &lookup, a1, a0, a2, a3, t0, name,
2967
- &miss);
2968
- __ bind(&miss);
2969
- GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2970
-
2971
- return GetCode(INTERCEPTOR, name);
2972
- }
2973
-
2974
-
2975
- Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength(
2976
- Handle<String> name) {
2977
- // ----------- S t a t e -------------
2978
- // -- ra : return address
2979
- // -- a0 : key
2980
- // -- a1 : receiver
2981
- // -----------------------------------
2982
- Label miss;
2983
-
2984
- // Check the key is the cached one.
2985
- __ Branch(&miss, ne, a0, Operand(name));
2986
-
2987
- GenerateLoadArrayLength(masm(), a1, a2, &miss);
2988
- __ bind(&miss);
2989
- GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2990
-
2991
- return GetCode(CALLBACKS, name);
2992
- }
2993
-
2994
-
2995
- Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength(
2996
- Handle<String> name) {
2997
- // ----------- S t a t e -------------
2998
- // -- ra : return address
2999
- // -- a0 : key
3000
- // -- a1 : receiver
3001
- // -----------------------------------
3002
- Label miss;
3003
-
3004
- Counters* counters = masm()->isolate()->counters();
3005
- __ IncrementCounter(counters->keyed_load_string_length(), 1, a2, a3);
3006
-
3007
- // Check the key is the cached one.
3008
- __ Branch(&miss, ne, a0, Operand(name));
3009
-
3010
- GenerateLoadStringLength(masm(), a1, a2, a3, &miss, true);
3011
- __ bind(&miss);
3012
- __ DecrementCounter(counters->keyed_load_string_length(), 1, a2, a3);
3013
-
3014
- GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3015
-
3016
- return GetCode(CALLBACKS, name);
3017
- }
3018
-
3019
-
3020
- Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype(
3021
- Handle<String> name) {
3022
- // ----------- S t a t e -------------
3023
- // -- ra : return address
3024
- // -- a0 : key
3025
- // -- a1 : receiver
3026
- // -----------------------------------
3027
- Label miss;
3028
-
3029
- Counters* counters = masm()->isolate()->counters();
3030
- __ IncrementCounter(counters->keyed_load_function_prototype(), 1, a2, a3);
3031
-
3032
- // Check the name hasn't changed.
3033
- __ Branch(&miss, ne, a0, Operand(name));
3034
-
3035
- GenerateLoadFunctionPrototype(masm(), a1, a2, a3, &miss);
3036
- __ bind(&miss);
3037
- __ DecrementCounter(counters->keyed_load_function_prototype(), 1, a2, a3);
3038
- GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3039
-
3040
- return GetCode(CALLBACKS, name);
3041
- }
3042
-
3043
-
3044
- Handle<Code> KeyedLoadStubCompiler::CompileLoadElement(
3045
- Handle<Map> receiver_map) {
3046
- // ----------- S t a t e -------------
3047
- // -- ra : return address
3048
- // -- a0 : key
3049
- // -- a1 : receiver
3050
- // -----------------------------------
3051
- ElementsKind elements_kind = receiver_map->elements_kind();
3052
- Handle<Code> stub = KeyedLoadElementStub(elements_kind).GetCode();
3053
-
3054
- __ DispatchMap(a1, a2, receiver_map, stub, DO_SMI_CHECK);
3055
-
3056
- Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
3057
- __ Jump(ic, RelocInfo::CODE_TARGET);
3058
-
3059
- // Return the generated code.
3060
- return GetCode(NORMAL, factory()->empty_string());
3061
- }
3062
-
3063
-
3064
- Handle<Code> KeyedLoadStubCompiler::CompileLoadPolymorphic(
3065
- MapHandleList* receiver_maps,
3066
- CodeHandleList* handler_ics) {
3067
- // ----------- S t a t e -------------
3068
- // -- ra : return address
3069
- // -- a0 : key
3070
- // -- a1 : receiver
3071
- // -----------------------------------
3072
- Label miss;
3073
- __ JumpIfSmi(a1, &miss);
3103
+ __ JumpIfSmi(a1, &miss);
3074
3104
 
3075
3105
  int receiver_count = receiver_maps->length();
3076
3106
  __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
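CompileLoadPolymorphic (now tagged POLYMORPHIC rather than MEGAMORPHIC in this hunk) is a straight map dispatch: load the receiver's map, compare it against each cached map, and tail-jump to the matching handler IC, falling through to the generic miss stub when nothing matches. The equivalent control flow as a tiny C++ sketch, with plain function pointers standing in for handler code objects; names are illustrative only:

    #include <cstddef>

    using Handler = int (*)(void* receiver, int key);

    int PolymorphicDispatchSketch(void* receiver_map, int key, void* receiver,
                                  void* const maps[], Handler const handlers[],
                                  std::size_t count, Handler generic_miss) {
      for (std::size_t i = 0; i < count; ++i) {
        if (receiver_map == maps[i]) {
          return handlers[i](receiver, key);   // jump to the cached handler
        }
      }
      return generic_miss(receiver, key);      // no map matched: generic path
    }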
@@ -3084,7 +3114,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadPolymorphic(
3084
3114
  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
3085
3115
 
3086
3116
  // Return the generated code.
3087
- return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC);
3117
+ return GetCode(Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
3088
3118
  }
3089
3119
 
3090
3120
 
@@ -3109,7 +3139,13 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object,
3109
3139
 
3110
3140
  // a3 is used as scratch register. a1 and a2 keep their values if a jump to
3111
3141
  // the miss label is generated.
3112
- GenerateStoreField(masm(), object, index, transition, a2, a1, a3, &miss);
3142
+ GenerateStoreField(masm(),
3143
+ object,
3144
+ index,
3145
+ transition,
3146
+ name,
3147
+ a2, a1, a3, t0,
3148
+ &miss);
3113
3149
  __ bind(&miss);
3114
3150
 
3115
3151
  __ DecrementCounter(counters->keyed_store_field(), 1, a3, t0);
@@ -3117,7 +3153,9 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object,
3117
3153
  __ Jump(ic, RelocInfo::CODE_TARGET);
3118
3154
 
3119
3155
  // Return the generated code.
3120
- return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name);
3156
+ return GetCode(transition.is_null()
3157
+ ? Code::FIELD
3158
+ : Code::MAP_TRANSITION, name);
3121
3159
  }
3122
3160
 
3123
3161
 
@@ -3141,7 +3179,7 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreElement(
3141
3179
  __ Jump(ic, RelocInfo::CODE_TARGET);
3142
3180
 
3143
3181
  // Return the generated code.
3144
- return GetCode(NORMAL, factory()->empty_string());
3182
+ return GetCode(Code::NORMAL, factory()->empty_string());
3145
3183
  }
3146
3184
 
3147
3185
 
@@ -3179,7 +3217,7 @@ Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
3179
3217
  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
3180
3218
 
3181
3219
  // Return the generated code.
3182
- return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC);
3220
+ return GetCode(Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
3183
3221
  }
3184
3222
 
3185
3223
 
@@ -3219,7 +3257,7 @@ Handle<Code> ConstructStubCompiler::CompileConstructStub(
3219
3257
  // t7: undefined
3220
3258
  __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
3221
3259
  __ Check(ne, "Function constructed by construct stub.",
3222
- a3, Operand(JS_FUNCTION_TYPE));
3260
+ a3, Operand(JS_FUNCTION_TYPE));
3223
3261
  #endif
3224
3262
 
3225
3263
  // Now allocate the JSObject in new space.
@@ -3227,7 +3265,13 @@ Handle<Code> ConstructStubCompiler::CompileConstructStub(
3227
3265
  // a1: constructor function
3228
3266
  // a2: initial map
3229
3267
  // t7: undefined
3268
+ ASSERT(function->has_initial_map());
3230
3269
  __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
3270
+ #ifdef DEBUG
3271
+ int instance_size = function->initial_map()->instance_size();
3272
+ __ Check(eq, "Instance size of initial map changed.",
3273
+ a3, Operand(instance_size >> kPointerSizeLog2));
3274
+ #endif
3231
3275
  __ AllocateInNewSpace(a3, t4, t5, t6, &generic_stub_call, SIZE_IN_WORDS);
3232
3276
 
3233
3277
  // Allocated the JSObject, now initialize the fields. Map is set to initial
@@ -3290,7 +3334,6 @@ Handle<Code> ConstructStubCompiler::CompileConstructStub(
3290
3334
  }
3291
3335
 
3292
3336
  // Fill the unused in-object property fields with undefined.
3293
- ASSERT(function->has_initial_map());
3294
3337
  for (int i = shared->this_property_assignments_count();
3295
3338
  i < function->initial_map()->inobject_properties();
3296
3339
  i++) {
@@ -3415,6 +3458,7 @@ static void GenerateSmiKeyCheck(MacroAssembler* masm,
  Register scratch0,
  Register scratch1,
  FPURegister double_scratch0,
+ FPURegister double_scratch1,
  Label* fail) {
  if (CpuFeatures::IsSupported(FPU)) {
  CpuFeatures::Scope scope(FPU);
@@ -3430,15 +3474,15 @@ static void GenerateSmiKeyCheck(MacroAssembler* masm,
  DONT_DO_SMI_CHECK);
  __ ldc1(double_scratch0, FieldMemOperand(key, HeapNumber::kValueOffset));
  __ EmitFPUTruncate(kRoundToZero,
- double_scratch0,
- double_scratch0,
  scratch0,
+ double_scratch0,
+ at,
+ double_scratch1,
  scratch1,
  kCheckForInexactConversion);
 
  __ Branch(fail, ne, scratch1, Operand(zero_reg));
 
- __ mfc1(scratch0, double_scratch0);
  __ SmiTagCheckOverflow(key, scratch0, scratch1);
  __ BranchOnOverflow(fail, scratch1);
  __ bind(&key_ok);
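GenerateSmiKeyCheck, which now takes a second FPU scratch register for the reshuffled EmitFPUTruncate call, accepts a key that is either already a smi or a heap number whose value converts exactly to a smi-sized integer; anything else branches to the fail label. The acceptance rule for the heap-number case in portable C++, assuming the 31-bit smi range of 32-bit MIPS; an illustrative sketch, not the stub itself:

    #include <cmath>
    #include <cstdint>
    #include <optional>

    // Returns the integer key if it is usable as a smi key, nullopt to fail.
    std::optional<int32_t> NormalizeHeapNumberKeySketch(double key) {
      double truncated = std::trunc(key);            // round toward zero
      if (truncated != key) return std::nullopt;     // inexact conversion: fail
      if (truncated < -(1 << 30) || truncated >= (1 << 30)) {
        return std::nullopt;                         // would overflow a 31-bit smi
      }
      return static_cast<int32_t>(truncated);        // smi-tagging succeeds
    }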
@@ -3449,343 +3493,6 @@ static void GenerateSmiKeyCheck(MacroAssembler* masm,
3449
3493
  }
3450
3494
 
3451
3495
 
3452
- void KeyedLoadStubCompiler::GenerateLoadExternalArray(
3453
- MacroAssembler* masm,
3454
- ElementsKind elements_kind) {
3455
- // ---------- S t a t e --------------
3456
- // -- ra : return address
3457
- // -- a0 : key
3458
- // -- a1 : receiver
3459
- // -----------------------------------
3460
- Label miss_force_generic, slow, failed_allocation;
3461
-
3462
- Register key = a0;
3463
- Register receiver = a1;
3464
-
3465
- // This stub is meant to be tail-jumped to, the receiver must already
3466
- // have been verified by the caller to not be a smi.
3467
-
3468
- // Check that the key is a smi or a heap number convertible to a smi.
3469
- GenerateSmiKeyCheck(masm, key, t0, t1, f2, &miss_force_generic);
3470
-
3471
- __ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset));
3472
- // a3: elements array
3473
-
3474
- // Check that the index is in range.
3475
- __ lw(t1, FieldMemOperand(a3, ExternalArray::kLengthOffset));
3476
- __ sra(t2, key, kSmiTagSize);
3477
- // Unsigned comparison catches both negative and too-large values.
3478
- __ Branch(&miss_force_generic, Ugreater_equal, key, Operand(t1));
3479
-
3480
- __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset));
3481
- // a3: base pointer of external storage
3482
-
3483
- // We are not untagging smi key and instead work with it
3484
- // as if it was premultiplied by 2.
3485
- STATIC_ASSERT((kSmiTag == 0) && (kSmiTagSize == 1));
3486
-
3487
- Register value = a2;
3488
- switch (elements_kind) {
3489
- case EXTERNAL_BYTE_ELEMENTS:
3490
- __ srl(t2, key, 1);
3491
- __ addu(t3, a3, t2);
3492
- __ lb(value, MemOperand(t3, 0));
3493
- break;
3494
- case EXTERNAL_PIXEL_ELEMENTS:
3495
- case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3496
- __ srl(t2, key, 1);
3497
- __ addu(t3, a3, t2);
3498
- __ lbu(value, MemOperand(t3, 0));
3499
- break;
3500
- case EXTERNAL_SHORT_ELEMENTS:
3501
- __ addu(t3, a3, key);
3502
- __ lh(value, MemOperand(t3, 0));
3503
- break;
3504
- case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3505
- __ addu(t3, a3, key);
3506
- __ lhu(value, MemOperand(t3, 0));
3507
- break;
3508
- case EXTERNAL_INT_ELEMENTS:
3509
- case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3510
- __ sll(t2, key, 1);
3511
- __ addu(t3, a3, t2);
3512
- __ lw(value, MemOperand(t3, 0));
3513
- break;
3514
- case EXTERNAL_FLOAT_ELEMENTS:
3515
- __ sll(t3, t2, 2);
3516
- __ addu(t3, a3, t3);
3517
- if (CpuFeatures::IsSupported(FPU)) {
3518
- CpuFeatures::Scope scope(FPU);
3519
- __ lwc1(f0, MemOperand(t3, 0));
3520
- } else {
3521
- __ lw(value, MemOperand(t3, 0));
3522
- }
3523
- break;
3524
- case EXTERNAL_DOUBLE_ELEMENTS:
3525
- __ sll(t2, key, 2);
3526
- __ addu(t3, a3, t2);
3527
- if (CpuFeatures::IsSupported(FPU)) {
3528
- CpuFeatures::Scope scope(FPU);
3529
- __ ldc1(f0, MemOperand(t3, 0));
3530
- } else {
3531
- // t3: pointer to the beginning of the double we want to load.
3532
- __ lw(a2, MemOperand(t3, 0));
3533
- __ lw(a3, MemOperand(t3, Register::kSizeInBytes));
3534
- }
3535
- break;
3536
- case FAST_ELEMENTS:
3537
- case FAST_SMI_ELEMENTS:
3538
- case FAST_DOUBLE_ELEMENTS:
3539
- case FAST_HOLEY_ELEMENTS:
3540
- case FAST_HOLEY_SMI_ELEMENTS:
3541
- case FAST_HOLEY_DOUBLE_ELEMENTS:
3542
- case DICTIONARY_ELEMENTS:
3543
- case NON_STRICT_ARGUMENTS_ELEMENTS:
3544
- UNREACHABLE();
3545
- break;
3546
- }
3547
-
3548
- // For integer array types:
3549
- // a2: value
3550
- // For float array type:
3551
- // f0: value (if FPU is supported)
3552
- // a2: value (if FPU is not supported)
3553
- // For double array type:
3554
- // f0: value (if FPU is supported)
3555
- // a2/a3: value (if FPU is not supported)
3556
-
3557
- if (elements_kind == EXTERNAL_INT_ELEMENTS) {
3558
- // For the Int and UnsignedInt array types, we need to see whether
3559
- // the value can be represented in a Smi. If not, we need to convert
3560
- // it to a HeapNumber.
3561
- Label box_int;
3562
- __ Subu(t3, value, Operand(0xC0000000)); // Non-smi value gives neg result.
3563
- __ Branch(&box_int, lt, t3, Operand(zero_reg));
3564
- // Tag integer as smi and return it.
3565
- __ sll(v0, value, kSmiTagSize);
3566
- __ Ret();
3567
-
3568
- __ bind(&box_int);
3569
- // Allocate a HeapNumber for the result and perform int-to-double
3570
- // conversion.
3571
- // The arm version uses a temporary here to save r0, but we don't need to
3572
- // (a0 is not modified).
3573
- __ LoadRoot(t1, Heap::kHeapNumberMapRootIndex);
3574
- __ AllocateHeapNumber(v0, a3, t0, t1, &slow);
3575
-
3576
- if (CpuFeatures::IsSupported(FPU)) {
3577
- CpuFeatures::Scope scope(FPU);
3578
- __ mtc1(value, f0);
3579
- __ cvt_d_w(f0, f0);
3580
- __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
3581
- __ Ret();
3582
- } else {
3583
- Register dst1 = t2;
3584
- Register dst2 = t3;
3585
- FloatingPointHelper::Destination dest =
3586
- FloatingPointHelper::kCoreRegisters;
3587
- FloatingPointHelper::ConvertIntToDouble(masm,
3588
- value,
3589
- dest,
3590
- f0,
3591
- dst1,
3592
- dst2,
3593
- t1,
3594
- f2);
3595
- __ sw(dst1, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3596
- __ sw(dst2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3597
- __ Ret();
3598
- }
3599
- } else if (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS) {
3600
- // The test is different for unsigned int values. Since we need
3601
- // the value to be in the range of a positive smi, we can't
3602
- // handle either of the top two bits being set in the value.
3603
- if (CpuFeatures::IsSupported(FPU)) {
3604
- CpuFeatures::Scope scope(FPU);
3605
- Label pl_box_int;
3606
- __ And(t2, value, Operand(0xC0000000));
3607
- __ Branch(&pl_box_int, ne, t2, Operand(zero_reg));
3608
-
3609
- // It can fit in an Smi.
3610
- // Tag integer as smi and return it.
3611
- __ sll(v0, value, kSmiTagSize);
3612
- __ Ret();
3613
-
3614
- __ bind(&pl_box_int);
3615
- // Allocate a HeapNumber for the result and perform int-to-double
3616
- // conversion. Don't use a0 and a1 as AllocateHeapNumber clobbers all
3617
- // registers - also when jumping due to exhausted young space.
3618
- __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3619
- __ AllocateHeapNumber(v0, t2, t3, t6, &slow);
3620
-
3621
- // This is replaced by a macro:
3622
- // __ mtc1(value, f0); // LS 32-bits.
3623
- // __ mtc1(zero_reg, f1); // MS 32-bits are all zero.
3624
- // __ cvt_d_l(f0, f0); // Use 64 bit conv to get correct unsigned 32-bit.
3625
-
3626
- __ Cvt_d_uw(f0, value, f22);
3627
-
3628
- __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
3629
-
3630
- __ Ret();
3631
- } else {
3632
- // Check whether unsigned integer fits into smi.
3633
- Label box_int_0, box_int_1, done;
3634
- __ And(t2, value, Operand(0x80000000));
3635
- __ Branch(&box_int_0, ne, t2, Operand(zero_reg));
3636
- __ And(t2, value, Operand(0x40000000));
3637
- __ Branch(&box_int_1, ne, t2, Operand(zero_reg));
3638
-
3639
- // Tag integer as smi and return it.
3640
- __ sll(v0, value, kSmiTagSize);
3641
- __ Ret();
3642
-
3643
- Register hiword = value; // a2.
3644
- Register loword = a3;
3645
-
3646
- __ bind(&box_int_0);
3647
- // Integer does not have leading zeros.
3648
- GenerateUInt2Double(masm, hiword, loword, t0, 0);
3649
- __ Branch(&done);
3650
-
3651
- __ bind(&box_int_1);
3652
- // Integer has one leading zero.
3653
- GenerateUInt2Double(masm, hiword, loword, t0, 1);
3654
-
3655
-
3656
- __ bind(&done);
3657
- // Integer was converted to double in registers hiword:loword.
3658
- // Wrap it into a HeapNumber. Don't use a0 and a1 as AllocateHeapNumber
3659
- // clobbers all registers - also when jumping due to exhausted young
3660
- // space.
3661
- __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3662
- __ AllocateHeapNumber(t2, t3, t5, t6, &slow);
3663
-
3664
- __ sw(hiword, FieldMemOperand(t2, HeapNumber::kExponentOffset));
3665
- __ sw(loword, FieldMemOperand(t2, HeapNumber::kMantissaOffset));
3666
-
3667
- __ mov(v0, t2);
3668
- __ Ret();
3669
- }
3670
- } else if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
3671
- // For the floating-point array type, we need to always allocate a
3672
- // HeapNumber.
3673
- if (CpuFeatures::IsSupported(FPU)) {
3674
- CpuFeatures::Scope scope(FPU);
3675
- // Allocate a HeapNumber for the result. Don't use a0 and a1 as
3676
- // AllocateHeapNumber clobbers all registers - also when jumping due to
3677
- // exhausted young space.
3678
- __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3679
- __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3680
- // The float (single) value is already in fpu reg f0 (if we use float).
3681
- __ cvt_d_s(f0, f0);
3682
- __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
3683
- __ Ret();
3684
- } else {
3685
- // Allocate a HeapNumber for the result. Don't use a0 and a1 as
3686
- // AllocateHeapNumber clobbers all registers - also when jumping due to
3687
- // exhausted young space.
3688
- __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3689
- __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3690
- // FPU is not available, do manual single to double conversion.
3691
-
3692
- // a2: floating point value (binary32).
3693
- // v0: heap number for result
3694
-
3695
- // Extract mantissa to t4.
3696
- __ And(t4, value, Operand(kBinary32MantissaMask));
3697
-
3698
- // Extract exponent to t5.
3699
- __ srl(t5, value, kBinary32MantissaBits);
3700
- __ And(t5, t5, Operand(kBinary32ExponentMask >> kBinary32MantissaBits));
3701
-
3702
- Label exponent_rebiased;
3703
- __ Branch(&exponent_rebiased, eq, t5, Operand(zero_reg));
3704
-
3705
- __ li(t0, 0x7ff);
3706
- __ Xor(t1, t5, Operand(0xFF));
3707
- __ Movz(t5, t0, t1); // Set t5 to 0x7ff only if t5 is equal to 0xff.
3708
- __ Branch(&exponent_rebiased, eq, t0, Operand(0xff));
3709
-
3710
- // Rebias exponent.
3711
- __ Addu(t5,
3712
- t5,
3713
- Operand(-kBinary32ExponentBias + HeapNumber::kExponentBias));
3714
-
3715
- __ bind(&exponent_rebiased);
3716
- __ And(a2, value, Operand(kBinary32SignMask));
3717
- value = no_reg;
3718
- __ sll(t0, t5, HeapNumber::kMantissaBitsInTopWord);
3719
- __ or_(a2, a2, t0);
3720
-
3721
- // Shift mantissa.
3722
- static const int kMantissaShiftForHiWord =
3723
- kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
3724
-
3725
- static const int kMantissaShiftForLoWord =
3726
- kBitsPerInt - kMantissaShiftForHiWord;
3727
-
3728
- __ srl(t0, t4, kMantissaShiftForHiWord);
3729
- __ or_(a2, a2, t0);
3730
- __ sll(a0, t4, kMantissaShiftForLoWord);
3731
-
3732
- __ sw(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3733
- __ sw(a0, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3734
- __ Ret();
3735
- }
3736
-
3737
- } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
3738
- if (CpuFeatures::IsSupported(FPU)) {
3739
- CpuFeatures::Scope scope(FPU);
3740
- // Allocate a HeapNumber for the result. Don't use a0 and a1 as
3741
- // AllocateHeapNumber clobbers all registers - also when jumping due to
3742
- // exhausted young space.
3743
- __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3744
- __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3745
- // The double value is already in f0
3746
- __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
3747
- __ Ret();
3748
- } else {
3749
- // Allocate a HeapNumber for the result. Don't use a0 and a1 as
3750
- // AllocateHeapNumber clobbers all registers - also when jumping due to
3751
- // exhausted young space.
3752
- __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3753
- __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3754
-
3755
- __ sw(a2, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3756
- __ sw(a3, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3757
- __ Ret();
3758
- }
3759
-
3760
- } else {
3761
- // Tag integer as smi and return it.
3762
- __ sll(v0, value, kSmiTagSize);
3763
- __ Ret();
3764
- }
3765
-
3766
- // Slow case, key and receiver still in a0 and a1.
3767
- __ bind(&slow);
3768
- __ IncrementCounter(
3769
- masm->isolate()->counters()->keyed_load_external_array_slow(),
3770
- 1, a2, a3);
3771
-
3772
- // ---------- S t a t e --------------
3773
- // -- ra : return address
3774
- // -- a0 : key
3775
- // -- a1 : receiver
3776
- // -----------------------------------
3777
-
3778
- __ Push(a1, a0);
3779
-
3780
- __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
3781
-
3782
- __ bind(&miss_force_generic);
3783
- Handle<Code> stub =
3784
- masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
3785
- __ Jump(stub, RelocInfo::CODE_TARGET);
3786
- }
3787
-
3788
-
3789
3496
  void KeyedStoreStubCompiler::GenerateStoreExternalArray(
3790
3497
  MacroAssembler* masm,
3791
3498
  ElementsKind elements_kind) {
@@ -3808,7 +3515,7 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
3808
3515
  // have been verified by the caller to not be a smi.
3809
3516
 
3810
3517
  // Check that the key is a smi or a heap number convertible to a smi.
3811
- GenerateSmiKeyCheck(masm, key, t0, t1, f2, &miss_force_generic);
3518
+ GenerateSmiKeyCheck(masm, key, t0, t1, f2, f4, &miss_force_generic);
3812
3519
 
3813
3520
  __ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset));
3814
3521
 
@@ -3887,7 +3594,7 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
3887
3594
  }
3888
3595
  FloatingPointHelper::ConvertIntToDouble(
3889
3596
  masm, t1, destination,
3890
- f0, t2, t3, // These are: double_dst, dst1, dst2.
3597
+ f0, t2, t3, // These are: double_dst, dst_mantissa, dst_exponent.
3891
3598
  t0, f2); // These are: scratch2, single_scratch.
3892
3599
  if (destination == FloatingPointHelper::kFPURegisters) {
3893
3600
  CpuFeatures::Scope scope(FPU);
@@ -4005,7 +3712,7 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
4005
3712
  __ xor_(t1, t6, t5);
4006
3713
  __ li(t2, kBinary32ExponentMask);
4007
3714
  __ Movz(t6, t2, t1); // Only if t6 is equal to t5.
4008
- __ Branch(&nan_or_infinity_or_zero, eq, t6, Operand(t5));
3715
+ __ Branch(&nan_or_infinity_or_zero, eq, t1, Operand(zero_reg));
4009
3716
 
4010
3717
  // Rebias exponent.
4011
3718
  __ srl(t6, t6, HeapNumber::kExponentShift);
@@ -4036,7 +3743,7 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
4036
3743
 
4037
3744
  __ bind(&done);
4038
3745
  __ sll(t9, key, 1);
4039
- __ addu(t9, a2, t9);
3746
+ __ addu(t9, a3, t9);
4040
3747
  __ sw(t3, MemOperand(t9, 0));
4041
3748
 
4042
3749
  // Entry registers are intact, a0 holds the value which is the return
@@ -4054,7 +3761,7 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
4054
3761
  __ or_(t3, t6, t4);
4055
3762
  __ Branch(&done);
4056
3763
  } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
4057
- __ sll(t8, t0, 3);
3764
+ __ sll(t8, key, 2);
4058
3765
  __ addu(t8, a3, t8);
4059
3766
  // t8: effective address of destination element.
4060
3767
  __ sw(t4, MemOperand(t8, 0));
@@ -4185,115 +3892,6 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
 }


-void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
-  // ----------- S t a t e -------------
-  //  -- ra    : return address
-  //  -- a0    : key
-  //  -- a1    : receiver
-  // -----------------------------------
-  Label miss_force_generic;
-
-  // This stub is meant to be tail-jumped to, the receiver must already
-  // have been verified by the caller to not be a smi.
-
-  // Check that the key is a smi or a heap number convertible to a smi.
-  GenerateSmiKeyCheck(masm, a0, t0, t1, f2, &miss_force_generic);
-
-  // Get the elements array.
-  __ lw(a2, FieldMemOperand(a1, JSObject::kElementsOffset));
-  __ AssertFastElements(a2);
-
-  // Check that the key is within bounds.
-  __ lw(a3, FieldMemOperand(a2, FixedArray::kLengthOffset));
-  __ Branch(USE_DELAY_SLOT, &miss_force_generic, hs, a0, Operand(a3));
-
-  // Load the result and make sure it's not the hole.
-  __ Addu(a3, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
-  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
-  __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
-  __ Addu(t0, t0, a3);
-  __ lw(t0, MemOperand(t0));
-  __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
-  __ Branch(&miss_force_generic, eq, t0, Operand(t1));
-  __ Ret(USE_DELAY_SLOT);
-  __ mov(v0, t0);
-
-  __ bind(&miss_force_generic);
-  Handle<Code> stub =
-      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
-  __ Jump(stub, RelocInfo::CODE_TARGET);
-}
-
-
-void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(
-    MacroAssembler* masm) {
-  // ----------- S t a t e -------------
-  //  -- ra    : return address
-  //  -- a0    : key
-  //  -- a1    : receiver
-  // -----------------------------------
-  Label miss_force_generic, slow_allocate_heapnumber;
-
-  Register key_reg = a0;
-  Register receiver_reg = a1;
-  Register elements_reg = a2;
-  Register heap_number_reg = a2;
-  Register indexed_double_offset = a3;
-  Register scratch = t0;
-  Register scratch2 = t1;
-  Register scratch3 = t2;
-  Register heap_number_map = t3;
-
-  // This stub is meant to be tail-jumped to, the receiver must already
-  // have been verified by the caller to not be a smi.
-
-  // Check that the key is a smi or a heap number convertible to a smi.
-  GenerateSmiKeyCheck(masm, key_reg, t0, t1, f2, &miss_force_generic);
-
-  // Get the elements array.
-  __ lw(elements_reg,
-        FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
-
-  // Check that the key is within bounds.
-  __ lw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
-  __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch));
-
-  // Load the upper word of the double in the fixed array and test for NaN.
-  __ sll(scratch2, key_reg, kDoubleSizeLog2 - kSmiTagSize);
-  __ Addu(indexed_double_offset, elements_reg, Operand(scratch2));
-  uint32_t upper_32_offset = FixedArray::kHeaderSize + sizeof(kHoleNanLower32);
-  __ lw(scratch, FieldMemOperand(indexed_double_offset, upper_32_offset));
-  __ Branch(&miss_force_generic, eq, scratch, Operand(kHoleNanUpper32));
-
-  // Non-NaN. Allocate a new heap number and copy the double value into it.
-  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
-  __ AllocateHeapNumber(heap_number_reg, scratch2, scratch3,
-                        heap_number_map, &slow_allocate_heapnumber);
-
-  // Don't need to reload the upper 32 bits of the double, it's already in
-  // scratch.
-  __ sw(scratch, FieldMemOperand(heap_number_reg,
-                                 HeapNumber::kExponentOffset));
-  __ lw(scratch, FieldMemOperand(indexed_double_offset,
-                                 FixedArray::kHeaderSize));
-  __ sw(scratch, FieldMemOperand(heap_number_reg,
-                                 HeapNumber::kMantissaOffset));
-
-  __ mov(v0, heap_number_reg);
-  __ Ret();
-
-  __ bind(&slow_allocate_heapnumber);
-  Handle<Code> slow_ic =
-      masm->isolate()->builtins()->KeyedLoadIC_Slow();
-  __ Jump(slow_ic, RelocInfo::CODE_TARGET);
-
-  __ bind(&miss_force_generic);
-  Handle<Code> miss_ic =
-      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
-  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
-}
-
-
 void KeyedStoreStubCompiler::GenerateStoreFastElement(
     MacroAssembler* masm,
     bool is_js_array,
@@ -4322,7 +3920,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
   // have been verified by the caller to not be a smi.

   // Check that the key is a smi or a heap number convertible to a smi.
-  GenerateSmiKeyCheck(masm, key_reg, t0, t1, f2, &miss_force_generic);
+  GenerateSmiKeyCheck(masm, key_reg, t0, t1, f2, f4, &miss_force_generic);

   if (IsFastSmiElementsKind(elements_kind)) {
     __ JumpIfNotSmi(value_reg, &transition_elements_kind);
@@ -4466,11 +4064,12 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
   //  -- a1    : key
   //  -- a2    : receiver
   //  -- ra    : return address
-  //  -- a3    : scratch
+  //  -- a3    : scratch (elements backing store)
   //  -- t0    : scratch (elements_reg)
   //  -- t1    : scratch (mantissa_reg)
   //  -- t2    : scratch (exponent_reg)
   //  -- t3    : scratch4
+  //  -- t4    : scratch
   // -----------------------------------
   Label miss_force_generic, transition_elements_kind, grow, slow;
   Label finish_store, check_capacity;
@@ -4483,13 +4082,14 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
   Register scratch2 = t1;
   Register scratch3 = t2;
   Register scratch4 = t3;
+  Register scratch5 = t4;
   Register length_reg = t3;

   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.

   // Check that the key is a smi or a heap number convertible to a smi.
-  GenerateSmiKeyCheck(masm, key_reg, t0, t1, f2, &miss_force_generic);
+  GenerateSmiKeyCheck(masm, key_reg, t0, t1, f2, f4, &miss_force_generic);

   __ lw(elements_reg,
         FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
@@ -4513,7 +4113,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(

   __ StoreNumberToDoubleElements(value_reg,
                                  key_reg,
-                                 receiver_reg,
+                                 // All registers after this are overwritten.
                                  elements_reg,
                                  scratch1,
                                  scratch2,
@@ -4563,14 +4163,32 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
   __ AllocateInNewSpace(size, elements_reg, scratch1, scratch2, &slow,
                         TAG_OBJECT);

-  // Initialize the new FixedDoubleArray. Leave elements unitialized for
-  // efficiency, they are guaranteed to be initialized before use.
+  // Initialize the new FixedDoubleArray.
   __ LoadRoot(scratch1, Heap::kFixedDoubleArrayMapRootIndex);
   __ sw(scratch1, FieldMemOperand(elements_reg, JSObject::kMapOffset));
   __ li(scratch1, Operand(Smi::FromInt(JSArray::kPreallocatedArrayElements)));
   __ sw(scratch1,
         FieldMemOperand(elements_reg, FixedDoubleArray::kLengthOffset));

+  __ mov(scratch1, elements_reg);
+  __ StoreNumberToDoubleElements(value_reg,
+                                 key_reg,
+                                 // All registers after this are overwritten.
+                                 scratch1,
+                                 scratch2,
+                                 scratch3,
+                                 scratch4,
+                                 scratch5,
+                                 &transition_elements_kind);
+
+  __ li(scratch1, Operand(kHoleNanLower32));
+  __ li(scratch2, Operand(kHoleNanUpper32));
+  for (int i = 1; i < JSArray::kPreallocatedArrayElements; i++) {
+    int offset = FixedDoubleArray::OffsetOfElementAt(i);
+    __ sw(scratch1, FieldMemOperand(elements_reg, offset));
+    __ sw(scratch2, FieldMemOperand(elements_reg, offset + kPointerSize));
+  }
+
   // Install the new backing store in the JSArray.
   __ sw(elements_reg,
         FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
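The added block stores the incoming value through StoreNumberToDoubleElements and then fills the remaining preallocated slots of the fresh FixedDoubleArray with the hole NaN, so unwritten elements read back as holes rather than stale memory; the loop writes the hole as two 32-bit words (kHoleNanLower32 at the element offset, kHoleNanUpper32 one word above). A rough host-C++ equivalent follows; the constant values and the little-endian layout are assumptions for illustration, not the V8 definitions.

#include <cstdint>
#include <cstring>

// Assumed hole-NaN words; any NaN bit pattern serves for this illustration.
const uint32_t kHoleNanLower32Sketch = 0xffffffffu;
const uint32_t kHoleNanUpper32Sketch = 0x7fffffffu;

// Fill slots [from, to) of a double array with the hole NaN, writing the
// lower word first as the generated little-endian MIPS code does.
void FillWithHoleNan(double* elements, int from, int to) {
  for (int i = from; i < to; i++) {
    uint32_t words[2] = { kHoleNanLower32Sketch, kHoleNanUpper32Sketch };
    std::memcpy(&elements[i], words, sizeof(double));
  }
}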
@@ -4583,7 +4201,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
   __ sw(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
   __ lw(elements_reg,
         FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
-  __ jmp(&finish_store);
+  __ Ret();

   __ bind(&check_capacity);
   // Make sure that the backing store can hold additional elements.