libv8 3.10.8.0 → 3.11.8.0

Files changed (215)
  1. data/Rakefile +10 -3
  2. data/ext/libv8/compiler.rb +46 -0
  3. data/ext/libv8/extconf.rb +5 -1
  4. data/ext/libv8/make.rb +13 -0
  5. data/lib/libv8/version.rb +1 -1
  6. data/patches/add-freebsd9-and-freebsd10-to-gyp-GetFlavor.patch +11 -0
  7. data/patches/src_platform-freebsd.cc.patch +10 -0
  8. data/vendor/v8/ChangeLog +124 -0
  9. data/vendor/v8/DEPS +27 -0
  10. data/vendor/v8/Makefile +7 -0
  11. data/vendor/v8/SConstruct +15 -2
  12. data/vendor/v8/build/common.gypi +129 -157
  13. data/vendor/v8/build/gyp_v8 +11 -25
  14. data/vendor/v8/build/standalone.gypi +9 -3
  15. data/vendor/v8/include/v8.h +5 -3
  16. data/vendor/v8/src/SConscript +1 -0
  17. data/vendor/v8/src/api.cc +4 -33
  18. data/vendor/v8/src/api.h +2 -2
  19. data/vendor/v8/src/arm/builtins-arm.cc +5 -4
  20. data/vendor/v8/src/arm/code-stubs-arm.cc +21 -14
  21. data/vendor/v8/src/arm/codegen-arm.cc +2 -2
  22. data/vendor/v8/src/arm/debug-arm.cc +3 -1
  23. data/vendor/v8/src/arm/full-codegen-arm.cc +3 -102
  24. data/vendor/v8/src/arm/ic-arm.cc +30 -33
  25. data/vendor/v8/src/arm/lithium-arm.cc +20 -7
  26. data/vendor/v8/src/arm/lithium-arm.h +10 -4
  27. data/vendor/v8/src/arm/lithium-codegen-arm.cc +106 -60
  28. data/vendor/v8/src/arm/macro-assembler-arm.cc +49 -39
  29. data/vendor/v8/src/arm/macro-assembler-arm.h +5 -4
  30. data/vendor/v8/src/arm/regexp-macro-assembler-arm.cc +115 -55
  31. data/vendor/v8/src/arm/regexp-macro-assembler-arm.h +7 -6
  32. data/vendor/v8/src/arm/simulator-arm.h +6 -6
  33. data/vendor/v8/src/arm/stub-cache-arm.cc +64 -19
  34. data/vendor/v8/src/array.js +7 -3
  35. data/vendor/v8/src/ast.cc +11 -6
  36. data/vendor/v8/src/bootstrapper.cc +9 -11
  37. data/vendor/v8/src/builtins.cc +61 -31
  38. data/vendor/v8/src/code-stubs.cc +23 -9
  39. data/vendor/v8/src/code-stubs.h +1 -0
  40. data/vendor/v8/src/codegen.h +3 -3
  41. data/vendor/v8/src/compiler.cc +1 -1
  42. data/vendor/v8/src/contexts.h +2 -18
  43. data/vendor/v8/src/d8.cc +94 -93
  44. data/vendor/v8/src/d8.h +1 -1
  45. data/vendor/v8/src/debug-agent.cc +3 -3
  46. data/vendor/v8/src/debug.cc +41 -1
  47. data/vendor/v8/src/debug.h +50 -0
  48. data/vendor/v8/src/elements-kind.cc +134 -0
  49. data/vendor/v8/src/elements-kind.h +210 -0
  50. data/vendor/v8/src/elements.cc +356 -190
  51. data/vendor/v8/src/elements.h +36 -28
  52. data/vendor/v8/src/factory.cc +44 -4
  53. data/vendor/v8/src/factory.h +11 -7
  54. data/vendor/v8/src/flag-definitions.h +3 -0
  55. data/vendor/v8/src/frames.h +3 -0
  56. data/vendor/v8/src/full-codegen.cc +2 -1
  57. data/vendor/v8/src/func-name-inferrer.h +2 -0
  58. data/vendor/v8/src/globals.h +3 -0
  59. data/vendor/v8/src/heap-inl.h +16 -4
  60. data/vendor/v8/src/heap.cc +38 -32
  61. data/vendor/v8/src/heap.h +3 -17
  62. data/vendor/v8/src/hydrogen-instructions.cc +28 -5
  63. data/vendor/v8/src/hydrogen-instructions.h +142 -44
  64. data/vendor/v8/src/hydrogen.cc +160 -55
  65. data/vendor/v8/src/hydrogen.h +2 -0
  66. data/vendor/v8/src/ia32/assembler-ia32.h +3 -0
  67. data/vendor/v8/src/ia32/builtins-ia32.cc +5 -4
  68. data/vendor/v8/src/ia32/code-stubs-ia32.cc +22 -16
  69. data/vendor/v8/src/ia32/codegen-ia32.cc +2 -2
  70. data/vendor/v8/src/ia32/debug-ia32.cc +29 -2
  71. data/vendor/v8/src/ia32/full-codegen-ia32.cc +8 -101
  72. data/vendor/v8/src/ia32/ic-ia32.cc +23 -19
  73. data/vendor/v8/src/ia32/lithium-codegen-ia32.cc +126 -80
  74. data/vendor/v8/src/ia32/lithium-codegen-ia32.h +2 -1
  75. data/vendor/v8/src/ia32/lithium-ia32.cc +15 -9
  76. data/vendor/v8/src/ia32/lithium-ia32.h +14 -6
  77. data/vendor/v8/src/ia32/macro-assembler-ia32.cc +50 -40
  78. data/vendor/v8/src/ia32/macro-assembler-ia32.h +5 -4
  79. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.cc +113 -43
  80. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.h +9 -4
  81. data/vendor/v8/src/ia32/simulator-ia32.h +4 -4
  82. data/vendor/v8/src/ia32/stub-cache-ia32.cc +52 -14
  83. data/vendor/v8/src/ic.cc +77 -20
  84. data/vendor/v8/src/ic.h +18 -2
  85. data/vendor/v8/src/incremental-marking-inl.h +21 -5
  86. data/vendor/v8/src/incremental-marking.cc +35 -8
  87. data/vendor/v8/src/incremental-marking.h +12 -3
  88. data/vendor/v8/src/isolate.cc +12 -2
  89. data/vendor/v8/src/isolate.h +1 -1
  90. data/vendor/v8/src/jsregexp.cc +66 -26
  91. data/vendor/v8/src/jsregexp.h +60 -31
  92. data/vendor/v8/src/list-inl.h +8 -0
  93. data/vendor/v8/src/list.h +3 -0
  94. data/vendor/v8/src/lithium.cc +5 -2
  95. data/vendor/v8/src/liveedit.cc +57 -5
  96. data/vendor/v8/src/mark-compact-inl.h +17 -11
  97. data/vendor/v8/src/mark-compact.cc +100 -143
  98. data/vendor/v8/src/mark-compact.h +44 -20
  99. data/vendor/v8/src/messages.js +131 -99
  100. data/vendor/v8/src/mips/builtins-mips.cc +5 -4
  101. data/vendor/v8/src/mips/code-stubs-mips.cc +23 -15
  102. data/vendor/v8/src/mips/codegen-mips.cc +2 -2
  103. data/vendor/v8/src/mips/debug-mips.cc +3 -1
  104. data/vendor/v8/src/mips/full-codegen-mips.cc +4 -102
  105. data/vendor/v8/src/mips/ic-mips.cc +34 -36
  106. data/vendor/v8/src/mips/lithium-codegen-mips.cc +116 -68
  107. data/vendor/v8/src/mips/lithium-mips.cc +20 -7
  108. data/vendor/v8/src/mips/lithium-mips.h +11 -4
  109. data/vendor/v8/src/mips/macro-assembler-mips.cc +50 -39
  110. data/vendor/v8/src/mips/macro-assembler-mips.h +5 -4
  111. data/vendor/v8/src/mips/regexp-macro-assembler-mips.cc +110 -50
  112. data/vendor/v8/src/mips/regexp-macro-assembler-mips.h +6 -5
  113. data/vendor/v8/src/mips/simulator-mips.h +5 -5
  114. data/vendor/v8/src/mips/stub-cache-mips.cc +66 -20
  115. data/vendor/v8/src/mksnapshot.cc +5 -1
  116. data/vendor/v8/src/objects-debug.cc +103 -6
  117. data/vendor/v8/src/objects-inl.h +215 -116
  118. data/vendor/v8/src/objects-printer.cc +13 -8
  119. data/vendor/v8/src/objects.cc +608 -331
  120. data/vendor/v8/src/objects.h +129 -94
  121. data/vendor/v8/src/parser.cc +16 -4
  122. data/vendor/v8/src/platform-freebsd.cc +1 -0
  123. data/vendor/v8/src/platform-linux.cc +9 -30
  124. data/vendor/v8/src/platform-posix.cc +28 -7
  125. data/vendor/v8/src/platform-win32.cc +15 -3
  126. data/vendor/v8/src/platform.h +2 -1
  127. data/vendor/v8/src/profile-generator-inl.h +25 -2
  128. data/vendor/v8/src/profile-generator.cc +300 -822
  129. data/vendor/v8/src/profile-generator.h +97 -214
  130. data/vendor/v8/src/regexp-macro-assembler-irregexp.cc +2 -1
  131. data/vendor/v8/src/regexp-macro-assembler-irregexp.h +2 -2
  132. data/vendor/v8/src/regexp-macro-assembler-tracer.cc +6 -5
  133. data/vendor/v8/src/regexp-macro-assembler-tracer.h +1 -1
  134. data/vendor/v8/src/regexp-macro-assembler.cc +7 -3
  135. data/vendor/v8/src/regexp-macro-assembler.h +10 -2
  136. data/vendor/v8/src/regexp.js +6 -0
  137. data/vendor/v8/src/runtime.cc +265 -212
  138. data/vendor/v8/src/runtime.h +6 -5
  139. data/vendor/v8/src/scopes.cc +20 -0
  140. data/vendor/v8/src/scopes.h +6 -3
  141. data/vendor/v8/src/spaces.cc +0 -2
  142. data/vendor/v8/src/string-stream.cc +2 -2
  143. data/vendor/v8/src/v8-counters.h +0 -2
  144. data/vendor/v8/src/v8natives.js +2 -2
  145. data/vendor/v8/src/v8utils.h +6 -3
  146. data/vendor/v8/src/version.cc +1 -1
  147. data/vendor/v8/src/x64/assembler-x64.h +2 -1
  148. data/vendor/v8/src/x64/builtins-x64.cc +5 -4
  149. data/vendor/v8/src/x64/code-stubs-x64.cc +25 -16
  150. data/vendor/v8/src/x64/codegen-x64.cc +2 -2
  151. data/vendor/v8/src/x64/debug-x64.cc +14 -1
  152. data/vendor/v8/src/x64/disasm-x64.cc +1 -1
  153. data/vendor/v8/src/x64/full-codegen-x64.cc +10 -106
  154. data/vendor/v8/src/x64/ic-x64.cc +20 -16
  155. data/vendor/v8/src/x64/lithium-codegen-x64.cc +156 -79
  156. data/vendor/v8/src/x64/lithium-codegen-x64.h +2 -1
  157. data/vendor/v8/src/x64/lithium-x64.cc +18 -8
  158. data/vendor/v8/src/x64/lithium-x64.h +7 -2
  159. data/vendor/v8/src/x64/macro-assembler-x64.cc +50 -40
  160. data/vendor/v8/src/x64/macro-assembler-x64.h +5 -4
  161. data/vendor/v8/src/x64/regexp-macro-assembler-x64.cc +122 -51
  162. data/vendor/v8/src/x64/regexp-macro-assembler-x64.h +17 -8
  163. data/vendor/v8/src/x64/simulator-x64.h +4 -4
  164. data/vendor/v8/src/x64/stub-cache-x64.cc +55 -17
  165. data/vendor/v8/test/cctest/cctest.status +1 -0
  166. data/vendor/v8/test/cctest/test-api.cc +24 -0
  167. data/vendor/v8/test/cctest/test-func-name-inference.cc +38 -0
  168. data/vendor/v8/test/cctest/test-heap-profiler.cc +21 -77
  169. data/vendor/v8/test/cctest/test-heap.cc +164 -3
  170. data/vendor/v8/test/cctest/test-list.cc +12 -0
  171. data/vendor/v8/test/cctest/test-mark-compact.cc +5 -5
  172. data/vendor/v8/test/cctest/test-regexp.cc +14 -8
  173. data/vendor/v8/test/cctest/testcfg.py +2 -0
  174. data/vendor/v8/test/mjsunit/accessor-map-sharing.js +176 -0
  175. data/vendor/v8/test/mjsunit/array-construct-transition.js +3 -3
  176. data/vendor/v8/test/mjsunit/array-literal-transitions.js +10 -10
  177. data/vendor/v8/test/mjsunit/big-array-literal.js +3 -0
  178. data/vendor/v8/test/mjsunit/compiler/inline-construct.js +4 -2
  179. data/vendor/v8/test/mjsunit/debug-liveedit-stack-padding.js +88 -0
  180. data/vendor/v8/test/mjsunit/elements-kind.js +4 -4
  181. data/vendor/v8/test/mjsunit/elements-transition-hoisting.js +2 -2
  182. data/vendor/v8/test/mjsunit/elements-transition.js +5 -5
  183. data/vendor/v8/test/mjsunit/error-constructors.js +68 -33
  184. data/vendor/v8/test/mjsunit/harmony/proxies.js +14 -6
  185. data/vendor/v8/test/mjsunit/mjsunit.status +1 -0
  186. data/vendor/v8/test/mjsunit/packed-elements.js +112 -0
  187. data/vendor/v8/test/mjsunit/regexp-capture-3.js +6 -0
  188. data/vendor/v8/test/mjsunit/regexp-global.js +132 -0
  189. data/vendor/v8/test/mjsunit/regexp.js +11 -0
  190. data/vendor/v8/test/mjsunit/regress/regress-117409.js +52 -0
  191. data/vendor/v8/test/mjsunit/regress/regress-126412.js +33 -0
  192. data/vendor/v8/test/mjsunit/regress/regress-128018.js +35 -0
  193. data/vendor/v8/test/mjsunit/regress/regress-128146.js +33 -0
  194. data/vendor/v8/test/mjsunit/regress/regress-1639-2.js +4 -1
  195. data/vendor/v8/test/mjsunit/regress/regress-1639.js +14 -8
  196. data/vendor/v8/test/mjsunit/regress/regress-1849.js +3 -3
  197. data/vendor/v8/test/mjsunit/regress/regress-1878.js +2 -2
  198. data/vendor/v8/test/mjsunit/regress/regress-2071.js +79 -0
  199. data/vendor/v8/test/mjsunit/regress/regress-2153.js +32 -0
  200. data/vendor/v8/test/mjsunit/regress/regress-crbug-122271.js +4 -4
  201. data/vendor/v8/test/mjsunit/regress/regress-crbug-126414.js +32 -0
  202. data/vendor/v8/test/mjsunit/regress/regress-smi-only-concat.js +2 -2
  203. data/vendor/v8/test/mjsunit/regress/regress-transcendental.js +49 -0
  204. data/vendor/v8/test/mjsunit/stack-traces.js +14 -0
  205. data/vendor/v8/test/mjsunit/unbox-double-arrays.js +4 -3
  206. data/vendor/v8/test/test262/testcfg.py +6 -1
  207. data/vendor/v8/tools/check-static-initializers.sh +11 -3
  208. data/vendor/v8/tools/fuzz-harness.sh +92 -0
  209. data/vendor/v8/tools/grokdump.py +658 -67
  210. data/vendor/v8/tools/gyp/v8.gyp +21 -39
  211. data/vendor/v8/tools/js2c.py +3 -3
  212. data/vendor/v8/tools/jsmin.py +2 -2
  213. data/vendor/v8/tools/presubmit.py +2 -1
  214. data/vendor/v8/tools/test-wrapper-gypbuild.py +25 -11
  215. metadata +624 -612
@@ -774,7 +774,7 @@ static MemOperand GenerateMappedArgumentsLookup(MacroAssembler* masm,
   __ b(lt, slow_case);
 
   // Check that the key is a positive smi.
-  __ tst(key, Operand(0x8000001));
+  __ tst(key, Operand(0x80000001));
   __ b(ne, slow_case);
 
   // Load the elements into scratch1 and check its map.
@@ -1249,7 +1249,7 @@ void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
   // Must return the modified receiver in r0.
   if (!FLAG_trace_elements_transitions) {
     Label fail;
-    ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &fail);
+    ElementsTransitionGenerator::GenerateSmiToDouble(masm, &fail);
     __ mov(r0, r2);
     __ Ret();
     __ bind(&fail);
@@ -1462,27 +1462,27 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
   __ CompareRoot(r4, Heap::kHeapNumberMapRootIndex);
   __ b(ne, &non_double_value);
 
-  // Value is a double. Transition FAST_SMI_ONLY_ELEMENTS ->
+  // Value is a double. Transition FAST_SMI_ELEMENTS ->
   // FAST_DOUBLE_ELEMENTS and complete the store.
-  __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                          FAST_DOUBLE_ELEMENTS,
                                          receiver_map,
                                          r4,
                                          &slow);
   ASSERT(receiver_map.is(r3));  // Transition code expects map in r3
-  ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &slow);
+  ElementsTransitionGenerator::GenerateSmiToDouble(masm, &slow);
   __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
   __ jmp(&fast_double_without_map_check);
 
   __ bind(&non_double_value);
-  // Value is not a double, FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS
-  __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+  // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
+  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                          FAST_ELEMENTS,
                                          receiver_map,
                                          r4,
                                          &slow);
   ASSERT(receiver_map.is(r3));  // Transition code expects map in r3
-  ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm);
+  ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm);
   __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
   __ jmp(&finish_object_store);
 
@@ -1690,12 +1690,12 @@ void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
 
   // Activate inlined smi code.
   if (previous_state == UNINITIALIZED) {
-    PatchInlinedSmiCode(address());
+    PatchInlinedSmiCode(address(), ENABLE_INLINED_SMI_CHECK);
   }
 }
 
 
-void PatchInlinedSmiCode(Address address) {
+void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
   Address cmp_instruction_address =
       address + Assembler::kCallTargetAddressOffset;
 
@@ -1729,34 +1729,31 @@ void PatchInlinedSmiCode(Address address) {
   Instr instr_at_patch = Assembler::instr_at(patch_address);
   Instr branch_instr =
       Assembler::instr_at(patch_address + Instruction::kInstrSize);
-  ASSERT(Assembler::IsCmpRegister(instr_at_patch));
-  ASSERT_EQ(Assembler::GetRn(instr_at_patch).code(),
-            Assembler::GetRm(instr_at_patch).code());
+  // This is patching a conditional "jump if not smi/jump if smi" site.
+  // Enabling by changing from
+  //   cmp rx, rx
+  //   b eq/ne, <target>
+  // to
+  //   tst rx, #kSmiTagMask
+  //   b ne/eq, <target>
+  // and vice-versa to be disabled again.
+  CodePatcher patcher(patch_address, 2);
+  Register reg = Assembler::GetRn(instr_at_patch);
+  if (check == ENABLE_INLINED_SMI_CHECK) {
+    ASSERT(Assembler::IsCmpRegister(instr_at_patch));
+    ASSERT_EQ(Assembler::GetRn(instr_at_patch).code(),
+              Assembler::GetRm(instr_at_patch).code());
+    patcher.masm()->tst(reg, Operand(kSmiTagMask));
+  } else {
+    ASSERT(check == DISABLE_INLINED_SMI_CHECK);
+    ASSERT(Assembler::IsTstImmediate(instr_at_patch));
+    patcher.masm()->cmp(reg, reg);
+  }
   ASSERT(Assembler::IsBranch(branch_instr));
   if (Assembler::GetCondition(branch_instr) == eq) {
-    // This is patching a "jump if not smi" site to be active.
-    // Changing
-    //   cmp rx, rx
-    //   b eq, <target>
-    // to
-    //   tst rx, #kSmiTagMask
-    //   b ne, <target>
-    CodePatcher patcher(patch_address, 2);
-    Register reg = Assembler::GetRn(instr_at_patch);
-    patcher.masm()->tst(reg, Operand(kSmiTagMask));
     patcher.EmitCondition(ne);
   } else {
     ASSERT(Assembler::GetCondition(branch_instr) == ne);
-    // This is patching a "jump if smi" site to be active.
-    // Changing
-    //   cmp rx, rx
-    //   b ne, <target>
-    // to
-    //   tst rx, #kSmiTagMask
-    //   b eq, <target>
-    CodePatcher patcher(patch_address, 2);
-    Register reg = Assembler::GetRn(instr_at_patch);
-    patcher.masm()->tst(reg, Operand(kSmiTagMask));
     patcher.EmitCondition(eq);
   }
 }
@@ -2082,8 +2082,9 @@ LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
 
 LInstruction* LChunkBuilder::DoTransitionElementsKind(
     HTransitionElementsKind* instr) {
-  if (instr->original_map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS &&
-      instr->transitioned_map()->elements_kind() == FAST_ELEMENTS) {
+  ElementsKind from_kind = instr->original_map()->elements_kind();
+  ElementsKind to_kind = instr->transitioned_map()->elements_kind();
+  if (IsSimpleMapChangeTransition(from_kind, to_kind)) {
     LOperand* object = UseRegister(instr->object());
     LOperand* new_map_reg = TempRegister();
     LTransitionElementsKind* result =
@@ -2104,16 +2105,28 @@ LInstruction* LChunkBuilder::DoTransitionElementsKind(
 
 LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
   bool needs_write_barrier = instr->NeedsWriteBarrier();
-
-  LOperand* obj = needs_write_barrier
-      ? UseTempRegister(instr->object())
-      : UseRegisterAtStart(instr->object());
+  bool needs_write_barrier_for_map = !instr->transition().is_null() &&
+      instr->NeedsWriteBarrierForMap();
+
+  LOperand* obj;
+  if (needs_write_barrier) {
+    obj = instr->is_in_object()
+        ? UseRegister(instr->object())
+        : UseTempRegister(instr->object());
+  } else {
+    obj = needs_write_barrier_for_map
+        ? UseRegister(instr->object())
+        : UseRegisterAtStart(instr->object());
+  }
 
   LOperand* val = needs_write_barrier
       ? UseTempRegister(instr->value())
       : UseRegister(instr->value());
 
-  return new(zone()) LStoreNamedField(obj, val);
+  // We need a temporary register for write barrier of the map field.
+  LOperand* temp = needs_write_barrier_for_map ? TempRegister() : NULL;
+
+  return new(zone()) LStoreNamedField(obj, val, temp);
 }
 
 
@@ -1236,6 +1236,7 @@ class LLoadKeyedFastElement: public LTemplateInstruction<1, 2, 0> {
 
   LOperand* elements() { return inputs_[0]; }
   LOperand* key() { return inputs_[1]; }
+  uint32_t additional_index() const { return hydrogen()->index_offset(); }
 };
 
 
@@ -1252,13 +1253,13 @@ class LLoadKeyedFastDoubleElement: public LTemplateInstruction<1, 2, 0> {
 
   LOperand* elements() { return inputs_[0]; }
   LOperand* key() { return inputs_[1]; }
+  uint32_t additional_index() const { return hydrogen()->index_offset(); }
 };
 
 
 class LLoadKeyedSpecializedArrayElement: public LTemplateInstruction<1, 2, 0> {
  public:
-  LLoadKeyedSpecializedArrayElement(LOperand* external_pointer,
-                                    LOperand* key) {
+  LLoadKeyedSpecializedArrayElement(LOperand* external_pointer, LOperand* key) {
     inputs_[0] = external_pointer;
     inputs_[1] = key;
   }
@@ -1272,6 +1273,7 @@ class LLoadKeyedSpecializedArrayElement: public LTemplateInstruction<1, 2, 0> {
   ElementsKind elements_kind() const {
     return hydrogen()->elements_kind();
   }
+  uint32_t additional_index() const { return hydrogen()->index_offset(); }
 };
 
 
@@ -1682,11 +1684,12 @@ class LSmiUntag: public LTemplateInstruction<1, 1, 0> {
 };
 
 
-class LStoreNamedField: public LTemplateInstruction<0, 2, 0> {
+class LStoreNamedField: public LTemplateInstruction<0, 2, 1> {
  public:
-  LStoreNamedField(LOperand* obj, LOperand* val) {
+  LStoreNamedField(LOperand* obj, LOperand* val, LOperand* temp) {
     inputs_[0] = obj;
     inputs_[1] = val;
+    temps_[0] = temp;
   }
 
   DECLARE_CONCRETE_INSTRUCTION(StoreNamedField, "store-named-field")
@@ -1740,6 +1743,7 @@ class LStoreKeyedFastElement: public LTemplateInstruction<0, 3, 0> {
   LOperand* object() { return inputs_[0]; }
   LOperand* key() { return inputs_[1]; }
   LOperand* value() { return inputs_[2]; }
+  uint32_t additional_index() const { return hydrogen()->index_offset(); }
 };
 
 
@@ -1762,6 +1766,7 @@ class LStoreKeyedFastDoubleElement: public LTemplateInstruction<0, 3, 0> {
   LOperand* elements() { return inputs_[0]; }
   LOperand* key() { return inputs_[1]; }
   LOperand* value() { return inputs_[2]; }
+  uint32_t additional_index() const { return hydrogen()->index_offset(); }
 
   bool NeedsCanonicalization() { return hydrogen()->NeedsCanonicalization(); }
 };
@@ -1806,6 +1811,7 @@ class LStoreKeyedSpecializedArrayElement: public LTemplateInstruction<0, 3, 0> {
   ElementsKind elements_kind() const {
     return hydrogen()->elements_kind();
   }
+  uint32_t additional_index() const { return hydrogen()->index_offset(); }
 };
 
 
@@ -2587,42 +2587,38 @@ void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
   Register object = ToRegister(instr->object());
   Register result = ToRegister(instr->result());
   Register scratch = scratch0();
+
   int map_count = instr->hydrogen()->types()->length();
+  bool need_generic = instr->hydrogen()->need_generic();
+
+  if (map_count == 0 && !need_generic) {
+    DeoptimizeIf(al, instr->environment());
+    return;
+  }
   Handle<String> name = instr->hydrogen()->name();
-  if (map_count == 0) {
-    ASSERT(instr->hydrogen()->need_generic());
-    __ mov(r2, Operand(name));
-    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
-    CallCode(ic, RelocInfo::CODE_TARGET, instr);
-  } else {
-    Label done;
-    __ ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
-    for (int i = 0; i < map_count - 1; ++i) {
-      Handle<Map> map = instr->hydrogen()->types()->at(i);
+  Label done;
+  __ ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
+  for (int i = 0; i < map_count; ++i) {
+    bool last = (i == map_count - 1);
+    Handle<Map> map = instr->hydrogen()->types()->at(i);
+    __ cmp(scratch, Operand(map));
+    if (last && !need_generic) {
+      DeoptimizeIf(ne, instr->environment());
+      EmitLoadFieldOrConstantFunction(result, object, map, name);
+    } else {
       Label next;
-      __ cmp(scratch, Operand(map));
       __ b(ne, &next);
       EmitLoadFieldOrConstantFunction(result, object, map, name);
       __ b(&done);
       __ bind(&next);
     }
-    Handle<Map> map = instr->hydrogen()->types()->last();
-    __ cmp(scratch, Operand(map));
-    if (instr->hydrogen()->need_generic()) {
-      Label generic;
-      __ b(ne, &generic);
-      EmitLoadFieldOrConstantFunction(result, object, map, name);
-      __ b(&done);
-      __ bind(&generic);
-      __ mov(r2, Operand(name));
-      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
-      CallCode(ic, RelocInfo::CODE_TARGET, instr);
-    } else {
-      DeoptimizeIf(ne, instr->environment());
-      EmitLoadFieldOrConstantFunction(result, object, map, name);
-    }
-    __ bind(&done);
   }
+  if (need_generic) {
+    __ mov(r2, Operand(name));
+    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
+    CallCode(ic, RelocInfo::CODE_TARGET, instr);
+  }
+  __ bind(&done);
 }
 
 
@@ -2700,8 +2696,10 @@ void LCodeGen::DoLoadElements(LLoadElements* instr) {
     __ ldr(scratch, FieldMemOperand(scratch, Map::kBitField2Offset));
     __ ubfx(scratch, scratch, Map::kElementsKindShift,
             Map::kElementsKindBitCount);
-    __ cmp(scratch, Operand(FAST_ELEMENTS));
-    __ b(eq, &done);
+    __ cmp(scratch, Operand(GetInitialFastElementsKind()));
+    __ b(lt, &fail);
+    __ cmp(scratch, Operand(TERMINAL_FAST_ELEMENTS_KIND));
+    __ b(le, &done);
     __ cmp(scratch, Operand(FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND));
     __ b(lt, &fail);
     __ cmp(scratch, Operand(LAST_EXTERNAL_ARRAY_ELEMENTS_KIND));
@@ -2748,7 +2746,9 @@ void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
 
   // Load the result.
   __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
-  __ ldr(result, FieldMemOperand(scratch, FixedArray::kHeaderSize));
+  uint32_t offset = FixedArray::kHeaderSize +
+                    (instr->additional_index() << kPointerSizeLog2);
+  __ ldr(result, FieldMemOperand(scratch, offset));
 
   // Check for the hole value.
   if (instr->hydrogen()->RequiresHoleCheck()) {
@@ -2780,18 +2780,21 @@ void LCodeGen::DoLoadKeyedFastDoubleElement(
   }
 
   Operand operand = key_is_constant
-      ? Operand(constant_key * (1 << shift_size) +
+      ? Operand(((constant_key + instr->additional_index()) << shift_size) +
                 FixedDoubleArray::kHeaderSize - kHeapObjectTag)
       : Operand(key, LSL, shift_size);
   __ add(elements, elements, operand);
   if (!key_is_constant) {
     __ add(elements, elements,
-           Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
+           Operand((FixedDoubleArray::kHeaderSize - kHeapObjectTag) +
+                   (instr->additional_index() << shift_size)));
   }
 
-  __ ldr(scratch, MemOperand(elements, sizeof(kHoleNanLower32)));
-  __ cmp(scratch, Operand(kHoleNanUpper32));
-  DeoptimizeIf(eq, instr->environment());
+  if (instr->hydrogen()->RequiresHoleCheck()) {
+    __ ldr(scratch, MemOperand(elements, sizeof(kHoleNanLower32)));
+    __ cmp(scratch, Operand(kHoleNanUpper32));
+    DeoptimizeIf(eq, instr->environment());
+  }
 
   __ vldr(result, elements, 0);
 }
@@ -2813,26 +2816,33 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
     key = ToRegister(instr->key());
   }
   int shift_size = ElementsKindToShiftSize(elements_kind);
+  int additional_offset = instr->additional_index() << shift_size;
 
   if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
       elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
     CpuFeatures::Scope scope(VFP3);
     DwVfpRegister result = ToDoubleRegister(instr->result());
     Operand operand = key_is_constant
-        ? Operand(constant_key * (1 << shift_size))
+        ? Operand(constant_key << shift_size)
         : Operand(key, LSL, shift_size);
     __ add(scratch0(), external_pointer, operand);
     if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
-      __ vldr(result.low(), scratch0(), 0);
+      __ vldr(result.low(), scratch0(), additional_offset);
       __ vcvt_f64_f32(result, result.low());
     } else {  // i.e. elements_kind == EXTERNAL_DOUBLE_ELEMENTS
-      __ vldr(result, scratch0(), 0);
+      __ vldr(result, scratch0(), additional_offset);
     }
   } else {
     Register result = ToRegister(instr->result());
+    if (instr->additional_index() != 0 && !key_is_constant) {
+      __ add(scratch0(), key, Operand(instr->additional_index()));
+    }
     MemOperand mem_operand(key_is_constant
-        ? MemOperand(external_pointer, constant_key * (1 << shift_size))
-        : MemOperand(external_pointer, key, LSL, shift_size));
+        ? MemOperand(external_pointer,
+                     (constant_key << shift_size) + additional_offset)
+        : (instr->additional_index() == 0
+           ? MemOperand(external_pointer, key, LSL, shift_size)
+           : MemOperand(external_pointer, scratch0(), LSL, shift_size)));
     switch (elements_kind) {
       case EXTERNAL_BYTE_ELEMENTS:
        __ ldrsb(result, mem_operand);
@@ -2860,9 +2870,12 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
         break;
       case EXTERNAL_FLOAT_ELEMENTS:
       case EXTERNAL_DOUBLE_ELEMENTS:
+      case FAST_HOLEY_DOUBLE_ELEMENTS:
+      case FAST_HOLEY_ELEMENTS:
+      case FAST_HOLEY_SMI_ELEMENTS:
      case FAST_DOUBLE_ELEMENTS:
      case FAST_ELEMENTS:
-      case FAST_SMI_ONLY_ELEMENTS:
+      case FAST_SMI_ELEMENTS:
      case DICTIONARY_ELEMENTS:
      case NON_STRICT_ARGUMENTS_ELEMENTS:
        UNREACHABLE();
@@ -3662,6 +3675,18 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
   if (!instr->transition().is_null()) {
     __ mov(scratch, Operand(instr->transition()));
     __ str(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
+    if (instr->hydrogen()->NeedsWriteBarrierForMap()) {
+      Register temp = ToRegister(instr->TempAt(0));
+      // Update the write barrier for the map field.
+      __ RecordWriteField(object,
+                          HeapObject::kMapOffset,
+                          scratch,
+                          temp,
+                          kLRHasBeenSaved,
+                          kSaveFPRegs,
+                          OMIT_REMEMBERED_SET,
+                          OMIT_SMI_CHECK);
+    }
   }
 
   // Do the store.
@@ -3730,10 +3755,16 @@ void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
     ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
     LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
     int offset =
-        ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
+        (ToInteger32(const_operand) + instr->additional_index()) * kPointerSize
+        + FixedArray::kHeaderSize;
     __ str(value, FieldMemOperand(elements, offset));
   } else {
     __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
+    if (instr->additional_index() != 0) {
+      __ add(scratch,
+             scratch,
+             Operand(instr->additional_index() << kPointerSizeLog2));
+    }
     __ str(value, FieldMemOperand(scratch, FixedArray::kHeaderSize));
   }
 
@@ -3775,7 +3806,7 @@ void LCodeGen::DoStoreKeyedFastDoubleElement(
   }
   int shift_size = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
   Operand operand = key_is_constant
-      ? Operand(constant_key * (1 << shift_size) +
+      ? Operand((constant_key << shift_size) +
                 FixedDoubleArray::kHeaderSize - kHeapObjectTag)
      : Operand(key, LSL, shift_size);
  __ add(scratch, elements, operand);
@@ -3793,7 +3824,7 @@ void LCodeGen::DoStoreKeyedFastDoubleElement(
                 vs);
   }
 
-  __ vstr(value, scratch, 0);
+  __ vstr(value, scratch, instr->additional_index() << shift_size);
 }
 
 
@@ -3814,25 +3845,33 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
     key = ToRegister(instr->key());
   }
   int shift_size = ElementsKindToShiftSize(elements_kind);
+  int additional_offset = instr->additional_index() << shift_size;
 
   if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
       elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
     CpuFeatures::Scope scope(VFP3);
     DwVfpRegister value(ToDoubleRegister(instr->value()));
-    Operand operand(key_is_constant ? Operand(constant_key * (1 << shift_size))
+    Operand operand(key_is_constant ? Operand(constant_key << shift_size)
                                     : Operand(key, LSL, shift_size));
     __ add(scratch0(), external_pointer, operand);
     if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
       __ vcvt_f32_f64(double_scratch0().low(), value);
-      __ vstr(double_scratch0().low(), scratch0(), 0);
+      __ vstr(double_scratch0().low(), scratch0(), additional_offset);
     } else {  // i.e. elements_kind == EXTERNAL_DOUBLE_ELEMENTS
-      __ vstr(value, scratch0(), 0);
+      __ vstr(value, scratch0(), additional_offset);
     }
   } else {
     Register value(ToRegister(instr->value()));
+    if (instr->additional_index() != 0 && !key_is_constant) {
+      __ add(scratch0(), key, Operand(instr->additional_index()));
+    }
     MemOperand mem_operand(key_is_constant
-        ? MemOperand(external_pointer, constant_key * (1 << shift_size))
-        : MemOperand(external_pointer, key, LSL, shift_size));
+        ? MemOperand(external_pointer,
+                     ((constant_key + instr->additional_index())
+                         << shift_size))
+        : (instr->additional_index() == 0
+           ? MemOperand(external_pointer, key, LSL, shift_size)
+           : MemOperand(external_pointer, scratch0(), LSL, shift_size)));
     switch (elements_kind) {
       case EXTERNAL_PIXEL_ELEMENTS:
      case EXTERNAL_BYTE_ELEMENTS:
@@ -3851,7 +3890,10 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
       case EXTERNAL_DOUBLE_ELEMENTS:
       case FAST_DOUBLE_ELEMENTS:
       case FAST_ELEMENTS:
-      case FAST_SMI_ONLY_ELEMENTS:
+      case FAST_SMI_ELEMENTS:
+      case FAST_HOLEY_DOUBLE_ELEMENTS:
+      case FAST_HOLEY_ELEMENTS:
+      case FAST_HOLEY_SMI_ELEMENTS:
      case DICTIONARY_ELEMENTS:
      case NON_STRICT_ARGUMENTS_ELEMENTS:
        UNREACHABLE();
@@ -3888,20 +3930,22 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
   __ cmp(scratch, Operand(from_map));
   __ b(ne, &not_applicable);
   __ mov(new_map_reg, Operand(to_map));
-  if (from_kind == FAST_SMI_ONLY_ELEMENTS && to_kind == FAST_ELEMENTS) {
+
+  if (IsSimpleMapChangeTransition(from_kind, to_kind)) {
     __ str(new_map_reg, FieldMemOperand(object_reg, HeapObject::kMapOffset));
     // Write barrier.
     __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg,
                         scratch, kLRHasBeenSaved, kDontSaveFPRegs);
-  } else if (from_kind == FAST_SMI_ONLY_ELEMENTS &&
-             to_kind == FAST_DOUBLE_ELEMENTS) {
+  } else if (IsFastSmiElementsKind(from_kind) &&
+             IsFastDoubleElementsKind(to_kind)) {
     Register fixed_object_reg = ToRegister(instr->temp_reg());
     ASSERT(fixed_object_reg.is(r2));
     ASSERT(new_map_reg.is(r3));
     __ mov(fixed_object_reg, object_reg);
     CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(),
              RelocInfo::CODE_TARGET, instr);
-  } else if (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS) {
+  } else if (IsFastDoubleElementsKind(from_kind) &&
+             IsFastObjectElementsKind(to_kind)) {
     Register fixed_object_reg = ToRegister(instr->temp_reg());
     ASSERT(fixed_object_reg.is(r2));
     ASSERT(new_map_reg.is(r3));
@@ -4675,8 +4719,9 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
 
   // Deopt if the array literal boilerplate ElementsKind is of a type different
   // than the expected one. The check isn't necessary if the boilerplate has
-  // already been converted to FAST_ELEMENTS.
-  if (boilerplate_elements_kind != FAST_ELEMENTS) {
+  // already been converted to TERMINAL_FAST_ELEMENTS_KIND.
+  if (CanTransitionToMoreGeneralFastElementsKind(
+          boilerplate_elements_kind, true)) {
     __ LoadHeapObject(r1, instr->hydrogen()->boilerplate_object());
     // Load map into r2.
     __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
@@ -4827,10 +4872,11 @@ void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
   ElementsKind boilerplate_elements_kind =
       instr->hydrogen()->boilerplate()->GetElementsKind();
 
-  // Deopt if the literal boilerplate ElementsKind is of a type different than
-  // the expected one. The check isn't necessary if the boilerplate has already
-  // been converted to FAST_ELEMENTS.
-  if (boilerplate_elements_kind != FAST_ELEMENTS) {
+  // Deopt if the array literal boilerplate ElementsKind is of a type different
+  // than the expected one. The check isn't necessary if the boilerplate has
+  // already been converted to TERMINAL_FAST_ELEMENTS_KIND.
+  if (CanTransitionToMoreGeneralFastElementsKind(
+          boilerplate_elements_kind, true)) {
     __ LoadHeapObject(r1, instr->hydrogen()->boilerplate());
     // Load map into r2.
     __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));