libv8 3.11.8.17 → 3.16.14.0
- checksums.yaml +4 -4
- data/.travis.yml +1 -2
- data/Gemfile +1 -1
- data/Rakefile +6 -7
- data/lib/libv8/version.rb +1 -1
- data/vendor/v8/.gitignore +24 -3
- data/vendor/v8/AUTHORS +7 -0
- data/vendor/v8/ChangeLog +839 -0
- data/vendor/v8/DEPS +1 -1
- data/vendor/v8/Makefile.android +92 -0
- data/vendor/v8/OWNERS +11 -0
- data/vendor/v8/PRESUBMIT.py +71 -0
- data/vendor/v8/SConstruct +34 -39
- data/vendor/v8/build/android.gypi +56 -37
- data/vendor/v8/build/common.gypi +112 -30
- data/vendor/v8/build/gyp_v8 +1 -1
- data/vendor/v8/build/standalone.gypi +15 -11
- data/vendor/v8/include/v8-debug.h +9 -1
- data/vendor/v8/include/v8-preparser.h +4 -3
- data/vendor/v8/include/v8-profiler.h +25 -25
- data/vendor/v8/include/v8-testing.h +4 -3
- data/vendor/v8/include/v8.h +994 -540
- data/vendor/v8/preparser/preparser-process.cc +3 -3
- data/vendor/v8/samples/lineprocessor.cc +20 -27
- data/vendor/v8/samples/process.cc +18 -14
- data/vendor/v8/samples/shell.cc +16 -15
- data/vendor/v8/src/SConscript +15 -14
- data/vendor/v8/src/accessors.cc +169 -77
- data/vendor/v8/src/accessors.h +4 -0
- data/vendor/v8/src/allocation-inl.h +2 -2
- data/vendor/v8/src/allocation.h +7 -7
- data/vendor/v8/src/api.cc +810 -497
- data/vendor/v8/src/api.h +85 -60
- data/vendor/v8/src/arm/assembler-arm-inl.h +179 -22
- data/vendor/v8/src/arm/assembler-arm.cc +633 -264
- data/vendor/v8/src/arm/assembler-arm.h +264 -197
- data/vendor/v8/src/arm/builtins-arm.cc +117 -27
- data/vendor/v8/src/arm/code-stubs-arm.cc +1241 -700
- data/vendor/v8/src/arm/code-stubs-arm.h +35 -138
- data/vendor/v8/src/arm/codegen-arm.cc +285 -16
- data/vendor/v8/src/arm/codegen-arm.h +22 -0
- data/vendor/v8/src/arm/constants-arm.cc +5 -3
- data/vendor/v8/src/arm/constants-arm.h +24 -11
- data/vendor/v8/src/arm/debug-arm.cc +3 -3
- data/vendor/v8/src/arm/deoptimizer-arm.cc +382 -92
- data/vendor/v8/src/arm/disasm-arm.cc +61 -12
- data/vendor/v8/src/arm/frames-arm.h +0 -14
- data/vendor/v8/src/arm/full-codegen-arm.cc +332 -304
- data/vendor/v8/src/arm/ic-arm.cc +180 -259
- data/vendor/v8/src/arm/lithium-arm.cc +364 -316
- data/vendor/v8/src/arm/lithium-arm.h +512 -275
- data/vendor/v8/src/arm/lithium-codegen-arm.cc +1768 -809
- data/vendor/v8/src/arm/lithium-codegen-arm.h +97 -35
- data/vendor/v8/src/arm/lithium-gap-resolver-arm.cc +12 -5
- data/vendor/v8/src/arm/macro-assembler-arm.cc +439 -228
- data/vendor/v8/src/arm/macro-assembler-arm.h +116 -70
- data/vendor/v8/src/arm/regexp-macro-assembler-arm.cc +54 -44
- data/vendor/v8/src/arm/regexp-macro-assembler-arm.h +3 -10
- data/vendor/v8/src/arm/simulator-arm.cc +272 -238
- data/vendor/v8/src/arm/simulator-arm.h +38 -8
- data/vendor/v8/src/arm/stub-cache-arm.cc +522 -895
- data/vendor/v8/src/array.js +101 -70
- data/vendor/v8/src/assembler.cc +270 -19
- data/vendor/v8/src/assembler.h +110 -15
- data/vendor/v8/src/ast.cc +79 -69
- data/vendor/v8/src/ast.h +255 -301
- data/vendor/v8/src/atomicops.h +7 -1
- data/vendor/v8/src/atomicops_internals_tsan.h +335 -0
- data/vendor/v8/src/bootstrapper.cc +481 -418
- data/vendor/v8/src/bootstrapper.h +4 -4
- data/vendor/v8/src/builtins.cc +498 -311
- data/vendor/v8/src/builtins.h +75 -47
- data/vendor/v8/src/checks.cc +2 -1
- data/vendor/v8/src/checks.h +8 -0
- data/vendor/v8/src/code-stubs-hydrogen.cc +253 -0
- data/vendor/v8/src/code-stubs.cc +249 -84
- data/vendor/v8/src/code-stubs.h +501 -169
- data/vendor/v8/src/codegen.cc +36 -18
- data/vendor/v8/src/codegen.h +25 -3
- data/vendor/v8/src/collection.js +54 -17
- data/vendor/v8/src/compilation-cache.cc +24 -16
- data/vendor/v8/src/compilation-cache.h +15 -6
- data/vendor/v8/src/compiler.cc +497 -195
- data/vendor/v8/src/compiler.h +246 -38
- data/vendor/v8/src/contexts.cc +64 -24
- data/vendor/v8/src/contexts.h +60 -29
- data/vendor/v8/src/conversions-inl.h +24 -14
- data/vendor/v8/src/conversions.h +7 -4
- data/vendor/v8/src/counters.cc +21 -12
- data/vendor/v8/src/counters.h +44 -16
- data/vendor/v8/src/cpu-profiler.h +1 -1
- data/vendor/v8/src/d8-debug.cc +2 -2
- data/vendor/v8/src/d8-readline.cc +13 -2
- data/vendor/v8/src/d8.cc +681 -273
- data/vendor/v8/src/d8.gyp +4 -4
- data/vendor/v8/src/d8.h +38 -18
- data/vendor/v8/src/d8.js +0 -617
- data/vendor/v8/src/data-flow.h +55 -0
- data/vendor/v8/src/date.js +1 -42
- data/vendor/v8/src/dateparser-inl.h +5 -1
- data/vendor/v8/src/debug-agent.cc +10 -15
- data/vendor/v8/src/debug-debugger.js +147 -149
- data/vendor/v8/src/debug.cc +323 -164
- data/vendor/v8/src/debug.h +26 -14
- data/vendor/v8/src/deoptimizer.cc +765 -290
- data/vendor/v8/src/deoptimizer.h +130 -28
- data/vendor/v8/src/disassembler.cc +10 -4
- data/vendor/v8/src/elements-kind.cc +7 -2
- data/vendor/v8/src/elements-kind.h +19 -0
- data/vendor/v8/src/elements.cc +607 -285
- data/vendor/v8/src/elements.h +36 -13
- data/vendor/v8/src/execution.cc +52 -31
- data/vendor/v8/src/execution.h +4 -4
- data/vendor/v8/src/extensions/externalize-string-extension.cc +5 -4
- data/vendor/v8/src/extensions/gc-extension.cc +5 -1
- data/vendor/v8/src/extensions/statistics-extension.cc +153 -0
- data/vendor/v8/src/{inspector.h → extensions/statistics-extension.h} +12 -23
- data/vendor/v8/src/factory.cc +101 -134
- data/vendor/v8/src/factory.h +36 -31
- data/vendor/v8/src/flag-definitions.h +102 -25
- data/vendor/v8/src/flags.cc +9 -5
- data/vendor/v8/src/frames-inl.h +10 -0
- data/vendor/v8/src/frames.cc +116 -26
- data/vendor/v8/src/frames.h +96 -12
- data/vendor/v8/src/full-codegen.cc +219 -74
- data/vendor/v8/src/full-codegen.h +63 -21
- data/vendor/v8/src/func-name-inferrer.cc +8 -7
- data/vendor/v8/src/func-name-inferrer.h +5 -3
- data/vendor/v8/src/gdb-jit.cc +71 -57
- data/vendor/v8/src/global-handles.cc +230 -101
- data/vendor/v8/src/global-handles.h +26 -27
- data/vendor/v8/src/globals.h +17 -19
- data/vendor/v8/src/handles-inl.h +59 -12
- data/vendor/v8/src/handles.cc +180 -200
- data/vendor/v8/src/handles.h +80 -11
- data/vendor/v8/src/hashmap.h +60 -40
- data/vendor/v8/src/heap-inl.h +107 -45
- data/vendor/v8/src/heap-profiler.cc +38 -19
- data/vendor/v8/src/heap-profiler.h +24 -14
- data/vendor/v8/src/heap.cc +1123 -738
- data/vendor/v8/src/heap.h +385 -146
- data/vendor/v8/src/hydrogen-instructions.cc +700 -217
- data/vendor/v8/src/hydrogen-instructions.h +1158 -472
- data/vendor/v8/src/hydrogen.cc +3319 -1662
- data/vendor/v8/src/hydrogen.h +411 -170
- data/vendor/v8/src/ia32/assembler-ia32-inl.h +46 -16
- data/vendor/v8/src/ia32/assembler-ia32.cc +131 -61
- data/vendor/v8/src/ia32/assembler-ia32.h +115 -57
- data/vendor/v8/src/ia32/builtins-ia32.cc +99 -5
- data/vendor/v8/src/ia32/code-stubs-ia32.cc +787 -495
- data/vendor/v8/src/ia32/code-stubs-ia32.h +10 -100
- data/vendor/v8/src/ia32/codegen-ia32.cc +227 -23
- data/vendor/v8/src/ia32/codegen-ia32.h +14 -0
- data/vendor/v8/src/ia32/deoptimizer-ia32.cc +428 -87
- data/vendor/v8/src/ia32/disasm-ia32.cc +28 -1
- data/vendor/v8/src/ia32/frames-ia32.h +6 -16
- data/vendor/v8/src/ia32/full-codegen-ia32.cc +280 -272
- data/vendor/v8/src/ia32/ic-ia32.cc +150 -250
- data/vendor/v8/src/ia32/lithium-codegen-ia32.cc +1600 -517
- data/vendor/v8/src/ia32/lithium-codegen-ia32.h +90 -24
- data/vendor/v8/src/ia32/lithium-gap-resolver-ia32.cc +10 -6
- data/vendor/v8/src/ia32/lithium-gap-resolver-ia32.h +2 -2
- data/vendor/v8/src/ia32/lithium-ia32.cc +405 -302
- data/vendor/v8/src/ia32/lithium-ia32.h +526 -271
- data/vendor/v8/src/ia32/macro-assembler-ia32.cc +378 -119
- data/vendor/v8/src/ia32/macro-assembler-ia32.h +62 -28
- data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.cc +43 -30
- data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.h +2 -10
- data/vendor/v8/src/ia32/stub-cache-ia32.cc +492 -678
- data/vendor/v8/src/ic-inl.h +9 -4
- data/vendor/v8/src/ic.cc +836 -923
- data/vendor/v8/src/ic.h +228 -247
- data/vendor/v8/src/incremental-marking-inl.h +26 -30
- data/vendor/v8/src/incremental-marking.cc +276 -248
- data/vendor/v8/src/incremental-marking.h +29 -37
- data/vendor/v8/src/interface.cc +34 -25
- data/vendor/v8/src/interface.h +69 -25
- data/vendor/v8/src/interpreter-irregexp.cc +2 -2
- data/vendor/v8/src/isolate.cc +382 -76
- data/vendor/v8/src/isolate.h +109 -56
- data/vendor/v8/src/json-parser.h +217 -104
- data/vendor/v8/src/json-stringifier.h +745 -0
- data/vendor/v8/src/json.js +10 -132
- data/vendor/v8/src/jsregexp-inl.h +106 -0
- data/vendor/v8/src/jsregexp.cc +517 -285
- data/vendor/v8/src/jsregexp.h +145 -117
- data/vendor/v8/src/list-inl.h +35 -22
- data/vendor/v8/src/list.h +46 -19
- data/vendor/v8/src/lithium-allocator-inl.h +22 -2
- data/vendor/v8/src/lithium-allocator.cc +85 -70
- data/vendor/v8/src/lithium-allocator.h +21 -39
- data/vendor/v8/src/lithium.cc +259 -5
- data/vendor/v8/src/lithium.h +131 -32
- data/vendor/v8/src/liveedit-debugger.js +52 -3
- data/vendor/v8/src/liveedit.cc +393 -113
- data/vendor/v8/src/liveedit.h +7 -3
- data/vendor/v8/src/log-utils.cc +4 -2
- data/vendor/v8/src/log.cc +170 -140
- data/vendor/v8/src/log.h +62 -11
- data/vendor/v8/src/macro-assembler.h +17 -0
- data/vendor/v8/src/macros.py +2 -0
- data/vendor/v8/src/mark-compact-inl.h +3 -23
- data/vendor/v8/src/mark-compact.cc +801 -830
- data/vendor/v8/src/mark-compact.h +154 -47
- data/vendor/v8/src/marking-thread.cc +85 -0
- data/vendor/v8/src/{inspector.cc → marking-thread.h} +32 -24
- data/vendor/v8/src/math.js +12 -18
- data/vendor/v8/src/messages.cc +18 -8
- data/vendor/v8/src/messages.js +314 -261
- data/vendor/v8/src/mips/assembler-mips-inl.h +58 -6
- data/vendor/v8/src/mips/assembler-mips.cc +92 -75
- data/vendor/v8/src/mips/assembler-mips.h +54 -60
- data/vendor/v8/src/mips/builtins-mips.cc +116 -17
- data/vendor/v8/src/mips/code-stubs-mips.cc +919 -556
- data/vendor/v8/src/mips/code-stubs-mips.h +22 -131
- data/vendor/v8/src/mips/codegen-mips.cc +281 -6
- data/vendor/v8/src/mips/codegen-mips.h +22 -0
- data/vendor/v8/src/mips/constants-mips.cc +2 -0
- data/vendor/v8/src/mips/constants-mips.h +12 -2
- data/vendor/v8/src/mips/deoptimizer-mips.cc +286 -50
- data/vendor/v8/src/mips/disasm-mips.cc +13 -0
- data/vendor/v8/src/mips/full-codegen-mips.cc +297 -284
- data/vendor/v8/src/mips/ic-mips.cc +182 -263
- data/vendor/v8/src/mips/lithium-codegen-mips.cc +1208 -556
- data/vendor/v8/src/mips/lithium-codegen-mips.h +72 -19
- data/vendor/v8/src/mips/lithium-gap-resolver-mips.cc +9 -2
- data/vendor/v8/src/mips/lithium-mips.cc +290 -302
- data/vendor/v8/src/mips/lithium-mips.h +463 -266
- data/vendor/v8/src/mips/macro-assembler-mips.cc +208 -115
- data/vendor/v8/src/mips/macro-assembler-mips.h +67 -24
- data/vendor/v8/src/mips/regexp-macro-assembler-mips.cc +40 -25
- data/vendor/v8/src/mips/regexp-macro-assembler-mips.h +3 -9
- data/vendor/v8/src/mips/simulator-mips.cc +112 -40
- data/vendor/v8/src/mips/simulator-mips.h +5 -0
- data/vendor/v8/src/mips/stub-cache-mips.cc +502 -884
- data/vendor/v8/src/mirror-debugger.js +157 -30
- data/vendor/v8/src/mksnapshot.cc +88 -14
- data/vendor/v8/src/object-observe.js +235 -0
- data/vendor/v8/src/objects-debug.cc +178 -176
- data/vendor/v8/src/objects-inl.h +1333 -486
- data/vendor/v8/src/objects-printer.cc +125 -43
- data/vendor/v8/src/objects-visiting-inl.h +578 -6
- data/vendor/v8/src/objects-visiting.cc +2 -2
- data/vendor/v8/src/objects-visiting.h +172 -79
- data/vendor/v8/src/objects.cc +3533 -2885
- data/vendor/v8/src/objects.h +1352 -1131
- data/vendor/v8/src/optimizing-compiler-thread.cc +152 -0
- data/vendor/v8/src/optimizing-compiler-thread.h +111 -0
- data/vendor/v8/src/parser.cc +390 -500
- data/vendor/v8/src/parser.h +45 -33
- data/vendor/v8/src/platform-cygwin.cc +10 -21
- data/vendor/v8/src/platform-freebsd.cc +36 -41
- data/vendor/v8/src/platform-linux.cc +160 -124
- data/vendor/v8/src/platform-macos.cc +30 -27
- data/vendor/v8/src/platform-nullos.cc +17 -1
- data/vendor/v8/src/platform-openbsd.cc +19 -50
- data/vendor/v8/src/platform-posix.cc +14 -0
- data/vendor/v8/src/platform-solaris.cc +20 -53
- data/vendor/v8/src/platform-win32.cc +49 -26
- data/vendor/v8/src/platform.h +40 -1
- data/vendor/v8/src/preparser.cc +8 -5
- data/vendor/v8/src/preparser.h +2 -2
- data/vendor/v8/src/prettyprinter.cc +16 -0
- data/vendor/v8/src/prettyprinter.h +2 -0
- data/vendor/v8/src/profile-generator-inl.h +1 -0
- data/vendor/v8/src/profile-generator.cc +209 -147
- data/vendor/v8/src/profile-generator.h +15 -12
- data/vendor/v8/src/property-details.h +46 -31
- data/vendor/v8/src/property.cc +27 -46
- data/vendor/v8/src/property.h +163 -83
- data/vendor/v8/src/proxy.js +7 -2
- data/vendor/v8/src/regexp-macro-assembler-irregexp.cc +4 -13
- data/vendor/v8/src/regexp-macro-assembler-irregexp.h +1 -2
- data/vendor/v8/src/regexp-macro-assembler-tracer.cc +1 -11
- data/vendor/v8/src/regexp-macro-assembler-tracer.h +0 -1
- data/vendor/v8/src/regexp-macro-assembler.cc +31 -14
- data/vendor/v8/src/regexp-macro-assembler.h +14 -11
- data/vendor/v8/src/regexp-stack.cc +1 -0
- data/vendor/v8/src/regexp.js +9 -8
- data/vendor/v8/src/rewriter.cc +18 -7
- data/vendor/v8/src/runtime-profiler.cc +52 -43
- data/vendor/v8/src/runtime-profiler.h +0 -25
- data/vendor/v8/src/runtime.cc +2006 -2023
- data/vendor/v8/src/runtime.h +56 -49
- data/vendor/v8/src/safepoint-table.cc +12 -18
- data/vendor/v8/src/safepoint-table.h +11 -8
- data/vendor/v8/src/scanner.cc +1 -0
- data/vendor/v8/src/scanner.h +4 -10
- data/vendor/v8/src/scopeinfo.cc +35 -9
- data/vendor/v8/src/scopeinfo.h +64 -3
- data/vendor/v8/src/scopes.cc +251 -156
- data/vendor/v8/src/scopes.h +61 -27
- data/vendor/v8/src/serialize.cc +348 -396
- data/vendor/v8/src/serialize.h +125 -114
- data/vendor/v8/src/small-pointer-list.h +11 -11
- data/vendor/v8/src/{smart-array-pointer.h → smart-pointers.h} +64 -15
- data/vendor/v8/src/snapshot-common.cc +64 -15
- data/vendor/v8/src/snapshot-empty.cc +7 -1
- data/vendor/v8/src/snapshot.h +9 -2
- data/vendor/v8/src/spaces-inl.h +17 -0
- data/vendor/v8/src/spaces.cc +477 -183
- data/vendor/v8/src/spaces.h +238 -58
- data/vendor/v8/src/splay-tree-inl.h +8 -7
- data/vendor/v8/src/splay-tree.h +24 -10
- data/vendor/v8/src/store-buffer.cc +12 -5
- data/vendor/v8/src/store-buffer.h +2 -4
- data/vendor/v8/src/string-search.h +22 -6
- data/vendor/v8/src/string-stream.cc +11 -8
- data/vendor/v8/src/string.js +47 -15
- data/vendor/v8/src/stub-cache.cc +461 -224
- data/vendor/v8/src/stub-cache.h +164 -102
- data/vendor/v8/src/sweeper-thread.cc +105 -0
- data/vendor/v8/src/sweeper-thread.h +81 -0
- data/vendor/v8/src/token.h +1 -0
- data/vendor/v8/src/transitions-inl.h +220 -0
- data/vendor/v8/src/transitions.cc +160 -0
- data/vendor/v8/src/transitions.h +207 -0
- data/vendor/v8/src/type-info.cc +182 -181
- data/vendor/v8/src/type-info.h +31 -19
- data/vendor/v8/src/unicode-inl.h +62 -106
- data/vendor/v8/src/unicode.cc +57 -67
- data/vendor/v8/src/unicode.h +45 -91
- data/vendor/v8/src/uri.js +57 -29
- data/vendor/v8/src/utils.h +105 -5
- data/vendor/v8/src/v8-counters.cc +54 -11
- data/vendor/v8/src/v8-counters.h +134 -19
- data/vendor/v8/src/v8.cc +29 -29
- data/vendor/v8/src/v8.h +1 -0
- data/vendor/v8/src/v8conversions.cc +26 -22
- data/vendor/v8/src/v8globals.h +56 -43
- data/vendor/v8/src/v8natives.js +83 -30
- data/vendor/v8/src/v8threads.cc +42 -21
- data/vendor/v8/src/v8threads.h +4 -1
- data/vendor/v8/src/v8utils.cc +9 -93
- data/vendor/v8/src/v8utils.h +37 -33
- data/vendor/v8/src/variables.cc +6 -3
- data/vendor/v8/src/variables.h +6 -13
- data/vendor/v8/src/version.cc +2 -2
- data/vendor/v8/src/vm-state-inl.h +11 -0
- data/vendor/v8/src/x64/assembler-x64-inl.h +39 -8
- data/vendor/v8/src/x64/assembler-x64.cc +78 -64
- data/vendor/v8/src/x64/assembler-x64.h +38 -33
- data/vendor/v8/src/x64/builtins-x64.cc +105 -7
- data/vendor/v8/src/x64/code-stubs-x64.cc +790 -413
- data/vendor/v8/src/x64/code-stubs-x64.h +10 -106
- data/vendor/v8/src/x64/codegen-x64.cc +210 -8
- data/vendor/v8/src/x64/codegen-x64.h +20 -1
- data/vendor/v8/src/x64/deoptimizer-x64.cc +336 -75
- data/vendor/v8/src/x64/disasm-x64.cc +15 -0
- data/vendor/v8/src/x64/frames-x64.h +0 -14
- data/vendor/v8/src/x64/full-codegen-x64.cc +293 -270
- data/vendor/v8/src/x64/ic-x64.cc +153 -251
- data/vendor/v8/src/x64/lithium-codegen-x64.cc +1379 -531
- data/vendor/v8/src/x64/lithium-codegen-x64.h +67 -23
- data/vendor/v8/src/x64/lithium-gap-resolver-x64.cc +2 -2
- data/vendor/v8/src/x64/lithium-x64.cc +349 -289
- data/vendor/v8/src/x64/lithium-x64.h +460 -250
- data/vendor/v8/src/x64/macro-assembler-x64.cc +350 -177
- data/vendor/v8/src/x64/macro-assembler-x64.h +67 -49
- data/vendor/v8/src/x64/regexp-macro-assembler-x64.cc +46 -33
- data/vendor/v8/src/x64/regexp-macro-assembler-x64.h +2 -3
- data/vendor/v8/src/x64/stub-cache-x64.cc +484 -653
- data/vendor/v8/src/zone-inl.h +9 -27
- data/vendor/v8/src/zone.cc +5 -5
- data/vendor/v8/src/zone.h +53 -27
- data/vendor/v8/test/benchmarks/testcfg.py +5 -0
- data/vendor/v8/test/cctest/cctest.cc +4 -0
- data/vendor/v8/test/cctest/cctest.gyp +3 -1
- data/vendor/v8/test/cctest/cctest.h +57 -9
- data/vendor/v8/test/cctest/cctest.status +15 -15
- data/vendor/v8/test/cctest/test-accessors.cc +26 -0
- data/vendor/v8/test/cctest/test-alloc.cc +22 -30
- data/vendor/v8/test/cctest/test-api.cc +1943 -314
- data/vendor/v8/test/cctest/test-assembler-arm.cc +133 -13
- data/vendor/v8/test/cctest/test-assembler-ia32.cc +1 -1
- data/vendor/v8/test/cctest/test-assembler-mips.cc +12 -0
- data/vendor/v8/test/cctest/test-ast.cc +4 -2
- data/vendor/v8/test/cctest/test-compiler.cc +61 -29
- data/vendor/v8/test/cctest/test-dataflow.cc +2 -2
- data/vendor/v8/test/cctest/test-debug.cc +212 -33
- data/vendor/v8/test/cctest/test-decls.cc +257 -11
- data/vendor/v8/test/cctest/test-dictionary.cc +24 -10
- data/vendor/v8/test/cctest/test-disasm-arm.cc +118 -1
- data/vendor/v8/test/cctest/test-disasm-ia32.cc +3 -2
- data/vendor/v8/test/cctest/test-flags.cc +14 -1
- data/vendor/v8/test/cctest/test-func-name-inference.cc +7 -4
- data/vendor/v8/test/cctest/test-global-object.cc +51 -0
- data/vendor/v8/test/cctest/test-hashing.cc +32 -23
- data/vendor/v8/test/cctest/test-heap-profiler.cc +131 -77
- data/vendor/v8/test/cctest/test-heap.cc +1084 -143
- data/vendor/v8/test/cctest/test-list.cc +1 -1
- data/vendor/v8/test/cctest/test-liveedit.cc +3 -2
- data/vendor/v8/test/cctest/test-lockers.cc +12 -13
- data/vendor/v8/test/cctest/test-log.cc +10 -8
- data/vendor/v8/test/cctest/test-macro-assembler-x64.cc +2 -2
- data/vendor/v8/test/cctest/test-mark-compact.cc +44 -22
- data/vendor/v8/test/cctest/test-object-observe.cc +434 -0
- data/vendor/v8/test/cctest/test-parsing.cc +86 -39
- data/vendor/v8/test/cctest/test-platform-linux.cc +6 -0
- data/vendor/v8/test/cctest/test-platform-win32.cc +7 -0
- data/vendor/v8/test/cctest/test-random.cc +5 -4
- data/vendor/v8/test/cctest/test-regexp.cc +137 -101
- data/vendor/v8/test/cctest/test-serialize.cc +150 -230
- data/vendor/v8/test/cctest/test-sockets.cc +1 -1
- data/vendor/v8/test/cctest/test-spaces.cc +139 -0
- data/vendor/v8/test/cctest/test-strings.cc +736 -74
- data/vendor/v8/test/cctest/test-thread-termination.cc +10 -11
- data/vendor/v8/test/cctest/test-threads.cc +4 -4
- data/vendor/v8/test/cctest/test-utils.cc +16 -0
- data/vendor/v8/test/cctest/test-weakmaps.cc +7 -3
- data/vendor/v8/test/cctest/testcfg.py +64 -5
- data/vendor/v8/test/es5conform/testcfg.py +5 -0
- data/vendor/v8/test/message/message.status +1 -1
- data/vendor/v8/test/message/overwritten-builtins.out +3 -0
- data/vendor/v8/test/message/testcfg.py +89 -8
- data/vendor/v8/test/message/try-catch-finally-no-message.out +26 -26
- data/vendor/v8/test/mjsunit/accessor-map-sharing.js +18 -2
- data/vendor/v8/test/mjsunit/allocation-site-info.js +126 -0
- data/vendor/v8/test/mjsunit/array-bounds-check-removal.js +62 -1
- data/vendor/v8/test/mjsunit/array-iteration.js +1 -1
- data/vendor/v8/test/mjsunit/array-literal-transitions.js +2 -0
- data/vendor/v8/test/mjsunit/array-natives-elements.js +317 -0
- data/vendor/v8/test/mjsunit/array-reduce.js +8 -8
- data/vendor/v8/test/mjsunit/array-slice.js +12 -0
- data/vendor/v8/test/mjsunit/array-store-and-grow.js +4 -1
- data/vendor/v8/test/mjsunit/assert-opt-and-deopt.js +1 -1
- data/vendor/v8/test/mjsunit/bugs/bug-2337.js +53 -0
- data/vendor/v8/test/mjsunit/compare-known-objects-slow.js +69 -0
- data/vendor/v8/test/mjsunit/compiler/alloc-object-huge.js +3 -1
- data/vendor/v8/test/mjsunit/compiler/inline-accessors.js +368 -0
- data/vendor/v8/test/mjsunit/compiler/inline-arguments.js +87 -1
- data/vendor/v8/test/mjsunit/compiler/inline-closures.js +49 -0
- data/vendor/v8/test/mjsunit/compiler/inline-construct.js +55 -43
- data/vendor/v8/test/mjsunit/compiler/inline-literals.js +39 -0
- data/vendor/v8/test/mjsunit/compiler/multiply-add.js +69 -0
- data/vendor/v8/test/mjsunit/compiler/optimized-closures.js +57 -0
- data/vendor/v8/test/mjsunit/compiler/parallel-proto-change.js +44 -0
- data/vendor/v8/test/mjsunit/compiler/property-static.js +69 -0
- data/vendor/v8/test/mjsunit/compiler/proto-chain-constant.js +55 -0
- data/vendor/v8/test/mjsunit/compiler/proto-chain-load.js +44 -0
- data/vendor/v8/test/mjsunit/compiler/regress-gvn.js +3 -2
- data/vendor/v8/test/mjsunit/compiler/regress-or.js +6 -2
- data/vendor/v8/test/mjsunit/compiler/rotate.js +224 -0
- data/vendor/v8/test/mjsunit/compiler/uint32.js +173 -0
- data/vendor/v8/test/mjsunit/count-based-osr.js +2 -1
- data/vendor/v8/test/mjsunit/d8-os.js +3 -3
- data/vendor/v8/test/mjsunit/date-parse.js +3 -0
- data/vendor/v8/test/mjsunit/date.js +22 -0
- data/vendor/v8/test/mjsunit/debug-break-inline.js +1 -0
- data/vendor/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js +22 -12
- data/vendor/v8/test/mjsunit/debug-evaluate-locals-optimized.js +21 -10
- data/vendor/v8/test/mjsunit/debug-liveedit-compile-error.js +60 -0
- data/vendor/v8/test/mjsunit/debug-liveedit-double-call.js +142 -0
- data/vendor/v8/test/mjsunit/debug-liveedit-literals.js +94 -0
- data/vendor/v8/test/mjsunit/debug-liveedit-restart-frame.js +153 -0
- data/vendor/v8/test/mjsunit/debug-multiple-breakpoints.js +1 -1
- data/vendor/v8/test/mjsunit/debug-script-breakpoints-closure.js +67 -0
- data/vendor/v8/test/mjsunit/debug-script-breakpoints-nested.js +82 -0
- data/vendor/v8/test/mjsunit/debug-script.js +4 -2
- data/vendor/v8/test/mjsunit/debug-set-variable-value.js +308 -0
- data/vendor/v8/test/mjsunit/debug-stepout-scope-part1.js +190 -0
- data/vendor/v8/test/mjsunit/debug-stepout-scope-part2.js +83 -0
- data/vendor/v8/test/mjsunit/debug-stepout-scope-part3.js +80 -0
- data/vendor/v8/test/mjsunit/debug-stepout-scope-part4.js +80 -0
- data/vendor/v8/test/mjsunit/debug-stepout-scope-part5.js +77 -0
- data/vendor/v8/test/mjsunit/debug-stepout-scope-part6.js +79 -0
- data/vendor/v8/test/mjsunit/debug-stepout-scope-part7.js +79 -0
- data/vendor/v8/test/mjsunit/{debug-stepout-scope.js → debug-stepout-scope-part8.js} +0 -189
- data/vendor/v8/test/mjsunit/delete-non-configurable.js +74 -0
- data/vendor/v8/test/mjsunit/deopt-minus-zero.js +56 -0
- data/vendor/v8/test/mjsunit/elements-kind.js +6 -4
- data/vendor/v8/test/mjsunit/elements-length-no-holey.js +33 -0
- data/vendor/v8/test/mjsunit/elements-transition-hoisting.js +46 -19
- data/vendor/v8/test/mjsunit/error-accessors.js +54 -0
- data/vendor/v8/test/mjsunit/error-constructors.js +1 -14
- data/vendor/v8/test/mjsunit/error-tostring.js +8 -0
- data/vendor/v8/test/mjsunit/eval-stack-trace.js +204 -0
- data/vendor/v8/test/mjsunit/external-array.js +364 -1
- data/vendor/v8/test/mjsunit/fast-array-length.js +37 -0
- data/vendor/v8/test/mjsunit/fast-non-keyed.js +113 -0
- data/vendor/v8/test/mjsunit/fast-prototype.js +117 -0
- data/vendor/v8/test/mjsunit/function-call.js +14 -18
- data/vendor/v8/test/mjsunit/fuzz-natives-part1.js +230 -0
- data/vendor/v8/test/mjsunit/fuzz-natives-part2.js +229 -0
- data/vendor/v8/test/mjsunit/fuzz-natives-part3.js +229 -0
- data/vendor/v8/test/mjsunit/{fuzz-natives.js → fuzz-natives-part4.js} +12 -2
- data/vendor/v8/test/mjsunit/generated-transition-stub.js +218 -0
- data/vendor/v8/test/mjsunit/greedy.js +1 -1
- data/vendor/v8/test/mjsunit/harmony/block-conflicts.js +2 -1
- data/vendor/v8/test/mjsunit/harmony/block-let-crankshaft.js +1 -1
- data/vendor/v8/test/mjsunit/harmony/collections.js +69 -11
- data/vendor/v8/test/mjsunit/harmony/debug-blockscopes.js +2 -2
- data/vendor/v8/test/mjsunit/harmony/module-linking.js +180 -3
- data/vendor/v8/test/mjsunit/harmony/module-parsing.js +31 -0
- data/vendor/v8/test/mjsunit/harmony/module-recompile.js +87 -0
- data/vendor/v8/test/mjsunit/harmony/module-resolution.js +15 -2
- data/vendor/v8/test/mjsunit/harmony/object-observe.js +1056 -0
- data/vendor/v8/test/mjsunit/harmony/proxies-json.js +178 -0
- data/vendor/v8/test/mjsunit/harmony/proxies.js +25 -10
- data/vendor/v8/test/mjsunit/json-parser-recursive.js +33 -0
- data/vendor/v8/test/mjsunit/json-stringify-recursive.js +52 -0
- data/vendor/v8/test/mjsunit/json.js +38 -2
- data/vendor/v8/test/mjsunit/json2.js +153 -0
- data/vendor/v8/test/mjsunit/limit-locals.js +5 -4
- data/vendor/v8/test/mjsunit/manual-parallel-recompile.js +79 -0
- data/vendor/v8/test/mjsunit/math-exp-precision.js +64 -0
- data/vendor/v8/test/mjsunit/math-floor-negative.js +59 -0
- data/vendor/v8/test/mjsunit/math-floor-of-div-minus-zero.js +41 -0
- data/vendor/v8/test/mjsunit/math-floor-of-div-nosudiv.js +288 -0
- data/vendor/v8/test/mjsunit/math-floor-of-div.js +81 -9
- data/vendor/v8/test/mjsunit/{math-floor.js → math-floor-part1.js} +1 -72
- data/vendor/v8/test/mjsunit/math-floor-part2.js +76 -0
- data/vendor/v8/test/mjsunit/math-floor-part3.js +78 -0
- data/vendor/v8/test/mjsunit/math-floor-part4.js +76 -0
- data/vendor/v8/test/mjsunit/mirror-object.js +43 -9
- data/vendor/v8/test/mjsunit/mjsunit.js +1 -1
- data/vendor/v8/test/mjsunit/mjsunit.status +52 -27
- data/vendor/v8/test/mjsunit/mul-exhaustive-part1.js +491 -0
- data/vendor/v8/test/mjsunit/mul-exhaustive-part10.js +470 -0
- data/vendor/v8/test/mjsunit/mul-exhaustive-part2.js +525 -0
- data/vendor/v8/test/mjsunit/mul-exhaustive-part3.js +532 -0
- data/vendor/v8/test/mjsunit/mul-exhaustive-part4.js +509 -0
- data/vendor/v8/test/mjsunit/mul-exhaustive-part5.js +505 -0
- data/vendor/v8/test/mjsunit/mul-exhaustive-part6.js +554 -0
- data/vendor/v8/test/mjsunit/mul-exhaustive-part7.js +497 -0
- data/vendor/v8/test/mjsunit/mul-exhaustive-part8.js +526 -0
- data/vendor/v8/test/mjsunit/mul-exhaustive-part9.js +533 -0
- data/vendor/v8/test/mjsunit/new-function.js +34 -0
- data/vendor/v8/test/mjsunit/numops-fuzz-part1.js +1172 -0
- data/vendor/v8/test/mjsunit/numops-fuzz-part2.js +1178 -0
- data/vendor/v8/test/mjsunit/numops-fuzz-part3.js +1178 -0
- data/vendor/v8/test/mjsunit/numops-fuzz-part4.js +1177 -0
- data/vendor/v8/test/mjsunit/object-define-property.js +107 -2
- data/vendor/v8/test/mjsunit/override-read-only-property.js +6 -4
- data/vendor/v8/test/mjsunit/packed-elements.js +2 -2
- data/vendor/v8/test/mjsunit/parse-int-float.js +4 -4
- data/vendor/v8/test/mjsunit/pixel-array-rounding.js +1 -1
- data/vendor/v8/test/mjsunit/readonly.js +228 -0
- data/vendor/v8/test/mjsunit/regexp-capture-3.js +16 -18
- data/vendor/v8/test/mjsunit/regexp-capture.js +2 -0
- data/vendor/v8/test/mjsunit/regexp-global.js +122 -0
- data/vendor/v8/test/mjsunit/regexp-results-cache.js +78 -0
- data/vendor/v8/test/mjsunit/regress/regress-1117.js +12 -3
- data/vendor/v8/test/mjsunit/regress/regress-1118.js +1 -1
- data/vendor/v8/test/mjsunit/regress/regress-115100.js +36 -0
- data/vendor/v8/test/mjsunit/regress/regress-1199637.js +1 -3
- data/vendor/v8/test/mjsunit/regress/regress-121407.js +1 -1
- data/vendor/v8/test/mjsunit/regress/regress-131923.js +30 -0
- data/vendor/v8/test/mjsunit/regress/regress-131994.js +70 -0
- data/vendor/v8/test/mjsunit/regress/regress-133211.js +35 -0
- data/vendor/v8/test/mjsunit/regress/regress-133211b.js +39 -0
- data/vendor/v8/test/mjsunit/regress/regress-136048.js +34 -0
- data/vendor/v8/test/mjsunit/regress/regress-137768.js +73 -0
- data/vendor/v8/test/mjsunit/regress/regress-143967.js +34 -0
- data/vendor/v8/test/mjsunit/regress/regress-145201.js +107 -0
- data/vendor/v8/test/mjsunit/regress/regress-147497.js +45 -0
- data/vendor/v8/test/mjsunit/regress/regress-148378.js +38 -0
- data/vendor/v8/test/mjsunit/regress/regress-1563.js +1 -1
- data/vendor/v8/test/mjsunit/regress/regress-1591.js +48 -0
- data/vendor/v8/test/mjsunit/regress/regress-164442.js +45 -0
- data/vendor/v8/test/mjsunit/regress/regress-165637.js +61 -0
- data/vendor/v8/test/mjsunit/regress/regress-166379.js +39 -0
- data/vendor/v8/test/mjsunit/regress/regress-166553.js +33 -0
- data/vendor/v8/test/mjsunit/regress/regress-1692.js +1 -1
- data/vendor/v8/test/mjsunit/regress/regress-171641.js +40 -0
- data/vendor/v8/test/mjsunit/regress/regress-1980.js +1 -1
- data/vendor/v8/test/mjsunit/regress/regress-2073.js +99 -0
- data/vendor/v8/test/mjsunit/regress/regress-2119.js +36 -0
- data/vendor/v8/test/mjsunit/regress/regress-2156.js +39 -0
- data/vendor/v8/test/mjsunit/regress/regress-2163.js +70 -0
- data/vendor/v8/test/mjsunit/regress/regress-2170.js +58 -0
- data/vendor/v8/test/mjsunit/regress/regress-2172.js +35 -0
- data/vendor/v8/test/mjsunit/regress/regress-2185-2.js +145 -0
- data/vendor/v8/test/mjsunit/regress/regress-2185.js +38 -0
- data/vendor/v8/test/mjsunit/regress/regress-2186.js +49 -0
- data/vendor/v8/test/mjsunit/regress/regress-2193.js +58 -0
- data/vendor/v8/test/mjsunit/regress/regress-2219.js +32 -0
- data/vendor/v8/test/mjsunit/regress/regress-2225.js +65 -0
- data/vendor/v8/test/mjsunit/regress/regress-2226.js +36 -0
- data/vendor/v8/test/mjsunit/regress/regress-2234.js +41 -0
- data/vendor/v8/test/mjsunit/regress/regress-2243.js +31 -0
- data/vendor/v8/test/mjsunit/regress/regress-2249.js +33 -0
- data/vendor/v8/test/mjsunit/regress/regress-2250.js +68 -0
- data/vendor/v8/test/mjsunit/regress/regress-2261.js +113 -0
- data/vendor/v8/test/mjsunit/regress/regress-2263.js +30 -0
- data/vendor/v8/test/mjsunit/regress/regress-2284.js +32 -0
- data/vendor/v8/test/mjsunit/regress/regress-2285.js +32 -0
- data/vendor/v8/test/mjsunit/regress/regress-2286.js +32 -0
- data/vendor/v8/test/mjsunit/regress/regress-2289.js +34 -0
- data/vendor/v8/test/mjsunit/regress/regress-2291.js +36 -0
- data/vendor/v8/test/mjsunit/regress/regress-2294.js +70 -0
- data/vendor/v8/test/mjsunit/regress/regress-2296.js +40 -0
- data/vendor/v8/test/mjsunit/regress/regress-2315.js +40 -0
- data/vendor/v8/test/mjsunit/regress/regress-2318.js +66 -0
- data/vendor/v8/test/mjsunit/regress/regress-2322.js +36 -0
- data/vendor/v8/test/mjsunit/regress/regress-2326.js +54 -0
- data/vendor/v8/test/mjsunit/regress/regress-2336.js +53 -0
- data/vendor/v8/test/mjsunit/regress/regress-2339.js +59 -0
- data/vendor/v8/test/mjsunit/regress/regress-2346.js +123 -0
- data/vendor/v8/test/mjsunit/regress/regress-2373.js +29 -0
- data/vendor/v8/test/mjsunit/regress/regress-2374.js +33 -0
- data/vendor/v8/test/mjsunit/regress/regress-2398.js +41 -0
- data/vendor/v8/test/mjsunit/regress/regress-2410.js +36 -0
- data/vendor/v8/test/mjsunit/regress/regress-2416.js +75 -0
- data/vendor/v8/test/mjsunit/regress/regress-2419.js +37 -0
- data/vendor/v8/test/mjsunit/regress/regress-2433.js +36 -0
- data/vendor/v8/test/mjsunit/regress/regress-2437.js +156 -0
- data/vendor/v8/test/mjsunit/regress/regress-2438.js +52 -0
- data/vendor/v8/test/mjsunit/regress/regress-2443.js +129 -0
- data/vendor/v8/test/mjsunit/regress/regress-2444.js +120 -0
- data/vendor/v8/test/mjsunit/regress/regress-2489.js +50 -0
- data/vendor/v8/test/mjsunit/regress/regress-2499.js +40 -0
- data/vendor/v8/test/mjsunit/regress/regress-334.js +1 -1
- data/vendor/v8/test/mjsunit/regress/regress-492.js +39 -1
- data/vendor/v8/test/mjsunit/regress/regress-builtin-array-op.js +38 -0
- data/vendor/v8/test/mjsunit/regress/regress-cnlt-elements.js +43 -0
- data/vendor/v8/test/mjsunit/regress/regress-cnlt-enum-indices.js +45 -0
- data/vendor/v8/test/mjsunit/regress/regress-cntl-descriptors-enum.js +46 -0
- data/vendor/v8/test/mjsunit/regress/regress-convert-enum.js +60 -0
- data/vendor/v8/test/mjsunit/regress/regress-convert-enum2.js +46 -0
- data/vendor/v8/test/mjsunit/regress/regress-convert-transition.js +40 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-119926.js +3 -1
- data/vendor/v8/test/mjsunit/regress/regress-crbug-125148.js +90 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-134055.js +63 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-134609.js +59 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-135008.js +45 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-135066.js +55 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-137689.js +47 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-138887.js +48 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-140083.js +44 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-142087.js +38 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-142218.js +44 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-145961.js +39 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-146910.js +33 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-147475.js +48 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-148376.js +35 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-150545.js +53 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-150729.js +39 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-157019.js +54 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-157520.js +38 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-158185.js +39 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-160010.js +35 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-162085.js +71 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-168545.js +34 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-170856.js +33 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-172345.js +34 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-173974.js +36 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-18639.js +9 -5
- data/vendor/v8/test/mjsunit/regress/regress-debug-code-recompilation.js +2 -1
- data/vendor/v8/test/mjsunit/regress/regress-deep-proto.js +45 -0
- data/vendor/v8/test/mjsunit/regress/regress-delete-empty-double.js +40 -0
- data/vendor/v8/test/mjsunit/regress/regress-iteration-order.js +42 -0
- data/vendor/v8/test/mjsunit/regress/regress-json-stringify-gc.js +41 -0
- data/vendor/v8/test/mjsunit/regress/regress-latin-1.js +78 -0
- data/vendor/v8/test/mjsunit/regress/regress-load-elements.js +49 -0
- data/vendor/v8/test/mjsunit/regress/regress-observe-empty-double-array.js +38 -0
- data/vendor/v8/test/mjsunit/regress/regress-undefined-store-keyed-fast-element.js +37 -0
- data/vendor/v8/test/mjsunit/shift-for-integer-div.js +59 -0
- data/vendor/v8/test/mjsunit/stack-traces-gc.js +119 -0
- data/vendor/v8/test/mjsunit/stack-traces-overflow.js +122 -0
- data/vendor/v8/test/mjsunit/stack-traces.js +39 -1
- data/vendor/v8/test/mjsunit/str-to-num.js +7 -2
- data/vendor/v8/test/mjsunit/strict-mode.js +36 -11
- data/vendor/v8/test/mjsunit/string-charcodeat.js +3 -0
- data/vendor/v8/test/mjsunit/string-natives.js +72 -0
- data/vendor/v8/test/mjsunit/string-split.js +17 -0
- data/vendor/v8/test/mjsunit/testcfg.py +76 -6
- data/vendor/v8/test/mjsunit/tools/tickprocessor.js +4 -1
- data/vendor/v8/test/mjsunit/try-finally-continue.js +72 -0
- data/vendor/v8/test/mjsunit/typed-array-slice.js +61 -0
- data/vendor/v8/test/mjsunit/unbox-double-arrays.js +2 -0
- data/vendor/v8/test/mjsunit/uri.js +12 -0
- data/vendor/v8/test/mjsunit/with-readonly.js +4 -2
- data/vendor/v8/test/mozilla/mozilla.status +19 -113
- data/vendor/v8/test/mozilla/testcfg.py +122 -3
- data/vendor/v8/test/preparser/preparser.status +5 -0
- data/vendor/v8/test/preparser/strict-identifiers.pyt +1 -1
- data/vendor/v8/test/preparser/testcfg.py +101 -5
- data/vendor/v8/test/sputnik/sputnik.status +1 -1
- data/vendor/v8/test/sputnik/testcfg.py +5 -0
- data/vendor/v8/test/test262/README +2 -2
- data/vendor/v8/test/test262/test262.status +13 -36
- data/vendor/v8/test/test262/testcfg.py +102 -8
- data/vendor/v8/tools/android-build.sh +0 -0
- data/vendor/v8/tools/android-ll-prof.sh +69 -0
- data/vendor/v8/tools/android-run.py +109 -0
- data/vendor/v8/tools/android-sync.sh +105 -0
- data/vendor/v8/tools/bash-completion.sh +0 -0
- data/vendor/v8/tools/check-static-initializers.sh +0 -0
- data/vendor/v8/tools/common-includes.sh +15 -22
- data/vendor/v8/tools/disasm.py +4 -4
- data/vendor/v8/tools/fuzz-harness.sh +0 -0
- data/vendor/v8/tools/gen-postmortem-metadata.py +6 -8
- data/vendor/v8/tools/grokdump.py +404 -129
- data/vendor/v8/tools/gyp/v8.gyp +105 -43
- data/vendor/v8/tools/linux-tick-processor +5 -5
- data/vendor/v8/tools/ll_prof.py +75 -15
- data/vendor/v8/tools/merge-to-branch.sh +2 -2
- data/vendor/v8/tools/plot-timer-events +70 -0
- data/vendor/v8/tools/plot-timer-events.js +510 -0
- data/vendor/v8/tools/presubmit.py +1 -0
- data/vendor/v8/tools/push-to-trunk.sh +14 -4
- data/vendor/v8/tools/run-llprof.sh +69 -0
- data/vendor/v8/tools/run-tests.py +372 -0
- data/vendor/v8/tools/run-valgrind.py +1 -1
- data/vendor/v8/tools/status-file-converter.py +39 -0
- data/vendor/v8/tools/test-server.py +224 -0
- data/vendor/v8/tools/test-wrapper-gypbuild.py +13 -16
- data/vendor/v8/tools/test.py +10 -19
- data/vendor/v8/tools/testrunner/README +174 -0
- data/vendor/v8/tools/testrunner/__init__.py +26 -0
- data/vendor/v8/tools/testrunner/local/__init__.py +26 -0
- data/vendor/v8/tools/testrunner/local/commands.py +153 -0
- data/vendor/v8/tools/testrunner/local/execution.py +182 -0
- data/vendor/v8/tools/testrunner/local/old_statusfile.py +460 -0
- data/vendor/v8/tools/testrunner/local/progress.py +238 -0
- data/vendor/v8/tools/testrunner/local/statusfile.py +145 -0
- data/vendor/v8/tools/testrunner/local/testsuite.py +187 -0
- data/vendor/v8/tools/testrunner/local/utils.py +108 -0
- data/vendor/v8/tools/testrunner/local/verbose.py +99 -0
- data/vendor/v8/tools/testrunner/network/__init__.py +26 -0
- data/vendor/v8/tools/testrunner/network/distro.py +90 -0
- data/vendor/v8/tools/testrunner/network/endpoint.py +124 -0
- data/vendor/v8/tools/testrunner/network/network_execution.py +253 -0
- data/vendor/v8/tools/testrunner/network/perfdata.py +120 -0
- data/vendor/v8/tools/testrunner/objects/__init__.py +26 -0
- data/vendor/v8/tools/testrunner/objects/context.py +50 -0
- data/vendor/v8/tools/testrunner/objects/output.py +60 -0
- data/vendor/v8/tools/testrunner/objects/peer.py +80 -0
- data/vendor/v8/tools/testrunner/objects/testcase.py +83 -0
- data/vendor/v8/tools/testrunner/objects/workpacket.py +90 -0
- data/vendor/v8/tools/testrunner/server/__init__.py +26 -0
- data/vendor/v8/tools/testrunner/server/compression.py +111 -0
- data/vendor/v8/tools/testrunner/server/constants.py +51 -0
- data/vendor/v8/tools/testrunner/server/daemon.py +147 -0
- data/vendor/v8/tools/testrunner/server/local_handler.py +119 -0
- data/vendor/v8/tools/testrunner/server/main.py +245 -0
- data/vendor/v8/tools/testrunner/server/presence_handler.py +120 -0
- data/vendor/v8/tools/testrunner/server/signatures.py +63 -0
- data/vendor/v8/tools/testrunner/server/status_handler.py +112 -0
- data/vendor/v8/tools/testrunner/server/work_handler.py +150 -0
- data/vendor/v8/tools/tick-processor.html +168 -0
- data/vendor/v8/tools/tickprocessor-driver.js +5 -3
- data/vendor/v8/tools/tickprocessor.js +58 -15
- metadata +534 -30
- data/patches/add-freebsd9-and-freebsd10-to-gyp-GetFlavor.patch +0 -11
- data/patches/do-not-imply-vfp3-and-armv7.patch +0 -44
- data/patches/fPIC-on-x64.patch +0 -14
- data/vendor/v8/src/liveobjectlist-inl.h +0 -126
- data/vendor/v8/src/liveobjectlist.cc +0 -2631
- data/vendor/v8/src/liveobjectlist.h +0 -319
- data/vendor/v8/test/mjsunit/mul-exhaustive.js +0 -4629
- data/vendor/v8/test/mjsunit/numops-fuzz.js +0 -4609
- data/vendor/v8/test/mjsunit/regress/regress-1969.js +0 -5045
data/vendor/v8/src/heap-profiler.cc
CHANGED
@@ -65,23 +65,29 @@ void HeapProfiler::TearDown() {
 }
 
 
-HeapSnapshot* HeapProfiler::TakeSnapshot(const char* name,
-                                         int type,
-                                         v8::ActivityControl* control) {
+HeapSnapshot* HeapProfiler::TakeSnapshot(
+    const char* name,
+    int type,
+    v8::ActivityControl* control,
+    v8::HeapProfiler::ObjectNameResolver* resolver) {
   ASSERT(Isolate::Current()->heap_profiler() != NULL);
   return Isolate::Current()->heap_profiler()->TakeSnapshotImpl(name,
                                                                type,
-                                                               control);
+                                                               control,
+                                                               resolver);
 }
 
 
-HeapSnapshot* HeapProfiler::TakeSnapshot(String* name,
-                                         int type,
-                                         v8::ActivityControl* control) {
+HeapSnapshot* HeapProfiler::TakeSnapshot(
+    String* name,
+    int type,
+    v8::ActivityControl* control,
+    v8::HeapProfiler::ObjectNameResolver* resolver) {
   ASSERT(Isolate::Current()->heap_profiler() != NULL);
   return Isolate::Current()->heap_profiler()->TakeSnapshotImpl(name,
                                                                type,
-                                                               control);
+                                                               control,
+                                                               resolver);
 }
 
 
@@ -97,7 +103,7 @@ void HeapProfiler::StopHeapObjectsTracking() {
 }
 
 
-void HeapProfiler::PushHeapObjectsStats(v8::OutputStream* stream) {
+SnapshotObjectId HeapProfiler::PushHeapObjectsStats(v8::OutputStream* stream) {
   ASSERT(Isolate::Current()->heap_profiler() != NULL);
   return Isolate::Current()->heap_profiler()->PushHeapObjectsStatsImpl(stream);
 }
@@ -122,16 +128,18 @@ v8::RetainedObjectInfo* HeapProfiler::ExecuteWrapperClassCallback(
 }
 
 
-HeapSnapshot* HeapProfiler::TakeSnapshotImpl(const char* name,
-                                             int type,
-                                             v8::ActivityControl* control) {
+HeapSnapshot* HeapProfiler::TakeSnapshotImpl(
+    const char* name,
+    int type,
+    v8::ActivityControl* control,
+    v8::HeapProfiler::ObjectNameResolver* resolver) {
   HeapSnapshot::Type s_type = static_cast<HeapSnapshot::Type>(type);
   HeapSnapshot* result =
       snapshots_->NewSnapshot(s_type, name, next_snapshot_uid_++);
   bool generation_completed = true;
   switch (s_type) {
     case HeapSnapshot::kFull: {
-      HeapSnapshotGenerator generator(result, control);
+      HeapSnapshotGenerator generator(result, control, resolver);
       generation_completed = generator.GenerateSnapshot();
       break;
     }
@@ -147,10 +155,13 @@ HeapSnapshot* HeapProfiler::TakeSnapshotImpl(const char* name,
 }
 
 
-HeapSnapshot* HeapProfiler::TakeSnapshotImpl(String* name,
-                                             int type,
-                                             v8::ActivityControl* control) {
-  return TakeSnapshotImpl(snapshots_->names()->GetName(name), type, control);
+HeapSnapshot* HeapProfiler::TakeSnapshotImpl(
+    String* name,
+    int type,
+    v8::ActivityControl* control,
+    v8::HeapProfiler::ObjectNameResolver* resolver) {
+  return TakeSnapshotImpl(snapshots_->names()->GetName(name), type, control,
+                          resolver);
 }
 
 void HeapProfiler::StartHeapObjectsTrackingImpl() {
@@ -158,8 +169,8 @@ void HeapProfiler::StartHeapObjectsTrackingImpl() {
 }
 
 
-void HeapProfiler::PushHeapObjectsStatsImpl(OutputStream* stream) {
-  snapshots_->PushHeapObjectsStats(stream);
+SnapshotObjectId HeapProfiler::PushHeapObjectsStatsImpl(OutputStream* stream) {
+  return snapshots_->PushHeapObjectsStats(stream);
 }
 
 
@@ -168,6 +179,14 @@ void HeapProfiler::StopHeapObjectsTrackingImpl() {
 }
 
 
+size_t HeapProfiler::GetMemorySizeUsedByProfiler() {
+  HeapProfiler* profiler = Isolate::Current()->heap_profiler();
+  ASSERT(profiler != NULL);
+  size_t size = profiler->snapshots_->GetUsedMemorySize();
+  return size;
+}
+
+
 int HeapProfiler::GetSnapshotsCount() {
   HeapProfiler* profiler = Isolate::Current()->heap_profiler();
   ASSERT(profiler != NULL);
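Seen from the embedder API (include/v8.h and include/v8-profiler.h in this same release), the PushHeapObjectsStats change above surfaces as a new return value: the call now reports the SnapshotObjectId of the last object seen instead of returning void. A minimal caller, sketched under the assumption of the 3.16-era OutputStream interface; the NullStream sink is hypothetical:

    #include "v8.h"
    #include "v8-profiler.h"

    // Hypothetical sink that discards the streamed HeapStatsUpdate records;
    // only the id returned by PushHeapObjectsStats is of interest here.
    class NullStream : public v8::OutputStream {
     public:
      virtual void EndOfStream() {}
      virtual WriteResult WriteAsciiChunk(char* data, int size) { return kContinue; }
      virtual WriteResult WriteHeapStatsChunk(v8::HeapStatsUpdate* data, int count) {
        return kContinue;
      }
    };

    v8::SnapshotObjectId SampleHeapStats() {
      NullStream stream;
      // Before this release the call returned void; it now reports the
      // SnapshotObjectId of the last object seen by the stats stream.
      return v8::HeapProfiler::PushHeapObjectsStats(&stream);
    }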
data/vendor/v8/src/heap-profiler.h
CHANGED
@@ -49,16 +49,22 @@ class HeapProfiler {
   static void SetUp();
   static void TearDown();
 
-  static HeapSnapshot* TakeSnapshot(const char* name,
-                                    int type,
-                                    v8::ActivityControl* control);
-  static HeapSnapshot* TakeSnapshot(String* name,
-                                    int type,
-                                    v8::ActivityControl* control);
+  static size_t GetMemorySizeUsedByProfiler();
+
+  static HeapSnapshot* TakeSnapshot(
+      const char* name,
+      int type,
+      v8::ActivityControl* control,
+      v8::HeapProfiler::ObjectNameResolver* resolver);
+  static HeapSnapshot* TakeSnapshot(
+      String* name,
+      int type,
+      v8::ActivityControl* control,
+      v8::HeapProfiler::ObjectNameResolver* resolver);
 
   static void StartHeapObjectsTracking();
   static void StopHeapObjectsTracking();
-  static void PushHeapObjectsStats(OutputStream* stream);
+  static SnapshotObjectId PushHeapObjectsStats(OutputStream* stream);
   static int GetSnapshotsCount();
   static HeapSnapshot* GetSnapshot(int index);
   static HeapSnapshot* FindSnapshot(unsigned uid);
@@ -79,17 +85,21 @@ class HeapProfiler {
  private:
   HeapProfiler();
   ~HeapProfiler();
-  HeapSnapshot* TakeSnapshotImpl(const char* name,
-                                 int type,
-                                 v8::ActivityControl* control);
-  HeapSnapshot* TakeSnapshotImpl(String* name,
-                                 int type,
-                                 v8::ActivityControl* control);
+  HeapSnapshot* TakeSnapshotImpl(
+      const char* name,
+      int type,
+      v8::ActivityControl* control,
+      v8::HeapProfiler::ObjectNameResolver* resolver);
+  HeapSnapshot* TakeSnapshotImpl(
+      String* name,
+      int type,
+      v8::ActivityControl* control,
+      v8::HeapProfiler::ObjectNameResolver* resolver);
   void ResetSnapshots();
 
   void StartHeapObjectsTrackingImpl();
   void StopHeapObjectsTrackingImpl();
-  void PushHeapObjectsStatsImpl(OutputStream* stream);
+  SnapshotObjectId PushHeapObjectsStatsImpl(OutputStream* stream);
 
   HeapSnapshotsCollection* snapshots_;
   unsigned next_snapshot_uid_;
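The resolver parameter threaded through both files is the internal side of v8::HeapProfiler::ObjectNameResolver, the new hook in include/v8-profiler.h that lets an embedder attach names to global objects in a snapshot. A sketch against the 3.16-era public API; the FixedNameResolver class and its label are illustrative, not taken from the diff:

    #include "v8.h"
    #include "v8-profiler.h"

    // Illustrative resolver: GetName() is the single hook the snapshot
    // generator consults when labelling a global object.
    class FixedNameResolver : public v8::HeapProfiler::ObjectNameResolver {
     public:
      virtual const char* GetName(v8::Handle<v8::Object> object) {
        return "my-global";  // a fixed label, purely for demonstration
      }
    };

    void TakeLabelledSnapshot() {
      v8::HandleScope scope;
      FixedNameResolver resolver;
      const v8::HeapSnapshot* snapshot = v8::HeapProfiler::TakeSnapshot(
          v8::String::New("example"),
          v8::HeapSnapshot::kFull,
          NULL,         // no ActivityControl
          &resolver);   // the new parameter introduced by this release
      // GetMemorySizeUsedByProfiler() is also new in this release.
      size_t profiler_overhead = v8::HeapProfiler::GetMemorySizeUsedByProfiler();
      (void)snapshot;
      (void)profiler_overhead;
    }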
data/vendor/v8/src/heap.cc
CHANGED
@@ -37,7 +37,6 @@
 #include "global-handles.h"
 #include "heap-profiler.h"
 #include "incremental-marking.h"
-#include "liveobjectlist-inl.h"
 #include "mark-compact.h"
 #include "natives.h"
 #include "objects-visiting.h"
@@ -48,6 +47,7 @@
 #include "snapshot.h"
 #include "store-buffer.h"
 #include "v8threads.h"
+#include "v8utils.h"
 #include "vm-state-inl.h"
 #if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP
 #include "regexp-macro-assembler.h"
@@ -66,21 +66,26 @@ Heap::Heap()
     : isolate_(NULL),
 // semispace_size_ should be a power of 2 and old_generation_size_ should be
 // a multiple of Page::kPageSize.
-#if defined(ANDROID)
-#define LUMP_OF_MEMORY (128 * KB)
-      code_range_size_(0),
-#elif defined(V8_TARGET_ARCH_X64)
+#if defined(V8_TARGET_ARCH_X64)
 #define LUMP_OF_MEMORY (2 * MB)
       code_range_size_(512*MB),
 #else
 #define LUMP_OF_MEMORY MB
       code_range_size_(0),
 #endif
+#if defined(ANDROID)
+      reserved_semispace_size_(4 * Max(LUMP_OF_MEMORY, Page::kPageSize)),
+      max_semispace_size_(4 * Max(LUMP_OF_MEMORY, Page::kPageSize)),
+      initial_semispace_size_(Page::kPageSize),
+      max_old_generation_size_(192*MB),
+      max_executable_size_(max_old_generation_size_),
+#else
       reserved_semispace_size_(8 * Max(LUMP_OF_MEMORY, Page::kPageSize)),
      max_semispace_size_(8 * Max(LUMP_OF_MEMORY, Page::kPageSize)),
      initial_semispace_size_(Page::kPageSize),
      max_old_generation_size_(700ul * LUMP_OF_MEMORY),
      max_executable_size_(256l * LUMP_OF_MEMORY),
+#endif
 
 // Variables set based on semispace_size_ and old_generation_size_ in
 // ConfigureHeap (survived_since_last_expansion_, external_allocation_limit_)
@@ -92,6 +97,7 @@ Heap::Heap()
       linear_allocation_scope_depth_(0),
       contexts_disposed_(0),
       global_ic_age_(0),
+      flush_monomorphic_ics_(false),
       scan_on_scavenge_pages_(0),
       new_space_(this),
       old_pointer_space_(NULL),
@@ -110,7 +116,6 @@ Heap::Heap()
       allocation_allowed_(true),
       allocation_timeout_(0),
       disallow_allocation_failure_(false),
-      debug_utils_(NULL),
 #endif  // DEBUG
       new_space_high_promotion_mode_active_(false),
       old_gen_promotion_limit_(kMinimumPromotionLimit),
@@ -130,14 +135,18 @@ Heap::Heap()
       tracer_(NULL),
       young_survivors_after_last_gc_(0),
       high_survival_rate_period_length_(0),
+      low_survival_rate_period_length_(0),
       survival_rate_(0),
       previous_survival_rate_trend_(Heap::STABLE),
       survival_rate_trend_(Heap::STABLE),
       max_gc_pause_(0),
+      total_gc_time_ms_(0),
       max_alive_after_gc_(0),
       min_in_mutator_(kMaxInt),
       alive_after_last_gc_(0),
       last_gc_end_timestamp_(0.0),
+      marking_time_(0.0),
+      sweeping_time_(0.0),
       store_buffer_(this),
       marking_(this),
       incremental_marking_(this),
@@ -148,9 +157,13 @@ Heap::Heap()
       ms_count_at_last_idle_notification_(0),
       gc_count_at_last_idle_gc_(0),
       scavenges_since_last_idle_round_(kIdleScavengeThreshold),
+#ifdef VERIFY_HEAP
+      no_weak_embedded_maps_verification_scope_depth_(0),
+#endif
       promotion_queue_(this),
       configured_(false),
-      chunks_queued_for_free_(NULL) {
+      chunks_queued_for_free_(NULL),
+      relocation_mutex_(NULL) {
   // Allow build-time customization of the max semispace size.  Building
   // V8 with snapshots and a non-default max semispace size is much
   // easier if you can define it as part of the build environment.
@@ -168,12 +181,14 @@ Heap::Heap()
   }
 
   memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
-  global_contexts_list_ = NULL;
+  native_contexts_list_ = NULL;
   mark_compact_collector_.heap_ = this;
   external_string_table_.heap_ = this;
   // Put a dummy entry in the remembered pages so we can find the list the
   // minidump even if there are no real unmapped pages.
   RememberUnmappedPage(NULL, false);
+
+  ClearObjectStats(true);
 }
 
 
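Read concretely (arithmetic inferred from the initializers above; Page::kPageSize is 1 MB in these trees): on x64, LUMP_OF_MEMORY is 2 MB, so the semispace reservation stays at 8 × Max(2 MB, 1 MB) = 16 MB and the old-generation cap at 700 × 2 MB = 1400 MB. The rewritten ANDROID branch no longer shrinks LUMP_OF_MEMORY to 128 KB; it overrides the limits directly instead, so on a 32-bit Android build the semispace reservation drops from 8 × Max(128 KB, 1 MB) = 8 MB to 4 × 1 MB = 4 MB, while the old-generation cap rises from 700 × 128 KB ≈ 87.5 MB to a flat 192 MB, with max_executable_size_ now tied to it.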
@@ -201,6 +216,20 @@ intptr_t Heap::CommittedMemory() {
          lo_space_->Size();
 }
 
+
+size_t Heap::CommittedPhysicalMemory() {
+  if (!HasBeenSetUp()) return 0;
+
+  return new_space_.CommittedPhysicalMemory() +
+      old_pointer_space_->CommittedPhysicalMemory() +
+      old_data_space_->CommittedPhysicalMemory() +
+      code_space_->CommittedPhysicalMemory() +
+      map_space_->CommittedPhysicalMemory() +
+      cell_space_->CommittedPhysicalMemory() +
+      lo_space_->CommittedPhysicalMemory();
+}
+
+
 intptr_t Heap::CommittedMemoryExecutable() {
   if (!HasBeenSetUp()) return 0;
 
@@ -315,48 +344,59 @@ void Heap::ReportStatisticsBeforeGC() {
 
 void Heap::PrintShortHeapStatistics() {
   if (!FLAG_trace_gc_verbose) return;
-  [42 removed lines: the previous PrintF-based per-space statistics block; the old side was not preserved in the rendered diff]
+  PrintPID("Memory allocator,   used: %6" V8_PTR_PREFIX "d KB"
+               ", available: %6" V8_PTR_PREFIX "d KB\n",
+           isolate_->memory_allocator()->Size() / KB,
+           isolate_->memory_allocator()->Available() / KB);
+  PrintPID("New space,          used: %6" V8_PTR_PREFIX "d KB"
+               ", available: %6" V8_PTR_PREFIX "d KB"
+               ", committed: %6" V8_PTR_PREFIX "d KB\n",
+           new_space_.Size() / KB,
+           new_space_.Available() / KB,
+           new_space_.CommittedMemory() / KB);
+  PrintPID("Old pointers,       used: %6" V8_PTR_PREFIX "d KB"
+               ", available: %6" V8_PTR_PREFIX "d KB"
+               ", committed: %6" V8_PTR_PREFIX "d KB\n",
+           old_pointer_space_->SizeOfObjects() / KB,
+           old_pointer_space_->Available() / KB,
+           old_pointer_space_->CommittedMemory() / KB);
+  PrintPID("Old data space,     used: %6" V8_PTR_PREFIX "d KB"
+               ", available: %6" V8_PTR_PREFIX "d KB"
+               ", committed: %6" V8_PTR_PREFIX "d KB\n",
+           old_data_space_->SizeOfObjects() / KB,
+           old_data_space_->Available() / KB,
+           old_data_space_->CommittedMemory() / KB);
+  PrintPID("Code space,         used: %6" V8_PTR_PREFIX "d KB"
+               ", available: %6" V8_PTR_PREFIX "d KB"
+               ", committed: %6" V8_PTR_PREFIX "d KB\n",
+           code_space_->SizeOfObjects() / KB,
+           code_space_->Available() / KB,
+           code_space_->CommittedMemory() / KB);
+  PrintPID("Map space,          used: %6" V8_PTR_PREFIX "d KB"
+               ", available: %6" V8_PTR_PREFIX "d KB"
+               ", committed: %6" V8_PTR_PREFIX "d KB\n",
+           map_space_->SizeOfObjects() / KB,
+           map_space_->Available() / KB,
+           map_space_->CommittedMemory() / KB);
+  PrintPID("Cell space,         used: %6" V8_PTR_PREFIX "d KB"
+               ", available: %6" V8_PTR_PREFIX "d KB"
+               ", committed: %6" V8_PTR_PREFIX "d KB\n",
+           cell_space_->SizeOfObjects() / KB,
+           cell_space_->Available() / KB,
+           cell_space_->CommittedMemory() / KB);
+  PrintPID("Large object space, used: %6" V8_PTR_PREFIX "d KB"
+               ", available: %6" V8_PTR_PREFIX "d KB"
+               ", committed: %6" V8_PTR_PREFIX "d KB\n",
+           lo_space_->SizeOfObjects() / KB,
+           lo_space_->Available() / KB,
+           lo_space_->CommittedMemory() / KB);
+  PrintPID("All spaces,         used: %6" V8_PTR_PREFIX "d KB"
+               ", available: %6" V8_PTR_PREFIX "d KB"
+               ", committed: %6" V8_PTR_PREFIX "d KB\n",
+           this->SizeOfObjects() / KB,
+           this->Available() / KB,
+           this->CommittedMemory() / KB);
+  PrintPID("Total time spent in GC  : %d ms\n", total_gc_time_ms_);
 }
 
 
@@ -383,25 +423,30 @@ void Heap::GarbageCollectionPrologue() {
   ClearJSFunctionResultCaches();
   gc_count_++;
   unflattened_strings_length_ = 0;
-#ifdef DEBUG
-  ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
-  allow_allocation(false);
 
+  if (FLAG_flush_code && FLAG_flush_code_incrementally) {
+    mark_compact_collector()->EnableCodeFlushing(true);
+  }
+
+#ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
     Verify();
   }
+#endif
+
+#ifdef DEBUG
+  ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
+  allow_allocation(false);
 
   if (FLAG_gc_verbose) Print();
-#endif  // DEBUG
 
-#if defined(DEBUG)
   ReportStatisticsBeforeGC();
 #endif  // DEBUG
 
-  LiveObjectList::GCPrologue();
   store_buffer()->GCPrologue();
 }
 
+
 intptr_t Heap::SizeOfObjects() {
   intptr_t total = 0;
   AllSpaces spaces;
@@ -411,17 +456,33 @@ intptr_t Heap::SizeOfObjects() {
   return total;
 }
 
+
+void Heap::RepairFreeListsAfterBoot() {
+  PagedSpaces spaces;
+  for (PagedSpace* space = spaces.next();
+       space != NULL;
+       space = spaces.next()) {
+    space->RepairFreeListsAfterBoot();
+  }
+}
+
+
 void Heap::GarbageCollectionEpilogue() {
   store_buffer()->GCEpilogue();
-  LiveObjectList::GCEpilogue();
-#ifdef DEBUG
-  allow_allocation(true);
-  ZapFromSpace();
 
+  // In release mode, we only zap the from space under heap verification.
+  if (Heap::ShouldZapGarbage()) {
+    ZapFromSpace();
+  }
+
+#ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
     Verify();
   }
+#endif
 
+#ifdef DEBUG
+  allow_allocation(true);
   if (FLAG_print_global_handles) isolate_->global_handles()->Print();
   if (FLAG_print_handles) PrintHandles();
   if (FLAG_gc_verbose) Print();
@@ -435,12 +496,64 @@ void Heap::GarbageCollectionEpilogue() {
       symbol_table()->Capacity());
   isolate_->counters()->number_of_symbols()->Set(
       symbol_table()->NumberOfElements());
+
+  if (CommittedMemory() > 0) {
+    isolate_->counters()->external_fragmentation_total()->AddSample(
+        static_cast<int>(100 - (SizeOfObjects() * 100.0) / CommittedMemory()));
+
+    isolate_->counters()->heap_fraction_map_space()->AddSample(
+        static_cast<int>(
+            (map_space()->CommittedMemory() * 100.0) / CommittedMemory()));
+    isolate_->counters()->heap_fraction_cell_space()->AddSample(
+        static_cast<int>(
+            (cell_space()->CommittedMemory() * 100.0) / CommittedMemory()));
+
+    isolate_->counters()->heap_sample_total_committed()->AddSample(
+        static_cast<int>(CommittedMemory() / KB));
+    isolate_->counters()->heap_sample_total_used()->AddSample(
+        static_cast<int>(SizeOfObjects() / KB));
+    isolate_->counters()->heap_sample_map_space_committed()->AddSample(
+        static_cast<int>(map_space()->CommittedMemory() / KB));
+    isolate_->counters()->heap_sample_cell_space_committed()->AddSample(
+        static_cast<int>(cell_space()->CommittedMemory() / KB));
+  }
+
+#define UPDATE_COUNTERS_FOR_SPACE(space)                                     \
+  isolate_->counters()->space##_bytes_available()->Set(                      \
+      static_cast<int>(space()->Available()));                               \
+  isolate_->counters()->space##_bytes_committed()->Set(                      \
+      static_cast<int>(space()->CommittedMemory()));                         \
+  isolate_->counters()->space##_bytes_used()->Set(                           \
+      static_cast<int>(space()->SizeOfObjects()));
+#define UPDATE_FRAGMENTATION_FOR_SPACE(space)                                \
+  if (space()->CommittedMemory() > 0) {                                      \
+    isolate_->counters()->external_fragmentation_##space()->AddSample(       \
+        static_cast<int>(100 -                                               \
+            (space()->SizeOfObjects() * 100.0) / space()->CommittedMemory())); \
+  }
+#define UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(space)                   \
+  UPDATE_COUNTERS_FOR_SPACE(space)                                           \
+  UPDATE_FRAGMENTATION_FOR_SPACE(space)
+
+  UPDATE_COUNTERS_FOR_SPACE(new_space)
+  UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(old_pointer_space)
+  UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(old_data_space)
+  UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(code_space)
+  UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(map_space)
+  UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(cell_space)
+  UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(lo_space)
+#undef UPDATE_COUNTERS_FOR_SPACE
+#undef UPDATE_FRAGMENTATION_FOR_SPACE
+#undef UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE
+
 #if defined(DEBUG)
   ReportStatisticsAfterGC();
 #endif  // DEBUG
 #ifdef ENABLE_DEBUGGER_SUPPORT
   isolate_->debug()->AfterGarbageCollection();
 #endif  // ENABLE_DEBUGGER_SUPPORT
+
+  error_object_list_.DeferredFormatStackTrace(isolate());
 }
 
 
@@ -506,7 +619,7 @@ bool Heap::CollectGarbage(AllocationSpace space,
   }
 
   if (collector == MARK_COMPACTOR &&
-      !mark_compact_collector()->
+      !mark_compact_collector()->abort_incremental_marking() &&
       !incremental_marking()->IsStopped() &&
      !incremental_marking()->should_hurry() &&
      FLAG_incremental_marking_steps) {
@@ -534,22 +647,24 @@ bool Heap::CollectGarbage(AllocationSpace space,
   // Tell the tracer which collector we've selected.
   tracer.set_collector(collector);
 
-
-
-
-
-
-
-
+  {
+    HistogramTimerScope histogram_timer_scope(
+        (collector == SCAVENGER) ? isolate_->counters()->gc_scavenger()
+                                 : isolate_->counters()->gc_compactor());
+    next_gc_likely_to_collect_more =
+        PerformGarbageCollection(collector, &tracer);
+  }
 
   GarbageCollectionEpilogue();
 }
 
-
-
-
-  incremental_marking()->
-
+  // Start incremental marking for the next cycle. The heap snapshot
+  // generator needs incremental marking to stay off after it aborted.
+  if (!mark_compact_collector()->abort_incremental_marking() &&
+      incremental_marking()->IsStopped() &&
+      incremental_marking()->WorthActivating() &&
+      NextGCIsLikelyToBeFull()) {
+    incremental_marking()->Start();
   }
 
   return next_gc_likely_to_collect_more;
@@ -566,7 +681,30 @@ void Heap::PerformScavenge() {
 }
 
 
-
+void Heap::MoveElements(FixedArray* array,
+                        int dst_index,
+                        int src_index,
+                        int len) {
+  if (len == 0) return;
+
+  ASSERT(array->map() != HEAP->fixed_cow_array_map());
+  Object** dst_objects = array->data_start() + dst_index;
+  memmove(dst_objects,
+          array->data_start() + src_index,
+          len * kPointerSize);
+  if (!InNewSpace(array)) {
+    for (int i = 0; i < len; i++) {
+      // TODO(hpayer): check store buffer for entries
+      if (InNewSpace(dst_objects[i])) {
+        RecordWrite(array->address(), array->OffsetOfElementAt(dst_index + i));
+      }
+    }
+  }
+  incremental_marking()->RecordWrites(array);
+}
+
+
+#ifdef VERIFY_HEAP
 // Helper class for verifying the symbol table.
 class SymbolTableVerifier : public ObjectVisitor {
  public:
@@ -575,20 +713,18 @@ class SymbolTableVerifier : public ObjectVisitor {
     for (Object** p = start; p < end; p++) {
       if ((*p)->IsHeapObject()) {
         // Check that the symbol is actually a symbol.
-
+        CHECK((*p)->IsTheHole() || (*p)->IsUndefined() || (*p)->IsSymbol());
       }
     }
   }
 };
-#endif // DEBUG
 
 
 static void VerifySymbolTable() {
-#ifdef DEBUG
   SymbolTableVerifier verifier;
   HEAP->symbol_table()->IterateElements(&verifier);
-#endif // DEBUG
 }
+#endif // VERIFY_HEAP
 
 
 static bool AbortIncrementalMarkingAndCollectGarbage(
@@ -603,67 +739,42 @@ static bool AbortIncrementalMarkingAndCollectGarbage(
 
 
 void Heap::ReserveSpace(
-    int
-
-    int data_space_size,
-    int code_space_size,
-    int map_space_size,
-    int cell_space_size,
-    int large_object_size) {
-  NewSpace* new_space = Heap::new_space();
-  PagedSpace* old_pointer_space = Heap::old_pointer_space();
-  PagedSpace* old_data_space = Heap::old_data_space();
-  PagedSpace* code_space = Heap::code_space();
-  PagedSpace* map_space = Heap::map_space();
-  PagedSpace* cell_space = Heap::cell_space();
-  LargeObjectSpace* lo_space = Heap::lo_space();
+    int *sizes,
+    Address *locations_out) {
   bool gc_performed = true;
   int counter = 0;
   static const int kThreshold = 20;
   while (gc_performed && counter++ < kThreshold) {
     gc_performed = false;
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    }
-    // We add a slack-factor of 2 in order to have space for a series of
-    // large-object allocations that are only just larger than the page size.
-    large_object_size *= 2;
-    // The ReserveSpace method on the large object space checks how much
-    // we can expand the old generation. This includes expansion caused by
-    // allocation in the other spaces.
-    large_object_size += cell_space_size + map_space_size + code_space_size +
-        data_space_size + pointer_space_size;
-    if (!(lo_space->ReserveSpace(large_object_size))) {
-      AbortIncrementalMarkingAndCollectGarbage(this, LO_SPACE,
-          "failed to reserve space in the large object space");
-      gc_performed = true;
+    ASSERT(NEW_SPACE == FIRST_PAGED_SPACE - 1);
+    for (int space = NEW_SPACE; space <= LAST_PAGED_SPACE; space++) {
+      if (sizes[space] != 0) {
+        MaybeObject* allocation;
+        if (space == NEW_SPACE) {
+          allocation = new_space()->AllocateRaw(sizes[space]);
+        } else {
+          allocation = paged_space(space)->AllocateRaw(sizes[space]);
+        }
+        FreeListNode* node;
+        if (!allocation->To<FreeListNode>(&node)) {
+          if (space == NEW_SPACE) {
+            Heap::CollectGarbage(NEW_SPACE,
+                                 "failed to reserve space in the new space");
+          } else {
+            AbortIncrementalMarkingAndCollectGarbage(
+                this,
+                static_cast<AllocationSpace>(space),
+                "failed to reserve space in paged space");
+          }
+          gc_performed = true;
+          break;
+        } else {
+          // Mark with a free list node, in case we have a GC before
+          // deserializing.
+          node->set_size(this, sizes[space]);
+          locations_out[space] = node->address();
+        }
+      }
    }
  }
 
@@ -691,7 +802,7 @@ void Heap::EnsureFromSpaceIsCommitted() {
 void Heap::ClearJSFunctionResultCaches() {
   if (isolate_->bootstrapper()->IsActive()) return;
 
-  Object* context =
+  Object* context = native_contexts_list_;
   while (!context->IsUndefined()) {
     // Get the caches for this context. GC can happen when the context
     // is not fully initialized, so the caches can be undefined.
@@ -718,7 +829,7 @@ void Heap::ClearNormalizedMapCaches() {
     return;
   }
 
-  Object* context =
+  Object* context = native_contexts_list_;
   while (!context->IsUndefined()) {
     // GC can happen when the context is not fully initialized,
     // so the cache can be undefined.
@@ -770,22 +881,19 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
     PROFILE(isolate_, CodeMovingGCEvent());
   }
 
+#ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
     VerifySymbolTable();
   }
-
-    ASSERT(!allocation_allowed_);
-    GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
-    global_gc_prologue_callback_();
-  }
+#endif
 
   GCType gc_type =
       collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge;
 
-
-
-
-
+  {
+    GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
+    VMState state(isolate_, EXTERNAL);
+    CallGCPrologueCallbacks(gc_type);
   }
 
   EnsureFromSpaceIsCommitted();
@@ -847,8 +955,8 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
     // have to limit maximal capacity of the young generation.
     new_space_high_promotion_mode_active_ = true;
     if (FLAG_trace_gc) {
-
-
+      PrintPID("Limited new space size due to high promotion rate: %d MB\n",
+               new_space_.InitialCapacity() / MB);
     }
   } else if (new_space_high_promotion_mode_active_ &&
       IsStableOrDecreasingSurvivalTrend() &&
@@ -858,8 +966,8 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
     // to grow again.
     new_space_high_promotion_mode_active_ = false;
     if (FLAG_trace_gc) {
-
-
+      PrintPID("Unlimited new space size due to low promotion rate: %d MB\n",
+               new_space_.MaximumCapacity() / MB);
    }
  }
 
@@ -870,11 +978,16 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
 
   isolate_->counters()->objs_since_last_young()->Set(0);
 
+  // Callbacks that fire after this point might trigger nested GCs and
+  // restart incremental marking, the assertion can't be moved down.
+  ASSERT(collector == SCAVENGER || incremental_marking()->IsStopped());
+
   gc_post_processing_depth_++;
   { DisableAssertNoAllocation allow_allocation;
     GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
     next_gc_likely_to_collect_more =
-        isolate_->global_handles()->PostGarbageCollectionProcessing(
+        isolate_->global_handles()->PostGarbageCollectionProcessing(
+            collector, tracer);
   }
   gc_post_processing_depth_--;
 
@@ -887,26 +1000,46 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
       amount_of_external_allocated_memory_;
   }
 
-
-  for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) {
-    if (gc_type & gc_epilogue_callbacks_[i].gc_type) {
-      gc_epilogue_callbacks_[i].callback(gc_type, callback_flags);
-    }
-  }
-
-  if (collector == MARK_COMPACTOR && global_gc_epilogue_callback_) {
-    ASSERT(!allocation_allowed_);
+  {
     GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
-
+    VMState state(isolate_, EXTERNAL);
+    CallGCEpilogueCallbacks(gc_type);
   }
+
+#ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
     VerifySymbolTable();
   }
+#endif
 
   return next_gc_likely_to_collect_more;
 }
 
 
+void Heap::CallGCPrologueCallbacks(GCType gc_type) {
+  if (gc_type == kGCTypeMarkSweepCompact && global_gc_prologue_callback_) {
+    global_gc_prologue_callback_();
+  }
+  for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) {
+    if (gc_type & gc_prologue_callbacks_[i].gc_type) {
+      gc_prologue_callbacks_[i].callback(gc_type, kNoGCCallbackFlags);
+    }
+  }
+}
+
+
+void Heap::CallGCEpilogueCallbacks(GCType gc_type) {
+  for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) {
+    if (gc_type & gc_epilogue_callbacks_[i].gc_type) {
+      gc_epilogue_callbacks_[i].callback(gc_type, kNoGCCallbackFlags);
+    }
+  }
+  if (gc_type == kGCTypeMarkSweepCompact && global_gc_epilogue_callback_) {
+    global_gc_epilogue_callback_();
+  }
+}
+
+
 void Heap::MarkCompact(GCTracer* tracer) {
   gc_state_ = MARK_COMPACT;
   LOG(isolate_, ResourceEvent("markcompact", "begin"));
@@ -928,7 +1061,7 @@ void Heap::MarkCompact(GCTracer* tracer) {
 
   contexts_disposed_ = 0;
 
-
+  flush_monomorphic_ics_ = false;
 }
 
 
@@ -938,7 +1071,8 @@ void Heap::MarkCompactPrologue() {
   isolate_->keyed_lookup_cache()->Clear();
   isolate_->context_slot_cache()->Clear();
   isolate_->descriptor_lookup_cache()->Clear();
-
+  RegExpResultsCache::Clear(string_split_cache());
+  RegExpResultsCache::Clear(regexp_multiple_cache());
 
   isolate_->compilation_cache()->MarkCompactPrologue();
 
@@ -983,7 +1117,7 @@ class ScavengeVisitor: public ObjectVisitor {
 };
 
 
-#ifdef
+#ifdef VERIFY_HEAP
 // Visitor class to verify pointers in code or data space do not point into
 // new space.
 class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor {
@@ -991,7 +1125,7 @@ class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor {
   void VisitPointers(Object** start, Object**end) {
     for (Object** current = start; current < end; current++) {
       if ((*current)->IsHeapObject()) {
-
+        CHECK(!HEAP->InNewSpace(HeapObject::cast(*current)));
      }
    }
  }
@@ -1016,7 +1150,7 @@ static void VerifyNonPointerSpacePointers() {
     object->Iterate(&v);
   }
 }
-#endif
+#endif // VERIFY_HEAP
 
 
 void Heap::CheckNewSpaceExpansionCriteria() {
@@ -1154,7 +1288,9 @@ class ScavengeWeakObjectRetainer : public WeakObjectRetainer {
 
 
 void Heap::Scavenge() {
-
+  RelocationLock relocation_lock(this);
+
+#ifdef VERIFY_HEAP
   if (FLAG_verify_heap) VerifyNonPointerSpacePointers();
 #endif
 
@@ -1175,7 +1311,8 @@ void Heap::Scavenge() {
 
   incremental_marking()->PrepareForScavenge();
 
-
+  paged_space(OLD_DATA_SPACE)->EnsureSweeperProgress(new_space_.Size());
+  paged_space(OLD_POINTER_SPACE)->EnsureSweeperProgress(new_space_.Size());
 
   // Flip the semispaces. After flipping, to space is empty, from space has
   // live objects.
@@ -1220,20 +1357,34 @@ void Heap::Scavenge() {
 
   // Copy objects reachable from cells by scavenging cell values directly.
   HeapObjectIterator cell_iterator(cell_space_);
-  for (HeapObject*
-
-
-
-
-
+  for (HeapObject* heap_object = cell_iterator.Next();
+       heap_object != NULL;
+       heap_object = cell_iterator.Next()) {
+    if (heap_object->IsJSGlobalPropertyCell()) {
+      JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(heap_object);
+      Address value_address = cell->ValueAddress();
      scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address));
    }
  }
 
-  //
-
+  // Copy objects reachable from the code flushing candidates list.
+  MarkCompactCollector* collector = mark_compact_collector();
+  if (collector->is_code_flushing_enabled()) {
+    collector->code_flusher()->IteratePointersToFromSpace(&scavenge_visitor);
+  }
+
+  // Scavenge object reachable from the native contexts list directly.
+  scavenge_visitor.VisitPointer(BitCast<Object**>(&native_contexts_list_));
 
   new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
+
+  while (isolate()->global_handles()->IterateObjectGroups(
+      &scavenge_visitor, &IsUnscavengedHeapObject)) {
+    new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
+  }
+  isolate()->global_handles()->RemoveObjectGroups();
+  isolate()->global_handles()->RemoveImplicitRefGroups();
+
   isolate_->global_handles()->IdentifyNewSpaceWeakIndependentHandles(
       &IsUnscavengedHeapObject);
   isolate_->global_handles()->IterateNewSpaceWeakIndependentRoots(
@@ -1243,9 +1394,10 @@ void Heap::Scavenge() {
   UpdateNewSpaceReferencesInExternalStringTable(
       &UpdateNewSpaceReferenceInExternalStringTableEntry);
 
+  error_object_list_.UpdateReferencesInNewSpace(this);
+
   promotion_queue_.Destroy();
 
-  LiveObjectList::UpdateReferencesForScavengeGC();
   if (!FLAG_watch_ic_patching) {
     isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
   }
@@ -1291,9 +1443,11 @@ String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
 
 void Heap::UpdateNewSpaceReferencesInExternalStringTable(
     ExternalStringTableUpdaterCallback updater_func) {
+#ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
     external_string_table_.Verify();
   }
+#endif
 
   if (external_string_table_.new_space_strings_.is_empty()) return;
 
@@ -1391,7 +1545,7 @@ void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
   Object* undefined = undefined_value();
   Object* head = undefined;
   Context* tail = NULL;
-  Object* candidate =
+  Object* candidate = native_contexts_list_;
 
   // We don't record weak slots during marking or scavenges.
   // Instead we do it once when we complete mark-compact cycle.
@@ -1464,20 +1618,47 @@ void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
   }
 
   // Update the head of the list of contexts.
-
+  native_contexts_list_ = head;
 }
 
 
 void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) {
   AssertNoAllocation no_allocation;
 
-
+  // Both the external string table and the symbol table may contain
+  // external strings, but neither lists them exhaustively, nor is the
+  // intersection set empty. Therefore we iterate over the external string
+  // table first, ignoring symbols, and then over the symbol table.
+
+  class ExternalStringTableVisitorAdapter : public ObjectVisitor {
+   public:
+    explicit ExternalStringTableVisitorAdapter(
+        v8::ExternalResourceVisitor* visitor) : visitor_(visitor) {}
+    virtual void VisitPointers(Object** start, Object** end) {
+      for (Object** p = start; p < end; p++) {
+        // Visit non-symbol external strings,
+        // since symbols are listed in the symbol table.
+        if (!(*p)->IsSymbol()) {
+          ASSERT((*p)->IsExternalString());
+          visitor_->VisitExternalString(Utils::ToLocal(
+              Handle<String>(String::cast(*p))));
+        }
+      }
+    }
+   private:
+    v8::ExternalResourceVisitor* visitor_;
+  } external_string_table_visitor(visitor);
+
+  external_string_table_.Iterate(&external_string_table_visitor);
+
+  class SymbolTableVisitorAdapter : public ObjectVisitor {
   public:
-    explicit
-        : visitor_(visitor) {}
+    explicit SymbolTableVisitorAdapter(
+        v8::ExternalResourceVisitor* visitor) : visitor_(visitor) {}
    virtual void VisitPointers(Object** start, Object** end) {
      for (Object** p = start; p < end; p++) {
        if ((*p)->IsExternalString()) {
+          ASSERT((*p)->IsSymbol());
          visitor_->VisitExternalString(Utils::ToLocal(
              Handle<String>(String::cast(*p))));
        }
@@ -1485,8 +1666,9 @@ void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) {
     }
    private:
     v8::ExternalResourceVisitor* visitor_;
-  }
-
+  } symbol_table_visitor(visitor);
+
+  symbol_table()->IterateElements(&symbol_table_visitor);
 }
 
 
@@ -1583,14 +1765,14 @@ template<MarksHandling marks_handling,
 class ScavengingVisitor : public StaticVisitorBase {
  public:
   static void Initialize() {
-    table_.Register(
+    table_.Register(kVisitSeqOneByteString, &EvacuateSeqOneByteString);
     table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString);
     table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate);
     table_.Register(kVisitByteArray, &EvacuateByteArray);
     table_.Register(kVisitFixedArray, &EvacuateFixedArray);
     table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
 
-    table_.Register(
+    table_.Register(kVisitNativeContext,
         &ObjectEvacuationStrategy<POINTER_OBJECT>::
             template VisitSpecialized<Context::kSize>);
 
@@ -1676,7 +1858,7 @@ class ScavengingVisitor : public StaticVisitorBase {
     RecordCopiedObject(heap, target);
     HEAP_PROFILE(heap, ObjectMoveEvent(source->address(), target->address()));
     Isolate* isolate = heap->isolate();
-    if (isolate->logger()->
+    if (isolate->logger()->is_logging_code_events() ||
         CpuProfiler::is_profiling(isolate)) {
       if (target->IsSharedFunctionInfo()) {
         PROFILE(isolate, SharedFunctionInfoMoveEvent(
@@ -1827,11 +2009,11 @@ class ScavengingVisitor : public StaticVisitorBase {
   }
 
 
-  static inline void
+  static inline void EvacuateSeqOneByteString(Map* map,
       HeapObject** slot,
       HeapObject* object) {
-    int object_size =
-
+    int object_size = SeqOneByteString::cast(object)->
+        SeqOneByteStringSize(map->instance_type());
     EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
         map, slot, object, object_size);
   }
@@ -1984,9 +2166,8 @@ void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
 MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type,
     int instance_size) {
   Object* result;
-
-
-  }
+  MaybeObject* maybe_result = AllocateRawMap();
+  if (!maybe_result->ToObject(&result)) return maybe_result;
 
   // Map::cast cannot be used due to uninitialized map field.
   reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
@@ -1999,6 +2180,9 @@ MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type,
   reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
   reinterpret_cast<Map*>(result)->set_bit_field(0);
   reinterpret_cast<Map*>(result)->set_bit_field2(0);
+  int bit_field3 = Map::EnumLengthBits::encode(Map::kInvalidEnumCache) |
+                   Map::OwnsDescriptors::encode(true);
+  reinterpret_cast<Map*>(result)->set_bit_field3(bit_field3);
   return result;
 }
 
@@ -2007,9 +2191,8 @@ MaybeObject* Heap::AllocateMap(InstanceType instance_type,
     int instance_size,
     ElementsKind elements_kind) {
   Object* result;
-
-
-  }
+  MaybeObject* maybe_result = AllocateRawMap();
+  if (!maybe_result->To(&result)) return maybe_result;
 
   Map* map = reinterpret_cast<Map*>(result);
   map->set_map_no_write_barrier(meta_map());
@@ -2021,20 +2204,19 @@ MaybeObject* Heap::AllocateMap(InstanceType instance_type,
   map->set_instance_size(instance_size);
   map->set_inobject_properties(0);
   map->set_pre_allocated_property_fields(0);
-  map->init_instance_descriptors();
   map->set_code_cache(empty_fixed_array(), SKIP_WRITE_BARRIER);
-  map->
+  map->set_dependent_codes(DependentCodes::cast(empty_fixed_array()),
+                           SKIP_WRITE_BARRIER);
+  map->init_back_pointer(undefined_value());
   map->set_unused_property_fields(0);
+  map->set_instance_descriptors(empty_descriptor_array());
   map->set_bit_field(0);
   map->set_bit_field2(1 << Map::kIsExtensible);
+  int bit_field3 = Map::EnumLengthBits::encode(Map::kInvalidEnumCache) |
+                   Map::OwnsDescriptors::encode(true);
+  map->set_bit_field3(bit_field3);
   map->set_elements_kind(elements_kind);
 
-  // If the map object is aligned fill the padding area with Smi 0 objects.
-  if (Map::kPadStart < Map::kSize) {
-    memset(reinterpret_cast<byte*>(map) + Map::kPadStart - kHeapObjectTag,
-           0,
-           Map::kSize - Map::kPadStart);
-  }
   return map;
 }
 
@@ -2071,8 +2253,7 @@ MaybeObject* Heap::AllocateTypeFeedbackInfo() {
   { MaybeObject* maybe_info = AllocateStruct(TYPE_FEEDBACK_INFO_TYPE);
     if (!maybe_info->To(&info)) return maybe_info;
   }
-  info->
-  info->set_ic_with_type_info_count(0);
+  info->initialize_storage();
   info->set_type_feedback_cells(TypeFeedbackCells::cast(empty_fixed_array()),
       SKIP_WRITE_BARRIER);
   return info;
@@ -2160,17 +2341,21 @@ bool Heap::CreateInitialMaps() {
   set_empty_descriptor_array(DescriptorArray::cast(obj));
 
   // Fix the instance_descriptors for the existing maps.
-  meta_map()->init_instance_descriptors();
   meta_map()->set_code_cache(empty_fixed_array());
-  meta_map()->
+  meta_map()->set_dependent_codes(DependentCodes::cast(empty_fixed_array()));
+  meta_map()->init_back_pointer(undefined_value());
+  meta_map()->set_instance_descriptors(empty_descriptor_array());
 
-  fixed_array_map()->init_instance_descriptors();
   fixed_array_map()->set_code_cache(empty_fixed_array());
-  fixed_array_map()->
+  fixed_array_map()->set_dependent_codes(
+      DependentCodes::cast(empty_fixed_array()));
+  fixed_array_map()->init_back_pointer(undefined_value());
+  fixed_array_map()->set_instance_descriptors(empty_descriptor_array());
 
-  oddball_map()->init_instance_descriptors();
   oddball_map()->set_code_cache(empty_fixed_array());
-  oddball_map()->
+  oddball_map()->set_dependent_codes(DependentCodes::cast(empty_fixed_array()));
+  oddball_map()->init_back_pointer(undefined_value());
+  oddball_map()->set_instance_descriptors(empty_descriptor_array());
 
   // Fix prototype object for existing maps.
   meta_map()->set_prototype(null_value());
@@ -2378,9 +2563,16 @@ bool Heap::CreateInitialMaps() {
       AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
     if (!maybe_obj->ToObject(&obj)) return false;
   }
-  Map
-
-
+  set_global_context_map(Map::cast(obj));
+
+  { MaybeObject* maybe_obj =
+        AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
+    if (!maybe_obj->ToObject(&obj)) return false;
+  }
+  Map* native_context_map = Map::cast(obj);
+  native_context_map->set_dictionary_map(true);
+  native_context_map->set_visitor_id(StaticVisitorBase::kVisitNativeContext);
+  set_native_context_map(native_context_map);
 
   { MaybeObject* maybe_obj = AllocateMap(SHARED_FUNCTION_INFO_TYPE,
         SharedFunctionInfo::kAlignedSize);
@@ -2394,6 +2586,14 @@ bool Heap::CreateInitialMaps() {
   }
   set_message_object_map(Map::cast(obj));
 
+  Map* external_map;
+  { MaybeObject* maybe_obj =
+        AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize + kPointerSize);
+    if (!maybe_obj->To(&external_map)) return false;
+  }
+  external_map->set_is_extensible(false);
+  set_external_map(external_map);
+
   ASSERT(!InNewSpace(empty_fixed_array()));
   return true;
 }
@@ -2612,14 +2812,14 @@ bool Heap::CreateInitialObjects() {
   set_termination_exception(obj);
 
   // Allocate the empty string.
-  { MaybeObject* maybe_obj =
+  { MaybeObject* maybe_obj = AllocateRawOneByteString(0, TENURED);
     if (!maybe_obj->ToObject(&obj)) return false;
   }
   set_empty_string(String::cast(obj));
 
   for (unsigned i = 0; i < ARRAY_SIZE(constant_symbol_table); i++) {
     { MaybeObject* maybe_obj =
-
+          LookupUtf8Symbol(constant_symbol_table[i].contents);
       if (!maybe_obj->ToObject(&obj)) return false;
     }
     roots_[constant_symbol_table[i].index] = String::cast(obj);
@@ -2632,7 +2832,7 @@ bool Heap::CreateInitialObjects() {
   // hash code in place. The hash code for the hidden_symbol is zero to ensure
   // that it will always be at the first entry in property descriptors.
   { MaybeObject* maybe_obj =
-
+        AllocateOneByteSymbol(OneByteVector("", 0), String::kEmptyStringHash);
     if (!maybe_obj->ToObject(&obj)) return false;
   }
   hidden_symbol_ = String::cast(obj);
@@ -2685,26 +2885,41 @@ bool Heap::CreateInitialObjects() {
   }
   set_number_string_cache(FixedArray::cast(obj));
 
-  // Allocate cache for single character
+  // Allocate cache for single character one byte strings.
   { MaybeObject* maybe_obj =
-        AllocateFixedArray(String::
+        AllocateFixedArray(String::kMaxOneByteCharCode + 1, TENURED);
     if (!maybe_obj->ToObject(&obj)) return false;
   }
   set_single_character_string_cache(FixedArray::cast(obj));
 
   // Allocate cache for string split.
-  { MaybeObject* maybe_obj =
-
+  { MaybeObject* maybe_obj = AllocateFixedArray(
+        RegExpResultsCache::kRegExpResultsCacheSize, TENURED);
     if (!maybe_obj->ToObject(&obj)) return false;
   }
   set_string_split_cache(FixedArray::cast(obj));
 
+  { MaybeObject* maybe_obj = AllocateFixedArray(
+        RegExpResultsCache::kRegExpResultsCacheSize, TENURED);
+    if (!maybe_obj->ToObject(&obj)) return false;
+  }
+  set_regexp_multiple_cache(FixedArray::cast(obj));
+
   // Allocate cache for external strings pointing to native source code.
   { MaybeObject* maybe_obj = AllocateFixedArray(Natives::GetBuiltinsCount());
     if (!maybe_obj->ToObject(&obj)) return false;
   }
   set_natives_source_cache(FixedArray::cast(obj));
 
+  // Allocate object to hold object observation state.
+  { MaybeObject* maybe_obj = AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
+    if (!maybe_obj->ToObject(&obj)) return false;
+  }
+  { MaybeObject* maybe_obj = AllocateJSObjectFromMap(Map::cast(obj));
+    if (!maybe_obj->ToObject(&obj)) return false;
+  }
+  set_observation_state(JSObject::cast(obj));
+
   // Handling of script id generation is in FACTORY->NewScript.
   set_last_script_id(undefined_value());
 
@@ -2724,70 +2939,127 @@ bool Heap::CreateInitialObjects() {
 }
 
 
-
-
-
-
-
+bool Heap::RootCanBeWrittenAfterInitialization(Heap::RootListIndex root_index) {
+  RootListIndex writable_roots[] = {
+    kStoreBufferTopRootIndex,
+    kStackLimitRootIndex,
+    kNumberStringCacheRootIndex,
+    kInstanceofCacheFunctionRootIndex,
+    kInstanceofCacheMapRootIndex,
+    kInstanceofCacheAnswerRootIndex,
+    kCodeStubsRootIndex,
+    kNonMonomorphicCacheRootIndex,
+    kPolymorphicCodeCacheRootIndex,
+    kLastScriptIdRootIndex,
+    kEmptyScriptRootIndex,
+    kRealStackLimitRootIndex,
+    kArgumentsAdaptorDeoptPCOffsetRootIndex,
+    kConstructStubDeoptPCOffsetRootIndex,
+    kGetterStubDeoptPCOffsetRootIndex,
+    kSetterStubDeoptPCOffsetRootIndex,
+    kSymbolTableRootIndex,
+  };
+
+  for (unsigned int i = 0; i < ARRAY_SIZE(writable_roots); i++) {
+    if (root_index == writable_roots[i])
+      return true;
+  }
+  return false;
+}
+
+
+Object* RegExpResultsCache::Lookup(Heap* heap,
+                                   String* key_string,
+                                   Object* key_pattern,
+                                   ResultsCacheType type) {
+  FixedArray* cache;
+  if (!key_string->IsSymbol()) return Smi::FromInt(0);
+  if (type == STRING_SPLIT_SUBSTRINGS) {
+    ASSERT(key_pattern->IsString());
+    if (!key_pattern->IsSymbol()) return Smi::FromInt(0);
+    cache = heap->string_split_cache();
+  } else {
+    ASSERT(type == REGEXP_MULTIPLE_INDICES);
+    ASSERT(key_pattern->IsFixedArray());
+    cache = heap->regexp_multiple_cache();
+  }
+
+  uint32_t hash = key_string->Hash();
+  uint32_t index = ((hash & (kRegExpResultsCacheSize - 1)) &
       ~(kArrayEntriesPerCacheEntry - 1));
-  if (cache->get(index + kStringOffset) ==
-      cache->get(index + kPatternOffset) ==
+  if (cache->get(index + kStringOffset) == key_string &&
+      cache->get(index + kPatternOffset) == key_pattern) {
     return cache->get(index + kArrayOffset);
   }
-  index =
-
-
+  index =
+      ((index + kArrayEntriesPerCacheEntry) & (kRegExpResultsCacheSize - 1));
+  if (cache->get(index + kStringOffset) == key_string &&
+      cache->get(index + kPatternOffset) == key_pattern) {
     return cache->get(index + kArrayOffset);
   }
   return Smi::FromInt(0);
 }
 
 
-void
-
-
-
-
-
-
-
+void RegExpResultsCache::Enter(Heap* heap,
+                               String* key_string,
+                               Object* key_pattern,
+                               FixedArray* value_array,
+                               ResultsCacheType type) {
+  FixedArray* cache;
+  if (!key_string->IsSymbol()) return;
+  if (type == STRING_SPLIT_SUBSTRINGS) {
+    ASSERT(key_pattern->IsString());
+    if (!key_pattern->IsSymbol()) return;
+    cache = heap->string_split_cache();
+  } else {
+    ASSERT(type == REGEXP_MULTIPLE_INDICES);
+    ASSERT(key_pattern->IsFixedArray());
+    cache = heap->regexp_multiple_cache();
+  }
+
+  uint32_t hash = key_string->Hash();
+  uint32_t index = ((hash & (kRegExpResultsCacheSize - 1)) &
      ~(kArrayEntriesPerCacheEntry - 1));
   if (cache->get(index + kStringOffset) == Smi::FromInt(0)) {
-    cache->set(index + kStringOffset,
-    cache->set(index + kPatternOffset,
-    cache->set(index + kArrayOffset,
+    cache->set(index + kStringOffset, key_string);
+    cache->set(index + kPatternOffset, key_pattern);
+    cache->set(index + kArrayOffset, value_array);
   } else {
     uint32_t index2 =
-        ((index + kArrayEntriesPerCacheEntry) & (
+        ((index + kArrayEntriesPerCacheEntry) & (kRegExpResultsCacheSize - 1));
     if (cache->get(index2 + kStringOffset) == Smi::FromInt(0)) {
-      cache->set(index2 + kStringOffset,
-      cache->set(index2 + kPatternOffset,
-      cache->set(index2 + kArrayOffset,
+      cache->set(index2 + kStringOffset, key_string);
+      cache->set(index2 + kPatternOffset, key_pattern);
+      cache->set(index2 + kArrayOffset, value_array);
    } else {
      cache->set(index2 + kStringOffset, Smi::FromInt(0));
      cache->set(index2 + kPatternOffset, Smi::FromInt(0));
      cache->set(index2 + kArrayOffset, Smi::FromInt(0));
-      cache->set(index + kStringOffset,
-      cache->set(index + kPatternOffset,
-      cache->set(index + kArrayOffset,
+      cache->set(index + kStringOffset, key_string);
+      cache->set(index + kPatternOffset, key_pattern);
+      cache->set(index + kArrayOffset, value_array);
    }
  }
-
-
-
+  // If the array is a reasonably short list of substrings, convert it into a
+  // list of symbols.
+  if (type == STRING_SPLIT_SUBSTRINGS && value_array->length() < 100) {
+    for (int i = 0; i < value_array->length(); i++) {
+      String* str = String::cast(value_array->get(i));
       Object* symbol;
       MaybeObject* maybe_symbol = heap->LookupSymbol(str);
       if (maybe_symbol->ToObject(&symbol)) {
-
+        value_array->set(i, symbol);
      }
    }
  }
-  array
+  // Convert backing store to a copy-on-write array.
+  value_array->set_map_no_write_barrier(heap->fixed_cow_array_map());
 }
 
 
-void
-  for (int i = 0; i <
+void RegExpResultsCache::Clear(FixedArray* cache) {
+  for (int i = 0; i < kRegExpResultsCacheSize; i++) {
    cache->set(i, Smi::FromInt(0));
  }
 }
@@ -2817,7 +3089,7 @@ void Heap::AllocateFullSizeNumberStringCache() {
   // The idea is to have a small number string cache in the snapshot to keep
   // boot-time memory usage down. If we expand the number string cache already
   // while creating the snapshot then that didn't work out.
-  ASSERT(!Serializer::enabled());
+  ASSERT(!Serializer::enabled() || FLAG_extra_code != NULL);
   MaybeObject* maybe_obj =
       AllocateFixedArray(FullSizeNumberStringCacheLength(), TENURED);
   Object* new_cache;
@@ -2913,7 +3185,7 @@ MaybeObject* Heap::NumberToString(Object* number,
   }
 
   Object* js_string;
-  MaybeObject* maybe_js_string =
+  MaybeObject* maybe_js_string = AllocateStringFromOneByte(CStrVector(str));
   if (maybe_js_string->ToObject(&js_string)) {
     SetNumberStringCache(number, String::cast(js_string));
   }
@@ -3005,6 +3277,7 @@ MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) {
   share->set_name(name);
   Code* illegal = isolate_->builtins()->builtin(Builtins::kIllegal);
   share->set_code(illegal);
+  share->ClearOptimizedCodeMap();
   share->set_scope_info(ScopeInfo::Empty());
   Code* construct_stub =
       isolate_->builtins()->builtin(Builtins::kJSConstructStubGeneric);
@@ -3017,8 +3290,8 @@ MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) {
   share->set_initial_map(undefined_value(), SKIP_WRITE_BARRIER);
   share->set_this_property_assignments(undefined_value(), SKIP_WRITE_BARRIER);
   share->set_ast_node_count(0);
-  share->
-  share->
+  share->set_stress_deopt_counter(FLAG_deopt_every_n_times);
+  share->set_counters(0);
 
   // Set integer fields (smi or int, depending on the architecture).
   share->set_length(0);
@@ -3073,8 +3346,8 @@ static inline bool Between(uint32_t character, uint32_t from, uint32_t to) {
 
 MUST_USE_RESULT static inline MaybeObject* MakeOrFindTwoCharacterString(
     Heap* heap,
-
-
+    uint16_t c1,
+    uint16_t c2) {
   String* symbol;
   // Numeric strings have a different hash algorithm not known by
   // LookupTwoCharsSymbolIfExists, so we skip this step for such strings.
@@ -3083,15 +3356,16 @@ MUST_USE_RESULT static inline MaybeObject* MakeOrFindTwoCharacterString(
     return symbol;
     // Now we know the length is 2, we might as well make use of that fact
     // when building the new string.
-  } else if ((c1 | c2) <= String::
-
+  } else if (static_cast<unsigned>(c1 | c2) <= String::kMaxOneByteCharCodeU) {
+    // We can do this.
+    ASSERT(IsPowerOf2(String::kMaxOneByteCharCodeU + 1));  // because of this.
     Object* result;
-    { MaybeObject* maybe_result = heap->
+    { MaybeObject* maybe_result = heap->AllocateRawOneByteString(2);
      if (!maybe_result->ToObject(&result)) return maybe_result;
    }
-
-    dest[0] = c1;
-    dest[1] = c2;
+    uint8_t* dest = SeqOneByteString::cast(result)->GetChars();
+    dest[0] = static_cast<uint8_t>(c1);
+    dest[1] = static_cast<uint8_t>(c2);
     return result;
   } else {
     Object* result;
@@ -3123,24 +3397,23 @@ MaybeObject* Heap::AllocateConsString(String* first, String* second) {
   // dictionary. Check whether we already have the string in the symbol
   // table to prevent creation of many unneccesary strings.
   if (length == 2) {
-
-
+    uint16_t c1 = first->Get(0);
+    uint16_t c2 = second->Get(0);
     return MakeOrFindTwoCharacterString(this, c1, c2);
   }
 
-  bool
-  bool
-  bool
-
+  bool first_is_one_byte = first->IsOneByteRepresentation();
+  bool second_is_one_byte = second->IsOneByteRepresentation();
+  bool is_one_byte = first_is_one_byte && second_is_one_byte;
   // Make sure that an out of memory exception is thrown if the length
   // of the new cons string is too large.
   if (length > String::kMaxLength || length < 0) {
     isolate()->context()->mark_out_of_memory();
-    return Failure::OutOfMemoryException();
+    return Failure::OutOfMemoryException(0x4);
   }
 
   bool is_ascii_data_in_two_byte_string = false;
-  if (!
+  if (!is_one_byte) {
     // At least one of the strings uses two-byte representation so we
     // can't use the fast case code for short ASCII strings below, but
     // we can try to save memory if all chars actually fit in ASCII.
@@ -3157,37 +3430,37 @@ MaybeObject* Heap::AllocateConsString(String* first, String* second) {
   STATIC_ASSERT(ConsString::kMinLength <= SlicedString::kMinLength);
   ASSERT(first->IsFlat());
   ASSERT(second->IsFlat());
-  if (
+  if (is_one_byte) {
     Object* result;
-    { MaybeObject* maybe_result =
+    { MaybeObject* maybe_result = AllocateRawOneByteString(length);
       if (!maybe_result->ToObject(&result)) return maybe_result;
     }
     // Copy the characters into the new object.
-
+    uint8_t* dest = SeqOneByteString::cast(result)->GetChars();
     // Copy first part.
-    const
+    const uint8_t* src;
     if (first->IsExternalString()) {
       src = ExternalAsciiString::cast(first)->GetChars();
     } else {
-      src =
+      src = SeqOneByteString::cast(first)->GetChars();
     }
     for (int i = 0; i < first_length; i++) *dest++ = src[i];
     // Copy second part.
     if (second->IsExternalString()) {
       src = ExternalAsciiString::cast(second)->GetChars();
     } else {
-      src =
+      src = SeqOneByteString::cast(second)->GetChars();
     }
     for (int i = 0; i < second_length; i++) *dest++ = src[i];
     return result;
   } else {
     if (is_ascii_data_in_two_byte_string) {
       Object* result;
-      { MaybeObject* maybe_result =
+      { MaybeObject* maybe_result = AllocateRawOneByteString(length);
         if (!maybe_result->ToObject(&result)) return maybe_result;
       }
       // Copy the characters into the new object.
-
+      uint8_t* dest = SeqOneByteString::cast(result)->GetChars();
       String::WriteToFlat(first, dest, 0, first_length);
       String::WriteToFlat(second, dest + first_length, 0, second_length);
       isolate_->counters()->string_add_runtime_ext_to_ascii()->Increment();
@@ -3206,7 +3479,7 @@ MaybeObject* Heap::AllocateConsString(String* first, String* second) {
     }
   }
 
-  Map* map = (
+  Map* map = (is_one_byte || is_ascii_data_in_two_byte_string) ?
       cons_ascii_string_map() : cons_string_map();
 
   Object* result;
@@ -3238,8 +3511,8 @@ MaybeObject* Heap::AllocateSubString(String* buffer,
   // Optimization for 2-byte strings often used as keys in a decompression
   // dictionary. Check whether we already have the string in the symbol
   // table to prevent creation of many unneccesary strings.
-
-
+  uint16_t c1 = buffer->Get(start);
+  uint16_t c2 = buffer->Get(start + 1);
   return MakeOrFindTwoCharacterString(this, c1, c2);
 }
 
@@ -3254,17 +3527,17 @@ MaybeObject* Heap::AllocateSubString(String* buffer,
   // WriteToFlat takes care of the case when an indirect string has a
   // different encoding from its underlying string. These encodings may
   // differ because of externalization.
-  bool
-  { MaybeObject* maybe_result =
-      ?
+  bool is_one_byte = buffer->IsOneByteRepresentation();
+  { MaybeObject* maybe_result = is_one_byte
+      ? AllocateRawOneByteString(length, pretenure)
       : AllocateRawTwoByteString(length, pretenure);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   String* string_result = String::cast(result);
   // Copy the characters into the new object.
-  if (
-    ASSERT(string_result->
-
+  if (is_one_byte) {
+    ASSERT(string_result->IsOneByteRepresentation());
+    uint8_t* dest = SeqOneByteString::cast(string_result)->GetChars();
     String::WriteToFlat(buffer, dest, start, end);
   } else {
     ASSERT(string_result->IsTwoByteRepresentation());
@@ -3275,7 +3548,7 @@ MaybeObject* Heap::AllocateSubString(String* buffer,
   }
 
   ASSERT(buffer->IsFlat());
-#if
+#if VERIFY_HEAP
   if (FLAG_verify_heap) {
     buffer->StringVerify();
   }
@@ -3288,7 +3561,7 @@ MaybeObject* Heap::AllocateSubString(String* buffer,
   // indirect ASCII string is pointing to a two-byte string, the two-byte char
   // codes of the underlying string must still fit into ASCII (because
   // externalization must not change char codes).
-  { Map* map = buffer->
+  { Map* map = buffer->IsOneByteRepresentation()
       ? sliced_ascii_string_map()
      : sliced_string_map();
    MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
@@ -3324,7 +3597,7 @@ MaybeObject* Heap::AllocateExternalStringFromAscii(
   size_t length = resource->length();
   if (length > static_cast<size_t>(String::kMaxLength)) {
     isolate()->context()->mark_out_of_memory();
-    return Failure::OutOfMemoryException();
+    return Failure::OutOfMemoryException(0x5);
   }
 
   ASSERT(String::IsAscii(resource->data(), static_cast<int>(length)));
@@ -3349,15 +3622,15 @@ MaybeObject* Heap::AllocateExternalStringFromTwoByte(
   size_t length = resource->length();
   if (length > static_cast<size_t>(String::kMaxLength)) {
     isolate()->context()->mark_out_of_memory();
-    return Failure::OutOfMemoryException();
+    return Failure::OutOfMemoryException(0x6);
   }
 
   // For small strings we check whether the resource contains only
-  //
+  // one byte characters. If yes, we use a different string map.
   static const size_t kAsciiCheckLengthLimit = 32;
-  bool
-      String::
-  Map* map =
+  bool is_one_byte = length <= kAsciiCheckLengthLimit &&
+      String::IsOneByte(resource->data(), static_cast<int>(length));
+  Map* map = is_one_byte ?
       external_string_with_ascii_data_map() : external_string_map();
   Object* result;
   { MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
@@ -3374,14 +3647,15 @@ MaybeObject* Heap::AllocateExternalStringFromTwoByte(
 
 
 MaybeObject* Heap::LookupSingleCharacterStringFromCode(uint16_t code) {
-  if (code <= String::
+  if (code <= String::kMaxOneByteCharCode) {
     Object* value = single_character_string_cache()->get(code);
     if (value != undefined_value()) return value;
 
-
-    buffer[0] = static_cast<
+    uint8_t buffer[1];
+    buffer[0] = static_cast<uint8_t>(code);
     Object* result;
-    MaybeObject* maybe_result =
+    MaybeObject* maybe_result =
+        LookupOneByteSymbol(Vector<const uint8_t>(buffer, 1));
 
     if (!maybe_result->ToObject(&result)) return maybe_result;
     single_character_string_cache()->set(code, result);
@@ -3400,7 +3674,7 @@ MaybeObject* Heap::LookupSingleCharacterStringFromCode(uint16_t code) {
 
 MaybeObject* Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
   if (length < 0 || length > ByteArray::kMaxLength) {
-    return Failure::OutOfMemoryException();
+    return Failure::OutOfMemoryException(0x7);
   }
   if (pretenure == NOT_TENURED) {
     return AllocateByteArray(length);
@@ -3422,7 +3696,7 @@ MaybeObject* Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
 
 MaybeObject* Heap::AllocateByteArray(int length) {
   if (length < 0 || length > ByteArray::kMaxLength) {
-    return Failure::OutOfMemoryException();
+    return Failure::OutOfMemoryException(0x8);
   }
   int size = ByteArray::SizeFor(length);
   AllocationSpace space =
@@ -3492,17 +3766,27 @@ MaybeObject* Heap::CreateCode(const CodeDesc& desc,
   MaybeObject* maybe_result;
   // Large code objects and code objects which should stay at a fixed address
   // are allocated in large object space.
-
+  HeapObject* result;
+  bool force_lo_space = obj_size > code_space()->AreaSize();
+  if (force_lo_space) {
     maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE);
   } else {
     maybe_result = code_space_->AllocateRaw(obj_size);
   }
+  if (!maybe_result->To<HeapObject>(&result)) return maybe_result;
 
-
-
+  if (immovable && !force_lo_space &&
+      // Objects on the first page of each space are never moved.
+      !code_space_->FirstPage()->Contains(result->address())) {
+    // Discard the first code allocation, which was on a page where it could be
+    // moved.
+    CreateFillerObjectAt(result->address(), obj_size);
+    maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE);
+    if (!maybe_result->To<HeapObject>(&result)) return maybe_result;
+  }
 
   // Initialize the object
-
+  result->set_map_no_write_barrier(code_map());
   Code* code = Code::cast(result);
   ASSERT(!isolate_->code_range()->exists() ||
       isolate_->code_range()->contains(code->address()));
@@ -3513,10 +3797,14 @@ MaybeObject* Heap::CreateCode(const CodeDesc& desc,
     code->set_check_type(RECEIVER_MAP_CHECK);
   }
   code->set_deoptimization_data(empty_fixed_array(), SKIP_WRITE_BARRIER);
-  code->
+  code->InitializeTypeFeedbackInfoNoWriteBarrier(undefined_value());
   code->set_handler_table(empty_fixed_array(), SKIP_WRITE_BARRIER);
   code->set_gc_metadata(Smi::FromInt(0));
   code->set_ic_age(global_ic_age_);
+  code->set_prologue_offset(kPrologueOffsetNotSet);
+  if (code->kind() == Code::OPTIMIZED_FUNCTION) {
+    code->set_marked_for_deoptimization(false);
+  }
   // Allow self references to created code object by patching the handle to
   // point to the newly allocated Code object.
   if (!self_reference.is_null()) {
@@ -3529,7 +3817,7 @@ MaybeObject* Heap::CreateCode(const CodeDesc& desc,
|
|
3529
3817
|
// through the self_reference parameter.
|
3530
3818
|
code->CopyFrom(desc);
|
3531
3819
|
|
3532
|
-
#ifdef
|
3820
|
+
#ifdef VERIFY_HEAP
|
3533
3821
|
if (FLAG_verify_heap) {
|
3534
3822
|
code->Verify();
|
3535
3823
|
}
|
@@ -3611,7 +3899,7 @@ MaybeObject* Heap::CopyCode(Code* code, Vector<byte> reloc_info) {
|
|
3611
3899
|
isolate_->code_range()->contains(code->address()));
|
3612
3900
|
new_code->Relocate(new_addr - old_addr);
|
3613
3901
|
|
3614
|
-
#ifdef
|
3902
|
+
#ifdef VERIFY_HEAP
|
3615
3903
|
if (FLAG_verify_heap) {
|
3616
3904
|
code->Verify();
|
3617
3905
|
}
|
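The rewritten CreateCode path allocates once, then reallocates into large-object space when an `immovable` request landed on a page the collector may move. A compilable sketch of that retry shape, with made-up allocators standing in for V8's spaces:

```cpp
#include <cstdint>
#include <cstdlib>

// Sketch of the CreateCode flow above, with invented allocators: allocate
// normally; if the object must never move but landed off the never-moved
// first page, discard that copy (leaving a filler gap) and reallocate in
// a space whose objects are never relocated.
static uint8_t code_space[1 << 16];
static size_t code_top = 0;
static const size_t kFirstPageSize = 4096;  // objects here are never moved

void* AllocateInCodeSpace(size_t size) {
  void* p = code_space + code_top;  // bump allocation stand-in
  code_top += size;
  return p;
}

void* AllocateInLargeObjectSpace(size_t size) {
  return std::malloc(size);  // stand-in: large objects never move
}

void CreateFillerAt(void* p, size_t size) {
  // Real collectors overwrite the gap with a dummy object so heap
  // iteration stays well-formed; a no-op is enough for the sketch.
  (void)p; (void)size;
}

void* AllocateCode(size_t size, bool immovable) {
  void* result = AllocateInCodeSpace(size);
  bool on_first_page =
      static_cast<uint8_t*>(result) < code_space + kFirstPageSize;
  if (immovable && !on_first_page) {
    CreateFillerAt(result, size);               // abandon the movable copy
    result = AllocateInLargeObjectSpace(size);  // retry where nothing moves
  }
  return result;
}
```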
@@ -3658,29 +3946,27 @@ MaybeObject* Heap::AllocateFunctionPrototype(JSFunction* function) {
   // from the function's context, since the function can be from a
   // different context.
   JSFunction* object_function =
-      function->context()->global_context()->object_function();
+      function->context()->native_context()->object_function();
 
   // Each function prototype gets a copy of the object function map.
   // This avoid unwanted sharing of maps between prototypes of different
   // constructors.
   Map* new_map;
   ASSERT(object_function->has_initial_map());
-  { MaybeObject* maybe_map =
-        object_function->initial_map()->Copy();
-    if (!maybe_map->To<Map>(&new_map)) return maybe_map;
-  }
+  MaybeObject* maybe_map = object_function->initial_map()->Copy();
+  if (!maybe_map->To(&new_map)) return maybe_map;
+
   Object* prototype;
-  { MaybeObject* maybe_prototype = AllocateJSObjectFromMap(new_map);
-    if (!maybe_prototype->ToObject(&prototype)) return maybe_prototype;
-  }
+  MaybeObject* maybe_prototype = AllocateJSObjectFromMap(new_map);
+  if (!maybe_prototype->ToObject(&prototype)) return maybe_prototype;
+
   // When creating the prototype for the function we must set its
   // constructor to the function.
-  Object* result;
-  { MaybeObject* maybe_result =
-        JSObject::cast(prototype)->SetLocalPropertyIgnoreAttributes(
-            constructor_symbol(), function, DONT_ENUM);
-    if (!maybe_result->ToObject(&result)) return maybe_result;
-  }
+  MaybeObject* maybe_failure =
+      JSObject::cast(prototype)->SetLocalPropertyIgnoreAttributes(
+          constructor_symbol(), function, DONT_ENUM);
+  if (maybe_failure->IsFailure()) return maybe_failure;
+
   return prototype;
 }
 
@@ -3710,12 +3996,12 @@ MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) {
       !JSFunction::cast(callee)->shared()->is_classic_mode();
   if (strict_mode_callee) {
     boilerplate =
-        isolate()->context()->global_context()->
+        isolate()->context()->native_context()->
             strict_mode_arguments_boilerplate();
     arguments_object_size = kArgumentsObjectSizeStrict;
   } else {
     boilerplate =
-        isolate()->context()->global_context()->arguments_boilerplate();
+        isolate()->context()->native_context()->arguments_boilerplate();
     arguments_object_size = kArgumentsObjectSize;
   }
 
@@ -3781,21 +4067,18 @@ MaybeObject* Heap::AllocateInitialMap(JSFunction* fun) {
   // suggested by the function.
   int instance_size = fun->shared()->CalculateInstanceSize();
   int in_object_properties = fun->shared()->CalculateInObjectProperties();
-  Object* map_obj;
-  { MaybeObject* maybe_map_obj = AllocateMap(JS_OBJECT_TYPE, instance_size);
-    if (!maybe_map_obj->ToObject(&map_obj)) return maybe_map_obj;
-  }
+  Map* map;
+  MaybeObject* maybe_map = AllocateMap(JS_OBJECT_TYPE, instance_size);
+  if (!maybe_map->To(&map)) return maybe_map;
 
   // Fetch or allocate prototype.
   Object* prototype;
   if (fun->has_instance_prototype()) {
     prototype = fun->instance_prototype();
   } else {
-    { MaybeObject* maybe_prototype = AllocateFunctionPrototype(fun);
-      if (!maybe_prototype->ToObject(&prototype)) return maybe_prototype;
-    }
+    MaybeObject* maybe_prototype = AllocateFunctionPrototype(fun);
+    if (!maybe_prototype->To(&prototype)) return maybe_prototype;
   }
-  Map* map = Map::cast(map_obj);
   map->set_inobject_properties(in_object_properties);
   map->set_unused_property_fields(in_object_properties);
   map->set_prototype(prototype);
@@ -3814,21 +4097,17 @@ MaybeObject* Heap::AllocateInitialMap(JSFunction* fun) {
       fun->shared()->ForbidInlineConstructor();
     } else {
       DescriptorArray* descriptors;
-      { MaybeObject* maybe_descriptors = DescriptorArray::Allocate(count);
-        if (!maybe_descriptors->To<DescriptorArray>(&descriptors)) {
-          return maybe_descriptors;
-        }
-      }
+      MaybeObject* maybe_descriptors = DescriptorArray::Allocate(count);
+      if (!maybe_descriptors->To(&descriptors)) return maybe_descriptors;
+
       DescriptorArray::WhitenessWitness witness(descriptors);
       for (int i = 0; i < count; i++) {
         String* name = fun->shared()->GetThisPropertyAssignmentName(i);
         ASSERT(name->IsSymbol());
-        FieldDescriptor field(name, i, NONE);
-        field.SetEnumerationIndex(i);
+        FieldDescriptor field(name, i, NONE, i + 1);
        descriptors->Set(i, &field, witness);
      }
-      descriptors->SetNextEnumerationIndex(count);
-      descriptors->SortUnchecked(witness);
+      descriptors->Sort();
 
       // The descriptors may contain duplicates because the compiler does not
       // guarantee the uniqueness of property names (it would have required
@@ -3837,7 +4116,7 @@ MaybeObject* Heap::AllocateInitialMap(JSFunction* fun) {
       if (HasDuplicates(descriptors)) {
         fun->shared()->ForbidInlineConstructor();
       } else {
-        map->set_instance_descriptors(descriptors);
+        map->InitializeDescriptors(descriptors);
         map->set_pre_allocated_property_fields(count);
         map->set_unused_property_fields(in_object_properties - count);
       }
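The hunks above repeatedly flatten scoped `{ MaybeObject* ... }` blocks into straight-line `To(&x)` checks. The propagation idiom itself, sketched with a generic result type (names are invented, not V8's):

```cpp
#include <cstdio>

// Generic sketch of the MaybeObject / ->To(&out) idiom: every allocator
// returns either a value or a retry-after-GC failure, and callers bail
// out immediately, handing the failure up the stack.
template <typename T>
struct Maybe {
  T value{};
  bool failed = false;
  bool To(T* out) const {  // mirrors maybe->To(&x)
    if (failed) return false;
    *out = value;
    return true;
  }
};

Maybe<int> AllocateMapLike() { return {42, false}; }

Maybe<int> AllocateObjectLike() {
  int map;
  Maybe<int> maybe_map = AllocateMapLike();
  if (!maybe_map.To(&map)) return maybe_map;  // propagate the failure
  return {map + 1, false};
}

int main() {
  int obj;
  if (AllocateObjectLike().To(&obj)) std::printf("allocated %d\n", obj);
}
```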
@@ -3916,7 +4195,7 @@ MaybeObject* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure) {
   InitializeJSObjectFromMap(JSObject::cast(obj),
                             FixedArray::cast(properties),
                             map);
-  ASSERT(JSObject::cast(obj)->HasFastSmiOrObjectElements());
+  ASSERT(JSObject::cast(obj)->HasFastElements());
   return obj;
 }
 
@@ -3944,13 +4223,18 @@ MaybeObject* Heap::AllocateJSObject(JSFunction* constructor,
 }
 
 
-MaybeObject* Heap::AllocateJSModule() {
+MaybeObject* Heap::AllocateJSModule(Context* context, ScopeInfo* scope_info) {
   // Allocate a fresh map. Modules do not have a prototype.
   Map* map;
   MaybeObject* maybe_map = AllocateMap(JS_MODULE_TYPE, JSModule::kSize);
   if (!maybe_map->To(&map)) return maybe_map;
   // Allocate the object based on the map.
-  return AllocateJSObjectFromMap(map, TENURED);
+  JSModule* module;
+  MaybeObject* maybe_module = AllocateJSObjectFromMap(map, TENURED);
+  if (!maybe_module->To(&module)) return maybe_module;
+  module->set_context(context);
+  module->set_scope_info(scope_info);
+  return module;
 }
 
 
@@ -3961,9 +4245,6 @@ MaybeObject* Heap::AllocateJSArrayAndStorage(
     ArrayStorageAllocationMode mode,
     PretenureFlag pretenure) {
   ASSERT(capacity >= length);
-  if (length != 0 && mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE) {
-    elements_kind = GetHoleyElementsKind(elements_kind);
-  }
   MaybeObject* maybe_array = AllocateJSArray(elements_kind, pretenure);
   JSArray* array;
   if (!maybe_array->To(&array)) return maybe_array;
@@ -3976,7 +4257,7 @@ MaybeObject* Heap::AllocateJSArrayAndStorage(
 
   FixedArrayBase* elms;
   MaybeObject* maybe_elms = NULL;
-  if (elements_kind == FAST_DOUBLE_ELEMENTS) {
+  if (IsFastDoubleElementsKind(elements_kind)) {
     if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) {
       maybe_elms = AllocateUninitializedFixedDoubleArray(capacity);
     } else {
@@ -4003,13 +4284,14 @@ MaybeObject* Heap::AllocateJSArrayAndStorage(
 MaybeObject* Heap::AllocateJSArrayWithElements(
     FixedArrayBase* elements,
     ElementsKind elements_kind,
+    int length,
     PretenureFlag pretenure) {
   MaybeObject* maybe_array = AllocateJSArray(elements_kind, pretenure);
   JSArray* array;
   if (!maybe_array->To(&array)) return maybe_array;
 
   array->set_elements(elements);
-  array->set_length(Smi::FromInt(elements->length()));
+  array->set_length(Smi::FromInt(length));
   array->ValidateElements();
   return array;
 }
@@ -4064,6 +4346,7 @@ MaybeObject* Heap::AllocateJSFunctionProxy(Object* handler,
 MaybeObject* Heap::AllocateGlobalObject(JSFunction* constructor) {
   ASSERT(constructor->has_initial_map());
   Map* map = constructor->initial_map();
+  ASSERT(map->is_dictionary_map());
 
   // Make sure no field properties are described in the initial map.
   // This guarantees us that normalizing the properties does not
@@ -4081,13 +4364,11 @@ MaybeObject* Heap::AllocateGlobalObject(JSFunction* constructor) {
   int initial_size = map->instance_type() == JS_GLOBAL_OBJECT_TYPE ? 64 : 512;
 
   // Allocate a dictionary object for backing storage.
-  Object* obj;
-  { MaybeObject* maybe_obj =
-        StringDictionary::Allocate(
-            map->NumberOfDescribedProperties() * 2 + initial_size);
-    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
-  }
-  StringDictionary* dictionary = StringDictionary::cast(obj);
+  StringDictionary* dictionary;
+  MaybeObject* maybe_dictionary =
+      StringDictionary::Allocate(
+          map->NumberOfOwnDescriptors() * 2 + initial_size);
+  if (!maybe_dictionary->To(&dictionary)) return maybe_dictionary;
 
   // The global object might be created from an object template with accessors.
   // Fill these accessors into the dictionary.
@@ -4095,36 +4376,32 @@ MaybeObject* Heap::AllocateGlobalObject(JSFunction* constructor) {
   for (int i = 0; i < descs->number_of_descriptors(); i++) {
     PropertyDetails details = descs->GetDetails(i);
     ASSERT(details.type() == CALLBACKS);  // Only accessors are expected.
-    PropertyDetails d =
-        PropertyDetails(details.attributes(), CALLBACKS, details.index());
+    PropertyDetails d = PropertyDetails(details.attributes(),
+                                        CALLBACKS,
+                                        details.descriptor_index());
     Object* value = descs->GetCallbacksObject(i);
-    { MaybeObject* maybe_value = AllocateJSGlobalPropertyCell(value);
-      if (!maybe_value->ToObject(&value)) return maybe_value;
-    }
+    MaybeObject* maybe_value = AllocateJSGlobalPropertyCell(value);
+    if (!maybe_value->ToObject(&value)) return maybe_value;
 
-    Object* result;
-    { MaybeObject* maybe_result = dictionary->Add(descs->GetKey(i), value, d);
-      if (!maybe_result->ToObject(&result)) return maybe_result;
-    }
-    dictionary = StringDictionary::cast(result);
+    MaybeObject* maybe_added = dictionary->Add(descs->GetKey(i), value, d);
+    if (!maybe_added->To(&dictionary)) return maybe_added;
   }
 
   // Allocate the global object and initialize it with the backing store.
-  JSObject* global;
-  { MaybeObject* maybe_global = Allocate(map, OLD_POINTER_SPACE);
-    if (!maybe_global->To(&global)) return maybe_global;
-  }
+  JSObject* global;
+  MaybeObject* maybe_global = Allocate(map, OLD_POINTER_SPACE);
+  if (!maybe_global->To(&global)) return maybe_global;
+
   InitializeJSObjectFromMap(global, dictionary, map);
 
   // Create a new map for the global object.
-  Map* new_map;
-  { MaybeObject* maybe_map = map->CopyDropDescriptors();
-    if (!maybe_map->To(&new_map)) return maybe_map;
-  }
+  Map* new_map;
+  MaybeObject* maybe_map = map->CopyDropDescriptors();
+  if (!maybe_map->To(&new_map)) return maybe_map;
+  new_map->set_dictionary_map(true);
 
   // Set up the global object as a normalized object.
   global->set_map(new_map);
-  global->map()->clear_instance_descriptors();
   global->set_properties(dictionary);
 
   // Make sure result is a global object with properties in dictionary.
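AllocateGlobalObject above sizes the backing StringDictionary at twice the descriptor count plus a fixed headroom, then re-adds each accessor descriptor as a dictionary entry. A rough analogue with standard containers (types and names invented here):

```cpp
#include <string>
#include <unordered_map>

// Sketch of the global-object setup: the object is born in dictionary
// mode, pre-sized for the template's accessors plus headroom, and each
// map descriptor moves into the dictionary.
struct Property { int attributes; int index; };

std::unordered_map<std::string, Property> MakeGlobalBackingStore(
    const std::unordered_map<std::string, Property>& descriptors,
    bool is_global_object) {
  const size_t initial_size = is_global_object ? 64 : 512;
  std::unordered_map<std::string, Property> dictionary;
  dictionary.reserve(descriptors.size() * 2 + initial_size);
  for (const auto& entry : descriptors) {
    dictionary.insert(entry);  // accessors become dictionary entries
  }
  return dictionary;
}
```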
@@ -4134,7 +4411,8 @@ MaybeObject* Heap::AllocateGlobalObject(JSFunction* constructor) {
 }
 
 
-MaybeObject* Heap::CopyJSObject(JSObject* source) {
+MaybeObject* Heap::CopyJSObject(JSObject* source,
+                                AllocationSiteMode mode) {
   // Never used to copy functions. If functions need to be copied we
   // have to be careful to clear the literals array.
   SLOW_ASSERT(!source->IsJSFunction());
@@ -4144,13 +4422,25 @@ MaybeObject* Heap::CopyJSObject(JSObject* source) {
   int object_size = map->instance_size();
   Object* clone;
 
+  bool track_origin = mode == TRACK_ALLOCATION_SITE &&
+      map->CanTrackAllocationSite();
+
   WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER;
 
   // If we're forced to always allocate, we use the general allocation
   // functions which may leave us with an object in old space.
+  int adjusted_object_size = object_size;
   if (always_allocate()) {
+    // We'll only track origin if we are certain to allocate in new space
+    if (track_origin) {
+      const int kMinFreeNewSpaceAfterGC = InitialSemiSpaceSize() * 3/4;
+      if ((object_size + AllocationSiteInfo::kSize) < kMinFreeNewSpaceAfterGC) {
+        adjusted_object_size += AllocationSiteInfo::kSize;
+      }
+    }
+
     { MaybeObject* maybe_clone =
-          AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE);
+          AllocateRaw(adjusted_object_size, NEW_SPACE, OLD_POINTER_SPACE);
       if (!maybe_clone->ToObject(&clone)) return maybe_clone;
     }
     Address clone_address = HeapObject::cast(clone)->address();
@@ -4163,7 +4453,11 @@ MaybeObject* Heap::CopyJSObject(JSObject* source) {
               (object_size - JSObject::kHeaderSize) / kPointerSize);
   } else {
     wb_mode = SKIP_WRITE_BARRIER;
-    { MaybeObject* maybe_clone = new_space_.AllocateRaw(object_size);
+    if (track_origin) {
+      adjusted_object_size += AllocationSiteInfo::kSize;
+    }
+
+    { MaybeObject* maybe_clone = new_space_.AllocateRaw(adjusted_object_size);
       if (!maybe_clone->ToObject(&clone)) return maybe_clone;
     }
     SLOW_ASSERT(InNewSpace(clone));
@@ -4174,6 +4468,13 @@ MaybeObject* Heap::CopyJSObject(JSObject* source) {
                 object_size);
   }
 
+  if (adjusted_object_size > object_size) {
+    AllocationSiteInfo* alloc_info = reinterpret_cast<AllocationSiteInfo*>(
+        reinterpret_cast<Address>(clone) + object_size);
+    alloc_info->set_map(allocation_site_info_map());
+    alloc_info->set_payload(source);
+  }
+
   SLOW_ASSERT(
       JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind());
   FixedArrayBase* elements = FixedArrayBase::cast(source->elements());
@@ -4232,7 +4533,7 @@ MaybeObject* Heap::ReinitializeJSReceiver(
   SharedFunctionInfo* shared = NULL;
   if (type == JS_FUNCTION_TYPE) {
     String* name;
-    maybe = LookupAsciiSymbol("<freezing call trap>");
+    maybe = LookupOneByteSymbol(STATIC_ASCII_VECTOR("<freezing call trap>"));
     if (!maybe->To<String>(&name)) return maybe;
     maybe = AllocateSharedFunctionInfo(name);
     if (!maybe->To<SharedFunctionInfo>(&shared)) return maybe;
@@ -4253,7 +4554,7 @@ MaybeObject* Heap::ReinitializeJSReceiver(
   map->set_function_with_prototype(true);
   InitializeFunction(JSFunction::cast(object), shared, the_hole_value());
   JSFunction::cast(object)->set_context(
-      isolate()->context()->global_context());
+      isolate()->context()->native_context());
 }
 
 // Put in filler if the new object is smaller than the old.
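CopyJSObject now reserves `AllocationSiteInfo::kSize` extra bytes and writes the tracking record directly after the object's last field. A self-contained sketch of that trailer layout (types and names invented; the caller must supply a buffer with `adjusted_size` bytes free):

```cpp
#include <cstdint>
#include <cstring>

// Sketch of the clone-with-trailer bookkeeping: when allocation-site
// tracking is on, the clone is allocated with extra room and a
// SiteInfo-like record is written immediately after the object.
struct SiteInfo { const void* payload; };

void* CloneWithSiteInfo(const void* source, size_t object_size,
                        bool track_origin, uint8_t* new_space_top) {
  size_t adjusted_size = object_size;
  if (track_origin) adjusted_size += sizeof(SiteInfo);

  uint8_t* clone = new_space_top;           // bump allocation stand-in
  std::memcpy(clone, source, object_size);  // copy the object's fields

  if (adjusted_size > object_size) {
    // The trailer sits at clone + object_size, exactly where a heap
    // walker lands after skipping the object's own size.
    SiteInfo* info = reinterpret_cast<SiteInfo*>(clone + object_size);
    info->payload = source;  // remember what this clone was copied from
  }
  return clone;
}
```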
@@ -4292,61 +4593,56 @@ MaybeObject* Heap::ReinitializeJSGlobalProxy(JSFunction* constructor,
 }
 
 
-MaybeObject* Heap::AllocateStringFromAscii(Vector<const char> string,
+MaybeObject* Heap::AllocateStringFromOneByte(Vector<const uint8_t> string,
                                            PretenureFlag pretenure) {
-  if (string.length() == 1) {
+  int length = string.length();
+  if (length == 1) {
     return Heap::LookupSingleCharacterStringFromCode(string[0]);
   }
   Object* result;
   { MaybeObject* maybe_result =
-        AllocateRawAsciiString(string.length(), pretenure);
+        AllocateRawOneByteString(string.length(), pretenure);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
 
   // Copy the characters into the new object.
-  SeqAsciiString* string_result = SeqAsciiString::cast(result);
-  for (int i = 0; i < string.length(); i++) {
-    string_result->SeqAsciiStringSet(i, string[i]);
-  }
+  CopyChars(SeqOneByteString::cast(result)->GetChars(),
+            string.start(),
+            length);
   return result;
 }
 
 
 MaybeObject* Heap::AllocateStringFromUtf8Slow(Vector<const char> string,
+                                              int non_ascii_start,
                                               PretenureFlag pretenure) {
-  // Count the number of characters in the UTF-8 string and check if
-  // it is an ASCII string.
+  // Continue counting the number of characters in the UTF-8 string, starting
+  // from the first non-ascii character or word.
   Access<UnicodeCache::Utf8Decoder>
       decoder(isolate_->unicode_cache()->utf8_decoder());
-  decoder->Reset(string.start(),
-                 string.length());
-  int chars = 0;
-  while (decoder->has_more()) {
-    uint32_t r = decoder->GetNext();
-    if (r <= unibrow::Utf16::kMaxNonSurrogateCharCode) {
-      chars++;
-    } else {
-      chars += 2;
-    }
-  }
-
+  decoder->Reset(string.start() + non_ascii_start,
+                 string.length() - non_ascii_start);
+  int utf16_length = decoder->Utf16Length();
+  ASSERT(utf16_length > 0);
+  // Allocate string.
   Object* result;
-  { MaybeObject* maybe_result = AllocateRawTwoByteString(chars, pretenure);
+  {
+    int chars = non_ascii_start + utf16_length;
+    MaybeObject* maybe_result = AllocateRawTwoByteString(chars, pretenure);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
-
   // Convert and copy the characters into the new object.
-  SeqTwoByteString* string_result = SeqTwoByteString::cast(result);
-  decoder->Reset(string.start(), string.length());
-  int i = 0;
-  while (i < chars) {
-    uint32_t r = decoder->GetNext();
-    if (r > unibrow::Utf16::kMaxNonSurrogateCharCode) {
-      string_result->Set(i++, unibrow::Utf16::LeadSurrogate(r));
-      string_result->Set(i++, unibrow::Utf16::TrailSurrogate(r));
-    } else {
-      string_result->Set(i++, r);
+  SeqTwoByteString* twobyte = SeqTwoByteString::cast(result);
+  // Copy ascii portion.
+  uint16_t* data = twobyte->GetChars();
+  if (non_ascii_start != 0) {
+    const char* ascii_data = string.start();
+    for (int i = 0; i < non_ascii_start; i++) {
+      *data++ = *ascii_data++;
     }
   }
+  // Now write the remainder.
+  decoder->WriteUtf16(data, utf16_length);
   return result;
 }
 
@@ -4354,20 +4650,18 @@ MaybeObject* Heap::AllocateStringFromUtf8Slow(Vector<const char> string,
 MaybeObject* Heap::AllocateStringFromTwoByte(Vector<const uc16> string,
                                              PretenureFlag pretenure) {
   // Check if the string is an ASCII string.
-  MaybeObject* maybe_result;
-  if (String::IsAscii(string.start(), string.length())) {
-    maybe_result = AllocateRawAsciiString(string.length(), pretenure);
-  } else {  // It's not an ASCII string.
-    maybe_result = AllocateRawTwoByteString(string.length(), pretenure);
-  }
   Object* result;
-  if (!maybe_result->ToObject(&result)) return maybe_result;
+  int length = string.length();
+  const uc16* start = string.start();
 
-  // Copy the characters into the new object, which may be either ASCII or
-  // UTF-16.
-  String* string_result = String::cast(result);
-  for (int i = 0; i < string.length(); i++) {
-    string_result->Set(i, string[i]);
+  if (String::IsOneByte(start, length)) {
+    MaybeObject* maybe_result = AllocateRawOneByteString(length, pretenure);
+    if (!maybe_result->ToObject(&result)) return maybe_result;
+    CopyChars(SeqOneByteString::cast(result)->GetChars(), start, length);
+  } else {  // It's not a one byte string.
+    MaybeObject* maybe_result = AllocateRawTwoByteString(length, pretenure);
+    if (!maybe_result->ToObject(&result)) return maybe_result;
+    CopyChars(SeqTwoByteString::cast(result)->GetChars(), start, length);
   }
   return result;
 }
@@ -4397,35 +4691,71 @@ Map* Heap::SymbolMapForString(String* string) {
 }
 
 
-MaybeObject* Heap::AllocateInternalSymbol(unibrow::CharacterStream* buffer,
-                                          int chars,
-                                          uint32_t hash_field) {
-  ASSERT(chars >= 0);
-  // Ensure the chars matches the number of characters in the buffer.
-  ASSERT(static_cast<unsigned>(chars) == buffer->Utf16Length());
-  // Determine whether the string is ASCII.
-  bool is_ascii = true;
-  while (buffer->has_more()) {
-    if (buffer->GetNext() > unibrow::Utf8::kMaxOneByteChar) {
-      is_ascii = false;
-      break;
+static inline void WriteOneByteData(Vector<const char> vector,
+                                    uint8_t* chars,
+                                    int len) {
+  // Only works for ascii.
+  ASSERT(vector.length() == len);
+  memcpy(chars, vector.start(), len);
+}
+
+static inline void WriteTwoByteData(Vector<const char> vector,
+                                    uint16_t* chars,
+                                    int len) {
+  const uint8_t* stream = reinterpret_cast<const uint8_t*>(vector.start());
+  unsigned stream_length = vector.length();
+  while (stream_length != 0) {
+    unsigned consumed = 0;
+    uint32_t c = unibrow::Utf8::ValueOf(stream, stream_length, &consumed);
+    ASSERT(c != unibrow::Utf8::kBadChar);
+    ASSERT(consumed <= stream_length);
+    stream_length -= consumed;
+    stream += consumed;
+    if (c > unibrow::Utf16::kMaxNonSurrogateCharCode) {
+      len -= 2;
+      if (len < 0) break;
+      *chars++ = unibrow::Utf16::LeadSurrogate(c);
+      *chars++ = unibrow::Utf16::TrailSurrogate(c);
+    } else {
+      len -= 1;
+      if (len < 0) break;
+      *chars++ = c;
     }
   }
-  buffer->Rewind();
+  ASSERT(stream_length == 0);
+  ASSERT(len == 0);
+}
+
+
+static inline void WriteOneByteData(String* s, uint8_t* chars, int len) {
+  ASSERT(s->length() == len);
+  String::WriteToFlat(s, chars, 0, len);
+}
+
+static inline void WriteTwoByteData(String* s, uint16_t* chars, int len) {
+  ASSERT(s->length() == len);
+  String::WriteToFlat(s, chars, 0, len);
+}
 
+
+template<bool is_one_byte, typename T>
+MaybeObject* Heap::AllocateInternalSymbol(T t,
+                                          int chars,
+                                          uint32_t hash_field) {
+  ASSERT(chars >= 0);
   // Compute map and object size.
   int size;
   Map* map;
 
-  if (is_ascii) {
-    if (chars > SeqAsciiString::kMaxLength) {
-      return Failure::OutOfMemoryException();
+  if (is_one_byte) {
+    if (chars > SeqOneByteString::kMaxLength) {
+      return Failure::OutOfMemoryException(0x9);
     }
     map = ascii_symbol_map();
-    size = SeqAsciiString::SizeFor(chars);
+    size = SeqOneByteString::SizeFor(chars);
   } else {
     if (chars > SeqTwoByteString::kMaxLength) {
-      return Failure::OutOfMemoryException();
+      return Failure::OutOfMemoryException(0xa);
     }
     map = symbol_map();
     size = SeqTwoByteString::SizeFor(chars);
@@ -4447,28 +4777,34 @@ MaybeObject* Heap::AllocateInternalSymbol(unibrow::CharacterStream* buffer,
 
   ASSERT_EQ(size, answer->Size());
 
-  // Fill in the characters.
-  int i = 0;
-  while (i < chars) {
-    uint32_t character = buffer->GetNext();
-    if (character > unibrow::Utf16::kMaxNonSurrogateCharCode) {
-      answer->Set(i++, unibrow::Utf16::LeadSurrogate(character));
-      answer->Set(i++, unibrow::Utf16::TrailSurrogate(character));
-    } else {
-      answer->Set(i++, character);
-    }
+  if (is_one_byte) {
+    WriteOneByteData(t, SeqOneByteString::cast(answer)->GetChars(), chars);
+  } else {
+    WriteTwoByteData(t, SeqTwoByteString::cast(answer)->GetChars(), chars);
   }
   return answer;
 }
 
 
-MaybeObject* Heap::AllocateRawAsciiString(int length, PretenureFlag pretenure) {
-  if (length < 0 || length > SeqAsciiString::kMaxLength) {
-    return Failure::OutOfMemoryException();
+// Need explicit instantiations.
+template
+MaybeObject* Heap::AllocateInternalSymbol<true>(String*, int, uint32_t);
+template
+MaybeObject* Heap::AllocateInternalSymbol<false>(String*, int, uint32_t);
+template
+MaybeObject* Heap::AllocateInternalSymbol<false>(Vector<const char>,
+                                                 int,
+                                                 uint32_t);
+
+
+MaybeObject* Heap::AllocateRawOneByteString(int length,
+                                            PretenureFlag pretenure) {
+  if (length < 0 || length > SeqOneByteString::kMaxLength) {
+    return Failure::OutOfMemoryException(0xb);
   }
 
-  int size = SeqAsciiString::SizeFor(length);
-  ASSERT(size <= SeqAsciiString::kMaxSize);
+  int size = SeqOneByteString::SizeFor(length);
+  ASSERT(size <= SeqOneByteString::kMaxSize);
 
   AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
   AllocationSpace retry_space = OLD_DATA_SPACE;
|
|
4496
4832
|
String::cast(result)->set_hash_field(String::kEmptyHashField);
|
4497
4833
|
ASSERT_EQ(size, HeapObject::cast(result)->Size());
|
4498
4834
|
|
4499
|
-
#
|
4835
|
+
#ifndef ENABLE_LATIN_1
|
4836
|
+
#ifdef VERIFY_HEAP
|
4500
4837
|
if (FLAG_verify_heap) {
|
4501
4838
|
// Initialize string's content to ensure ASCII-ness (character range 0-127)
|
4502
4839
|
// as required when verifying the heap.
|
4503
|
-
|
4840
|
+
uint8_t* dest = SeqOneByteString::cast(result)->GetChars();
|
4504
4841
|
memset(dest, 0x0F, length * kCharSize);
|
4505
4842
|
}
|
4506
|
-
#endif
|
4843
|
+
#endif
|
4844
|
+
#endif
|
4507
4845
|
|
4508
4846
|
return result;
|
4509
4847
|
}
|
@@ -4512,7 +4850,7 @@ MaybeObject* Heap::AllocateRawAsciiString(int length, PretenureFlag pretenure) {
|
|
4512
4850
|
MaybeObject* Heap::AllocateRawTwoByteString(int length,
|
4513
4851
|
PretenureFlag pretenure) {
|
4514
4852
|
if (length < 0 || length > SeqTwoByteString::kMaxLength) {
|
4515
|
-
return Failure::OutOfMemoryException();
|
4853
|
+
return Failure::OutOfMemoryException(0xc);
|
4516
4854
|
}
|
4517
4855
|
int size = SeqTwoByteString::SizeFor(length);
|
4518
4856
|
ASSERT(size <= SeqTwoByteString::kMaxSize);
|
@@ -4548,10 +4886,10 @@ MaybeObject* Heap::AllocateRawTwoByteString(int length,
|
|
4548
4886
|
MaybeObject* Heap::AllocateJSArray(
|
4549
4887
|
ElementsKind elements_kind,
|
4550
4888
|
PretenureFlag pretenure) {
|
4551
|
-
Context*
|
4552
|
-
JSFunction* array_function =
|
4889
|
+
Context* native_context = isolate()->context()->native_context();
|
4890
|
+
JSFunction* array_function = native_context->array_function();
|
4553
4891
|
Map* map = array_function->initial_map();
|
4554
|
-
Object* maybe_map_array =
|
4892
|
+
Object* maybe_map_array = native_context->js_array_maps();
|
4555
4893
|
if (!maybe_map_array->IsUndefined()) {
|
4556
4894
|
Object* maybe_transitioned_map =
|
4557
4895
|
FixedArray::cast(maybe_map_array)->get(elements_kind);
|
@@ -4581,7 +4919,7 @@ MaybeObject* Heap::AllocateEmptyFixedArray() {
|
|
4581
4919
|
|
4582
4920
|
MaybeObject* Heap::AllocateRawFixedArray(int length) {
|
4583
4921
|
if (length < 0 || length > FixedArray::kMaxLength) {
|
4584
|
-
return Failure::OutOfMemoryException();
|
4922
|
+
return Failure::OutOfMemoryException(0xd);
|
4585
4923
|
}
|
4586
4924
|
ASSERT(length > 0);
|
4587
4925
|
// Use the general function if we're forced to always allocate.
|
@@ -4657,7 +4995,7 @@ MaybeObject* Heap::AllocateFixedArray(int length) {
|
|
4657
4995
|
|
4658
4996
|
MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) {
|
4659
4997
|
if (length < 0 || length > FixedArray::kMaxLength) {
|
4660
|
-
return Failure::OutOfMemoryException();
|
4998
|
+
return Failure::OutOfMemoryException(0xe);
|
4661
4999
|
}
|
4662
5000
|
|
4663
5001
|
AllocationSpace space =
|
@@ -4790,7 +5128,7 @@ MaybeObject* Heap::AllocateFixedDoubleArrayWithHoles(
|
|
4790
5128
|
MaybeObject* Heap::AllocateRawFixedDoubleArray(int length,
|
4791
5129
|
PretenureFlag pretenure) {
|
4792
5130
|
if (length < 0 || length > FixedDoubleArray::kMaxLength) {
|
4793
|
-
return Failure::OutOfMemoryException();
|
5131
|
+
return Failure::OutOfMemoryException(0xf);
|
4794
5132
|
}
|
4795
5133
|
|
4796
5134
|
AllocationSpace space =
|
@@ -4834,33 +5172,50 @@ MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) {
|
|
4834
5172
|
}
|
4835
5173
|
|
4836
5174
|
|
4837
|
-
MaybeObject* Heap::
|
5175
|
+
MaybeObject* Heap::AllocateNativeContext() {
|
4838
5176
|
Object* result;
|
4839
5177
|
{ MaybeObject* maybe_result =
|
4840
|
-
AllocateFixedArray(Context::
|
5178
|
+
AllocateFixedArray(Context::NATIVE_CONTEXT_SLOTS);
|
4841
5179
|
if (!maybe_result->ToObject(&result)) return maybe_result;
|
4842
5180
|
}
|
4843
5181
|
Context* context = reinterpret_cast<Context*>(result);
|
4844
|
-
context->set_map_no_write_barrier(
|
5182
|
+
context->set_map_no_write_barrier(native_context_map());
|
4845
5183
|
context->set_js_array_maps(undefined_value());
|
4846
|
-
ASSERT(context->
|
5184
|
+
ASSERT(context->IsNativeContext());
|
4847
5185
|
ASSERT(result->IsContext());
|
4848
5186
|
return result;
|
4849
5187
|
}
|
4850
5188
|
|
4851
5189
|
|
4852
|
-
MaybeObject* Heap::
|
5190
|
+
MaybeObject* Heap::AllocateGlobalContext(JSFunction* function,
|
4853
5191
|
ScopeInfo* scope_info) {
|
4854
5192
|
Object* result;
|
4855
5193
|
{ MaybeObject* maybe_result =
|
4856
|
-
|
5194
|
+
AllocateFixedArray(scope_info->ContextLength(), TENURED);
|
4857
5195
|
if (!maybe_result->ToObject(&result)) return maybe_result;
|
4858
5196
|
}
|
4859
5197
|
Context* context = reinterpret_cast<Context*>(result);
|
4860
|
-
context->set_map_no_write_barrier(
|
4861
|
-
context->
|
5198
|
+
context->set_map_no_write_barrier(global_context_map());
|
5199
|
+
context->set_closure(function);
|
5200
|
+
context->set_previous(function->context());
|
4862
5201
|
context->set_extension(scope_info);
|
4863
|
-
context->
|
5202
|
+
context->set_global_object(function->context()->global_object());
|
5203
|
+
ASSERT(context->IsGlobalContext());
|
5204
|
+
ASSERT(result->IsContext());
|
5205
|
+
return context;
|
5206
|
+
}
|
5207
|
+
|
5208
|
+
|
5209
|
+
MaybeObject* Heap::AllocateModuleContext(ScopeInfo* scope_info) {
|
5210
|
+
Object* result;
|
5211
|
+
{ MaybeObject* maybe_result =
|
5212
|
+
AllocateFixedArray(scope_info->ContextLength(), TENURED);
|
5213
|
+
if (!maybe_result->ToObject(&result)) return maybe_result;
|
5214
|
+
}
|
5215
|
+
Context* context = reinterpret_cast<Context*>(result);
|
5216
|
+
context->set_map_no_write_barrier(module_context_map());
|
5217
|
+
// Instance link will be set later.
|
5218
|
+
context->set_extension(Smi::FromInt(0));
|
4864
5219
|
return context;
|
4865
5220
|
}
|
4866
5221
|
|
@@ -4875,8 +5230,8 @@ MaybeObject* Heap::AllocateFunctionContext(int length, JSFunction* function) {
|
|
4875
5230
|
context->set_map_no_write_barrier(function_context_map());
|
4876
5231
|
context->set_closure(function);
|
4877
5232
|
context->set_previous(function->context());
|
4878
|
-
context->set_extension(
|
4879
|
-
context->
|
5233
|
+
context->set_extension(Smi::FromInt(0));
|
5234
|
+
context->set_global_object(function->context()->global_object());
|
4880
5235
|
return context;
|
4881
5236
|
}
|
4882
5237
|
|
@@ -4896,7 +5251,7 @@ MaybeObject* Heap::AllocateCatchContext(JSFunction* function,
|
|
4896
5251
|
context->set_closure(function);
|
4897
5252
|
context->set_previous(previous);
|
4898
5253
|
context->set_extension(name);
|
4899
|
-
context->
|
5254
|
+
context->set_global_object(previous->global_object());
|
4900
5255
|
context->set(Context::THROWN_OBJECT_INDEX, thrown_object);
|
4901
5256
|
return context;
|
4902
5257
|
}
|
@@ -4914,7 +5269,7 @@ MaybeObject* Heap::AllocateWithContext(JSFunction* function,
|
|
4914
5269
|
context->set_closure(function);
|
4915
5270
|
context->set_previous(previous);
|
4916
5271
|
context->set_extension(extension);
|
4917
|
-
context->
|
5272
|
+
context->set_global_object(previous->global_object());
|
4918
5273
|
return context;
|
4919
5274
|
}
|
4920
5275
|
|
@@ -4932,7 +5287,7 @@ MaybeObject* Heap::AllocateBlockContext(JSFunction* function,
|
|
4932
5287
|
context->set_closure(function);
|
4933
5288
|
context->set_previous(previous);
|
4934
5289
|
context->set_extension(scope_info);
|
4935
|
-
context->
|
5290
|
+
context->set_global_object(previous->global_object());
|
4936
5291
|
return context;
|
4937
5292
|
}
|
4938
5293
|
|
@@ -4946,6 +5301,20 @@ MaybeObject* Heap::AllocateScopeInfo(int length) {
|
|
4946
5301
|
}
|
4947
5302
|
|
4948
5303
|
|
5304
|
+
MaybeObject* Heap::AllocateExternal(void* value) {
|
5305
|
+
Foreign* foreign;
|
5306
|
+
{ MaybeObject* maybe_result = AllocateForeign(static_cast<Address>(value));
|
5307
|
+
if (!maybe_result->To(&foreign)) return maybe_result;
|
5308
|
+
}
|
5309
|
+
JSObject* external;
|
5310
|
+
{ MaybeObject* maybe_result = AllocateJSObjectFromMap(external_map());
|
5311
|
+
if (!maybe_result->To(&external)) return maybe_result;
|
5312
|
+
}
|
5313
|
+
external->SetInternalField(0, foreign);
|
5314
|
+
return external;
|
5315
|
+
}
|
5316
|
+
|
5317
|
+
|
4949
5318
|
MaybeObject* Heap::AllocateStruct(InstanceType type) {
|
4950
5319
|
Map* map;
|
4951
5320
|
switch (type) {
|
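The new AllocateExternal boxes a raw `void*` in a Foreign-style cell and parks that cell in the wrapper object's internal field 0, so script-visible code never touches the pointer directly. The same two-level indirection with ordinary structs (illustrative only):

```cpp
#include <cstdio>

// Sketch of the AllocateExternal shape above, with invented types.
struct Foreign { void* address; };          // the boxed raw pointer
struct External { Foreign* internal_field0; };  // the script-facing wrapper

External* WrapExternal(void* value) {
  Foreign* foreign = new Foreign{value};
  return new External{foreign};
}

int main() {
  int x = 7;
  External* e = WrapExternal(&x);
  std::printf("%d\n", *static_cast<int*>(e->internal_field0->address));
  delete e->internal_field0;
  delete e;
}
```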
@@ -5006,12 +5375,17 @@ void Heap::AdvanceIdleIncrementalMarking(intptr_t step_size) {
 
 
 bool Heap::IdleNotification(int hint) {
+  // Hints greater than this value indicate that
+  // the embedder is requesting a lot of GC work.
   const int kMaxHint = 1000;
+  // Minimal hint that allows to do full GC.
+  const int kMinHintForFullGC = 100;
   intptr_t size_factor = Min(Max(hint, 20), kMaxHint) / 4;
   // The size factor is in range [5..250]. The numbers here are chosen from
   // experiments. If you changes them, make sure to test with
   // chrome/performance_ui_tests --gtest_filter="GeneralMixMemoryTest.*
-  intptr_t step_size = size_factor * IncrementalMarking::kAllocatedThreshold;
+  intptr_t step_size =
+      size_factor * IncrementalMarking::kAllocatedThreshold;
 
   if (contexts_disposed_ > 0) {
     if (hint >= kMaxHint) {
@@ -5030,10 +5404,6 @@ bool Heap::IdleNotification(int hint) {
     AdvanceIdleIncrementalMarking(step_size);
     contexts_disposed_ = 0;
   }
-  // Make sure that we have no pending context disposals.
-  // Take into account that we might have decided to delay full collection
-  // because incremental marking is in progress.
-  ASSERT((contexts_disposed_ == 0) || !incremental_marking()->IsStopped());
   // After context disposal there is likely a lot of garbage remaining, reset
   // the idle notification counters in order to trigger more incremental GCs
   // on subsequent idle notifications.
@@ -5054,9 +5424,9 @@ bool Heap::IdleNotification(int hint) {
   // 3. many lazy sweep steps.
   // Use mark-sweep-compact events to count incremental GCs in a round.
 
-
   if (incremental_marking()->IsStopped()) {
-    if (!IsSweepingComplete() &&
+    if (!mark_compact_collector()->AreSweeperThreadsActivated() &&
+        !IsSweepingComplete() &&
         !AdvanceSweepers(static_cast<int>(step_size))) {
       return false;
     }
@@ -5074,16 +5444,30 @@ bool Heap::IdleNotification(int hint) {
   mark_sweeps_since_idle_round_started_ += new_mark_sweeps;
   ms_count_at_last_idle_notification_ = ms_count_;
 
-  if (mark_sweeps_since_idle_round_started_ >= kMaxMarkSweepsInIdleRound) {
+  int remaining_mark_sweeps = kMaxMarkSweepsInIdleRound -
+                              mark_sweeps_since_idle_round_started_;
+
+  if (remaining_mark_sweeps <= 0) {
     FinishIdleRound();
     return true;
   }
 
   if (incremental_marking()->IsStopped()) {
-    incremental_marking()->Start();
+    // If there are no more than two GCs left in this idle round and we are
+    // allowed to do a full GC, then make those GCs full in order to compact
+    // the code space.
+    // TODO(ulan): Once we enable code compaction for incremental marking,
+    // we can get rid of this special case and always start incremental marking.
+    if (remaining_mark_sweeps <= 2 && hint >= kMinHintForFullGC) {
+      CollectAllGarbage(kReduceMemoryFootprintMask,
+                        "idle notification: finalize idle round");
+    } else {
+      incremental_marking()->Start();
+    }
+  }
+  if (!incremental_marking()->IsStopped()) {
+    AdvanceIdleIncrementalMarking(step_size);
   }
-
-  AdvanceIdleIncrementalMarking(step_size);
   return false;
 }
 
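The reworked IdleNotification tracks how many mark-sweeps remain in the current idle round and, per the comment above, spends the last couple on full compacting GCs when the hint is at least `kMinHintForFullGC`. A toy model of that policy (the round budget of 7 is my assumption; only `kMinHintForFullGC` and the "no more than two left" rule come from the hunk):

```cpp
// Sketch of the idle-round bookkeeping, with the heap reduced to ints.
struct IdleRound {
  static const int kMaxMarkSweepsInIdleRound = 7;  // assumed budget
  static const int kMinHintForFullGC = 100;
  int mark_sweeps_done = 0;

  // Returns true when the round is finished.
  bool Notify(int hint) {
    int remaining = kMaxMarkSweepsInIdleRound - mark_sweeps_done;
    if (remaining <= 0) return true;  // stand-in for FinishIdleRound()
    if (remaining <= 2 && hint >= kMinHintForFullGC) {
      // Near the end of the round, spend generous hints on full,
      // compacting collections instead of incremental steps.
      mark_sweeps_done++;  // stand-in for CollectAllGarbage(...)
    } else {
      // stand-in for starting/advancing incremental marking
    }
    return false;
  }
};
```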
@@ -5257,9 +5641,9 @@ bool Heap::InSpace(Address addr, AllocationSpace space) {
 }
 
 
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
 void Heap::Verify() {
-  ASSERT(HasBeenSetUp());
+  CHECK(HasBeenSetUp());
 
   store_buffer()->Verify();
 
@@ -5277,45 +5661,15 @@ void Heap::Verify() {
   cell_space_->Verify(&no_dirty_regions_visitor);
 
   lo_space_->Verify();
-
-  VerifyNoAccessorPairSharing();
-}
-
-
-void Heap::VerifyNoAccessorPairSharing() {
-  // Verification is done in 2 phases: First we mark all AccessorPairs, checking
-  // that we mark only unmarked pairs, then we clear all marks, restoring the
-  // initial state. We use the Smi tag of the AccessorPair's getter as the
-  // marking bit, because we can never see a Smi as the getter.
-  for (int phase = 0; phase < 2; phase++) {
-    HeapObjectIterator iter(map_space());
-    for (HeapObject* obj = iter.Next(); obj != NULL; obj = iter.Next()) {
-      if (obj->IsMap()) {
-        DescriptorArray* descs = Map::cast(obj)->instance_descriptors();
-        for (int i = 0; i < descs->number_of_descriptors(); i++) {
-          if (descs->GetType(i) == CALLBACKS &&
-              descs->GetValue(i)->IsAccessorPair()) {
-            AccessorPair* accessors = AccessorPair::cast(descs->GetValue(i));
-            uintptr_t before = reinterpret_cast<intptr_t>(accessors->getter());
-            uintptr_t after = (phase == 0) ?
-                ((before & ~kSmiTagMask) | kSmiTag) :
-                ((before & ~kHeapObjectTag) | kHeapObjectTag);
-            CHECK(before != after);
-            accessors->set_getter(reinterpret_cast<Object*>(after));
-          }
-        }
-      }
-    }
-  }
 }
-#endif
+#endif
 
 
-MaybeObject* Heap::LookupSymbol(Vector<const char> string) {
+MaybeObject* Heap::LookupUtf8Symbol(Vector<const char> string) {
   Object* symbol = NULL;
   Object* new_table;
   { MaybeObject* maybe_new_table =
-        symbol_table()->LookupSymbol(string, &symbol);
+        symbol_table()->LookupUtf8Symbol(string, &symbol);
     if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
   }
   // Can't use set_symbol_table because SymbolTable::cast knows that
@@ -5326,11 +5680,11 @@ MaybeObject* Heap::LookupSymbol(Vector<const char> string) {
 }
 
 
-MaybeObject* Heap::LookupAsciiSymbol(Vector<const char> string) {
+MaybeObject* Heap::LookupOneByteSymbol(Vector<const uint8_t> string) {
   Object* symbol = NULL;
   Object* new_table;
   { MaybeObject* maybe_new_table =
-        symbol_table()->LookupAsciiSymbol(string, &symbol);
+        symbol_table()->LookupOneByteSymbol(string, &symbol);
     if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
   }
   // Can't use set_symbol_table because SymbolTable::cast knows that
@@ -5341,13 +5695,13 @@ MaybeObject* Heap::LookupAsciiSymbol(Vector<const char> string) {
 }
 
 
-MaybeObject* Heap::LookupAsciiSymbol(Handle<SeqAsciiString> string,
+MaybeObject* Heap::LookupOneByteSymbol(Handle<SeqOneByteString> string,
                                      int from,
                                      int length) {
   Object* symbol = NULL;
   Object* new_table;
   { MaybeObject* maybe_new_table =
-        symbol_table()->LookupSubStringAsciiSymbol(string,
+        symbol_table()->LookupSubStringOneByteSymbol(string,
                                                    from,
                                                    length,
                                                    &symbol);
@@ -5401,7 +5755,6 @@ bool Heap::LookupSymbolIfExists(String* string, String** symbol) {
 }
 
 
-#ifdef DEBUG
 void Heap::ZapFromSpace() {
   NewSpacePageIterator it(new_space_.FromSpaceStart(),
                           new_space_.FromSpaceEnd());
@@ -5414,7 +5767,6 @@ void Heap::ZapFromSpace() {
     }
   }
 }
-#endif  // DEBUG
 
 
 void Heap::IterateAndMarkPointersToFromSpace(Address start,
@@ -5633,6 +5985,7 @@ void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) {
       mode != VISIT_ALL_IN_SWEEP_NEWSPACE) {
     // Scavenge collections have special processing for this.
     external_string_table_.Iterate(v);
+    error_object_list_.Iterate(v);
   }
   v->Synchronize(VisitorSynchronization::kExternalStringsTable);
 }
@@ -5664,6 +6017,7 @@ void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
 
   // Iterate over local handles in handle scopes.
   isolate_->handle_scope_implementer()->Iterate(v);
+  isolate_->IterateDeferredHandles(v);
   v->Synchronize(VisitorSynchronization::kHandleScope);
 
   // Iterate over the builtin code objects and code stubs in the
@@ -5726,8 +6080,8 @@ bool Heap::ConfigureHeap(int max_semispace_size,
   if (max_semispace_size < Page::kPageSize) {
     max_semispace_size = Page::kPageSize;
     if (FLAG_trace_gc) {
-      PrintF("Max semispace size cannot be less than %dkbytes\n",
-             Page::kPageSize >> 10);
+      PrintPID("Max semispace size cannot be less than %dkbytes\n",
+               Page::kPageSize >> 10);
     }
   }
   max_semispace_size_ = max_semispace_size;
@@ -5742,8 +6096,8 @@ bool Heap::ConfigureHeap(int max_semispace_size,
     if (max_semispace_size_ > reserved_semispace_size_) {
       max_semispace_size_ = reserved_semispace_size_;
       if (FLAG_trace_gc) {
-        PrintF("Max semispace size cannot be more than %dkbytes\n",
-               reserved_semispace_size_ >> 10);
+        PrintPID("Max semispace size cannot be more than %dkbytes\n",
+                 reserved_semispace_size_ >> 10);
      }
    }
  } else {
@@ -5768,7 +6122,7 @@ bool Heap::ConfigureHeap(int max_semispace_size,
   max_semispace_size_ = RoundUpToPowerOf2(max_semispace_size_);
   reserved_semispace_size_ = RoundUpToPowerOf2(reserved_semispace_size_);
   initial_semispace_size_ = Min(initial_semispace_size_, max_semispace_size_);
-  external_allocation_limit_ =
+  external_allocation_limit_ = 16 * max_semispace_size_;
 
   // The old generation is paged and needs at least one page for each space.
   int paged_space_count = LAST_PAGED_SPACE - FIRST_PAGED_SPACE + 1;
@@ -5843,172 +6197,6 @@ intptr_t Heap::PromotedExternalMemorySize() {
       - amount_of_external_allocated_memory_at_last_global_gc_;
 }
 
-#ifdef DEBUG
-
-// Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObject.
-static const int kMarkTag = 2;
-
-
-class HeapDebugUtils {
- public:
-  explicit HeapDebugUtils(Heap* heap)
-    : search_for_any_global_(false),
-      search_target_(NULL),
-      found_target_(false),
-      object_stack_(20),
-      heap_(heap) {
-  }
-
-  class MarkObjectVisitor : public ObjectVisitor {
-   public:
-    explicit MarkObjectVisitor(HeapDebugUtils* utils) : utils_(utils) { }
-
-    void VisitPointers(Object** start, Object** end) {
-      // Copy all HeapObject pointers in [start, end)
-      for (Object** p = start; p < end; p++) {
-        if ((*p)->IsHeapObject())
-          utils_->MarkObjectRecursively(p);
-      }
-    }
-
-    HeapDebugUtils* utils_;
-  };
-
-  void MarkObjectRecursively(Object** p) {
-    if (!(*p)->IsHeapObject()) return;
-
-    HeapObject* obj = HeapObject::cast(*p);
-
-    Object* map = obj->map();
-
-    if (!map->IsHeapObject()) return;  // visited before
-
-    if (found_target_) return;  // stop if target found
-    object_stack_.Add(obj);
-    if ((search_for_any_global_ && obj->IsJSGlobalObject()) ||
-        (!search_for_any_global_ && (obj == search_target_))) {
-      found_target_ = true;
-      return;
-    }
-
-    // not visited yet
-    Map* map_p = reinterpret_cast<Map*>(HeapObject::cast(map));
-
-    Address map_addr = map_p->address();
-
-    obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_addr + kMarkTag));
-
-    MarkObjectRecursively(&map);
-
-    MarkObjectVisitor mark_visitor(this);
-
-    obj->IterateBody(map_p->instance_type(), obj->SizeFromMap(map_p),
-                     &mark_visitor);
-
-    if (!found_target_)  // don't pop if found the target
-      object_stack_.RemoveLast();
-  }
-
-
-  class UnmarkObjectVisitor : public ObjectVisitor {
-   public:
-    explicit UnmarkObjectVisitor(HeapDebugUtils* utils) : utils_(utils) { }
-
-    void VisitPointers(Object** start, Object** end) {
-      // Copy all HeapObject pointers in [start, end)
-      for (Object** p = start; p < end; p++) {
-        if ((*p)->IsHeapObject())
-          utils_->UnmarkObjectRecursively(p);
-      }
-    }
-
-    HeapDebugUtils* utils_;
-  };
-
-
-  void UnmarkObjectRecursively(Object** p) {
-    if (!(*p)->IsHeapObject()) return;
-
-    HeapObject* obj = HeapObject::cast(*p);
-
-    Object* map = obj->map();
-
-    if (map->IsHeapObject()) return;  // unmarked already
-
-    Address map_addr = reinterpret_cast<Address>(map);
-
-    map_addr -= kMarkTag;
-
-    ASSERT_TAG_ALIGNED(map_addr);
-
-    HeapObject* map_p = HeapObject::FromAddress(map_addr);
-
-    obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_p));
-
-    UnmarkObjectRecursively(reinterpret_cast<Object**>(&map_p));
-
-    UnmarkObjectVisitor unmark_visitor(this);
-
-    obj->IterateBody(Map::cast(map_p)->instance_type(),
-                     obj->SizeFromMap(Map::cast(map_p)),
-                     &unmark_visitor);
-  }
-
-
-  void MarkRootObjectRecursively(Object** root) {
-    if (search_for_any_global_) {
-      ASSERT(search_target_ == NULL);
-    } else {
-      ASSERT(search_target_->IsHeapObject());
-    }
-    found_target_ = false;
-    object_stack_.Clear();
-
-    MarkObjectRecursively(root);
-    UnmarkObjectRecursively(root);
-
-    if (found_target_) {
-      PrintF("=====================================\n");
-      PrintF("====        Path to object       ====\n");
-      PrintF("=====================================\n\n");
-
-      ASSERT(!object_stack_.is_empty());
-      for (int i = 0; i < object_stack_.length(); i++) {
-        if (i > 0) PrintF("\n     |\n     |\n     V\n\n");
-        Object* obj = object_stack_[i];
-        obj->Print();
-      }
-      PrintF("=====================================\n");
-    }
-  }
-
-  // Helper class for visiting HeapObjects recursively.
-  class MarkRootVisitor: public ObjectVisitor {
-   public:
-    explicit MarkRootVisitor(HeapDebugUtils* utils) : utils_(utils) { }
-
-    void VisitPointers(Object** start, Object** end) {
-      // Visit all HeapObject pointers in [start, end)
-      for (Object** p = start; p < end; p++) {
-        if ((*p)->IsHeapObject())
-          utils_->MarkRootObjectRecursively(p);
-      }
-    }
-
-    HeapDebugUtils* utils_;
-  };
-
-  bool search_for_any_global_;
-  Object* search_target_;
-  bool found_target_;
-  List<Object*> object_stack_;
-  Heap* heap_;
-
-  friend class Heap;
-};
-
-#endif
-
 
 V8_DECLARE_ONCE(initialize_gc_once);
 
@@ -6021,7 +6209,6 @@ static void InitializeGCOnce() {
 bool Heap::SetUp(bool create_heap_objects) {
 #ifdef DEBUG
   allocation_timeout_ = FLAG_gc_interval;
-  debug_utils_ = new HeapDebugUtils(this);
 #endif
 
   // Initialize heap spaces and initial maps and objects. Whenever something
@@ -6118,7 +6305,7 @@ bool Heap::SetUp(bool create_heap_objects) {
     // Create initial objects
     if (!CreateInitialObjects()) return false;
 
-    global_contexts_list_ = undefined_value();
+    native_contexts_list_ = undefined_value();
   }
 
   LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
@@ -6126,6 +6313,8 @@ bool Heap::SetUp(bool create_heap_objects) {
 
   store_buffer()->SetUp();
 
+  if (FLAG_parallel_recompilation) relocation_mutex_ = OS::CreateMutex();
+
   return true;
 }
 
@@ -6148,19 +6337,23 @@ void Heap::SetStackLimits() {
 
 
 void Heap::TearDown() {
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
     Verify();
   }
 #endif
+
   if (FLAG_print_cumulative_gc_stat) {
-    PrintF("\n\n");
+    PrintF("\n");
     PrintF("gc_count=%d ", gc_count_);
     PrintF("mark_sweep_count=%d ", ms_count_);
     PrintF("max_gc_pause=%d ", get_max_gc_pause());
+    PrintF("total_gc_time=%d ", total_gc_time_ms_);
     PrintF("min_in_mutator=%d ", get_min_in_mutator());
     PrintF("max_alive_after_gc=%" V8_PTR_PREFIX "d ",
            get_max_alive_after_gc());
+    PrintF("total_marking_time=%f ", marking_time());
+    PrintF("total_sweeping_time=%f ", sweeping_time());
     PrintF("\n\n");
   }
 
@@ -6168,6 +6361,8 @@ void Heap::TearDown() {
 
   external_string_table_.TearDown();
 
+  error_object_list_.TearDown();
+
   new_space_.TearDown();
 
   if (old_pointer_space_ != NULL) {
@@ -6211,10 +6406,7 @@ void Heap::TearDown() {
 
   isolate_->memory_allocator()->TearDown();
 
-#ifdef DEBUG
-  delete debug_utils_;
-  debug_utils_ = NULL;
-#endif
+  delete relocation_mutex_;
 }
 
 
@@ -6570,7 +6762,7 @@ void HeapIterator::reset() {
 }
 
 
-#if defined(DEBUG) || defined(LIVE_OBJECT_LIST)
+#ifdef DEBUG
 
 Object* const PathTracer::kAnyGlobalObject = reinterpret_cast<Object*>(NULL);
 
@@ -6628,7 +6820,7 @@ void PathTracer::TracePathFrom(Object** root) {
   ASSERT((search_target_ == kAnyGlobalObject) ||
          search_target_->IsHeapObject());
   found_target_in_trace_ = false;
-  object_stack_.Clear();
+  Reset();
 
   MarkVisitor mark_visitor(this);
   MarkRecursively(root, &mark_visitor);
@@ -6640,8 +6832,8 @@ void PathTracer::TracePathFrom(Object** root) {
 }
 
 
-static bool SafeIsGlobalContext(HeapObject* obj) {
-  return obj->map() == obj->GetHeap()->raw_unchecked_global_context_map();
+static bool SafeIsNativeContext(HeapObject* obj) {
+  return obj->map() == obj->GetHeap()->raw_unchecked_native_context_map();
 }
 
 
@@ -6663,7 +6855,7 @@ void PathTracer::MarkRecursively(Object** p, MarkVisitor* mark_visitor) {
     return;
   }
 
-  bool is_global_context = SafeIsGlobalContext(obj);
+  bool is_native_context = SafeIsNativeContext(obj);
 
   // not visited yet
   Map* map_p = reinterpret_cast<Map*>(HeapObject::cast(map));
@@ -6673,7 +6865,7 @@ void PathTracer::MarkRecursively(Object** p, MarkVisitor* mark_visitor) {
   obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_addr + kMarkTag));
 
   // Scan the object body.
-  if (is_global_context && (visit_mode_ == VISIT_ONLY_STRONG)) {
+  if (is_native_context && (visit_mode_ == VISIT_ONLY_STRONG)) {
     // This is specialized to scan Context's properly.
     Object** start = reinterpret_cast<Object**>(obj->address() +
                                                 Context::kHeaderSize);
@@ -6732,19 +6924,22 @@ void PathTracer::ProcessResults() {
     for (int i = 0; i < object_stack_.length(); i++) {
       if (i > 0) PrintF("\n     |\n     |\n     V\n\n");
       Object* obj = object_stack_[i];
-#ifdef OBJECT_PRINT
       obj->Print();
-#else
-      obj->ShortPrint();
-#endif
     }
     PrintF("=====================================\n");
   }
 }
-#endif  // DEBUG || LIVE_OBJECT_LIST
 
 
-
+// Triggers a depth-first traversal of reachable objects from one
+// given root object and finds a path to a specific heap object and
+// prints it.
+void Heap::TracePathToObjectFrom(Object* target, Object* root) {
+  PathTracer tracer(target, PathTracer::FIND_ALL, VISIT_ALL);
+  tracer.VisitPointer(&root);
+}
+
+
 // Triggers a depth-first traversal of reachable objects from roots
 // and finds a path to a specific heap object and prints it.
 void Heap::TracePathToObject(Object* target) {
|
|
6788
6983
|
allocated_since_last_gc_(0),
|
6789
6984
|
spent_in_mutator_(0),
|
6790
6985
|
promoted_objects_size_(0),
|
6986
|
+
nodes_died_in_new_space_(0),
|
6987
|
+
nodes_copied_in_new_space_(0),
|
6988
|
+
nodes_promoted_(0),
|
6791
6989
|
heap_(heap),
|
6792
6990
|
gc_reason_(gc_reason),
|
6793
6991
|
collector_reason_(collector_reason) {
|
@@ -6832,6 +7030,7 @@ GCTracer::~GCTracer() {
 
   // Update cumulative GC statistics if required.
   if (FLAG_print_cumulative_gc_stat) {
+    heap_->total_gc_time_ms_ += time;
     heap_->max_gc_pause_ = Max(heap_->max_gc_pause_, time);
     heap_->max_alive_after_gc_ = Max(heap_->max_alive_after_gc_,
                                      heap_->alive_after_last_gc_);
@@ -6839,9 +7038,16 @@ GCTracer::~GCTracer() {
       heap_->min_in_mutator_ = Min(heap_->min_in_mutator_,
                                    static_cast<int>(spent_in_mutator_));
     }
+  } else if (FLAG_trace_gc_verbose) {
+    heap_->total_gc_time_ms_ += time;
   }
 
-
+  if (collector_ == SCAVENGER && FLAG_trace_gc_ignore_scavenger) return;
+
+  heap_->AddMarkingTime(scopes_[Scope::MC_MARK]);
+
+  if (FLAG_print_cumulative_gc_stat && !FLAG_trace_gc) return;
+  PrintPID("%8.0f ms: ", heap_->isolate()->time_millis_since_init());
 
   if (!FLAG_trace_gc_nvp) {
     int external_time = static_cast<int>(scopes_[Scope::EXTERNAL]);
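Note: with this change, total GC time accumulates under either --print-cumulative-gc-stat or --trace-gc-verbose, alongside the existing max-pause and min-mutator watermarks. A simplified sketch of the bookkeeping; the struct and names are illustrative, not V8's declarations:

// Illustrative sketch of the cumulative accounting, not V8 code.
struct CumulativeGCStats {
  double total_gc_time_ms;  // new in this release: summed across collections
  double max_gc_pause_ms;   // pre-existing watermark

  CumulativeGCStats() : total_gc_time_ms(0), max_gc_pause_ms(0) {}

  void RecordPause(double pause_ms) {
    total_gc_time_ms += pause_ms;
    if (pause_ms > max_gc_pause_ms) max_gc_pause_ms = pause_ms;
  }
};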
@@ -6883,9 +7089,7 @@ GCTracer::~GCTracer() {
     PrintF(".\n");
   } else {
     PrintF("pause=%d ", time);
-    PrintF("mutator=%d ",
-           static_cast<int>(spent_in_mutator_));
-
+    PrintF("mutator=%d ", static_cast<int>(spent_in_mutator_));
     PrintF("gc=");
     switch (collector_) {
       case SCAVENGER:
@@ -6925,6 +7129,9 @@ GCTracer::~GCTracer() {
 
     PrintF("allocated=%" V8_PTR_PREFIX "d ", allocated_since_last_gc_);
     PrintF("promoted=%" V8_PTR_PREFIX "d ", promoted_objects_size_);
+    PrintF("nodes_died_in_new=%d ", nodes_died_in_new_space_);
+    PrintF("nodes_copied_in_new=%d ", nodes_copied_in_new_space_);
+    PrintF("nodes_promoted=%d ", nodes_promoted_);
 
     if (collector_ == SCAVENGER) {
       PrintF("stepscount=%d ", steps_count_since_last_gc_);
@@ -6932,6 +7139,7 @@ GCTracer::~GCTracer() {
     } else {
       PrintF("stepscount=%d ", steps_count_);
       PrintF("stepstook=%d ", static_cast<int>(steps_took_));
+      PrintF("longeststep=%.f ", longest_step_);
     }
 
     PrintF("\n");
@@ -7012,7 +7220,7 @@ void KeyedLookupCache::Clear() {
 
 
 void DescriptorLookupCache::Clear() {
-  for (int index = 0; index < kLength; index++) keys_[index].array = NULL;
+  for (int index = 0; index < kLength; index++) keys_[index].source = NULL;
 }
 
 
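Note: the rewritten Clear above resets only the key field that lookups compare against; once that field can never match, the stale payloads in each entry are unreachable without being touched. Roughly, with illustrative field and type names rather than V8's:

#include <cstddef>

// Illustrative, not V8's types: a direct-mapped cache invalidated by
// nulling only the field used for the hit test.
struct Entry {
  const void* source;  // entry hits only if source == the lookup key
  int result;          // stale payload is harmless once source is NULL
};

static const int kLength = 64;  // assumed size; V8 defines its own kLength

void Clear(Entry (&keys)[kLength]) {
  for (int index = 0; index < kLength; index++) keys[index].source = NULL;
}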
@@ -7052,7 +7260,7 @@ void TranscendentalCache::Clear() {
 void ExternalStringTable::CleanUp() {
   int last = 0;
   for (int i = 0; i < new_space_strings_.length(); ++i) {
-    if (new_space_strings_[i] == heap_->raw_unchecked_the_hole_value()) {
+    if (new_space_strings_[i] == heap_->the_hole_value()) {
       continue;
     }
     if (heap_->InNewSpace(new_space_strings_[i])) {
@@ -7062,18 +7270,23 @@ void ExternalStringTable::CleanUp() {
     }
   }
   new_space_strings_.Rewind(last);
+  new_space_strings_.Trim();
+
   last = 0;
   for (int i = 0; i < old_space_strings_.length(); ++i) {
-    if (old_space_strings_[i] == heap_->raw_unchecked_the_hole_value()) {
+    if (old_space_strings_[i] == heap_->the_hole_value()) {
       continue;
     }
     ASSERT(!heap_->InNewSpace(old_space_strings_[i]));
     old_space_strings_[last++] = old_space_strings_[i];
   }
   old_space_strings_.Rewind(last);
+  old_space_strings_.Trim();
+#ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
     Verify();
   }
+#endif
 }
 
 
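Note: the CleanUp changes pair the existing Rewind (drop the dead tail after copying survivors forward) with a new Trim call that also returns the list's spare backing storage. As a rough std::vector analogy of the pattern, not V8 code:

#include <algorithm>
#include <vector>

// Rough analogy for the Rewind + Trim pattern above.
template <typename T, typename Pred>
void CompactAndTrim(std::vector<T>* list, Pred is_dead) {
  list->erase(std::remove_if(list->begin(), list->end(), is_dead),
              list->end());  // like Rewind(last): logical length shrinks
  list->shrink_to_fit();     // like Trim(): capacity is released as well
}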
@@ -7083,6 +7296,119 @@ void ExternalStringTable::TearDown() {
 }
 
 
+// Update all references.
+void ErrorObjectList::UpdateReferences() {
+  for (int i = 0; i < list_.length(); i++) {
+    HeapObject* object = HeapObject::cast(list_[i]);
+    MapWord first_word = object->map_word();
+    if (first_word.IsForwardingAddress()) {
+      list_[i] = first_word.ToForwardingAddress();
+    }
+  }
+}
+
+
+// Unforwarded objects in new space are dead and removed from the list.
+void ErrorObjectList::UpdateReferencesInNewSpace(Heap* heap) {
+  if (list_.is_empty()) return;
+  if (!nested_) {
+    int write_index = 0;
+    for (int i = 0; i < list_.length(); i++) {
+      MapWord first_word = HeapObject::cast(list_[i])->map_word();
+      if (first_word.IsForwardingAddress()) {
+        list_[write_index++] = first_word.ToForwardingAddress();
+      }
+    }
+    list_.Rewind(write_index);
+  } else {
+    // If a GC is triggered during DeferredFormatStackTrace, we do not move
+    // objects in the list, just remove dead ones, as to not confuse the
+    // loop in DeferredFormatStackTrace.
+    for (int i = 0; i < list_.length(); i++) {
+      MapWord first_word = HeapObject::cast(list_[i])->map_word();
+      list_[i] = first_word.IsForwardingAddress()
+                     ? first_word.ToForwardingAddress()
+                     : heap->the_hole_value();
+    }
+  }
+}
+
+
+void ErrorObjectList::DeferredFormatStackTrace(Isolate* isolate) {
+  // If formatting the stack trace causes a GC, this method will be
+  // recursively called. In that case, skip the recursive call, since
+  // the loop modifies the list while iterating over it.
+  if (nested_ || list_.is_empty() || isolate->has_pending_exception()) return;
+  nested_ = true;
+  HandleScope scope(isolate);
+  Handle<String> stack_key = isolate->factory()->stack_symbol();
+  int write_index = 0;
+  int budget = kBudgetPerGC;
+  for (int i = 0; i < list_.length(); i++) {
+    Object* object = list_[i];
+    JSFunction* getter_fun;
+
+    { AssertNoAllocation assert;
+      // Skip possible holes in the list.
+      if (object->IsTheHole()) continue;
+      if (isolate->heap()->InNewSpace(object) || budget == 0) {
+        list_[write_index++] = object;
+        continue;
+      }
+
+      // Check whether the stack property is backed by the original getter.
+      LookupResult lookup(isolate);
+      JSObject::cast(object)->LocalLookupRealNamedProperty(*stack_key, &lookup);
+      if (!lookup.IsFound() || lookup.type() != CALLBACKS) continue;
+      Object* callback = lookup.GetCallbackObject();
+      if (!callback->IsAccessorPair()) continue;
+      Object* getter_obj = AccessorPair::cast(callback)->getter();
+      if (!getter_obj->IsJSFunction()) continue;
+      getter_fun = JSFunction::cast(getter_obj);
+      String* key = isolate->heap()->hidden_stack_trace_symbol();
+      if (key != getter_fun->GetHiddenProperty(key)) continue;
+    }
+
+    budget--;
+    HandleScope scope(isolate);
+    bool has_exception = false;
+#ifdef DEBUG
+    Handle<Map> map(HeapObject::cast(object)->map(), isolate);
+#endif
+    Handle<Object> object_handle(object, isolate);
+    Handle<Object> getter_handle(getter_fun, isolate);
+    Execution::Call(getter_handle, object_handle, 0, NULL, &has_exception);
+    ASSERT(*map == HeapObject::cast(*object_handle)->map());
+    if (has_exception) {
+      // Hit an exception (most likely a stack overflow).
+      // Wrap up this pass and retry after another GC.
+      isolate->clear_pending_exception();
+      // We use the handle since calling the getter might have caused a GC.
+      list_[write_index++] = *object_handle;
+      budget = 0;
+    }
+  }
+  list_.Rewind(write_index);
+  list_.Trim();
+  nested_ = false;
+}
+
+
+void ErrorObjectList::RemoveUnmarked(Heap* heap) {
+  for (int i = 0; i < list_.length(); i++) {
+    HeapObject* object = HeapObject::cast(list_[i]);
+    if (!Marking::MarkBitFrom(object).Get()) {
+      list_[i] = heap->the_hole_value();
+    }
+  }
+}
+
+
+void ErrorObjectList::TearDown() {
+  list_.Free();
+}
+
+
 void Heap::QueueMemoryChunkForFree(MemoryChunk* chunk) {
   chunk->set_next_chunk(chunks_queued_for_free_);
   chunks_queued_for_free_ = chunk;
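Note: DeferredFormatStackTrace above bounds the stack-trace formatting done per GC with a simple budget: each formatted error decrements it, anything over budget (or still in new space) is carried over in place, and an exception zeroes the budget so the pass resumes after the next GC. Stripped to its skeleton with hypothetical types, not V8's:

#include <cstddef>
#include <vector>

// Skeleton of the per-GC work-budget pattern, not V8 code.
struct Item {
  bool Process() { return true; }  // stand-in; false means "retry next pass"
};

void RunOnePass(std::vector<Item*>* list, int budget_per_gc) {
  int budget = budget_per_gc;
  size_t write_index = 0;
  for (size_t i = 0; i < list->size(); i++) {
    Item* item = (*list)[i];
    if (budget == 0) {             // over budget: carry over to the next pass
      (*list)[write_index++] = item;
      continue;
    }
    budget--;
    if (!item->Process()) {        // failure: re-queue and stop doing work
      (*list)[write_index++] = item;
      budget = 0;
    }
  }
  list->resize(write_index);       // like Rewind + Trim
}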
@@ -7151,4 +7477,63 @@ void Heap::RememberUnmappedPage(Address page, bool compacted) {
   remembered_unmapped_pages_index_ %= kRememberedUnmappedPages;
 }
 
+
+void Heap::ClearObjectStats(bool clear_last_time_stats) {
+  memset(object_counts_, 0, sizeof(object_counts_));
+  memset(object_sizes_, 0, sizeof(object_sizes_));
+  if (clear_last_time_stats) {
+    memset(object_counts_last_time_, 0, sizeof(object_counts_last_time_));
+    memset(object_sizes_last_time_, 0, sizeof(object_sizes_last_time_));
+  }
+}
+
+
+static LazyMutex checkpoint_object_stats_mutex = LAZY_MUTEX_INITIALIZER;
+
+
+void Heap::CheckpointObjectStats() {
+  ScopedLock lock(checkpoint_object_stats_mutex.Pointer());
+  Counters* counters = isolate()->counters();
+#define ADJUST_LAST_TIME_OBJECT_COUNT(name)              \
+  counters->count_of_##name()->Increment(                \
+      static_cast<int>(object_counts_[name]));           \
+  counters->count_of_##name()->Decrement(                \
+      static_cast<int>(object_counts_last_time_[name])); \
+  counters->size_of_##name()->Increment(                 \
+      static_cast<int>(object_sizes_[name]));            \
+  counters->size_of_##name()->Decrement(                 \
+      static_cast<int>(object_sizes_last_time_[name]));
+  INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT)
+#undef ADJUST_LAST_TIME_OBJECT_COUNT
+  int index;
+#define ADJUST_LAST_TIME_OBJECT_COUNT(name)               \
+  index = FIRST_CODE_KIND_SUB_TYPE + Code::name;          \
+  counters->count_of_CODE_TYPE_##name()->Increment(       \
+      static_cast<int>(object_counts_[index]));           \
+  counters->count_of_CODE_TYPE_##name()->Decrement(       \
+      static_cast<int>(object_counts_last_time_[index])); \
+  counters->size_of_CODE_TYPE_##name()->Increment(        \
+      static_cast<int>(object_sizes_[index]));            \
+  counters->size_of_CODE_TYPE_##name()->Decrement(        \
+      static_cast<int>(object_sizes_last_time_[index]));
+  CODE_KIND_LIST(ADJUST_LAST_TIME_OBJECT_COUNT)
+#undef ADJUST_LAST_TIME_OBJECT_COUNT
+#define ADJUST_LAST_TIME_OBJECT_COUNT(name)               \
+  index = FIRST_FIXED_ARRAY_SUB_TYPE + name;              \
+  counters->count_of_FIXED_ARRAY_##name()->Increment(     \
+      static_cast<int>(object_counts_[index]));           \
+  counters->count_of_FIXED_ARRAY_##name()->Decrement(     \
+      static_cast<int>(object_counts_last_time_[index])); \
+  counters->size_of_FIXED_ARRAY_##name()->Increment(      \
+      static_cast<int>(object_sizes_[index]));            \
+  counters->size_of_FIXED_ARRAY_##name()->Decrement(      \
+      static_cast<int>(object_sizes_last_time_[index]));
+  FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT)
+#undef ADJUST_LAST_TIME_OBJECT_COUNT
+
+  memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_));
+  memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_));
+  ClearObjectStats();
+}
+
 } }  // namespace v8::internal
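Note: CheckpointObjectStats publishes per-type census deltas: incrementing each external counter by the current count and decrementing by the previous snapshot nets out to adding (current - last), after which the snapshot is refreshed via memcpy and the working arrays cleared. In miniature, with illustrative names rather than V8's Counters machinery:

// Miniature of the Increment/Decrement delta trick above, not V8 code.
struct StatsCounter {
  int value;
  StatsCounter() : value(0) {}
  void Increment(int by) { value += by; }
  void Decrement(int by) { value -= by; }
};

// Net effect: value += (current - last); snapshot refreshed for next time.
void Checkpoint(StatsCounter* counter, int* last_time, int current) {
  counter->Increment(current);
  counter->Decrement(*last_time);
  *last_time = current;  // like the memcpy into the *_last_time_ arrays
}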