libv8 3.11.8.17 → 3.16.14.0
- checksums.yaml +4 -4
- data/.travis.yml +1 -2
- data/Gemfile +1 -1
- data/Rakefile +6 -7
- data/lib/libv8/version.rb +1 -1
- data/vendor/v8/.gitignore +24 -3
- data/vendor/v8/AUTHORS +7 -0
- data/vendor/v8/ChangeLog +839 -0
- data/vendor/v8/DEPS +1 -1
- data/vendor/v8/Makefile.android +92 -0
- data/vendor/v8/OWNERS +11 -0
- data/vendor/v8/PRESUBMIT.py +71 -0
- data/vendor/v8/SConstruct +34 -39
- data/vendor/v8/build/android.gypi +56 -37
- data/vendor/v8/build/common.gypi +112 -30
- data/vendor/v8/build/gyp_v8 +1 -1
- data/vendor/v8/build/standalone.gypi +15 -11
- data/vendor/v8/include/v8-debug.h +9 -1
- data/vendor/v8/include/v8-preparser.h +4 -3
- data/vendor/v8/include/v8-profiler.h +25 -25
- data/vendor/v8/include/v8-testing.h +4 -3
- data/vendor/v8/include/v8.h +994 -540
- data/vendor/v8/preparser/preparser-process.cc +3 -3
- data/vendor/v8/samples/lineprocessor.cc +20 -27
- data/vendor/v8/samples/process.cc +18 -14
- data/vendor/v8/samples/shell.cc +16 -15
- data/vendor/v8/src/SConscript +15 -14
- data/vendor/v8/src/accessors.cc +169 -77
- data/vendor/v8/src/accessors.h +4 -0
- data/vendor/v8/src/allocation-inl.h +2 -2
- data/vendor/v8/src/allocation.h +7 -7
- data/vendor/v8/src/api.cc +810 -497
- data/vendor/v8/src/api.h +85 -60
- data/vendor/v8/src/arm/assembler-arm-inl.h +179 -22
- data/vendor/v8/src/arm/assembler-arm.cc +633 -264
- data/vendor/v8/src/arm/assembler-arm.h +264 -197
- data/vendor/v8/src/arm/builtins-arm.cc +117 -27
- data/vendor/v8/src/arm/code-stubs-arm.cc +1241 -700
- data/vendor/v8/src/arm/code-stubs-arm.h +35 -138
- data/vendor/v8/src/arm/codegen-arm.cc +285 -16
- data/vendor/v8/src/arm/codegen-arm.h +22 -0
- data/vendor/v8/src/arm/constants-arm.cc +5 -3
- data/vendor/v8/src/arm/constants-arm.h +24 -11
- data/vendor/v8/src/arm/debug-arm.cc +3 -3
- data/vendor/v8/src/arm/deoptimizer-arm.cc +382 -92
- data/vendor/v8/src/arm/disasm-arm.cc +61 -12
- data/vendor/v8/src/arm/frames-arm.h +0 -14
- data/vendor/v8/src/arm/full-codegen-arm.cc +332 -304
- data/vendor/v8/src/arm/ic-arm.cc +180 -259
- data/vendor/v8/src/arm/lithium-arm.cc +364 -316
- data/vendor/v8/src/arm/lithium-arm.h +512 -275
- data/vendor/v8/src/arm/lithium-codegen-arm.cc +1768 -809
- data/vendor/v8/src/arm/lithium-codegen-arm.h +97 -35
- data/vendor/v8/src/arm/lithium-gap-resolver-arm.cc +12 -5
- data/vendor/v8/src/arm/macro-assembler-arm.cc +439 -228
- data/vendor/v8/src/arm/macro-assembler-arm.h +116 -70
- data/vendor/v8/src/arm/regexp-macro-assembler-arm.cc +54 -44
- data/vendor/v8/src/arm/regexp-macro-assembler-arm.h +3 -10
- data/vendor/v8/src/arm/simulator-arm.cc +272 -238
- data/vendor/v8/src/arm/simulator-arm.h +38 -8
- data/vendor/v8/src/arm/stub-cache-arm.cc +522 -895
- data/vendor/v8/src/array.js +101 -70
- data/vendor/v8/src/assembler.cc +270 -19
- data/vendor/v8/src/assembler.h +110 -15
- data/vendor/v8/src/ast.cc +79 -69
- data/vendor/v8/src/ast.h +255 -301
- data/vendor/v8/src/atomicops.h +7 -1
- data/vendor/v8/src/atomicops_internals_tsan.h +335 -0
- data/vendor/v8/src/bootstrapper.cc +481 -418
- data/vendor/v8/src/bootstrapper.h +4 -4
- data/vendor/v8/src/builtins.cc +498 -311
- data/vendor/v8/src/builtins.h +75 -47
- data/vendor/v8/src/checks.cc +2 -1
- data/vendor/v8/src/checks.h +8 -0
- data/vendor/v8/src/code-stubs-hydrogen.cc +253 -0
- data/vendor/v8/src/code-stubs.cc +249 -84
- data/vendor/v8/src/code-stubs.h +501 -169
- data/vendor/v8/src/codegen.cc +36 -18
- data/vendor/v8/src/codegen.h +25 -3
- data/vendor/v8/src/collection.js +54 -17
- data/vendor/v8/src/compilation-cache.cc +24 -16
- data/vendor/v8/src/compilation-cache.h +15 -6
- data/vendor/v8/src/compiler.cc +497 -195
- data/vendor/v8/src/compiler.h +246 -38
- data/vendor/v8/src/contexts.cc +64 -24
- data/vendor/v8/src/contexts.h +60 -29
- data/vendor/v8/src/conversions-inl.h +24 -14
- data/vendor/v8/src/conversions.h +7 -4
- data/vendor/v8/src/counters.cc +21 -12
- data/vendor/v8/src/counters.h +44 -16
- data/vendor/v8/src/cpu-profiler.h +1 -1
- data/vendor/v8/src/d8-debug.cc +2 -2
- data/vendor/v8/src/d8-readline.cc +13 -2
- data/vendor/v8/src/d8.cc +681 -273
- data/vendor/v8/src/d8.gyp +4 -4
- data/vendor/v8/src/d8.h +38 -18
- data/vendor/v8/src/d8.js +0 -617
- data/vendor/v8/src/data-flow.h +55 -0
- data/vendor/v8/src/date.js +1 -42
- data/vendor/v8/src/dateparser-inl.h +5 -1
- data/vendor/v8/src/debug-agent.cc +10 -15
- data/vendor/v8/src/debug-debugger.js +147 -149
- data/vendor/v8/src/debug.cc +323 -164
- data/vendor/v8/src/debug.h +26 -14
- data/vendor/v8/src/deoptimizer.cc +765 -290
- data/vendor/v8/src/deoptimizer.h +130 -28
- data/vendor/v8/src/disassembler.cc +10 -4
- data/vendor/v8/src/elements-kind.cc +7 -2
- data/vendor/v8/src/elements-kind.h +19 -0
- data/vendor/v8/src/elements.cc +607 -285
- data/vendor/v8/src/elements.h +36 -13
- data/vendor/v8/src/execution.cc +52 -31
- data/vendor/v8/src/execution.h +4 -4
- data/vendor/v8/src/extensions/externalize-string-extension.cc +5 -4
- data/vendor/v8/src/extensions/gc-extension.cc +5 -1
- data/vendor/v8/src/extensions/statistics-extension.cc +153 -0
- data/vendor/v8/src/{inspector.h → extensions/statistics-extension.h} +12 -23
- data/vendor/v8/src/factory.cc +101 -134
- data/vendor/v8/src/factory.h +36 -31
- data/vendor/v8/src/flag-definitions.h +102 -25
- data/vendor/v8/src/flags.cc +9 -5
- data/vendor/v8/src/frames-inl.h +10 -0
- data/vendor/v8/src/frames.cc +116 -26
- data/vendor/v8/src/frames.h +96 -12
- data/vendor/v8/src/full-codegen.cc +219 -74
- data/vendor/v8/src/full-codegen.h +63 -21
- data/vendor/v8/src/func-name-inferrer.cc +8 -7
- data/vendor/v8/src/func-name-inferrer.h +5 -3
- data/vendor/v8/src/gdb-jit.cc +71 -57
- data/vendor/v8/src/global-handles.cc +230 -101
- data/vendor/v8/src/global-handles.h +26 -27
- data/vendor/v8/src/globals.h +17 -19
- data/vendor/v8/src/handles-inl.h +59 -12
- data/vendor/v8/src/handles.cc +180 -200
- data/vendor/v8/src/handles.h +80 -11
- data/vendor/v8/src/hashmap.h +60 -40
- data/vendor/v8/src/heap-inl.h +107 -45
- data/vendor/v8/src/heap-profiler.cc +38 -19
- data/vendor/v8/src/heap-profiler.h +24 -14
- data/vendor/v8/src/heap.cc +1123 -738
- data/vendor/v8/src/heap.h +385 -146
- data/vendor/v8/src/hydrogen-instructions.cc +700 -217
- data/vendor/v8/src/hydrogen-instructions.h +1158 -472
- data/vendor/v8/src/hydrogen.cc +3319 -1662
- data/vendor/v8/src/hydrogen.h +411 -170
- data/vendor/v8/src/ia32/assembler-ia32-inl.h +46 -16
- data/vendor/v8/src/ia32/assembler-ia32.cc +131 -61
- data/vendor/v8/src/ia32/assembler-ia32.h +115 -57
- data/vendor/v8/src/ia32/builtins-ia32.cc +99 -5
- data/vendor/v8/src/ia32/code-stubs-ia32.cc +787 -495
- data/vendor/v8/src/ia32/code-stubs-ia32.h +10 -100
- data/vendor/v8/src/ia32/codegen-ia32.cc +227 -23
- data/vendor/v8/src/ia32/codegen-ia32.h +14 -0
- data/vendor/v8/src/ia32/deoptimizer-ia32.cc +428 -87
- data/vendor/v8/src/ia32/disasm-ia32.cc +28 -1
- data/vendor/v8/src/ia32/frames-ia32.h +6 -16
- data/vendor/v8/src/ia32/full-codegen-ia32.cc +280 -272
- data/vendor/v8/src/ia32/ic-ia32.cc +150 -250
- data/vendor/v8/src/ia32/lithium-codegen-ia32.cc +1600 -517
- data/vendor/v8/src/ia32/lithium-codegen-ia32.h +90 -24
- data/vendor/v8/src/ia32/lithium-gap-resolver-ia32.cc +10 -6
- data/vendor/v8/src/ia32/lithium-gap-resolver-ia32.h +2 -2
- data/vendor/v8/src/ia32/lithium-ia32.cc +405 -302
- data/vendor/v8/src/ia32/lithium-ia32.h +526 -271
- data/vendor/v8/src/ia32/macro-assembler-ia32.cc +378 -119
- data/vendor/v8/src/ia32/macro-assembler-ia32.h +62 -28
- data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.cc +43 -30
- data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.h +2 -10
- data/vendor/v8/src/ia32/stub-cache-ia32.cc +492 -678
- data/vendor/v8/src/ic-inl.h +9 -4
- data/vendor/v8/src/ic.cc +836 -923
- data/vendor/v8/src/ic.h +228 -247
- data/vendor/v8/src/incremental-marking-inl.h +26 -30
- data/vendor/v8/src/incremental-marking.cc +276 -248
- data/vendor/v8/src/incremental-marking.h +29 -37
- data/vendor/v8/src/interface.cc +34 -25
- data/vendor/v8/src/interface.h +69 -25
- data/vendor/v8/src/interpreter-irregexp.cc +2 -2
- data/vendor/v8/src/isolate.cc +382 -76
- data/vendor/v8/src/isolate.h +109 -56
- data/vendor/v8/src/json-parser.h +217 -104
- data/vendor/v8/src/json-stringifier.h +745 -0
- data/vendor/v8/src/json.js +10 -132
- data/vendor/v8/src/jsregexp-inl.h +106 -0
- data/vendor/v8/src/jsregexp.cc +517 -285
- data/vendor/v8/src/jsregexp.h +145 -117
- data/vendor/v8/src/list-inl.h +35 -22
- data/vendor/v8/src/list.h +46 -19
- data/vendor/v8/src/lithium-allocator-inl.h +22 -2
- data/vendor/v8/src/lithium-allocator.cc +85 -70
- data/vendor/v8/src/lithium-allocator.h +21 -39
- data/vendor/v8/src/lithium.cc +259 -5
- data/vendor/v8/src/lithium.h +131 -32
- data/vendor/v8/src/liveedit-debugger.js +52 -3
- data/vendor/v8/src/liveedit.cc +393 -113
- data/vendor/v8/src/liveedit.h +7 -3
- data/vendor/v8/src/log-utils.cc +4 -2
- data/vendor/v8/src/log.cc +170 -140
- data/vendor/v8/src/log.h +62 -11
- data/vendor/v8/src/macro-assembler.h +17 -0
- data/vendor/v8/src/macros.py +2 -0
- data/vendor/v8/src/mark-compact-inl.h +3 -23
- data/vendor/v8/src/mark-compact.cc +801 -830
- data/vendor/v8/src/mark-compact.h +154 -47
- data/vendor/v8/src/marking-thread.cc +85 -0
- data/vendor/v8/src/{inspector.cc → marking-thread.h} +32 -24
- data/vendor/v8/src/math.js +12 -18
- data/vendor/v8/src/messages.cc +18 -8
- data/vendor/v8/src/messages.js +314 -261
- data/vendor/v8/src/mips/assembler-mips-inl.h +58 -6
- data/vendor/v8/src/mips/assembler-mips.cc +92 -75
- data/vendor/v8/src/mips/assembler-mips.h +54 -60
- data/vendor/v8/src/mips/builtins-mips.cc +116 -17
- data/vendor/v8/src/mips/code-stubs-mips.cc +919 -556
- data/vendor/v8/src/mips/code-stubs-mips.h +22 -131
- data/vendor/v8/src/mips/codegen-mips.cc +281 -6
- data/vendor/v8/src/mips/codegen-mips.h +22 -0
- data/vendor/v8/src/mips/constants-mips.cc +2 -0
- data/vendor/v8/src/mips/constants-mips.h +12 -2
- data/vendor/v8/src/mips/deoptimizer-mips.cc +286 -50
- data/vendor/v8/src/mips/disasm-mips.cc +13 -0
- data/vendor/v8/src/mips/full-codegen-mips.cc +297 -284
- data/vendor/v8/src/mips/ic-mips.cc +182 -263
- data/vendor/v8/src/mips/lithium-codegen-mips.cc +1208 -556
- data/vendor/v8/src/mips/lithium-codegen-mips.h +72 -19
- data/vendor/v8/src/mips/lithium-gap-resolver-mips.cc +9 -2
- data/vendor/v8/src/mips/lithium-mips.cc +290 -302
- data/vendor/v8/src/mips/lithium-mips.h +463 -266
- data/vendor/v8/src/mips/macro-assembler-mips.cc +208 -115
- data/vendor/v8/src/mips/macro-assembler-mips.h +67 -24
- data/vendor/v8/src/mips/regexp-macro-assembler-mips.cc +40 -25
- data/vendor/v8/src/mips/regexp-macro-assembler-mips.h +3 -9
- data/vendor/v8/src/mips/simulator-mips.cc +112 -40
- data/vendor/v8/src/mips/simulator-mips.h +5 -0
- data/vendor/v8/src/mips/stub-cache-mips.cc +502 -884
- data/vendor/v8/src/mirror-debugger.js +157 -30
- data/vendor/v8/src/mksnapshot.cc +88 -14
- data/vendor/v8/src/object-observe.js +235 -0
- data/vendor/v8/src/objects-debug.cc +178 -176
- data/vendor/v8/src/objects-inl.h +1333 -486
- data/vendor/v8/src/objects-printer.cc +125 -43
- data/vendor/v8/src/objects-visiting-inl.h +578 -6
- data/vendor/v8/src/objects-visiting.cc +2 -2
- data/vendor/v8/src/objects-visiting.h +172 -79
- data/vendor/v8/src/objects.cc +3533 -2885
- data/vendor/v8/src/objects.h +1352 -1131
- data/vendor/v8/src/optimizing-compiler-thread.cc +152 -0
- data/vendor/v8/src/optimizing-compiler-thread.h +111 -0
- data/vendor/v8/src/parser.cc +390 -500
- data/vendor/v8/src/parser.h +45 -33
- data/vendor/v8/src/platform-cygwin.cc +10 -21
- data/vendor/v8/src/platform-freebsd.cc +36 -41
- data/vendor/v8/src/platform-linux.cc +160 -124
- data/vendor/v8/src/platform-macos.cc +30 -27
- data/vendor/v8/src/platform-nullos.cc +17 -1
- data/vendor/v8/src/platform-openbsd.cc +19 -50
- data/vendor/v8/src/platform-posix.cc +14 -0
- data/vendor/v8/src/platform-solaris.cc +20 -53
- data/vendor/v8/src/platform-win32.cc +49 -26
- data/vendor/v8/src/platform.h +40 -1
- data/vendor/v8/src/preparser.cc +8 -5
- data/vendor/v8/src/preparser.h +2 -2
- data/vendor/v8/src/prettyprinter.cc +16 -0
- data/vendor/v8/src/prettyprinter.h +2 -0
- data/vendor/v8/src/profile-generator-inl.h +1 -0
- data/vendor/v8/src/profile-generator.cc +209 -147
- data/vendor/v8/src/profile-generator.h +15 -12
- data/vendor/v8/src/property-details.h +46 -31
- data/vendor/v8/src/property.cc +27 -46
- data/vendor/v8/src/property.h +163 -83
- data/vendor/v8/src/proxy.js +7 -2
- data/vendor/v8/src/regexp-macro-assembler-irregexp.cc +4 -13
- data/vendor/v8/src/regexp-macro-assembler-irregexp.h +1 -2
- data/vendor/v8/src/regexp-macro-assembler-tracer.cc +1 -11
- data/vendor/v8/src/regexp-macro-assembler-tracer.h +0 -1
- data/vendor/v8/src/regexp-macro-assembler.cc +31 -14
- data/vendor/v8/src/regexp-macro-assembler.h +14 -11
- data/vendor/v8/src/regexp-stack.cc +1 -0
- data/vendor/v8/src/regexp.js +9 -8
- data/vendor/v8/src/rewriter.cc +18 -7
- data/vendor/v8/src/runtime-profiler.cc +52 -43
- data/vendor/v8/src/runtime-profiler.h +0 -25
- data/vendor/v8/src/runtime.cc +2006 -2023
- data/vendor/v8/src/runtime.h +56 -49
- data/vendor/v8/src/safepoint-table.cc +12 -18
- data/vendor/v8/src/safepoint-table.h +11 -8
- data/vendor/v8/src/scanner.cc +1 -0
- data/vendor/v8/src/scanner.h +4 -10
- data/vendor/v8/src/scopeinfo.cc +35 -9
- data/vendor/v8/src/scopeinfo.h +64 -3
- data/vendor/v8/src/scopes.cc +251 -156
- data/vendor/v8/src/scopes.h +61 -27
- data/vendor/v8/src/serialize.cc +348 -396
- data/vendor/v8/src/serialize.h +125 -114
- data/vendor/v8/src/small-pointer-list.h +11 -11
- data/vendor/v8/src/{smart-array-pointer.h → smart-pointers.h} +64 -15
- data/vendor/v8/src/snapshot-common.cc +64 -15
- data/vendor/v8/src/snapshot-empty.cc +7 -1
- data/vendor/v8/src/snapshot.h +9 -2
- data/vendor/v8/src/spaces-inl.h +17 -0
- data/vendor/v8/src/spaces.cc +477 -183
- data/vendor/v8/src/spaces.h +238 -58
- data/vendor/v8/src/splay-tree-inl.h +8 -7
- data/vendor/v8/src/splay-tree.h +24 -10
- data/vendor/v8/src/store-buffer.cc +12 -5
- data/vendor/v8/src/store-buffer.h +2 -4
- data/vendor/v8/src/string-search.h +22 -6
- data/vendor/v8/src/string-stream.cc +11 -8
- data/vendor/v8/src/string.js +47 -15
- data/vendor/v8/src/stub-cache.cc +461 -224
- data/vendor/v8/src/stub-cache.h +164 -102
- data/vendor/v8/src/sweeper-thread.cc +105 -0
- data/vendor/v8/src/sweeper-thread.h +81 -0
- data/vendor/v8/src/token.h +1 -0
- data/vendor/v8/src/transitions-inl.h +220 -0
- data/vendor/v8/src/transitions.cc +160 -0
- data/vendor/v8/src/transitions.h +207 -0
- data/vendor/v8/src/type-info.cc +182 -181
- data/vendor/v8/src/type-info.h +31 -19
- data/vendor/v8/src/unicode-inl.h +62 -106
- data/vendor/v8/src/unicode.cc +57 -67
- data/vendor/v8/src/unicode.h +45 -91
- data/vendor/v8/src/uri.js +57 -29
- data/vendor/v8/src/utils.h +105 -5
- data/vendor/v8/src/v8-counters.cc +54 -11
- data/vendor/v8/src/v8-counters.h +134 -19
- data/vendor/v8/src/v8.cc +29 -29
- data/vendor/v8/src/v8.h +1 -0
- data/vendor/v8/src/v8conversions.cc +26 -22
- data/vendor/v8/src/v8globals.h +56 -43
- data/vendor/v8/src/v8natives.js +83 -30
- data/vendor/v8/src/v8threads.cc +42 -21
- data/vendor/v8/src/v8threads.h +4 -1
- data/vendor/v8/src/v8utils.cc +9 -93
- data/vendor/v8/src/v8utils.h +37 -33
- data/vendor/v8/src/variables.cc +6 -3
- data/vendor/v8/src/variables.h +6 -13
- data/vendor/v8/src/version.cc +2 -2
- data/vendor/v8/src/vm-state-inl.h +11 -0
- data/vendor/v8/src/x64/assembler-x64-inl.h +39 -8
- data/vendor/v8/src/x64/assembler-x64.cc +78 -64
- data/vendor/v8/src/x64/assembler-x64.h +38 -33
- data/vendor/v8/src/x64/builtins-x64.cc +105 -7
- data/vendor/v8/src/x64/code-stubs-x64.cc +790 -413
- data/vendor/v8/src/x64/code-stubs-x64.h +10 -106
- data/vendor/v8/src/x64/codegen-x64.cc +210 -8
- data/vendor/v8/src/x64/codegen-x64.h +20 -1
- data/vendor/v8/src/x64/deoptimizer-x64.cc +336 -75
- data/vendor/v8/src/x64/disasm-x64.cc +15 -0
- data/vendor/v8/src/x64/frames-x64.h +0 -14
- data/vendor/v8/src/x64/full-codegen-x64.cc +293 -270
- data/vendor/v8/src/x64/ic-x64.cc +153 -251
- data/vendor/v8/src/x64/lithium-codegen-x64.cc +1379 -531
- data/vendor/v8/src/x64/lithium-codegen-x64.h +67 -23
- data/vendor/v8/src/x64/lithium-gap-resolver-x64.cc +2 -2
- data/vendor/v8/src/x64/lithium-x64.cc +349 -289
- data/vendor/v8/src/x64/lithium-x64.h +460 -250
- data/vendor/v8/src/x64/macro-assembler-x64.cc +350 -177
- data/vendor/v8/src/x64/macro-assembler-x64.h +67 -49
- data/vendor/v8/src/x64/regexp-macro-assembler-x64.cc +46 -33
- data/vendor/v8/src/x64/regexp-macro-assembler-x64.h +2 -3
- data/vendor/v8/src/x64/stub-cache-x64.cc +484 -653
- data/vendor/v8/src/zone-inl.h +9 -27
- data/vendor/v8/src/zone.cc +5 -5
- data/vendor/v8/src/zone.h +53 -27
- data/vendor/v8/test/benchmarks/testcfg.py +5 -0
- data/vendor/v8/test/cctest/cctest.cc +4 -0
- data/vendor/v8/test/cctest/cctest.gyp +3 -1
- data/vendor/v8/test/cctest/cctest.h +57 -9
- data/vendor/v8/test/cctest/cctest.status +15 -15
- data/vendor/v8/test/cctest/test-accessors.cc +26 -0
- data/vendor/v8/test/cctest/test-alloc.cc +22 -30
- data/vendor/v8/test/cctest/test-api.cc +1943 -314
- data/vendor/v8/test/cctest/test-assembler-arm.cc +133 -13
- data/vendor/v8/test/cctest/test-assembler-ia32.cc +1 -1
- data/vendor/v8/test/cctest/test-assembler-mips.cc +12 -0
- data/vendor/v8/test/cctest/test-ast.cc +4 -2
- data/vendor/v8/test/cctest/test-compiler.cc +61 -29
- data/vendor/v8/test/cctest/test-dataflow.cc +2 -2
- data/vendor/v8/test/cctest/test-debug.cc +212 -33
- data/vendor/v8/test/cctest/test-decls.cc +257 -11
- data/vendor/v8/test/cctest/test-dictionary.cc +24 -10
- data/vendor/v8/test/cctest/test-disasm-arm.cc +118 -1
- data/vendor/v8/test/cctest/test-disasm-ia32.cc +3 -2
- data/vendor/v8/test/cctest/test-flags.cc +14 -1
- data/vendor/v8/test/cctest/test-func-name-inference.cc +7 -4
- data/vendor/v8/test/cctest/test-global-object.cc +51 -0
- data/vendor/v8/test/cctest/test-hashing.cc +32 -23
- data/vendor/v8/test/cctest/test-heap-profiler.cc +131 -77
- data/vendor/v8/test/cctest/test-heap.cc +1084 -143
- data/vendor/v8/test/cctest/test-list.cc +1 -1
- data/vendor/v8/test/cctest/test-liveedit.cc +3 -2
- data/vendor/v8/test/cctest/test-lockers.cc +12 -13
- data/vendor/v8/test/cctest/test-log.cc +10 -8
- data/vendor/v8/test/cctest/test-macro-assembler-x64.cc +2 -2
- data/vendor/v8/test/cctest/test-mark-compact.cc +44 -22
- data/vendor/v8/test/cctest/test-object-observe.cc +434 -0
- data/vendor/v8/test/cctest/test-parsing.cc +86 -39
- data/vendor/v8/test/cctest/test-platform-linux.cc +6 -0
- data/vendor/v8/test/cctest/test-platform-win32.cc +7 -0
- data/vendor/v8/test/cctest/test-random.cc +5 -4
- data/vendor/v8/test/cctest/test-regexp.cc +137 -101
- data/vendor/v8/test/cctest/test-serialize.cc +150 -230
- data/vendor/v8/test/cctest/test-sockets.cc +1 -1
- data/vendor/v8/test/cctest/test-spaces.cc +139 -0
- data/vendor/v8/test/cctest/test-strings.cc +736 -74
- data/vendor/v8/test/cctest/test-thread-termination.cc +10 -11
- data/vendor/v8/test/cctest/test-threads.cc +4 -4
- data/vendor/v8/test/cctest/test-utils.cc +16 -0
- data/vendor/v8/test/cctest/test-weakmaps.cc +7 -3
- data/vendor/v8/test/cctest/testcfg.py +64 -5
- data/vendor/v8/test/es5conform/testcfg.py +5 -0
- data/vendor/v8/test/message/message.status +1 -1
- data/vendor/v8/test/message/overwritten-builtins.out +3 -0
- data/vendor/v8/test/message/testcfg.py +89 -8
- data/vendor/v8/test/message/try-catch-finally-no-message.out +26 -26
- data/vendor/v8/test/mjsunit/accessor-map-sharing.js +18 -2
- data/vendor/v8/test/mjsunit/allocation-site-info.js +126 -0
- data/vendor/v8/test/mjsunit/array-bounds-check-removal.js +62 -1
- data/vendor/v8/test/mjsunit/array-iteration.js +1 -1
- data/vendor/v8/test/mjsunit/array-literal-transitions.js +2 -0
- data/vendor/v8/test/mjsunit/array-natives-elements.js +317 -0
- data/vendor/v8/test/mjsunit/array-reduce.js +8 -8
- data/vendor/v8/test/mjsunit/array-slice.js +12 -0
- data/vendor/v8/test/mjsunit/array-store-and-grow.js +4 -1
- data/vendor/v8/test/mjsunit/assert-opt-and-deopt.js +1 -1
- data/vendor/v8/test/mjsunit/bugs/bug-2337.js +53 -0
- data/vendor/v8/test/mjsunit/compare-known-objects-slow.js +69 -0
- data/vendor/v8/test/mjsunit/compiler/alloc-object-huge.js +3 -1
- data/vendor/v8/test/mjsunit/compiler/inline-accessors.js +368 -0
- data/vendor/v8/test/mjsunit/compiler/inline-arguments.js +87 -1
- data/vendor/v8/test/mjsunit/compiler/inline-closures.js +49 -0
- data/vendor/v8/test/mjsunit/compiler/inline-construct.js +55 -43
- data/vendor/v8/test/mjsunit/compiler/inline-literals.js +39 -0
- data/vendor/v8/test/mjsunit/compiler/multiply-add.js +69 -0
- data/vendor/v8/test/mjsunit/compiler/optimized-closures.js +57 -0
- data/vendor/v8/test/mjsunit/compiler/parallel-proto-change.js +44 -0
- data/vendor/v8/test/mjsunit/compiler/property-static.js +69 -0
- data/vendor/v8/test/mjsunit/compiler/proto-chain-constant.js +55 -0
- data/vendor/v8/test/mjsunit/compiler/proto-chain-load.js +44 -0
- data/vendor/v8/test/mjsunit/compiler/regress-gvn.js +3 -2
- data/vendor/v8/test/mjsunit/compiler/regress-or.js +6 -2
- data/vendor/v8/test/mjsunit/compiler/rotate.js +224 -0
- data/vendor/v8/test/mjsunit/compiler/uint32.js +173 -0
- data/vendor/v8/test/mjsunit/count-based-osr.js +2 -1
- data/vendor/v8/test/mjsunit/d8-os.js +3 -3
- data/vendor/v8/test/mjsunit/date-parse.js +3 -0
- data/vendor/v8/test/mjsunit/date.js +22 -0
- data/vendor/v8/test/mjsunit/debug-break-inline.js +1 -0
- data/vendor/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js +22 -12
- data/vendor/v8/test/mjsunit/debug-evaluate-locals-optimized.js +21 -10
- data/vendor/v8/test/mjsunit/debug-liveedit-compile-error.js +60 -0
- data/vendor/v8/test/mjsunit/debug-liveedit-double-call.js +142 -0
- data/vendor/v8/test/mjsunit/debug-liveedit-literals.js +94 -0
- data/vendor/v8/test/mjsunit/debug-liveedit-restart-frame.js +153 -0
- data/vendor/v8/test/mjsunit/debug-multiple-breakpoints.js +1 -1
- data/vendor/v8/test/mjsunit/debug-script-breakpoints-closure.js +67 -0
- data/vendor/v8/test/mjsunit/debug-script-breakpoints-nested.js +82 -0
- data/vendor/v8/test/mjsunit/debug-script.js +4 -2
- data/vendor/v8/test/mjsunit/debug-set-variable-value.js +308 -0
- data/vendor/v8/test/mjsunit/debug-stepout-scope-part1.js +190 -0
- data/vendor/v8/test/mjsunit/debug-stepout-scope-part2.js +83 -0
- data/vendor/v8/test/mjsunit/debug-stepout-scope-part3.js +80 -0
- data/vendor/v8/test/mjsunit/debug-stepout-scope-part4.js +80 -0
- data/vendor/v8/test/mjsunit/debug-stepout-scope-part5.js +77 -0
- data/vendor/v8/test/mjsunit/debug-stepout-scope-part6.js +79 -0
- data/vendor/v8/test/mjsunit/debug-stepout-scope-part7.js +79 -0
- data/vendor/v8/test/mjsunit/{debug-stepout-scope.js → debug-stepout-scope-part8.js} +0 -189
- data/vendor/v8/test/mjsunit/delete-non-configurable.js +74 -0
- data/vendor/v8/test/mjsunit/deopt-minus-zero.js +56 -0
- data/vendor/v8/test/mjsunit/elements-kind.js +6 -4
- data/vendor/v8/test/mjsunit/elements-length-no-holey.js +33 -0
- data/vendor/v8/test/mjsunit/elements-transition-hoisting.js +46 -19
- data/vendor/v8/test/mjsunit/error-accessors.js +54 -0
- data/vendor/v8/test/mjsunit/error-constructors.js +1 -14
- data/vendor/v8/test/mjsunit/error-tostring.js +8 -0
- data/vendor/v8/test/mjsunit/eval-stack-trace.js +204 -0
- data/vendor/v8/test/mjsunit/external-array.js +364 -1
- data/vendor/v8/test/mjsunit/fast-array-length.js +37 -0
- data/vendor/v8/test/mjsunit/fast-non-keyed.js +113 -0
- data/vendor/v8/test/mjsunit/fast-prototype.js +117 -0
- data/vendor/v8/test/mjsunit/function-call.js +14 -18
- data/vendor/v8/test/mjsunit/fuzz-natives-part1.js +230 -0
- data/vendor/v8/test/mjsunit/fuzz-natives-part2.js +229 -0
- data/vendor/v8/test/mjsunit/fuzz-natives-part3.js +229 -0
- data/vendor/v8/test/mjsunit/{fuzz-natives.js → fuzz-natives-part4.js} +12 -2
- data/vendor/v8/test/mjsunit/generated-transition-stub.js +218 -0
- data/vendor/v8/test/mjsunit/greedy.js +1 -1
- data/vendor/v8/test/mjsunit/harmony/block-conflicts.js +2 -1
- data/vendor/v8/test/mjsunit/harmony/block-let-crankshaft.js +1 -1
- data/vendor/v8/test/mjsunit/harmony/collections.js +69 -11
- data/vendor/v8/test/mjsunit/harmony/debug-blockscopes.js +2 -2
- data/vendor/v8/test/mjsunit/harmony/module-linking.js +180 -3
- data/vendor/v8/test/mjsunit/harmony/module-parsing.js +31 -0
- data/vendor/v8/test/mjsunit/harmony/module-recompile.js +87 -0
- data/vendor/v8/test/mjsunit/harmony/module-resolution.js +15 -2
- data/vendor/v8/test/mjsunit/harmony/object-observe.js +1056 -0
- data/vendor/v8/test/mjsunit/harmony/proxies-json.js +178 -0
- data/vendor/v8/test/mjsunit/harmony/proxies.js +25 -10
- data/vendor/v8/test/mjsunit/json-parser-recursive.js +33 -0
- data/vendor/v8/test/mjsunit/json-stringify-recursive.js +52 -0
- data/vendor/v8/test/mjsunit/json.js +38 -2
- data/vendor/v8/test/mjsunit/json2.js +153 -0
- data/vendor/v8/test/mjsunit/limit-locals.js +5 -4
- data/vendor/v8/test/mjsunit/manual-parallel-recompile.js +79 -0
- data/vendor/v8/test/mjsunit/math-exp-precision.js +64 -0
- data/vendor/v8/test/mjsunit/math-floor-negative.js +59 -0
- data/vendor/v8/test/mjsunit/math-floor-of-div-minus-zero.js +41 -0
- data/vendor/v8/test/mjsunit/math-floor-of-div-nosudiv.js +288 -0
- data/vendor/v8/test/mjsunit/math-floor-of-div.js +81 -9
- data/vendor/v8/test/mjsunit/{math-floor.js → math-floor-part1.js} +1 -72
- data/vendor/v8/test/mjsunit/math-floor-part2.js +76 -0
- data/vendor/v8/test/mjsunit/math-floor-part3.js +78 -0
- data/vendor/v8/test/mjsunit/math-floor-part4.js +76 -0
- data/vendor/v8/test/mjsunit/mirror-object.js +43 -9
- data/vendor/v8/test/mjsunit/mjsunit.js +1 -1
- data/vendor/v8/test/mjsunit/mjsunit.status +52 -27
- data/vendor/v8/test/mjsunit/mul-exhaustive-part1.js +491 -0
- data/vendor/v8/test/mjsunit/mul-exhaustive-part10.js +470 -0
- data/vendor/v8/test/mjsunit/mul-exhaustive-part2.js +525 -0
- data/vendor/v8/test/mjsunit/mul-exhaustive-part3.js +532 -0
- data/vendor/v8/test/mjsunit/mul-exhaustive-part4.js +509 -0
- data/vendor/v8/test/mjsunit/mul-exhaustive-part5.js +505 -0
- data/vendor/v8/test/mjsunit/mul-exhaustive-part6.js +554 -0
- data/vendor/v8/test/mjsunit/mul-exhaustive-part7.js +497 -0
- data/vendor/v8/test/mjsunit/mul-exhaustive-part8.js +526 -0
- data/vendor/v8/test/mjsunit/mul-exhaustive-part9.js +533 -0
- data/vendor/v8/test/mjsunit/new-function.js +34 -0
- data/vendor/v8/test/mjsunit/numops-fuzz-part1.js +1172 -0
- data/vendor/v8/test/mjsunit/numops-fuzz-part2.js +1178 -0
- data/vendor/v8/test/mjsunit/numops-fuzz-part3.js +1178 -0
- data/vendor/v8/test/mjsunit/numops-fuzz-part4.js +1177 -0
- data/vendor/v8/test/mjsunit/object-define-property.js +107 -2
- data/vendor/v8/test/mjsunit/override-read-only-property.js +6 -4
- data/vendor/v8/test/mjsunit/packed-elements.js +2 -2
- data/vendor/v8/test/mjsunit/parse-int-float.js +4 -4
- data/vendor/v8/test/mjsunit/pixel-array-rounding.js +1 -1
- data/vendor/v8/test/mjsunit/readonly.js +228 -0
- data/vendor/v8/test/mjsunit/regexp-capture-3.js +16 -18
- data/vendor/v8/test/mjsunit/regexp-capture.js +2 -0
- data/vendor/v8/test/mjsunit/regexp-global.js +122 -0
- data/vendor/v8/test/mjsunit/regexp-results-cache.js +78 -0
- data/vendor/v8/test/mjsunit/regress/regress-1117.js +12 -3
- data/vendor/v8/test/mjsunit/regress/regress-1118.js +1 -1
- data/vendor/v8/test/mjsunit/regress/regress-115100.js +36 -0
- data/vendor/v8/test/mjsunit/regress/regress-1199637.js +1 -3
- data/vendor/v8/test/mjsunit/regress/regress-121407.js +1 -1
- data/vendor/v8/test/mjsunit/regress/regress-131923.js +30 -0
- data/vendor/v8/test/mjsunit/regress/regress-131994.js +70 -0
- data/vendor/v8/test/mjsunit/regress/regress-133211.js +35 -0
- data/vendor/v8/test/mjsunit/regress/regress-133211b.js +39 -0
- data/vendor/v8/test/mjsunit/regress/regress-136048.js +34 -0
- data/vendor/v8/test/mjsunit/regress/regress-137768.js +73 -0
- data/vendor/v8/test/mjsunit/regress/regress-143967.js +34 -0
- data/vendor/v8/test/mjsunit/regress/regress-145201.js +107 -0
- data/vendor/v8/test/mjsunit/regress/regress-147497.js +45 -0
- data/vendor/v8/test/mjsunit/regress/regress-148378.js +38 -0
- data/vendor/v8/test/mjsunit/regress/regress-1563.js +1 -1
- data/vendor/v8/test/mjsunit/regress/regress-1591.js +48 -0
- data/vendor/v8/test/mjsunit/regress/regress-164442.js +45 -0
- data/vendor/v8/test/mjsunit/regress/regress-165637.js +61 -0
- data/vendor/v8/test/mjsunit/regress/regress-166379.js +39 -0
- data/vendor/v8/test/mjsunit/regress/regress-166553.js +33 -0
- data/vendor/v8/test/mjsunit/regress/regress-1692.js +1 -1
- data/vendor/v8/test/mjsunit/regress/regress-171641.js +40 -0
- data/vendor/v8/test/mjsunit/regress/regress-1980.js +1 -1
- data/vendor/v8/test/mjsunit/regress/regress-2073.js +99 -0
- data/vendor/v8/test/mjsunit/regress/regress-2119.js +36 -0
- data/vendor/v8/test/mjsunit/regress/regress-2156.js +39 -0
- data/vendor/v8/test/mjsunit/regress/regress-2163.js +70 -0
- data/vendor/v8/test/mjsunit/regress/regress-2170.js +58 -0
- data/vendor/v8/test/mjsunit/regress/regress-2172.js +35 -0
- data/vendor/v8/test/mjsunit/regress/regress-2185-2.js +145 -0
- data/vendor/v8/test/mjsunit/regress/regress-2185.js +38 -0
- data/vendor/v8/test/mjsunit/regress/regress-2186.js +49 -0
- data/vendor/v8/test/mjsunit/regress/regress-2193.js +58 -0
- data/vendor/v8/test/mjsunit/regress/regress-2219.js +32 -0
- data/vendor/v8/test/mjsunit/regress/regress-2225.js +65 -0
- data/vendor/v8/test/mjsunit/regress/regress-2226.js +36 -0
- data/vendor/v8/test/mjsunit/regress/regress-2234.js +41 -0
- data/vendor/v8/test/mjsunit/regress/regress-2243.js +31 -0
- data/vendor/v8/test/mjsunit/regress/regress-2249.js +33 -0
- data/vendor/v8/test/mjsunit/regress/regress-2250.js +68 -0
- data/vendor/v8/test/mjsunit/regress/regress-2261.js +113 -0
- data/vendor/v8/test/mjsunit/regress/regress-2263.js +30 -0
- data/vendor/v8/test/mjsunit/regress/regress-2284.js +32 -0
- data/vendor/v8/test/mjsunit/regress/regress-2285.js +32 -0
- data/vendor/v8/test/mjsunit/regress/regress-2286.js +32 -0
- data/vendor/v8/test/mjsunit/regress/regress-2289.js +34 -0
- data/vendor/v8/test/mjsunit/regress/regress-2291.js +36 -0
- data/vendor/v8/test/mjsunit/regress/regress-2294.js +70 -0
- data/vendor/v8/test/mjsunit/regress/regress-2296.js +40 -0
- data/vendor/v8/test/mjsunit/regress/regress-2315.js +40 -0
- data/vendor/v8/test/mjsunit/regress/regress-2318.js +66 -0
- data/vendor/v8/test/mjsunit/regress/regress-2322.js +36 -0
- data/vendor/v8/test/mjsunit/regress/regress-2326.js +54 -0
- data/vendor/v8/test/mjsunit/regress/regress-2336.js +53 -0
- data/vendor/v8/test/mjsunit/regress/regress-2339.js +59 -0
- data/vendor/v8/test/mjsunit/regress/regress-2346.js +123 -0
- data/vendor/v8/test/mjsunit/regress/regress-2373.js +29 -0
- data/vendor/v8/test/mjsunit/regress/regress-2374.js +33 -0
- data/vendor/v8/test/mjsunit/regress/regress-2398.js +41 -0
- data/vendor/v8/test/mjsunit/regress/regress-2410.js +36 -0
- data/vendor/v8/test/mjsunit/regress/regress-2416.js +75 -0
- data/vendor/v8/test/mjsunit/regress/regress-2419.js +37 -0
- data/vendor/v8/test/mjsunit/regress/regress-2433.js +36 -0
- data/vendor/v8/test/mjsunit/regress/regress-2437.js +156 -0
- data/vendor/v8/test/mjsunit/regress/regress-2438.js +52 -0
- data/vendor/v8/test/mjsunit/regress/regress-2443.js +129 -0
- data/vendor/v8/test/mjsunit/regress/regress-2444.js +120 -0
- data/vendor/v8/test/mjsunit/regress/regress-2489.js +50 -0
- data/vendor/v8/test/mjsunit/regress/regress-2499.js +40 -0
- data/vendor/v8/test/mjsunit/regress/regress-334.js +1 -1
- data/vendor/v8/test/mjsunit/regress/regress-492.js +39 -1
- data/vendor/v8/test/mjsunit/regress/regress-builtin-array-op.js +38 -0
- data/vendor/v8/test/mjsunit/regress/regress-cnlt-elements.js +43 -0
- data/vendor/v8/test/mjsunit/regress/regress-cnlt-enum-indices.js +45 -0
- data/vendor/v8/test/mjsunit/regress/regress-cntl-descriptors-enum.js +46 -0
- data/vendor/v8/test/mjsunit/regress/regress-convert-enum.js +60 -0
- data/vendor/v8/test/mjsunit/regress/regress-convert-enum2.js +46 -0
- data/vendor/v8/test/mjsunit/regress/regress-convert-transition.js +40 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-119926.js +3 -1
- data/vendor/v8/test/mjsunit/regress/regress-crbug-125148.js +90 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-134055.js +63 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-134609.js +59 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-135008.js +45 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-135066.js +55 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-137689.js +47 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-138887.js +48 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-140083.js +44 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-142087.js +38 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-142218.js +44 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-145961.js +39 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-146910.js +33 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-147475.js +48 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-148376.js +35 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-150545.js +53 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-150729.js +39 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-157019.js +54 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-157520.js +38 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-158185.js +39 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-160010.js +35 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-162085.js +71 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-168545.js +34 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-170856.js +33 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-172345.js +34 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-173974.js +36 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-18639.js +9 -5
- data/vendor/v8/test/mjsunit/regress/regress-debug-code-recompilation.js +2 -1
- data/vendor/v8/test/mjsunit/regress/regress-deep-proto.js +45 -0
- data/vendor/v8/test/mjsunit/regress/regress-delete-empty-double.js +40 -0
- data/vendor/v8/test/mjsunit/regress/regress-iteration-order.js +42 -0
- data/vendor/v8/test/mjsunit/regress/regress-json-stringify-gc.js +41 -0
- data/vendor/v8/test/mjsunit/regress/regress-latin-1.js +78 -0
- data/vendor/v8/test/mjsunit/regress/regress-load-elements.js +49 -0
- data/vendor/v8/test/mjsunit/regress/regress-observe-empty-double-array.js +38 -0
- data/vendor/v8/test/mjsunit/regress/regress-undefined-store-keyed-fast-element.js +37 -0
- data/vendor/v8/test/mjsunit/shift-for-integer-div.js +59 -0
- data/vendor/v8/test/mjsunit/stack-traces-gc.js +119 -0
- data/vendor/v8/test/mjsunit/stack-traces-overflow.js +122 -0
- data/vendor/v8/test/mjsunit/stack-traces.js +39 -1
- data/vendor/v8/test/mjsunit/str-to-num.js +7 -2
- data/vendor/v8/test/mjsunit/strict-mode.js +36 -11
- data/vendor/v8/test/mjsunit/string-charcodeat.js +3 -0
- data/vendor/v8/test/mjsunit/string-natives.js +72 -0
- data/vendor/v8/test/mjsunit/string-split.js +17 -0
- data/vendor/v8/test/mjsunit/testcfg.py +76 -6
- data/vendor/v8/test/mjsunit/tools/tickprocessor.js +4 -1
- data/vendor/v8/test/mjsunit/try-finally-continue.js +72 -0
- data/vendor/v8/test/mjsunit/typed-array-slice.js +61 -0
- data/vendor/v8/test/mjsunit/unbox-double-arrays.js +2 -0
- data/vendor/v8/test/mjsunit/uri.js +12 -0
- data/vendor/v8/test/mjsunit/with-readonly.js +4 -2
- data/vendor/v8/test/mozilla/mozilla.status +19 -113
- data/vendor/v8/test/mozilla/testcfg.py +122 -3
- data/vendor/v8/test/preparser/preparser.status +5 -0
- data/vendor/v8/test/preparser/strict-identifiers.pyt +1 -1
- data/vendor/v8/test/preparser/testcfg.py +101 -5
- data/vendor/v8/test/sputnik/sputnik.status +1 -1
- data/vendor/v8/test/sputnik/testcfg.py +5 -0
- data/vendor/v8/test/test262/README +2 -2
- data/vendor/v8/test/test262/test262.status +13 -36
- data/vendor/v8/test/test262/testcfg.py +102 -8
- data/vendor/v8/tools/android-build.sh +0 -0
- data/vendor/v8/tools/android-ll-prof.sh +69 -0
- data/vendor/v8/tools/android-run.py +109 -0
- data/vendor/v8/tools/android-sync.sh +105 -0
- data/vendor/v8/tools/bash-completion.sh +0 -0
- data/vendor/v8/tools/check-static-initializers.sh +0 -0
- data/vendor/v8/tools/common-includes.sh +15 -22
- data/vendor/v8/tools/disasm.py +4 -4
- data/vendor/v8/tools/fuzz-harness.sh +0 -0
- data/vendor/v8/tools/gen-postmortem-metadata.py +6 -8
- data/vendor/v8/tools/grokdump.py +404 -129
- data/vendor/v8/tools/gyp/v8.gyp +105 -43
- data/vendor/v8/tools/linux-tick-processor +5 -5
- data/vendor/v8/tools/ll_prof.py +75 -15
- data/vendor/v8/tools/merge-to-branch.sh +2 -2
- data/vendor/v8/tools/plot-timer-events +70 -0
- data/vendor/v8/tools/plot-timer-events.js +510 -0
- data/vendor/v8/tools/presubmit.py +1 -0
- data/vendor/v8/tools/push-to-trunk.sh +14 -4
- data/vendor/v8/tools/run-llprof.sh +69 -0
- data/vendor/v8/tools/run-tests.py +372 -0
- data/vendor/v8/tools/run-valgrind.py +1 -1
- data/vendor/v8/tools/status-file-converter.py +39 -0
- data/vendor/v8/tools/test-server.py +224 -0
- data/vendor/v8/tools/test-wrapper-gypbuild.py +13 -16
- data/vendor/v8/tools/test.py +10 -19
- data/vendor/v8/tools/testrunner/README +174 -0
- data/vendor/v8/tools/testrunner/__init__.py +26 -0
- data/vendor/v8/tools/testrunner/local/__init__.py +26 -0
- data/vendor/v8/tools/testrunner/local/commands.py +153 -0
- data/vendor/v8/tools/testrunner/local/execution.py +182 -0
- data/vendor/v8/tools/testrunner/local/old_statusfile.py +460 -0
- data/vendor/v8/tools/testrunner/local/progress.py +238 -0
- data/vendor/v8/tools/testrunner/local/statusfile.py +145 -0
- data/vendor/v8/tools/testrunner/local/testsuite.py +187 -0
- data/vendor/v8/tools/testrunner/local/utils.py +108 -0
- data/vendor/v8/tools/testrunner/local/verbose.py +99 -0
- data/vendor/v8/tools/testrunner/network/__init__.py +26 -0
- data/vendor/v8/tools/testrunner/network/distro.py +90 -0
- data/vendor/v8/tools/testrunner/network/endpoint.py +124 -0
- data/vendor/v8/tools/testrunner/network/network_execution.py +253 -0
- data/vendor/v8/tools/testrunner/network/perfdata.py +120 -0
- data/vendor/v8/tools/testrunner/objects/__init__.py +26 -0
- data/vendor/v8/tools/testrunner/objects/context.py +50 -0
- data/vendor/v8/tools/testrunner/objects/output.py +60 -0
- data/vendor/v8/tools/testrunner/objects/peer.py +80 -0
- data/vendor/v8/tools/testrunner/objects/testcase.py +83 -0
- data/vendor/v8/tools/testrunner/objects/workpacket.py +90 -0
- data/vendor/v8/tools/testrunner/server/__init__.py +26 -0
- data/vendor/v8/tools/testrunner/server/compression.py +111 -0
- data/vendor/v8/tools/testrunner/server/constants.py +51 -0
- data/vendor/v8/tools/testrunner/server/daemon.py +147 -0
- data/vendor/v8/tools/testrunner/server/local_handler.py +119 -0
- data/vendor/v8/tools/testrunner/server/main.py +245 -0
- data/vendor/v8/tools/testrunner/server/presence_handler.py +120 -0
- data/vendor/v8/tools/testrunner/server/signatures.py +63 -0
- data/vendor/v8/tools/testrunner/server/status_handler.py +112 -0
- data/vendor/v8/tools/testrunner/server/work_handler.py +150 -0
- data/vendor/v8/tools/tick-processor.html +168 -0
- data/vendor/v8/tools/tickprocessor-driver.js +5 -3
- data/vendor/v8/tools/tickprocessor.js +58 -15
- metadata +534 -30
- data/patches/add-freebsd9-and-freebsd10-to-gyp-GetFlavor.patch +0 -11
- data/patches/do-not-imply-vfp3-and-armv7.patch +0 -44
- data/patches/fPIC-on-x64.patch +0 -14
- data/vendor/v8/src/liveobjectlist-inl.h +0 -126
- data/vendor/v8/src/liveobjectlist.cc +0 -2631
- data/vendor/v8/src/liveobjectlist.h +0 -319
- data/vendor/v8/test/mjsunit/mul-exhaustive.js +0 -4629
- data/vendor/v8/test/mjsunit/numops-fuzz.js +0 -4609
- data/vendor/v8/test/mjsunit/regress/regress-1969.js +0 -5045
data/vendor/v8/src/log.h
CHANGED
@@ -74,8 +74,8 @@ namespace internal {
 class LogMessageBuilder;
 class Profiler;
 class Semaphore;
-class SlidingStateWindow;
 class Ticker;
+class Isolate;
 
 #undef LOG
 #define LOG(isolate, Call) \
@@ -86,6 +86,15 @@ class Ticker;
     logger->Call; \
   } while (false)
 
+#define LOG_CODE_EVENT(isolate, Call) \
+  do { \
+    v8::internal::Logger* logger = \
+        (isolate)->logger(); \
+    if (logger->is_logging_code_events()) \
+      logger->Call; \
+  } while (false)
+
+
 #define LOG_EVENTS_AND_TAGS_LIST(V) \
   V(CODE_CREATION_EVENT, "code-creation") \
   V(CODE_MOVE_EVENT, "code-move") \
@@ -118,10 +127,10 @@ class Ticker;
   V(EVAL_TAG, "Eval") \
   V(FUNCTION_TAG, "Function") \
   V(KEYED_LOAD_IC_TAG, "KeyedLoadIC") \
-  V(
+  V(KEYED_LOAD_POLYMORPHIC_IC_TAG, "KeyedLoadPolymorphicIC") \
   V(KEYED_EXTERNAL_ARRAY_LOAD_IC_TAG, "KeyedExternalArrayLoadIC") \
   V(KEYED_STORE_IC_TAG, "KeyedStoreIC") \
-  V(
+  V(KEYED_STORE_POLYMORPHIC_IC_TAG, "KeyedStorePolymorphicIC") \
   V(KEYED_EXTERNAL_ARRAY_STORE_IC_TAG, "KeyedExternalArrayStoreIC") \
   V(LAZY_COMPILE_TAG, "LazyCompile") \
   V(LOAD_IC_TAG, "LoadIC") \
@@ -151,6 +160,10 @@ class Logger {
   // Acquires resources for logging if the right flags are set.
   bool SetUp();
 
+  // Sets the current code event handler.
+  void SetCodeEventHandler(uint32_t options,
+                           JitCodeEventHandler event_handler);
+
   void EnsureTickerStarted();
   void EnsureTickerStopped();
 
@@ -161,9 +174,6 @@ class Logger {
   // leaving the file open.
   FILE* TearDown();
 
-  // Enable the computation of a sliding window of states.
-  void EnableSlidingStateWindow();
-
   // Emits an event with a string value -> (name, value).
   void StringEvent(const char* name, const char* value);
 
@@ -262,6 +272,38 @@ class Logger {
                            uintptr_t start,
                            uintptr_t end);
 
+  // ==== Events logged by --log-timer-events. ====
+  enum StartEnd { START, END };
+
+  void TimerEvent(StartEnd se, const char* name);
+
+  static void EnterExternal();
+  static void LeaveExternal();
+
+  class TimerEventScope {
+   public:
+    TimerEventScope(Isolate* isolate, const char* name)
+        : isolate_(isolate), name_(name) {
+      if (FLAG_log_internal_timer_events) LogTimerEvent(START);
+    }
+
+    ~TimerEventScope() {
+      if (FLAG_log_internal_timer_events) LogTimerEvent(END);
+    }
+
+    void LogTimerEvent(StartEnd se);
+
+    static const char* v8_recompile_synchronous;
+    static const char* v8_recompile_parallel;
+    static const char* v8_compile_full_code;
+    static const char* v8_execute;
+    static const char* v8_external;
+
+   private:
+    Isolate* isolate_;
+    const char* name_;
+  };
+
   // ==== Events logged by --log-regexp ====
   // Regexp compilation and execution events.
 
@@ -274,6 +316,10 @@ class Logger {
     return logging_nesting_ > 0;
   }
 
+  bool is_logging_code_events() {
+    return is_logging() || code_event_handler_ != NULL;
+  }
+
   // Pause/Resume collection of profiling data.
   // When data collection is paused, CPU Tick events are discarded until
   // data collection is Resumed.
@@ -312,6 +358,11 @@ class Logger {
   Logger();
   ~Logger();
 
+  // Issue code notifications.
+  void IssueCodeAddedEvent(Code* code, const char* name, size_t name_len);
+  void IssueCodeMovedEvent(Address from, Address to);
+  void IssueCodeRemovedEvent(Address from);
+
   // Emits the profiler's first message.
   void ProfilerBeginEvent();
 
@@ -379,10 +430,6 @@ class Logger {
   // of samples.
   Profiler* profiler_;
 
-  // SlidingStateWindow instance keeping a sliding window of the most
-  // recent VM states.
-  SlidingStateWindow* sliding_state_window_;
-
   // An array of log events names.
   const char* const* log_events_;
 
@@ -393,7 +440,6 @@ class Logger {
   friend class LogMessageBuilder;
   friend class TimeLog;
   friend class Profiler;
-  friend class SlidingStateWindow;
   friend class StackTracer;
   friend class VMState;
 
@@ -413,6 +459,9 @@ class Logger {
   // 'true' between SetUp() and TearDown().
   bool is_initialized_;
 
+  // The code event handler - if any.
+  JitCodeEventHandler code_event_handler_;
+
   // Support for 'incremental addresses' in compressed logs:
   //  LogMessageBuilder::AppendAddress(Address addr)
   Address last_address_;
@@ -424,6 +473,8 @@ class Logger {
   //  Logger::FunctionCreateEvent(...)
   Address prev_code_;
 
+  int64_t epoch_;
+
   friend class CpuProfiler;
 };
 
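The Logger additions above (SetCodeEventHandler, is_logging_code_events, and the IssueCodeAdded/Moved/RemovedEvent notifications) are the internal side of the JIT code-event API this V8 line exposes in include/v8.h via V8::SetJitCodeEventHandler. A minimal consumer sketch, assuming the 3.16-era public declarations (JitCodeEvent, kJitCodeEventEnumExisting); the handler name is illustrative:

#include <cstdio>
#include <v8.h>

// Called by V8 whenever JIT code is added, moved, or removed.
static void HandleJitCodeEvent(const v8::JitCodeEvent* event) {
  switch (event->type) {
    case v8::JitCodeEvent::CODE_ADDED:
      // name.str is not NUL-terminated, so print it with an explicit length.
      std::printf("added %.*s: %zu bytes at %p\n",
                  static_cast<int>(event->name.len), event->name.str,
                  event->code_len, event->code_start);
      break;
    case v8::JitCodeEvent::CODE_MOVED:
      std::printf("moved %p -> %p\n", event->code_start,
                  event->new_code_start);
      break;
    case v8::JitCodeEvent::CODE_REMOVED:
      std::printf("removed %p\n", event->code_start);
      break;
    default:
      break;
  }
}

int main() {
  // kJitCodeEventEnumExisting additionally replays CODE_ADDED events for
  // code objects that already exist at registration time.
  v8::V8::SetJitCodeEventHandler(v8::kJitCodeEventEnumExisting,
                                 HandleJitCodeEvent);
  // ... create a context and run scripts; events fire as code is compiled.
  return 0;
}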
data/vendor/v8/src/macro-assembler.h
CHANGED
@@ -36,6 +36,23 @@ enum InvokeFlag {
 };
 
 
+// Flags used for the AllocateInNewSpace functions.
+enum AllocationFlags {
+  // No special flags.
+  NO_ALLOCATION_FLAGS = 0,
+  // Return the pointer to the allocated already tagged as a heap object.
+  TAG_OBJECT = 1 << 0,
+  // The content of the result register already contains the allocation top in
+  // new space.
+  RESULT_CONTAINS_TOP = 1 << 1,
+  // Specify that the requested size of the space to allocate is specified in
+  // words instead of bytes.
+  SIZE_IN_WORDS = 1 << 2,
+  // Align the allocation to a multiple of kDoubleSize
+  DOUBLE_ALIGNMENT = 1 << 3
+};
+
+
 // Invalid depth in prototype chain.
 const int kInvalidProtoDepth = -1;
 
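AllocationFlags is a bit mask, so macro-assembler call sites combine members with bitwise OR and test them with bitwise AND. A hypothetical standalone fragment (not taken from the diff) showing the intended usage; the enum is copied here only for illustration:

// Standalone copy of the new enum; in the tree it lives in
// src/macro-assembler.h.
enum AllocationFlags {
  NO_ALLOCATION_FLAGS = 0,
  TAG_OBJECT = 1 << 0,
  RESULT_CONTAINS_TOP = 1 << 1,
  SIZE_IN_WORDS = 1 << 2,
  DOUBLE_ALIGNMENT = 1 << 3
};

int main() {
  // Hypothetical call-site pattern: request a tagged, double-aligned block.
  AllocationFlags flags =
      static_cast<AllocationFlags>(TAG_OBJECT | DOUBLE_ALIGNMENT);
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    // The assembler rounds the allocation top up to kDoubleSize first.
  }
  if ((flags & SIZE_IN_WORDS) != 0) {
    // The size operand counts words; scale by the pointer size when bumping.
  }
  return 0;
}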
data/vendor/v8/src/mark-compact-inl.h
CHANGED
@@ -52,32 +52,15 @@ void MarkCompactCollector::SetFlags(int flags) {
 }
 
 
-bool MarkCompactCollector::MarkObjectAndPush(HeapObject* obj) {
-  if (MarkObjectWithoutPush(obj)) {
-    marking_deque_.PushBlack(obj);
-    return true;
-  }
-  return false;
-}
-
-
 void MarkCompactCollector::MarkObject(HeapObject* obj, MarkBit mark_bit) {
   ASSERT(Marking::MarkBitFrom(obj) == mark_bit);
   if (!mark_bit.Get()) {
     mark_bit.Set();
     MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
-
-
-
-
-
-bool MarkCompactCollector::MarkObjectWithoutPush(HeapObject* obj) {
-  MarkBit mark_bit = Marking::MarkBitFrom(obj);
-  if (!mark_bit.Get()) {
-    SetMark(obj, mark_bit);
-    return true;
+    ASSERT(IsMarked(obj));
+    ASSERT(HEAP->Contains(obj));
+    marking_deque_.PushBlack(obj);
   }
-  return false;
 }
 
 
@@ -86,9 +69,6 @@ void MarkCompactCollector::SetMark(HeapObject* obj, MarkBit mark_bit) {
   ASSERT(Marking::MarkBitFrom(obj) == mark_bit);
   mark_bit.Set();
   MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
-  if (obj->IsMap()) {
-    heap_->ClearCacheOnMap(Map::cast(obj));
-  }
 }
 
 
@@ -36,11 +36,12 @@
|
|
36
36
|
#include "heap-profiler.h"
|
37
37
|
#include "ic-inl.h"
|
38
38
|
#include "incremental-marking.h"
|
39
|
-
#include "liveobjectlist-inl.h"
|
40
39
|
#include "mark-compact.h"
|
40
|
+
#include "marking-thread.h"
|
41
41
|
#include "objects-visiting.h"
|
42
42
|
#include "objects-visiting-inl.h"
|
43
43
|
#include "stub-cache.h"
|
44
|
+
#include "sweeper-thread.h"
|
44
45
|
|
45
46
|
namespace v8 {
|
46
47
|
namespace internal {
|
@@ -62,28 +63,37 @@ MarkCompactCollector::MarkCompactCollector() : // NOLINT
|
|
62
63
|
sweep_precisely_(false),
|
63
64
|
reduce_memory_footprint_(false),
|
64
65
|
abort_incremental_marking_(false),
|
66
|
+
marking_parity_(ODD_MARKING_PARITY),
|
65
67
|
compacting_(false),
|
66
68
|
was_marked_incrementally_(false),
|
67
|
-
flush_monomorphic_ics_(false),
|
68
69
|
tracer_(NULL),
|
69
70
|
migration_slots_buffer_(NULL),
|
70
71
|
heap_(NULL),
|
71
72
|
code_flusher_(NULL),
|
72
|
-
encountered_weak_maps_(NULL)
|
73
|
-
marker_(this, this) { }
|
73
|
+
encountered_weak_maps_(NULL) { }
|
74
74
|
|
75
75
|
|
76
|
-
#ifdef
|
76
|
+
#ifdef VERIFY_HEAP
|
77
77
|
class VerifyMarkingVisitor: public ObjectVisitor {
|
78
78
|
public:
|
79
79
|
void VisitPointers(Object** start, Object** end) {
|
80
80
|
for (Object** current = start; current < end; current++) {
|
81
81
|
if ((*current)->IsHeapObject()) {
|
82
82
|
HeapObject* object = HeapObject::cast(*current);
|
83
|
-
|
83
|
+
CHECK(HEAP->mark_compact_collector()->IsMarked(object));
|
84
84
|
}
|
85
85
|
}
|
86
86
|
}
|
87
|
+
|
88
|
+
void VisitEmbeddedPointer(RelocInfo* rinfo) {
|
89
|
+
ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
|
90
|
+
if (!FLAG_weak_embedded_maps_in_optimized_code || !FLAG_collect_maps ||
|
91
|
+
rinfo->host()->kind() != Code::OPTIMIZED_FUNCTION ||
|
92
|
+
!rinfo->target_object()->IsMap() ||
|
93
|
+
!Map::cast(rinfo->target_object())->CanTransition()) {
|
94
|
+
VisitPointer(rinfo->target_object_address());
|
95
|
+
}
|
96
|
+
}
|
87
97
|
};
|
88
98
|
|
89
99
|
|
@@ -97,7 +107,7 @@ static void VerifyMarking(Address bottom, Address top) {
|
|
97
107
|
current += kPointerSize) {
|
98
108
|
object = HeapObject::FromAddress(current);
|
99
109
|
if (MarkCompactCollector::IsMarked(object)) {
|
100
|
-
|
110
|
+
CHECK(current >= next_object_must_be_here_or_later);
|
101
111
|
object->Iterate(&visitor);
|
102
112
|
next_object_must_be_here_or_later = current + object->Size();
|
103
113
|
}
|
@@ -110,12 +120,12 @@ static void VerifyMarking(NewSpace* space) {
|
|
110
120
|
NewSpacePageIterator it(space->bottom(), end);
|
111
121
|
// The bottom position is at the start of its page. Allows us to use
|
112
122
|
// page->area_start() as start of range on all pages.
|
113
|
-
|
123
|
+
CHECK_EQ(space->bottom(),
|
114
124
|
NewSpacePage::FromAddress(space->bottom())->area_start());
|
115
125
|
while (it.has_next()) {
|
116
126
|
NewSpacePage* page = it.next();
|
117
127
|
Address limit = it.has_next() ? page->area_end() : end;
|
118
|
-
|
128
|
+
CHECK(limit == end || !page->Contains(end));
|
119
129
|
VerifyMarking(page->area_start(), limit);
|
120
130
|
}
|
121
131
|
}
|
@@ -175,7 +185,7 @@ static void VerifyEvacuation(Address bottom, Address top) {
|
|
175
185
|
current += kPointerSize) {
|
176
186
|
object = HeapObject::FromAddress(current);
|
177
187
|
if (MarkCompactCollector::IsMarked(object)) {
|
178
|
-
|
188
|
+
CHECK(current >= next_object_must_be_here_or_later);
|
179
189
|
object->Iterate(&visitor);
|
180
190
|
next_object_must_be_here_or_later = current + object->Size();
|
181
191
|
}
|
@@ -191,7 +201,7 @@ static void VerifyEvacuation(NewSpace* space) {
|
|
191
201
|
NewSpacePage* page = it.next();
|
192
202
|
Address current = page->area_start();
|
193
203
|
Address limit = it.has_next() ? page->area_end() : space->top();
|
194
|
-
|
204
|
+
CHECK(limit == space->top() || !page->Contains(space->top()));
|
195
205
|
while (current < limit) {
|
196
206
|
HeapObject* object = HeapObject::FromAddress(current);
|
197
207
|
object->Iterate(&visitor);
|
@@ -223,6 +233,101 @@ static void VerifyEvacuation(Heap* heap) {
|
|
223
233
|
VerifyEvacuationVisitor visitor;
|
224
234
|
heap->IterateStrongRoots(&visitor, VISIT_ALL);
|
225
235
|
}
|
236
|
+
#endif // VERIFY_HEAP
|
237
|
+
|
238
|
+
|
239
|
+
#ifdef DEBUG
|
240
|
+
class VerifyNativeContextSeparationVisitor: public ObjectVisitor {
|
241
|
+
public:
|
242
|
+
VerifyNativeContextSeparationVisitor() : current_native_context_(NULL) {}
|
243
|
+
|
244
|
+
void VisitPointers(Object** start, Object** end) {
|
245
|
+
for (Object** current = start; current < end; current++) {
|
246
|
+
if ((*current)->IsHeapObject()) {
|
247
|
+
HeapObject* object = HeapObject::cast(*current);
|
248
|
+
if (object->IsString()) continue;
|
249
|
+
switch (object->map()->instance_type()) {
|
250
|
+
case JS_FUNCTION_TYPE:
|
251
|
+
CheckContext(JSFunction::cast(object)->context());
|
252
|
+
break;
|
253
|
+
case JS_GLOBAL_PROXY_TYPE:
|
254
|
+
CheckContext(JSGlobalProxy::cast(object)->native_context());
|
255
|
+
break;
|
256
|
+
case JS_GLOBAL_OBJECT_TYPE:
|
257
|
+
case JS_BUILTINS_OBJECT_TYPE:
|
258
|
+
CheckContext(GlobalObject::cast(object)->native_context());
|
259
|
+
break;
|
260
|
+
case JS_ARRAY_TYPE:
|
261
|
+
case JS_DATE_TYPE:
|
262
|
+
case JS_OBJECT_TYPE:
|
263
|
+
case JS_REGEXP_TYPE:
|
264
|
+
VisitPointer(HeapObject::RawField(object, JSObject::kMapOffset));
|
265
|
+
break;
|
266
|
+
case MAP_TYPE:
|
267
|
+
VisitPointer(HeapObject::RawField(object, Map::kPrototypeOffset));
|
268
|
+
VisitPointer(HeapObject::RawField(object, Map::kConstructorOffset));
|
269
|
+
break;
|
270
|
+
case FIXED_ARRAY_TYPE:
|
271
|
+
if (object->IsContext()) {
|
272
|
+
CheckContext(object);
|
273
|
+
} else {
|
274
|
+
FixedArray* array = FixedArray::cast(object);
|
275
|
+
int length = array->length();
|
276
|
+
// Set array length to zero to prevent cycles while iterating
|
277
|
+
// over array bodies, this is easier than intrusive marking.
|
278
|
+
array->set_length(0);
|
279
|
+
array->IterateBody(
|
280
|
+
FIXED_ARRAY_TYPE, FixedArray::SizeFor(length), this);
|
281
|
+
array->set_length(length);
|
282
|
+
}
|
283
|
+
break;
|
284
|
+
case JS_GLOBAL_PROPERTY_CELL_TYPE:
|
285
|
+
case JS_PROXY_TYPE:
|
286
|
+
case JS_VALUE_TYPE:
|
287
|
+
case TYPE_FEEDBACK_INFO_TYPE:
|
288
|
+
object->Iterate(this);
|
289
|
+
break;
|
290
|
+
case ACCESSOR_INFO_TYPE:
|
291
|
+
case BYTE_ARRAY_TYPE:
|
292
|
+
case CALL_HANDLER_INFO_TYPE:
|
293
|
+
case CODE_TYPE:
|
294
|
+
case FIXED_DOUBLE_ARRAY_TYPE:
|
295
|
+
case HEAP_NUMBER_TYPE:
|
296
|
+
case INTERCEPTOR_INFO_TYPE:
|
297
|
+
case ODDBALL_TYPE:
|
298
|
+
case SCRIPT_TYPE:
|
299
|
+
case SHARED_FUNCTION_INFO_TYPE:
|
300
|
+
break;
|
301
|
+
default:
|
302
|
+
UNREACHABLE();
|
303
|
+
}
|
304
|
+
}
|
305
|
+
}
|
306
|
+
}
|
307
|
+
|
308
|
+
private:
|
309
|
+
void CheckContext(Object* context) {
|
310
|
+
if (!context->IsContext()) return;
|
311
|
+
Context* native_context = Context::cast(context)->native_context();
|
312
|
+
if (current_native_context_ == NULL) {
|
313
|
+
current_native_context_ = native_context;
|
314
|
+
} else {
|
315
|
+
CHECK_EQ(current_native_context_, native_context);
|
316
|
+
}
|
317
|
+
}
|
318
|
+
|
319
|
+
Context* current_native_context_;
|
320
|
+
};
|
321
|
+
|
322
|
+
|
323
|
+
static void VerifyNativeContextSeparation(Heap* heap) {
|
324
|
+
HeapObjectIterator it(heap->code_space());
|
325
|
+
|
326
|
+
for (Object* object = it.Next(); object != NULL; object = it.Next()) {
|
327
|
+
VerifyNativeContextSeparationVisitor visitor;
|
328
|
+
Code::cast(object)->CodeIterateBody(&visitor);
|
329
|
+
}
|
330
|
+
}
|
226
331
|
#endif
|
227
332
|
|
228
333
|
|
@@ -248,10 +353,17 @@ bool MarkCompactCollector::StartCompaction(CompactionMode mode) {
|
|
248
353
|
if (!compacting_) {
|
249
354
|
ASSERT(evacuation_candidates_.length() == 0);
|
250
355
|
|
356
|
+
#ifdef ENABLE_GDB_JIT_INTERFACE
|
357
|
+
// If GDBJIT interface is active disable compaction.
|
358
|
+
if (FLAG_gdbjit) return false;
|
359
|
+
#endif
|
360
|
+
|
251
361
|
CollectEvacuationCandidates(heap()->old_pointer_space());
|
252
362
|
CollectEvacuationCandidates(heap()->old_data_space());
|
253
363
|
|
254
|
-
if (FLAG_compact_code_space &&
|
364
|
+
if (FLAG_compact_code_space &&
|
365
|
+
(mode == NON_INCREMENTAL_COMPACTION ||
|
366
|
+
FLAG_incremental_code_compaction)) {
|
255
367
|
CollectEvacuationCandidates(heap()->code_space());
|
256
368
|
} else if (FLAG_trace_fragmentation) {
|
257
369
|
TraceFragmentation(heap()->code_space());
|
@@ -282,11 +394,11 @@ void MarkCompactCollector::CollectGarbage() {
   MarkLiveObjects();
   ASSERT(heap_->incremental_marking()->IsStopped());
 
-  if (FLAG_collect_maps)
+  if (FLAG_collect_maps) ClearNonLiveReferences();
 
   ClearWeakMaps();
 
-#ifdef
+#ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
     VerifyMarking(heap_);
   }
@@ -296,13 +408,33 @@ void MarkCompactCollector::CollectGarbage() {
 
   if (!FLAG_collect_maps) ReattachInitialMaps();
 
+#ifdef DEBUG
+  if (FLAG_verify_native_context_separation) {
+    VerifyNativeContextSeparation(heap_);
+  }
+#endif
+
+#ifdef VERIFY_HEAP
+  if (FLAG_collect_maps && FLAG_weak_embedded_maps_in_optimized_code &&
+      heap()->weak_embedded_maps_verification_enabled()) {
+    VerifyWeakEmbeddedMapsInOptimizedCode();
+  }
+#endif
+
   Finish();
 
+  if (marking_parity_ == EVEN_MARKING_PARITY) {
+    marking_parity_ = ODD_MARKING_PARITY;
+  } else {
+    ASSERT(marking_parity_ == ODD_MARKING_PARITY);
+    marking_parity_ = EVEN_MARKING_PARITY;
+  }
+
   tracer_ = NULL;
 }
 
 
-#ifdef
+#ifdef VERIFY_HEAP
 void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) {
   PageIterator it(space);
 
@@ -313,6 +445,7 @@ void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) {
   }
 }
 
+
 void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) {
   NewSpacePageIterator it(space->bottom(), space->top());
 
@@ -323,6 +456,7 @@ void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) {
   }
 }
 
+
 void MarkCompactCollector::VerifyMarkbitsAreClean() {
   VerifyMarkbitsAreClean(heap_->old_pointer_space());
   VerifyMarkbitsAreClean(heap_->old_data_space());
@@ -334,11 +468,24 @@ void MarkCompactCollector::VerifyMarkbitsAreClean() {
   LargeObjectIterator it(heap_->lo_space());
   for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
     MarkBit mark_bit = Marking::MarkBitFrom(obj);
-
-
+    CHECK(Marking::IsWhite(mark_bit));
+    CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes());
   }
 }
-
+
+
+void MarkCompactCollector::VerifyWeakEmbeddedMapsInOptimizedCode() {
+  HeapObjectIterator code_iterator(heap()->code_space());
+  for (HeapObject* obj = code_iterator.Next();
+       obj != NULL;
+       obj = code_iterator.Next()) {
+    Code* code = Code::cast(obj);
+    if (code->kind() != Code::OPTIMIZED_FUNCTION) continue;
+    if (code->marked_for_deoptimization()) continue;
+    code->VerifyEmbeddedMapsDependency();
+  }
+}
+#endif  // VERIFY_HEAP
 
 
 static void ClearMarkbitsInPagedSpace(PagedSpace* space) {
@@ -372,11 +519,67 @@ void MarkCompactCollector::ClearMarkbits() {
     MarkBit mark_bit = Marking::MarkBitFrom(obj);
     mark_bit.Clear();
     mark_bit.Next().Clear();
+    Page::FromAddress(obj->address())->ResetProgressBar();
     Page::FromAddress(obj->address())->ResetLiveBytes();
   }
 }
 
 
+void MarkCompactCollector::StartSweeperThreads() {
+  SweeperThread::set_sweeping_pending(true);
+  for (int i = 0; i < FLAG_sweeper_threads; i++) {
+    heap()->isolate()->sweeper_threads()[i]->StartSweeping();
+  }
+}
+
+
+void MarkCompactCollector::WaitUntilSweepingCompleted() {
+  if (SweeperThread::sweeping_pending()) {
+    for (int i = 0; i < FLAG_sweeper_threads; i++) {
+      heap()->isolate()->sweeper_threads()[i]->WaitForSweeperThread();
+    }
+    SweeperThread::set_sweeping_pending(false);
+    StealMemoryFromSweeperThreads(heap()->paged_space(OLD_DATA_SPACE));
+    StealMemoryFromSweeperThreads(heap()->paged_space(OLD_POINTER_SPACE));
+    heap()->FreeQueuedChunks();
+  }
+}
+
+
+intptr_t MarkCompactCollector::
+    StealMemoryFromSweeperThreads(PagedSpace* space) {
+  intptr_t freed_bytes = 0;
+  for (int i = 0; i < FLAG_sweeper_threads; i++) {
+    freed_bytes += heap()->isolate()->sweeper_threads()[i]->StealMemory(space);
+  }
+  return freed_bytes;
+}
+
+
+bool MarkCompactCollector::AreSweeperThreadsActivated() {
+  return heap()->isolate()->sweeper_threads() != NULL;
+}
+
+
+bool MarkCompactCollector::IsConcurrentSweepingInProgress() {
+  return SweeperThread::sweeping_pending();
+}
+
+
+void MarkCompactCollector::MarkInParallel() {
+  for (int i = 0; i < FLAG_marking_threads; i++) {
+    heap()->isolate()->marking_threads()[i]->StartMarking();
+  }
+}
+
+
+void MarkCompactCollector::WaitUntilMarkingCompleted() {
+  for (int i = 0; i < FLAG_marking_threads; i++) {
+    heap()->isolate()->marking_threads()[i]->WaitForMarkingThread();
+  }
+}
+
+
 bool Marking::TransferMark(Address old_start, Address new_start) {
   // This is only used when resizing an object.
   ASSERT(MemoryChunk::FromAddress(old_start) ==
@@ -500,12 +703,10 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
          space->identity() == OLD_DATA_SPACE ||
          space->identity() == CODE_SPACE);
 
+  static const int kMaxMaxEvacuationCandidates = 1000;
   int number_of_pages = space->CountTotalPages();
-
-
-  int max_evacuation_candidates = Min(
-      kMaxMaxEvacuationCandidates,
-      static_cast<int>(sqrt(static_cast<double>(number_of_pages / 2)) + 1));
+  int max_evacuation_candidates =
+      static_cast<int>(sqrt(number_of_pages / 2.0) + 1);
 
   if (FLAG_stress_compaction || FLAG_always_compact) {
     max_evacuation_candidates = kMaxMaxEvacuationCandidates;
@@ -535,25 +736,37 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
   intptr_t over_reserved = reserved - space->SizeOfObjects();
   static const intptr_t kFreenessThreshold = 50;
 
-  if (over_reserved >=
-
+  if (reduce_memory_footprint_ && over_reserved >= space->AreaSize()) {
+    // If reduction of memory footprint was requested, we are aggressive
+    // about choosing pages to free. We expect that half-empty pages
+    // are easier to compact so slightly bump the limit.
     mode = REDUCE_MEMORY_FOOTPRINT;
-
-    // We expect that empty pages are easier to compact so slightly bump the
-    // limit.
     max_evacuation_candidates += 2;
+  }
 
-
-
-
-
-
+
+  if (over_reserved > reserved / 3 && over_reserved >= 2 * space->AreaSize()) {
+    // If over-usage is very high (more than a third of the space), we
+    // try to free all mostly empty pages. We expect that almost empty
+    // pages are even easier to compact so bump the limit even more.
+    mode = REDUCE_MEMORY_FOOTPRINT;
+    max_evacuation_candidates *= 2;
+  }
+
+  if (FLAG_trace_fragmentation && mode == REDUCE_MEMORY_FOOTPRINT) {
+    PrintF("Estimated over reserved memory: %.1f / %.1f MB (threshold %d)\n",
+           static_cast<double>(over_reserved) / MB,
+           static_cast<double>(reserved) / MB,
+           static_cast<int>(kFreenessThreshold));
   }
 
   intptr_t estimated_release = 0;
 
   Candidate candidates[kMaxMaxEvacuationCandidates];
 
+  max_evacuation_candidates =
+      Min(kMaxMaxEvacuationCandidates, max_evacuation_candidates);
+
   int count = 0;
   int fragmentation = 0;
   Candidate* least = NULL;
@@ -566,7 +779,7 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
     p->ClearEvacuationCandidate();
 
     if (FLAG_stress_compaction) {
-      int counter = space->heap()->ms_count();
+      unsigned int counter = space->heap()->ms_count();
       uintptr_t page_number = reinterpret_cast<uintptr_t>(p) >> kPageSizeBits;
       if ((counter & 1) == (page_number & 1)) fragmentation = 1;
     } else if (mode == REDUCE_MEMORY_FOOTPRINT) {
@@ -658,12 +871,6 @@ void MarkCompactCollector::AbortCompaction() {
 void MarkCompactCollector::Prepare(GCTracer* tracer) {
   was_marked_incrementally_ = heap()->incremental_marking()->IsMarking();
 
-  // Monomorphic ICs are preserved when possible, but need to be flushed
-  // when they might be keeping a Context alive, or when the heap is about
-  // to be serialized.
-  flush_monomorphic_ics_ =
-      heap()->isolate()->context_exit_happened() || Serializer::enabled();
-
   // Rather than passing the tracer around we stash it in a static member
   // variable.
   tracer_ = tracer;
@@ -675,12 +882,10 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) {
 
   ASSERT(!FLAG_never_compact || !FLAG_always_compact);
 
-
-
-
-  compacting_collection_ = false;
+  if (AreSweeperThreadsActivated() && FLAG_concurrent_sweeping) {
+    // Instead of waiting we could also abort the sweeper threads here.
+    WaitUntilSweepingCompleted();
   }
-#endif
 
   // Clear marking bits if incremental marking is aborted.
   if (was_marked_incrementally_ && abort_incremental_marking_) {
@@ -703,7 +908,7 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) {
     space->PrepareForMarkCompact();
   }
 
-#ifdef
+#ifdef VERIFY_HEAP
   if (!was_marked_incrementally_ && FLAG_verify_heap) {
     VerifyMarkbitsAreClean();
   }
@@ -711,6 +916,14 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) {
 }
 
 
+class DeoptimizeMarkedCodeFilter : public OptimizedFunctionFilter {
+ public:
+  virtual bool TakeFunction(JSFunction* function) {
+    return function->code()->marked_for_deoptimization();
+  }
+};
+
+
 void MarkCompactCollector::Finish() {
 #ifdef DEBUG
   ASSERT(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS);
@@ -722,7 +935,8 @@ void MarkCompactCollector::Finish() {
   // objects (empty string, illegal builtin).
   heap()->isolate()->stub_cache()->Clear();
 
-
+  DeoptimizeMarkedCodeFilter filter;
+  Deoptimizer::DeoptimizeAllFunctionsWith(&filter);
 }
 
 
@@ -754,133 +968,182 @@ void MarkCompactCollector::Finish() {
 // and continue with marking. This process repeats until all reachable
 // objects have been marked.
 
-
-
-
-
-
-
+void CodeFlusher::ProcessJSFunctionCandidates() {
+  Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
+  Object* undefined = isolate_->heap()->undefined_value();
+
+  JSFunction* candidate = jsfunction_candidates_head_;
+  JSFunction* next_candidate;
+  while (candidate != NULL) {
+    next_candidate = GetNextCandidate(candidate);
+    ClearNextCandidate(candidate, undefined);
+
+    SharedFunctionInfo* shared = candidate->shared();
+
+    Code* code = shared->code();
+    MarkBit code_mark = Marking::MarkBitFrom(code);
+    if (!code_mark.Get()) {
+      shared->set_code(lazy_compile);
+      candidate->set_code(lazy_compile);
+    } else {
+      candidate->set_code(code);
+    }
+
+    // We are in the middle of a GC cycle so the write barrier in the code
+    // setter did not record the slot update and we have to do that manually.
+    Address slot = candidate->address() + JSFunction::kCodeEntryOffset;
+    Code* target = Code::cast(Code::GetObjectFromEntryAddress(slot));
+    isolate_->heap()->mark_compact_collector()->
+        RecordCodeEntrySlot(slot, target);
 
-
-
-
+    Object** shared_code_slot =
+        HeapObject::RawField(shared, SharedFunctionInfo::kCodeOffset);
+    isolate_->heap()->mark_compact_collector()->
+        RecordSlot(shared_code_slot, shared_code_slot, *shared_code_slot);
+
+    candidate = next_candidate;
   }
 
-
-
+  jsfunction_candidates_head_ = NULL;
+}
+
+
+void CodeFlusher::ProcessSharedFunctionInfoCandidates() {
+  Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
+
+  SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
+  SharedFunctionInfo* next_candidate;
+  while (candidate != NULL) {
+    next_candidate = GetNextCandidate(candidate);
+    ClearNextCandidate(candidate);
+
+    Code* code = candidate->code();
+    MarkBit code_mark = Marking::MarkBitFrom(code);
+    if (!code_mark.Get()) {
+      candidate->set_code(lazy_compile);
+    }
+
+    Object** code_slot =
+        HeapObject::RawField(candidate, SharedFunctionInfo::kCodeOffset);
+    isolate_->heap()->mark_compact_collector()->
+        RecordSlot(code_slot, code_slot, *code_slot);
 
-
-    jsfunction_candidates_head_ = function;
+    candidate = next_candidate;
   }
 
-
-
-
+  shared_function_info_candidates_head_ = NULL;
+}
+
+
+bool CodeFlusher::ContainsCandidate(SharedFunctionInfo* shared_info) {
+  SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
+  while (candidate != NULL) {
+    if (candidate == shared_info) return true;
+    candidate = GetNextCandidate(candidate);
   }
+  return false;
+}
 
-  private:
-  void ProcessJSFunctionCandidates() {
-    Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
 
-
-
+void CodeFlusher::EvictCandidate(SharedFunctionInfo* shared_info) {
+  // Make sure previous flushing decisions are revisited.
+  isolate_->heap()->incremental_marking()->RecordWrites(shared_info);
+
+  SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
+  SharedFunctionInfo* next_candidate;
+  if (candidate == shared_info) {
+    next_candidate = GetNextCandidate(shared_info);
+    shared_function_info_candidates_head_ = next_candidate;
+    ClearNextCandidate(shared_info);
+  } else {
     while (candidate != NULL) {
       next_candidate = GetNextCandidate(candidate);
 
-
-
-
-
-
-      shared->set_code(lazy_compile);
-        candidate->set_code(lazy_compile);
-      } else {
-        candidate->set_code(shared->code());
+      if (next_candidate == shared_info) {
+        next_candidate = GetNextCandidate(shared_info);
+        SetNextCandidate(candidate, next_candidate);
+        ClearNextCandidate(shared_info);
+        break;
       }
 
-      // We are in the middle of a GC cycle so the write barrier in the code
-      // setter did not record the slot update and we have to do that manually.
-      Address slot = candidate->address() + JSFunction::kCodeEntryOffset;
-      Code* target = Code::cast(Code::GetObjectFromEntryAddress(slot));
-      isolate_->heap()->mark_compact_collector()->
-          RecordCodeEntrySlot(slot, target);
-
-      RecordSharedFunctionInfoCodeSlot(shared);
-
       candidate = next_candidate;
     }
-
-    jsfunction_candidates_head_ = NULL;
   }
+}
 
 
-
-
+void CodeFlusher::EvictCandidate(JSFunction* function) {
+  ASSERT(!function->next_function_link()->IsUndefined());
+  Object* undefined = isolate_->heap()->undefined_value();
 
-
-
+  // Make sure previous flushing decisions are revisited.
+  isolate_->heap()->incremental_marking()->RecordWrites(function);
+  isolate_->heap()->incremental_marking()->RecordWrites(function->shared());
+
+  JSFunction* candidate = jsfunction_candidates_head_;
+  JSFunction* next_candidate;
+  if (candidate == function) {
+    next_candidate = GetNextCandidate(function);
+    jsfunction_candidates_head_ = next_candidate;
+    ClearNextCandidate(function, undefined);
+  } else {
     while (candidate != NULL) {
       next_candidate = GetNextCandidate(candidate);
-      SetNextCandidate(candidate, NULL);
 
-
-
-
-
+      if (next_candidate == function) {
+        next_candidate = GetNextCandidate(function);
+        SetNextCandidate(candidate, next_candidate);
+        ClearNextCandidate(function, undefined);
+        break;
       }
 
-      RecordSharedFunctionInfoCodeSlot(candidate);
-
       candidate = next_candidate;
     }
-
-    shared_function_info_candidates_head_ = NULL;
   }
+}
 
-  void RecordSharedFunctionInfoCodeSlot(SharedFunctionInfo* shared) {
-    Object** slot = HeapObject::RawField(shared,
-                                         SharedFunctionInfo::kCodeOffset);
-    isolate_->heap()->mark_compact_collector()->
-        RecordSlot(slot, slot, HeapObject::cast(*slot));
-  }
 
-
-
-                  candidate->address() + JSFunction::kCodeEntryOffset);
-  }
+void CodeFlusher::EvictJSFunctionCandidates() {
+  Object* undefined = isolate_->heap()->undefined_value();
 
-
-
+  JSFunction* candidate = jsfunction_candidates_head_;
+  JSFunction* next_candidate;
+  while (candidate != NULL) {
+    next_candidate = GetNextCandidate(candidate);
+    ClearNextCandidate(candidate, undefined);
+    candidate = next_candidate;
   }
 
-
-
-    *GetNextCandidateField(candidate) = next_candidate;
-  }
+  jsfunction_candidates_head_ = NULL;
+}
 
-  static SharedFunctionInfo** GetNextCandidateField(
-      SharedFunctionInfo* candidate) {
-    Code* code = candidate->code();
-    return reinterpret_cast<SharedFunctionInfo**>(
-        code->address() + Code::kGCMetadataOffset);
-  }
 
-
-
-
+void CodeFlusher::EvictSharedFunctionInfoCandidates() {
+  SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
+  SharedFunctionInfo* next_candidate;
+  while (candidate != NULL) {
+    next_candidate = GetNextCandidate(candidate);
+    ClearNextCandidate(candidate);
+    candidate = next_candidate;
  }
 
-
-
-    candidate->code()->set_gc_metadata(next_candidate);
-  }
+  shared_function_info_candidates_head_ = NULL;
+}
 
-  Isolate* isolate_;
-  JSFunction* jsfunction_candidates_head_;
-  SharedFunctionInfo* shared_function_info_candidates_head_;
 
-
-
+void CodeFlusher::IteratePointersToFromSpace(ObjectVisitor* v) {
+  Heap* heap = isolate_->heap();
+
+  JSFunction** slot = &jsfunction_candidates_head_;
+  JSFunction* candidate = jsfunction_candidates_head_;
+  while (candidate != NULL) {
+    if (heap->InFromSpace(candidate)) {
+      v->VisitPointer(reinterpret_cast<Object**>(slot));
+    }
+    candidate = GetNextCandidate(*slot);
+    slot = GetNextCandidateSlot(*slot);
+  }
+}
 
 
 MarkCompactCollector::~MarkCompactCollector() {
@@ -927,81 +1190,24 @@ static inline HeapObject* ShortCircuitConsString(Object** p) {
 }
 
 
-class
+class MarkCompactMarkingVisitor
+    : public StaticMarkingVisitor<MarkCompactMarkingVisitor> {
  public:
-  static
-
-  }
-
-  static void Initialize() {
-    table_.Register(kVisitShortcutCandidate,
-                    &FixedBodyVisitor<StaticMarkingVisitor,
-                                      ConsString::BodyDescriptor,
-                                      void>::Visit);
-
-    table_.Register(kVisitConsString,
-                    &FixedBodyVisitor<StaticMarkingVisitor,
-                                      ConsString::BodyDescriptor,
-                                      void>::Visit);
-
-    table_.Register(kVisitSlicedString,
-                    &FixedBodyVisitor<StaticMarkingVisitor,
-                                      SlicedString::BodyDescriptor,
-                                      void>::Visit);
-
-    table_.Register(kVisitFixedArray,
-                    &FlexibleBodyVisitor<StaticMarkingVisitor,
-                                         FixedArray::BodyDescriptor,
-                                         void>::Visit);
-
-    table_.Register(kVisitGlobalContext, &VisitGlobalContext);
-
-    table_.Register(kVisitFixedDoubleArray, DataObjectVisitor::Visit);
-
-    table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);
-    table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);
-    table_.Register(kVisitSeqAsciiString, &DataObjectVisitor::Visit);
-    table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);
-
-    table_.Register(kVisitJSWeakMap, &VisitJSWeakMap);
-
-    table_.Register(kVisitOddball,
-                    &FixedBodyVisitor<StaticMarkingVisitor,
-                                      Oddball::BodyDescriptor,
-                                      void>::Visit);
-    table_.Register(kVisitMap,
-                    &FixedBodyVisitor<StaticMarkingVisitor,
-                                      Map::BodyDescriptor,
-                                      void>::Visit);
+  static void ObjectStatsVisitBase(StaticVisitorBase::VisitorId id,
+                                   Map* map, HeapObject* obj);
 
-
+  static void ObjectStatsCountFixedArray(
+      FixedArrayBase* fixed_array,
+      FixedArraySubInstanceType fast_type,
+      FixedArraySubInstanceType dictionary_type);
 
-
-
-
-
-
-
-    table_.Register(kVisitJSRegExp,
-                    &VisitRegExpAndFlushCode);
-
-    table_.Register(kVisitPropertyCell,
-                    &FixedBodyVisitor<StaticMarkingVisitor,
-                                      JSGlobalPropertyCell::BodyDescriptor,
-                                      void>::Visit);
-
-    table_.RegisterSpecializations<DataObjectVisitor,
-                                   kVisitDataObject,
-                                   kVisitDataObjectGeneric>();
-
-    table_.RegisterSpecializations<JSObjectVisitor,
-                                   kVisitJSObject,
-                                   kVisitJSObjectGeneric>();
+  template<MarkCompactMarkingVisitor::VisitorId id>
+  class ObjectStatsTracker {
+   public:
+    static inline void Visit(Map* map, HeapObject* obj);
+  };
 
-
-                                   kVisitStruct,
-                                   kVisitStructGeneric>();
-  }
+  static void Initialize();
 
   INLINE(static void VisitPointer(Heap* heap, Object** p)) {
     MarkObjectByPointer(heap->mark_compact_collector(), p, p);
@@ -1020,48 +1226,21 @@ class StaticMarkingVisitor : public StaticVisitorBase {
     }
   }
 
-
-
-    JSGlobalPropertyCell* cell =
-        JSGlobalPropertyCell::cast(rinfo->target_cell());
-    MarkBit mark = Marking::MarkBitFrom(cell);
-    heap->mark_compact_collector()->MarkObject(cell, mark);
-  }
-
-  static inline void VisitEmbeddedPointer(Heap* heap, RelocInfo* rinfo) {
-    ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
-    // TODO(mstarzinger): We do not short-circuit cons strings here, verify
-    // that there can be no such embedded pointers and add assertion here.
-    HeapObject* object = HeapObject::cast(rinfo->target_object());
-    heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
+  // Marks the object black and pushes it on the marking stack.
+  INLINE(static void MarkObject(Heap* heap, HeapObject* object)) {
     MarkBit mark = Marking::MarkBitFrom(object);
     heap->mark_compact_collector()->MarkObject(object, mark);
   }
 
-
-
-
-
-
-
-
-    IC::Clear(rinfo->pc());
-    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
+  // Marks the object black without pushing it on the marking stack.
+  // Returns true if object needed marking and false otherwise.
+  INLINE(static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object)) {
+    MarkBit mark_bit = Marking::MarkBitFrom(object);
+    if (!mark_bit.Get()) {
+      heap->mark_compact_collector()->SetMark(object, mark_bit);
+      return true;
    }
-
-    heap->mark_compact_collector()->MarkObject(target, code_mark);
-    heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
-  }
-
-  static inline void VisitDebugTarget(Heap* heap, RelocInfo* rinfo) {
-    ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
-            rinfo->IsPatchedReturnSequence()) ||
-           (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
-            rinfo->IsPatchedDebugBreakSlotSequence()));
-    Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
-    MarkBit code_mark = Marking::MarkBitFrom(target);
-    heap->mark_compact_collector()->MarkObject(target, code_mark);
-    heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
+    return false;
   }
 
   // Mark object pointed to by p.
@@ -1116,28 +1295,10 @@ class StaticMarkingVisitor : public StaticVisitorBase {
     return true;
   }
 
-  static
-
-
-
- private:
-  class DataObjectVisitor {
-   public:
-    template<int size>
-    static void VisitSpecialized(Map* map, HeapObject* object) {
-    }
-
-    static void Visit(Map* map, HeapObject* object) {
-    }
-  };
-
-  typedef FlexibleBodyVisitor<StaticMarkingVisitor,
-                              JSObject::BodyDescriptor,
-                              void> JSObjectVisitor;
-
-  typedef FlexibleBodyVisitor<StaticMarkingVisitor,
-                              StructBodyDescriptor,
-                              void> StructObjectVisitor;
+  INLINE(static void BeforeVisitingSharedFunctionInfo(HeapObject* object)) {
+    SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
+    shared->BeforeVisitingPointers();
+  }
 
   static void VisitJSWeakMap(Map* map, HeapObject* object) {
     MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
@@ -1151,12 +1312,12 @@ class StaticMarkingVisitor : public StaticVisitorBase {
 
     // Skip visiting the backing hash table containing the mappings.
     int object_size = JSWeakMap::BodyDescriptor::SizeOf(map, object);
-    BodyVisitorBase<
+    BodyVisitorBase<MarkCompactMarkingVisitor>::IteratePointers(
         map->GetHeap(),
         object,
        JSWeakMap::BodyDescriptor::kStartOffset,
        JSWeakMap::kTableOffset);
-    BodyVisitorBase<
+    BodyVisitorBase<MarkCompactMarkingVisitor>::IteratePointers(
        map->GetHeap(),
        object,
        JSWeakMap::kTableOffset + kPointerSize,
@@ -1176,136 +1337,14 @@ class StaticMarkingVisitor : public StaticVisitorBase {
     ASSERT(MarkCompactCollector::IsMarked(table->map()));
   }
 
-
-
-
-    if (FLAG_cleanup_code_caches_at_gc) {
-      code->ClearTypeFeedbackCells(heap);
-    }
-    code->CodeIterateBody<StaticMarkingVisitor>(heap);
-  }
+ private:
+  template<int id>
+  static inline void TrackObjectStatsAndVisit(Map* map, HeapObject* obj);
 
   // Code flushing support.
 
-  // How many collections newly compiled code object will survive before being
-  // flushed.
-  static const int kCodeAgeThreshold = 5;
-
   static const int kRegExpCodeThreshold = 5;
 
-  inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
-    Object* undefined = heap->undefined_value();
-    return (info->script() != undefined) &&
-        (reinterpret_cast<Script*>(info->script())->source() != undefined);
-  }
-
-
-  inline static bool IsCompiled(JSFunction* function) {
-    return function->code() !=
-        function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
-  }
-
-  inline static bool IsCompiled(SharedFunctionInfo* function) {
-    return function->code() !=
-        function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
-  }
-
-  inline static bool IsFlushable(Heap* heap, JSFunction* function) {
-    SharedFunctionInfo* shared_info = function->unchecked_shared();
-
-    // Code is either on stack, in compilation cache or referenced
-    // by optimized version of function.
-    MarkBit code_mark = Marking::MarkBitFrom(function->code());
-    if (code_mark.Get()) {
-      if (!Marking::MarkBitFrom(shared_info).Get()) {
-        shared_info->set_code_age(0);
-      }
-      return false;
-    }
-
-    // We do not flush code for optimized functions.
-    if (function->code() != shared_info->code()) {
-      return false;
-    }
-
-    return IsFlushable(heap, shared_info);
-  }
-
-  inline static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info) {
-    // Code is either on stack, in compilation cache or referenced
-    // by optimized version of function.
-    MarkBit code_mark =
-        Marking::MarkBitFrom(shared_info->code());
-    if (code_mark.Get()) {
-      return false;
-    }
-
-    // The function must be compiled and have the source code available,
-    // to be able to recompile it in case we need the function again.
-    if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
-      return false;
-    }
-
-    // We never flush code for Api functions.
-    Object* function_data = shared_info->function_data();
-    if (function_data->IsFunctionTemplateInfo()) {
-      return false;
-    }
-
-    // Only flush code for functions.
-    if (shared_info->code()->kind() != Code::FUNCTION) {
-      return false;
-    }
-
-    // Function must be lazy compilable.
-    if (!shared_info->allows_lazy_compilation()) {
-      return false;
-    }
-
-    // If this is a full script wrapped in a function we do no flush the code.
-    if (shared_info->is_toplevel()) {
-      return false;
-    }
-
-    // Age this shared function info.
-    if (shared_info->code_age() < kCodeAgeThreshold) {
-      shared_info->set_code_age(shared_info->code_age() + 1);
-      return false;
-    }
-
-    return true;
-  }
-
-
-  static bool FlushCodeForFunction(Heap* heap, JSFunction* function) {
-    if (!IsFlushable(heap, function)) return false;
-
-    // This function's code looks flushable. But we have to postpone the
-    // decision until we see all functions that point to the same
-    // SharedFunctionInfo because some of them might be optimized.
-    // That would make the nonoptimized version of the code nonflushable,
-    // because it is required for bailing out from optimized code.
-    heap->mark_compact_collector()->code_flusher()->AddCandidate(function);
-    return true;
-  }
-
-  static inline bool IsValidNotBuiltinContext(Object* ctx) {
-    return ctx->IsContext() &&
-        !Context::cast(ctx)->global()->IsJSBuiltinsObject();
-  }
-
-
-  static void VisitSharedFunctionInfoGeneric(Map* map, HeapObject* object) {
-    SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);
-
-    if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap();
-
-    FixedBodyVisitor<StaticMarkingVisitor,
-                     SharedFunctionInfo::BodyDescriptor,
-                     void>::Visit(map, object);
-  }
-
-
   static void UpdateRegExpCodeAgeAndFlush(Heap* heap,
                                           JSRegExp* re,
                                           bool is_ascii) {
@@ -1368,7 +1407,7 @@ class StaticMarkingVisitor : public StaticVisitorBase {
     Heap* heap = map->GetHeap();
     MarkCompactCollector* collector = heap->mark_compact_collector();
     if (!collector->is_code_flushing_enabled()) {
-
+      VisitJSRegExp(map, object);
       return;
     }
     JSRegExp* re = reinterpret_cast<JSRegExp*>(object);
@@ -1376,183 +1415,161 @@ class StaticMarkingVisitor : public StaticVisitorBase {
     UpdateRegExpCodeAgeAndFlush(heap, re, true);
     UpdateRegExpCodeAgeAndFlush(heap, re, false);
     // Visit the fields of the RegExp, including the updated FixedArray.
-
+    VisitJSRegExp(map, object);
   }
 
+  static VisitorDispatchTable<Callback> non_count_table_;
+};
 
-  static void VisitSharedFunctionInfoAndFlushCode(Map* map,
-                                                  HeapObject* object) {
-    Heap* heap = map->GetHeap();
-    SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);
-    if (shared->ic_age() != heap->global_ic_age()) {
-      shared->ResetForNewContext(heap->global_ic_age());
-    }
 
-
-
-
+void MarkCompactMarkingVisitor::ObjectStatsCountFixedArray(
+    FixedArrayBase* fixed_array,
+    FixedArraySubInstanceType fast_type,
+    FixedArraySubInstanceType dictionary_type) {
+  Heap* heap = fixed_array->map()->GetHeap();
+  if (fixed_array->map() != heap->fixed_cow_array_map() &&
+      fixed_array->map() != heap->fixed_double_array_map() &&
+      fixed_array != heap->empty_fixed_array()) {
+    if (fixed_array->IsDictionary()) {
+      heap->RecordObjectStats(FIXED_ARRAY_TYPE,
+                              dictionary_type,
+                              fixed_array->Size());
+    } else {
+      heap->RecordObjectStats(FIXED_ARRAY_TYPE,
+                              fast_type,
+                              fixed_array->Size());
    }
-    VisitSharedFunctionInfoAndFlushCodeGeneric(map, object, false);
  }
+}
 
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-    VisitSharedFunctionInfoFields(heap, object, known_flush_code_candidate);
+void MarkCompactMarkingVisitor::ObjectStatsVisitBase(
+    MarkCompactMarkingVisitor::VisitorId id, Map* map, HeapObject* obj) {
+  Heap* heap = map->GetHeap();
+  int object_size = obj->Size();
+  heap->RecordObjectStats(map->instance_type(), -1, object_size);
+  non_count_table_.GetVisitorById(id)(map, obj);
+  if (obj->IsJSObject()) {
+    JSObject* object = JSObject::cast(obj);
+    ObjectStatsCountFixedArray(object->elements(),
+                               DICTIONARY_ELEMENTS_SUB_TYPE,
+                               FAST_ELEMENTS_SUB_TYPE);
+    ObjectStatsCountFixedArray(object->properties(),
+                               DICTIONARY_PROPERTIES_SUB_TYPE,
+                               FAST_PROPERTIES_SUB_TYPE);
  }
+}
 
 
-
-
-
-
-
-    RecordCodeEntrySlot(entry_address, code);
-  }
-
-  static void VisitGlobalContext(Map* map, HeapObject* object) {
-    FixedBodyVisitor<StaticMarkingVisitor,
-                     Context::MarkCompactBodyDescriptor,
-                     void>::Visit(map, object);
+template<MarkCompactMarkingVisitor::VisitorId id>
+void MarkCompactMarkingVisitor::ObjectStatsTracker<id>::Visit(
+    Map* map, HeapObject* obj) {
+  ObjectStatsVisitBase(id, map, obj);
+}
 
-    MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
-    for (int idx = Context::FIRST_WEAK_SLOT;
-         idx < Context::GLOBAL_CONTEXT_SLOTS;
-         ++idx) {
-      Object** slot =
-          HeapObject::RawField(object, FixedArray::OffsetOfElementAt(idx));
-      collector->RecordSlot(slot, slot, *slot);
-    }
-  }
 
-
+template<>
+class MarkCompactMarkingVisitor::ObjectStatsTracker<
+    MarkCompactMarkingVisitor::kVisitMap> {
+ public:
+  static inline void Visit(Map* map, HeapObject* obj) {
     Heap* heap = map->GetHeap();
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-  }
-
-    VisitJSFunctionFields(map,
-                          reinterpret_cast<JSFunction*>(object),
-                          flush_code_candidate);
+    Map* map_obj = Map::cast(obj);
+    ASSERT(map->instance_type() == MAP_TYPE);
+    DescriptorArray* array = map_obj->instance_descriptors();
+    if (map_obj->owns_descriptors() &&
+        array != heap->empty_descriptor_array()) {
+      int fixed_array_size = array->Size();
+      heap->RecordObjectStats(FIXED_ARRAY_TYPE,
+                              DESCRIPTOR_ARRAY_SUB_TYPE,
+                              fixed_array_size);
+    }
+    if (map_obj->HasTransitionArray()) {
+      int fixed_array_size = map_obj->transitions()->Size();
+      heap->RecordObjectStats(FIXED_ARRAY_TYPE,
+                              TRANSITION_ARRAY_SUB_TYPE,
+                              fixed_array_size);
+    }
+    if (map_obj->code_cache() != heap->empty_fixed_array()) {
+      heap->RecordObjectStats(
+          FIXED_ARRAY_TYPE,
+          MAP_CODE_CACHE_SUB_TYPE,
+          FixedArray::cast(map_obj->code_cache())->Size());
+    }
+    ObjectStatsVisitBase(kVisitMap, map, obj);
   }
+};
 
 
-
-
-
-
+template<>
+class MarkCompactMarkingVisitor::ObjectStatsTracker<
+    MarkCompactMarkingVisitor::kVisitCode> {
+ public:
+  static inline void Visit(Map* map, HeapObject* obj) {
+    Heap* heap = map->GetHeap();
+    int object_size = obj->Size();
+    ASSERT(map->instance_type() == CODE_TYPE);
+    heap->RecordObjectStats(CODE_TYPE, Code::cast(obj)->kind(), object_size);
+    ObjectStatsVisitBase(kVisitCode, map, obj);
  }
+};
 
 
-
-
-
-
-  static inline void
-                    JSFunction* object,
-                    bool flush_code_candidate) {
+template<>
+class MarkCompactMarkingVisitor::ObjectStatsTracker<
+    MarkCompactMarkingVisitor::kVisitSharedFunctionInfo> {
+ public:
+  static inline void Visit(Map* map, HeapObject* obj) {
     Heap* heap = map->GetHeap();
-
-
-
-
-
-
-    VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
-    } else {
-      // Don't visit code object.
-
-      // Visit shared function info to avoid double checking of it's
-      // flushability.
-      SharedFunctionInfo* shared_info = object->unchecked_shared();
-      MarkBit shared_info_mark = Marking::MarkBitFrom(shared_info);
-      if (!shared_info_mark.Get()) {
-        Map* shared_info_map = shared_info->map();
-        MarkBit shared_info_map_mark =
-            Marking::MarkBitFrom(shared_info_map);
-        heap->mark_compact_collector()->SetMark(shared_info, shared_info_mark);
-        heap->mark_compact_collector()->MarkObject(shared_info_map,
-                                                   shared_info_map_mark);
-        VisitSharedFunctionInfoAndFlushCodeGeneric(shared_info_map,
-                                                   shared_info,
-                                                   true);
-      }
+    SharedFunctionInfo* sfi = SharedFunctionInfo::cast(obj);
+    if (sfi->scope_info() != heap->empty_fixed_array()) {
+      heap->RecordObjectStats(
+          FIXED_ARRAY_TYPE,
+          SCOPE_INFO_SUB_TYPE,
+          FixedArray::cast(sfi->scope_info())->Size());
    }
-
-    VisitPointers(
-        heap,
-        HeapObject::RawField(object,
-                             JSFunction::kCodeEntryOffset + kPointerSize),
-        HeapObject::RawField(object,
-                             JSFunction::kNonWeakFieldsEndOffset));
-  }
-
-  static inline void VisitJSRegExpFields(Map* map,
-                                         HeapObject* object) {
-    int last_property_offset =
-        JSRegExp::kSize + kPointerSize * map->inobject_properties();
-    VisitPointers(map->GetHeap(),
-                  SLOT_ADDR(object, JSRegExp::kPropertiesOffset),
-                  SLOT_ADDR(object, last_property_offset));
+    ObjectStatsVisitBase(kVisitSharedFunctionInfo, map, obj);
  }
+};
 
 
-
-
-
-
-
-
+template<>
+class MarkCompactMarkingVisitor::ObjectStatsTracker<
+    MarkCompactMarkingVisitor::kVisitFixedArray> {
+ public:
+  static inline void Visit(Map* map, HeapObject* obj) {
+    Heap* heap = map->GetHeap();
+    FixedArray* fixed_array = FixedArray::cast(obj);
+    if (fixed_array == heap->symbol_table()) {
+      heap->RecordObjectStats(
+          FIXED_ARRAY_TYPE,
+          SYMBOL_TABLE_SUB_TYPE,
+          fixed_array->Size());
    }
-
-    VisitPointers(heap,
-                  SLOT_ADDR(object, SharedFunctionInfo::kScopeInfoOffset),
-                  SLOT_ADDR(object, SharedFunctionInfo::kSize));
+    ObjectStatsVisitBase(kVisitFixedArray, map, obj);
  }
+};
 
-  #undef SLOT_ADDR
 
-
+void MarkCompactMarkingVisitor::Initialize() {
+  StaticMarkingVisitor<MarkCompactMarkingVisitor>::Initialize();
 
-
-
+  table_.Register(kVisitJSRegExp,
+                  &VisitRegExpAndFlushCode);
 
+  if (FLAG_track_gc_object_stats) {
+    // Copy the visitor table to make call-through possible.
+    non_count_table_.CopyFrom(&table_);
+#define VISITOR_ID_COUNT_FUNCTION(id) \
+    table_.Register(kVisit##id, ObjectStatsTracker<kVisit##id>::Visit);
+    VISITOR_ID_LIST(VISITOR_ID_COUNT_FUNCTION)
+#undef VISITOR_ID_COUNT_FUNCTION
+  }
+}
 
-
-
+
+VisitorDispatchTable<MarkCompactMarkingVisitor::Callback>
+    MarkCompactMarkingVisitor::non_count_table_;
 
 
 class MarkingVisitor : public ObjectVisitor {
@@ -1560,11 +1577,11 @@ class MarkingVisitor : public ObjectVisitor {
   explicit MarkingVisitor(Heap* heap) : heap_(heap) { }
 
   void VisitPointer(Object** p) {
-
+    MarkCompactMarkingVisitor::VisitPointer(heap_, p);
  }
 
   void VisitPointers(Object** start, Object** end) {
-
+    MarkCompactMarkingVisitor::VisitPointers(heap_, start, end);
  }
 
  private:
@@ -1611,26 +1628,6 @@ class SharedFunctionInfoMarkingVisitor : public ObjectVisitor {
 };
 
 
-void MarkCompactCollector::MarkInlinedFunctionsCode(Code* code) {
-  // For optimized functions we should retain both non-optimized version
-  // of it's code and non-optimized version of all inlined functions.
-  // This is required to support bailing out from inlined code.
-  DeoptimizationInputData* data =
-      DeoptimizationInputData::cast(code->deoptimization_data());
-
-  FixedArray* literals = data->LiteralArray();
-
-  for (int i = 0, count = data->InlinedFunctionCount()->value();
-       i < count;
-       i++) {
-    JSFunction* inlined = JSFunction::cast(literals->get(i));
-    Code* inlined_code = inlined->shared()->code();
-    MarkBit inlined_code_mark = Marking::MarkBitFrom(inlined_code);
-    MarkObject(inlined_code, inlined_code_mark);
-  }
-}
-
-
 void MarkCompactCollector::PrepareThreadForCodeFlushing(Isolate* isolate,
                                                         ThreadLocalTop* top) {
   for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) {
@@ -1643,7 +1640,8 @@ void MarkCompactCollector::PrepareThreadForCodeFlushing(Isolate* isolate,
     MarkBit code_mark = Marking::MarkBitFrom(code);
     MarkObject(code, code_mark);
     if (frame->is_optimized()) {
-      MarkInlinedFunctionsCode(
+      MarkCompactMarkingVisitor::MarkInlinedFunctionsCode(heap(),
+                                                          frame->LookupCode());
    }
  }
 }
@@ -1652,21 +1650,13 @@ void MarkCompactCollector::PrepareThreadForCodeFlushing(Isolate* isolate,
 void MarkCompactCollector::PrepareForCodeFlushing() {
   ASSERT(heap() == Isolate::Current()->heap());
 
-  //
-  if (
-    EnableCodeFlushing(
-    return;
+  // Enable code flushing for non-incremental cycles.
+  if (FLAG_flush_code && !FLAG_flush_code_incrementally) {
+    EnableCodeFlushing(!was_marked_incrementally_);
  }
 
-
-  if (
-      heap()->isolate()->debug()->has_break_points()) {
-    EnableCodeFlushing(false);
-    return;
-  }
-#endif
-
-  EnableCodeFlushing(true);
+  // If code flushing is disabled, there is no need to prepare for it.
+  if (!is_code_flushing_enabled()) return;
 
   // Ensure that empty descriptor array is marked. Method MarkDescriptorArray
   // relies on it being marked before any other descriptor array.
@@ -1723,7 +1713,7 @@ class RootMarkingVisitor : public ObjectVisitor {
     // Mark the map pointer and body, and push them on the marking stack.
     MarkBit map_mark = Marking::MarkBitFrom(map);
     collector_->MarkObject(map, map_mark);
-
+    MarkCompactMarkingVisitor::IterateBody(map, object);
 
     // Mark all the objects reachable from the map and body.  May leave
     // overflowed objects in the heap.
@@ -1785,150 +1775,6 @@ class MarkCompactWeakObjectRetainer : public WeakObjectRetainer {
 };
 
 
-void MarkCompactCollector::ProcessNewlyMarkedObject(HeapObject* object) {
-  ASSERT(IsMarked(object));
-  ASSERT(HEAP->Contains(object));
-  if (object->IsMap()) {
-    Map* map = Map::cast(object);
-    heap_->ClearCacheOnMap(map);
-
-    // When map collection is enabled we have to mark through map's transitions
-    // in a special way to make transition links weak. Only maps for subclasses
-    // of JSReceiver can have transitions.
-    STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
-    if (FLAG_collect_maps && map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
-      marker_.MarkMapContents(map);
-    } else {
-      marking_deque_.PushBlack(map);
-    }
-  } else {
-    marking_deque_.PushBlack(object);
-  }
-}
-
-
-// Force instantiation of template instances.
-template void Marker<IncrementalMarking>::MarkMapContents(Map* map);
-template void Marker<MarkCompactCollector>::MarkMapContents(Map* map);
-
-
-template <class T>
-void Marker<T>::MarkMapContents(Map* map) {
-  // Mark prototype transitions array but don't push it into marking stack.
-  // This will make references from it weak. We will clean dead prototype
-  // transitions in ClearNonLiveTransitions.
-  Object** proto_trans_slot =
-      HeapObject::RawField(map, Map::kPrototypeTransitionsOrBackPointerOffset);
-  HeapObject* prototype_transitions = HeapObject::cast(*proto_trans_slot);
-  if (prototype_transitions->IsFixedArray()) {
-    mark_compact_collector()->RecordSlot(proto_trans_slot,
-                                         proto_trans_slot,
-                                         prototype_transitions);
-    MarkBit mark = Marking::MarkBitFrom(prototype_transitions);
-    if (!mark.Get()) {
-      mark.Set();
-      MemoryChunk::IncrementLiveBytesFromGC(prototype_transitions->address(),
-                                            prototype_transitions->Size());
-    }
-  }
-
-  // Make sure that the back pointer stored either in the map itself or inside
-  // its prototype transitions array is marked. Treat pointers in the descriptor
-  // array as weak and also mark that array to prevent visiting it later.
-  base_marker()->MarkObjectAndPush(HeapObject::cast(map->GetBackPointer()));
-
-  Object** descriptor_array_slot =
-      HeapObject::RawField(map, Map::kInstanceDescriptorsOrBitField3Offset);
-  Object* descriptor_array = *descriptor_array_slot;
-  if (!descriptor_array->IsSmi()) {
-    MarkDescriptorArray(reinterpret_cast<DescriptorArray*>(descriptor_array));
-  }
-
-  // Mark the Object* fields of the Map. Since the descriptor array has been
-  // marked already, it is fine that one of these fields contains a pointer
-  // to it. But make sure to skip back pointer and prototype transitions.
-  STATIC_ASSERT(Map::kPointerFieldsEndOffset ==
-                Map::kPrototypeTransitionsOrBackPointerOffset + kPointerSize);
-  Object** start_slot = HeapObject::RawField(
-      map, Map::kPointerFieldsBeginOffset);
-  Object** end_slot = HeapObject::RawField(
-      map, Map::kPrototypeTransitionsOrBackPointerOffset);
-  for (Object** slot = start_slot; slot < end_slot; slot++) {
-    Object* obj = *slot;
-    if (!obj->NonFailureIsHeapObject()) continue;
-    mark_compact_collector()->RecordSlot(start_slot, slot, obj);
-    base_marker()->MarkObjectAndPush(reinterpret_cast<HeapObject*>(obj));
-  }
-}
-
-
-template <class T>
-void Marker<T>::MarkDescriptorArray(DescriptorArray* descriptors) {
-  // Empty descriptor array is marked as a root before any maps are marked.
-  ASSERT(descriptors != descriptors->GetHeap()->empty_descriptor_array());
-
-  // The DescriptorArray contains a pointer to its contents array, but the
-  // contents array will be marked black and hence not be visited again.
-  if (!base_marker()->MarkObjectAndPush(descriptors)) return;
-  FixedArray* contents = FixedArray::cast(
-      descriptors->get(DescriptorArray::kContentArrayIndex));
-  ASSERT(Marking::IsWhite(Marking::MarkBitFrom(contents)));
-  base_marker()->MarkObjectWithoutPush(contents);
-
-  // If the descriptor contains a transition (value is a Map), we don't mark the
-  // value as live. It might be set to the NULL_DESCRIPTOR in
-  // ClearNonLiveTransitions later.
-  for (int i = 0; i < descriptors->number_of_descriptors(); ++i) {
-    PropertyDetails details(descriptors->GetDetails(i));
-    Object** slot = descriptors->GetValueSlot(i);
-
-    if (!(*slot)->IsHeapObject()) continue;
-    HeapObject* value = HeapObject::cast(*slot);
-
-    mark_compact_collector()->RecordSlot(slot, slot, *slot);
-
-    switch (details.type()) {
-      case NORMAL:
-      case FIELD:
-      case CONSTANT_FUNCTION:
-      case HANDLER:
-      case INTERCEPTOR:
-        base_marker()->MarkObjectAndPush(value);
-        break;
-      case CALLBACKS:
-        if (!value->IsAccessorPair()) {
-          base_marker()->MarkObjectAndPush(value);
-        } else if (base_marker()->MarkObjectWithoutPush(value)) {
-          AccessorPair* accessors = AccessorPair::cast(value);
-          MarkAccessorPairSlot(accessors, AccessorPair::kGetterOffset);
-          MarkAccessorPairSlot(accessors, AccessorPair::kSetterOffset);
-        }
-        break;
-      case ELEMENTS_TRANSITION:
-        // For maps with multiple elements transitions, the transition maps are
-        // stored in a FixedArray. Keep the fixed array alive but not the maps
-        // that it refers to.
-        if (value->IsFixedArray()) base_marker()->MarkObjectWithoutPush(value);
-        break;
-      case MAP_TRANSITION:
-      case CONSTANT_TRANSITION:
-      case NULL_DESCRIPTOR:
-        break;
-    }
-  }
-}
-
-
-template <class T>
-void Marker<T>::MarkAccessorPairSlot(AccessorPair* accessors, int offset) {
-  Object** slot = HeapObject::RawField(accessors, offset);
-  HeapObject* accessor = HeapObject::cast(*slot);
-  if (accessor->IsMap()) return;
-  mark_compact_collector()->RecordSlot(slot, slot, accessor);
-  base_marker()->MarkObjectAndPush(accessor);
-}
-
-
 // Fill the marking stack with overflowed objects returned by the given
 // iterator. Stop when the marking stack is filled or the end of the space
 // is reached, whichever comes first.
@@ -1981,10 +1827,10 @@ static void DiscoverGreyObjectsOnPage(MarkingDeque* marking_deque, Page* p) {
   for (;
        cell_index < last_cell_index;
        cell_index++, cell_base += 32 * kPointerSize) {
-    ASSERT((unsigned)cell_index ==
-        Bitmap::IndexToCell(
-            Bitmap::CellAlignIndex(
-                p->AddressToMarkbitIndex(cell_base))));
+    ASSERT(static_cast<unsigned>(cell_index) ==
+           Bitmap::IndexToCell(
+               Bitmap::CellAlignIndex(
+                   p->AddressToMarkbitIndex(cell_base))));
 
     const MarkBit::CellType current_cell = cells[cell_index];
     if (current_cell == 0) continue;
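The assertion rewritten here (and twice more below) checks the mark-bitmap geometry: one mark bit per pointer-sized word, 32 bits per cell, so each cell covers 32 * kPointerSize bytes and advancing cell_base by that stride keeps cell_index in step. A compilable sketch of the arithmetic, with illustrative constants (kPointerSize is hard-coded to 8 here; V8 derives it per target):

    #include <cassert>
    #include <cstdint>

    // Model of the mark-bitmap math the ASSERT above is checking: one mark
    // bit per pointer-sized word, grouped into 32-bit cells, so each cell
    // covers 32 * kPointerSize bytes of the page.
    const uintptr_t kPointerSize = 8;
    const uint32_t kBitsPerCell = 32;

    uint32_t AddressToMarkbitIndex(uintptr_t page_start, uintptr_t addr) {
      return static_cast<uint32_t>((addr - page_start) / kPointerSize);
    }

    uint32_t IndexToCell(uint32_t markbit_index) {
      return markbit_index / kBitsPerCell;  // which 32-bit cell holds the bit
    }

    int main() {
      uintptr_t page_start = 0x100000;
      uintptr_t cell_base = page_start;
      for (uint32_t cell_index = 0; cell_index < 4;
           cell_index++, cell_base += kBitsPerCell * kPointerSize) {
        // Mirrors the loop invariant asserted in the diff.
        assert(cell_index ==
               IndexToCell(AddressToMarkbitIndex(page_start, cell_base)));
      }
      return 0;
    }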
@@ -2041,6 +1887,16 @@ bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) {
 }
 
 
+bool MarkCompactCollector::IsUnmarkedHeapObjectWithHeap(Heap* heap,
+                                                        Object** p) {
+  Object* o = *p;
+  ASSERT(o->IsHeapObject());
+  HeapObject* heap_object = HeapObject::cast(o);
+  MarkBit mark = Marking::MarkBitFrom(heap_object);
+  return !mark.Get();
+}
+
+
 void MarkCompactCollector::MarkSymbolTable() {
   SymbolTable* symbol_table = heap()->symbol_table();
   // Mark the symbol table itself.
@@ -2069,54 +1925,6 @@ void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
 }
 
 
-void MarkCompactCollector::MarkObjectGroups() {
-  List<ObjectGroup*>* object_groups =
-      heap()->isolate()->global_handles()->object_groups();
-
-  int last = 0;
-  for (int i = 0; i < object_groups->length(); i++) {
-    ObjectGroup* entry = object_groups->at(i);
-    ASSERT(entry != NULL);
-
-    Object*** objects = entry->objects_;
-    bool group_marked = false;
-    for (size_t j = 0; j < entry->length_; j++) {
-      Object* object = *objects[j];
-      if (object->IsHeapObject()) {
-        HeapObject* heap_object = HeapObject::cast(object);
-        MarkBit mark = Marking::MarkBitFrom(heap_object);
-        if (mark.Get()) {
-          group_marked = true;
-          break;
-        }
-      }
-    }
-
-    if (!group_marked) {
-      (*object_groups)[last++] = entry;
-      continue;
-    }
-
-    // An object in the group is marked, so mark as grey all white heap
-    // objects in the group.
-    for (size_t j = 0; j < entry->length_; ++j) {
-      Object* object = *objects[j];
-      if (object->IsHeapObject()) {
-        HeapObject* heap_object = HeapObject::cast(object);
-        MarkBit mark = Marking::MarkBitFrom(heap_object);
-        MarkObject(heap_object, mark);
-      }
-    }
-
-    // Once the entire group has been colored grey, set the object group
-    // to NULL so it won't be processed again.
-    entry->Dispose();
-    object_groups->at(i) = NULL;
-  }
-  object_groups->Rewind(last);
-}
-
-
 void MarkCompactCollector::MarkImplicitRefGroups() {
   List<ImplicitRefGroup*>* ref_groups =
       heap()->isolate()->global_handles()->implicit_ref_groups();
@@ -2165,7 +1973,7 @@ void MarkCompactCollector::EmptyMarkingDeque() {
     MarkBit map_mark = Marking::MarkBitFrom(map);
     MarkObject(map, map_mark);
 
-    StaticMarkingVisitor::IterateBody(map, object);
+    MarkCompactMarkingVisitor::IterateBody(map, object);
   }
 
   // Process encountered weak maps, mark objects only reachable by those
@@ -2235,11 +2043,12 @@ void MarkCompactCollector::ProcessMarkingDeque() {
 }
 
 
-void MarkCompactCollector::ProcessExternalMarking() {
+void MarkCompactCollector::ProcessExternalMarking(RootMarkingVisitor* visitor) {
   bool work_to_do = true;
   ASSERT(marking_deque_.IsEmpty());
   while (work_to_do) {
-    MarkObjectGroups();
+    heap()->isolate()->global_handles()->IterateObjectGroups(
+        visitor, &IsUnmarkedHeapObjectWithHeap);
     MarkImplicitRefGroups();
     work_to_do = !marking_deque_.IsEmpty();
     ProcessMarkingDeque();
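With MarkObjectGroups() deleted above, object-group marking now lives in GlobalHandles::IterateObjectGroups, parameterized by the root visitor and by the IsUnmarkedHeapObjectWithHeap predicate added earlier in this diff. The surrounding while loop is a fixpoint: retaining one group's members can make another group's members reachable, so the collector alternates external marking and deque draining until nothing new turns grey. The shape of that fixpoint in miniature (illustrative types, not V8's):

    #include <vector>

    // Miniature of the fixpoint above: a group is retained if any member is
    // already marked, and retaining it marks the remaining members, which
    // can in turn activate further groups.
    struct Group { std::vector<int> members; };

    void ProcessGroupsToFixpoint(std::vector<bool>& marked,
                                 const std::vector<Group>& groups) {
      bool work_to_do = true;
      while (work_to_do) {
        work_to_do = false;
        for (const Group& g : groups) {
          bool any_marked = false;
          for (int m : g.members) any_marked |= marked[m];
          if (!any_marked) continue;
          for (int m : g.members) {
            if (!marked[m]) {
              marked[m] = true;   // analogous to greying + draining the deque
              work_to_do = true;  // another pass may activate more groups
            }
          }
        }
      }
    }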
@@ -2262,7 +2071,7 @@ void MarkCompactCollector::MarkLiveObjects() {
   // non-incremental marker can deal with them as if overflow
   // occured during normal marking.
   // But incremental marker uses a separate marking deque
-  // so we have to explicitly copy
+  // so we have to explicitly copy its overflow state.
   incremental_marking->Finalize();
   incremental_marking_overflowed =
       incremental_marking->marking_deque()->overflowed();
@@ -2304,7 +2113,7 @@ void MarkCompactCollector::MarkLiveObjects() {
       ASSERT(cell->IsJSGlobalPropertyCell());
       if (IsMarked(cell)) {
         int offset = JSGlobalPropertyCell::kValueOffset;
-        StaticMarkingVisitor::VisitPointer(
+        MarkCompactMarkingVisitor::VisitPointer(
             heap(),
             reinterpret_cast<Object**>(cell->address() + offset));
       }
@@ -2318,7 +2127,7 @@ void MarkCompactCollector::MarkLiveObjects() {
   // The objects reachable from the roots are marked, yet unreachable
   // objects are unmarked. Mark objects reachable due to host
   // application specific logic.
-  ProcessExternalMarking();
+  ProcessExternalMarking(&root_visitor);
 
   // The objects reachable from the roots or object groups are marked,
   // yet unreachable objects are unmarked. Mark objects reachable
@@ -2337,7 +2146,7 @@ void MarkCompactCollector::MarkLiveObjects() {
 
   // Repeat host application specific marking to mark unmarked objects
   // reachable from the weak roots.
-  ProcessExternalMarking();
+  ProcessExternalMarking(&root_visitor);
 
   AfterMarking();
 }
@@ -2359,6 +2168,7 @@ void MarkCompactCollector::AfterMarking() {
   symbol_table->ElementsRemoved(v.PointersRemoved());
   heap()->external_string_table_.Iterate(&v);
   heap()->external_string_table_.CleanUp();
+  heap()->error_object_list_.RemoveUnmarked(heap());
 
   // Process the weak references.
   MarkCompactWeakObjectRetainer mark_compact_object_retainer;
@@ -2371,17 +2181,26 @@ void MarkCompactCollector::AfterMarking() {
   // Flush code from collected candidates.
   if (is_code_flushing_enabled()) {
     code_flusher_->ProcessCandidates();
+    // If incremental marker does not support code flushing, we need to
+    // disable it before incremental marking steps for next cycle.
+    if (FLAG_flush_code && !FLAG_flush_code_incrementally) {
+      EnableCodeFlushing(false);
+    }
   }
 
   if (!FLAG_watch_ic_patching) {
     // Clean up dead objects from the runtime profiler.
     heap()->isolate()->runtime_profiler()->RemoveDeadSamples();
   }
+
+  if (FLAG_track_gc_object_stats) {
+    heap()->CheckpointObjectStats();
+  }
 }
 
 
 void MarkCompactCollector::ProcessMapCaches() {
-  Object* raw_context = heap()->global_contexts_list_;
+  Object* raw_context = heap()->native_contexts_list_;
   while (raw_context != heap()->undefined_value()) {
     Context* context = reinterpret_cast<Context*>(raw_context);
     if (IsMarked(context)) {
@@ -2449,7 +2268,7 @@ void MarkCompactCollector::ReattachInitialMaps() {
 }
 
 
-void MarkCompactCollector::ClearNonLiveTransitions() {
+void MarkCompactCollector::ClearNonLiveReferences() {
   HeapObjectIterator map_iterator(heap()->map_space());
   // Iterate over the map space, setting map transitions that go from
   // a marked map to an unmarked map to null transitions. This action
@@ -2461,9 +2280,7 @@ void MarkCompactCollector::ClearNonLiveTransitions() {
     if (map->IsFreeSpace()) continue;
 
     ASSERT(map->IsMap());
-
-    STATIC_ASSERT(LAST_TYPE == LAST_JS_OBJECT_TYPE);
-    if (map->instance_type() < FIRST_JS_OBJECT_TYPE) continue;
+    if (!map->CanTransition()) continue;
 
     if (map_mark.Get() &&
         map->attached_to_shared_function_info()) {
@@ -2475,13 +2292,19 @@ void MarkCompactCollector::ClearNonLiveTransitions() {
 
     ClearNonLivePrototypeTransitions(map);
     ClearNonLiveMapTransitions(map, map_mark);
+
+    if (map_mark.Get()) {
+      ClearNonLiveDependentCodes(map);
+    } else {
+      ClearAndDeoptimizeDependentCodes(map);
+    }
   }
 }
 
 
 void MarkCompactCollector::ClearNonLivePrototypeTransitions(Map* map) {
   int number_of_transitions = map->NumberOfProtoTransitions();
-  FixedArray* prototype_transitions = map->prototype_transitions();
+  FixedArray* prototype_transitions = map->GetPrototypeTransitions();
 
   int new_number_of_transitions = 0;
   const int header = Map::kProtoTransitionHeaderSize;
@@ -2543,6 +2366,46 @@ void MarkCompactCollector::ClearNonLiveMapTransitions(Map* map,
 }
 
 
+void MarkCompactCollector::ClearAndDeoptimizeDependentCodes(Map* map) {
+  AssertNoAllocation no_allocation_scope;
+  DependentCodes* codes = map->dependent_codes();
+  int number_of_codes = codes->number_of_codes();
+  if (number_of_codes == 0) return;
+  for (int i = 0; i < number_of_codes; i++) {
+    Code* code = codes->code_at(i);
+    if (IsMarked(code) && !code->marked_for_deoptimization()) {
+      code->set_marked_for_deoptimization(true);
+    }
+    codes->clear_code_at(i);
+  }
+  map->set_dependent_codes(DependentCodes::cast(heap()->empty_fixed_array()));
+}
+
+
+void MarkCompactCollector::ClearNonLiveDependentCodes(Map* map) {
+  AssertNoAllocation no_allocation_scope;
+  DependentCodes* codes = map->dependent_codes();
+  int number_of_codes = codes->number_of_codes();
+  if (number_of_codes == 0) return;
+  int new_number_of_codes = 0;
+  for (int i = 0; i < number_of_codes; i++) {
+    Code* code = codes->code_at(i);
+    if (IsMarked(code) && !code->marked_for_deoptimization()) {
+      if (new_number_of_codes != i) {
+        codes->set_code_at(new_number_of_codes, code);
+      }
+      Object** slot = codes->code_slot_at(new_number_of_codes);
+      RecordSlot(slot, slot, code);
+      new_number_of_codes++;
+    }
+  }
+  for (int i = new_number_of_codes; i < number_of_codes; i++) {
+    codes->clear_code_at(i);
+  }
+  codes->set_number_of_codes(new_number_of_codes);
+}
+
+
 void MarkCompactCollector::ProcessWeakMaps() {
   Object* weak_map_obj = encountered_weak_maps();
   while (weak_map_obj != Smi::FromInt(0)) {
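The two helpers added above are the two halves of the new per-map dependent-code bookkeeping. If the map survives (ClearNonLiveDependentCodes), its code list is compacted in place: live, not-yet-deoptimized entries slide left, their slots are re-recorded for the compactor, and the tail is cleared. If the map dies (ClearAndDeoptimizeDependentCodes), every still-live dependent Code object is flagged for deoptimization and the list is dropped. The in-place filter idiom, sketched with illustrative types (the slot re-recording is modelled as a callback):

    #include <vector>

    // In-place compaction of a weak list, as in ClearNonLiveDependentCodes:
    // survivors slide left, the tail is cleared, and the count shrinks.
    template <typename T, typename KeepFn, typename RecordFn>
    int CompactWeakList(std::vector<T*>& list, int count,
                        KeepFn keep, RecordFn record_slot) {
      int new_count = 0;
      for (int i = 0; i < count; i++) {
        T* entry = list[i];
        if (keep(entry)) {
          if (new_count != i) list[new_count] = entry;  // slide survivor left
          record_slot(&list[new_count]);  // compactor must learn of this slot
          new_count++;
        }
      }
      for (int i = new_count; i < count; i++) list[i] = nullptr;  // clear tail
      return new_count;
    }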
@@ -2559,7 +2422,8 @@ void MarkCompactCollector::ProcessWeakMaps() {
         Object** value_slot =
             HeapObject::RawField(table, FixedArray::OffsetOfElementAt(
                 ObjectHashTable::EntryToValueIndex(i)));
-        StaticMarkingVisitor::MarkObjectByPointer(this, anchor, value_slot);
+        MarkCompactMarkingVisitor::MarkObjectByPointer(
+            this, anchor, value_slot);
       }
     }
     weak_map_obj = weak_map->next();
@@ -2673,15 +2537,33 @@ class PointersUpdatingVisitor: public ObjectVisitor {
   void VisitEmbeddedPointer(RelocInfo* rinfo) {
     ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
     Object* target = rinfo->target_object();
+    Object* old_target = target;
     VisitPointer(&target);
-    rinfo->set_target_object(target);
+    // Avoid unnecessary changes that might unnecessary flush the instruction
+    // cache.
+    if (target != old_target) {
+      rinfo->set_target_object(target);
+    }
   }
 
   void VisitCodeTarget(RelocInfo* rinfo) {
     ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
     Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
+    Object* old_target = target;
     VisitPointer(&target);
-    rinfo->set_target_address(Code::cast(target)->instruction_start());
+    if (target != old_target) {
+      rinfo->set_target_address(Code::cast(target)->instruction_start());
+    }
+  }
+
+  void VisitCodeAgeSequence(RelocInfo* rinfo) {
+    ASSERT(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
+    Object* stub = rinfo->code_age_stub();
+    ASSERT(stub != NULL);
+    VisitPointer(&stub);
+    if (stub != rinfo->code_age_stub()) {
+      rinfo->set_code_age_stub(Code::cast(stub));
+    }
   }
 
   void VisitDebugTarget(RelocInfo* rinfo) {
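All three visitors now follow a check-before-write pattern: re-resolving the pointer is cheap, but writing back through a RelocInfo setter can be expensive, since on some targets it implies an instruction-cache flush, which is what the added comment is guarding against. The idiom in isolation, with stand-ins for RelocInfo and the forwarding lookup:

    #include <cstdio>

    // Check-before-write, as in the updated visitors. set_target models a
    // RelocInfo setter whose write has an expensive side effect.
    struct FakeRelocInfo {
      void* target;
      void set_target(void* t) {
        target = t;
        std::printf("icache flushed\n");  // models the expensive write-back
      }
    };

    void* Forwarded(void* p) { return p; }  // identity: object did not move

    void UpdateEmbeddedPointer(FakeRelocInfo* rinfo) {
      void* target = rinfo->target;
      void* old_target = target;
      target = Forwarded(target);
      if (target != old_target) {  // pay for the flush only if it moved
        rinfo->set_target(target);
      }
    }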
@@ -2855,9 +2737,6 @@ void MarkCompactCollector::EvacuateNewSpace() {
                                          size,
                                          NEW_SPACE);
     } else {
-      // Process the dead object before we write a NULL into its header.
-      LiveObjectList::ProcessNonLive(object);
-
       // Mark dead objects in the new space with null in their map field.
       Memory::Address_at(object->address()) = NULL;
     }
@@ -2890,10 +2769,10 @@ void MarkCompactCollector::EvacuateLiveObjectsFromPage(Page* p) {
   for (;
        cell_index < last_cell_index;
        cell_index++, cell_base += 32 * kPointerSize) {
-    ASSERT((unsigned)cell_index ==
-        Bitmap::IndexToCell(
-            Bitmap::CellAlignIndex(
-                p->AddressToMarkbitIndex(cell_base))));
+    ASSERT(static_cast<unsigned>(cell_index) ==
+           Bitmap::IndexToCell(
+               Bitmap::CellAlignIndex(
+                   p->AddressToMarkbitIndex(cell_base))));
     if (cells[cell_index] == 0) continue;
 
     int live_objects = MarkWordToObjectStarts(cells[cell_index], offsets);
@@ -3036,6 +2915,11 @@ static void SweepPrecisely(PagedSpace* space,
          space->identity() == CODE_SPACE);
   ASSERT((p->skip_list() == NULL) || (skip_list_mode == REBUILD_SKIP_LIST));
 
+  double start_time = 0.0;
+  if (FLAG_print_cumulative_gc_stat) {
+    start_time = OS::TimeCurrentMillis();
+  }
+
   MarkBit::CellType* cells = p->markbits()->cells();
   p->MarkSweptPrecisely();
 
@@ -3063,10 +2947,10 @@ static void SweepPrecisely(PagedSpace* space,
   for (;
        cell_index < last_cell_index;
        cell_index++, object_address += 32 * kPointerSize) {
-    ASSERT((unsigned)cell_index ==
-        Bitmap::IndexToCell(
-            Bitmap::CellAlignIndex(
-                p->AddressToMarkbitIndex(object_address))));
+    ASSERT(static_cast<unsigned>(cell_index) ==
+           Bitmap::IndexToCell(
+               Bitmap::CellAlignIndex(
+                   p->AddressToMarkbitIndex(object_address))));
     int live_objects = MarkWordToObjectStarts(cells[cell_index], offsets);
     int live_index = 0;
     for ( ; live_objects != 0; live_objects--) {
@@ -3101,6 +2985,9 @@ static void SweepPrecisely(PagedSpace* space,
     space->Free(free_start, static_cast<int>(p->area_end() - free_start));
   }
   p->ResetLiveBytes();
+  if (FLAG_print_cumulative_gc_stat) {
+    space->heap()->AddSweepingTime(OS::TimeCurrentMillis() - start_time);
+  }
 }
 
 
@@ -3221,6 +3108,8 @@ void MarkCompactCollector::ProcessInvalidatedCode(ObjectVisitor* visitor) {
 
 
 void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
+  Heap::RelocationLock relocation_lock(heap());
+
   bool code_slots_filtering_required;
   { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE);
     code_slots_filtering_required = MarkInvalidatedCode();
@@ -3255,7 +3144,6 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
                         GCTracer::Scope::MC_UPDATE_ROOT_TO_NEW_POINTERS);
     // Update roots.
    heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE);
-    LiveObjectList::IterateElements(&updating_visitor);
   }
 
   { GCTracer::Scope gc_scope(tracer_,
@@ -3326,7 +3214,7 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
 
     switch (space->identity()) {
       case OLD_DATA_SPACE:
-        SweepConservatively(space, p);
+        SweepConservatively<SWEEP_SEQUENTIALLY>(space, NULL, p);
        break;
       case OLD_POINTER_SPACE:
         SweepPrecisely<SWEEP_AND_VISIT_LIVE_OBJECTS, IGNORE_SKIP_LIST>(
@@ -3359,8 +3247,8 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
     }
   }
 
-  // Update pointer from the global contexts list.
-  updating_visitor.VisitPointer(heap_->global_contexts_list_address());
+  // Update pointer from the native contexts list.
+  updating_visitor.VisitPointer(heap_->native_contexts_list_address());
 
   heap_->symbol_table()->Iterate(&updating_visitor);
 
@@ -3368,6 +3256,9 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
   heap_->UpdateReferencesInExternalStringTable(
       &UpdateReferenceInExternalStringTableEntry);
 
+  // Update pointers in the new error object list.
+  heap_->error_object_list()->UpdateReferences();
+
   if (!FLAG_watch_ic_patching) {
     // Update JSFunction pointers from the runtime profiler.
     heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact(
@@ -3383,7 +3274,7 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
 
   heap_->isolate()->inner_pointer_to_code_cache()->Flush();
 
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
     VerifyEvacuation(heap_);
   }
@@ -3682,6 +3573,33 @@ static inline Address StartOfLiveObject(Address block_address, uint32_t cell) {
 }
 
 
+template<MarkCompactCollector::SweepingParallelism mode>
+static intptr_t Free(PagedSpace* space,
+                     FreeList* free_list,
+                     Address start,
+                     int size) {
+  if (mode == MarkCompactCollector::SWEEP_SEQUENTIALLY) {
+    return space->Free(start, size);
+  } else {
+    return size - free_list->Free(start, size);
+  }
+}
+
+
+// Force instantiation of templatized SweepConservatively method for
+// SWEEP_SEQUENTIALLY mode.
+template intptr_t MarkCompactCollector::
+    SweepConservatively<MarkCompactCollector::SWEEP_SEQUENTIALLY>(
+        PagedSpace*, FreeList*, Page*);
+
+
+// Force instantiation of templatized SweepConservatively method for
+// SWEEP_IN_PARALLEL mode.
+template intptr_t MarkCompactCollector::
+    SweepConservatively<MarkCompactCollector::SWEEP_IN_PARALLEL>(
+        PagedSpace*, FreeList*, Page*);
+
+
 // Sweeps a space conservatively. After this has been done the larger free
 // spaces have been put on the free list and the smaller ones have been
 // ignored and left untouched. A free space is always either ignored or put
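Free&lt;mode&gt; added above is a compile-time dispatch: the branch is on a template parameter, so each of the two forced instantiations carries only one path. Sequentially, PagedSpace::Free does its own accounting and returns the freed bytes; in parallel mode the block goes onto a thread-private FreeList, and its return value is converted into freed bytes by subtracting from the block size. A standalone model with illustrative types and accounting:

    #include <cassert>

    // Compile-time dispatch on a template parameter, as in Free<mode> above.
    enum SweepingParallelism { SWEEP_SEQUENTIALLY, SWEEP_IN_PARALLEL };

    struct Space    { int Free(int size) { return size; } };       // freed
    struct FreeList { int Free(int size) { return size % 32; } };  // leftover

    template <SweepingParallelism mode>
    int FreeBlock(Space* space, FreeList* free_list, int size) {
      if (mode == SWEEP_SEQUENTIALLY) {
        return space->Free(size);             // space does its own accounting
      } else {
        return size - free_list->Free(size);  // convert leftover to freed
      }
    }

    int main() {
      Space s; FreeList f;
      assert(FreeBlock<SWEEP_SEQUENTIALLY>(&s, nullptr, 100) == 100);
      assert(FreeBlock<SWEEP_IN_PARALLEL>(&s, &f, 100) == 100 - (100 % 32));
      return 0;
    }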
@@ -3689,8 +3607,16 @@ static inline Address StartOfLiveObject(Address block_address, uint32_t cell) {
 // because it means that any FreeSpace maps left actually describe a region of
 // memory that can be ignored when scanning. Dead objects other than free
 // spaces will not contain the free space map.
-intptr_t MarkCompactCollector::SweepConservatively(PagedSpace* space, Page* p) {
+template<MarkCompactCollector::SweepingParallelism mode>
+intptr_t MarkCompactCollector::SweepConservatively(PagedSpace* space,
+                                                   FreeList* free_list,
+                                                   Page* p) {
   ASSERT(!p->IsEvacuationCandidate() && !p->WasSwept());
+  ASSERT((mode == MarkCompactCollector::SWEEP_IN_PARALLEL &&
+          free_list != NULL) ||
+         (mode == MarkCompactCollector::SWEEP_SEQUENTIALLY &&
+          free_list == NULL));
+
   MarkBit::CellType* cells = p->markbits()->cells();
   p->MarkSweptConservatively();
 
@@ -3717,8 +3643,8 @@ intptr_t MarkCompactCollector::SweepConservatively(PagedSpace* space, Page* p) {
   }
   size_t size = block_address - p->area_start();
   if (cell_index == last_cell_index) {
-    freed_bytes += space->Free(p->area_start(),
-                               static_cast<int>(size));
+    freed_bytes += Free<mode>(space, free_list, p->area_start(),
+                              static_cast<int>(size));
     ASSERT_EQ(0, p->LiveBytes());
     return freed_bytes;
   }
@@ -3727,8 +3653,9 @@ intptr_t MarkCompactCollector::SweepConservatively(PagedSpace* space, Page* p) {
     Address free_end = StartOfLiveObject(block_address, cells[cell_index]);
     // Free the first free space.
     size = free_end - p->area_start();
-    freed_bytes += space->Free(p->area_start(),
-                               static_cast<int>(size));
+    freed_bytes += Free<mode>(space, free_list, p->area_start(),
+                              static_cast<int>(size));
+
     // The start of the current free area is represented in undigested form by
     // the address of the last 32-word section that contained a live object and
     // the marking bitmap for that cell, which describes where the live object
@@ -3757,8 +3684,8 @@ intptr_t MarkCompactCollector::SweepConservatively(PagedSpace* space, Page* p) {
         // so now we need to find the start of the first live object at the
         // end of the free space.
         free_end = StartOfLiveObject(block_address, cell);
-        freed_bytes += space->Free(free_start,
-                                   static_cast<int>(free_end - free_start));
+        freed_bytes += Free<mode>(space, free_list, free_start,
+                                  static_cast<int>(free_end - free_start));
       }
     }
     // Update our undigested record of where the current free area started.
@@ -3772,8 +3699,8 @@ intptr_t MarkCompactCollector::SweepConservatively(PagedSpace* space, Page* p) {
   // Handle the free space at the end of the page.
   if (block_address - free_start > 32 * kPointerSize) {
     free_start = DigestFreeStart(free_start, free_start_cell);
-    freed_bytes += space->Free(free_start,
-                               static_cast<int>(block_address - free_start));
+    freed_bytes += Free<mode>(space, free_list, free_start,
+                              static_cast<int>(block_address - free_start));
   }
 
   p->ResetLiveBytes();
@@ -3781,28 +3708,37 @@ intptr_t MarkCompactCollector::SweepConservatively(PagedSpace* space, Page* p) {
 }
 
 
+void MarkCompactCollector::SweepInParallel(PagedSpace* space,
+                                           FreeList* private_free_list,
+                                           FreeList* free_list) {
+  PageIterator it(space);
+  while (it.has_next()) {
+    Page* p = it.next();
+
+    if (p->TryParallelSweeping()) {
+      SweepConservatively<SWEEP_IN_PARALLEL>(space, private_free_list, p);
+      free_list->Concatenate(private_free_list);
+    }
+  }
+}
+
+
 void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) {
   space->set_was_swept_conservatively(sweeper == CONSERVATIVE ||
                                       sweeper == LAZY_CONSERVATIVE);
-
   space->ClearStats();
 
   PageIterator it(space);
 
   intptr_t freed_bytes = 0;
   int pages_swept = 0;
-  intptr_t newspace_size = space->heap()->new_space()->Size();
   bool lazy_sweeping_active = false;
   bool unused_page_present = false;
 
-  intptr_t old_space_size = heap()->PromotedSpaceSizeOfObjects();
-  intptr_t space_left =
-      Min(heap()->OldGenPromotionLimit(old_space_size),
-          heap()->OldGenAllocationLimit(old_space_size)) - old_space_size;
-
   while (it.has_next()) {
     Page* p = it.next();
 
+    ASSERT(p->parallel_sweeping() == 0);
     // Clear sweeping flags indicating that marking bits are still intact.
     p->ClearSweptPrecisely();
     p->ClearSweptConservatively();
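SweepInParallel above is the sweeper-thread work loop: pages are pre-tagged by SweepSpace (p-&gt;set_parallel_sweeping(1), in a hunk below), and TryParallelSweeping acts as the claim operation so that competing threads sweep each page exactly once, each into its own private free list before merging into the shared one. A sketch of the claiming scheme using std::atomic (V8 3.16 predates std::atomic and presumably uses its own primitives; a real shared-list merge would also need synchronization):

    #include <atomic>
    #include <vector>

    struct Page {
      std::atomic<int> parallel_sweeping{1};  // 1 = pending, 0 = claimed/done
      bool TryParallelSweeping() {
        return parallel_sweeping.exchange(0) == 1;  // atomic test-and-clear
      }
    };

    struct FreeList {
      std::vector<int> blocks;
      void Concatenate(FreeList* other) {
        blocks.insert(blocks.end(), other->blocks.begin(),
                      other->blocks.end());
        other->blocks.clear();
      }
    };

    void SweepPageConservatively(Page*, FreeList* out) {
      out->blocks.push_back(1);  // stand-in for the real sweep
    }

    // Run by each sweeper thread with its own private_free_list.
    void SweepInParallelModel(std::vector<Page>& pages,
                              FreeList* private_free_list,
                              FreeList* shared_free_list) {
      for (Page& p : pages) {
        if (p.TryParallelSweeping()) {
          SweepPageConservatively(&p, private_free_list);
          shared_free_list->Concatenate(private_free_list);  // publish
        }
      }
    }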
@@ -3848,7 +3784,7 @@ void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) {
           PrintF("Sweeping 0x%" V8PRIxPTR " conservatively.\n",
                  reinterpret_cast<intptr_t>(p));
         }
-        SweepConservatively(space, p);
+        SweepConservatively<SWEEP_SEQUENTIALLY>(space, NULL, p);
         pages_swept++;
         break;
       }
@@ -3857,17 +3793,18 @@ void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) {
           PrintF("Sweeping 0x%" V8PRIxPTR " conservatively as needed.\n",
                  reinterpret_cast<intptr_t>(p));
         }
-        freed_bytes += SweepConservatively(space, p);
+        freed_bytes += SweepConservatively<SWEEP_SEQUENTIALLY>(space, NULL, p);
         pages_swept++;
-
-
-
-
-
-
-
-
+        space->SetPagesToSweep(p->next_page());
+        lazy_sweeping_active = true;
+        break;
+      }
+      case PARALLEL_CONSERVATIVE: {
+        if (FLAG_gc_verbose) {
+          PrintF("Sweeping 0x%" V8PRIxPTR " conservatively in parallel.\n",
+                 reinterpret_cast<intptr_t>(p));
         }
+        p->set_parallel_sweeping(1);
         break;
       }
       case PRECISE: {
@@ -3909,11 +3846,13 @@ void MarkCompactCollector::SweepSpaces() {
       FLAG_lazy_sweeping ? LAZY_CONSERVATIVE : CONSERVATIVE;
   if (FLAG_expose_gc) how_to_sweep = CONSERVATIVE;
   if (sweep_precisely_) how_to_sweep = PRECISE;
+  if (AreSweeperThreadsActivated()) how_to_sweep = PARALLEL_CONSERVATIVE;
   // Noncompacting collections simply sweep the spaces to clear the mark
   // bits and free the nonlive blocks (for old and map spaces). We sweep
   // the map space last because freeing non-live maps overwrites them and
   // the other spaces rely on possibly non-live maps to get the sizes for
   // non-live objects.
+
   SweepSpace(heap()->old_pointer_space(), how_to_sweep);
   SweepSpace(heap()->old_data_space(), how_to_sweep);
 
@@ -3924,6 +3863,15 @@ void MarkCompactCollector::SweepSpaces() {
 
   EvacuateNewSpaceAndCandidates();
 
+  if (AreSweeperThreadsActivated()) {
+    // TODO(hpayer): The starting of the sweeper threads should be after
+    // SweepSpace old data space.
+    StartSweeperThreads();
+    if (FLAG_parallel_sweeping && !FLAG_concurrent_sweeping) {
+      WaitUntilSweepingCompleted();
+    }
+  }
+
   // ClearNonLiveTransitions depends on precise sweeping of map space to
   // detect whether unmarked map became dead in this collection or in one
   // of the previous ones.
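The added block wires up the two sweeping modes behind the new flags: with FLAG_parallel_sweeping but not FLAG_concurrent_sweeping, the sweeper threads are started and then immediately awaited, so all sweeping still finishes inside the GC pause, just on multiple cores; with concurrent sweeping the wait is skipped and the threads keep running alongside the mutator. A minimal start-then-maybe-wait model (std::thread used purely for illustration):

    #include <thread>
    #include <vector>

    // Flag names mirror the diff; everything else is illustrative.
    static bool FLAG_parallel_sweeping = true;
    static bool FLAG_concurrent_sweeping = false;

    void SweeperThreadBody() { /* claim and sweep pages, as sketched above */ }

    void SweepSpacesTail(std::vector<std::thread>* sweepers) {
      sweepers->emplace_back(SweeperThreadBody);  // StartSweeperThreads()
      if (FLAG_parallel_sweeping && !FLAG_concurrent_sweeping) {
        for (std::thread& t : *sweepers) t.join();  // pause-time sweeping
        sweepers->clear();
      }
      // Otherwise the threads keep sweeping concurrently with the mutator.
    }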
@@ -3935,11 +3883,19 @@ void MarkCompactCollector::SweepSpaces() {
 
 
 void MarkCompactCollector::EnableCodeFlushing(bool enable) {
+#ifdef ENABLE_DEBUGGER_SUPPORT
+  if (heap()->isolate()->debug()->IsLoaded() ||
+      heap()->isolate()->debug()->has_break_points()) {
+    enable = false;
+  }
+#endif
+
   if (enable) {
     if (code_flusher_ != NULL) return;
     code_flusher_ = new CodeFlusher(heap()->isolate());
   } else {
     if (code_flusher_ == NULL) return;
+    code_flusher_->EvictAllCandidates();
     delete code_flusher_;
     code_flusher_ = NULL;
   }
@@ -3963,7 +3919,8 @@ void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj,
 
 
 void MarkCompactCollector::Initialize() {
-  StaticMarkingVisitor::Initialize();
+  MarkCompactMarkingVisitor::Initialize();
+  IncrementalMarking::Initialize();
 }
 
 
@@ -4039,6 +3996,20 @@ void MarkCompactCollector::RecordCodeEntrySlot(Address slot, Code* target) {
 }
 
 
+void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) {
+  ASSERT(heap()->gc_state() == Heap::MARK_COMPACT);
+  if (is_compacting()) {
+    Code* host = heap()->isolate()->inner_pointer_to_code_cache()->
+        GcSafeFindCodeForInnerPointer(pc);
+    MarkBit mark_bit = Marking::MarkBitFrom(host);
+    if (Marking::IsBlack(mark_bit)) {
+      RelocInfo rinfo(pc, RelocInfo::CODE_TARGET, 0, host);
+      RecordRelocSlot(&rinfo, target);
+    }
+  }
+}
+
+
 static inline SlotsBuffer::SlotType DecodeSlotType(
     SlotsBuffer::ObjectSlot slot) {
   return static_cast<SlotsBuffer::SlotType>(reinterpret_cast<intptr_t>(slot));