mustang 0.0.1 → 0.1.0
- data/.rspec +1 -0
- data/Isolate +9 -0
- data/README.md +6 -12
- data/Rakefile +30 -4
- data/TODO.md +9 -0
- data/ext/v8/extconf.rb +56 -0
- data/ext/v8/v8.cpp +37 -0
- data/ext/v8/v8_array.cpp +161 -0
- data/ext/v8/v8_array.h +17 -0
- data/ext/v8/v8_base.cpp +147 -0
- data/ext/v8/v8_base.h +23 -0
- data/ext/v8/v8_cast.cpp +151 -0
- data/ext/v8/v8_cast.h +64 -0
- data/ext/v8/v8_context.cpp +174 -0
- data/ext/v8/v8_context.h +12 -0
- data/ext/v8/v8_date.cpp +61 -0
- data/ext/v8/v8_date.h +16 -0
- data/ext/v8/v8_errors.cpp +147 -0
- data/ext/v8/v8_errors.h +19 -0
- data/ext/v8/v8_external.cpp +66 -0
- data/ext/v8/v8_external.h +16 -0
- data/ext/v8/v8_function.cpp +182 -0
- data/ext/v8/v8_function.h +14 -0
- data/ext/v8/v8_integer.cpp +70 -0
- data/ext/v8/v8_integer.h +16 -0
- data/ext/v8/v8_macros.h +30 -0
- data/ext/v8/v8_main.cpp +53 -0
- data/ext/v8/v8_main.h +13 -0
- data/ext/v8/v8_number.cpp +62 -0
- data/ext/v8/v8_number.h +16 -0
- data/ext/v8/v8_object.cpp +172 -0
- data/ext/v8/v8_object.h +17 -0
- data/ext/v8/v8_ref.cpp +72 -0
- data/ext/v8/v8_ref.h +43 -0
- data/ext/v8/v8_regexp.cpp +148 -0
- data/ext/v8/v8_regexp.h +16 -0
- data/ext/v8/v8_string.cpp +78 -0
- data/ext/v8/v8_string.h +16 -0
- data/ext/v8/v8_value.cpp +370 -0
- data/ext/v8/v8_value.h +19 -0
- data/gemspec.yml +2 -1
- data/lib/core_ext/class.rb +14 -0
- data/lib/core_ext/object.rb +12 -0
- data/lib/core_ext/symbol.rb +23 -0
- data/lib/mustang.rb +44 -0
- data/lib/mustang/context.rb +69 -0
- data/lib/mustang/errors.rb +36 -0
- data/lib/support/delegated.rb +25 -0
- data/lib/v8/array.rb +21 -0
- data/lib/v8/context.rb +13 -0
- data/lib/v8/date.rb +20 -0
- data/lib/v8/error.rb +15 -0
- data/lib/v8/external.rb +16 -0
- data/lib/v8/function.rb +11 -0
- data/lib/v8/integer.rb +16 -0
- data/lib/v8/number.rb +16 -0
- data/lib/v8/object.rb +66 -0
- data/lib/v8/regexp.rb +23 -0
- data/lib/v8/string.rb +27 -0
- data/mustang.gemspec +3 -0
- data/spec/core_ext/class_spec.rb +19 -0
- data/spec/core_ext/object_spec.rb +19 -0
- data/spec/core_ext/symbol_spec.rb +27 -0
- data/spec/fixtures/test1.js +2 -0
- data/spec/fixtures/test2.js +2 -0
- data/spec/spec_helper.rb +20 -0
- data/spec/v8/array_spec.rb +88 -0
- data/spec/v8/cast_spec.rb +151 -0
- data/spec/v8/context_spec.rb +78 -0
- data/spec/v8/data_spec.rb +39 -0
- data/spec/v8/date_spec.rb +45 -0
- data/spec/v8/empty_spec.rb +27 -0
- data/spec/v8/errors_spec.rb +142 -0
- data/spec/v8/external_spec.rb +44 -0
- data/spec/v8/function_spec.rb +170 -0
- data/spec/v8/integer_spec.rb +41 -0
- data/spec/v8/main_spec.rb +18 -0
- data/spec/v8/null_spec.rb +27 -0
- data/spec/v8/number_spec.rb +40 -0
- data/spec/v8/object_spec.rb +79 -0
- data/spec/v8/primitive_spec.rb +9 -0
- data/spec/v8/regexp_spec.rb +65 -0
- data/spec/v8/string_spec.rb +48 -0
- data/spec/v8/undefined_spec.rb +27 -0
- data/spec/v8/value_spec.rb +215 -0
- data/vendor/v8/.gitignore +2 -0
- data/vendor/v8/AUTHORS +3 -1
- data/vendor/v8/ChangeLog +117 -0
- data/vendor/v8/SConstruct +334 -53
- data/vendor/v8/include/v8-debug.h +21 -11
- data/vendor/v8/include/v8-preparser.h +1 -1
- data/vendor/v8/include/v8-profiler.h +122 -43
- data/vendor/v8/include/v8-testing.h +5 -0
- data/vendor/v8/include/v8.h +171 -17
- data/vendor/v8/preparser/SConscript +38 -0
- data/vendor/v8/preparser/preparser-process.cc +77 -114
- data/vendor/v8/samples/shell.cc +232 -46
- data/vendor/v8/src/SConscript +29 -5
- data/vendor/v8/src/accessors.cc +70 -211
- data/vendor/v8/{test/cctest/test-mips.cc → src/allocation-inl.h} +15 -18
- data/vendor/v8/src/allocation.cc +0 -82
- data/vendor/v8/src/allocation.h +9 -42
- data/vendor/v8/src/api.cc +1645 -1156
- data/vendor/v8/src/api.h +76 -12
- data/vendor/v8/src/apiutils.h +0 -7
- data/vendor/v8/src/arguments.h +15 -4
- data/vendor/v8/src/arm/assembler-arm-inl.h +10 -9
- data/vendor/v8/src/arm/assembler-arm.cc +62 -23
- data/vendor/v8/src/arm/assembler-arm.h +76 -11
- data/vendor/v8/src/arm/builtins-arm.cc +39 -33
- data/vendor/v8/src/arm/code-stubs-arm.cc +1182 -402
- data/vendor/v8/src/arm/code-stubs-arm.h +20 -54
- data/vendor/v8/src/arm/codegen-arm.cc +159 -106
- data/vendor/v8/src/arm/codegen-arm.h +6 -6
- data/vendor/v8/src/arm/constants-arm.h +16 -1
- data/vendor/v8/src/arm/cpu-arm.cc +7 -5
- data/vendor/v8/src/arm/debug-arm.cc +6 -4
- data/vendor/v8/src/arm/deoptimizer-arm.cc +51 -14
- data/vendor/v8/src/arm/disasm-arm.cc +47 -15
- data/vendor/v8/src/arm/frames-arm.h +1 -1
- data/vendor/v8/src/arm/full-codegen-arm.cc +724 -408
- data/vendor/v8/src/arm/ic-arm.cc +90 -85
- data/vendor/v8/src/arm/lithium-arm.cc +140 -69
- data/vendor/v8/src/arm/lithium-arm.h +161 -46
- data/vendor/v8/src/arm/lithium-codegen-arm.cc +567 -297
- data/vendor/v8/src/arm/lithium-codegen-arm.h +21 -9
- data/vendor/v8/src/arm/lithium-gap-resolver-arm.cc +2 -0
- data/vendor/v8/src/arm/macro-assembler-arm.cc +457 -96
- data/vendor/v8/src/arm/macro-assembler-arm.h +115 -18
- data/vendor/v8/src/arm/regexp-macro-assembler-arm.cc +20 -13
- data/vendor/v8/src/arm/regexp-macro-assembler-arm.h +1 -0
- data/vendor/v8/src/arm/simulator-arm.cc +184 -101
- data/vendor/v8/src/arm/simulator-arm.h +26 -21
- data/vendor/v8/src/arm/stub-cache-arm.cc +450 -467
- data/vendor/v8/src/arm/virtual-frame-arm.cc +14 -12
- data/vendor/v8/src/arm/virtual-frame-arm.h +11 -8
- data/vendor/v8/src/array.js +35 -18
- data/vendor/v8/src/assembler.cc +186 -92
- data/vendor/v8/src/assembler.h +106 -69
- data/vendor/v8/src/ast-inl.h +5 -0
- data/vendor/v8/src/ast.cc +46 -35
- data/vendor/v8/src/ast.h +107 -50
- data/vendor/v8/src/atomicops.h +2 -0
- data/vendor/v8/src/atomicops_internals_mips_gcc.h +169 -0
- data/vendor/v8/src/bootstrapper.cc +649 -399
- data/vendor/v8/src/bootstrapper.h +94 -27
- data/vendor/v8/src/builtins.cc +359 -227
- data/vendor/v8/src/builtins.h +157 -123
- data/vendor/v8/src/checks.cc +2 -2
- data/vendor/v8/src/checks.h +4 -0
- data/vendor/v8/src/code-stubs.cc +27 -17
- data/vendor/v8/src/code-stubs.h +38 -17
- data/vendor/v8/src/codegen-inl.h +5 -1
- data/vendor/v8/src/codegen.cc +27 -17
- data/vendor/v8/src/codegen.h +9 -9
- data/vendor/v8/src/compilation-cache.cc +92 -206
- data/vendor/v8/src/compilation-cache.h +205 -30
- data/vendor/v8/src/compiler.cc +107 -120
- data/vendor/v8/src/compiler.h +17 -2
- data/vendor/v8/src/contexts.cc +22 -15
- data/vendor/v8/src/contexts.h +14 -8
- data/vendor/v8/src/conversions.cc +86 -30
- data/vendor/v8/src/counters.cc +19 -4
- data/vendor/v8/src/counters.h +28 -16
- data/vendor/v8/src/cpu-profiler-inl.h +4 -3
- data/vendor/v8/src/cpu-profiler.cc +123 -72
- data/vendor/v8/src/cpu-profiler.h +33 -19
- data/vendor/v8/src/cpu.h +2 -0
- data/vendor/v8/src/d8-debug.cc +3 -3
- data/vendor/v8/src/d8-debug.h +7 -6
- data/vendor/v8/src/d8-posix.cc +2 -0
- data/vendor/v8/src/d8.cc +22 -12
- data/vendor/v8/src/d8.gyp +3 -0
- data/vendor/v8/src/d8.js +618 -0
- data/vendor/v8/src/data-flow.h +3 -3
- data/vendor/v8/src/dateparser.h +4 -2
- data/vendor/v8/src/debug-agent.cc +10 -9
- data/vendor/v8/src/debug-agent.h +9 -11
- data/vendor/v8/src/debug-debugger.js +121 -0
- data/vendor/v8/src/debug.cc +331 -227
- data/vendor/v8/src/debug.h +248 -219
- data/vendor/v8/src/deoptimizer.cc +173 -62
- data/vendor/v8/src/deoptimizer.h +119 -19
- data/vendor/v8/src/disasm.h +3 -0
- data/vendor/v8/src/disassembler.cc +10 -9
- data/vendor/v8/src/execution.cc +185 -129
- data/vendor/v8/src/execution.h +47 -78
- data/vendor/v8/src/extensions/experimental/break-iterator.cc +250 -0
- data/vendor/v8/src/extensions/experimental/break-iterator.h +89 -0
- data/vendor/v8/src/extensions/experimental/experimental.gyp +2 -0
- data/vendor/v8/src/extensions/experimental/i18n-extension.cc +22 -2
- data/vendor/v8/src/extensions/externalize-string-extension.cc +2 -2
- data/vendor/v8/src/extensions/gc-extension.cc +1 -1
- data/vendor/v8/src/factory.cc +261 -154
- data/vendor/v8/src/factory.h +162 -158
- data/vendor/v8/src/flag-definitions.h +17 -11
- data/vendor/v8/src/frame-element.cc +0 -5
- data/vendor/v8/src/frame-element.h +9 -13
- data/vendor/v8/src/frames-inl.h +7 -0
- data/vendor/v8/src/frames.cc +56 -46
- data/vendor/v8/src/frames.h +36 -25
- data/vendor/v8/src/full-codegen.cc +15 -24
- data/vendor/v8/src/full-codegen.h +13 -41
- data/vendor/v8/src/func-name-inferrer.cc +7 -6
- data/vendor/v8/src/func-name-inferrer.h +1 -1
- data/vendor/v8/src/gdb-jit.cc +1 -0
- data/vendor/v8/src/global-handles.cc +118 -56
- data/vendor/v8/src/global-handles.h +98 -40
- data/vendor/v8/src/globals.h +2 -2
- data/vendor/v8/src/handles-inl.h +106 -9
- data/vendor/v8/src/handles.cc +220 -157
- data/vendor/v8/src/handles.h +38 -59
- data/vendor/v8/src/hashmap.h +3 -3
- data/vendor/v8/src/heap-inl.h +141 -25
- data/vendor/v8/src/heap-profiler.cc +117 -63
- data/vendor/v8/src/heap-profiler.h +38 -21
- data/vendor/v8/src/heap.cc +805 -564
- data/vendor/v8/src/heap.h +640 -594
- data/vendor/v8/src/hydrogen-instructions.cc +216 -73
- data/vendor/v8/src/hydrogen-instructions.h +259 -124
- data/vendor/v8/src/hydrogen.cc +996 -1171
- data/vendor/v8/src/hydrogen.h +163 -144
- data/vendor/v8/src/ia32/assembler-ia32-inl.h +12 -11
- data/vendor/v8/src/ia32/assembler-ia32.cc +85 -39
- data/vendor/v8/src/ia32/assembler-ia32.h +82 -16
- data/vendor/v8/src/ia32/builtins-ia32.cc +64 -58
- data/vendor/v8/src/ia32/code-stubs-ia32.cc +248 -324
- data/vendor/v8/src/ia32/code-stubs-ia32.h +3 -44
- data/vendor/v8/src/ia32/codegen-ia32.cc +217 -165
- data/vendor/v8/src/ia32/codegen-ia32.h +3 -0
- data/vendor/v8/src/ia32/cpu-ia32.cc +6 -5
- data/vendor/v8/src/ia32/debug-ia32.cc +8 -5
- data/vendor/v8/src/ia32/deoptimizer-ia32.cc +124 -14
- data/vendor/v8/src/ia32/disasm-ia32.cc +85 -62
- data/vendor/v8/src/ia32/frames-ia32.h +1 -1
- data/vendor/v8/src/ia32/full-codegen-ia32.cc +348 -435
- data/vendor/v8/src/ia32/ic-ia32.cc +91 -91
- data/vendor/v8/src/ia32/lithium-codegen-ia32.cc +500 -255
- data/vendor/v8/src/ia32/lithium-codegen-ia32.h +13 -4
- data/vendor/v8/src/ia32/lithium-gap-resolver-ia32.cc +6 -0
- data/vendor/v8/src/ia32/lithium-ia32.cc +122 -45
- data/vendor/v8/src/ia32/lithium-ia32.h +128 -41
- data/vendor/v8/src/ia32/macro-assembler-ia32.cc +109 -84
- data/vendor/v8/src/ia32/macro-assembler-ia32.h +18 -9
- data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.cc +26 -15
- data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.h +1 -0
- data/vendor/v8/src/ia32/register-allocator-ia32.cc +30 -30
- data/vendor/v8/src/ia32/simulator-ia32.h +4 -4
- data/vendor/v8/src/ia32/stub-cache-ia32.cc +383 -400
- data/vendor/v8/src/ia32/virtual-frame-ia32.cc +36 -13
- data/vendor/v8/src/ia32/virtual-frame-ia32.h +11 -5
- data/vendor/v8/src/ic-inl.h +12 -2
- data/vendor/v8/src/ic.cc +304 -221
- data/vendor/v8/src/ic.h +115 -58
- data/vendor/v8/src/interpreter-irregexp.cc +25 -21
- data/vendor/v8/src/interpreter-irregexp.h +2 -1
- data/vendor/v8/src/isolate.cc +883 -0
- data/vendor/v8/src/isolate.h +1304 -0
- data/vendor/v8/src/json.js +10 -10
- data/vendor/v8/src/jsregexp.cc +111 -80
- data/vendor/v8/src/jsregexp.h +6 -7
- data/vendor/v8/src/jump-target-heavy.cc +5 -8
- data/vendor/v8/src/jump-target-heavy.h +0 -6
- data/vendor/v8/src/jump-target-inl.h +1 -1
- data/vendor/v8/src/jump-target-light.cc +3 -3
- data/vendor/v8/src/lithium-allocator-inl.h +2 -0
- data/vendor/v8/src/lithium-allocator.cc +42 -30
- data/vendor/v8/src/lithium-allocator.h +8 -22
- data/vendor/v8/src/lithium.cc +1 -0
- data/vendor/v8/src/liveedit.cc +141 -99
- data/vendor/v8/src/liveedit.h +7 -2
- data/vendor/v8/src/liveobjectlist-inl.h +90 -0
- data/vendor/v8/src/liveobjectlist.cc +2537 -1
- data/vendor/v8/src/liveobjectlist.h +245 -35
- data/vendor/v8/src/log-utils.cc +122 -35
- data/vendor/v8/src/log-utils.h +33 -36
- data/vendor/v8/src/log.cc +299 -241
- data/vendor/v8/src/log.h +177 -110
- data/vendor/v8/src/mark-compact.cc +612 -470
- data/vendor/v8/src/mark-compact.h +153 -80
- data/vendor/v8/src/messages.cc +16 -14
- data/vendor/v8/src/messages.js +30 -7
- data/vendor/v8/src/mips/assembler-mips-inl.h +155 -35
- data/vendor/v8/src/mips/assembler-mips.cc +1093 -219
- data/vendor/v8/src/mips/assembler-mips.h +552 -153
- data/vendor/v8/src/mips/builtins-mips.cc +43 -100
- data/vendor/v8/src/mips/code-stubs-mips.cc +752 -0
- data/vendor/v8/src/mips/code-stubs-mips.h +511 -0
- data/vendor/v8/src/mips/codegen-mips-inl.h +8 -14
- data/vendor/v8/src/mips/codegen-mips.cc +672 -896
- data/vendor/v8/src/mips/codegen-mips.h +271 -69
- data/vendor/v8/src/mips/constants-mips.cc +44 -20
- data/vendor/v8/src/mips/constants-mips.h +238 -40
- data/vendor/v8/src/mips/cpu-mips.cc +20 -3
- data/vendor/v8/src/mips/debug-mips.cc +35 -7
- data/vendor/v8/src/mips/deoptimizer-mips.cc +91 -0
- data/vendor/v8/src/mips/disasm-mips.cc +329 -93
- data/vendor/v8/src/mips/frames-mips.cc +2 -50
- data/vendor/v8/src/mips/frames-mips.h +24 -9
- data/vendor/v8/src/mips/full-codegen-mips.cc +473 -23
- data/vendor/v8/src/mips/ic-mips.cc +81 -45
- data/vendor/v8/src/mips/jump-target-mips.cc +11 -106
- data/vendor/v8/src/mips/lithium-codegen-mips.h +65 -0
- data/vendor/v8/src/mips/lithium-mips.h +304 -0
- data/vendor/v8/src/mips/macro-assembler-mips.cc +2391 -390
- data/vendor/v8/src/mips/macro-assembler-mips.h +718 -121
- data/vendor/v8/src/mips/regexp-macro-assembler-mips.cc +478 -0
- data/vendor/v8/src/mips/regexp-macro-assembler-mips.h +250 -0
- data/vendor/v8/src/mips/register-allocator-mips-inl.h +0 -3
- data/vendor/v8/src/mips/register-allocator-mips.h +3 -2
- data/vendor/v8/src/mips/simulator-mips.cc +1009 -221
- data/vendor/v8/src/mips/simulator-mips.h +119 -36
- data/vendor/v8/src/mips/stub-cache-mips.cc +331 -148
- data/vendor/v8/src/mips/{fast-codegen-mips.cc → virtual-frame-mips-inl.h} +11 -30
- data/vendor/v8/src/mips/virtual-frame-mips.cc +137 -149
- data/vendor/v8/src/mips/virtual-frame-mips.h +294 -312
- data/vendor/v8/src/mirror-debugger.js +9 -8
- data/vendor/v8/src/mksnapshot.cc +2 -2
- data/vendor/v8/src/objects-debug.cc +16 -16
- data/vendor/v8/src/objects-inl.h +421 -195
- data/vendor/v8/src/objects-printer.cc +7 -7
- data/vendor/v8/src/objects-visiting.cc +1 -1
- data/vendor/v8/src/objects-visiting.h +33 -12
- data/vendor/v8/src/objects.cc +935 -658
- data/vendor/v8/src/objects.h +234 -139
- data/vendor/v8/src/parser.cc +484 -439
- data/vendor/v8/src/parser.h +35 -14
- data/vendor/v8/src/platform-cygwin.cc +173 -107
- data/vendor/v8/src/platform-freebsd.cc +224 -72
- data/vendor/v8/src/platform-linux.cc +234 -95
- data/vendor/v8/src/platform-macos.cc +215 -82
- data/vendor/v8/src/platform-nullos.cc +9 -3
- data/vendor/v8/src/platform-openbsd.cc +22 -7
- data/vendor/v8/src/platform-posix.cc +30 -5
- data/vendor/v8/src/platform-solaris.cc +120 -38
- data/vendor/v8/src/platform-tls-mac.h +62 -0
- data/vendor/v8/src/platform-tls-win32.h +62 -0
- data/vendor/v8/src/platform-tls.h +50 -0
- data/vendor/v8/src/platform-win32.cc +195 -97
- data/vendor/v8/src/platform.h +72 -15
- data/vendor/v8/src/preparse-data.cc +2 -0
- data/vendor/v8/src/preparser-api.cc +8 -2
- data/vendor/v8/src/preparser.cc +1 -1
- data/vendor/v8/src/prettyprinter.cc +43 -52
- data/vendor/v8/src/prettyprinter.h +1 -1
- data/vendor/v8/src/profile-generator-inl.h +0 -28
- data/vendor/v8/src/profile-generator.cc +942 -685
- data/vendor/v8/src/profile-generator.h +210 -176
- data/vendor/v8/src/property.cc +6 -0
- data/vendor/v8/src/property.h +14 -3
- data/vendor/v8/src/regexp-macro-assembler-irregexp.cc +1 -1
- data/vendor/v8/src/regexp-macro-assembler.cc +28 -19
- data/vendor/v8/src/regexp-macro-assembler.h +11 -6
- data/vendor/v8/src/regexp-stack.cc +18 -10
- data/vendor/v8/src/regexp-stack.h +45 -21
- data/vendor/v8/src/regexp.js +3 -3
- data/vendor/v8/src/register-allocator-inl.h +3 -3
- data/vendor/v8/src/register-allocator.cc +1 -7
- data/vendor/v8/src/register-allocator.h +5 -15
- data/vendor/v8/src/rewriter.cc +2 -1
- data/vendor/v8/src/runtime-profiler.cc +158 -128
- data/vendor/v8/src/runtime-profiler.h +131 -15
- data/vendor/v8/src/runtime.cc +2409 -1692
- data/vendor/v8/src/runtime.h +93 -17
- data/vendor/v8/src/safepoint-table.cc +3 -0
- data/vendor/v8/src/safepoint-table.h +9 -3
- data/vendor/v8/src/scanner-base.cc +21 -28
- data/vendor/v8/src/scanner-base.h +22 -11
- data/vendor/v8/src/scanner.cc +3 -5
- data/vendor/v8/src/scanner.h +4 -2
- data/vendor/v8/src/scopeinfo.cc +11 -16
- data/vendor/v8/src/scopeinfo.h +26 -15
- data/vendor/v8/src/scopes.cc +67 -37
- data/vendor/v8/src/scopes.h +26 -12
- data/vendor/v8/src/serialize.cc +193 -154
- data/vendor/v8/src/serialize.h +41 -36
- data/vendor/v8/src/small-pointer-list.h +163 -0
- data/vendor/v8/src/snapshot-common.cc +1 -1
- data/vendor/v8/src/snapshot.h +3 -1
- data/vendor/v8/src/spaces-inl.h +30 -25
- data/vendor/v8/src/spaces.cc +263 -370
- data/vendor/v8/src/spaces.h +178 -166
- data/vendor/v8/src/string-search.cc +4 -3
- data/vendor/v8/src/string-search.h +21 -20
- data/vendor/v8/src/string-stream.cc +32 -24
- data/vendor/v8/src/string.js +7 -7
- data/vendor/v8/src/stub-cache.cc +324 -248
- data/vendor/v8/src/stub-cache.h +181 -155
- data/vendor/v8/src/token.cc +3 -3
- data/vendor/v8/src/token.h +3 -3
- data/vendor/v8/src/top.cc +218 -390
- data/vendor/v8/src/type-info.cc +98 -32
- data/vendor/v8/src/type-info.h +10 -3
- data/vendor/v8/src/unicode.cc +1 -1
- data/vendor/v8/src/unicode.h +1 -1
- data/vendor/v8/src/utils.h +3 -0
- data/vendor/v8/src/v8-counters.cc +18 -11
- data/vendor/v8/src/v8-counters.h +34 -13
- data/vendor/v8/src/v8.cc +66 -121
- data/vendor/v8/src/v8.h +7 -4
- data/vendor/v8/src/v8globals.h +18 -12
- data/vendor/v8/src/{memory.h → v8memory.h} +0 -0
- data/vendor/v8/src/v8natives.js +59 -18
- data/vendor/v8/src/v8threads.cc +127 -114
- data/vendor/v8/src/v8threads.h +42 -35
- data/vendor/v8/src/v8utils.h +2 -39
- data/vendor/v8/src/variables.h +1 -1
- data/vendor/v8/src/version.cc +26 -5
- data/vendor/v8/src/version.h +4 -0
- data/vendor/v8/src/virtual-frame-heavy-inl.h +2 -4
- data/vendor/v8/src/virtual-frame-light-inl.h +5 -4
- data/vendor/v8/src/vm-state-inl.h +21 -17
- data/vendor/v8/src/vm-state.h +7 -5
- data/vendor/v8/src/win32-headers.h +1 -0
- data/vendor/v8/src/x64/assembler-x64-inl.h +12 -11
- data/vendor/v8/src/x64/assembler-x64.cc +80 -40
- data/vendor/v8/src/x64/assembler-x64.h +67 -17
- data/vendor/v8/src/x64/builtins-x64.cc +34 -33
- data/vendor/v8/src/x64/code-stubs-x64.cc +636 -377
- data/vendor/v8/src/x64/code-stubs-x64.h +14 -48
- data/vendor/v8/src/x64/codegen-x64-inl.h +1 -1
- data/vendor/v8/src/x64/codegen-x64.cc +158 -136
- data/vendor/v8/src/x64/codegen-x64.h +4 -1
- data/vendor/v8/src/x64/cpu-x64.cc +7 -5
- data/vendor/v8/src/x64/debug-x64.cc +8 -6
- data/vendor/v8/src/x64/deoptimizer-x64.cc +195 -20
- data/vendor/v8/src/x64/disasm-x64.cc +42 -23
- data/vendor/v8/src/x64/frames-x64.cc +1 -1
- data/vendor/v8/src/x64/frames-x64.h +2 -2
- data/vendor/v8/src/x64/full-codegen-x64.cc +780 -218
- data/vendor/v8/src/x64/ic-x64.cc +77 -79
- data/vendor/v8/src/x64/jump-target-x64.cc +1 -1
- data/vendor/v8/src/x64/lithium-codegen-x64.cc +698 -181
- data/vendor/v8/src/x64/lithium-codegen-x64.h +31 -6
- data/vendor/v8/src/x64/lithium-x64.cc +136 -54
- data/vendor/v8/src/x64/lithium-x64.h +142 -51
- data/vendor/v8/src/x64/macro-assembler-x64.cc +456 -187
- data/vendor/v8/src/x64/macro-assembler-x64.h +166 -34
- data/vendor/v8/src/x64/regexp-macro-assembler-x64.cc +44 -28
- data/vendor/v8/src/x64/regexp-macro-assembler-x64.h +8 -4
- data/vendor/v8/src/x64/register-allocator-x64-inl.h +3 -3
- data/vendor/v8/src/x64/register-allocator-x64.cc +12 -8
- data/vendor/v8/src/x64/simulator-x64.h +5 -5
- data/vendor/v8/src/x64/stub-cache-x64.cc +299 -344
- data/vendor/v8/src/x64/virtual-frame-x64.cc +37 -13
- data/vendor/v8/src/x64/virtual-frame-x64.h +13 -7
- data/vendor/v8/src/zone-inl.h +49 -3
- data/vendor/v8/src/zone.cc +42 -41
- data/vendor/v8/src/zone.h +37 -34
- data/vendor/v8/test/benchmarks/testcfg.py +100 -0
- data/vendor/v8/test/cctest/SConscript +5 -4
- data/vendor/v8/test/cctest/cctest.h +3 -2
- data/vendor/v8/test/cctest/cctest.status +6 -11
- data/vendor/v8/test/cctest/test-accessors.cc +3 -3
- data/vendor/v8/test/cctest/test-alloc.cc +39 -33
- data/vendor/v8/test/cctest/test-api.cc +1092 -205
- data/vendor/v8/test/cctest/test-assembler-arm.cc +39 -25
- data/vendor/v8/test/cctest/test-assembler-ia32.cc +36 -37
- data/vendor/v8/test/cctest/test-assembler-mips.cc +1098 -40
- data/vendor/v8/test/cctest/test-assembler-x64.cc +32 -25
- data/vendor/v8/test/cctest/test-ast.cc +1 -0
- data/vendor/v8/test/cctest/test-circular-queue.cc +8 -5
- data/vendor/v8/test/cctest/test-compiler.cc +24 -24
- data/vendor/v8/test/cctest/test-cpu-profiler.cc +140 -5
- data/vendor/v8/test/cctest/test-dataflow.cc +1 -0
- data/vendor/v8/test/cctest/test-debug.cc +136 -77
- data/vendor/v8/test/cctest/test-decls.cc +1 -1
- data/vendor/v8/test/cctest/test-deoptimization.cc +25 -24
- data/vendor/v8/test/cctest/test-disasm-arm.cc +9 -4
- data/vendor/v8/test/cctest/test-disasm-ia32.cc +10 -8
- data/vendor/v8/test/cctest/test-func-name-inference.cc +10 -4
- data/vendor/v8/test/cctest/test-heap-profiler.cc +226 -164
- data/vendor/v8/test/cctest/test-heap.cc +240 -217
- data/vendor/v8/test/cctest/test-liveedit.cc +1 -0
- data/vendor/v8/test/cctest/test-log-stack-tracer.cc +18 -20
- data/vendor/v8/test/cctest/test-log.cc +114 -108
- data/vendor/v8/test/cctest/test-macro-assembler-x64.cc +247 -177
- data/vendor/v8/test/cctest/test-mark-compact.cc +129 -90
- data/vendor/v8/test/cctest/test-parsing.cc +15 -14
- data/vendor/v8/test/cctest/test-platform-linux.cc +1 -0
- data/vendor/v8/test/cctest/test-platform-tls.cc +66 -0
- data/vendor/v8/test/cctest/test-platform-win32.cc +1 -0
- data/vendor/v8/test/cctest/test-profile-generator.cc +1 -1
- data/vendor/v8/test/cctest/test-regexp.cc +53 -41
- data/vendor/v8/test/cctest/test-reloc-info.cc +18 -11
- data/vendor/v8/test/cctest/test-serialize.cc +44 -43
- data/vendor/v8/test/cctest/test-sockets.cc +8 -3
- data/vendor/v8/test/cctest/test-spaces.cc +47 -29
- data/vendor/v8/test/cctest/test-strings.cc +20 -20
- data/vendor/v8/test/cctest/test-thread-termination.cc +8 -3
- data/vendor/v8/test/cctest/test-threads.cc +5 -3
- data/vendor/v8/test/cctest/test-utils.cc +5 -4
- data/vendor/v8/test/cctest/testcfg.py +7 -3
- data/vendor/v8/test/es5conform/es5conform.status +2 -77
- data/vendor/v8/test/es5conform/testcfg.py +1 -1
- data/vendor/v8/test/message/testcfg.py +1 -1
- data/vendor/v8/test/mjsunit/accessors-on-global-object.js +3 -3
- data/vendor/v8/test/mjsunit/array-concat.js +43 -1
- data/vendor/v8/test/mjsunit/array-join.js +25 -0
- data/vendor/v8/test/mjsunit/bitops-info.js +7 -1
- data/vendor/v8/test/mjsunit/compiler/array-length.js +2 -2
- data/vendor/v8/test/mjsunit/compiler/global-accessors.js +47 -0
- data/vendor/v8/test/mjsunit/compiler/pic.js +1 -1
- data/vendor/v8/test/mjsunit/compiler/regress-loadfield.js +65 -0
- data/vendor/v8/test/mjsunit/math-sqrt.js +5 -1
- data/vendor/v8/test/mjsunit/mjsunit.js +59 -8
- data/vendor/v8/test/mjsunit/mjsunit.status +0 -12
- data/vendor/v8/test/mjsunit/mul-exhaustive.js +129 -11
- data/vendor/v8/test/mjsunit/negate-zero.js +1 -1
- data/vendor/v8/test/mjsunit/object-freeze.js +5 -13
- data/vendor/v8/test/mjsunit/object-prevent-extensions.js +9 -50
- data/vendor/v8/test/mjsunit/object-seal.js +4 -13
- data/vendor/v8/test/mjsunit/override-eval-with-non-function.js +36 -0
- data/vendor/v8/test/mjsunit/regress/regress-1145.js +54 -0
- data/vendor/v8/test/mjsunit/regress/regress-1172-bis.js +37 -0
- data/vendor/v8/test/mjsunit/regress/regress-1181.js +54 -0
- data/vendor/v8/test/mjsunit/regress/regress-1207.js +35 -0
- data/vendor/v8/test/mjsunit/regress/regress-1209.js +34 -0
- data/vendor/v8/test/mjsunit/regress/regress-1210.js +48 -0
- data/vendor/v8/test/mjsunit/regress/regress-1213.js +43 -0
- data/vendor/v8/test/mjsunit/regress/regress-1218.js +29 -0
- data/vendor/v8/test/mjsunit/regress/regress-1229.js +79 -0
- data/vendor/v8/test/mjsunit/regress/regress-1233.js +47 -0
- data/vendor/v8/test/mjsunit/regress/regress-1236.js +34 -0
- data/vendor/v8/test/mjsunit/regress/regress-1237.js +36 -0
- data/vendor/v8/test/mjsunit/regress/regress-1240.js +39 -0
- data/vendor/v8/test/mjsunit/regress/regress-1257.js +58 -0
- data/vendor/v8/test/mjsunit/regress/regress-1278.js +69 -0
- data/vendor/v8/test/mjsunit/regress/regress-create-exception.js +1 -0
- data/vendor/v8/test/mjsunit/regress/regress-lazy-deopt-reloc.js +52 -0
- data/vendor/v8/test/mjsunit/sin-cos.js +15 -10
- data/vendor/v8/test/mjsunit/smi-negative-zero.js +2 -2
- data/vendor/v8/test/mjsunit/str-to-num.js +1 -1
- data/vendor/v8/test/mjsunit/strict-mode.js +435 -0
- data/vendor/v8/test/mjsunit/testcfg.py +23 -6
- data/vendor/v8/test/mozilla/mozilla.status +0 -2
- data/vendor/v8/test/mozilla/testcfg.py +1 -1
- data/vendor/v8/test/preparser/empty.js +28 -0
- data/vendor/v8/test/preparser/functions-only.js +38 -0
- data/vendor/v8/test/preparser/non-alphanum.js +34 -0
- data/vendor/v8/test/preparser/symbols-only.js +49 -0
- data/vendor/v8/test/preparser/testcfg.py +90 -0
- data/vendor/v8/test/sputnik/testcfg.py +1 -1
- data/vendor/v8/test/test262/README +16 -0
- data/vendor/v8/test/test262/harness-adapt.js +80 -0
- data/vendor/v8/test/test262/test262.status +1506 -0
- data/vendor/v8/test/test262/testcfg.py +123 -0
- data/vendor/v8/tools/freebsd-tick-processor +10 -0
- data/vendor/v8/tools/gyp/v8.gyp +8 -33
- data/vendor/v8/tools/linux-tick-processor +5 -3
- data/vendor/v8/tools/test.py +37 -14
- data/vendor/v8/tools/tickprocessor.js +22 -8
- data/vendor/v8/tools/visual_studio/v8_base.vcproj +13 -1
- data/vendor/v8/tools/visual_studio/v8_base_arm.vcproj +5 -1
- data/vendor/v8/tools/visual_studio/v8_base_x64.vcproj +5 -1
- data/vendor/v8/tools/visual_studio/x64.vsprops +1 -0
- metadata +1495 -1341
- data/ext/extconf.rb +0 -22
- data/ext/mustang.cpp +0 -58
- data/vendor/v8/src/top.h +0 -608
data/vendor/v8/src/log.h
CHANGED
@@ -77,13 +77,15 @@ class LogMessageBuilder;
 
 #undef LOG
 #ifdef ENABLE_LOGGING_AND_PROFILING
-#define LOG(Call)                           \
-  do {                                      \
-    if (v8::internal::Logger::is_logging()) \
-      v8::internal::Logger::Call;           \
+#define LOG(isolate, Call)                          \
+  do {                                              \
+    v8::internal::Logger* logger =                  \
+        (isolate)->logger();                        \
+    if (logger->is_logging())                       \
+      logger->Call;                                 \
   } while (false)
 #else
-#define LOG(Call) ((void) 0)
+#define LOG(isolate, Call) ((void) 0)
 #endif
 
 #define LOG_EVENTS_AND_TAGS_LIST(V) \
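A note on the hunk above: it is the heart of the isolates change in this header. LOG was a macro over static Logger methods; it now takes the isolate whose logger should receive the event. A minimal sketch of the same macro pattern, with hypothetical MyIsolate/MyLogger types standing in for V8's internal classes:

    #include <cstdio>

    // Hypothetical stand-ins for v8::internal::Isolate and Logger.
    class MyLogger {
     public:
      bool is_logging() const { return logging_nesting_ > 0; }
      void StringEvent(const char* name, const char* value) {
        std::printf("%s,%s\n", name, value);
      }
      int logging_nesting_ = 1;
    };

    class MyIsolate {
     public:
      MyLogger* logger() { return &logger_; }
     private:
      MyLogger logger_;
    };

    // Same shape as the new LOG(isolate, Call): fetch the per-isolate logger,
    // check the cheap is_logging() flag, then forward the call expression.
    #define MY_LOG(isolate, Call)                 \
      do {                                        \
        MyLogger* logger = (isolate)->logger();   \
        if (logger->is_logging())                 \
          logger->Call;                           \
      } while (false)

    int main() {
      MyIsolate isolate;
      // Expands to: logger->StringEvent("constant", "yes");
      MY_LOG(&isolate, StringEvent("constant", "yes"));
      return 0;
    }

The do { ... } while (false) wrapper keeps the macro usable as a single statement in unbraced if/else bodies.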
@@ -91,7 +93,7 @@ class LogMessageBuilder;
   V(CODE_MOVE_EVENT, "code-move")                  \
   V(CODE_DELETE_EVENT, "code-delete")              \
   V(CODE_MOVING_GC, "code-moving-gc")              \
-  V(SFI_MOVE_EVENT, "sfi-move")                    \
+  V(SHARED_FUNC_MOVE_EVENT, "sfi-move")            \
   V(SNAPSHOT_POSITION_EVENT, "snapshot-pos")       \
   V(TICK_EVENT, "tick")                            \
   V(REPEAT_META_EVENT, "repeat")                   \
@@ -117,7 +119,9 @@ class LogMessageBuilder;
   V(EVAL_TAG, "Eval")                              \
   V(FUNCTION_TAG, "Function")                      \
   V(KEYED_LOAD_IC_TAG, "KeyedLoadIC")              \
+  V(KEYED_EXTERNAL_ARRAY_LOAD_IC_TAG, "KeyedExternalArrayLoadIC")  \
   V(KEYED_STORE_IC_TAG, "KeyedStoreIC")            \
+  V(KEYED_EXTERNAL_ARRAY_STORE_IC_TAG, "KeyedExternalArrayStoreIC")\
   V(LAZY_COMPILE_TAG, "LazyCompile")               \
   V(LOAD_IC_TAG, "LoadIC")                         \
   V(REG_EXP_TAG, "RegExp")                         \
@@ -131,6 +135,9 @@ class LogMessageBuilder;
 // original tags when writing to the log.
 
 
+class Sampler;
+
+
 class Logger {
  public:
 #define DECLARE_ENUM(enum_item, ignore) enum_item,
@@ -141,142 +148,147 @@ class Logger {
 #undef DECLARE_ENUM
 
   // Acquires resources for logging if the right flags are set.
-  static bool Setup();
+  bool Setup();
 
-  static void EnsureTickerStarted();
-  static void EnsureTickerStopped();
+  void EnsureTickerStarted();
+  void EnsureTickerStopped();
+
+  Sampler* sampler();
 
   // Frees resources acquired in Setup.
-  static void TearDown();
+  void TearDown();
 
   // Enable the computation of a sliding window of states.
-  static void EnableSlidingStateWindow();
+  void EnableSlidingStateWindow();
 
   // Emits an event with a string value -> (name, value).
-  static void StringEvent(const char* name, const char* value);
+  void StringEvent(const char* name, const char* value);
 
   // Emits an event with an int value -> (name, value).
-  static void IntEvent(const char* name, int value);
-  static void IntPtrTEvent(const char* name, intptr_t value);
+  void IntEvent(const char* name, int value);
+  void IntPtrTEvent(const char* name, intptr_t value);
 
   // Emits an event with an handle value -> (name, location).
-  static void HandleEvent(const char* name, Object** location);
+  void HandleEvent(const char* name, Object** location);
 
   // Emits memory management events for C allocated structures.
-  static void NewEvent(const char* name, void* object, size_t size);
-  static void DeleteEvent(const char* name, void* object);
+  void NewEvent(const char* name, void* object, size_t size);
+  void DeleteEvent(const char* name, void* object);
+
+  // Static versions of the above, operate on current isolate's logger.
+  // Used in TRACK_MEMORY(TypeName) defined in globals.h
+  static void NewEventStatic(const char* name, void* object, size_t size);
+  static void DeleteEventStatic(const char* name, void* object);
 
   // Emits an event with a tag, and some resource usage information.
   // -> (name, tag, <rusage information>).
   // Currently, the resource usage information is a process time stamp
   // and a real time timestamp.
-  static void ResourceEvent(const char* name, const char* tag);
+  void ResourceEvent(const char* name, const char* tag);
 
   // Emits an event that an undefined property was read from an
   // object.
-  static void SuspectReadEvent(String* name, Object* obj);
+  void SuspectReadEvent(String* name, Object* obj);
 
   // Emits an event when a message is put on or read from a debugging queue.
   // DebugTag lets us put a call-site specific label on the event.
-  static void DebugTag(const char* call_site_tag);
-  static void DebugEvent(const char* event_type, Vector<uint16_t> parameter);
+  void DebugTag(const char* call_site_tag);
+  void DebugEvent(const char* event_type, Vector<uint16_t> parameter);
 
 
   // ==== Events logged by --log-api. ====
-  static void ApiNamedSecurityCheck(Object* key);
-  static void ApiIndexedSecurityCheck(uint32_t index);
-  static void ApiNamedPropertyAccess(const char* tag,
-                                     JSObject* holder,
-                                     Object* name);
-  static void ApiIndexedPropertyAccess(const char* tag,
-                                       JSObject* holder,
-                                       uint32_t index);
-  static void ApiObjectAccess(const char* tag, JSObject* obj);
-  static void ApiEntryCall(const char* name);
+  void ApiNamedSecurityCheck(Object* key);
+  void ApiIndexedSecurityCheck(uint32_t index);
+  void ApiNamedPropertyAccess(const char* tag, JSObject* holder, Object* name);
+  void ApiIndexedPropertyAccess(const char* tag,
+                                JSObject* holder,
+                                uint32_t index);
+  void ApiObjectAccess(const char* tag, JSObject* obj);
+  void ApiEntryCall(const char* name);
 
 
   // ==== Events logged by --log-code. ====
   // Emits a code event for a callback function.
-  static void CallbackEvent(String* name, Address entry_point);
-  static void GetterCallbackEvent(String* name, Address entry_point);
-  static void SetterCallbackEvent(String* name, Address entry_point);
+  void CallbackEvent(String* name, Address entry_point);
+  void GetterCallbackEvent(String* name, Address entry_point);
+  void SetterCallbackEvent(String* name, Address entry_point);
   // Emits a code create event.
-  static void CodeCreateEvent(LogEventsAndTags tag,
-                              Code* code, const char* source);
-  static void CodeCreateEvent(LogEventsAndTags tag,
-                              Code* code, String* name);
-  static void CodeCreateEvent(LogEventsAndTags tag,
-                              Code* code,
-                              SharedFunctionInfo* shared,
-                              String* name);
-  static void CodeCreateEvent(LogEventsAndTags tag,
-                              Code* code,
-                              SharedFunctionInfo* shared,
-                              String* source, int line);
-  static void CodeCreateEvent(LogEventsAndTags tag, Code* code, int args_count);
-  static void CodeMovingGCEvent();
+  void CodeCreateEvent(LogEventsAndTags tag,
+                       Code* code, const char* source);
+  void CodeCreateEvent(LogEventsAndTags tag,
+                       Code* code, String* name);
+  void CodeCreateEvent(LogEventsAndTags tag,
+                       Code* code,
+                       SharedFunctionInfo* shared,
+                       String* name);
+  void CodeCreateEvent(LogEventsAndTags tag,
+                       Code* code,
+                       SharedFunctionInfo* shared,
+                       String* source, int line);
+  void CodeCreateEvent(LogEventsAndTags tag, Code* code, int args_count);
+  void CodeMovingGCEvent();
   // Emits a code create event for a RegExp.
-  static void RegExpCodeCreateEvent(Code* code, String* source);
+  void RegExpCodeCreateEvent(Code* code, String* source);
   // Emits a code move event.
-  static void CodeMoveEvent(Address from, Address to);
+  void CodeMoveEvent(Address from, Address to);
   // Emits a code delete event.
-  static void CodeDeleteEvent(Address from);
+  void CodeDeleteEvent(Address from);
 
-  static void SharedFunctionInfoMoveEvent(Address from, Address to);
+  void SharedFunctionInfoMoveEvent(Address from, Address to);
 
-  static void SnapshotPositionEvent(Address addr, int pos);
+  void SnapshotPositionEvent(Address addr, int pos);
 
   // ==== Events logged by --log-gc. ====
   // Heap sampling events: start, end, and individual types.
-  static void HeapSampleBeginEvent(const char* space, const char* kind);
-  static void HeapSampleEndEvent(const char* space, const char* kind);
-  static void HeapSampleItemEvent(const char* type, int number, int bytes);
-  static void HeapSampleJSConstructorEvent(const char* constructor,
-                                           int number, int bytes);
-  static void HeapSampleJSRetainersEvent(const char* constructor,
+  void HeapSampleBeginEvent(const char* space, const char* kind);
+  void HeapSampleEndEvent(const char* space, const char* kind);
+  void HeapSampleItemEvent(const char* type, int number, int bytes);
+  void HeapSampleJSConstructorEvent(const char* constructor,
+                                    int number, int bytes);
+  void HeapSampleJSRetainersEvent(const char* constructor,
                                   const char* event);
-  static void HeapSampleJSProducerEvent(const char* constructor,
-                                        Address* stack);
-  static void HeapSampleStats(const char* space, const char* kind,
-                              intptr_t capacity, intptr_t used);
-
-  static void SharedLibraryEvent(const char* library_path,
-                                 uintptr_t start,
-                                 uintptr_t end);
-  static void SharedLibraryEvent(const wchar_t* library_path,
-                                 uintptr_t start,
-                                 uintptr_t end);
+  void HeapSampleJSProducerEvent(const char* constructor,
+                                 Address* stack);
+  void HeapSampleStats(const char* space, const char* kind,
+                       intptr_t capacity, intptr_t used);
+
+  void SharedLibraryEvent(const char* library_path,
+                          uintptr_t start,
+                          uintptr_t end);
+  void SharedLibraryEvent(const wchar_t* library_path,
+                          uintptr_t start,
+                          uintptr_t end);
 
   // ==== Events logged by --log-regexp ====
   // Regexp compilation and execution events.
 
-  static void RegExpCompileEvent(Handle<JSRegExp> regexp, bool in_cache);
+  void RegExpCompileEvent(Handle<JSRegExp> regexp, bool in_cache);
 
   // Log an event reported from generated code
-  static void LogRuntime(Vector<const char> format, JSArray* args);
+  void LogRuntime(Vector<const char> format, JSArray* args);
 
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  static bool is_logging() {
+  bool is_logging() {
     return logging_nesting_ > 0;
   }
 
   // Pause/Resume collection of profiling data.
   // When data collection is paused, CPU Tick events are discarded until
   // data collection is Resumed.
-  static void PauseProfiler(int flags, int tag);
-  static void ResumeProfiler(int flags, int tag);
-  static int GetActiveProfilerModules();
+  void PauseProfiler(int flags, int tag);
+  void ResumeProfiler(int flags, int tag);
+  int GetActiveProfilerModules();
 
   // If logging is performed into a memory buffer, allows to
   // retrieve previously written messages. See v8.h.
-  static int GetLogLines(int from_pos, char* dest_buf, int max_size);
+  int GetLogLines(int from_pos, char* dest_buf, int max_size);
 
   // Logs all compiled functions found in the heap.
-  static void LogCompiledFunctions();
+  void LogCompiledFunctions();
   // Logs all accessor callbacks found in the heap.
-  static void LogAccessorCallbacks();
+  void LogAccessorCallbacks();
   // Used for logging stubs found in the snapshot.
-  static void LogCodeObjects();
+  void LogCodeObjects();
 
   // Converts tag to a corresponding NATIVE_... if the script is native.
   INLINE(static LogEventsAndTags ToNativeByScript(LogEventsAndTags, Script*));
@@ -284,70 +296,74 @@ class Logger {
   // Profiler's sampling interval (in milliseconds).
   static const int kSamplingIntervalMs = 1;
 
+  // Callback from Log, stops profiling in case of insufficient resources.
+  void LogFailure();
+
  private:
+  Logger();
+  ~Logger();
 
   // Emits the profiler's first message.
-  static void ProfilerBeginEvent();
+  void ProfilerBeginEvent();
 
   // Emits callback event messages.
-  static void CallbackEventInternal(const char* prefix,
-                                    const char* name,
-                                    Address entry_point);
+  void CallbackEventInternal(const char* prefix,
+                             const char* name,
+                             Address entry_point);
 
   // Internal configurable move event.
-  static void MoveEventInternal(LogEventsAndTags event,
-                                Address from,
-                                Address to);
+  void MoveEventInternal(LogEventsAndTags event, Address from, Address to);
 
   // Internal configurable move event.
-  static void DeleteEventInternal(LogEventsAndTags event,
-                                  Address from);
+  void DeleteEventInternal(LogEventsAndTags event, Address from);
 
   // Emits the source code of a regexp. Used by regexp events.
-  static void LogRegExpSource(Handle<JSRegExp> regexp);
+  void LogRegExpSource(Handle<JSRegExp> regexp);
 
   // Used for logging stubs found in the snapshot.
-  static void LogCodeObject(Object* code_object);
+  void LogCodeObject(Object* code_object);
 
   // Emits general information about generated code.
-  static void LogCodeInfo();
+  void LogCodeInfo();
 
   // Handles code creation when low-level profiling is active.
-  static void LowLevelCodeCreateEvent(Code* code, LogMessageBuilder* msg);
+  void LowLevelCodeCreateEvent(Code* code, LogMessageBuilder* msg);
 
   // Emits a profiler tick event. Used by the profiler thread.
-  static void TickEvent(TickSample* sample, bool overflow);
+  void TickEvent(TickSample* sample, bool overflow);
 
-  static void ApiEvent(const char* name, ...);
+  void ApiEvent(const char* name, ...);
 
   // Logs a StringEvent regardless of whether FLAG_log is true.
-  static void UncheckedStringEvent(const char* name, const char* value);
+  void UncheckedStringEvent(const char* name, const char* value);
 
   // Logs an IntEvent regardless of whether FLAG_log is true.
-  static void UncheckedIntEvent(const char* name, int value);
-  static void UncheckedIntPtrTEvent(const char* name, intptr_t value);
-
-  // Stops logging and profiling in case of insufficient resources.
-  static void StopLoggingAndProfiling();
+  void UncheckedIntEvent(const char* name, int value);
+  void UncheckedIntPtrTEvent(const char* name, intptr_t value);
 
   // Returns whether profiler's sampler is active.
-  static bool IsProfilerSamplerActive();
+  bool IsProfilerSamplerActive();
 
   // The sampler used by the profiler and the sliding state window.
-  static Ticker* ticker_;
+  Ticker* ticker_;
 
   // When the statistical profile is active, profiler_
   // points to a Profiler, that handles collection
   // of samples.
-  static Profiler* profiler_;
+  Profiler* profiler_;
 
   // SlidingStateWindow instance keeping a sliding window of the most
   // recent VM states.
-  static SlidingStateWindow* sliding_state_window_;
+  SlidingStateWindow* sliding_state_window_;
+
+  // An array of log events names.
+  const char* const* log_events_;
 
   // Internal implementation classes with access to
   // private members.
   friend class EventLog;
+  friend class Isolate;
+  friend class LogMessageBuilder;
   friend class TimeLog;
   friend class Profiler;
   friend class SlidingStateWindow;
@@ -356,21 +372,72 @@ class Logger {
 
   friend class LoggerTestHelper;
 
-  static int logging_nesting_;
-  static int cpu_profiler_nesting_;
-  static int heap_profiler_nesting_;
+
+  int logging_nesting_;
+  int cpu_profiler_nesting_;
+  int heap_profiler_nesting_;
+
+  Log* log_;
+
+  // Guards against multiple calls to TearDown() that can happen in some tests.
+  // 'true' between Setup() and TearDown().
+  bool is_initialized_;
+
+  // Support for 'incremental addresses' in compressed logs:
+  //  LogMessageBuilder::AppendAddress(Address addr)
+  Address last_address_;
+  //  Logger::TickEvent(...)
+  Address prev_sp_;
+  Address prev_function_;
+  //  Logger::MoveEventInternal(...)
+  Address prev_to_;
+  //  Logger::FunctionCreateEvent(...)
+  Address prev_code_;
 
   friend class CpuProfiler;
 #else
-  static bool is_logging() { return false; }
+  bool is_logging() { return false; }
 #endif
 };
 
 
+// Process wide registry of samplers.
+class SamplerRegistry : public AllStatic {
+ public:
+  enum State {
+    HAS_NO_SAMPLERS,
+    HAS_SAMPLERS,
+    HAS_CPU_PROFILING_SAMPLERS
+  };
+
+  typedef void (*VisitSampler)(Sampler*, void*);
+
+  static State GetState();
+
+  // Iterates over all active samplers keeping the internal lock held.
+  // Returns whether there are any active samplers.
+  static bool IterateActiveSamplers(VisitSampler func, void* param);
+
+  // Adds/Removes an active sampler.
+  static void AddActiveSampler(Sampler* sampler);
+  static void RemoveActiveSampler(Sampler* sampler);
+
+ private:
+  static bool ActiveSamplersExist() {
+    return active_samplers_ != NULL && !active_samplers_->is_empty();
+  }
+
+  static Mutex* mutex_;  // Protects the state below.
+  static List<Sampler*>* active_samplers_;
+
+  DISALLOW_IMPLICIT_CONSTRUCTORS(SamplerRegistry);
+};
+
+
 // Class that extracts stack trace, used for profiling.
 class StackTracer : public AllStatic {
  public:
-  static void Trace(TickSample* sample);
+  static void Trace(Isolate* isolate, TickSample* sample);
 };
 
 } }  // namespace v8::internal
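The rest of the header applies the same static-to-instance migration to every Logger entry point and introduces the process-wide SamplerRegistry. A hedged sketch of that registry pattern in portable C++ (std::mutex and std::vector stand in for V8's Mutex and List; the class name is hypothetical):

    #include <algorithm>
    #include <mutex>
    #include <vector>

    class Sampler;  // only pointers are stored, so a forward declaration suffices

    // All mutation and iteration happen under one lock, mirroring
    // SamplerRegistry::IterateActiveSamplers above.
    class SamplerRegistrySketch {
     public:
      typedef void (*VisitSampler)(Sampler*, void*);

      static void AddActiveSampler(Sampler* sampler) {
        std::lock_guard<std::mutex> lock(mutex_);
        active_samplers_.push_back(sampler);
      }

      static void RemoveActiveSampler(Sampler* sampler) {
        std::lock_guard<std::mutex> lock(mutex_);
        active_samplers_.erase(
            std::remove(active_samplers_.begin(), active_samplers_.end(),
                        sampler),
            active_samplers_.end());
      }

      // Visits every active sampler with the lock held; returns whether any
      // sampler is registered.
      static bool IterateActiveSamplers(VisitSampler func, void* param) {
        std::lock_guard<std::mutex> lock(mutex_);
        for (Sampler* s : active_samplers_) func(s, param);
        return !active_samplers_.empty();
      }

     private:
      static std::mutex mutex_;
      static std::vector<Sampler*> active_samplers_;
    };

    std::mutex SamplerRegistrySketch::mutex_;
    std::vector<Sampler*> SamplerRegistrySketch::active_samplers_;

Holding the lock during iteration is the design point: a sampler cannot be removed (and deleted) while a visitor is still using it.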
data/vendor/v8/src/mark-compact.cc
CHANGED
@@ -44,28 +44,27 @@ namespace internal {
 // -------------------------------------------------------------------------
 // MarkCompactCollector
 
-bool MarkCompactCollector::force_compaction_ = false;
-bool MarkCompactCollector::compacting_collection_ = false;
-bool MarkCompactCollector::compact_on_next_gc_ = false;
-
-int MarkCompactCollector::previous_marked_count_ = 0;
-GCTracer* MarkCompactCollector::tracer_ = NULL;
-
-
+MarkCompactCollector::MarkCompactCollector() :  // NOLINT
 #ifdef DEBUG
-MarkCompactCollector::CollectorState MarkCompactCollector::state_ = IDLE;
-
-// Counters used for debugging the marking phase of mark-compact or mark-sweep
-// collection.
-int MarkCompactCollector::live_bytes_ = 0;
-int MarkCompactCollector::live_young_objects_size_ = 0;
-int MarkCompactCollector::live_old_data_objects_size_ = 0;
-int MarkCompactCollector::live_old_pointer_objects_size_ = 0;
-int MarkCompactCollector::live_code_objects_size_ = 0;
-int MarkCompactCollector::live_map_objects_size_ = 0;
-int MarkCompactCollector::live_cell_objects_size_ = 0;
-int MarkCompactCollector::live_lo_objects_size_ = 0;
+      state_(IDLE),
 #endif
+      force_compaction_(false),
+      compacting_collection_(false),
+      compact_on_next_gc_(false),
+      previous_marked_count_(0),
+      tracer_(NULL),
+#ifdef DEBUG
+      live_young_objects_size_(0),
+      live_old_pointer_objects_size_(0),
+      live_old_data_objects_size_(0),
+      live_code_objects_size_(0),
+      live_map_objects_size_(0),
+      live_cell_objects_size_(0),
+      live_lo_objects_size_(0),
+      live_bytes_(0),
+#endif
+      heap_(NULL),
+      code_flusher_(NULL) { }
 
 
 void MarkCompactCollector::CollectGarbage() {
@@ -87,15 +86,15 @@ void MarkCompactCollector::CollectGarbage() {
     GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_COMPACT);
     EncodeForwardingAddresses();
 
-    Heap::MarkMapPointersAsEncoded(true);
+    heap()->MarkMapPointersAsEncoded(true);
     UpdatePointers();
-    Heap::MarkMapPointersAsEncoded(false);
-    PcToCodeCache::FlushPcToCodeCache();
+    heap()->MarkMapPointersAsEncoded(false);
+    heap()->isolate()->pc_to_code_cache()->Flush();
 
     RelocateObjects();
   } else {
     SweepSpaces();
-    PcToCodeCache::FlushPcToCodeCache();
+    heap()->isolate()->pc_to_code_cache()->Flush();
   }
 
   Finish();
@@ -124,7 +123,7 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) {
   compact_on_next_gc_ = false;
 
   if (FLAG_never_compact) compacting_collection_ = false;
-  if (!Heap::map_space()->MapPointersEncodable())
+  if (!heap()->map_space()->MapPointersEncodable())
       compacting_collection_ = false;
   if (FLAG_collect_maps) CreateBackPointers();
 #ifdef ENABLE_GDB_JIT_INTERFACE
@@ -162,9 +161,9 @@ void MarkCompactCollector::Finish() {
   // force lazy re-initialization of it. This must be done after the
   // GC, because it relies on the new address of certain old space
   // objects (empty string, illegal builtin).
-  StubCache::Clear();
+  heap()->isolate()->stub_cache()->Clear();
 
-  ExternalStringTable::CleanUp();
+  heap()->external_string_table_.CleanUp();
 
   // If we've just compacted old space there's no reason to check the
   // fragmentation limit. Just return.
@@ -221,17 +220,19 @@ void MarkCompactCollector::Finish() {
 // and continue with marking. This process repeats until all reachable
 // objects have been marked.
 
-
-
-class FlushCode : public AllStatic {
+class CodeFlusher {
  public:
-  static void AddCandidate(SharedFunctionInfo* shared_info) {
+  explicit CodeFlusher(Isolate* isolate)
+      : isolate_(isolate),
+        jsfunction_candidates_head_(NULL),
+        shared_function_info_candidates_head_(NULL) {}
+
+  void AddCandidate(SharedFunctionInfo* shared_info) {
     SetNextCandidate(shared_info, shared_function_info_candidates_head_);
     shared_function_info_candidates_head_ = shared_info;
   }
 
-
-  static void AddCandidate(JSFunction* function) {
+  void AddCandidate(JSFunction* function) {
     ASSERT(function->unchecked_code() ==
            function->unchecked_shared()->unchecked_code());
 
@@ -239,15 +240,14 @@ class FlushCode : public AllStatic {
     jsfunction_candidates_head_ = function;
   }
 
-
-  static void ProcessCandidates() {
+  void ProcessCandidates() {
     ProcessSharedFunctionInfoCandidates();
     ProcessJSFunctionCandidates();
   }
 
  private:
-  static void ProcessJSFunctionCandidates() {
-    Code* lazy_compile = Builtins::builtin(Builtins::LazyCompile);
+  void ProcessJSFunctionCandidates() {
+    Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
 
     JSFunction* candidate = jsfunction_candidates_head_;
     JSFunction* next_candidate;
@@ -271,8 +271,8 @@ class FlushCode : public AllStatic {
   }
 
 
-  static void ProcessSharedFunctionInfoCandidates() {
-    Code* lazy_compile = Builtins::builtin(Builtins::LazyCompile);
+  void ProcessSharedFunctionInfoCandidates() {
+    Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
 
     SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
     SharedFunctionInfo* next_candidate;
@@ -291,27 +291,22 @@ class FlushCode : public AllStatic {
     shared_function_info_candidates_head_ = NULL;
   }
 
-
   static JSFunction** GetNextCandidateField(JSFunction* candidate) {
     return reinterpret_cast<JSFunction**>(
         candidate->address() + JSFunction::kCodeEntryOffset);
   }
 
-
   static JSFunction* GetNextCandidate(JSFunction* candidate) {
     return *GetNextCandidateField(candidate);
   }
 
-
   static void SetNextCandidate(JSFunction* candidate,
                                JSFunction* next_candidate) {
     *GetNextCandidateField(candidate) = next_candidate;
   }
 
-
   STATIC_ASSERT(kPointerSize <= Code::kHeaderSize - Code::kHeaderPaddingStart);
 
-
   static SharedFunctionInfo** GetNextCandidateField(
       SharedFunctionInfo* candidate) {
     Code* code = candidate->unchecked_code();
@@ -319,29 +314,34 @@ class FlushCode : public AllStatic {
         code->address() + Code::kHeaderPaddingStart);
   }
 
-
   static SharedFunctionInfo* GetNextCandidate(SharedFunctionInfo* candidate) {
     return *GetNextCandidateField(candidate);
   }
 
-
   static void SetNextCandidate(SharedFunctionInfo* candidate,
                                SharedFunctionInfo* next_candidate) {
     *GetNextCandidateField(candidate) = next_candidate;
  }
 
-  static JSFunction* jsfunction_candidates_head_;
+  Isolate* isolate_;
+  JSFunction* jsfunction_candidates_head_;
+  SharedFunctionInfo* shared_function_info_candidates_head_;
 
-  static SharedFunctionInfo* shared_function_info_candidates_head_;
+  DISALLOW_COPY_AND_ASSIGN(CodeFlusher);
 };
 
-JSFunction* FlushCode::jsfunction_candidates_head_ = NULL;
 
-SharedFunctionInfo* FlushCode::shared_function_info_candidates_head_ = NULL;
+MarkCompactCollector::~MarkCompactCollector() {
+  if (code_flusher_ != NULL) {
+    delete code_flusher_;
+    code_flusher_ = NULL;
+  }
+}
+
 
 static inline HeapObject* ShortCircuitConsString(Object** p) {
   // Optimization: If the heap object pointed to by p is a non-symbol
-  // cons string whose right substring is Heap::empty_string, update
+  // cons string whose right substring is HEAP->empty_string, update
   // it in place to its left substring. Return the updated value.
   //
   // Here we assume that if we change *p, we replace it with a heap object
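A note on the CodeFlusher hunks above: the class threads its candidate lists through memory the candidates already own — a JSFunction's code-entry slot and a Code object's header padding — so enqueuing during GC allocates nothing. A minimal sketch of that intrusive-list trick with a hypothetical Candidate type:

    #include <cstdio>

    // Hypothetical candidate: one pointer-sized field is borrowed as the
    // "next" link while the object sits on the flush list, the same way
    // CodeFlusher reuses the slot at JSFunction::kCodeEntryOffset.
    struct Candidate {
      const char* name;
      Candidate* scratch_slot;  // field whose bits double as the list link
    };

    class FlusherSketch {
     public:
      // O(1) push with no allocation: the link lives inside the candidate.
      void AddCandidate(Candidate* c) {
        c->scratch_slot = head_;
        head_ = c;
      }

      void ProcessCandidates() {
        Candidate* c = head_;
        while (c != nullptr) {
          Candidate* next = c->scratch_slot;  // read the link before clobbering it
          std::printf("flushing %s\n", c->name);
          c->scratch_slot = nullptr;          // restore the borrowed field
          c = next;
        }
        head_ = nullptr;
      }

     private:
      Candidate* head_ = nullptr;
    };

    int main() {
      Candidate a = {"a", nullptr};
      Candidate b = {"b", nullptr};
      FlusherSketch flusher;
      flusher.AddCandidate(&a);
      flusher.AddCandidate(&b);
      flusher.ProcessCandidates();  // LIFO: prints b, then a
      return 0;
    }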
@@ -349,7 +349,7 @@ static inline HeapObject* ShortCircuitConsString(Object** p) {
|
|
349
349
|
//
|
350
350
|
// The check performed is:
|
351
351
|
// object->IsConsString() && !object->IsSymbol() &&
|
352
|
-
// (ConsString::cast(object)->second() ==
|
352
|
+
// (ConsString::cast(object)->second() == HEAP->empty_string())
|
353
353
|
// except the maps for the object and its possible substrings might be
|
354
354
|
// marked.
|
355
355
|
HeapObject* object = HeapObject::cast(*p);
|
@@ -359,7 +359,8 @@ static inline HeapObject* ShortCircuitConsString(Object** p) {
|
|
359
359
|
if ((type & kShortcutTypeMask) != kShortcutTypeTag) return object;
|
360
360
|
|
361
361
|
Object* second = reinterpret_cast<ConsString*>(object)->unchecked_second();
|
362
|
-
|
362
|
+
Heap* heap = map_word.ToMap()->heap();
|
363
|
+
if (second != heap->raw_unchecked_empty_string()) {
|
363
364
|
return object;
|
364
365
|
}
|
365
366
|
|
@@ -367,7 +368,7 @@ static inline HeapObject* ShortCircuitConsString(Object** p) {
|
|
367
368
|
// page dirty marks. Therefore, we only replace the string with its left
|
368
369
|
// substring when page dirty marks do not change.
|
369
370
|
Object* first = reinterpret_cast<ConsString*>(object)->unchecked_first();
|
370
|
-
if (!
|
371
|
+
if (!heap->InNewSpace(object) && heap->InNewSpace(first)) return object;
|
371
372
|
|
372
373
|
*p = first;
|
373
374
|
return HeapObject::cast(first);
|
@@ -380,19 +381,6 @@ class StaticMarkingVisitor : public StaticVisitorBase {
|
|
380
381
|
table_.GetVisitor(map)(map, obj);
|
381
382
|
}
|
382
383
|
|
383
|
-
static void EnableCodeFlushing(bool enabled) {
|
384
|
-
if (enabled) {
|
385
|
-
table_.Register(kVisitJSFunction, &VisitJSFunctionAndFlushCode);
|
386
|
-
table_.Register(kVisitSharedFunctionInfo,
|
387
|
-
&VisitSharedFunctionInfoAndFlushCode);
|
388
|
-
|
389
|
-
} else {
|
390
|
-
table_.Register(kVisitJSFunction, &VisitJSFunction);
|
391
|
-
table_.Register(kVisitSharedFunctionInfo,
|
392
|
-
&VisitSharedFunctionInfoGeneric);
|
393
|
-
}
|
394
|
-
}
|
395
|
-
|
396
384
|
static void Initialize() {
|
397
385
|
table_.Register(kVisitShortcutCandidate,
|
398
386
|
&FixedBodyVisitor<StaticMarkingVisitor,
|
@@ -454,84 +442,91 @@ class StaticMarkingVisitor : public StaticVisitorBase {
|
|
454
442
|
kVisitStructGeneric>();
|
455
443
|
}
|
456
444
|
|
457
|
-
INLINE(static void VisitPointer(Object** p)) {
|
458
|
-
MarkObjectByPointer(p);
|
445
|
+
INLINE(static void VisitPointer(Heap* heap, Object** p)) {
|
446
|
+
MarkObjectByPointer(heap, p);
|
459
447
|
}
|
460
448
|
|
461
|
-
INLINE(static void VisitPointers(Object** start, Object** end)) {
|
449
|
+
INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
|
462
450
|
// Mark all objects pointed to in [start, end).
|
463
451
|
const int kMinRangeForMarkingRecursion = 64;
|
464
452
|
if (end - start >= kMinRangeForMarkingRecursion) {
|
465
|
-
if (VisitUnmarkedObjects(start, end)) return;
|
453
|
+
if (VisitUnmarkedObjects(heap, start, end)) return;
|
466
454
|
// We are close to a stack overflow, so just mark the objects.
|
467
455
|
}
|
468
|
-
for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
|
456
|
+
for (Object** p = start; p < end; p++) MarkObjectByPointer(heap, p);
|
469
457
|
}
|
470
458
|
|
471
|
-
static inline void VisitCodeTarget(RelocInfo* rinfo) {
|
459
|
+
static inline void VisitCodeTarget(Heap* heap, RelocInfo* rinfo) {
|
472
460
|
ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
|
473
461
|
Code* code = Code::GetCodeFromTargetAddress(rinfo->target_address());
|
474
462
|
if (FLAG_cleanup_ics_at_gc && code->is_inline_cache_stub()) {
|
475
463
|
IC::Clear(rinfo->pc());
|
476
464
|
// Please note targets for cleared inline cached do not have to be
|
477
|
-
-    // marked since they are contained in
+    // marked since they are contained in HEAP->non_monomorphic_cache().
     } else {
-
+      heap->mark_compact_collector()->MarkObject(code);
     }
   }

-  static void VisitGlobalPropertyCell(RelocInfo* rinfo) {
+  static void VisitGlobalPropertyCell(Heap* heap, RelocInfo* rinfo) {
     ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL);
     Object* cell = rinfo->target_cell();
     Object* old_cell = cell;
-    VisitPointer(&cell);
+    VisitPointer(heap, &cell);
     if (cell != old_cell) {
       rinfo->set_target_cell(reinterpret_cast<JSGlobalPropertyCell*>(cell));
     }
   }

-  static inline void VisitDebugTarget(RelocInfo* rinfo) {
+  static inline void VisitDebugTarget(Heap* heap, RelocInfo* rinfo) {
     ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
             rinfo->IsPatchedReturnSequence()) ||
            (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
             rinfo->IsPatchedDebugBreakSlotSequence()));
     HeapObject* code = Code::GetCodeFromTargetAddress(rinfo->call_address());
-
+    heap->mark_compact_collector()->MarkObject(code);
   }

   // Mark object pointed to by p.
-  INLINE(static void MarkObjectByPointer(Object** p)) {
+  INLINE(static void MarkObjectByPointer(Heap* heap, Object** p)) {
     if (!(*p)->IsHeapObject()) return;
     HeapObject* object = ShortCircuitConsString(p);
-
+    if (!object->IsMarked()) {
+      heap->mark_compact_collector()->MarkUnmarkedObject(object);
+    }
   }

+
   // Visit an unmarked object.
-  static
+  INLINE(static void VisitUnmarkedObject(MarkCompactCollector* collector,
+                                         HeapObject* obj)) {
 #ifdef DEBUG
-    ASSERT(
+    ASSERT(Isolate::Current()->heap()->Contains(obj));
     ASSERT(!obj->IsMarked());
 #endif
     Map* map = obj->map();
-
+    collector->SetMark(obj);
     // Mark the map pointer and the body.
-
+    if (!map->IsMarked()) collector->MarkUnmarkedObject(map);
     IterateBody(map, obj);
   }

   // Visit all unmarked objects pointed to by [start, end).
   // Returns false if the operation fails (lack of stack space).
-  static inline bool VisitUnmarkedObjects(
+  static inline bool VisitUnmarkedObjects(Heap* heap,
+                                          Object** start,
+                                          Object** end) {
     // Return false is we are close to the stack limit.
-    StackLimitCheck check;
+    StackLimitCheck check(heap->isolate());
     if (check.HasOverflowed()) return false;

+    MarkCompactCollector* collector = heap->mark_compact_collector();
     // Visit the unmarked objects.
     for (Object** p = start; p < end; p++) {
       if (!(*p)->IsHeapObject()) continue;
       HeapObject* obj = HeapObject::cast(*p);
       if (obj->IsMarked()) continue;
-      VisitUnmarkedObject(obj);
+      VisitUnmarkedObject(collector, obj);
     }
     return true;
   }
@@ -559,7 +554,8 @@ class StaticMarkingVisitor : public StaticVisitorBase {
                                  void> StructObjectVisitor;

   static void VisitCode(Map* map, HeapObject* object) {
-    reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>(
+    reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>(
+        map->heap());
   }

   // Code flushing support.
@@ -568,25 +564,24 @@ class StaticMarkingVisitor : public StaticVisitorBase {
   // flushed.
   static const int kCodeAgeThreshold = 5;

-  inline static bool HasSourceCode(SharedFunctionInfo* info) {
-    Object* undefined =
+  inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
+    Object* undefined = heap->raw_unchecked_undefined_value();
     return (info->script() != undefined) &&
         (reinterpret_cast<Script*>(info->script())->source() != undefined);
   }


   inline static bool IsCompiled(JSFunction* function) {
-    return
-        function->
+    return function->unchecked_code() !=
+        function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
   }

-
   inline static bool IsCompiled(SharedFunctionInfo* function) {
-    return
-        function->
+    return function->unchecked_code() !=
+        function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
   }

-  inline static bool IsFlushable(JSFunction* function) {
+  inline static bool IsFlushable(Heap* heap, JSFunction* function) {
     SharedFunctionInfo* shared_info = function->unchecked_shared();

     // Code is either on stack, in compilation cache or referenced
@@ -601,10 +596,10 @@ class StaticMarkingVisitor : public StaticVisitorBase {
       return false;
     }

-    return IsFlushable(shared_info);
+    return IsFlushable(heap, shared_info);
   }

-  inline static bool IsFlushable(SharedFunctionInfo* shared_info) {
+  inline static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info) {
     // Code is either on stack, in compilation cache or referenced
     // by optimized version of function.
     if (shared_info->unchecked_code()->IsMarked()) {
@@ -614,7 +609,7 @@ class StaticMarkingVisitor : public StaticVisitorBase {

     // The function must be compiled and have the source code available,
     // to be able to recompile it in case we need the function again.
-    if (!(shared_info->is_compiled() && HasSourceCode(shared_info))) {
+    if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
       return false;
     }

@@ -645,15 +640,15 @@ class StaticMarkingVisitor : public StaticVisitorBase {
   }


-  static bool FlushCodeForFunction(JSFunction* function) {
-    if (!IsFlushable(function)) return false;
+  static bool FlushCodeForFunction(Heap* heap, JSFunction* function) {
+    if (!IsFlushable(heap, function)) return false;

     // This function's code looks flushable. But we have to postpone the
     // decision until we see all functions that point to the same
     // SharedFunctionInfo because some of them might be optimized.
     // That would make the nonoptimized version of the code nonflushable,
     // because it is required for bailing out from optimized code.
-
+    heap->mark_compact_collector()->code_flusher()->AddCandidate(function);
     return true;
   }

@@ -676,9 +671,10 @@ class StaticMarkingVisitor : public StaticVisitorBase {
     if (!ctx->IsHeapObject()) return false;

     Map* map = SafeMap(ctx);
-
-
-        map ==
+    Heap* heap = map->heap();
+    if (!(map == heap->raw_unchecked_context_map() ||
+          map == heap->raw_unchecked_catch_context_map() ||
+          map == heap->raw_unchecked_global_context_map())) {
       return false;
     }

@@ -705,29 +701,37 @@ class StaticMarkingVisitor : public StaticVisitorBase {

   static void VisitSharedFunctionInfoAndFlushCode(Map* map,
                                                   HeapObject* object) {
+    MarkCompactCollector* collector = map->heap()->mark_compact_collector();
+    if (!collector->is_code_flushing_enabled()) {
+      VisitSharedFunctionInfoGeneric(map, object);
+      return;
+    }
     VisitSharedFunctionInfoAndFlushCodeGeneric(map, object, false);
   }


   static void VisitSharedFunctionInfoAndFlushCodeGeneric(
       Map* map, HeapObject* object, bool known_flush_code_candidate) {
+    Heap* heap = map->heap();
     SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);

     if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap();

     if (!known_flush_code_candidate) {
-      known_flush_code_candidate = IsFlushable(shared);
-      if (known_flush_code_candidate)
+      known_flush_code_candidate = IsFlushable(heap, shared);
+      if (known_flush_code_candidate) {
+        heap->mark_compact_collector()->code_flusher()->AddCandidate(shared);
+      }
     }

-    VisitSharedFunctionInfoFields(object, known_flush_code_candidate);
+    VisitSharedFunctionInfoFields(heap, object, known_flush_code_candidate);
   }


-  static void VisitCodeEntry(Address entry_address) {
+  static void VisitCodeEntry(Heap* heap, Address entry_address) {
     Object* code = Code::GetObjectFromEntryAddress(entry_address);
     Object* old_code = code;
-    VisitPointer(&code);
+    VisitPointer(heap, &code);
     if (code != old_code) {
       Memory::Address_at(entry_address) =
           reinterpret_cast<Code*>(code)->entry();
@@ -736,16 +740,22 @@ class StaticMarkingVisitor : public StaticVisitorBase {


   static void VisitJSFunctionAndFlushCode(Map* map, HeapObject* object) {
+    Heap* heap = map->heap();
+    MarkCompactCollector* collector = heap->mark_compact_collector();
+    if (!collector->is_code_flushing_enabled()) {
+      VisitJSFunction(map, object);
+      return;
+    }
+
     JSFunction* jsfunction = reinterpret_cast<JSFunction*>(object);
     // The function must have a valid context and not be a builtin.
     bool flush_code_candidate = false;
     if (IsValidNotBuiltinContext(jsfunction->unchecked_context())) {
-      flush_code_candidate = FlushCodeForFunction(jsfunction);
+      flush_code_candidate = FlushCodeForFunction(heap, jsfunction);
     }

     if (!flush_code_candidate) {
-
-          jsfunction->unchecked_shared()->unchecked_code());
+      collector->MarkObject(jsfunction->unchecked_shared()->unchecked_code());

       if (jsfunction->unchecked_code()->kind() == Code::OPTIMIZED_FUNCTION) {
         // For optimized functions we should retain both non-optimized version
@@ -761,8 +771,7 @@ class StaticMarkingVisitor : public StaticVisitorBase {
            i < count;
            i++) {
         JSFunction* inlined = reinterpret_cast<JSFunction*>(literals->get(i));
-
-            inlined->unchecked_shared()->unchecked_code());
+        collector->MarkObject(inlined->unchecked_shared()->unchecked_code());
       }
     }
   }
@@ -787,11 +796,15 @@ class StaticMarkingVisitor : public StaticVisitorBase {
   static inline void VisitJSFunctionFields(Map* map,
                                            JSFunction* object,
                                            bool flush_code_candidate) {
-
+    Heap* heap = map->heap();
+    MarkCompactCollector* collector = heap->mark_compact_collector();
+
+    VisitPointers(heap,
+                  SLOT_ADDR(object, JSFunction::kPropertiesOffset),
                   SLOT_ADDR(object, JSFunction::kCodeEntryOffset));

     if (!flush_code_candidate) {
-      VisitCodeEntry(object->address() + JSFunction::kCodeEntryOffset);
+      VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
     } else {
       // Don't visit code object.

@@ -800,15 +813,16 @@ class StaticMarkingVisitor : public StaticVisitorBase {
       SharedFunctionInfo* shared_info = object->unchecked_shared();
       if (!shared_info->IsMarked()) {
         Map* shared_info_map = shared_info->map();
-
-
+        collector->SetMark(shared_info);
+        collector->MarkObject(shared_info_map);
         VisitSharedFunctionInfoAndFlushCodeGeneric(shared_info_map,
                                                    shared_info,
                                                    true);
       }
     }

-    VisitPointers(
+    VisitPointers(heap,
+                  SLOT_ADDR(object,
                             JSFunction::kCodeEntryOffset + kPointerSize),
                   SLOT_ADDR(object, JSFunction::kNonWeakFieldsEndOffset));

@@ -816,15 +830,17 @@ class StaticMarkingVisitor : public StaticVisitorBase {
   }


-  static void VisitSharedFunctionInfoFields(
+  static void VisitSharedFunctionInfoFields(Heap* heap,
+                                            HeapObject* object,
                                             bool flush_code_candidate) {
-    VisitPointer(SLOT_ADDR(object, SharedFunctionInfo::kNameOffset));
+    VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kNameOffset));

     if (!flush_code_candidate) {
-      VisitPointer(SLOT_ADDR(object, SharedFunctionInfo::kCodeOffset));
+      VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kCodeOffset));
     }

-    VisitPointers(
+    VisitPointers(heap,
+                  SLOT_ADDR(object, SharedFunctionInfo::kScopeInfoOffset),
                   SLOT_ADDR(object, SharedFunctionInfo::kSize));
   }

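The hunks above all follow one refactoring pattern: visitors that used to reach for a process-global heap now carry the Heap* (or the collector derived from it) that they were constructed with. A minimal standalone C++ sketch of that pattern, with illustrative stand-in types rather than the real V8 classes:

#include <vector>

// Stand-ins for heap objects and the heap; not the real V8 types.
struct Obj { bool marked = false; };

struct Heap {
  std::vector<Obj*> marking_stack;
  void MarkObject(Obj* o) {
    if (!o->marked) { o->marked = true; marking_stack.push_back(o); }
  }
};

// Instead of calling into a global singleton, the visitor binds the heap
// once at construction and threads it to every helper it calls, so
// multiple isolates can run their collectors independently.
class MarkingVisitorModel {
 public:
  explicit MarkingVisitorModel(Heap* heap) : heap_(heap) {}
  void VisitPointer(Obj** p) { heap_->MarkObject(*p); }
 private:
  Heap* heap_;  // bound once, never looked up globally
};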
@@ -842,40 +858,54 @@ VisitorDispatchTable<StaticMarkingVisitor::Callback>

 class MarkingVisitor : public ObjectVisitor {
  public:
+  explicit MarkingVisitor(Heap* heap) : heap_(heap) { }
+
   void VisitPointer(Object** p) {
-    StaticMarkingVisitor::VisitPointer(p);
+    StaticMarkingVisitor::VisitPointer(heap_, p);
   }

   void VisitPointers(Object** start, Object** end) {
-    StaticMarkingVisitor::VisitPointers(start, end);
+    StaticMarkingVisitor::VisitPointers(heap_, start, end);
   }

-  void VisitCodeTarget(RelocInfo* rinfo) {
-    StaticMarkingVisitor::VisitCodeTarget(rinfo);
+  void VisitCodeTarget(Heap* heap, RelocInfo* rinfo) {
+    StaticMarkingVisitor::VisitCodeTarget(heap, rinfo);
   }

-  void VisitGlobalPropertyCell(RelocInfo* rinfo) {
-    StaticMarkingVisitor::VisitGlobalPropertyCell(rinfo);
+  void VisitGlobalPropertyCell(Heap* heap, RelocInfo* rinfo) {
+    StaticMarkingVisitor::VisitGlobalPropertyCell(heap, rinfo);
   }

-  void VisitDebugTarget(RelocInfo* rinfo) {
-    StaticMarkingVisitor::VisitDebugTarget(rinfo);
+  void VisitDebugTarget(Heap* heap, RelocInfo* rinfo) {
+    StaticMarkingVisitor::VisitDebugTarget(heap, rinfo);
   }
+
+ private:
+  Heap* heap_;
 };


 class CodeMarkingVisitor : public ThreadVisitor {
  public:
+  explicit CodeMarkingVisitor(MarkCompactCollector* collector)
+      : collector_(collector) {}
+
   void VisitThread(ThreadLocalTop* top) {
     for (StackFrameIterator it(top); !it.done(); it.Advance()) {
-
+      collector_->MarkObject(it.frame()->unchecked_code());
     }
   }
+
+ private:
+  MarkCompactCollector* collector_;
 };


 class SharedFunctionInfoMarkingVisitor : public ObjectVisitor {
  public:
+  explicit SharedFunctionInfoMarkingVisitor(MarkCompactCollector* collector)
+      : collector_(collector) {}
+
   void VisitPointers(Object** start, Object** end) {
     for (Object** p = start; p < end; p++) VisitPointer(p);
   }
@@ -884,44 +914,52 @@ class SharedFunctionInfoMarkingVisitor : public ObjectVisitor {
     Object* obj = *slot;
     if (obj->IsSharedFunctionInfo()) {
       SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(obj);
-
-
+      collector_->MarkObject(shared->unchecked_code());
+      collector_->MarkObject(shared);
     }
   }
+
+ private:
+  MarkCompactCollector* collector_;
 };


 void MarkCompactCollector::PrepareForCodeFlushing() {
+  ASSERT(heap() == Isolate::Current()->heap());
+
   if (!FLAG_flush_code) {
-
+    EnableCodeFlushing(false);
     return;
   }

 #ifdef ENABLE_DEBUGGER_SUPPORT
-  if (
-
+  if (heap()->isolate()->debug()->IsLoaded() ||
+      heap()->isolate()->debug()->has_break_points()) {
+    EnableCodeFlushing(false);
     return;
   }
 #endif
-
+  EnableCodeFlushing(true);

   // Ensure that empty descriptor array is marked. Method MarkDescriptorArray
   // relies on it being marked before any other descriptor array.
-  MarkObject(
+  MarkObject(heap()->raw_unchecked_empty_descriptor_array());

   // Make sure we are not referencing the code from the stack.
+  ASSERT(this == heap()->mark_compact_collector());
   for (StackFrameIterator it; !it.done(); it.Advance()) {
     MarkObject(it.frame()->unchecked_code());
   }

   // Iterate the archived stacks in all threads to check if
   // the code is referenced.
-  CodeMarkingVisitor code_marking_visitor;
-
+  CodeMarkingVisitor code_marking_visitor(this);
+  heap()->isolate()->thread_manager()->IterateArchivedThreads(
+      &code_marking_visitor);

-  SharedFunctionInfoMarkingVisitor visitor;
-
-
+  SharedFunctionInfoMarkingVisitor visitor(this);
+  heap()->isolate()->compilation_cache()->IterateFunctions(&visitor);
+  heap()->isolate()->handle_scope_implementer()->Iterate(&visitor);

   ProcessMarkingStack();
 }
@@ -930,6 +968,9 @@ void MarkCompactCollector::PrepareForCodeFlushing() {
 // Visitor class for marking heap roots.
 class RootMarkingVisitor : public ObjectVisitor {
  public:
+  explicit RootMarkingVisitor(Heap* heap)
+      : collector_(heap->mark_compact_collector()) { }
+
   void VisitPointer(Object** p) {
     MarkObjectByPointer(p);
   }
@@ -948,23 +989,26 @@ class RootMarkingVisitor : public ObjectVisitor {

     Map* map = object->map();
     // Mark the object.
-
+    collector_->SetMark(object);

     // Mark the map pointer and body, and push them on the marking stack.
-
+    collector_->MarkObject(map);
     StaticMarkingVisitor::IterateBody(map, object);

     // Mark all the objects reachable from the map and body.  May leave
     // overflowed objects in the heap.
-
+    collector_->EmptyMarkingStack();
   }
+
+  MarkCompactCollector* collector_;
 };


 // Helper class for pruning the symbol table.
 class SymbolTableCleaner : public ObjectVisitor {
  public:
-  SymbolTableCleaner(
+  explicit SymbolTableCleaner(Heap* heap)
+    : heap_(heap), pointers_removed_(0) { }

   virtual void VisitPointers(Object** start, Object** end) {
     // Visit all HeapObject pointers in [start, end).
@@ -976,10 +1020,10 @@ class SymbolTableCleaner : public ObjectVisitor {
       // Since no objects have yet been moved we can safely access the map of
       // the object.
       if ((*p)->IsExternalString()) {
-
+        heap_->FinalizeExternalString(String::cast(*p));
       }
       // Set the entry to null_value (as deleted).
-      *p =
+      *p = heap_->raw_unchecked_null_value();
       pointers_removed_++;
     }
   }
@@ -989,6 +1033,7 @@ class SymbolTableCleaner : public ObjectVisitor {
     return pointers_removed_;
   }
  private:
+  Heap* heap_;
   int pointers_removed_;
 };

@@ -1010,11 +1055,11 @@ class MarkCompactWeakObjectRetainer : public WeakObjectRetainer {

 void MarkCompactCollector::MarkUnmarkedObject(HeapObject* object) {
   ASSERT(!object->IsMarked());
-  ASSERT(
+  ASSERT(HEAP->Contains(object));
   if (object->IsMap()) {
     Map* map = Map::cast(object);
     if (FLAG_cleanup_caches_in_maps_at_gc) {
-      map->ClearCodeCache();
+      map->ClearCodeCache(heap());
     }
     SetMark(map);
     if (FLAG_collect_maps &&
@@ -1022,11 +1067,11 @@ void MarkCompactCollector::MarkUnmarkedObject(HeapObject* object) {
         map->instance_type() <= JS_FUNCTION_TYPE) {
       MarkMapContents(map);
     } else {
-
+      marking_stack_.Push(map);
     }
   } else {
     SetMark(object);
-
+    marking_stack_.Push(object);
   }
 }

@@ -1043,7 +1088,7 @@ void MarkCompactCollector::MarkMapContents(Map* map) {

   Object** end_slot = HeapObject::RawField(map, Map::kPointerFieldsEndOffset);

-  StaticMarkingVisitor::VisitPointers(start_slot, end_slot);
+  StaticMarkingVisitor::VisitPointers(map->heap(), start_slot, end_slot);
 }

@@ -1051,7 +1096,7 @@ void MarkCompactCollector::MarkDescriptorArray(
     DescriptorArray* descriptors) {
   if (descriptors->IsMarked()) return;
   // Empty descriptor array is marked as a root before any maps are marked.
-  ASSERT(descriptors !=
+  ASSERT(descriptors != HEAP->raw_unchecked_empty_descriptor_array());
   SetMark(descriptors);

   FixedArray* contents = reinterpret_cast<FixedArray*>(
@@ -1061,11 +1106,11 @@ void MarkCompactCollector::MarkDescriptorArray(
   ASSERT(contents->IsFixedArray());
   ASSERT(contents->length() >= 2);
   SetMark(contents);
-  // Contents contains (value, details) pairs.  If the details say that
-  //
-  // NULL_DESCRIPTOR, we don't mark the value as
-  //
-  // Map*).
+  // Contents contains (value, details) pairs.  If the details say that the type
+  // of descriptor is MAP_TRANSITION, CONSTANT_TRANSITION,
+  // EXTERNAL_ARRAY_TRANSITION or NULL_DESCRIPTOR, we don't mark the value as
+  // live.  Only for MAP_TRANSITION, EXTERNAL_ARRAY_TRANSITION and
+  // CONSTANT_TRANSITION is the value an Object* (a Map*).
   for (int i = 0; i < contents->length(); i += 2) {
     // If the pair (value, details) at index i, i+1 is not
     // a transition or null descriptor, mark the value.
@@ -1074,18 +1119,18 @@ void MarkCompactCollector::MarkDescriptorArray(
       HeapObject* object = reinterpret_cast<HeapObject*>(contents->get(i));
       if (object->IsHeapObject() && !object->IsMarked()) {
         SetMark(object);
-
+        marking_stack_.Push(object);
       }
     }
   }
   // The DescriptorArray descriptors contains a pointer to its contents array,
   // but the contents array is already marked.
-
+  marking_stack_.Push(descriptors);
 }


 void MarkCompactCollector::CreateBackPointers() {
-  HeapObjectIterator iterator(
+  HeapObjectIterator iterator(heap()->map_space());
   for (HeapObject* next_object = iterator.next();
        next_object != NULL; next_object = iterator.next()) {
     if (next_object->IsMap()) {  // Could also be ByteArray on free list.
@@ -1094,7 +1139,7 @@ void MarkCompactCollector::CreateBackPointers() {
           map->instance_type() <= JS_FUNCTION_TYPE) {
         map->CreateBackPointers();
       } else {
-        ASSERT(map->instance_descriptors() ==
+        ASSERT(map->instance_descriptors() == heap()->empty_descriptor_array());
       }
     }
   }
@@ -1111,25 +1156,29 @@ static int OverflowObjectSize(HeapObject* obj) {
 }


-
-
-//
-
-
-
-
-
-
-
-
-
-
-
-
+class OverflowedObjectsScanner : public AllStatic {
+ public:
+  // Fill the marking stack with overflowed objects returned by the given
+  // iterator.  Stop when the marking stack is filled or the end of the space
+  // is reached, whichever comes first.
+  template<class T>
+  static inline void ScanOverflowedObjects(MarkCompactCollector* collector,
+                                           T* it) {
+    // The caller should ensure that the marking stack is initially not full,
+    // so that we don't waste effort pointlessly scanning for objects.
+    ASSERT(!collector->marking_stack_.is_full());
+
+    for (HeapObject* object = it->next(); object != NULL; object = it->next()) {
+      if (object->IsOverflowed()) {
+        object->ClearOverflow();
+        ASSERT(object->IsMarked());
+        ASSERT(HEAP->Contains(object));
+        collector->marking_stack_.Push(object);
+        if (collector->marking_stack_.is_full()) return;
+      }
     }
   }
-}
+};


 bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) {
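For context on the class introduced above: the marking stack is a fixed-size buffer (carved out of from-space), so it can fill up during tracing. Rather than grow it, the collector flags the object in place and later re-collects flagged objects with a linear heap scan. A simplified standalone model of that bounded stack, with illustrative names:

#include <cstddef>

struct HeapObjectModel { bool overflowed = false; };

class MarkingStackModel {
 public:
  explicit MarkingStackModel(std::size_t capacity) : capacity_(capacity) {}
  bool is_full() const { return size_ == capacity_; }
  bool overflowed() const { return overflowed_; }
  void Push(HeapObjectModel* obj) {
    if (is_full()) {
      // Rather than grow, flag the object in the heap and remember that a
      // rescan is needed; RefillMarkingStack-style code finds it later.
      obj->overflowed = true;
      overflowed_ = true;
      return;
    }
    ++size_;  // a real stack would also store the pointer here
  }
  void clear_overflowed() { overflowed_ = false; }

 private:
  std::size_t capacity_;
  std::size_t size_ = 0;
  bool overflowed_ = false;
};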
@@ -1138,11 +1187,11 @@ bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) {


 void MarkCompactCollector::MarkSymbolTable() {
-  SymbolTable* symbol_table =
+  SymbolTable* symbol_table = heap()->raw_unchecked_symbol_table();
   // Mark the symbol table itself.
   SetMark(symbol_table);
   // Explicitly mark the prefix.
-  MarkingVisitor marker;
+  MarkingVisitor marker(heap());
   symbol_table->IteratePrefix(&marker);
   ProcessMarkingStack();
 }
@@ -1151,13 +1200,13 @@ void MarkCompactCollector::MarkSymbolTable() {
 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
   // Mark the heap roots including global variables, stack variables,
   // etc., and all objects reachable from them.
-
+  heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG);

   // Handle the symbol table specially.
   MarkSymbolTable();

   // There may be overflowed objects in the heap.  Visit them now.
-  while (
+  while (marking_stack_.overflowed()) {
     RefillMarkingStack();
     EmptyMarkingStack();
   }
@@ -1165,7 +1214,8 @@ void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {


 void MarkCompactCollector::MarkObjectGroups() {
-  List<ObjectGroup*>* object_groups =
+  List<ObjectGroup*>* object_groups =
+      heap()->isolate()->global_handles()->object_groups();

   for (int i = 0; i < object_groups->length(); i++) {
     ObjectGroup* entry = object_groups->at(i);
@@ -1190,23 +1240,51 @@ void MarkCompactCollector::MarkObjectGroups() {
       MarkObject(HeapObject::cast(*objects[j]));
     }
   }
+
   // Once the entire group has been colored gray, set the object group
   // to NULL so it won't be processed again.
-  delete
+  delete entry;
   object_groups->at(i) = NULL;
  }
 }


+void MarkCompactCollector::MarkImplicitRefGroups() {
+  List<ImplicitRefGroup*>* ref_groups =
+      heap()->isolate()->global_handles()->implicit_ref_groups();
+
+  for (int i = 0; i < ref_groups->length(); i++) {
+    ImplicitRefGroup* entry = ref_groups->at(i);
+    if (entry == NULL) continue;
+
+    if (!entry->parent_->IsMarked()) continue;
+
+    List<Object**>& children = entry->children_;
+    // A parent object is marked, so mark as gray all child white heap
+    // objects.
+    for (int j = 0; j < children.length(); ++j) {
+      if ((*children[j])->IsHeapObject()) {
+        MarkObject(HeapObject::cast(*children[j]));
+      }
+    }
+
+    // Once the entire group has been colored gray, set the group
+    // to NULL so it won't be processed again.
+    delete entry;
+    ref_groups->at(i) = NULL;
+  }
+}
+
+
 // Mark all objects reachable from the objects on the marking stack.
 // Before: the marking stack contains zero or more heap object pointers.
 // After: the marking stack is empty, and all objects reachable from the
 // marking stack have been marked, or are overflowed in the heap.
 void MarkCompactCollector::EmptyMarkingStack() {
-  while (!
-    HeapObject* object =
+  while (!marking_stack_.is_empty()) {
+    HeapObject* object = marking_stack_.Pop();
     ASSERT(object->IsHeapObject());
-    ASSERT(
+    ASSERT(heap()->Contains(object));
     ASSERT(object->IsMarked());
     ASSERT(!object->IsOverflowed());

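The new MarkImplicitRefGroups above applies a simple rule: once a group's parent object is marked, every still-white child is colored gray and queued for tracing, and the processed group is discarded. A self-contained sketch of that rule, with hypothetical types standing in for the GlobalHandles machinery:

#include <vector>

struct ObjModel { bool marked = false; };

struct ImplicitRefGroupModel {
  ObjModel* parent;
  std::vector<ObjModel*> children;
};

// If the group's parent is already marked, color each white child gray
// and queue it; the processed group is deleted and its slot nulled so it
// is handled at most once per collection.
void MarkImplicitRefGroupsModel(std::vector<ImplicitRefGroupModel*>* groups,
                                std::vector<ObjModel*>* gray_worklist) {
  for (ImplicitRefGroupModel*& entry : *groups) {
    if (entry == nullptr || !entry->parent->marked) continue;
    for (ObjModel* child : entry->children) {
      if (!child->marked) {
        child->marked = true;
        gray_worklist->push_back(child);
      }
    }
    delete entry;
    entry = nullptr;
  }
}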
@@ -1228,38 +1306,38 @@ void MarkCompactCollector::EmptyMarkingStack() {
 // overflowed objects in the heap so the overflow flag on the markings stack
 // is cleared.
 void MarkCompactCollector::RefillMarkingStack() {
-  ASSERT(
+  ASSERT(marking_stack_.overflowed());

-  SemiSpaceIterator new_it(
-  ScanOverflowedObjects(&new_it);
-  if (
+  SemiSpaceIterator new_it(heap()->new_space(), &OverflowObjectSize);
+  OverflowedObjectsScanner::ScanOverflowedObjects(this, &new_it);
+  if (marking_stack_.is_full()) return;

-  HeapObjectIterator old_pointer_it(
+  HeapObjectIterator old_pointer_it(heap()->old_pointer_space(),
                                     &OverflowObjectSize);
-  ScanOverflowedObjects(&old_pointer_it);
-  if (
+  OverflowedObjectsScanner::ScanOverflowedObjects(this, &old_pointer_it);
+  if (marking_stack_.is_full()) return;

-  HeapObjectIterator old_data_it(
-  ScanOverflowedObjects(&old_data_it);
-  if (
+  HeapObjectIterator old_data_it(heap()->old_data_space(), &OverflowObjectSize);
+  OverflowedObjectsScanner::ScanOverflowedObjects(this, &old_data_it);
+  if (marking_stack_.is_full()) return;

-  HeapObjectIterator code_it(
-  ScanOverflowedObjects(&code_it);
-  if (
+  HeapObjectIterator code_it(heap()->code_space(), &OverflowObjectSize);
+  OverflowedObjectsScanner::ScanOverflowedObjects(this, &code_it);
+  if (marking_stack_.is_full()) return;

-  HeapObjectIterator map_it(
-  ScanOverflowedObjects(&map_it);
-  if (
+  HeapObjectIterator map_it(heap()->map_space(), &OverflowObjectSize);
+  OverflowedObjectsScanner::ScanOverflowedObjects(this, &map_it);
+  if (marking_stack_.is_full()) return;

-  HeapObjectIterator cell_it(
-  ScanOverflowedObjects(&cell_it);
-  if (
+  HeapObjectIterator cell_it(heap()->cell_space(), &OverflowObjectSize);
+  OverflowedObjectsScanner::ScanOverflowedObjects(this, &cell_it);
+  if (marking_stack_.is_full()) return;

-  LargeObjectIterator lo_it(
-  ScanOverflowedObjects(&lo_it);
-  if (
+  LargeObjectIterator lo_it(heap()->lo_space(), &OverflowObjectSize);
+  OverflowedObjectsScanner::ScanOverflowedObjects(this, &lo_it);
+  if (marking_stack_.is_full()) return;

-
+  marking_stack_.clear_overflowed();
 }
@@ -1269,19 +1347,20 @@ void MarkCompactCollector::RefillMarkingStack() {
 // objects in the heap.
 void MarkCompactCollector::ProcessMarkingStack() {
   EmptyMarkingStack();
-  while (
+  while (marking_stack_.overflowed()) {
     RefillMarkingStack();
     EmptyMarkingStack();
   }
 }


-void MarkCompactCollector::
+void MarkCompactCollector::ProcessExternalMarking() {
   bool work_to_do = true;
-  ASSERT(
+  ASSERT(marking_stack_.is_empty());
   while (work_to_do) {
     MarkObjectGroups();
-
+    MarkImplicitRefGroups();
+    work_to_do = !marking_stack_.is_empty();
     ProcessMarkingStack();
   }
 }
@@ -1292,7 +1371,7 @@ void MarkCompactCollector::MarkLiveObjects() {
   // The recursive GC marker detects when it is nearing stack overflow,
   // and switches to a different marking system.  JS interrupts interfere
   // with the C stack limit check.
-  PostponeInterruptsScope postpone;
+  PostponeInterruptsScope postpone(heap()->isolate());

 #ifdef DEBUG
   ASSERT(state_ == PREPARE_GC);
@@ -1300,21 +1379,20 @@ void MarkCompactCollector::MarkLiveObjects() {
 #endif
   // The to space contains live objects, the from space is used as a marking
   // stack.
-
-
+  marking_stack_.Initialize(heap()->new_space()->FromSpaceLow(),
+                            heap()->new_space()->FromSpaceHigh());

-  ASSERT(!
+  ASSERT(!marking_stack_.overflowed());

   PrepareForCodeFlushing();

-  RootMarkingVisitor root_visitor;
+  RootMarkingVisitor root_visitor(heap());
   MarkRoots(&root_visitor);

   // The objects reachable from the roots are marked, yet unreachable
-  // objects are unmarked.  Mark objects reachable
-  //
-
-  ProcessObjectGroups();
+  // objects are unmarked.  Mark objects reachable due to host
+  // application specific logic.
+  ProcessExternalMarking();

   // The objects reachable from the roots or object groups are marked,
   // yet unreachable objects are unmarked.  Mark objects reachable
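ProcessExternalMarking, introduced above, iterates the group-marking callbacks to a fixed point: marking one group's children can mark another group's parent, which enables that group on the next round. A schematic version of the loop, with generic callables standing in for the real member functions:

#include <vector>

struct GrayStackModel {
  std::vector<void*> items;
  bool empty() const { return items.empty(); }
};

// mark_groups stands in for MarkObjectGroups()/MarkImplicitRefGroups();
// drain stands in for ProcessMarkingStack().
template <typename MarkGroupsFn, typename DrainFn>
void ProcessExternalMarkingModel(GrayStackModel* stack,
                                 MarkGroupsFn mark_groups,
                                 DrainFn drain) {
  bool work_to_do = true;
  while (work_to_do) {
    mark_groups(stack);            // may color new objects gray
    work_to_do = !stack->empty();  // did this round make progress?
    drain(stack);                  // trace everything now reachable
  }
}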
@@ -1322,58 +1400,65 @@ void MarkCompactCollector::MarkLiveObjects() {
   //
   // First we identify nonlive weak handles and mark them as pending
   // destruction.
-
+  heap()->isolate()->global_handles()->IdentifyWeakHandles(
+      &IsUnmarkedHeapObject);
   // Then we mark the objects and process the transitive closure.
-
-  while (
+  heap()->isolate()->global_handles()->IterateWeakRoots(&root_visitor);
+  while (marking_stack_.overflowed()) {
     RefillMarkingStack();
     EmptyMarkingStack();
   }

-  // Repeat
-  // weak roots.
-
+  // Repeat host application specific marking to mark unmarked objects
+  // reachable from the weak roots.
+  ProcessExternalMarking();

   // Prune the symbol table removing all symbols only pointed to by the
   // symbol table.  Cannot use symbol_table() here because the symbol
   // table is marked.
-  SymbolTable* symbol_table =
-  SymbolTableCleaner v;
+  SymbolTable* symbol_table = heap()->raw_unchecked_symbol_table();
+  SymbolTableCleaner v(heap());
   symbol_table->IterateElements(&v);
   symbol_table->ElementsRemoved(v.PointersRemoved());
-
-
+  heap()->external_string_table_.Iterate(&v);
+  heap()->external_string_table_.CleanUp();

   // Process the weak references.
   MarkCompactWeakObjectRetainer mark_compact_object_retainer;
-
+  heap()->ProcessWeakReferences(&mark_compact_object_retainer);

   // Remove object groups after marking phase.
-
+  heap()->isolate()->global_handles()->RemoveObjectGroups();
+  heap()->isolate()->global_handles()->RemoveImplicitRefGroups();

   // Flush code from collected candidates.
-
+  if (is_code_flushing_enabled()) {
+    code_flusher_->ProcessCandidates();
+  }
+
+  // Clean up dead objects from the runtime profiler.
+  heap()->isolate()->runtime_profiler()->RemoveDeadSamples();
 }


 #ifdef DEBUG
 void MarkCompactCollector::UpdateLiveObjectCount(HeapObject* obj) {
   live_bytes_ += obj->Size();
-  if (
+  if (heap()->new_space()->Contains(obj)) {
     live_young_objects_size_ += obj->Size();
-  } else if (
+  } else if (heap()->map_space()->Contains(obj)) {
     ASSERT(obj->IsMap());
     live_map_objects_size_ += obj->Size();
-  } else if (
+  } else if (heap()->cell_space()->Contains(obj)) {
     ASSERT(obj->IsJSGlobalPropertyCell());
     live_cell_objects_size_ += obj->Size();
-  } else if (
+  } else if (heap()->old_pointer_space()->Contains(obj)) {
     live_old_pointer_objects_size_ += obj->Size();
-  } else if (
+  } else if (heap()->old_data_space()->Contains(obj)) {
     live_old_data_objects_size_ += obj->Size();
-  } else if (
+  } else if (heap()->code_space()->Contains(obj)) {
     live_code_objects_size_ += obj->Size();
-  } else if (
+  } else if (heap()->lo_space()->Contains(obj)) {
     live_lo_objects_size_ += obj->Size();
   } else {
     UNREACHABLE();
@@ -1389,7 +1474,7 @@ void MarkCompactCollector::SweepLargeObjectSpace() {
       compacting_collection_ ? ENCODE_FORWARDING_ADDRESSES : SWEEP_SPACES;
 #endif
   // Deallocate unmarked objects and clear marked bits for marked objects.
-
+  heap()->lo_space()->FreeUnmarkedObjects();
 }

@@ -1402,7 +1487,7 @@ bool MarkCompactCollector::SafeIsMap(HeapObject* object) {


 void MarkCompactCollector::ClearNonLiveTransitions() {
-  HeapObjectIterator map_iterator(
+  HeapObjectIterator map_iterator(heap()->map_space(), &SizeOfMarkedObject);
   // Iterate over the map space, setting map transitions that go from
   // a marked map to an unmarked map to null transitions.  At the same time,
   // set all the prototype fields of maps back to their original value,
@@ -1452,7 +1537,7 @@ void MarkCompactCollector::ClearNonLiveTransitions() {
       // This test will always be false on the first iteration.
       if (on_dead_path && current->IsMarked()) {
         on_dead_path = false;
-        current->ClearNonLiveTransitions(real_prototype);
+        current->ClearNonLiveTransitions(heap(), real_prototype);
       }
       *HeapObject::RawField(current, Map::kPrototypeOffset) =
           real_prototype;
@@ -1514,20 +1599,21 @@ void EncodeFreeRegion(Address free_start, int free_size) {
 // Try to promote all objects in new space.  Heap numbers and sequential
 // strings are promoted to the code space, large objects to large object space,
 // and all others to the old space.
-inline MaybeObject* MCAllocateFromNewSpace(
+inline MaybeObject* MCAllocateFromNewSpace(Heap* heap,
+                                           HeapObject* object,
                                            int object_size) {
   MaybeObject* forwarded;
-  if (object_size >
+  if (object_size > heap->MaxObjectSizeInPagedSpace()) {
     forwarded = Failure::Exception();
   } else {
-    OldSpace* target_space =
-    ASSERT(target_space ==
-           target_space ==
+    OldSpace* target_space = heap->TargetSpace(object);
+    ASSERT(target_space == heap->old_pointer_space() ||
+           target_space == heap->old_data_space());
     forwarded = target_space->MCAllocateRaw(object_size);
   }
   Object* result;
   if (!forwarded->ToObject(&result)) {
-    result =
+    result = heap->new_space()->MCAllocateRaw(object_size)->ToObjectUnchecked();
   }
   return result;
 }
@@ -1535,48 +1621,53 @@ inline MaybeObject* MCAllocateFromNewSpace(HeapObject* object,

 // Allocation functions for the paged spaces call the space's MCAllocateRaw.
 MUST_USE_RESULT inline MaybeObject* MCAllocateFromOldPointerSpace(
+    Heap *heap,
     HeapObject* ignore,
     int object_size) {
-  return
+  return heap->old_pointer_space()->MCAllocateRaw(object_size);
 }


 MUST_USE_RESULT inline MaybeObject* MCAllocateFromOldDataSpace(
+    Heap* heap,
     HeapObject* ignore,
     int object_size) {
-  return
+  return heap->old_data_space()->MCAllocateRaw(object_size);
 }


 MUST_USE_RESULT inline MaybeObject* MCAllocateFromCodeSpace(
+    Heap* heap,
     HeapObject* ignore,
     int object_size) {
-  return
+  return heap->code_space()->MCAllocateRaw(object_size);
 }


 MUST_USE_RESULT inline MaybeObject* MCAllocateFromMapSpace(
+    Heap* heap,
     HeapObject* ignore,
     int object_size) {
-  return
+  return heap->map_space()->MCAllocateRaw(object_size);
 }


-MUST_USE_RESULT inline MaybeObject* MCAllocateFromCellSpace(
-
-  return
+MUST_USE_RESULT inline MaybeObject* MCAllocateFromCellSpace(
+    Heap* heap, HeapObject* ignore, int object_size) {
+  return heap->cell_space()->MCAllocateRaw(object_size);
 }


 // The forwarding address is encoded at the same offset as the current
 // to-space object, but in from space.
-inline void EncodeForwardingAddressInNewSpace(
+inline void EncodeForwardingAddressInNewSpace(Heap* heap,
+                                              HeapObject* old_object,
                                               int object_size,
                                               Object* new_object,
                                               int* ignored) {
   int offset =
-
-  Memory::Address_at(
+      heap->new_space()->ToSpaceOffsetForAddress(old_object->address());
+  Memory::Address_at(heap->new_space()->FromSpaceLow() + offset) =
       HeapObject::cast(new_object)->address();
 }
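The address arithmetic in EncodeForwardingAddressInNewSpace above relies on the two semispaces being the same size: a to-space object's forwarding pointer is stored in from space at the object's own offset, so no side table is needed. A standalone sketch of that trick, using raw char* in place of V8's Address type:

#include <cstddef>
#include <cstring>

// Both semispaces have the same size, so an object's offset in to space
// indexes a valid slot in from space where its forwarding address fits.
struct SemiSpacesModel {
  char* to_space_low;
  char* from_space_low;

  void EncodeForwarding(char* old_object, char* new_location) {
    std::ptrdiff_t offset = old_object - to_space_low;
    std::memcpy(from_space_low + offset, &new_location, sizeof new_location);
  }

  char* DecodeForwarding(char* old_object) const {
    char* result;
    std::memcpy(&result, from_space_low + (old_object - to_space_low),
                sizeof result);
    return result;
  }
};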
@@ -1584,7 +1675,8 @@ inline void EncodeForwardingAddressInNewSpace(HeapObject* old_object,
 // The forwarding address is encoded in the map pointer of the object as an
 // offset (in terms of live bytes) from the address of the first live object
 // in the page.
-inline void EncodeForwardingAddressInPagedSpace(
+inline void EncodeForwardingAddressInPagedSpace(Heap* heap,
+                                                HeapObject* old_object,
                                                 int object_size,
                                                 Object* new_object,
                                                 int* offset) {
@@ -1603,7 +1695,7 @@ inline void EncodeForwardingAddressInPagedSpace(HeapObject* old_object,


 // Most non-live objects are ignored.
-inline void IgnoreNonLiveObject(HeapObject* object) {}
+inline void IgnoreNonLiveObject(HeapObject* object, Isolate* isolate) {}


 // Function template that, given a range of addresses (eg, a semispace or a
@@ -1617,7 +1709,8 @@ inline void IgnoreNonLiveObject(HeapObject* object) {}
 template<MarkCompactCollector::AllocationFunction Alloc,
          MarkCompactCollector::EncodingFunction Encode,
          MarkCompactCollector::ProcessNonLiveFunction ProcessNonLive>
-inline void EncodeForwardingAddressesInRange(
+inline void EncodeForwardingAddressesInRange(MarkCompactCollector* collector,
+                                             Address start,
                                              Address end,
                                              int* offset) {
   // The start address of the current free region while sweeping the space.
@@ -1637,12 +1730,12 @@ inline void EncodeForwardingAddressesInRange(Address start,
     HeapObject* object = HeapObject::FromAddress(current);
     if (object->IsMarked()) {
       object->ClearMark();
-
+      collector->tracer()->decrement_marked_count();
       object_size = object->Size();

-
-
-      Encode(object, object_size, forwarded, offset);
+      Object* forwarded =
+          Alloc(collector->heap(), object, object_size)->ToObjectUnchecked();
+      Encode(collector->heap(), object, object_size, forwarded, offset);

 #ifdef DEBUG
       if (FLAG_gc_verbose) {
@@ -1656,7 +1749,7 @@ inline void EncodeForwardingAddressesInRange(Address start,
       }
     } else {  // Non-live object.
       object_size = object->Size();
-      ProcessNonLive(object);
+      ProcessNonLive(object, collector->heap()->isolate());
       if (is_prev_alive) {  // Transition from live to non-live.
         free_start = current;
         is_prev_alive = false;
@@ -1678,8 +1771,9 @@ void MarkCompactCollector::EncodeForwardingAddressesInNewSpace() {
   EncodeForwardingAddressesInRange<MCAllocateFromNewSpace,
                                    EncodeForwardingAddressInNewSpace,
                                    IgnoreNonLiveObject>(
-
-
+      this,
+      heap()->new_space()->bottom(),
+      heap()->new_space()->top(),
       &ignored);
 }
@@ -1698,6 +1792,7 @@ void MarkCompactCollector::EncodeForwardingAddressesInPagedSpace(
   EncodeForwardingAddressesInRange<Alloc,
                                    EncodeForwardingAddressInPagedSpace,
                                    ProcessNonLive>(
+      this,
       p->ObjectAreaStart(),
       p->AllocationTop(),
       &offset);
@@ -1715,14 +1810,15 @@ void MarkCompactCollector::EncodeForwardingAddressesInPagedSpace(
 // to encounter pointers to dead objects during traversal of dirty regions we
 // should clear them to avoid encountering them during next dirty regions
 // iteration.
-static void MigrateObject(
+static void MigrateObject(Heap* heap,
+                          Address dst,
                           Address src,
                           int size,
                           bool to_old_space) {
   if (to_old_space) {
-
+    heap->CopyBlockToOldSpaceAndUpdateRegionMarks(dst, src, size);
   } else {
-
+    heap->CopyBlock(dst, src, size);
   }

   Memory::Address_at(src) = dst;
@@ -1732,14 +1828,14 @@ static void MigrateObject(Address dst,
 class StaticPointersToNewGenUpdatingVisitor : public
     StaticNewSpaceVisitor<StaticPointersToNewGenUpdatingVisitor> {
  public:
-  static inline void VisitPointer(Object** p) {
+  static inline void VisitPointer(Heap* heap, Object** p) {
     if (!(*p)->IsHeapObject()) return;

     HeapObject* obj = HeapObject::cast(*p);
     Address old_addr = obj->address();

-    if (
-      ASSERT(
+    if (heap->new_space()->Contains(obj)) {
+      ASSERT(heap->InFromSpace(*p));
       *p = HeapObject::FromAddress(Memory::Address_at(old_addr));
     }
   }
@@ -1750,13 +1846,15 @@ class StaticPointersToNewGenUpdatingVisitor : public
 // It does not expect to encounter pointers to dead objects.
 class PointersToNewGenUpdatingVisitor: public ObjectVisitor {
  public:
+  explicit PointersToNewGenUpdatingVisitor(Heap* heap) : heap_(heap) { }
+
   void VisitPointer(Object** p) {
-    StaticPointersToNewGenUpdatingVisitor::VisitPointer(p);
+    StaticPointersToNewGenUpdatingVisitor::VisitPointer(heap_, p);
   }

   void VisitPointers(Object** start, Object** end) {
     for (Object** p = start; p < end; p++) {
-      StaticPointersToNewGenUpdatingVisitor::VisitPointer(p);
+      StaticPointersToNewGenUpdatingVisitor::VisitPointer(heap_, p);
     }
   }
@@ -1776,6 +1874,8 @@ class PointersToNewGenUpdatingVisitor: public ObjectVisitor {
     VisitPointer(&target);
     rinfo->set_call_address(Code::cast(target)->instruction_start());
   }
+ private:
+  Heap* heap_;
 };
@@ -1786,7 +1886,7 @@ static void UpdatePointerToNewGen(HeapObject** p) {
   if (!(*p)->IsHeapObject()) return;

   Address old_addr = (*p)->address();
-  ASSERT(
+  ASSERT(HEAP->InFromSpace(*p));

   Address new_addr = Memory::Address_at(old_addr);
@@ -1800,39 +1900,42 @@ static void UpdatePointerToNewGen(HeapObject** p) {
 }


-static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
+static String* UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
+                                                                 Object** p) {
   Address old_addr = HeapObject::cast(*p)->address();
   Address new_addr = Memory::Address_at(old_addr);
   return String::cast(HeapObject::FromAddress(new_addr));
 }


-static bool TryPromoteObject(HeapObject* object, int object_size) {
+static bool TryPromoteObject(Heap* heap, HeapObject* object, int object_size) {
   Object* result;

-  if (object_size >
+  if (object_size > heap->MaxObjectSizeInPagedSpace()) {
     MaybeObject* maybe_result =
-
+        heap->lo_space()->AllocateRawFixedArray(object_size);
     if (maybe_result->ToObject(&result)) {
       HeapObject* target = HeapObject::cast(result);
-      MigrateObject(target->address(), object->address(), object_size,
-
+      MigrateObject(heap, target->address(), object->address(), object_size,
+                    true);
+      heap->mark_compact_collector()->tracer()->
           increment_promoted_objects_size(object_size);
       return true;
     }
   } else {
-    OldSpace* target_space =
+    OldSpace* target_space = heap->TargetSpace(object);

-    ASSERT(target_space ==
-           target_space ==
+    ASSERT(target_space == heap->old_pointer_space() ||
+           target_space == heap->old_data_space());
     MaybeObject* maybe_result = target_space->AllocateRaw(object_size);
     if (maybe_result->ToObject(&result)) {
       HeapObject* target = HeapObject::cast(result);
-      MigrateObject(
+      MigrateObject(heap,
+                    target->address(),
                     object->address(),
                     object_size,
-                    target_space ==
-
+                    target_space == heap->old_pointer_space());
+      heap->mark_compact_collector()->tracer()->
          increment_promoted_objects_size(object_size);
       return true;
     }
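TryPromoteObject above chooses a target space before copying: oversized objects go to large-object space, and page-sized ones go to old pointer or old data space depending on whether they can contain pointers. A reduced model of that decision, where the threshold and space names are placeholders for the real heap's:

#include <cstddef>

enum class TargetSpaceModel { kLargeObject, kOldPointer, kOldData };

TargetSpaceModel ChoosePromotionTarget(std::size_t object_size,
                                       std::size_t max_paged_object_size,
                                       bool contains_pointers) {
  if (object_size > max_paged_object_size) {
    return TargetSpaceModel::kLargeObject;  // too big for a regular page
  }
  // Pointer-free objects (e.g. strings, heap numbers) go to old data
  // space, which pointer-updating passes never need to scan.
  return contains_pointers ? TargetSpaceModel::kOldPointer
                           : TargetSpaceModel::kOldData;
}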
@@ -1842,8 +1945,8 @@ static bool TryPromoteObject(HeapObject* object, int object_size) {
|
|
1842
1945
|
}
|
1843
1946
|
|
1844
1947
|
|
1845
|
-
static void SweepNewSpace(NewSpace* space) {
|
1846
|
-
|
1948
|
+
static void SweepNewSpace(Heap* heap, NewSpace* space) {
|
1949
|
+
heap->CheckNewSpaceExpansionCriteria();
|
1847
1950
|
|
1848
1951
|
Address from_bottom = space->bottom();
|
1849
1952
|
Address from_top = space->top();
|
@@ -1863,13 +1966,13 @@ static void SweepNewSpace(NewSpace* space) {
|
|
1863
1966
|
|
1864
1967
|
if (object->IsMarked()) {
|
1865
1968
|
object->ClearMark();
|
1866
|
-
|
1969
|
+
heap->mark_compact_collector()->tracer()->decrement_marked_count();
|
1867
1970
|
|
1868
1971
|
size = object->Size();
|
1869
1972
|
survivors_size += size;
|
1870
1973
|
|
1871
1974
|
// Aggressively promote young survivors to the old space.
|
1872
|
-
if (TryPromoteObject(object, size)) {
|
1975
|
+
if (TryPromoteObject(heap, object, size)) {
|
1873
1976
|
continue;
|
1874
1977
|
}
|
1875
1978
|
|
@@ -1877,7 +1980,8 @@ static void SweepNewSpace(NewSpace* space) {
|
|
1877
1980
|
// Allocation cannot fail at this point: semispaces are of equal size.
|
1878
1981
|
Object* target = space->AllocateRaw(size)->ToObjectUnchecked();
|
1879
1982
|
|
1880
|
-
MigrateObject(
|
1983
|
+
MigrateObject(heap,
|
1984
|
+
HeapObject::cast(target)->address(),
|
1881
1985
|
current,
|
1882
1986
|
size,
|
1883
1987
|
false);
|
@@ -1891,7 +1995,7 @@ static void SweepNewSpace(NewSpace* space) {
|
|
1891
1995
|
}
|
1892
1996
|
|
1893
1997
|
// Second pass: find pointers to new space and update them.
|
1894
|
-
PointersToNewGenUpdatingVisitor updating_visitor;
|
1998
|
+
PointersToNewGenUpdatingVisitor updating_visitor(heap);
|
1895
1999
|
|
1896
2000
|
// Update pointers in to space.
|
1897
2001
|
Address current = space->bottom();
|
@@ -1903,19 +2007,19 @@ static void SweepNewSpace(NewSpace* space) {
|
|
1903
2007
|
}
|
1904
2008
|
|
1905
2009
|
// Update roots.
|
1906
|
-
|
2010
|
+
heap->IterateRoots(&updating_visitor, VISIT_ALL_IN_SCAVENGE);
|
1907
2011
|
LiveObjectList::IterateElements(&updating_visitor);
|
1908
2012
|
|
1909
2013
|
// Update pointers in old spaces.
|
1910
|
-
|
2014
|
+
heap->IterateDirtyRegions(heap->old_pointer_space(),
|
1911
2015
|
&Heap::IteratePointersInDirtyRegion,
|
1912
2016
|
&UpdatePointerToNewGen,
|
1913
|
-
|
2017
|
+
heap->WATERMARK_SHOULD_BE_VALID);
|
1914
2018
|
|
1915
|
-
|
2019
|
+
heap->lo_space()->IterateDirtyRegions(&UpdatePointerToNewGen);
|
1916
2020
|
|
1917
2021
|
// Update pointers from cells.
|
1918
|
-
HeapObjectIterator cell_iterator(
|
2022
|
+
HeapObjectIterator cell_iterator(heap->cell_space());
|
1919
2023
|
for (HeapObject* cell = cell_iterator.next();
|
1920
2024
|
cell != NULL;
|
1921
2025
|
cell = cell_iterator.next()) {
|
@@ -1928,19 +2032,22 @@ static void SweepNewSpace(NewSpace* space) {
|
|
1928
2032
|
}
|
1929
2033
|
|
1930
2034
|
// Update pointer from the global contexts list.
|
1931
|
-
updating_visitor.VisitPointer(
|
2035
|
+
updating_visitor.VisitPointer(heap->global_contexts_list_address());
|
1932
2036
|
|
1933
2037
|
// Update pointers from external string table.
|
1934
|
-
|
2038
|
+
heap->UpdateNewSpaceReferencesInExternalStringTable(
|
1935
2039
|
&UpdateNewSpaceReferenceInExternalStringTableEntry);
|
1936
2040
|
|
1937
2041
|
// All pointers were updated. Update auxiliary allocation info.
|
1938
|
-
|
2042
|
+
heap->IncrementYoungSurvivorsCounter(survivors_size);
|
1939
2043
|
space->set_age_mark(space->top());
|
2044
|
+
|
2045
|
+
// Update JSFunction pointers from the runtime profiler.
|
2046
|
+
heap->isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
|
1940
2047
|
}
|
1941
2048
|
|
1942
2049
|
|
1943
|
-
static void SweepSpace(PagedSpace* space) {
|
2050
|
+
static void SweepSpace(Heap* heap, PagedSpace* space) {
|
1944
2051
|
PageIterator it(space, PageIterator::PAGES_IN_USE);
|
1945
2052
|
|
1946
2053
|
// During sweeping of paged space we are trying to find longest sequences
|
@@ -1978,7 +2085,7 @@ static void SweepSpace(PagedSpace* space) {
|
|
1978
2085
|
object = HeapObject::FromAddress(current);
|
1979
2086
|
if (object->IsMarked()) {
|
1980
2087
|
object->ClearMark();
|
1981
|
-
|
2088
|
+
heap->mark_compact_collector()->tracer()->decrement_marked_count();
|
1982
2089
|
|
1983
2090
|
if (!is_previous_alive) { // Transition from free to live.
|
1984
2091
|
space->DeallocateBlock(free_start,
|
@@ -1987,7 +2094,8 @@ static void SweepSpace(PagedSpace* space) {
|
|
1987
2094
|
is_previous_alive = true;
|
1988
2095
|
}
|
1989
2096
|
} else {
|
1990
|
-
|
2097
|
+
heap->mark_compact_collector()->ReportDeleteIfNeeded(
|
2098
|
+
object, heap->isolate());
|
1991
2099
|
if (is_previous_alive) { // Transition from live to free.
|
1992
2100
|
free_start = current;
|
1993
2101
|
is_previous_alive = false;
|
@@ -2087,24 +2195,24 @@ void MarkCompactCollector::EncodeForwardingAddresses() {
|
|
2087
2195
|
// Objects in the active semispace of the young generation may be
|
2088
2196
|
// relocated to the inactive semispace (if not promoted). Set the
|
2089
2197
|
// relocation info to the beginning of the inactive semispace.
|
2090
|
-
|
2198
|
+
heap()->new_space()->MCResetRelocationInfo();
|
2091
2199
|
|
2092
2200
|
// Compute the forwarding pointers in each space.
|
2093
2201
|
EncodeForwardingAddressesInPagedSpace<MCAllocateFromOldPointerSpace,
|
2094
2202
|
ReportDeleteIfNeeded>(
|
2095
|
-
|
2203
|
+
heap()->old_pointer_space());
|
2096
2204
|
|
2097
2205
|
EncodeForwardingAddressesInPagedSpace<MCAllocateFromOldDataSpace,
|
2098
2206
|
IgnoreNonLiveObject>(
|
2099
|
-
|
2207
|
+
heap()->old_data_space());
|
2100
2208
|
|
2101
2209
|
EncodeForwardingAddressesInPagedSpace<MCAllocateFromCodeSpace,
|
2102
2210
|
ReportDeleteIfNeeded>(
|
2103
|
-
|
2211
|
+
heap()->code_space());
|
2104
2212
|
|
2105
2213
|
EncodeForwardingAddressesInPagedSpace<MCAllocateFromCellSpace,
|
2106
2214
|
IgnoreNonLiveObject>(
|
2107
|
-
|
2215
|
+
heap()->cell_space());
|
2108
2216
|
|
2109
2217
|
|
2110
2218
|
// Compute new space next to last after the old and code spaces have been
|
@@ -2116,25 +2224,26 @@ void MarkCompactCollector::EncodeForwardingAddresses() {
   // non-live map pointers to get the sizes of non-live objects.
   EncodeForwardingAddressesInPagedSpace<MCAllocateFromMapSpace,
                                         IgnoreNonLiveObject>(
-      Heap::map_space());
+      heap()->map_space());
 
   // Write relocation info to the top page, so we can use it later. This is
   // done after promoting objects from the new space so we get the correct
   // allocation top.
-  Heap::old_pointer_space()->MCWriteRelocationInfoToPage();
-  Heap::old_data_space()->MCWriteRelocationInfoToPage();
-  Heap::code_space()->MCWriteRelocationInfoToPage();
-  Heap::map_space()->MCWriteRelocationInfoToPage();
-  Heap::cell_space()->MCWriteRelocationInfoToPage();
+  heap()->old_pointer_space()->MCWriteRelocationInfoToPage();
+  heap()->old_data_space()->MCWriteRelocationInfoToPage();
+  heap()->code_space()->MCWriteRelocationInfoToPage();
+  heap()->map_space()->MCWriteRelocationInfoToPage();
+  heap()->cell_space()->MCWriteRelocationInfoToPage();
 }
 
 
 class MapIterator : public HeapObjectIterator {
  public:
-  MapIterator() : HeapObjectIterator(Heap::map_space(), &SizeCallback) { }
+  explicit MapIterator(Heap* heap)
+      : HeapObjectIterator(heap->map_space(), &SizeCallback) { }
 
-  explicit MapIterator(Address start)
-      : HeapObjectIterator(Heap::map_space(), start, &SizeCallback) { }
+  MapIterator(Heap* heap, Address start)
+      : HeapObjectIterator(heap->map_space(), start, &SizeCallback) { }
 
  private:
   static int SizeCallback(HeapObject* unused) {
@@ -2146,10 +2255,12 @@ class MapIterator : public HeapObjectIterator {
 
 class MapCompact {
  public:
-  explicit MapCompact(int live_maps)
-    : live_maps_(live_maps),
-      to_evacuate_start_(Heap::map_space()->TopAfterCompaction(live_maps)),
-      map_to_evacuate_it_(to_evacuate_start_),
+  explicit MapCompact(Heap* heap, int live_maps)
+    : heap_(heap),
+      live_maps_(live_maps),
+      to_evacuate_start_(heap->map_space()->TopAfterCompaction(live_maps)),
+      vacant_map_it_(heap),
+      map_to_evacuate_it_(heap, to_evacuate_start_),
       first_map_to_evacuate_(
           reinterpret_cast<Map*>(HeapObject::FromAddress(to_evacuate_start_))) {
   }
@@ -2169,37 +2280,44 @@ class MapCompact {
   }
 
   void UpdateMapPointersInRoots() {
-    Heap::IterateRoots(&map_updating_visitor_, VISIT_ONLY_STRONG);
-    GlobalHandles::IterateWeakRoots(&map_updating_visitor_);
-    LiveObjectList::IterateElements(&map_updating_visitor_);
+    MapUpdatingVisitor map_updating_visitor;
+    heap()->IterateRoots(&map_updating_visitor, VISIT_ONLY_STRONG);
+    heap()->isolate()->global_handles()->IterateWeakRoots(
+        &map_updating_visitor);
+    LiveObjectList::IterateElements(&map_updating_visitor);
   }
 
   void UpdateMapPointersInPagedSpace(PagedSpace* space) {
-    ASSERT(space != Heap::map_space());
+    ASSERT(space != heap()->map_space());
 
     PageIterator it(space, PageIterator::PAGES_IN_USE);
     while (it.has_next()) {
       Page* p = it.next();
-      UpdateMapPointersInRange(p->ObjectAreaStart(), p->AllocationTop());
+      UpdateMapPointersInRange(heap(),
+                               p->ObjectAreaStart(),
+                               p->AllocationTop());
     }
   }
 
   void UpdateMapPointersInNewSpace() {
-    NewSpace* space = Heap::new_space();
-    UpdateMapPointersInRange(space->bottom(), space->top());
+    NewSpace* space = heap()->new_space();
+    UpdateMapPointersInRange(heap(), space->bottom(), space->top());
   }
 
   void UpdateMapPointersInLargeObjectSpace() {
-    LargeObjectIterator it(Heap::lo_space());
+    LargeObjectIterator it(heap()->lo_space());
     for (HeapObject* obj = it.next(); obj != NULL; obj = it.next())
-      UpdateMapPointersInObject(obj);
+      UpdateMapPointersInObject(heap(), obj);
   }
 
   void Finish() {
-    Heap::map_space()->FinishCompaction(to_evacuate_start_, live_maps_);
+    heap()->map_space()->FinishCompaction(to_evacuate_start_, live_maps_);
   }
 
+  inline Heap* heap() const { return heap_; }
+
  private:
+  Heap* heap_;
   int live_maps_;
   Address to_evacuate_start_;
   MapIterator vacant_map_it_;
@@ -2209,6 +2327,8 @@ class MapCompact {
   // Helper class for updating map pointers in HeapObjects.
   class MapUpdatingVisitor: public ObjectVisitor {
    public:
+    MapUpdatingVisitor() {}
+
     void VisitPointer(Object** p) {
       UpdateMapPointer(p);
     }
@@ -2231,8 +2351,6 @@ class MapCompact {
     }
   };
 
-  static MapUpdatingVisitor map_updating_visitor_;
-
   static Map* NextMap(MapIterator* it, HeapObject* last, bool live) {
     while (true) {
       HeapObject* next = it->next();
@@ -2266,9 +2384,8 @@ class MapCompact {
 
     ASSERT(Map::kSize % 4 == 0);
 
-    Heap::CopyBlockToOldSpaceAndUpdateRegionMarks(vacant_map->address(),
-                                                  map_to_evacuate->address(),
-                                                  Map::kSize);
+    map_to_evacuate->heap()->CopyBlockToOldSpaceAndUpdateRegionMarks(
+        vacant_map->address(), map_to_evacuate->address(), Map::kSize);
 
     ASSERT(vacant_map->IsMap());  // Due to memcpy above.
 
@@ -2288,15 +2405,15 @@ class MapCompact {
     return new_map;
   }
 
-  static int UpdateMapPointersInObject(HeapObject* obj) {
+  static int UpdateMapPointersInObject(Heap* heap, HeapObject* obj) {
     ASSERT(!obj->IsMarked());
     Map* map = obj->map();
-    ASSERT(Heap::map_space()->Contains(map));
+    ASSERT(heap->map_space()->Contains(map));
     MapWord map_word = map->map_word();
     ASSERT(!map_word.IsMarked());
     if (map_word.IsOverflowed()) {
       Map* new_map = GetForwardedMap(map_word);
-      ASSERT(Heap::map_space()->Contains(new_map));
+      ASSERT(heap->map_space()->Contains(new_map));
       obj->set_map(new_map);
 
 #ifdef DEBUG
@@ -2310,16 +2427,17 @@ class MapCompact {
     }
 
     int size = obj->SizeFromMap(map);
-    obj->IterateBody(map->instance_type(), size, &map_updating_visitor_);
+    MapUpdatingVisitor map_updating_visitor;
+    obj->IterateBody(map->instance_type(), size, &map_updating_visitor);
     return size;
   }
 
-  static void UpdateMapPointersInRange(Address start, Address end) {
+  static void UpdateMapPointersInRange(Heap* heap, Address start, Address end) {
     HeapObject* object;
     int size;
     for (Address current = start; current < end; current += size) {
       object = HeapObject::FromAddress(current);
-      size = UpdateMapPointersInObject(object);
+      size = UpdateMapPointersInObject(heap, object);
       ASSERT(size > 0);
     }
   }
@@ -2336,8 +2454,6 @@ class MapCompact {
 #endif
 };
 
-MapCompact::MapUpdatingVisitor MapCompact::map_updating_visitor_;
-
 
 void MarkCompactCollector::SweepSpaces() {
   GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP);
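The deletion just above removes the out-of-class definition of MapCompact's shared static visitor; its in-class declaration was dropped a few hunks earlier. Once several isolates can run collectors independently, a mutable static visitor would be shared state across all of them, so each caller now builds a MapUpdatingVisitor on the stack, as the MapUpdatingVisitor map_updating_visitor; additions earlier in this diff show. A minimal sketch of the before/after, with invented names:

    // Sketch only: VisitorSketch and UpdateRootsSketch are invented names.
    struct VisitorSketch {
      int updated = 0;
      void VisitPointer(void** p) { if (*p) ++updated; }
    };

    // Before: one mutable file-scope instance shared by every caller:
    //   static VisitorSketch visitor_;   // racy once two isolates collect
    // After: each caller owns a short-lived, isolate-local visitor:
    void UpdateRootsSketch() {
      VisitorSketch visitor;             // stack-allocated, nothing shared
      void* root = nullptr;
      visitor.VisitPointer(&root);
    }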
@@ -2349,26 +2465,26 @@ void MarkCompactCollector::SweepSpaces() {
   // the map space last because freeing non-live maps overwrites them and
   // the other spaces rely on possibly non-live maps to get the sizes for
   // non-live objects.
-  SweepSpace(Heap::old_pointer_space());
-  SweepSpace(Heap::old_data_space());
-  SweepSpace(Heap::code_space());
-  SweepSpace(Heap::cell_space());
+  SweepSpace(heap(), heap()->old_pointer_space());
+  SweepSpace(heap(), heap()->old_data_space());
+  SweepSpace(heap(), heap()->code_space());
+  SweepSpace(heap(), heap()->cell_space());
   { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE);
-    SweepNewSpace(Heap::new_space());
+    SweepNewSpace(heap(), heap()->new_space());
   }
-  SweepSpace(Heap::map_space());
+  SweepSpace(heap(), heap()->map_space());
 
-  Heap::IterateDirtyRegions(Heap::map_space(),
-                            &Heap::IteratePointersInDirtyMapsRegion,
-                            &UpdatePointerToNewGen,
-                            Heap::WATERMARK_SHOULD_BE_VALID);
+  heap()->IterateDirtyRegions(heap()->map_space(),
+                              &heap()->IteratePointersInDirtyMapsRegion,
+                              &UpdatePointerToNewGen,
+                              heap()->WATERMARK_SHOULD_BE_VALID);
 
-  intptr_t live_maps_size = Heap::map_space()->Size();
+  intptr_t live_maps_size = heap()->map_space()->Size();
   int live_maps = static_cast<int>(live_maps_size / Map::kSize);
   ASSERT(live_map_objects_size_ == live_maps_size);
 
-  if (Heap::map_space()->NeedsCompaction(live_maps)) {
-    MapCompact map_compact(live_maps);
+  if (heap()->map_space()->NeedsCompaction(live_maps)) {
+    MapCompact map_compact(heap(), live_maps);
 
     map_compact.CompactMaps();
     map_compact.UpdateMapPointersInRoots();
@@ -2376,7 +2492,7 @@ void MarkCompactCollector::SweepSpaces() {
     PagedSpaces spaces;
     for (PagedSpace* space = spaces.next();
          space != NULL; space = spaces.next()) {
-      if (space == Heap::map_space()) continue;
+      if (space == heap()->map_space()) continue;
       map_compact.UpdateMapPointersInPagedSpace(space);
     }
     map_compact.UpdateMapPointersInNewSpace();
@@ -2395,7 +2511,7 @@ void MarkCompactCollector::SweepSpaces() {
 int MarkCompactCollector::IterateLiveObjectsInRange(
     Address start,
     Address end,
-    HeapObjectCallback size_func) {
+    LiveObjectCallback size_func) {
   int live_objects_size = 0;
   Address current = start;
   while (current < end) {
@@ -2405,7 +2521,7 @@ int MarkCompactCollector::IterateLiveObjectsInRange(
     } else if (encoded_map == kMultiFreeEncoding) {
       current += Memory::int_at(current + kIntSize);
     } else {
-      int size = size_func(HeapObject::FromAddress(current));
+      int size = (this->*size_func)(HeapObject::FromAddress(current));
      current += size;
       live_objects_size += size;
     }
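In the hunk above, size_func is no longer a plain function pointer (the old HeapObjectCallback) but a pointer to a member function of MarkCompactCollector (the new LiveObjectCallback), because callbacks like UpdatePointersInOldObject are no longer static: they need the collector's heap(). A member-function pointer cannot be invoked like a free function; it must be bound to an object with ->*, which is exactly what (this->*size_func)(...) does. A compilable sketch with invented names:

    #include <cstdio>

    class CollectorSketch {
     public:
      // Analogous to LiveObjectCallback: a pointer-to-member-function type.
      typedef int (CollectorSketch::*LiveObjectCallback)(int object);

      int IterateLiveObjects(LiveObjectCallback size_func) {
        // size_func(7) would not compile; the member pointer has to be
        // bound to an instance first:
        return (this->*size_func)(7);
      }

      int SizeOfObject(int object) { return object * 2; }
    };

    int main() {
      CollectorSketch collector;
      // Note the mandatory class qualification when taking the address,
      // mirroring &MarkCompactCollector::UpdatePointersInOldObject at the
      // call sites later in this diff.
      std::printf("%d\n",
                  collector.IterateLiveObjects(&CollectorSketch::SizeOfObject));
      return 0;
    }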
@@ -2414,15 +2530,15 @@ int MarkCompactCollector::IterateLiveObjectsInRange(
 }
 
 
-int MarkCompactCollector::IterateLiveObjects(NewSpace* space,
-                                             HeapObjectCallback size_f) {
+int MarkCompactCollector::IterateLiveObjects(
+    NewSpace* space, LiveObjectCallback size_f) {
   ASSERT(MARK_LIVE_OBJECTS < state_ && state_ <= RELOCATE_OBJECTS);
   return IterateLiveObjectsInRange(space->bottom(), space->top(), size_f);
 }
 
 
-int MarkCompactCollector::IterateLiveObjects(PagedSpace* space,
-                                             HeapObjectCallback size_f) {
+int MarkCompactCollector::IterateLiveObjects(
+    PagedSpace* space, LiveObjectCallback size_f) {
   ASSERT(MARK_LIVE_OBJECTS < state_ && state_ <= RELOCATE_OBJECTS);
   int total = 0;
   PageIterator it(space, PageIterator::PAGES_IN_USE);
@@ -2442,6 +2558,8 @@ int MarkCompactCollector::IterateLiveObjects(PagedSpace* space,
 // Helper class for updating pointers in HeapObjects.
 class UpdatingVisitor: public ObjectVisitor {
  public:
+  explicit UpdatingVisitor(Heap* heap) : heap_(heap) {}
+
   void VisitPointer(Object** p) {
     UpdatePointer(p);
   }
@@ -2470,6 +2588,8 @@ class UpdatingVisitor: public ObjectVisitor {
                     reinterpret_cast<Code*>(target)->instruction_start());
   }
 
+  inline Heap* heap() const { return heap_; }
+
  private:
   void UpdatePointer(Object** p) {
     if (!(*p)->IsHeapObject()) return;
@@ -2477,27 +2597,27 @@ class UpdatingVisitor: public ObjectVisitor {
     HeapObject* obj = HeapObject::cast(*p);
     Address old_addr = obj->address();
     Address new_addr;
-    ASSERT(!Heap::InFromSpace(obj));
+    ASSERT(!heap()->InFromSpace(obj));
 
-    if (Heap::new_space()->Contains(obj)) {
+    if (heap()->new_space()->Contains(obj)) {
       Address forwarding_pointer_addr =
-          Heap::new_space()->FromSpaceLow() +
-          Heap::new_space()->ToSpaceOffsetForAddress(old_addr);
+          heap()->new_space()->FromSpaceLow() +
+          heap()->new_space()->ToSpaceOffsetForAddress(old_addr);
       new_addr = Memory::Address_at(forwarding_pointer_addr);
 
 #ifdef DEBUG
-      ASSERT(Heap::old_pointer_space()->Contains(new_addr) ||
-             Heap::old_data_space()->Contains(new_addr) ||
-             Heap::new_space()->FromSpaceContains(new_addr) ||
-             Heap::lo_space()->Contains(HeapObject::FromAddress(new_addr)));
-
-      if (Heap::new_space()->FromSpaceContains(new_addr)) {
-        ASSERT(Heap::new_space()->FromSpaceOffsetForAddress(new_addr) <=
-               Heap::new_space()->ToSpaceOffsetForAddress(old_addr));
+      ASSERT(heap()->old_pointer_space()->Contains(new_addr) ||
+             heap()->old_data_space()->Contains(new_addr) ||
+             heap()->new_space()->FromSpaceContains(new_addr) ||
+             heap()->lo_space()->Contains(HeapObject::FromAddress(new_addr)));
+
+      if (heap()->new_space()->FromSpaceContains(new_addr)) {
+        ASSERT(heap()->new_space()->FromSpaceOffsetForAddress(new_addr) <=
+               heap()->new_space()->ToSpaceOffsetForAddress(old_addr));
       }
 #endif
 
-    } else if (Heap::lo_space()->Contains(obj)) {
+    } else if (heap()->lo_space()->Contains(obj)) {
       // Don't move objects in the large object space.
       return;
 
@@ -2526,6 +2646,8 @@ class UpdatingVisitor: public ObjectVisitor {
     }
 #endif
   }
+
+  Heap* heap_;
 };
 
 
@@ -2534,30 +2656,34 @@ void MarkCompactCollector::UpdatePointers() {
   ASSERT(state_ == ENCODE_FORWARDING_ADDRESSES);
   state_ = UPDATE_POINTERS;
 #endif
-  UpdatingVisitor updating_visitor;
-  Heap::IterateRoots(&updating_visitor, VISIT_ONLY_STRONG);
-  GlobalHandles::IterateWeakRoots(&updating_visitor);
+  UpdatingVisitor updating_visitor(heap());
+  heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact(
+      &updating_visitor);
+  heap()->IterateRoots(&updating_visitor, VISIT_ONLY_STRONG);
+  heap()->isolate()->global_handles()->IterateWeakRoots(&updating_visitor);
 
   // Update the pointer to the head of the weak list of global contexts.
-  updating_visitor.VisitPointer(&Heap::global_contexts_list_);
+  updating_visitor.VisitPointer(&heap()->global_contexts_list_);
 
   LiveObjectList::IterateElements(&updating_visitor);
 
-  int live_maps_size = IterateLiveObjects(Heap::map_space(),
-                                          &UpdatePointersInOldObject);
-  int live_pointer_olds_size = IterateLiveObjects(Heap::old_pointer_space(),
-                                                  &UpdatePointersInOldObject);
-  int live_data_olds_size = IterateLiveObjects(Heap::old_data_space(),
-                                               &UpdatePointersInOldObject);
-  int live_codes_size = IterateLiveObjects(Heap::code_space(),
-                                           &UpdatePointersInOldObject);
-  int live_cells_size = IterateLiveObjects(Heap::cell_space(),
-                                           &UpdatePointersInOldObject);
-  int live_news_size = IterateLiveObjects(Heap::new_space(),
-                                          &UpdatePointersInNewObject);
+  int live_maps_size = IterateLiveObjects(
+      heap()->map_space(), &MarkCompactCollector::UpdatePointersInOldObject);
+  int live_pointer_olds_size = IterateLiveObjects(
+      heap()->old_pointer_space(),
+      &MarkCompactCollector::UpdatePointersInOldObject);
+  int live_data_olds_size = IterateLiveObjects(
+      heap()->old_data_space(),
+      &MarkCompactCollector::UpdatePointersInOldObject);
+  int live_codes_size = IterateLiveObjects(
+      heap()->code_space(), &MarkCompactCollector::UpdatePointersInOldObject);
+  int live_cells_size = IterateLiveObjects(
+      heap()->cell_space(), &MarkCompactCollector::UpdatePointersInOldObject);
+  int live_news_size = IterateLiveObjects(
+      heap()->new_space(), &MarkCompactCollector::UpdatePointersInNewObject);
 
   // Large objects do not move, the map word can be updated directly.
-  LargeObjectIterator it(Heap::lo_space());
+  LargeObjectIterator it(heap()->lo_space());
   for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
     UpdatePointersInNewObject(obj);
   }
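UpdatingVisitor now receives the heap through its constructor and stores it in heap_, instead of its methods consulting Heap:: statics, and every construction site below changes to UpdatingVisitor updating_visitor(heap()); accordingly. A minimal sketch of this constructor-injection pattern, with invented names (HeapCtx, UpdatingVisitorSketch), not code from the V8 tree:

    // Sketch only: HeapCtx and UpdatingVisitorSketch are invented names.
    class HeapCtx {
     public:
      bool InNewSpace(void* addr) const { return addr != nullptr; }
    };

    class UpdatingVisitorSketch {
     public:
      explicit UpdatingVisitorSketch(HeapCtx* heap) : heap_(heap) {}

      void VisitPointer(void** p) {
        // Methods query the injected heap instead of a global Heap class.
        if (heap_->InNewSpace(*p)) { /* ... forward the pointer ... */ }
      }

     private:
      HeapCtx* heap_;
    };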
@@ -2584,8 +2710,8 @@ int MarkCompactCollector::UpdatePointersInNewObject(HeapObject* obj) {
 
   Address forwarded = GetForwardingAddressInOldSpace(old_map);
 
-  ASSERT(Heap::map_space()->Contains(old_map));
-  ASSERT(Heap::map_space()->Contains(forwarded));
+  ASSERT(heap()->map_space()->Contains(old_map));
+  ASSERT(heap()->map_space()->Contains(forwarded));
 #ifdef DEBUG
   if (FLAG_gc_verbose) {
     PrintF("update %p : %p -> %p\n", obj->address(), old_map->address(),
@@ -2600,7 +2726,7 @@ int MarkCompactCollector::UpdatePointersInNewObject(HeapObject* obj) {
   int obj_size = obj->SizeFromMap(old_map);
 
   // Update pointers in the object body.
-  UpdatingVisitor updating_visitor;
+  UpdatingVisitor updating_visitor(heap());
   obj->IterateBody(old_map->instance_type(), obj_size, &updating_visitor);
   return obj_size;
 }
@@ -2609,8 +2735,8 @@ int MarkCompactCollector::UpdatePointersInNewObject(HeapObject* obj) {
 int MarkCompactCollector::UpdatePointersInOldObject(HeapObject* obj) {
   // Decode the map pointer.
   MapWord encoding = obj->map_word();
-  Address map_addr = encoding.DecodeMapAddress(Heap::map_space());
-  ASSERT(Heap::map_space()->Contains(HeapObject::FromAddress(map_addr)));
+  Address map_addr = encoding.DecodeMapAddress(heap()->map_space());
+  ASSERT(heap()->map_space()->Contains(HeapObject::FromAddress(map_addr)));
 
   // At this point, the first word of map_addr is also encoded, cannot
   // cast it to Map* using Map::cast.
@@ -2631,7 +2757,7 @@ int MarkCompactCollector::UpdatePointersInOldObject(HeapObject* obj) {
 #endif
 
   // Update pointers in the object body.
-  UpdatingVisitor updating_visitor;
+  UpdatingVisitor updating_visitor(heap());
   obj->IterateBody(type, obj_size, &updating_visitor);
   return obj_size;
 }
@@ -2687,18 +2813,19 @@ void MarkCompactCollector::RelocateObjects() {
 #endif
   // Relocates objects, always relocate map objects first. Relocating
   // objects in other space relies on map objects to get object size.
-  int live_maps_size = IterateLiveObjects(Heap::map_space(),
-                                          &RelocateMapObject);
-  int live_pointer_olds_size = IterateLiveObjects(Heap::old_pointer_space(),
-                                                  &RelocateOldPointerObject);
-  int live_data_olds_size = IterateLiveObjects(Heap::old_data_space(),
-                                               &RelocateOldDataObject);
-  int live_codes_size = IterateLiveObjects(Heap::code_space(),
-                                           &RelocateCodeObject);
-  int live_cells_size = IterateLiveObjects(Heap::cell_space(),
-                                           &RelocateCellObject);
-  int live_news_size = IterateLiveObjects(Heap::new_space(),
-                                          &RelocateNewObject);
+  int live_maps_size = IterateLiveObjects(
+      heap()->map_space(), &MarkCompactCollector::RelocateMapObject);
+  int live_pointer_olds_size = IterateLiveObjects(
+      heap()->old_pointer_space(),
+      &MarkCompactCollector::RelocateOldPointerObject);
+  int live_data_olds_size = IterateLiveObjects(
+      heap()->old_data_space(), &MarkCompactCollector::RelocateOldDataObject);
+  int live_codes_size = IterateLiveObjects(
+      heap()->code_space(), &MarkCompactCollector::RelocateCodeObject);
+  int live_cells_size = IterateLiveObjects(
+      heap()->cell_space(), &MarkCompactCollector::RelocateCellObject);
+  int live_news_size = IterateLiveObjects(
+      heap()->new_space(), &MarkCompactCollector::RelocateNewObject);
 
   USE(live_maps_size);
   USE(live_pointer_olds_size);
@@ -2714,28 +2841,28 @@ void MarkCompactCollector::RelocateObjects() {
   ASSERT(live_news_size == live_young_objects_size_);
 
   // Flip from and to spaces
-  Heap::new_space()->Flip();
+  heap()->new_space()->Flip();
 
-  Heap::new_space()->MCCommitRelocationInfo();
+  heap()->new_space()->MCCommitRelocationInfo();
 
   // Set age_mark to bottom in to space
-  Address mark = Heap::new_space()->bottom();
-  Heap::new_space()->set_age_mark(mark);
+  Address mark = heap()->new_space()->bottom();
+  heap()->new_space()->set_age_mark(mark);
 
   PagedSpaces spaces;
   for (PagedSpace* space = spaces.next(); space != NULL; space = spaces.next())
     space->MCCommitRelocationInfo();
 
-  Heap::CheckNewSpaceExpansionCriteria();
-  Heap::IncrementYoungSurvivorsCounter(live_news_size);
+  heap()->CheckNewSpaceExpansionCriteria();
+  heap()->IncrementYoungSurvivorsCounter(live_news_size);
 }
 
 
 int MarkCompactCollector::RelocateMapObject(HeapObject* obj) {
   // Recover map pointer.
   MapWord encoding = obj->map_word();
-  Address map_addr = encoding.DecodeMapAddress(Heap::map_space());
-  ASSERT(Heap::map_space()->Contains(HeapObject::FromAddress(map_addr)));
+  Address map_addr = encoding.DecodeMapAddress(heap()->map_space());
+  ASSERT(heap()->map_space()->Contains(HeapObject::FromAddress(map_addr)));
 
   // Get forwarding address before resetting map pointer
   Address new_addr = GetForwardingAddressInOldSpace(obj);
@@ -2748,9 +2875,9 @@ int MarkCompactCollector::RelocateMapObject(HeapObject* obj) {
 
   if (new_addr != old_addr) {
     // Move contents.
-    Heap::MoveBlockToOldSpaceAndUpdateRegionMarks(new_addr,
-                                                  old_addr,
-                                                  Map::kSize);
+    heap()->MoveBlockToOldSpaceAndUpdateRegionMarks(new_addr,
+                                                    old_addr,
+                                                    Map::kSize);
   }
 
 #ifdef DEBUG
@@ -2794,8 +2921,8 @@ int MarkCompactCollector::RelocateOldNonCodeObject(HeapObject* obj,
                                                    PagedSpace* space) {
   // Recover map pointer.
   MapWord encoding = obj->map_word();
-  Address map_addr = encoding.DecodeMapAddress(Heap::map_space());
-  ASSERT(Heap::map_space()->Contains(map_addr));
+  Address map_addr = encoding.DecodeMapAddress(heap()->map_space());
+  ASSERT(heap()->map_space()->Contains(map_addr));
 
   // Get forwarding address before resetting map pointer.
   Address new_addr = GetForwardingAddressInOldSpace(obj);
@@ -2807,12 +2934,12 @@ int MarkCompactCollector::RelocateOldNonCodeObject(HeapObject* obj,
 
   if (new_addr != old_addr) {
     // Move contents.
-    if (space == Heap::old_data_space()) {
-      Heap::MoveBlock(new_addr, old_addr, obj_size);
+    if (space == heap()->old_data_space()) {
+      heap()->MoveBlock(new_addr, old_addr, obj_size);
     } else {
-      Heap::MoveBlockToOldSpaceAndUpdateRegionMarks(new_addr,
-                                                    old_addr,
-                                                    obj_size);
+      heap()->MoveBlockToOldSpaceAndUpdateRegionMarks(new_addr,
+                                                      old_addr,
+                                                      obj_size);
     }
   }
 
@@ -2820,46 +2947,47 @@ int MarkCompactCollector::RelocateOldNonCodeObject(HeapObject* obj,
 
   HeapObject* copied_to = HeapObject::FromAddress(new_addr);
   if (copied_to->IsSharedFunctionInfo()) {
-    PROFILE(SharedFunctionInfoMoveEvent(old_addr, new_addr));
+    PROFILE(heap()->isolate(),
+            SharedFunctionInfoMoveEvent(old_addr, new_addr));
   }
-  HEAP_PROFILE(ObjectMoveEvent(old_addr, new_addr));
+  HEAP_PROFILE(heap(), ObjectMoveEvent(old_addr, new_addr));
 
   return obj_size;
 }
 
 
 int MarkCompactCollector::RelocateOldPointerObject(HeapObject* obj) {
-  return RelocateOldNonCodeObject(obj, Heap::old_pointer_space());
+  return RelocateOldNonCodeObject(obj, heap()->old_pointer_space());
 }
 
 
 int MarkCompactCollector::RelocateOldDataObject(HeapObject* obj) {
-  return RelocateOldNonCodeObject(obj, Heap::old_data_space());
+  return RelocateOldNonCodeObject(obj, heap()->old_data_space());
 }
 
 
 int MarkCompactCollector::RelocateCellObject(HeapObject* obj) {
-  return RelocateOldNonCodeObject(obj, Heap::cell_space());
+  return RelocateOldNonCodeObject(obj, heap()->cell_space());
 }
 
 
 int MarkCompactCollector::RelocateCodeObject(HeapObject* obj) {
   // Recover map pointer.
   MapWord encoding = obj->map_word();
-  Address map_addr = encoding.DecodeMapAddress(Heap::map_space());
-  ASSERT(Heap::map_space()->Contains(HeapObject::FromAddress(map_addr)));
+  Address map_addr = encoding.DecodeMapAddress(heap()->map_space());
+  ASSERT(heap()->map_space()->Contains(HeapObject::FromAddress(map_addr)));
 
   // Get forwarding address before resetting map pointer
   Address new_addr = GetForwardingAddressInOldSpace(obj);
 
   // Reset the map pointer.
-  int obj_size = RestoreMap(obj, Heap::code_space(), new_addr, map_addr);
+  int obj_size = RestoreMap(obj, heap()->code_space(), new_addr, map_addr);
 
   Address old_addr = obj->address();
 
   if (new_addr != old_addr) {
     // Move contents.
-    Heap::MoveBlock(new_addr, old_addr, obj_size);
+    heap()->MoveBlock(new_addr, old_addr, obj_size);
   }
 
   HeapObject* copied_to = HeapObject::FromAddress(new_addr);
@@ -2867,9 +2995,9 @@ int MarkCompactCollector::RelocateCodeObject(HeapObject* obj) {
     // May also update inline cache target.
     Code::cast(copied_to)->Relocate(new_addr - old_addr);
     // Notify the logger that compiled code has moved.
-    PROFILE(CodeMoveEvent(old_addr, new_addr));
+    PROFILE(heap()->isolate(), CodeMoveEvent(old_addr, new_addr));
   }
-  HEAP_PROFILE(ObjectMoveEvent(old_addr, new_addr));
+  HEAP_PROFILE(heap(), ObjectMoveEvent(old_addr, new_addr));
 
   return obj_size;
 }
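Throughout these hunks the PROFILE and HEAP_PROFILE macros gain an explicit first argument naming the isolate (or heap) whose logger should record the event, where they previously expanded against process-global state. The real macro definitions live elsewhere in the V8 tree (log.h, heap-profiler.h) and are not part of this diff; the following is only a hypothetical reconstruction of the shape, with invented names:

    #include <cstdio>

    // Invented stand-in for v8::internal::Isolate.
    struct IsolateSketch {
      void CodeMoveEvent(const void* from, const void* to) {
        std::printf("code moved %p -> %p\n", from, to);
      }
    };

    // Hypothetical macro shape: route the event through an explicit isolate
    // instead of a global logger.
    #define PROFILE_SKETCH(isolate, Call) \
      do { (isolate)->Call; } while (false)

    int main() {
      IsolateSketch isolate;
      int a = 0, b = 0;
      PROFILE_SKETCH(&isolate, CodeMoveEvent(&a, &b));
      return 0;
    }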
@@ -2880,28 +3008,28 @@ int MarkCompactCollector::RelocateNewObject(HeapObject* obj) {
 
   // Get forwarding address
   Address old_addr = obj->address();
-  int offset = Heap::new_space()->ToSpaceOffsetForAddress(old_addr);
+  int offset = heap()->new_space()->ToSpaceOffsetForAddress(old_addr);
 
   Address new_addr =
-      Memory::Address_at(Heap::new_space()->FromSpaceLow() + offset);
+      Memory::Address_at(heap()->new_space()->FromSpaceLow() + offset);
 
 #ifdef DEBUG
-  if (Heap::new_space()->FromSpaceContains(new_addr)) {
-    ASSERT(Heap::new_space()->FromSpaceOffsetForAddress(new_addr) <=
-           Heap::new_space()->ToSpaceOffsetForAddress(old_addr));
+  if (heap()->new_space()->FromSpaceContains(new_addr)) {
+    ASSERT(heap()->new_space()->FromSpaceOffsetForAddress(new_addr) <=
+           heap()->new_space()->ToSpaceOffsetForAddress(old_addr));
   } else {
-    ASSERT(Heap::TargetSpace(obj) == Heap::old_pointer_space() ||
-           Heap::TargetSpace(obj) == Heap::old_data_space());
+    ASSERT(heap()->TargetSpace(obj) == heap()->old_pointer_space() ||
+           heap()->TargetSpace(obj) == heap()->old_data_space());
   }
 #endif
 
   // New and old addresses cannot overlap.
-  if (Heap::InNewSpace(HeapObject::FromAddress(new_addr))) {
-    Heap::CopyBlock(new_addr, old_addr, obj_size);
+  if (heap()->InNewSpace(HeapObject::FromAddress(new_addr))) {
+    heap()->CopyBlock(new_addr, old_addr, obj_size);
   } else {
-    Heap::CopyBlockToOldSpaceAndUpdateRegionMarks(new_addr,
-                                                  old_addr,
-                                                  obj_size);
+    heap()->CopyBlockToOldSpaceAndUpdateRegionMarks(new_addr,
+                                                    old_addr,
+                                                    obj_size);
   }
 
 #ifdef DEBUG
@@ -2912,15 +3040,29 @@ int MarkCompactCollector::RelocateNewObject(HeapObject* obj) {
 
   HeapObject* copied_to = HeapObject::FromAddress(new_addr);
   if (copied_to->IsSharedFunctionInfo()) {
-    PROFILE(SharedFunctionInfoMoveEvent(old_addr, new_addr));
+    PROFILE(heap()->isolate(),
+            SharedFunctionInfoMoveEvent(old_addr, new_addr));
   }
-  HEAP_PROFILE(ObjectMoveEvent(old_addr, new_addr));
+  HEAP_PROFILE(heap(), ObjectMoveEvent(old_addr, new_addr));
 
   return obj_size;
 }
 
 
-void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj) {
+void MarkCompactCollector::EnableCodeFlushing(bool enable) {
+  if (enable) {
+    if (code_flusher_ != NULL) return;
+    code_flusher_ = new CodeFlusher(heap()->isolate());
+  } else {
+    if (code_flusher_ == NULL) return;
+    delete code_flusher_;
+    code_flusher_ = NULL;
+  }
+}
+
+
+void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj,
+                                                Isolate* isolate) {
 #ifdef ENABLE_GDB_JIT_INTERFACE
   if (obj->IsCode()) {
     GDBJITInterface::RemoveCode(reinterpret_cast<Code*>(obj));
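The new EnableCodeFlushing above is deliberately idempotent: enabling when a CodeFlusher already exists, or disabling when none does, is a no-op, so callers can toggle flushing without tracking its current state. A condensed restatement of that toggle logic, with invented names, not code from the V8 tree:

    // Sketch only: FlusherSketch and ToggleSketch are invented names.
    struct FlusherSketch {};

    class ToggleSketch {
     public:
      void Enable(bool enable) {
        if (enable) {
          if (flusher_ != nullptr) return;   // already enabled: no-op
          flusher_ = new FlusherSketch();
        } else {
          if (flusher_ == nullptr) return;   // already disabled: no-op
          delete flusher_;
          flusher_ = nullptr;
        }
      }
      ~ToggleSketch() { Enable(false); }     // avoid leaking on destruction
     private:
      FlusherSketch* flusher_ = nullptr;
    };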
@@ -2928,7 +3070,7 @@ void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj) {
 #endif
 #ifdef ENABLE_LOGGING_AND_PROFILING
   if (obj->IsCode()) {
-    PROFILE(CodeDeleteEvent(obj->address()));
+    PROFILE(isolate, CodeDeleteEvent(obj->address()));
   }
 #endif
 }