therubyracer 0.5.0-x86-linux → 0.11.0beta5-x86-linux
Potentially problematic release: this version of therubyracer might be problematic.
- data/.gitignore +23 -11
- data/.travis.yml +10 -0
- data/Changelog.md +242 -0
- data/Gemfile +16 -0
- data/README.md +185 -0
- data/Rakefile +42 -51
- data/benchmarks.rb +217 -0
- data/ext/v8/accessor.cc +181 -0
- data/ext/v8/array.cc +26 -0
- data/ext/v8/backref.cc +56 -0
- data/ext/v8/build.rb +52 -0
- data/ext/v8/constants.cc +34 -0
- data/ext/v8/constraints.cc +52 -0
- data/ext/v8/context.cc +130 -0
- data/ext/v8/date.cc +18 -0
- data/ext/v8/exception.cc +38 -0
- data/ext/v8/extconf.rb +16 -29
- data/ext/v8/external.cc +43 -0
- data/ext/v8/function.cc +58 -0
- data/ext/v8/gc.cc +43 -0
- data/ext/v8/handles.cc +34 -0
- data/ext/v8/heap.cc +31 -0
- data/ext/v8/init.cc +39 -0
- data/ext/v8/init.so +0 -0
- data/ext/v8/invocation.cc +86 -0
- data/ext/v8/locker.cc +77 -0
- data/ext/v8/message.cc +51 -0
- data/ext/v8/object.cc +334 -0
- data/ext/v8/primitive.cc +8 -0
- data/ext/v8/rr.cc +83 -0
- data/ext/v8/rr.h +883 -0
- data/ext/v8/script.cc +80 -0
- data/ext/v8/signature.cc +18 -0
- data/ext/v8/stack.cc +75 -0
- data/ext/v8/string.cc +47 -0
- data/ext/v8/template.cc +175 -0
- data/ext/v8/trycatch.cc +86 -0
- data/ext/v8/v8.cc +87 -0
- data/ext/v8/value.cc +239 -0
- data/lib/v8.rb +30 -9
- data/lib/v8/access.rb +5 -0
- data/lib/v8/access/indices.rb +40 -0
- data/lib/v8/access/invocation.rb +47 -0
- data/lib/v8/access/names.rb +65 -0
- data/lib/v8/array.rb +26 -0
- data/lib/v8/context.rb +217 -75
- data/lib/v8/conversion.rb +35 -0
- data/lib/v8/conversion/array.rb +11 -0
- data/lib/v8/conversion/class.rb +120 -0
- data/lib/v8/conversion/code.rb +38 -0
- data/lib/v8/conversion/fundamental.rb +11 -0
- data/lib/v8/conversion/hash.rb +11 -0
- data/lib/v8/conversion/indentity.rb +31 -0
- data/lib/v8/conversion/method.rb +26 -0
- data/lib/v8/conversion/object.rb +28 -0
- data/lib/v8/conversion/primitive.rb +7 -0
- data/lib/v8/conversion/proc.rb +5 -0
- data/lib/v8/conversion/reference.rb +16 -0
- data/lib/v8/conversion/string.rb +12 -0
- data/lib/v8/conversion/symbol.rb +7 -0
- data/lib/v8/conversion/time.rb +13 -0
- data/lib/v8/error.rb +25 -0
- data/lib/v8/error/protect.rb +20 -0
- data/lib/v8/error/try.rb +15 -0
- data/lib/v8/function.rb +28 -0
- data/lib/v8/object.rb +69 -28
- data/lib/v8/util/weakcell.rb +29 -0
- data/lib/v8/version.rb +3 -0
- data/spec/c/array_spec.rb +17 -0
- data/spec/c/constants_spec.rb +20 -0
- data/spec/c/exception_spec.rb +26 -0
- data/spec/c/external_spec.rb +9 -0
- data/spec/c/function_spec.rb +46 -0
- data/spec/c/handles_spec.rb +35 -0
- data/spec/c/locker_spec.rb +38 -0
- data/spec/c/object_spec.rb +46 -0
- data/spec/c/script_spec.rb +28 -0
- data/spec/c/string_spec.rb +16 -0
- data/spec/c/template_spec.rb +30 -0
- data/spec/c/trycatch_spec.rb +51 -0
- data/spec/mem/blunt_spec.rb +42 -0
- data/spec/redjs_spec.rb +10 -0
- data/spec/spec_helper.rb +43 -12
- data/spec/threading_spec.rb +52 -0
- data/spec/v8/context_spec.rb +19 -0
- data/spec/v8/conversion_spec.rb +9 -0
- data/spec/v8/error_spec.rb +21 -0
- data/spec/v8/function_spec.rb +9 -0
- data/spec/v8/object_spec.rb +15 -0
- data/thefrontside.png +0 -0
- data/therubyracer.gemspec +15 -676
- metadata +146 -680
- data/.gitmodules +0 -3
- data/Doxyfile +0 -1514
- data/History.txt +0 -51
- data/README.rdoc +0 -158
- data/docs/data_conversion.txt +0 -18
- data/ext/v8/callbacks.cpp +0 -160
- data/ext/v8/callbacks.h +0 -14
- data/ext/v8/convert_ruby.cpp +0 -8
- data/ext/v8/convert_ruby.h +0 -99
- data/ext/v8/convert_string.cpp +0 -10
- data/ext/v8/convert_string.h +0 -73
- data/ext/v8/convert_v8.cpp +0 -9
- data/ext/v8/convert_v8.h +0 -121
- data/ext/v8/converters.cpp +0 -83
- data/ext/v8/converters.h +0 -23
- data/ext/v8/upstream/2.0.6/.gitignore +0 -26
- data/ext/v8/upstream/2.0.6/AUTHORS +0 -23
- data/ext/v8/upstream/2.0.6/ChangeLog +0 -1479
- data/ext/v8/upstream/2.0.6/LICENSE +0 -55
- data/ext/v8/upstream/2.0.6/SConstruct +0 -1028
- data/ext/v8/upstream/2.0.6/include/v8-debug.h +0 -275
- data/ext/v8/upstream/2.0.6/include/v8.h +0 -3236
- data/ext/v8/upstream/2.0.6/src/SConscript +0 -283
- data/ext/v8/upstream/2.0.6/src/accessors.cc +0 -695
- data/ext/v8/upstream/2.0.6/src/accessors.h +0 -114
- data/ext/v8/upstream/2.0.6/src/allocation.cc +0 -198
- data/ext/v8/upstream/2.0.6/src/allocation.h +0 -169
- data/ext/v8/upstream/2.0.6/src/api.cc +0 -3831
- data/ext/v8/upstream/2.0.6/src/api.h +0 -479
- data/ext/v8/upstream/2.0.6/src/apinatives.js +0 -110
- data/ext/v8/upstream/2.0.6/src/apiutils.h +0 -69
- data/ext/v8/upstream/2.0.6/src/arguments.h +0 -97
- data/ext/v8/upstream/2.0.6/src/arm/assembler-arm-inl.h +0 -277
- data/ext/v8/upstream/2.0.6/src/arm/assembler-arm.cc +0 -1821
- data/ext/v8/upstream/2.0.6/src/arm/assembler-arm.h +0 -1027
- data/ext/v8/upstream/2.0.6/src/arm/assembler-thumb2-inl.h +0 -267
- data/ext/v8/upstream/2.0.6/src/arm/assembler-thumb2.cc +0 -1821
- data/ext/v8/upstream/2.0.6/src/arm/assembler-thumb2.h +0 -1027
- data/ext/v8/upstream/2.0.6/src/arm/builtins-arm.cc +0 -1271
- data/ext/v8/upstream/2.0.6/src/arm/codegen-arm-inl.h +0 -74
- data/ext/v8/upstream/2.0.6/src/arm/codegen-arm.cc +0 -6682
- data/ext/v8/upstream/2.0.6/src/arm/codegen-arm.h +0 -535
- data/ext/v8/upstream/2.0.6/src/arm/constants-arm.cc +0 -112
- data/ext/v8/upstream/2.0.6/src/arm/constants-arm.h +0 -347
- data/ext/v8/upstream/2.0.6/src/arm/cpu-arm.cc +0 -132
- data/ext/v8/upstream/2.0.6/src/arm/debug-arm.cc +0 -213
- data/ext/v8/upstream/2.0.6/src/arm/disasm-arm.cc +0 -1166
- data/ext/v8/upstream/2.0.6/src/arm/fast-codegen-arm.cc +0 -1698
- data/ext/v8/upstream/2.0.6/src/arm/frames-arm.cc +0 -123
- data/ext/v8/upstream/2.0.6/src/arm/frames-arm.h +0 -162
- data/ext/v8/upstream/2.0.6/src/arm/ic-arm.cc +0 -849
- data/ext/v8/upstream/2.0.6/src/arm/jump-target-arm.cc +0 -238
- data/ext/v8/upstream/2.0.6/src/arm/macro-assembler-arm.cc +0 -1259
- data/ext/v8/upstream/2.0.6/src/arm/macro-assembler-arm.h +0 -423
- data/ext/v8/upstream/2.0.6/src/arm/regexp-macro-assembler-arm.cc +0 -1266
- data/ext/v8/upstream/2.0.6/src/arm/regexp-macro-assembler-arm.h +0 -282
- data/ext/v8/upstream/2.0.6/src/arm/register-allocator-arm-inl.h +0 -103
- data/ext/v8/upstream/2.0.6/src/arm/register-allocator-arm.cc +0 -59
- data/ext/v8/upstream/2.0.6/src/arm/register-allocator-arm.h +0 -43
- data/ext/v8/upstream/2.0.6/src/arm/simulator-arm.cc +0 -2264
- data/ext/v8/upstream/2.0.6/src/arm/simulator-arm.h +0 -306
- data/ext/v8/upstream/2.0.6/src/arm/stub-cache-arm.cc +0 -1516
- data/ext/v8/upstream/2.0.6/src/arm/virtual-frame-arm.cc +0 -412
- data/ext/v8/upstream/2.0.6/src/arm/virtual-frame-arm.h +0 -532
- data/ext/v8/upstream/2.0.6/src/array.js +0 -1154
- data/ext/v8/upstream/2.0.6/src/assembler.cc +0 -772
- data/ext/v8/upstream/2.0.6/src/assembler.h +0 -525
- data/ext/v8/upstream/2.0.6/src/ast.cc +0 -512
- data/ext/v8/upstream/2.0.6/src/ast.h +0 -1820
- data/ext/v8/upstream/2.0.6/src/bootstrapper.cc +0 -1680
- data/ext/v8/upstream/2.0.6/src/bootstrapper.h +0 -103
- data/ext/v8/upstream/2.0.6/src/builtins.cc +0 -851
- data/ext/v8/upstream/2.0.6/src/builtins.h +0 -245
- data/ext/v8/upstream/2.0.6/src/bytecodes-irregexp.h +0 -104
- data/ext/v8/upstream/2.0.6/src/char-predicates-inl.h +0 -86
- data/ext/v8/upstream/2.0.6/src/char-predicates.h +0 -65
- data/ext/v8/upstream/2.0.6/src/checks.cc +0 -100
- data/ext/v8/upstream/2.0.6/src/checks.h +0 -284
- data/ext/v8/upstream/2.0.6/src/code-stubs.cc +0 -164
- data/ext/v8/upstream/2.0.6/src/code-stubs.h +0 -164
- data/ext/v8/upstream/2.0.6/src/code.h +0 -68
- data/ext/v8/upstream/2.0.6/src/codegen-inl.h +0 -88
- data/ext/v8/upstream/2.0.6/src/codegen.cc +0 -504
- data/ext/v8/upstream/2.0.6/src/codegen.h +0 -522
- data/ext/v8/upstream/2.0.6/src/compilation-cache.cc +0 -490
- data/ext/v8/upstream/2.0.6/src/compilation-cache.h +0 -98
- data/ext/v8/upstream/2.0.6/src/compiler.cc +0 -1132
- data/ext/v8/upstream/2.0.6/src/compiler.h +0 -107
- data/ext/v8/upstream/2.0.6/src/contexts.cc +0 -256
- data/ext/v8/upstream/2.0.6/src/contexts.h +0 -345
- data/ext/v8/upstream/2.0.6/src/conversions-inl.h +0 -95
- data/ext/v8/upstream/2.0.6/src/conversions.cc +0 -709
- data/ext/v8/upstream/2.0.6/src/conversions.h +0 -118
- data/ext/v8/upstream/2.0.6/src/counters.cc +0 -78
- data/ext/v8/upstream/2.0.6/src/counters.h +0 -239
- data/ext/v8/upstream/2.0.6/src/cpu.h +0 -65
- data/ext/v8/upstream/2.0.6/src/d8-debug.cc +0 -345
- data/ext/v8/upstream/2.0.6/src/d8-debug.h +0 -155
- data/ext/v8/upstream/2.0.6/src/d8-posix.cc +0 -675
- data/ext/v8/upstream/2.0.6/src/d8-readline.cc +0 -128
- data/ext/v8/upstream/2.0.6/src/d8-windows.cc +0 -42
- data/ext/v8/upstream/2.0.6/src/d8.cc +0 -776
- data/ext/v8/upstream/2.0.6/src/d8.h +0 -225
- data/ext/v8/upstream/2.0.6/src/d8.js +0 -1625
- data/ext/v8/upstream/2.0.6/src/date-delay.js +0 -1138
- data/ext/v8/upstream/2.0.6/src/dateparser-inl.h +0 -114
- data/ext/v8/upstream/2.0.6/src/dateparser.cc +0 -186
- data/ext/v8/upstream/2.0.6/src/dateparser.h +0 -240
- data/ext/v8/upstream/2.0.6/src/debug-agent.cc +0 -425
- data/ext/v8/upstream/2.0.6/src/debug-agent.h +0 -129
- data/ext/v8/upstream/2.0.6/src/debug-delay.js +0 -2073
- data/ext/v8/upstream/2.0.6/src/debug.cc +0 -2751
- data/ext/v8/upstream/2.0.6/src/debug.h +0 -866
- data/ext/v8/upstream/2.0.6/src/disasm.h +0 -77
- data/ext/v8/upstream/2.0.6/src/disassembler.cc +0 -318
- data/ext/v8/upstream/2.0.6/src/disassembler.h +0 -56
- data/ext/v8/upstream/2.0.6/src/dtoa-config.c +0 -91
- data/ext/v8/upstream/2.0.6/src/execution.cc +0 -701
- data/ext/v8/upstream/2.0.6/src/execution.h +0 -312
- data/ext/v8/upstream/2.0.6/src/factory.cc +0 -957
- data/ext/v8/upstream/2.0.6/src/factory.h +0 -393
- data/ext/v8/upstream/2.0.6/src/fast-codegen.cc +0 -725
- data/ext/v8/upstream/2.0.6/src/fast-codegen.h +0 -371
- data/ext/v8/upstream/2.0.6/src/flag-definitions.h +0 -426
- data/ext/v8/upstream/2.0.6/src/flags.cc +0 -555
- data/ext/v8/upstream/2.0.6/src/flags.h +0 -81
- data/ext/v8/upstream/2.0.6/src/frame-element.cc +0 -45
- data/ext/v8/upstream/2.0.6/src/frame-element.h +0 -235
- data/ext/v8/upstream/2.0.6/src/frames-inl.h +0 -215
- data/ext/v8/upstream/2.0.6/src/frames.cc +0 -749
- data/ext/v8/upstream/2.0.6/src/frames.h +0 -659
- data/ext/v8/upstream/2.0.6/src/func-name-inferrer.cc +0 -76
- data/ext/v8/upstream/2.0.6/src/func-name-inferrer.h +0 -135
- data/ext/v8/upstream/2.0.6/src/global-handles.cc +0 -516
- data/ext/v8/upstream/2.0.6/src/global-handles.h +0 -180
- data/ext/v8/upstream/2.0.6/src/globals.h +0 -608
- data/ext/v8/upstream/2.0.6/src/handles-inl.h +0 -76
- data/ext/v8/upstream/2.0.6/src/handles.cc +0 -811
- data/ext/v8/upstream/2.0.6/src/handles.h +0 -367
- data/ext/v8/upstream/2.0.6/src/hashmap.cc +0 -226
- data/ext/v8/upstream/2.0.6/src/hashmap.h +0 -120
- data/ext/v8/upstream/2.0.6/src/heap-inl.h +0 -407
- data/ext/v8/upstream/2.0.6/src/heap-profiler.cc +0 -695
- data/ext/v8/upstream/2.0.6/src/heap-profiler.h +0 -277
- data/ext/v8/upstream/2.0.6/src/heap.cc +0 -4204
- data/ext/v8/upstream/2.0.6/src/heap.h +0 -1704
- data/ext/v8/upstream/2.0.6/src/ia32/assembler-ia32-inl.h +0 -325
- data/ext/v8/upstream/2.0.6/src/ia32/assembler-ia32.cc +0 -2375
- data/ext/v8/upstream/2.0.6/src/ia32/assembler-ia32.h +0 -914
- data/ext/v8/upstream/2.0.6/src/ia32/builtins-ia32.cc +0 -1222
- data/ext/v8/upstream/2.0.6/src/ia32/codegen-ia32-inl.h +0 -46
- data/ext/v8/upstream/2.0.6/src/ia32/codegen-ia32.cc +0 -9770
- data/ext/v8/upstream/2.0.6/src/ia32/codegen-ia32.h +0 -834
- data/ext/v8/upstream/2.0.6/src/ia32/cpu-ia32.cc +0 -79
- data/ext/v8/upstream/2.0.6/src/ia32/debug-ia32.cc +0 -208
- data/ext/v8/upstream/2.0.6/src/ia32/disasm-ia32.cc +0 -1357
- data/ext/v8/upstream/2.0.6/src/ia32/fast-codegen-ia32.cc +0 -1813
- data/ext/v8/upstream/2.0.6/src/ia32/frames-ia32.cc +0 -111
- data/ext/v8/upstream/2.0.6/src/ia32/frames-ia32.h +0 -135
- data/ext/v8/upstream/2.0.6/src/ia32/ic-ia32.cc +0 -1490
- data/ext/v8/upstream/2.0.6/src/ia32/jump-target-ia32.cc +0 -432
- data/ext/v8/upstream/2.0.6/src/ia32/macro-assembler-ia32.cc +0 -1517
- data/ext/v8/upstream/2.0.6/src/ia32/macro-assembler-ia32.h +0 -528
- data/ext/v8/upstream/2.0.6/src/ia32/regexp-macro-assembler-ia32.cc +0 -1219
- data/ext/v8/upstream/2.0.6/src/ia32/regexp-macro-assembler-ia32.h +0 -230
- data/ext/v8/upstream/2.0.6/src/ia32/register-allocator-ia32-inl.h +0 -82
- data/ext/v8/upstream/2.0.6/src/ia32/register-allocator-ia32.cc +0 -99
- data/ext/v8/upstream/2.0.6/src/ia32/register-allocator-ia32.h +0 -43
- data/ext/v8/upstream/2.0.6/src/ia32/simulator-ia32.cc +0 -30
- data/ext/v8/upstream/2.0.6/src/ia32/simulator-ia32.h +0 -62
- data/ext/v8/upstream/2.0.6/src/ia32/stub-cache-ia32.cc +0 -1961
- data/ext/v8/upstream/2.0.6/src/ia32/virtual-frame-ia32.cc +0 -1105
- data/ext/v8/upstream/2.0.6/src/ia32/virtual-frame-ia32.h +0 -580
- data/ext/v8/upstream/2.0.6/src/ic-inl.h +0 -93
- data/ext/v8/upstream/2.0.6/src/ic.cc +0 -1426
- data/ext/v8/upstream/2.0.6/src/ic.h +0 -443
- data/ext/v8/upstream/2.0.6/src/interpreter-irregexp.cc +0 -646
- data/ext/v8/upstream/2.0.6/src/interpreter-irregexp.h +0 -48
- data/ext/v8/upstream/2.0.6/src/json-delay.js +0 -254
- data/ext/v8/upstream/2.0.6/src/jsregexp.cc +0 -5234
- data/ext/v8/upstream/2.0.6/src/jsregexp.h +0 -1439
- data/ext/v8/upstream/2.0.6/src/jump-target-inl.h +0 -49
- data/ext/v8/upstream/2.0.6/src/jump-target.cc +0 -383
- data/ext/v8/upstream/2.0.6/src/jump-target.h +0 -280
- data/ext/v8/upstream/2.0.6/src/list-inl.h +0 -166
- data/ext/v8/upstream/2.0.6/src/list.h +0 -158
- data/ext/v8/upstream/2.0.6/src/log-inl.h +0 -126
- data/ext/v8/upstream/2.0.6/src/log-utils.cc +0 -503
- data/ext/v8/upstream/2.0.6/src/log-utils.h +0 -292
- data/ext/v8/upstream/2.0.6/src/log.cc +0 -1457
- data/ext/v8/upstream/2.0.6/src/log.h +0 -371
- data/ext/v8/upstream/2.0.6/src/macro-assembler.h +0 -93
- data/ext/v8/upstream/2.0.6/src/macros.py +0 -137
- data/ext/v8/upstream/2.0.6/src/mark-compact.cc +0 -2007
- data/ext/v8/upstream/2.0.6/src/mark-compact.h +0 -442
- data/ext/v8/upstream/2.0.6/src/math.js +0 -263
- data/ext/v8/upstream/2.0.6/src/memory.h +0 -74
- data/ext/v8/upstream/2.0.6/src/messages.cc +0 -177
- data/ext/v8/upstream/2.0.6/src/messages.h +0 -112
- data/ext/v8/upstream/2.0.6/src/messages.js +0 -937
- data/ext/v8/upstream/2.0.6/src/mirror-delay.js +0 -2332
- data/ext/v8/upstream/2.0.6/src/mksnapshot.cc +0 -169
- data/ext/v8/upstream/2.0.6/src/natives.h +0 -63
- data/ext/v8/upstream/2.0.6/src/objects-debug.cc +0 -1317
- data/ext/v8/upstream/2.0.6/src/objects-inl.h +0 -3044
- data/ext/v8/upstream/2.0.6/src/objects.cc +0 -8306
- data/ext/v8/upstream/2.0.6/src/objects.h +0 -4960
- data/ext/v8/upstream/2.0.6/src/oprofile-agent.cc +0 -116
- data/ext/v8/upstream/2.0.6/src/oprofile-agent.h +0 -69
- data/ext/v8/upstream/2.0.6/src/parser.cc +0 -4810
- data/ext/v8/upstream/2.0.6/src/parser.h +0 -195
- data/ext/v8/upstream/2.0.6/src/platform-freebsd.cc +0 -645
- data/ext/v8/upstream/2.0.6/src/platform-linux.cc +0 -808
- data/ext/v8/upstream/2.0.6/src/platform-macos.cc +0 -643
- data/ext/v8/upstream/2.0.6/src/platform-nullos.cc +0 -454
- data/ext/v8/upstream/2.0.6/src/platform-openbsd.cc +0 -597
- data/ext/v8/upstream/2.0.6/src/platform-posix.cc +0 -380
- data/ext/v8/upstream/2.0.6/src/platform-win32.cc +0 -1908
- data/ext/v8/upstream/2.0.6/src/platform.h +0 -556
- data/ext/v8/upstream/2.0.6/src/prettyprinter.cc +0 -1511
- data/ext/v8/upstream/2.0.6/src/prettyprinter.h +0 -219
- data/ext/v8/upstream/2.0.6/src/property.cc +0 -96
- data/ext/v8/upstream/2.0.6/src/property.h +0 -327
- data/ext/v8/upstream/2.0.6/src/regexp-delay.js +0 -406
- data/ext/v8/upstream/2.0.6/src/regexp-macro-assembler-irregexp-inl.h +0 -78
- data/ext/v8/upstream/2.0.6/src/regexp-macro-assembler-irregexp.cc +0 -464
- data/ext/v8/upstream/2.0.6/src/regexp-macro-assembler-irregexp.h +0 -141
- data/ext/v8/upstream/2.0.6/src/regexp-macro-assembler-tracer.cc +0 -356
- data/ext/v8/upstream/2.0.6/src/regexp-macro-assembler-tracer.h +0 -103
- data/ext/v8/upstream/2.0.6/src/regexp-macro-assembler.cc +0 -240
- data/ext/v8/upstream/2.0.6/src/regexp-macro-assembler.h +0 -220
- data/ext/v8/upstream/2.0.6/src/regexp-stack.cc +0 -103
- data/ext/v8/upstream/2.0.6/src/regexp-stack.h +0 -123
- data/ext/v8/upstream/2.0.6/src/register-allocator-inl.h +0 -74
- data/ext/v8/upstream/2.0.6/src/register-allocator.cc +0 -100
- data/ext/v8/upstream/2.0.6/src/register-allocator.h +0 -295
- data/ext/v8/upstream/2.0.6/src/rewriter.cc +0 -855
- data/ext/v8/upstream/2.0.6/src/rewriter.h +0 -54
- data/ext/v8/upstream/2.0.6/src/runtime.cc +0 -8163
- data/ext/v8/upstream/2.0.6/src/runtime.h +0 -432
- data/ext/v8/upstream/2.0.6/src/runtime.js +0 -626
- data/ext/v8/upstream/2.0.6/src/scanner.cc +0 -1098
- data/ext/v8/upstream/2.0.6/src/scanner.h +0 -425
- data/ext/v8/upstream/2.0.6/src/scopeinfo.cc +0 -649
- data/ext/v8/upstream/2.0.6/src/scopeinfo.h +0 -236
- data/ext/v8/upstream/2.0.6/src/scopes.cc +0 -963
- data/ext/v8/upstream/2.0.6/src/scopes.h +0 -401
- data/ext/v8/upstream/2.0.6/src/serialize.cc +0 -1260
- data/ext/v8/upstream/2.0.6/src/serialize.h +0 -404
- data/ext/v8/upstream/2.0.6/src/shell.h +0 -55
- data/ext/v8/upstream/2.0.6/src/simulator.h +0 -41
- data/ext/v8/upstream/2.0.6/src/smart-pointer.h +0 -109
- data/ext/v8/upstream/2.0.6/src/snapshot-common.cc +0 -97
- data/ext/v8/upstream/2.0.6/src/snapshot-empty.cc +0 -40
- data/ext/v8/upstream/2.0.6/src/snapshot.h +0 -59
- data/ext/v8/upstream/2.0.6/src/spaces-inl.h +0 -372
- data/ext/v8/upstream/2.0.6/src/spaces.cc +0 -2864
- data/ext/v8/upstream/2.0.6/src/spaces.h +0 -2072
- data/ext/v8/upstream/2.0.6/src/string-stream.cc +0 -584
- data/ext/v8/upstream/2.0.6/src/string-stream.h +0 -189
- data/ext/v8/upstream/2.0.6/src/string.js +0 -901
- data/ext/v8/upstream/2.0.6/src/stub-cache.cc +0 -1108
- data/ext/v8/upstream/2.0.6/src/stub-cache.h +0 -578
- data/ext/v8/upstream/2.0.6/src/third_party/dtoa/COPYING +0 -15
- data/ext/v8/upstream/2.0.6/src/third_party/dtoa/dtoa.c +0 -3330
- data/ext/v8/upstream/2.0.6/src/third_party/valgrind/valgrind.h +0 -3925
- data/ext/v8/upstream/2.0.6/src/token.cc +0 -56
- data/ext/v8/upstream/2.0.6/src/token.h +0 -270
- data/ext/v8/upstream/2.0.6/src/top.cc +0 -991
- data/ext/v8/upstream/2.0.6/src/top.h +0 -459
- data/ext/v8/upstream/2.0.6/src/unicode-inl.h +0 -238
- data/ext/v8/upstream/2.0.6/src/unicode.cc +0 -749
- data/ext/v8/upstream/2.0.6/src/unicode.h +0 -279
- data/ext/v8/upstream/2.0.6/src/uri.js +0 -415
- data/ext/v8/upstream/2.0.6/src/usage-analyzer.cc +0 -426
- data/ext/v8/upstream/2.0.6/src/usage-analyzer.h +0 -40
- data/ext/v8/upstream/2.0.6/src/utils.cc +0 -322
- data/ext/v8/upstream/2.0.6/src/utils.h +0 -592
- data/ext/v8/upstream/2.0.6/src/v8-counters.cc +0 -55
- data/ext/v8/upstream/2.0.6/src/v8-counters.h +0 -198
- data/ext/v8/upstream/2.0.6/src/v8.cc +0 -193
- data/ext/v8/upstream/2.0.6/src/v8.h +0 -119
- data/ext/v8/upstream/2.0.6/src/v8natives.js +0 -846
- data/ext/v8/upstream/2.0.6/src/v8threads.cc +0 -450
- data/ext/v8/upstream/2.0.6/src/v8threads.h +0 -144
- data/ext/v8/upstream/2.0.6/src/variables.cc +0 -163
- data/ext/v8/upstream/2.0.6/src/variables.h +0 -235
- data/ext/v8/upstream/2.0.6/src/version.cc +0 -88
- data/ext/v8/upstream/2.0.6/src/version.h +0 -64
- data/ext/v8/upstream/2.0.6/src/virtual-frame.cc +0 -381
- data/ext/v8/upstream/2.0.6/src/virtual-frame.h +0 -44
- data/ext/v8/upstream/2.0.6/src/x64/assembler-x64-inl.h +0 -352
- data/ext/v8/upstream/2.0.6/src/x64/assembler-x64.cc +0 -2539
- data/ext/v8/upstream/2.0.6/src/x64/assembler-x64.h +0 -1399
- data/ext/v8/upstream/2.0.6/src/x64/builtins-x64.cc +0 -1255
- data/ext/v8/upstream/2.0.6/src/x64/codegen-x64-inl.h +0 -46
- data/ext/v8/upstream/2.0.6/src/x64/codegen-x64.cc +0 -8223
- data/ext/v8/upstream/2.0.6/src/x64/codegen-x64.h +0 -785
- data/ext/v8/upstream/2.0.6/src/x64/cpu-x64.cc +0 -79
- data/ext/v8/upstream/2.0.6/src/x64/debug-x64.cc +0 -202
- data/ext/v8/upstream/2.0.6/src/x64/disasm-x64.cc +0 -1596
- data/ext/v8/upstream/2.0.6/src/x64/fast-codegen-x64.cc +0 -1820
- data/ext/v8/upstream/2.0.6/src/x64/frames-x64.cc +0 -109
- data/ext/v8/upstream/2.0.6/src/x64/frames-x64.h +0 -121
- data/ext/v8/upstream/2.0.6/src/x64/ic-x64.cc +0 -1392
- data/ext/v8/upstream/2.0.6/src/x64/jump-target-x64.cc +0 -432
- data/ext/v8/upstream/2.0.6/src/x64/macro-assembler-x64.cc +0 -2409
- data/ext/v8/upstream/2.0.6/src/x64/macro-assembler-x64.h +0 -765
- data/ext/v8/upstream/2.0.6/src/x64/regexp-macro-assembler-x64.cc +0 -1337
- data/ext/v8/upstream/2.0.6/src/x64/regexp-macro-assembler-x64.h +0 -295
- data/ext/v8/upstream/2.0.6/src/x64/register-allocator-x64-inl.h +0 -86
- data/ext/v8/upstream/2.0.6/src/x64/register-allocator-x64.cc +0 -84
- data/ext/v8/upstream/2.0.6/src/x64/register-allocator-x64.h +0 -43
- data/ext/v8/upstream/2.0.6/src/x64/simulator-x64.cc +0 -27
- data/ext/v8/upstream/2.0.6/src/x64/simulator-x64.h +0 -63
- data/ext/v8/upstream/2.0.6/src/x64/stub-cache-x64.cc +0 -1884
- data/ext/v8/upstream/2.0.6/src/x64/virtual-frame-x64.cc +0 -1089
- data/ext/v8/upstream/2.0.6/src/x64/virtual-frame-x64.h +0 -560
- data/ext/v8/upstream/2.0.6/src/zone-inl.h +0 -297
- data/ext/v8/upstream/2.0.6/src/zone.cc +0 -193
- data/ext/v8/upstream/2.0.6/src/zone.h +0 -305
- data/ext/v8/upstream/2.0.6/tools/codemap.js +0 -258
- data/ext/v8/upstream/2.0.6/tools/consarray.js +0 -93
- data/ext/v8/upstream/2.0.6/tools/csvparser.js +0 -98
- data/ext/v8/upstream/2.0.6/tools/gyp/v8.gyp +0 -620
- data/ext/v8/upstream/2.0.6/tools/js2c.py +0 -376
- data/ext/v8/upstream/2.0.6/tools/jsmin.py +0 -280
- data/ext/v8/upstream/2.0.6/tools/linux-tick-processor +0 -24
- data/ext/v8/upstream/2.0.6/tools/linux-tick-processor.py +0 -78
- data/ext/v8/upstream/2.0.6/tools/logreader.js +0 -320
- data/ext/v8/upstream/2.0.6/tools/mac-nm +0 -18
- data/ext/v8/upstream/2.0.6/tools/mac-tick-processor +0 -6
- data/ext/v8/upstream/2.0.6/tools/oprofile/annotate +0 -7
- data/ext/v8/upstream/2.0.6/tools/oprofile/common +0 -19
- data/ext/v8/upstream/2.0.6/tools/oprofile/dump +0 -7
- data/ext/v8/upstream/2.0.6/tools/oprofile/report +0 -7
- data/ext/v8/upstream/2.0.6/tools/oprofile/reset +0 -7
- data/ext/v8/upstream/2.0.6/tools/oprofile/run +0 -14
- data/ext/v8/upstream/2.0.6/tools/oprofile/shutdown +0 -7
- data/ext/v8/upstream/2.0.6/tools/oprofile/start +0 -7
- data/ext/v8/upstream/2.0.6/tools/presubmit.py +0 -299
- data/ext/v8/upstream/2.0.6/tools/process-heap-prof.py +0 -120
- data/ext/v8/upstream/2.0.6/tools/profile.js +0 -621
- data/ext/v8/upstream/2.0.6/tools/profile_view.js +0 -224
- data/ext/v8/upstream/2.0.6/tools/run-valgrind.py +0 -77
- data/ext/v8/upstream/2.0.6/tools/splaytree.js +0 -322
- data/ext/v8/upstream/2.0.6/tools/splaytree.py +0 -226
- data/ext/v8/upstream/2.0.6/tools/stats-viewer.py +0 -456
- data/ext/v8/upstream/2.0.6/tools/test.py +0 -1370
- data/ext/v8/upstream/2.0.6/tools/tickprocessor-driver.js +0 -53
- data/ext/v8/upstream/2.0.6/tools/tickprocessor.js +0 -731
- data/ext/v8/upstream/2.0.6/tools/tickprocessor.py +0 -535
- data/ext/v8/upstream/2.0.6/tools/utils.py +0 -82
- data/ext/v8/upstream/2.0.6/tools/visual_studio/README.txt +0 -71
- data/ext/v8/upstream/2.0.6/tools/visual_studio/arm.vsprops +0 -14
- data/ext/v8/upstream/2.0.6/tools/visual_studio/common.vsprops +0 -35
- data/ext/v8/upstream/2.0.6/tools/visual_studio/d8.vcproj +0 -199
- data/ext/v8/upstream/2.0.6/tools/visual_studio/d8_arm.vcproj +0 -199
- data/ext/v8/upstream/2.0.6/tools/visual_studio/d8_x64.vcproj +0 -201
- data/ext/v8/upstream/2.0.6/tools/visual_studio/d8js2c.cmd +0 -6
- data/ext/v8/upstream/2.0.6/tools/visual_studio/debug.vsprops +0 -17
- data/ext/v8/upstream/2.0.6/tools/visual_studio/ia32.vsprops +0 -13
- data/ext/v8/upstream/2.0.6/tools/visual_studio/js2c.cmd +0 -6
- data/ext/v8/upstream/2.0.6/tools/visual_studio/release.vsprops +0 -24
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8.sln +0 -101
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8.vcproj +0 -223
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_arm.sln +0 -74
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_arm.vcproj +0 -223
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_base.vcproj +0 -971
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_base_arm.vcproj +0 -983
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_base_x64.vcproj +0 -959
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_cctest.vcproj +0 -255
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_cctest_arm.vcproj +0 -243
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_cctest_x64.vcproj +0 -257
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_mksnapshot.vcproj +0 -151
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_mksnapshot_x64.vcproj +0 -151
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_process_sample.vcproj +0 -151
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_process_sample_arm.vcproj +0 -151
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_process_sample_x64.vcproj +0 -151
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_shell_sample.vcproj +0 -151
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_shell_sample_arm.vcproj +0 -151
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_shell_sample_x64.vcproj +0 -153
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_snapshot.vcproj +0 -142
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_snapshot_cc.vcproj +0 -92
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_snapshot_cc_x64.vcproj +0 -92
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_snapshot_x64.vcproj +0 -142
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_x64.sln +0 -101
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_x64.vcproj +0 -223
- data/ext/v8/upstream/2.0.6/tools/visual_studio/x64.vsprops +0 -13
- data/ext/v8/upstream/2.0.6/tools/windows-tick-processor.bat +0 -5
- data/ext/v8/upstream/2.0.6/tools/windows-tick-processor.py +0 -137
- data/ext/v8/upstream/Makefile +0 -32
- data/ext/v8/upstream/fpic-on-linux-amd64.patch +0 -13
- data/ext/v8/upstream/no-strict-aliasing.patch +0 -13
- data/ext/v8/upstream/scons/CHANGES.txt +0 -5183
- data/ext/v8/upstream/scons/LICENSE.txt +0 -20
- data/ext/v8/upstream/scons/MANIFEST +0 -202
- data/ext/v8/upstream/scons/PKG-INFO +0 -13
- data/ext/v8/upstream/scons/README.txt +0 -273
- data/ext/v8/upstream/scons/RELEASE.txt +0 -1040
- data/ext/v8/upstream/scons/engine/SCons/Action.py +0 -1256
- data/ext/v8/upstream/scons/engine/SCons/Builder.py +0 -868
- data/ext/v8/upstream/scons/engine/SCons/CacheDir.py +0 -217
- data/ext/v8/upstream/scons/engine/SCons/Conftest.py +0 -794
- data/ext/v8/upstream/scons/engine/SCons/Debug.py +0 -237
- data/ext/v8/upstream/scons/engine/SCons/Defaults.py +0 -485
- data/ext/v8/upstream/scons/engine/SCons/Environment.py +0 -2327
- data/ext/v8/upstream/scons/engine/SCons/Errors.py +0 -207
- data/ext/v8/upstream/scons/engine/SCons/Executor.py +0 -636
- data/ext/v8/upstream/scons/engine/SCons/Job.py +0 -435
- data/ext/v8/upstream/scons/engine/SCons/Memoize.py +0 -292
- data/ext/v8/upstream/scons/engine/SCons/Node/Alias.py +0 -153
- data/ext/v8/upstream/scons/engine/SCons/Node/FS.py +0 -3220
- data/ext/v8/upstream/scons/engine/SCons/Node/Python.py +0 -128
- data/ext/v8/upstream/scons/engine/SCons/Node/__init__.py +0 -1341
- data/ext/v8/upstream/scons/engine/SCons/Options/BoolOption.py +0 -50
- data/ext/v8/upstream/scons/engine/SCons/Options/EnumOption.py +0 -50
- data/ext/v8/upstream/scons/engine/SCons/Options/ListOption.py +0 -50
- data/ext/v8/upstream/scons/engine/SCons/Options/PackageOption.py +0 -50
- data/ext/v8/upstream/scons/engine/SCons/Options/PathOption.py +0 -76
- data/ext/v8/upstream/scons/engine/SCons/Options/__init__.py +0 -74
- data/ext/v8/upstream/scons/engine/SCons/PathList.py +0 -232
- data/ext/v8/upstream/scons/engine/SCons/Platform/__init__.py +0 -236
- data/ext/v8/upstream/scons/engine/SCons/Platform/aix.py +0 -70
- data/ext/v8/upstream/scons/engine/SCons/Platform/cygwin.py +0 -55
- data/ext/v8/upstream/scons/engine/SCons/Platform/darwin.py +0 -46
- data/ext/v8/upstream/scons/engine/SCons/Platform/hpux.py +0 -46
- data/ext/v8/upstream/scons/engine/SCons/Platform/irix.py +0 -44
- data/ext/v8/upstream/scons/engine/SCons/Platform/os2.py +0 -58
- data/ext/v8/upstream/scons/engine/SCons/Platform/posix.py +0 -264
- data/ext/v8/upstream/scons/engine/SCons/Platform/sunos.py +0 -50
- data/ext/v8/upstream/scons/engine/SCons/Platform/win32.py +0 -386
- data/ext/v8/upstream/scons/engine/SCons/SConf.py +0 -1038
- data/ext/v8/upstream/scons/engine/SCons/SConsign.py +0 -381
- data/ext/v8/upstream/scons/engine/SCons/Scanner/C.py +0 -132
- data/ext/v8/upstream/scons/engine/SCons/Scanner/D.py +0 -74
- data/ext/v8/upstream/scons/engine/SCons/Scanner/Dir.py +0 -111
- data/ext/v8/upstream/scons/engine/SCons/Scanner/Fortran.py +0 -320
- data/ext/v8/upstream/scons/engine/SCons/Scanner/IDL.py +0 -48
- data/ext/v8/upstream/scons/engine/SCons/Scanner/LaTeX.py +0 -378
- data/ext/v8/upstream/scons/engine/SCons/Scanner/Prog.py +0 -103
- data/ext/v8/upstream/scons/engine/SCons/Scanner/RC.py +0 -55
- data/ext/v8/upstream/scons/engine/SCons/Scanner/__init__.py +0 -415
- data/ext/v8/upstream/scons/engine/SCons/Script/Interactive.py +0 -386
- data/ext/v8/upstream/scons/engine/SCons/Script/Main.py +0 -1360
- data/ext/v8/upstream/scons/engine/SCons/Script/SConsOptions.py +0 -944
- data/ext/v8/upstream/scons/engine/SCons/Script/SConscript.py +0 -642
- data/ext/v8/upstream/scons/engine/SCons/Script/__init__.py +0 -414
- data/ext/v8/upstream/scons/engine/SCons/Sig.py +0 -63
- data/ext/v8/upstream/scons/engine/SCons/Subst.py +0 -911
- data/ext/v8/upstream/scons/engine/SCons/Taskmaster.py +0 -1030
- data/ext/v8/upstream/scons/engine/SCons/Tool/386asm.py +0 -61
- data/ext/v8/upstream/scons/engine/SCons/Tool/BitKeeper.py +0 -65
- data/ext/v8/upstream/scons/engine/SCons/Tool/CVS.py +0 -73
- data/ext/v8/upstream/scons/engine/SCons/Tool/FortranCommon.py +0 -247
- data/ext/v8/upstream/scons/engine/SCons/Tool/JavaCommon.py +0 -324
- data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/__init__.py +0 -56
- data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/arch.py +0 -61
- data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/common.py +0 -210
- data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/netframework.py +0 -84
- data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/sdk.py +0 -321
- data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/vc.py +0 -367
- data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/vs.py +0 -497
- data/ext/v8/upstream/scons/engine/SCons/Tool/Perforce.py +0 -104
- data/ext/v8/upstream/scons/engine/SCons/Tool/PharLapCommon.py +0 -138
- data/ext/v8/upstream/scons/engine/SCons/Tool/RCS.py +0 -64
- data/ext/v8/upstream/scons/engine/SCons/Tool/SCCS.py +0 -64
- data/ext/v8/upstream/scons/engine/SCons/Tool/Subversion.py +0 -71
- data/ext/v8/upstream/scons/engine/SCons/Tool/__init__.py +0 -675
- data/ext/v8/upstream/scons/engine/SCons/Tool/aixc++.py +0 -82
- data/ext/v8/upstream/scons/engine/SCons/Tool/aixcc.py +0 -74
- data/ext/v8/upstream/scons/engine/SCons/Tool/aixf77.py +0 -80
- data/ext/v8/upstream/scons/engine/SCons/Tool/aixlink.py +0 -76
- data/ext/v8/upstream/scons/engine/SCons/Tool/applelink.py +0 -71
- data/ext/v8/upstream/scons/engine/SCons/Tool/ar.py +0 -63
- data/ext/v8/upstream/scons/engine/SCons/Tool/as.py +0 -78
- data/ext/v8/upstream/scons/engine/SCons/Tool/bcc32.py +0 -82
- data/ext/v8/upstream/scons/engine/SCons/Tool/c++.py +0 -99
- data/ext/v8/upstream/scons/engine/SCons/Tool/cc.py +0 -114
- data/ext/v8/upstream/scons/engine/SCons/Tool/cvf.py +0 -58
- data/ext/v8/upstream/scons/engine/SCons/Tool/default.py +0 -50
- data/ext/v8/upstream/scons/engine/SCons/Tool/dmd.py +0 -224
- data/ext/v8/upstream/scons/engine/SCons/Tool/dvi.py +0 -64
- data/ext/v8/upstream/scons/engine/SCons/Tool/dvipdf.py +0 -125
- data/ext/v8/upstream/scons/engine/SCons/Tool/dvips.py +0 -94
- data/ext/v8/upstream/scons/engine/SCons/Tool/f77.py +0 -62
- data/ext/v8/upstream/scons/engine/SCons/Tool/f90.py +0 -62
- data/ext/v8/upstream/scons/engine/SCons/Tool/f95.py +0 -63
- data/ext/v8/upstream/scons/engine/SCons/Tool/filesystem.py +0 -98
- data/ext/v8/upstream/scons/engine/SCons/Tool/fortran.py +0 -63
- data/ext/v8/upstream/scons/engine/SCons/Tool/g++.py +0 -90
- data/ext/v8/upstream/scons/engine/SCons/Tool/g77.py +0 -73
- data/ext/v8/upstream/scons/engine/SCons/Tool/gas.py +0 -53
- data/ext/v8/upstream/scons/engine/SCons/Tool/gcc.py +0 -80
- data/ext/v8/upstream/scons/engine/SCons/Tool/gfortran.py +0 -64
- data/ext/v8/upstream/scons/engine/SCons/Tool/gnulink.py +0 -63
- data/ext/v8/upstream/scons/engine/SCons/Tool/gs.py +0 -81
- data/ext/v8/upstream/scons/engine/SCons/Tool/hpc++.py +0 -85
- data/ext/v8/upstream/scons/engine/SCons/Tool/hpcc.py +0 -53
- data/ext/v8/upstream/scons/engine/SCons/Tool/hplink.py +0 -77
- data/ext/v8/upstream/scons/engine/SCons/Tool/icc.py +0 -59
- data/ext/v8/upstream/scons/engine/SCons/Tool/icl.py +0 -52
- data/ext/v8/upstream/scons/engine/SCons/Tool/ifl.py +0 -72
- data/ext/v8/upstream/scons/engine/SCons/Tool/ifort.py +0 -90
- data/ext/v8/upstream/scons/engine/SCons/Tool/ilink.py +0 -59
- data/ext/v8/upstream/scons/engine/SCons/Tool/ilink32.py +0 -60
- data/ext/v8/upstream/scons/engine/SCons/Tool/install.py +0 -229
- data/ext/v8/upstream/scons/engine/SCons/Tool/intelc.py +0 -490
- data/ext/v8/upstream/scons/engine/SCons/Tool/ipkg.py +0 -71
- data/ext/v8/upstream/scons/engine/SCons/Tool/jar.py +0 -110
- data/ext/v8/upstream/scons/engine/SCons/Tool/javac.py +0 -234
- data/ext/v8/upstream/scons/engine/SCons/Tool/javah.py +0 -138
- data/ext/v8/upstream/scons/engine/SCons/Tool/latex.py +0 -79
- data/ext/v8/upstream/scons/engine/SCons/Tool/lex.py +0 -99
- data/ext/v8/upstream/scons/engine/SCons/Tool/link.py +0 -121
- data/ext/v8/upstream/scons/engine/SCons/Tool/linkloc.py +0 -112
- data/ext/v8/upstream/scons/engine/SCons/Tool/m4.py +0 -63
- data/ext/v8/upstream/scons/engine/SCons/Tool/masm.py +0 -77
- data/ext/v8/upstream/scons/engine/SCons/Tool/midl.py +0 -90
- data/ext/v8/upstream/scons/engine/SCons/Tool/mingw.py +0 -159
- data/ext/v8/upstream/scons/engine/SCons/Tool/mslib.py +0 -64
- data/ext/v8/upstream/scons/engine/SCons/Tool/mslink.py +0 -266
- data/ext/v8/upstream/scons/engine/SCons/Tool/mssdk.py +0 -50
- data/ext/v8/upstream/scons/engine/SCons/Tool/msvc.py +0 -269
- data/ext/v8/upstream/scons/engine/SCons/Tool/msvs.py +0 -1439
- data/ext/v8/upstream/scons/engine/SCons/Tool/mwcc.py +0 -208
- data/ext/v8/upstream/scons/engine/SCons/Tool/mwld.py +0 -107
- data/ext/v8/upstream/scons/engine/SCons/Tool/nasm.py +0 -72
- data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/__init__.py +0 -314
- data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/ipk.py +0 -185
- data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/msi.py +0 -526
- data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/rpm.py +0 -367
- data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/src_tarbz2.py +0 -43
- data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/src_targz.py +0 -43
- data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/src_zip.py +0 -43
- data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/tarbz2.py +0 -44
- data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/targz.py +0 -44
- data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/zip.py +0 -44
- data/ext/v8/upstream/scons/engine/SCons/Tool/pdf.py +0 -78
- data/ext/v8/upstream/scons/engine/SCons/Tool/pdflatex.py +0 -83
- data/ext/v8/upstream/scons/engine/SCons/Tool/pdftex.py +0 -108
- data/ext/v8/upstream/scons/engine/SCons/Tool/qt.py +0 -336
- data/ext/v8/upstream/scons/engine/SCons/Tool/rmic.py +0 -121
- data/ext/v8/upstream/scons/engine/SCons/Tool/rpcgen.py +0 -70
- data/ext/v8/upstream/scons/engine/SCons/Tool/rpm.py +0 -132
- data/ext/v8/upstream/scons/engine/SCons/Tool/sgiar.py +0 -68
- data/ext/v8/upstream/scons/engine/SCons/Tool/sgic++.py +0 -58
- data/ext/v8/upstream/scons/engine/SCons/Tool/sgicc.py +0 -53
- data/ext/v8/upstream/scons/engine/SCons/Tool/sgilink.py +0 -63
- data/ext/v8/upstream/scons/engine/SCons/Tool/sunar.py +0 -67
- data/ext/v8/upstream/scons/engine/SCons/Tool/sunc++.py +0 -142
- data/ext/v8/upstream/scons/engine/SCons/Tool/suncc.py +0 -58
- data/ext/v8/upstream/scons/engine/SCons/Tool/sunf77.py +0 -63
- data/ext/v8/upstream/scons/engine/SCons/Tool/sunf90.py +0 -64
- data/ext/v8/upstream/scons/engine/SCons/Tool/sunf95.py +0 -64
- data/ext/v8/upstream/scons/engine/SCons/Tool/sunlink.py +0 -77
- data/ext/v8/upstream/scons/engine/SCons/Tool/swig.py +0 -186
- data/ext/v8/upstream/scons/engine/SCons/Tool/tar.py +0 -73
- data/ext/v8/upstream/scons/engine/SCons/Tool/tex.py +0 -805
- data/ext/v8/upstream/scons/engine/SCons/Tool/textfile.py +0 -175
- data/ext/v8/upstream/scons/engine/SCons/Tool/tlib.py +0 -53
- data/ext/v8/upstream/scons/engine/SCons/Tool/wix.py +0 -100
- data/ext/v8/upstream/scons/engine/SCons/Tool/yacc.py +0 -131
- data/ext/v8/upstream/scons/engine/SCons/Tool/zip.py +0 -100
- data/ext/v8/upstream/scons/engine/SCons/Util.py +0 -1645
- data/ext/v8/upstream/scons/engine/SCons/Variables/BoolVariable.py +0 -91
- data/ext/v8/upstream/scons/engine/SCons/Variables/EnumVariable.py +0 -107
- data/ext/v8/upstream/scons/engine/SCons/Variables/ListVariable.py +0 -139
- data/ext/v8/upstream/scons/engine/SCons/Variables/PackageVariable.py +0 -109
- data/ext/v8/upstream/scons/engine/SCons/Variables/PathVariable.py +0 -147
- data/ext/v8/upstream/scons/engine/SCons/Variables/__init__.py +0 -317
- data/ext/v8/upstream/scons/engine/SCons/Warnings.py +0 -228
- data/ext/v8/upstream/scons/engine/SCons/__init__.py +0 -49
- data/ext/v8/upstream/scons/engine/SCons/compat/__init__.py +0 -302
- data/ext/v8/upstream/scons/engine/SCons/compat/_scons_UserString.py +0 -98
- data/ext/v8/upstream/scons/engine/SCons/compat/_scons_hashlib.py +0 -91
- data/ext/v8/upstream/scons/engine/SCons/compat/_scons_itertools.py +0 -124
- data/ext/v8/upstream/scons/engine/SCons/compat/_scons_optparse.py +0 -1725
- data/ext/v8/upstream/scons/engine/SCons/compat/_scons_sets.py +0 -583
- data/ext/v8/upstream/scons/engine/SCons/compat/_scons_sets15.py +0 -176
- data/ext/v8/upstream/scons/engine/SCons/compat/_scons_shlex.py +0 -325
- data/ext/v8/upstream/scons/engine/SCons/compat/_scons_subprocess.py +0 -1296
- data/ext/v8/upstream/scons/engine/SCons/compat/_scons_textwrap.py +0 -382
- data/ext/v8/upstream/scons/engine/SCons/compat/builtins.py +0 -187
- data/ext/v8/upstream/scons/engine/SCons/cpp.py +0 -598
- data/ext/v8/upstream/scons/engine/SCons/dblite.py +0 -248
- data/ext/v8/upstream/scons/engine/SCons/exitfuncs.py +0 -77
- data/ext/v8/upstream/scons/os_spawnv_fix.diff +0 -83
- data/ext/v8/upstream/scons/scons-time.1 +0 -1017
- data/ext/v8/upstream/scons/scons.1 +0 -15179
- data/ext/v8/upstream/scons/sconsign.1 +0 -208
- data/ext/v8/upstream/scons/script/scons +0 -184
- data/ext/v8/upstream/scons/script/scons-time +0 -1529
- data/ext/v8/upstream/scons/script/scons.bat +0 -31
- data/ext/v8/upstream/scons/script/sconsign +0 -508
- data/ext/v8/upstream/scons/setup.cfg +0 -6
- data/ext/v8/upstream/scons/setup.py +0 -427
- data/ext/v8/v8.cpp +0 -89
- data/ext/v8/v8_cxt.cpp +0 -92
- data/ext/v8/v8_cxt.h +0 -20
- data/ext/v8/v8_func.cpp +0 -10
- data/ext/v8/v8_func.h +0 -11
- data/ext/v8/v8_msg.cpp +0 -54
- data/ext/v8/v8_msg.h +0 -18
- data/ext/v8/v8_obj.cpp +0 -52
- data/ext/v8/v8_obj.h +0 -13
- data/ext/v8/v8_ref.cpp +0 -26
- data/ext/v8/v8_ref.h +0 -31
- data/ext/v8/v8_script.cpp +0 -20
- data/ext/v8/v8_script.h +0 -8
- data/ext/v8/v8_standalone.cpp +0 -69
- data/ext/v8/v8_standalone.h +0 -31
- data/ext/v8/v8_str.cpp +0 -17
- data/ext/v8/v8_str.h +0 -9
- data/ext/v8/v8_template.cpp +0 -53
- data/ext/v8/v8_template.h +0 -13
- data/lib/v8/to.rb +0 -33
- data/lib/v8/v8.so +0 -0
- data/script/console +0 -10
- data/script/destroy +0 -14
- data/script/generate +0 -14
- data/spec/ext/cxt_spec.rb +0 -25
- data/spec/ext/obj_spec.rb +0 -13
- data/spec/redjs/jsapi_spec.rb +0 -405
- data/spec/redjs/tap.rb +0 -8
- data/spec/redjs_helper.rb +0 -3
- data/spec/spec.opts +0 -1
- data/spec/v8/to_spec.rb +0 -15
- data/tasks/rspec.rake +0 -21
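
Taken together, the listing shows what changed between 0.5.0 and 0.11.0beta5: the vendored V8 2.0.6 sources and the bundled SCons build system are gone, replaced by a much smaller set of C++ bindings under data/ext/v8/ and a Ruby-side conversion layer under data/lib/v8/conversion/, with data/lib/v8/context.rb remaining the public entry point. As orientation, here is a minimal usage sketch of that entry point, assuming the V8::Context API documented in the gem's README:

```ruby
require 'v8'  # therubyracer

# Evaluate JavaScript inside an embedded V8 context and share a Ruby value
# with it; this is the surface implemented by lib/v8/context.rb together
# with the lib/v8/conversion/* files listed above.
V8::Context.new do |cxt|
  cxt['count'] = 41            # expose a Ruby value to JavaScript
  puts cxt.eval('count + 1')   # evaluate JS against the context => 42
end
```

The hunks below show two of the removed upstream files, one in full and one in part.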
data/ext/v8/upstream/2.0.6/src/macros.py
@@ -1,137 +0,0 @@
-# Copyright 2006-2009 the V8 project authors. All rights reserved.
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following
-# disclaimer in the documentation and/or other materials provided
-# with the distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived
-# from this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-# Dictionary that is passed as defines for js2c.py.
-# Used for defines that must be defined for all native js files.
-
-const NONE = 0;
-const READ_ONLY = 1;
-const DONT_ENUM = 2;
-const DONT_DELETE = 4;
-
-# Constants used for getter and setter operations.
-const GETTER = 0;
-const SETTER = 1;
-
-# These definitions must match the index of the properties in objects.h.
-const kApiTagOffset = 0;
-const kApiPropertyListOffset = 1;
-const kApiSerialNumberOffset = 2;
-const kApiConstructorOffset = 2;
-const kApiPrototypeTemplateOffset = 5;
-const kApiParentTemplateOffset = 6;
-
-const NO_HINT = 0;
-const NUMBER_HINT = 1;
-const STRING_HINT = 2;
-
-const kFunctionTag = 0;
-const kNewObjectTag = 1;
-
-# For date.js.
-const HoursPerDay = 24;
-const MinutesPerHour = 60;
-const SecondsPerMinute = 60;
-const msPerSecond = 1000;
-const msPerMinute = 60000;
-const msPerHour = 3600000;
-const msPerDay = 86400000;
-const msPerMonth = 2592000000;
-
-# For apinatives.js
-const kUninitialized = -1;
-
-# Note: kDayZeroInJulianDay = ToJulianDay(1970, 0, 1).
-const kInvalidDate = 'Invalid Date';
-const kDayZeroInJulianDay = 2440588;
-const kMonthMask = 0x1e0;
-const kDayMask = 0x01f;
-const kYearShift = 9;
-const kMonthShift = 5;
-
-# Type query macros.
-macro IS_NULL(arg) = (arg === null);
-macro IS_NULL_OR_UNDEFINED(arg) = (arg == null);
-macro IS_UNDEFINED(arg) = (typeof(arg) === 'undefined');
-macro IS_NUMBER(arg) = (typeof(arg) === 'number');
-macro IS_STRING(arg) = (typeof(arg) === 'string');
-macro IS_BOOLEAN(arg) = (typeof(arg) === 'boolean');
-macro IS_OBJECT(arg) = (%_IsObject(arg));
-macro IS_ARRAY(arg) = (%_IsArray(arg));
-macro IS_FUNCTION(arg) = (%_IsFunction(arg));
-macro IS_REGEXP(arg) = (%_ClassOf(arg) === 'RegExp');
-macro IS_DATE(arg) = (%_ClassOf(arg) === 'Date');
-macro IS_NUMBER_WRAPPER(arg) = (%_ClassOf(arg) === 'Number');
-macro IS_STRING_WRAPPER(arg) = (%_ClassOf(arg) === 'String');
-macro IS_BOOLEAN_WRAPPER(arg) = (%_ClassOf(arg) === 'Boolean');
-macro IS_ERROR(arg) = (%_ClassOf(arg) === 'Error');
-macro IS_SCRIPT(arg) = (%_ClassOf(arg) === 'Script');
-macro IS_ARGUMENTS(arg) = (%_ClassOf(arg) === 'Arguments');
-macro IS_GLOBAL(arg) = (%_ClassOf(arg) === 'global');
-macro FLOOR(arg) = $floor(arg);
-
-# Inline macros. Use %IS_VAR to make sure arg is evaluated only once.
-macro NUMBER_IS_NAN(arg) = (!%_IsSmi(%IS_VAR(arg)) && !(arg == arg));
-macro TO_INTEGER(arg) = (%_IsSmi(%IS_VAR(arg)) ? arg : ToInteger(arg));
-macro TO_INT32(arg) = (%_IsSmi(%IS_VAR(arg)) ? arg : (arg >> 0));
-macro TO_UINT32(arg) = (arg >>> 0);
-
-# Macros implemented in Python.
-python macro CHAR_CODE(str) = ord(str[1]);
-
-# Accessors for original global properties that ensure they have been loaded.
-const ORIGINAL_REGEXP = (global.RegExp, $RegExp);
-const ORIGINAL_DATE = (global.Date, $Date);
-
-# Constants used on an array to implement the properties of the RegExp object.
-const REGEXP_NUMBER_OF_CAPTURES = 0;
-const REGEXP_FIRST_CAPTURE = 3;
-
-# We can't put macros in macros so we use constants here.
-# REGEXP_NUMBER_OF_CAPTURES
-macro NUMBER_OF_CAPTURES(array) = ((array)[0]);
-
-# Gets the value of a Date object. If arg is not a Date object
-# a type error is thrown.
-macro DATE_VALUE(arg) = (%_ClassOf(arg) === 'Date' ? %_ValueOf(arg) : ThrowDateTypeError());
-macro DAY(time) = ($floor(time / 86400000));
-macro MONTH_FROM_TIME(time) = (FromJulianDay(($floor(time / 86400000)) + 2440588).month);
-macro DATE_FROM_TIME(time) = (FromJulianDay(($floor(time / 86400000)) + 2440588).date);
-macro YEAR_FROM_TIME(time) = (FromJulianDay(($floor(time / 86400000)) + 2440588).year);
-macro HOUR_FROM_TIME(time) = (Modulo($floor(time / 3600000), 24));
-macro MIN_FROM_TIME(time) = (Modulo($floor(time / 60000), 60));
-macro SEC_FROM_TIME(time) = (Modulo($floor(time / 1000), 60));
-macro MS_FROM_TIME(time) = (Modulo(time, 1000));
-
-# Last input and last subject of regexp matches.
-macro LAST_SUBJECT(array) = ((array)[1]);
-macro LAST_INPUT(array) = ((array)[2]);
-
-# REGEXP_FIRST_CAPTURE
-macro CAPTURE(index) = (3 + (index));
-const CAPTURE0 = 3;
-const CAPTURE1 = 4;
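
The date helpers at the end of the removed macros.py are plain millisecond arithmetic on a JavaScript epoch timestamp, built from the msPer* constants above (DAY, HOUR_FROM_TIME, MIN_FROM_TIME, and so on). A small illustrative transcription in Ruby, with hypothetical helper names, shows what those formulas compute:

```ruby
# Illustrative transcription of the DAY/HOUR_FROM_TIME/... macros above.
MS_PER_DAY    = 86_400_000
MS_PER_HOUR   = 3_600_000
MS_PER_MINUTE = 60_000
MS_PER_SECOND = 1_000

def day(t);       t / MS_PER_DAY;           end  # DAY(time)
def hour_from(t); (t / MS_PER_HOUR)   % 24; end  # HOUR_FROM_TIME(time)
def min_from(t);  (t / MS_PER_MINUTE) % 60; end  # MIN_FROM_TIME(time)
def sec_from(t);  (t / MS_PER_SECOND) % 60; end  # SEC_FROM_TIME(time)
def ms_from(t);   t % MS_PER_SECOND;        end  # MS_FROM_TIME(time)

t = 1_234_567_890_123  # 2009-02-13 23:31:30.123 UTC as JS milliseconds
p [day(t), hour_from(t), min_from(t), sec_from(t), ms_from(t)]
# => [14288, 23, 31, 30, 123]
```

The month, date, and year macros additionally go through FromJulianDay with the kDayZeroInJulianDay offset, which is why they are not reproduced here.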
@@ -1,2007 +0,0 @@
|
|
1
|
-
// Copyright 2006-2008 the V8 project authors. All rights reserved.
|
2
|
-
// Redistribution and use in source and binary forms, with or without
|
3
|
-
// modification, are permitted provided that the following conditions are
|
4
|
-
// met:
|
5
|
-
//
|
6
|
-
// * Redistributions of source code must retain the above copyright
|
7
|
-
// notice, this list of conditions and the following disclaimer.
|
8
|
-
// * Redistributions in binary form must reproduce the above
|
9
|
-
// copyright notice, this list of conditions and the following
|
10
|
-
// disclaimer in the documentation and/or other materials provided
|
11
|
-
// with the distribution.
|
12
|
-
// * Neither the name of Google Inc. nor the names of its
|
13
|
-
// contributors may be used to endorse or promote products derived
|
14
|
-
// from this software without specific prior written permission.
|
15
|
-
//
|
16
|
-
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
17
|
-
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
18
|
-
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
19
|
-
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
20
|
-
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
21
|
-
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
22
|
-
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
23
|
-
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
24
|
-
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
25
|
-
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
26
|
-
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
27
|
-
|
28
|
-
#include "v8.h"
|
29
|
-
|
30
|
-
#include "execution.h"
|
31
|
-
#include "global-handles.h"
|
32
|
-
#include "ic-inl.h"
|
33
|
-
#include "mark-compact.h"
|
34
|
-
#include "stub-cache.h"
|
35
|
-
|
36
|
-
namespace v8 {
|
37
|
-
namespace internal {
|
38
|
-
|
39
|
-
// -------------------------------------------------------------------------
|
40
|
-
// MarkCompactCollector
|
41
|
-
|
42
|
-
bool MarkCompactCollector::force_compaction_ = false;
|
43
|
-
bool MarkCompactCollector::compacting_collection_ = false;
|
44
|
-
bool MarkCompactCollector::compact_on_next_gc_ = false;
|
45
|
-
|
46
|
-
int MarkCompactCollector::previous_marked_count_ = 0;
|
47
|
-
GCTracer* MarkCompactCollector::tracer_ = NULL;
|
48
|
-
|
49
|
-
|
50
|
-
#ifdef DEBUG
|
51
|
-
MarkCompactCollector::CollectorState MarkCompactCollector::state_ = IDLE;
|
52
|
-
|
53
|
-
// Counters used for debugging the marking phase of mark-compact or mark-sweep
|
54
|
-
// collection.
|
55
|
-
int MarkCompactCollector::live_bytes_ = 0;
|
56
|
-
int MarkCompactCollector::live_young_objects_ = 0;
|
57
|
-
int MarkCompactCollector::live_old_data_objects_ = 0;
|
58
|
-
int MarkCompactCollector::live_old_pointer_objects_ = 0;
|
59
|
-
int MarkCompactCollector::live_code_objects_ = 0;
|
60
|
-
int MarkCompactCollector::live_map_objects_ = 0;
|
61
|
-
int MarkCompactCollector::live_cell_objects_ = 0;
|
62
|
-
int MarkCompactCollector::live_lo_objects_ = 0;
|
63
|
-
#endif
|
64
|
-
|
65
|
-
void MarkCompactCollector::CollectGarbage() {
|
66
|
-
// Make sure that Prepare() has been called. The individual steps below will
|
67
|
-
// update the state as they proceed.
|
68
|
-
ASSERT(state_ == PREPARE_GC);
|
69
|
-
|
70
|
-
// Prepare has selected whether to compact the old generation or not.
|
71
|
-
// Tell the tracer.
|
72
|
-
if (IsCompacting()) tracer_->set_is_compacting();
|
73
|
-
|
74
|
-
MarkLiveObjects();
|
75
|
-
|
76
|
-
if (FLAG_collect_maps) ClearNonLiveTransitions();
|
77
|
-
|
78
|
-
SweepLargeObjectSpace();
|
79
|
-
|
80
|
-
if (IsCompacting()) {
|
81
|
-
EncodeForwardingAddresses();
|
82
|
-
|
83
|
-
UpdatePointers();
|
84
|
-
|
85
|
-
RelocateObjects();
|
86
|
-
|
87
|
-
RebuildRSets();
|
88
|
-
|
89
|
-
} else {
|
90
|
-
SweepSpaces();
|
91
|
-
}
|
92
|
-
|
93
|
-
Finish();
|
94
|
-
|
95
|
-
// Save the count of marked objects remaining after the collection and
|
96
|
-
// null out the GC tracer.
|
97
|
-
previous_marked_count_ = tracer_->marked_count();
|
98
|
-
ASSERT(previous_marked_count_ == 0);
|
99
|
-
tracer_ = NULL;
|
100
|
-
}
|
101
|
-
|
102
|
-
|
103
|
-
void MarkCompactCollector::Prepare(GCTracer* tracer) {
|
104
|
-
// Rather than passing the tracer around we stash it in a static member
|
105
|
-
// variable.
|
106
|
-
tracer_ = tracer;
|
107
|
-
|
108
|
-
#ifdef DEBUG
|
109
|
-
ASSERT(state_ == IDLE);
|
110
|
-
state_ = PREPARE_GC;
|
111
|
-
#endif
|
112
|
-
ASSERT(!FLAG_always_compact || !FLAG_never_compact);
|
113
|
-
|
114
|
-
compacting_collection_ =
|
115
|
-
FLAG_always_compact || force_compaction_ || compact_on_next_gc_;
|
116
|
-
compact_on_next_gc_ = false;
|
117
|
-
|
118
|
-
if (FLAG_never_compact) compacting_collection_ = false;
|
119
|
-
if (!Heap::map_space()->MapPointersEncodable())
|
120
|
-
compacting_collection_ = false;
|
121
|
-
if (FLAG_collect_maps) CreateBackPointers();
|
122
|
-
|
123
|
-
#ifdef DEBUG
|
124
|
-
if (compacting_collection_) {
|
125
|
-
// We will write bookkeeping information to the remembered set area
|
126
|
-
// starting now.
|
127
|
-
Page::set_rset_state(Page::NOT_IN_USE);
|
128
|
-
}
|
129
|
-
#endif
|
130
|
-
|
131
|
-
PagedSpaces spaces;
|
132
|
-
while (PagedSpace* space = spaces.next()) {
|
133
|
-
space->PrepareForMarkCompact(compacting_collection_);
|
134
|
-
}
|
135
|
-
|
136
|
-
#ifdef DEBUG
|
137
|
-
live_bytes_ = 0;
|
138
|
-
live_young_objects_ = 0;
|
139
|
-
live_old_pointer_objects_ = 0;
|
140
|
-
live_old_data_objects_ = 0;
|
141
|
-
live_code_objects_ = 0;
|
142
|
-
live_map_objects_ = 0;
|
143
|
-
live_cell_objects_ = 0;
|
144
|
-
live_lo_objects_ = 0;
|
145
|
-
#endif
|
146
|
-
}
|
147
|
-
|
148
|
-
|
149
|
-
void MarkCompactCollector::Finish() {
|
150
|
-
#ifdef DEBUG
|
151
|
-
ASSERT(state_ == SWEEP_SPACES || state_ == REBUILD_RSETS);
|
152
|
-
state_ = IDLE;
|
153
|
-
#endif
|
154
|
-
// The stub cache is not traversed during GC; clear the cache to
|
155
|
-
// force lazy re-initialization of it. This must be done after the
|
156
|
-
// GC, because it relies on the new address of certain old space
|
157
|
-
// objects (empty string, illegal builtin).
|
158
|
-
StubCache::Clear();
|
159
|
-
|
160
|
-
ExternalStringTable::CleanUp();
|
161
|
-
|
162
|
-
// If we've just compacted old space there's no reason to check the
|
163
|
-
// fragmentation limit. Just return.
|
164
|
-
if (HasCompacted()) return;
|
165
|
-
|
166
|
-
// We compact the old generation on the next GC if it has gotten too
|
167
|
-
// fragmented (ie, we could recover an expected amount of space by
|
168
|
-
// reclaiming the waste and free list blocks).
|
169
|
-
static const int kFragmentationLimit = 15; // Percent.
|
170
|
-
static const int kFragmentationAllowed = 1 * MB; // Absolute.
|
171
|
-
int old_gen_recoverable = 0;
|
172
|
-
int old_gen_used = 0;
|
173
|
-
|
174
|
-
OldSpaces spaces;
|
175
|
-
while (OldSpace* space = spaces.next()) {
|
176
|
-
old_gen_recoverable += space->Waste() + space->AvailableFree();
|
177
|
-
old_gen_used += space->Size();
|
178
|
-
}
|
179
|
-
|
180
|
-
int old_gen_fragmentation =
|
181
|
-
static_cast<int>((old_gen_recoverable * 100.0) / old_gen_used);
|
182
|
-
if (old_gen_fragmentation > kFragmentationLimit &&
|
183
|
-
old_gen_recoverable > kFragmentationAllowed) {
|
184
|
-
compact_on_next_gc_ = true;
|
185
|
-
}
|
186
|
-
}
|
187
|
-
|
188
|
-
|
189
|
-
// -------------------------------------------------------------------------
|
190
|
-
// Phase 1: tracing and marking live objects.
|
191
|
-
// before: all objects are in normal state.
|
192
|
-
// after: a live object's map pointer is marked as '00'.
|
193
|
-
|
194
|
-
// Marking all live objects in the heap as part of mark-sweep or mark-compact
|
195
|
-
// collection. Before marking, all objects are in their normal state. After
|
196
|
-
// marking, live objects' map pointers are marked indicating that the object
|
197
|
-
// has been found reachable.
|
198
|
-
//
|
199
|
-
// The marking algorithm is a (mostly) depth-first (because of possible stack
|
200
|
-
// overflow) traversal of the graph of objects reachable from the roots. It
|
201
|
-
// uses an explicit stack of pointers rather than recursion. The young
|
202
|
-
// generation's inactive ('from') space is used as a marking stack. The
|
203
|
-
// objects in the marking stack are the ones that have been reached and marked
|
204
|
-
// but their children have not yet been visited.
|
205
|
-
//
|
206
|
-
// The marking stack can overflow during traversal. In that case, we set an
|
207
|
-
// overflow flag. When the overflow flag is set, we continue marking objects
|
208
|
-
// reachable from the objects on the marking stack, but no longer push them on
|
209
|
-
// the marking stack. Instead, we mark them as both marked and overflowed.
|
210
|
-
// When the stack is in the overflowed state, objects marked as overflowed
|
211
|
-
// have been reached and marked but their children have not been visited yet.
|
212
|
-
// After emptying the marking stack, we clear the overflow flag and traverse
|
213
|
-
// the heap looking for objects marked as overflowed, push them on the stack,
|
214
|
-
// and continue with marking. This process repeats until all reachable
|
215
|
-
// objects have been marked.
|
216
|
-
|
217
|
-
static MarkingStack marking_stack;
|

static inline HeapObject* ShortCircuitConsString(Object** p) {
  // Optimization: If the heap object pointed to by p is a non-symbol
  // cons string whose right substring is Heap::empty_string, update
  // it in place to its left substring. Return the updated value.
  //
  // Here we assume that if we change *p, we replace it with a heap object
  // (ie, the left substring of a cons string is always a heap object).
  //
  // The check performed is:
  // object->IsConsString() && !object->IsSymbol() &&
  // (ConsString::cast(object)->second() == Heap::empty_string())
  // except the maps for the object and its possible substrings might be
  // marked.
  HeapObject* object = HeapObject::cast(*p);
  MapWord map_word = object->map_word();
  map_word.ClearMark();
  InstanceType type = map_word.ToMap()->instance_type();
  if ((type & kShortcutTypeMask) != kShortcutTypeTag) return object;

  Object* second = reinterpret_cast<ConsString*>(object)->unchecked_second();
  if (second != Heap::raw_unchecked_empty_string()) {
    return object;
  }

  // Since we don't have the object's start, it is impossible to update the
  // remembered set. Therefore, we only replace the string with its left
  // substring when the remembered set does not change.
  Object* first = reinterpret_cast<ConsString*>(object)->unchecked_first();
  if (!Heap::InNewSpace(object) && Heap::InNewSpace(first)) return object;

  *p = first;
  return HeapObject::cast(first);
}
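ShortCircuitConsString above collapses a cons string whose right part is empty down to its left part while following a pointer. A rough standalone analogue with a toy string representation (ToyString and ShortCircuit are made-up names; the GC details about marked maps and remembered sets are omitted):

#include <string>

// Toy cons-string: either a leaf or a pair of children. Purely illustrative.
struct ToyString {
  std::string leaf;           // used when left == nullptr
  ToyString* left = nullptr;
  ToyString* right = nullptr;
  bool IsCons() const { return left != nullptr; }
  bool IsEmptyLeaf() const { return !IsCons() && leaf.empty(); }
};

// If *p is a cons whose right side is empty, redirect *p to the left side,
// in the spirit of the short-circuit above.
static ToyString* ShortCircuit(ToyString** p) {
  ToyString* s = *p;
  if (!s->IsCons()) return s;
  if (!s->right->IsEmptyLeaf()) return s;
  *p = s->left;
  return s->left;
}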
// Helper class for marking pointers in HeapObjects.
class MarkingVisitor : public ObjectVisitor {
 public:
  void VisitPointer(Object** p) {
    MarkObjectByPointer(p);
  }

  void VisitPointers(Object** start, Object** end) {
    // Mark all objects pointed to in [start, end).
    const int kMinRangeForMarkingRecursion = 64;
    if (end - start >= kMinRangeForMarkingRecursion) {
      if (VisitUnmarkedObjects(start, end)) return;
      // We are close to a stack overflow, so just mark the objects.
    }
    for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
  }

  void VisitCodeTarget(RelocInfo* rinfo) {
    ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
    Code* code = Code::GetCodeFromTargetAddress(rinfo->target_address());
    if (FLAG_cleanup_ics_at_gc && code->is_inline_cache_stub()) {
      IC::Clear(rinfo->pc());
      // Please note targets for cleared inline caches do not have to be
      // marked since they are contained in Heap::non_monomorphic_cache().
    } else {
      MarkCompactCollector::MarkObject(code);
    }
  }

  void VisitDebugTarget(RelocInfo* rinfo) {
    ASSERT(RelocInfo::IsJSReturn(rinfo->rmode()) &&
           rinfo->IsPatchedReturnSequence());
    HeapObject* code = Code::GetCodeFromTargetAddress(rinfo->call_address());
    MarkCompactCollector::MarkObject(code);
  }

 private:
  // Mark object pointed to by p.
  void MarkObjectByPointer(Object** p) {
    if (!(*p)->IsHeapObject()) return;
    HeapObject* object = ShortCircuitConsString(p);
    MarkCompactCollector::MarkObject(object);
  }

  // Tells whether the mark sweep collection will perform compaction.
  bool IsCompacting() { return MarkCompactCollector::IsCompacting(); }

  // Visit an unmarked object.
  void VisitUnmarkedObject(HeapObject* obj) {
#ifdef DEBUG
    ASSERT(Heap::Contains(obj));
    ASSERT(!obj->IsMarked());
#endif
    Map* map = obj->map();
    MarkCompactCollector::SetMark(obj);
    // Mark the map pointer and the body.
    MarkCompactCollector::MarkObject(map);
    obj->IterateBody(map->instance_type(), obj->SizeFromMap(map), this);
  }

  // Visit all unmarked objects pointed to by [start, end).
  // Returns false if the operation fails (lack of stack space).
  inline bool VisitUnmarkedObjects(Object** start, Object** end) {
    // Return false if we are close to the stack limit.
    StackLimitCheck check;
    if (check.HasOverflowed()) return false;

    // Visit the unmarked objects.
    for (Object** p = start; p < end; p++) {
      if (!(*p)->IsHeapObject()) continue;
      HeapObject* obj = HeapObject::cast(*p);
      if (obj->IsMarked()) continue;
      VisitUnmarkedObject(obj);
    }
    return true;
  }
};
// Visitor class for marking heap roots.
class RootMarkingVisitor : public ObjectVisitor {
 public:
  void VisitPointer(Object** p) {
    MarkObjectByPointer(p);
  }

  void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
  }

  MarkingVisitor* stack_visitor() { return &stack_visitor_; }

 private:
  MarkingVisitor stack_visitor_;

  void MarkObjectByPointer(Object** p) {
    if (!(*p)->IsHeapObject()) return;

    // Replace flat cons strings in place.
    HeapObject* object = ShortCircuitConsString(p);
    if (object->IsMarked()) return;

    Map* map = object->map();
    // Mark the object.
    MarkCompactCollector::SetMark(object);
    // Mark the map pointer and body, and push them on the marking stack.
    MarkCompactCollector::MarkObject(map);
    object->IterateBody(map->instance_type(), object->SizeFromMap(map),
                        &stack_visitor_);

    // Mark all the objects reachable from the map and body. May leave
    // overflowed objects in the heap.
    MarkCompactCollector::EmptyMarkingStack(&stack_visitor_);
  }
};


// Helper class for pruning the symbol table.
class SymbolTableCleaner : public ObjectVisitor {
 public:
  SymbolTableCleaner() : pointers_removed_(0) { }

  virtual void VisitPointers(Object** start, Object** end) {
    // Visit all HeapObject pointers in [start, end).
    for (Object** p = start; p < end; p++) {
      if ((*p)->IsHeapObject() && !HeapObject::cast(*p)->IsMarked()) {
        // Check if the symbol being pruned is an external symbol. We need to
        // delete the associated external data as this symbol is going away.

        // Since no objects have yet been moved we can safely access the map of
        // the object.
        if ((*p)->IsExternalString()) {
          Heap::FinalizeExternalString(String::cast(*p));
        }
        // Set the entry to null_value (as deleted).
        *p = Heap::raw_unchecked_null_value();
        pointers_removed_++;
      }
    }
  }

  int PointersRemoved() {
    return pointers_removed_;
  }

 private:
  int pointers_removed_;
};
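SymbolTableCleaner above walks the table's entries, overwrites any unmarked (dead) entry with a deleted sentinel, and counts removals so the table can shrink its element count afterwards. A small generic sketch of the same pruning pattern over a vector of weak entries (PruneWeakEntries, IsAlive, and kDeletedSentinel are illustrative stand-ins):

#include <cstddef>
#include <vector>

// Prune a weak table in place: replace every entry whose referent is dead
// with a sentinel and report how many were removed, as the cleaner above
// does. Finalization of external resources would happen before the slot is
// overwritten in the real collector.
template <typename T>
std::size_t PruneWeakEntries(std::vector<T*>& table,
                             bool (*IsAlive)(const T*),
                             T* kDeletedSentinel) {
  std::size_t removed = 0;
  for (T*& entry : table) {
    if (entry != kDeletedSentinel && !IsAlive(entry)) {
      entry = kDeletedSentinel;
      ++removed;
    }
  }
  return removed;
}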
void MarkCompactCollector::MarkUnmarkedObject(HeapObject* object) {
  ASSERT(!object->IsMarked());
  ASSERT(Heap::Contains(object));
  if (object->IsMap()) {
    Map* map = Map::cast(object);
    if (FLAG_cleanup_caches_in_maps_at_gc) {
      map->ClearCodeCache();
    }
    SetMark(map);
    if (FLAG_collect_maps &&
        map->instance_type() >= FIRST_JS_OBJECT_TYPE &&
        map->instance_type() <= JS_FUNCTION_TYPE) {
      MarkMapContents(map);
    } else {
      marking_stack.Push(map);
    }
  } else {
    SetMark(object);
    marking_stack.Push(object);
  }
}


void MarkCompactCollector::MarkMapContents(Map* map) {
  MarkDescriptorArray(reinterpret_cast<DescriptorArray*>(
      *HeapObject::RawField(map, Map::kInstanceDescriptorsOffset)));

  // Mark the Object* fields of the Map.
  // Since the descriptor array has been marked already, it is fine
  // that one of these fields contains a pointer to it.
  MarkingVisitor visitor;  // Has no state or contents.
  visitor.VisitPointers(HeapObject::RawField(map, Map::kPrototypeOffset),
                        HeapObject::RawField(map, Map::kSize));
}


void MarkCompactCollector::MarkDescriptorArray(
    DescriptorArray* descriptors) {
  if (descriptors->IsMarked()) return;
  // Empty descriptor array is marked as a root before any maps are marked.
  ASSERT(descriptors != Heap::raw_unchecked_empty_descriptor_array());
  SetMark(descriptors);

  FixedArray* contents = reinterpret_cast<FixedArray*>(
      descriptors->get(DescriptorArray::kContentArrayIndex));
  ASSERT(contents->IsHeapObject());
  ASSERT(!contents->IsMarked());
  ASSERT(contents->IsFixedArray());
  ASSERT(contents->length() >= 2);
  SetMark(contents);
  // Contents contains (value, details) pairs. If the details say
  // that the type of descriptor is MAP_TRANSITION, CONSTANT_TRANSITION,
  // or NULL_DESCRIPTOR, we don't mark the value as live. Only for
  // type MAP_TRANSITION is the value an Object* (a Map*).
  for (int i = 0; i < contents->length(); i += 2) {
    // If the pair (value, details) at index i, i+1 is not
    // a transition or null descriptor, mark the value.
    PropertyDetails details(Smi::cast(contents->get(i + 1)));
    if (details.type() < FIRST_PHANTOM_PROPERTY_TYPE) {
      HeapObject* object = reinterpret_cast<HeapObject*>(contents->get(i));
      if (object->IsHeapObject() && !object->IsMarked()) {
        SetMark(object);
        marking_stack.Push(object);
      }
    }
  }
  // The DescriptorArray descriptors contains a pointer to its contents array,
  // but the contents array is already marked.
  marking_stack.Push(descriptors);
}


void MarkCompactCollector::CreateBackPointers() {
  HeapObjectIterator iterator(Heap::map_space());
  while (iterator.has_next()) {
    Object* next_object = iterator.next();
    if (next_object->IsMap()) {  // Could also be ByteArray on free list.
      Map* map = Map::cast(next_object);
      if (map->instance_type() >= FIRST_JS_OBJECT_TYPE &&
          map->instance_type() <= JS_FUNCTION_TYPE) {
        map->CreateBackPointers();
      } else {
        ASSERT(map->instance_descriptors() == Heap::empty_descriptor_array());
      }
    }
  }
}
static int OverflowObjectSize(HeapObject* obj) {
  // Recover the normal map pointer, it might be marked as live and
  // overflowed.
  MapWord map_word = obj->map_word();
  map_word.ClearMark();
  map_word.ClearOverflow();
  return obj->SizeFromMap(map_word.ToMap());
}


// Fill the marking stack with overflowed objects returned by the given
// iterator. Stop when the marking stack is filled or the end of the space
// is reached, whichever comes first.
template<class T>
static void ScanOverflowedObjects(T* it) {
  // The caller should ensure that the marking stack is initially not full,
  // so that we don't waste effort pointlessly scanning for objects.
  ASSERT(!marking_stack.is_full());

  while (it->has_next()) {
    HeapObject* object = it->next();
    if (object->IsOverflowed()) {
      object->ClearOverflow();
      ASSERT(object->IsMarked());
      ASSERT(Heap::Contains(object));
      marking_stack.Push(object);
      if (marking_stack.is_full()) return;
    }
  }
}


bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) {
  return (*p)->IsHeapObject() && !HeapObject::cast(*p)->IsMarked();
}


void MarkCompactCollector::MarkSymbolTable() {
  SymbolTable* symbol_table = Heap::raw_unchecked_symbol_table();
  // Mark the symbol table itself.
  SetMark(symbol_table);
  // Explicitly mark the prefix.
  MarkingVisitor marker;
  symbol_table->IteratePrefix(&marker);
  ProcessMarkingStack(&marker);
}


void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
  // Mark the heap roots including global variables, stack variables,
  // etc., and all objects reachable from them.
  Heap::IterateStrongRoots(visitor, VISIT_ONLY_STRONG);

  // Handle the symbol table specially.
  MarkSymbolTable();

  // There may be overflowed objects in the heap. Visit them now.
  while (marking_stack.overflowed()) {
    RefillMarkingStack();
    EmptyMarkingStack(visitor->stack_visitor());
  }
}
void MarkCompactCollector::MarkObjectGroups() {
  List<ObjectGroup*>* object_groups = GlobalHandles::ObjectGroups();

  for (int i = 0; i < object_groups->length(); i++) {
    ObjectGroup* entry = object_groups->at(i);
    if (entry == NULL) continue;

    List<Object**>& objects = entry->objects_;
    bool group_marked = false;
    for (int j = 0; j < objects.length(); j++) {
      Object* object = *objects[j];
      if (object->IsHeapObject() && HeapObject::cast(object)->IsMarked()) {
        group_marked = true;
        break;
      }
    }

    if (!group_marked) continue;

    // An object in the group is marked, so mark as gray all white heap
    // objects in the group.
    for (int j = 0; j < objects.length(); ++j) {
      if ((*objects[j])->IsHeapObject()) {
        MarkObject(HeapObject::cast(*objects[j]));
      }
    }
    // Once the entire group has been colored gray, set the object group
    // to NULL so it won't be processed again.
    delete object_groups->at(i);
    object_groups->at(i) = NULL;
  }
}


// Mark all objects reachable from the objects on the marking stack.
// Before: the marking stack contains zero or more heap object pointers.
// After: the marking stack is empty, and all objects reachable from the
// marking stack have been marked, or are overflowed in the heap.
void MarkCompactCollector::EmptyMarkingStack(MarkingVisitor* visitor) {
  while (!marking_stack.is_empty()) {
    HeapObject* object = marking_stack.Pop();
    ASSERT(object->IsHeapObject());
    ASSERT(Heap::Contains(object));
    ASSERT(object->IsMarked());
    ASSERT(!object->IsOverflowed());

    // Because the object is marked, we have to recover the original map
    // pointer and use it to mark the object's body.
    MapWord map_word = object->map_word();
    map_word.ClearMark();
    Map* map = map_word.ToMap();
    MarkObject(map);
    object->IterateBody(map->instance_type(), object->SizeFromMap(map),
                        visitor);
  }
}


// Sweep the heap for overflowed objects, clear their overflow bits, and
// push them on the marking stack. Stop early if the marking stack fills
// before sweeping completes. If sweeping completes, there are no remaining
// overflowed objects in the heap so the overflow flag on the marking stack
// is cleared.
void MarkCompactCollector::RefillMarkingStack() {
  ASSERT(marking_stack.overflowed());

  SemiSpaceIterator new_it(Heap::new_space(), &OverflowObjectSize);
  ScanOverflowedObjects(&new_it);
  if (marking_stack.is_full()) return;

  HeapObjectIterator old_pointer_it(Heap::old_pointer_space(),
                                    &OverflowObjectSize);
  ScanOverflowedObjects(&old_pointer_it);
  if (marking_stack.is_full()) return;

  HeapObjectIterator old_data_it(Heap::old_data_space(), &OverflowObjectSize);
  ScanOverflowedObjects(&old_data_it);
  if (marking_stack.is_full()) return;

  HeapObjectIterator code_it(Heap::code_space(), &OverflowObjectSize);
  ScanOverflowedObjects(&code_it);
  if (marking_stack.is_full()) return;

  HeapObjectIterator map_it(Heap::map_space(), &OverflowObjectSize);
  ScanOverflowedObjects(&map_it);
  if (marking_stack.is_full()) return;

  HeapObjectIterator cell_it(Heap::cell_space(), &OverflowObjectSize);
  ScanOverflowedObjects(&cell_it);
  if (marking_stack.is_full()) return;

  LargeObjectIterator lo_it(Heap::lo_space(), &OverflowObjectSize);
  ScanOverflowedObjects(&lo_it);
  if (marking_stack.is_full()) return;

  marking_stack.clear_overflowed();
}


// Mark all objects reachable (transitively) from objects on the marking
// stack. Before: the marking stack contains zero or more heap object
// pointers. After: the marking stack is empty and there are no overflowed
// objects in the heap.
void MarkCompactCollector::ProcessMarkingStack(MarkingVisitor* visitor) {
  EmptyMarkingStack(visitor);
  while (marking_stack.overflowed()) {
    RefillMarkingStack();
    EmptyMarkingStack(visitor);
  }
}


void MarkCompactCollector::ProcessObjectGroups(MarkingVisitor* visitor) {
  bool work_to_do = true;
  ASSERT(marking_stack.is_empty());
  while (work_to_do) {
    MarkObjectGroups();
    work_to_do = !marking_stack.is_empty();
    ProcessMarkingStack(visitor);
  }
}


void MarkCompactCollector::MarkLiveObjects() {
#ifdef DEBUG
  ASSERT(state_ == PREPARE_GC);
  state_ = MARK_LIVE_OBJECTS;
#endif
  // The to space contains live objects, the from space is used as a marking
  // stack.
  marking_stack.Initialize(Heap::new_space()->FromSpaceLow(),
                           Heap::new_space()->FromSpaceHigh());

  ASSERT(!marking_stack.overflowed());

  RootMarkingVisitor root_visitor;
  MarkRoots(&root_visitor);

  // The objects reachable from the roots are marked, yet unreachable
  // objects are unmarked. Mark objects reachable from object groups
  // containing at least one marked object, and continue until no new
  // objects are reachable from the object groups.
  ProcessObjectGroups(root_visitor.stack_visitor());

  // The objects reachable from the roots or object groups are marked,
  // yet unreachable objects are unmarked. Mark objects reachable
  // only from weak global handles.
  //
  // First we identify nonlive weak handles and mark them as pending
  // destruction.
  GlobalHandles::IdentifyWeakHandles(&IsUnmarkedHeapObject);
  // Then we mark the objects and process the transitive closure.
  GlobalHandles::IterateWeakRoots(&root_visitor);
  while (marking_stack.overflowed()) {
    RefillMarkingStack();
    EmptyMarkingStack(root_visitor.stack_visitor());
  }

  // Repeat the object groups to mark unmarked groups reachable from the
  // weak roots.
  ProcessObjectGroups(root_visitor.stack_visitor());

  // Prune the symbol table removing all symbols only pointed to by the
  // symbol table. Cannot use symbol_table() here because the symbol
  // table is marked.
  SymbolTable* symbol_table = Heap::raw_unchecked_symbol_table();
  SymbolTableCleaner v;
  symbol_table->IterateElements(&v);
  symbol_table->ElementsRemoved(v.PointersRemoved());
  ExternalStringTable::Iterate(&v);
  ExternalStringTable::CleanUp();

  // Remove object groups after marking phase.
  GlobalHandles::RemoveObjectGroups();
}
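ProcessMarkingStack and MarkLiveObjects above drive marking to a fixpoint: drain the stack, and while the overflow flag is set, rescan the heap for flagged objects, refill, and drain again. A compact sketch of that drain-and-refill loop over a generic work list (MarkWork and DrainToFixpoint are illustrative; the callbacks stand in for visiting children and rescanning the heap):

#include <functional>
#include <vector>

// Work list plus an "overflow" side channel, as in the collector above:
// when the bounded stack cannot take more work, items are merely flagged
// and recovered later by a full rescan.
struct MarkWork {
  std::vector<int> stack;   // pending node ids
  bool overflowed = false;
};

// Drive marking to a fixpoint. visit_children pushes the unmarked children
// of a node (and may set work.overflowed); scan_for_overflowed refills the
// stack from flagged nodes and clears the flag once none remain.
inline void DrainToFixpoint(
    MarkWork& work,
    const std::function<void(int, MarkWork&)>& visit_children,
    const std::function<void(MarkWork&)>& scan_for_overflowed) {
  do {
    while (!work.stack.empty()) {
      int node = work.stack.back();
      work.stack.pop_back();
      visit_children(node, work);  // may push more work or set the flag
    }
    if (work.overflowed) scan_for_overflowed(work);  // refill, maybe clear
  } while (!work.stack.empty() || work.overflowed);
}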
static int CountMarkedCallback(HeapObject* obj) {
  MapWord map_word = obj->map_word();
  map_word.ClearMark();
  return obj->SizeFromMap(map_word.ToMap());
}


#ifdef DEBUG
void MarkCompactCollector::UpdateLiveObjectCount(HeapObject* obj) {
  live_bytes_ += obj->Size();
  if (Heap::new_space()->Contains(obj)) {
    live_young_objects_++;
  } else if (Heap::map_space()->Contains(obj)) {
    ASSERT(obj->IsMap());
    live_map_objects_++;
  } else if (Heap::cell_space()->Contains(obj)) {
    ASSERT(obj->IsJSGlobalPropertyCell());
    live_cell_objects_++;
  } else if (Heap::old_pointer_space()->Contains(obj)) {
    live_old_pointer_objects_++;
  } else if (Heap::old_data_space()->Contains(obj)) {
    live_old_data_objects_++;
  } else if (Heap::code_space()->Contains(obj)) {
    live_code_objects_++;
  } else if (Heap::lo_space()->Contains(obj)) {
    live_lo_objects_++;
  } else {
    UNREACHABLE();
  }
}
#endif  // DEBUG


void MarkCompactCollector::SweepLargeObjectSpace() {
#ifdef DEBUG
  ASSERT(state_ == MARK_LIVE_OBJECTS);
  state_ =
      compacting_collection_ ? ENCODE_FORWARDING_ADDRESSES : SWEEP_SPACES;
#endif
  // Deallocate unmarked objects and clear marked bits for marked objects.
  Heap::lo_space()->FreeUnmarkedObjects();
}

// Safe to use during marking phase only.
bool MarkCompactCollector::SafeIsMap(HeapObject* object) {
  MapWord metamap = object->map_word();
  metamap.ClearMark();
  return metamap.ToMap()->instance_type() == MAP_TYPE;
}

void MarkCompactCollector::ClearNonLiveTransitions() {
  HeapObjectIterator map_iterator(Heap::map_space(), &CountMarkedCallback);
  // Iterate over the map space, setting map transitions that go from
  // a marked map to an unmarked map to null transitions. At the same time,
  // set all the prototype fields of maps back to their original value,
  // dropping the back pointers temporarily stored in the prototype field.
  // Setting the prototype field requires following the linked list of
  // back pointers, reversing them all at once. This allows us to find
  // those maps with map transitions that need to be nulled, and only
  // scan the descriptor arrays of those maps, not all maps.
  // All of these actions are carried out only on maps of JSObjects
  // and related subtypes.
  while (map_iterator.has_next()) {
    Map* map = reinterpret_cast<Map*>(map_iterator.next());
    if (!map->IsMarked() && map->IsByteArray()) continue;

    ASSERT(SafeIsMap(map));
    // Only JSObject and subtypes have map transitions and back pointers.
    if (map->instance_type() < FIRST_JS_OBJECT_TYPE) continue;
    if (map->instance_type() > JS_FUNCTION_TYPE) continue;
    // Follow the chain of back pointers to find the prototype.
    Map* current = map;
    while (SafeIsMap(current)) {
      current = reinterpret_cast<Map*>(current->prototype());
      ASSERT(current->IsHeapObject());
    }
    Object* real_prototype = current;

    // Follow back pointers, setting them to prototype,
    // clearing map transitions when necessary.
    current = map;
    bool on_dead_path = !current->IsMarked();
    Object* next;
    while (SafeIsMap(current)) {
      next = current->prototype();
      // There should never be a dead map above a live map.
      ASSERT(on_dead_path || current->IsMarked());

      // A live map above a dead map indicates a dead transition.
      // This test will always be false on the first iteration.
      if (on_dead_path && current->IsMarked()) {
        on_dead_path = false;
        current->ClearNonLiveTransitions(real_prototype);
      }
      *HeapObject::RawField(current, Map::kPrototypeOffset) =
          real_prototype;
      current = reinterpret_cast<Map*>(next);
    }
  }
}
// -------------------------------------------------------------------------
// Phase 2: Encode forwarding addresses.
// When compacting, forwarding addresses for objects in old space and map
// space are encoded in their map pointer word (along with an encoding of
// their map pointers).
//
// The exact encoding is described in the comments for class MapWord in
// objects.h.
//
// An address range [start, end) can have both live and non-live objects.
// Maximal non-live regions are marked so they can be skipped on subsequent
// sweeps of the heap. A distinguished map-pointer encoding is used to mark
// free regions of one-word size (in which case the next word is the start
// of a live object). A second distinguished map-pointer encoding is used
// to mark free regions larger than one word, and the size of the free
// region (including the first word) is written to the second word of the
// region.
//
// Any valid map page offset must lie in the object area of the page, so map
// page offsets less than Page::kObjectStartOffset are invalid. We use a
// pair of distinguished invalid map encodings (for single word and multiple
// words) to indicate free regions in the page found during computation of
// forwarding addresses and skipped over in subsequent sweeps.
static const uint32_t kSingleFreeEncoding = 0;
static const uint32_t kMultiFreeEncoding = 1;


// Encode a free region, defined by the given start address and size, in the
// first word or two of the region.
void EncodeFreeRegion(Address free_start, int free_size) {
  ASSERT(free_size >= kIntSize);
  if (free_size == kIntSize) {
    Memory::uint32_at(free_start) = kSingleFreeEncoding;
  } else {
    ASSERT(free_size >= 2 * kIntSize);
    Memory::uint32_at(free_start) = kMultiFreeEncoding;
    Memory::int_at(free_start + kIntSize) = free_size;
  }

#ifdef DEBUG
  // Zap the body of the free region.
  if (FLAG_enable_slow_asserts) {
    for (int offset = 2 * kIntSize;
         offset < free_size;
         offset += kPointerSize) {
      Memory::Address_at(free_start + offset) = kZapValue;
    }
  }
#endif
}
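EncodeFreeRegion above tags a dead region in place: a one-word hole gets a single sentinel word, a larger hole gets a second word holding its total size, so later sweeps can skip it in O(1). A standalone sketch of writing and reading that encoding into a plain byte buffer (the namespace, constants, and functions here are local illustrations, not V8's):

#include <cassert>
#include <cstdint>
#include <cstring>

namespace free_region_sketch {

constexpr uint32_t kSingleFree = 0;  // one-word hole
constexpr uint32_t kMultiFree = 1;   // multi-word hole, size in next word

// Tag a hole of free_size bytes starting at p.
inline void Encode(uint8_t* p, uint32_t free_size) {
  assert(free_size >= sizeof(uint32_t));
  if (free_size == sizeof(uint32_t)) {
    std::memcpy(p, &kSingleFree, sizeof(uint32_t));
  } else {
    assert(free_size >= 2 * sizeof(uint32_t));
    std::memcpy(p, &kMultiFree, sizeof(uint32_t));
    std::memcpy(p + sizeof(uint32_t), &free_size, sizeof(uint32_t));
  }
}

// Given a tagged hole at p, return how many bytes to skip.
inline uint32_t SkipSize(const uint8_t* p) {
  uint32_t tag;
  std::memcpy(&tag, p, sizeof(uint32_t));
  if (tag == kSingleFree) return sizeof(uint32_t);
  uint32_t size;  // tag must be kMultiFree here
  std::memcpy(&size, p + sizeof(uint32_t), sizeof(uint32_t));
  return size;
}

}  // namespace free_region_sketch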
// Try to promote all objects in new space. Heap numbers and sequential
// strings are promoted to the code space, large objects to large object space,
// and all others to the old space.
inline Object* MCAllocateFromNewSpace(HeapObject* object, int object_size) {
  Object* forwarded;
  if (object_size > Heap::MaxObjectSizeInPagedSpace()) {
    forwarded = Failure::Exception();
  } else {
    OldSpace* target_space = Heap::TargetSpace(object);
    ASSERT(target_space == Heap::old_pointer_space() ||
           target_space == Heap::old_data_space());
    forwarded = target_space->MCAllocateRaw(object_size);
  }
  if (forwarded->IsFailure()) {
    forwarded = Heap::new_space()->MCAllocateRaw(object_size);
  }
  return forwarded;
}


// Allocation functions for the paged spaces call the space's MCAllocateRaw.
inline Object* MCAllocateFromOldPointerSpace(HeapObject* ignore,
                                             int object_size) {
  return Heap::old_pointer_space()->MCAllocateRaw(object_size);
}


inline Object* MCAllocateFromOldDataSpace(HeapObject* ignore, int object_size) {
  return Heap::old_data_space()->MCAllocateRaw(object_size);
}


inline Object* MCAllocateFromCodeSpace(HeapObject* ignore, int object_size) {
  return Heap::code_space()->MCAllocateRaw(object_size);
}


inline Object* MCAllocateFromMapSpace(HeapObject* ignore, int object_size) {
  return Heap::map_space()->MCAllocateRaw(object_size);
}


inline Object* MCAllocateFromCellSpace(HeapObject* ignore, int object_size) {
  return Heap::cell_space()->MCAllocateRaw(object_size);
}


// The forwarding address is encoded at the same offset as the current
// to-space object, but in from space.
inline void EncodeForwardingAddressInNewSpace(HeapObject* old_object,
                                              int object_size,
                                              Object* new_object,
                                              int* ignored) {
  int offset =
      Heap::new_space()->ToSpaceOffsetForAddress(old_object->address());
  Memory::Address_at(Heap::new_space()->FromSpaceLow() + offset) =
      HeapObject::cast(new_object)->address();
}


// The forwarding address is encoded in the map pointer of the object as an
// offset (in terms of live bytes) from the address of the first live object
// in the page.
inline void EncodeForwardingAddressInPagedSpace(HeapObject* old_object,
                                                int object_size,
                                                Object* new_object,
                                                int* offset) {
  // Record the forwarding address of the first live object if necessary.
  if (*offset == 0) {
    Page::FromAddress(old_object->address())->mc_first_forwarded =
        HeapObject::cast(new_object)->address();
  }

  MapWord encoding =
      MapWord::EncodeAddress(old_object->map()->address(), *offset);
  old_object->set_map_word(encoding);
  *offset += object_size;
  ASSERT(*offset <= Page::kObjectAreaSize);
}
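The paged-space encoder above stores, per page, the real destination of the page's first live object and records every other live object's target as a running live-byte offset from that base. A tiny arithmetic sketch of recovering a destination from those two pieces, ignoring the page-overflow handling the real collector needs (all names here are illustrative):

#include <cstdint>

// Per-page relocation info as described above: the absolute destination of
// the first live object in the page.
struct PageRelocation {
  uintptr_t first_forwarded;
};

// An object whose map word was overwritten with its live-byte offset.
struct EncodedObject {
  uint32_t live_byte_offset;  // bytes of live data preceding it in the page
};

// Destination = base destination of the page + live bytes before the object.
inline uintptr_t ForwardingAddress(const PageRelocation& page,
                                   const EncodedObject& obj) {
  return page.first_forwarded + obj.live_byte_offset;
}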
// Most non-live objects are ignored.
inline void IgnoreNonLiveObject(HeapObject* object) {}


// A code deletion event is logged for non-live code objects.
inline void LogNonLiveCodeObject(HeapObject* object) {
  if (object->IsCode()) LOG(CodeDeleteEvent(object->address()));
}


// Function template that, given a range of addresses (eg, a semispace or a
// paged space page), iterates through the objects in the range to clear
// mark bits and compute and encode forwarding addresses. As a side effect,
// maximal free chunks are marked so that they can be skipped on subsequent
// sweeps.
//
// The template parameters are an allocation function, a forwarding address
// encoding function, and a function to process non-live objects.
template<MarkCompactCollector::AllocationFunction Alloc,
         MarkCompactCollector::EncodingFunction Encode,
         MarkCompactCollector::ProcessNonLiveFunction ProcessNonLive>
inline void EncodeForwardingAddressesInRange(Address start,
                                             Address end,
                                             int* offset) {
  // The start address of the current free region while sweeping the space.
  // This address is set when a transition from live to non-live objects is
  // encountered. A value (an encoding of the 'next free region' pointer)
  // is written to memory at this address when a transition from non-live to
  // live objects is encountered.
  Address free_start = NULL;

  // A flag giving the state of the previously swept object. Initially true
  // to ensure that free_start is initialized to a proper address before
  // trying to write to it.
  bool is_prev_alive = true;

  int object_size;  // Will be set on each iteration of the loop.
  for (Address current = start; current < end; current += object_size) {
    HeapObject* object = HeapObject::FromAddress(current);
    if (object->IsMarked()) {
      object->ClearMark();
      MarkCompactCollector::tracer()->decrement_marked_count();
      object_size = object->Size();

      Object* forwarded = Alloc(object, object_size);
      // Allocation cannot fail, because we are compacting the space.
      ASSERT(!forwarded->IsFailure());
      Encode(object, object_size, forwarded, offset);

#ifdef DEBUG
      if (FLAG_gc_verbose) {
        PrintF("forward %p -> %p.\n", object->address(),
               HeapObject::cast(forwarded)->address());
      }
#endif
      if (!is_prev_alive) {  // Transition from non-live to live.
        EncodeFreeRegion(free_start, static_cast<int>(current - free_start));
        is_prev_alive = true;
      }
    } else {  // Non-live object.
      object_size = object->Size();
      ProcessNonLive(object);
      if (is_prev_alive) {  // Transition from live to non-live.
        free_start = current;
        is_prev_alive = false;
      }
    }
  }

  // If we ended on a free region, mark it.
  if (!is_prev_alive) {
    EncodeFreeRegion(free_start, static_cast<int>(end - free_start));
  }
}


// Functions to encode the forwarding pointers in each compactable space.
void MarkCompactCollector::EncodeForwardingAddressesInNewSpace() {
  int ignored;
  EncodeForwardingAddressesInRange<MCAllocateFromNewSpace,
                                   EncodeForwardingAddressInNewSpace,
                                   IgnoreNonLiveObject>(
      Heap::new_space()->bottom(),
      Heap::new_space()->top(),
      &ignored);
}


template<MarkCompactCollector::AllocationFunction Alloc,
         MarkCompactCollector::ProcessNonLiveFunction ProcessNonLive>
void MarkCompactCollector::EncodeForwardingAddressesInPagedSpace(
    PagedSpace* space) {
  PageIterator it(space, PageIterator::PAGES_IN_USE);
  while (it.has_next()) {
    Page* p = it.next();
    // The offset of each live object in the page from the first live object
    // in the page.
    int offset = 0;
    EncodeForwardingAddressesInRange<Alloc,
                                     EncodeForwardingAddressInPagedSpace,
                                     ProcessNonLive>(
        p->ObjectAreaStart(),
        p->AllocationTop(),
        &offset);
  }
}
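EncodeForwardingAddressesInRange above is a single linear walk that tracks live/dead transitions so each maximal dead run is tagged exactly once. A minimal sketch of the same transition-tracking walk over an array of records (Record and LiveDeadWalk are illustrative names):

#include <cstddef>
#include <functional>

struct Record {
  bool live;
  std::size_t size;  // bytes occupied by this record
};

// Walk records laid out back to back, invoking on_free(start, size) once for
// every maximal run of dead records, like the transition tracking above.
inline void LiveDeadWalk(
    const Record* records, std::size_t count,
    const std::function<void(std::size_t, std::size_t)>& on_free) {
  std::size_t offset = 0;
  std::size_t free_start = 0;
  bool prev_alive = true;
  for (std::size_t i = 0; i < count; offset += records[i].size, ++i) {
    if (records[i].live) {
      if (!prev_alive) {        // dead -> live: close the free run
        on_free(free_start, offset - free_start);
        prev_alive = true;
      }
    } else if (prev_alive) {    // live -> dead: open a free run
      free_start = offset;
      prev_alive = false;
    }
  }
  if (!prev_alive) on_free(free_start, offset - free_start);  // trailing run
}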
static void SweepSpace(NewSpace* space) {
  HeapObject* object;
  for (Address current = space->bottom();
       current < space->top();
       current += object->Size()) {
    object = HeapObject::FromAddress(current);
    if (object->IsMarked()) {
      object->ClearMark();
      MarkCompactCollector::tracer()->decrement_marked_count();
    } else {
      // We give non-live objects a map that will correctly give their size,
      // since their existing map might not be live after the collection.
      int size = object->Size();
      if (size >= ByteArray::kHeaderSize) {
        object->set_map(Heap::raw_unchecked_byte_array_map());
        ByteArray::cast(object)->set_length(ByteArray::LengthFor(size));
      } else {
        ASSERT(size == kPointerSize);
        object->set_map(Heap::raw_unchecked_one_pointer_filler_map());
      }
      ASSERT(object->Size() == size);
    }
    // The object is now unmarked for the call to Size() at the top of the
    // loop.
  }
}


static void SweepSpace(PagedSpace* space, DeallocateFunction dealloc) {
  PageIterator it(space, PageIterator::PAGES_IN_USE);
  while (it.has_next()) {
    Page* p = it.next();

    bool is_previous_alive = true;
    Address free_start = NULL;
    HeapObject* object;

    for (Address current = p->ObjectAreaStart();
         current < p->AllocationTop();
         current += object->Size()) {
      object = HeapObject::FromAddress(current);
      if (object->IsMarked()) {
        object->ClearMark();
        MarkCompactCollector::tracer()->decrement_marked_count();
        if (!is_previous_alive) {  // Transition from free to live.
          dealloc(free_start, static_cast<int>(current - free_start));
          is_previous_alive = true;
        }
      } else {
        if (object->IsCode()) {
          // Notify the logger that compiled code has been collected.
          LOG(CodeDeleteEvent(Code::cast(object)->address()));
        }
        if (is_previous_alive) {  // Transition from live to free.
          free_start = current;
          is_previous_alive = false;
        }
      }
      // The object is now unmarked for the call to Size() at the top of the
      // loop.
    }

    // If the last region was not live we need to deallocate from
    // free_start to the allocation top in the page.
    if (!is_previous_alive) {
      int free_size = static_cast<int>(p->AllocationTop() - free_start);
      if (free_size > 0) {
        dealloc(free_start, free_size);
      }
    }
  }
}


void MarkCompactCollector::DeallocateOldPointerBlock(Address start,
                                                     int size_in_bytes) {
  Heap::ClearRSetRange(start, size_in_bytes);
  Heap::old_pointer_space()->Free(start, size_in_bytes);
}


void MarkCompactCollector::DeallocateOldDataBlock(Address start,
                                                  int size_in_bytes) {
  Heap::old_data_space()->Free(start, size_in_bytes);
}


void MarkCompactCollector::DeallocateCodeBlock(Address start,
                                               int size_in_bytes) {
  Heap::code_space()->Free(start, size_in_bytes);
}


void MarkCompactCollector::DeallocateMapBlock(Address start,
                                              int size_in_bytes) {
  // Objects in map space are assumed to have size Map::kSize and a
  // valid map in their first word. Thus, we break the free block up into
  // chunks and free them separately.
  ASSERT(size_in_bytes % Map::kSize == 0);
  Heap::ClearRSetRange(start, size_in_bytes);
  Address end = start + size_in_bytes;
  for (Address a = start; a < end; a += Map::kSize) {
    Heap::map_space()->Free(a);
  }
}


void MarkCompactCollector::DeallocateCellBlock(Address start,
                                               int size_in_bytes) {
  // Free-list elements in cell space are assumed to have a fixed size.
  // We break the free block into chunks and add them to the free list
  // individually.
  int size = Heap::cell_space()->object_size_in_bytes();
  ASSERT(size_in_bytes % size == 0);
  Heap::ClearRSetRange(start, size_in_bytes);
  Address end = start + size_in_bytes;
  for (Address a = start; a < end; a += size) {
    Heap::cell_space()->Free(a);
  }
}


void MarkCompactCollector::EncodeForwardingAddresses() {
  ASSERT(state_ == ENCODE_FORWARDING_ADDRESSES);
  // Objects in the active semispace of the young generation may be
  // relocated to the inactive semispace (if not promoted). Set the
  // relocation info to the beginning of the inactive semispace.
  Heap::new_space()->MCResetRelocationInfo();

  // Compute the forwarding pointers in each space.
  EncodeForwardingAddressesInPagedSpace<MCAllocateFromOldPointerSpace,
                                        IgnoreNonLiveObject>(
      Heap::old_pointer_space());

  EncodeForwardingAddressesInPagedSpace<MCAllocateFromOldDataSpace,
                                        IgnoreNonLiveObject>(
      Heap::old_data_space());

  EncodeForwardingAddressesInPagedSpace<MCAllocateFromCodeSpace,
                                        LogNonLiveCodeObject>(
      Heap::code_space());

  EncodeForwardingAddressesInPagedSpace<MCAllocateFromCellSpace,
                                        IgnoreNonLiveObject>(
      Heap::cell_space());

  // Compute new space next to last after the old and code spaces have been
  // compacted. Objects in new space can be promoted to old or code space.
  EncodeForwardingAddressesInNewSpace();

  // Compute map space last because computing forwarding addresses
  // overwrites non-live objects. Objects in the other spaces rely on
  // non-live map pointers to get the sizes of non-live objects.
  EncodeForwardingAddressesInPagedSpace<MCAllocateFromMapSpace,
                                        IgnoreNonLiveObject>(
      Heap::map_space());

  // Write relocation info to the top page, so we can use it later. This is
  // done after promoting objects from the new space so we get the correct
  // allocation top.
  Heap::old_pointer_space()->MCWriteRelocationInfoToPage();
  Heap::old_data_space()->MCWriteRelocationInfoToPage();
  Heap::code_space()->MCWriteRelocationInfoToPage();
  Heap::map_space()->MCWriteRelocationInfoToPage();
  Heap::cell_space()->MCWriteRelocationInfoToPage();
}
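The new-space sweep above overwrites each dead object's map with a filler map chosen so that Size() still reports the right byte count on later passes. A compact sketch of that self-describing filler idea over a flat byte arena (Filler, WriteFiller, and kFillerMagic are made-up names):

#include <cstdint>
#include <cstring>

// A dead region is overwritten with a tiny header that records its own size,
// so any later linear walk of the arena can step over it without consulting
// the (possibly dead) metadata it used to carry.
struct Filler {
  uint32_t magic;  // distinguishes fillers from real objects
  uint32_t size;   // total bytes covered, including this header
};

constexpr uint32_t kFillerMagic = 0xDEADBEEF;

inline void WriteFiller(uint8_t* dead_start, uint32_t dead_size) {
  // Requires dead_size >= sizeof(Filler); smaller holes need a special case,
  // just as the sweep above has a one-pointer filler.
  Filler f{kFillerMagic, dead_size};
  std::memcpy(dead_start, &f, sizeof(Filler));
}

inline bool IsFiller(const uint8_t* p) {
  uint32_t magic;
  std::memcpy(&magic, p, sizeof(uint32_t));
  return magic == kFillerMagic;
}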
class MapIterator : public HeapObjectIterator {
 public:
  MapIterator() : HeapObjectIterator(Heap::map_space(), &SizeCallback) { }

  explicit MapIterator(Address start)
      : HeapObjectIterator(Heap::map_space(), start, &SizeCallback) { }

 private:
  static int SizeCallback(HeapObject* unused) {
    USE(unused);
    return Map::kSize;
  }
};


class MapCompact {
 public:
  explicit MapCompact(int live_maps)
      : live_maps_(live_maps),
        to_evacuate_start_(Heap::map_space()->TopAfterCompaction(live_maps)),
        map_to_evacuate_it_(to_evacuate_start_),
        first_map_to_evacuate_(
            reinterpret_cast<Map*>(HeapObject::FromAddress(to_evacuate_start_))) {
  }

  void CompactMaps() {
    // As we know the number of maps to evacuate beforehand,
    // we stop when there are no more vacant maps.
    for (Map* next_vacant_map = NextVacantMap();
         next_vacant_map;
         next_vacant_map = NextVacantMap()) {
      EvacuateMap(next_vacant_map, NextMapToEvacuate());
    }

#ifdef DEBUG
    CheckNoMapsToEvacuate();
#endif
  }

  void UpdateMapPointersInRoots() {
    Heap::IterateRoots(&map_updating_visitor_, VISIT_ONLY_STRONG);
    GlobalHandles::IterateWeakRoots(&map_updating_visitor_);
  }

  void FinishMapSpace() {
    // Iterate through to space and finish move.
    MapIterator it;
    HeapObject* o = it.next();
    for (; o != first_map_to_evacuate_; o = it.next()) {
      it.has_next();  // Must be called for side-effects, see bug 586.
      ASSERT(it.has_next());
      Map* map = reinterpret_cast<Map*>(o);
      ASSERT(!map->IsMarked());
      ASSERT(!map->IsOverflowed());
      ASSERT(map->IsMap());
      Heap::UpdateRSet(map);
    }
  }

  void UpdateMapPointersInPagedSpace(PagedSpace* space) {
    ASSERT(space != Heap::map_space());

    PageIterator it(space, PageIterator::PAGES_IN_USE);
    while (it.has_next()) {
      Page* p = it.next();
      UpdateMapPointersInRange(p->ObjectAreaStart(), p->AllocationTop());
    }
  }

  void UpdateMapPointersInNewSpace() {
    NewSpace* space = Heap::new_space();
    UpdateMapPointersInRange(space->bottom(), space->top());
  }

  void UpdateMapPointersInLargeObjectSpace() {
    LargeObjectIterator it(Heap::lo_space());
    while (true) {
      if (!it.has_next()) break;
      UpdateMapPointersInObject(it.next());
    }
  }

  void Finish() {
    Heap::map_space()->FinishCompaction(to_evacuate_start_, live_maps_);
  }

 private:
  int live_maps_;
  Address to_evacuate_start_;
  MapIterator vacant_map_it_;
  MapIterator map_to_evacuate_it_;
  Map* first_map_to_evacuate_;

  // Helper class for updating map pointers in HeapObjects.
  class MapUpdatingVisitor: public ObjectVisitor {
   public:
    void VisitPointer(Object** p) {
      UpdateMapPointer(p);
    }

    void VisitPointers(Object** start, Object** end) {
      for (Object** p = start; p < end; p++) UpdateMapPointer(p);
    }

   private:
    void UpdateMapPointer(Object** p) {
      if (!(*p)->IsHeapObject()) return;
      HeapObject* old_map = reinterpret_cast<HeapObject*>(*p);

      // Moved maps are tagged with an overflowed map word. They are the only
      // objects whose map word is overflowed, as marking is already complete.
      MapWord map_word = old_map->map_word();
      if (!map_word.IsOverflowed()) return;

      *p = GetForwardedMap(map_word);
    }
  };

  static MapUpdatingVisitor map_updating_visitor_;

  static Map* NextMap(MapIterator* it, HeapObject* last, bool live) {
    while (true) {
      it->has_next();  // Must be called for side-effects, see bug 586.
      ASSERT(it->has_next());
      HeapObject* next = it->next();
      if (next == last)
        return NULL;
      ASSERT(!next->IsOverflowed());
      ASSERT(!next->IsMarked());
      ASSERT(next->IsMap() || FreeListNode::IsFreeListNode(next));
      if (next->IsMap() == live)
        return reinterpret_cast<Map*>(next);
    }
  }

  Map* NextVacantMap() {
    Map* map = NextMap(&vacant_map_it_, first_map_to_evacuate_, false);
    ASSERT(map == NULL || FreeListNode::IsFreeListNode(map));
    return map;
  }

  Map* NextMapToEvacuate() {
    Map* map = NextMap(&map_to_evacuate_it_, NULL, true);
    ASSERT(map != NULL);
    ASSERT(map->IsMap());
    return map;
  }

  static void EvacuateMap(Map* vacant_map, Map* map_to_evacuate) {
    ASSERT(FreeListNode::IsFreeListNode(vacant_map));
    ASSERT(map_to_evacuate->IsMap());

    memcpy(
        reinterpret_cast<void*>(vacant_map->address()),
        reinterpret_cast<void*>(map_to_evacuate->address()),
        Map::kSize);
    ASSERT(vacant_map->IsMap());  // Due to memcpy above.

    MapWord forwarding_map_word = MapWord::FromMap(vacant_map);
    forwarding_map_word.SetOverflow();
    map_to_evacuate->set_map_word(forwarding_map_word);

    ASSERT(map_to_evacuate->map_word().IsOverflowed());
    ASSERT(GetForwardedMap(map_to_evacuate->map_word()) == vacant_map);
  }

  static Map* GetForwardedMap(MapWord map_word) {
    ASSERT(map_word.IsOverflowed());
    map_word.ClearOverflow();
    Map* new_map = map_word.ToMap();
    ASSERT_MAP_ALIGNED(new_map->address());
    return new_map;
  }

  static int UpdateMapPointersInObject(HeapObject* obj) {
    ASSERT(!obj->IsMarked());
    Map* map = obj->map();
    ASSERT(Heap::map_space()->Contains(map));
    MapWord map_word = map->map_word();
    ASSERT(!map_word.IsMarked());
    if (map_word.IsOverflowed()) {
      Map* new_map = GetForwardedMap(map_word);
      ASSERT(Heap::map_space()->Contains(new_map));
      obj->set_map(new_map);

#ifdef DEBUG
      if (FLAG_gc_verbose) {
        PrintF("update %p : %p -> %p\n", obj->address(),
               map, new_map);
      }
#endif
    }

    int size = obj->SizeFromMap(map);
    obj->IterateBody(map->instance_type(), size, &map_updating_visitor_);
    return size;
  }

  static void UpdateMapPointersInRange(Address start, Address end) {
    HeapObject* object;
    int size;
    for (Address current = start; current < end; current += size) {
      object = HeapObject::FromAddress(current);
      size = UpdateMapPointersInObject(object);
      ASSERT(size > 0);
    }
  }

#ifdef DEBUG
  void CheckNoMapsToEvacuate() {
    if (!FLAG_enable_slow_asserts)
      return;

    while (map_to_evacuate_it_.has_next())
      ASSERT(FreeListNode::IsFreeListNode(map_to_evacuate_it_.next()));
  }
#endif
};

MapCompact::MapUpdatingVisitor MapCompact::map_updating_visitor_;
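MapCompact above fills holes at the front of the map space by memcpy-ing a fixed-size record into a vacant slot and leaving a tagged forwarding word behind in the old slot, which the pointer-update passes then follow. A small standalone sketch of evacuating fixed-size records with forwarding indices (Slot, EvacuateInto, and Resolve are made-up names):

#include <cstddef>
#include <cstring>
#include <vector>

// Fixed-size record with room for a forwarding index once it has moved.
struct Slot {
  unsigned char payload[32];
  bool vacant = true;
  long forwarded_to = -1;  // index of the slot it was evacuated into, or -1
};

// Copy record `from` into vacant slot `to`, then leave a forwarding index in
// the old slot so stale references can be redirected later.
inline void EvacuateInto(std::vector<Slot>& slots,
                         std::size_t from, std::size_t to) {
  std::memcpy(slots[to].payload, slots[from].payload, sizeof slots[to].payload);
  slots[to].vacant = false;
  slots[from].vacant = true;
  slots[from].forwarded_to = static_cast<long>(to);
}

// Follow a possibly-stale index to the record's current home.
inline std::size_t Resolve(const std::vector<Slot>& slots, std::size_t index) {
  return slots[index].forwarded_to >= 0
             ? static_cast<std::size_t>(slots[index].forwarded_to)
             : index;
}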
void MarkCompactCollector::SweepSpaces() {
  ASSERT(state_ == SWEEP_SPACES);
  ASSERT(!IsCompacting());
  // Noncompacting collections simply sweep the spaces to clear the mark
  // bits and free the nonlive blocks (for old and map spaces). We sweep
  // the map space last because freeing non-live maps overwrites them and
  // the other spaces rely on possibly non-live maps to get the sizes for
  // non-live objects.
  SweepSpace(Heap::old_pointer_space(), &DeallocateOldPointerBlock);
  SweepSpace(Heap::old_data_space(), &DeallocateOldDataBlock);
  SweepSpace(Heap::code_space(), &DeallocateCodeBlock);
  SweepSpace(Heap::cell_space(), &DeallocateCellBlock);
  SweepSpace(Heap::new_space());
  SweepSpace(Heap::map_space(), &DeallocateMapBlock);
  int live_maps = Heap::map_space()->Size() / Map::kSize;
  ASSERT(live_map_objects_ == live_maps);

  if (Heap::map_space()->NeedsCompaction(live_maps)) {
    MapCompact map_compact(live_maps);

    map_compact.CompactMaps();
    map_compact.UpdateMapPointersInRoots();

    map_compact.FinishMapSpace();
    PagedSpaces spaces;
    while (PagedSpace* space = spaces.next()) {
      if (space == Heap::map_space()) continue;
      map_compact.UpdateMapPointersInPagedSpace(space);
    }
    map_compact.UpdateMapPointersInNewSpace();
    map_compact.UpdateMapPointersInLargeObjectSpace();

    map_compact.Finish();
  }
}


// Iterate the live objects in a range of addresses (eg, a page or a
// semispace). The live regions of the range have been linked into a list.
// The first live region is [first_live_start, first_live_end), and the last
// address in the range is top. The callback function is used to get the
// size of each live object.
int MarkCompactCollector::IterateLiveObjectsInRange(
    Address start,
    Address end,
    HeapObjectCallback size_func) {
  int live_objects = 0;
  Address current = start;
  while (current < end) {
    uint32_t encoded_map = Memory::uint32_at(current);
    if (encoded_map == kSingleFreeEncoding) {
      current += kPointerSize;
    } else if (encoded_map == kMultiFreeEncoding) {
      current += Memory::int_at(current + kIntSize);
    } else {
      live_objects++;
      current += size_func(HeapObject::FromAddress(current));
    }
  }
  return live_objects;
}


int MarkCompactCollector::IterateLiveObjects(NewSpace* space,
                                             HeapObjectCallback size_f) {
  ASSERT(MARK_LIVE_OBJECTS < state_ && state_ <= RELOCATE_OBJECTS);
  return IterateLiveObjectsInRange(space->bottom(), space->top(), size_f);
}


int MarkCompactCollector::IterateLiveObjects(PagedSpace* space,
                                             HeapObjectCallback size_f) {
  ASSERT(MARK_LIVE_OBJECTS < state_ && state_ <= RELOCATE_OBJECTS);
  int total = 0;
  PageIterator it(space, PageIterator::PAGES_IN_USE);
  while (it.has_next()) {
    Page* p = it.next();
    total += IterateLiveObjectsInRange(p->ObjectAreaStart(),
                                       p->AllocationTop(),
                                       size_f);
  }
  return total;
}
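IterateLiveObjectsInRange above walks a range and dispatches on the encoded first word: the single-free sentinel advances one pointer, the multi-free sentinel advances by the stored size, and anything else is a live object whose size comes from a callback. A self-contained sketch of that dispatch over a toy byte arena using the same two sentinels (CountLiveObjects and size_of are illustrative):

#include <cstdint>
#include <cstring>
#include <functional>

// Count live objects in an arena where dead runs were tagged with the two
// sentinels described above (0 = one-word hole, 1 = sized hole) and
// everything else is a live object measured by a callback.
inline int CountLiveObjects(
    const uint8_t* start, const uint8_t* end,
    const std::function<uint32_t(const uint8_t*)>& size_of) {
  constexpr uint32_t kSingleFree = 0;
  constexpr uint32_t kMultiFree = 1;
  int live = 0;
  const uint8_t* current = start;
  while (current < end) {
    uint32_t word;
    std::memcpy(&word, current, sizeof(uint32_t));
    if (word == kSingleFree) {
      current += sizeof(void*);       // one-word hole
    } else if (word == kMultiFree) {
      uint32_t size;
      std::memcpy(&size, current + sizeof(uint32_t), sizeof(uint32_t));
      current += size;                // sized hole
    } else {
      ++live;
      current += size_of(current);    // live object
    }
  }
  return live;
}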
1552
|
-
// -------------------------------------------------------------------------
|
1553
|
-
// Phase 3: Update pointers
|
1554
|
-
|
1555
|
-
// Helper class for updating pointers in HeapObjects.
|
1556
|
-
class UpdatingVisitor: public ObjectVisitor {
|
1557
|
-
public:
|
1558
|
-
void VisitPointer(Object** p) {
|
1559
|
-
UpdatePointer(p);
|
1560
|
-
}
|
1561
|
-
|
1562
|
-
void VisitPointers(Object** start, Object** end) {
|
1563
|
-
// Mark all HeapObject pointers in [start, end)
|
1564
|
-
for (Object** p = start; p < end; p++) UpdatePointer(p);
|
1565
|
-
}
|
1566
|
-
|
1567
|
-
void VisitCodeTarget(RelocInfo* rinfo) {
|
1568
|
-
ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
|
1569
|
-
Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
|
1570
|
-
VisitPointer(&target);
|
1571
|
-
rinfo->set_target_address(
|
1572
|
-
reinterpret_cast<Code*>(target)->instruction_start());
|
1573
|
-
}
|
1574
|
-
|
1575
|
-
void VisitDebugTarget(RelocInfo* rinfo) {
|
1576
|
-
ASSERT(RelocInfo::IsJSReturn(rinfo->rmode()) &&
|
1577
|
-
rinfo->IsPatchedReturnSequence());
|
1578
|
-
Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
|
1579
|
-
VisitPointer(&target);
|
1580
|
-
rinfo->set_call_address(
|
1581
|
-
reinterpret_cast<Code*>(target)->instruction_start());
|
1582
|
-
}
|
1583
|
-
|
1584
|
-
private:
|
1585
|
-
void UpdatePointer(Object** p) {
|
1586
|
-
if (!(*p)->IsHeapObject()) return;
|
1587
|
-
|
1588
|
-
HeapObject* obj = HeapObject::cast(*p);
|
1589
|
-
Address old_addr = obj->address();
|
1590
|
-
Address new_addr;
|
1591
|
-
ASSERT(!Heap::InFromSpace(obj));
|
1592
|
-
|
1593
|
-
if (Heap::new_space()->Contains(obj)) {
|
1594
|
-
Address forwarding_pointer_addr =
|
1595
|
-
Heap::new_space()->FromSpaceLow() +
|
1596
|
-
Heap::new_space()->ToSpaceOffsetForAddress(old_addr);
|
1597
|
-
new_addr = Memory::Address_at(forwarding_pointer_addr);
|
1598
|
-
|
1599
|
-
#ifdef DEBUG
|
1600
|
-
ASSERT(Heap::old_pointer_space()->Contains(new_addr) ||
|
1601
|
-
Heap::old_data_space()->Contains(new_addr) ||
|
1602
|
-
Heap::new_space()->FromSpaceContains(new_addr) ||
|
1603
|
-
Heap::lo_space()->Contains(HeapObject::FromAddress(new_addr)));
|
1604
|
-
|
1605
|
-
if (Heap::new_space()->FromSpaceContains(new_addr)) {
|
1606
|
-
ASSERT(Heap::new_space()->FromSpaceOffsetForAddress(new_addr) <=
|
1607
|
-
Heap::new_space()->ToSpaceOffsetForAddress(old_addr));
|
1608
|
-
}
|
1609
|
-
#endif
|
1610
|
-
|
1611
|
-
} else if (Heap::lo_space()->Contains(obj)) {
|
1612
|
-
// Don't move objects in the large object space.
|
1613
|
-
return;
|
1614
|
-
|
1615
|
-
} else {
|
1616
|
-
#ifdef DEBUG
|
1617
|
-
PagedSpaces spaces;
|
1618
|
-
PagedSpace* original_space = spaces.next();
|
1619
|
-
while (original_space != NULL) {
|
1620
|
-
if (original_space->Contains(obj)) break;
|
1621
|
-
original_space = spaces.next();
|
1622
|
-
}
|
1623
|
-
ASSERT(original_space != NULL);
|
1624
|
-
#endif
|
1625
|
-
new_addr = MarkCompactCollector::GetForwardingAddressInOldSpace(obj);
|
1626
|
-
ASSERT(original_space->Contains(new_addr));
|
1627
|
-
ASSERT(original_space->MCSpaceOffsetForAddress(new_addr) <=
|
1628
|
-
original_space->MCSpaceOffsetForAddress(old_addr));
|
1629
|
-
}
|
1630
|
-
|
1631
|
-
*p = HeapObject::FromAddress(new_addr);
|
1632
|
-
|
1633
|
-
#ifdef DEBUG
|
1634
|
-
if (FLAG_gc_verbose) {
|
1635
|
-
PrintF("update %p : %p -> %p\n",
|
1636
|
-
reinterpret_cast<Address>(p), old_addr, new_addr);
|
1637
|
-
}
|
1638
|
-
#endif
|
1639
|
-
}
|
1640
|
-
};
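// --- Illustrative sketch (not part of the diffed file) ----------------------
// The visitor above rewrites every heap slot through the collector's
// forwarding information. This stand-in follows the same shape: VisitPointers
// walks a slot range and UpdatePointer rewrites each slot. The std::map
// forwarding table is an assumption for illustration; the real collector
// derives new addresses from map-word encodings, not from a lookup table.
#include <cassert>
#include <cstdio>
#include <map>

namespace sketch {

struct Obj { int payload; };

class ForwardingVisitor {
 public:
  explicit ForwardingVisitor(const std::map<Obj*, Obj*>& forwarding)
      : forwarding_(forwarding) {}

  // Rewrite every slot in [start, end), mirroring VisitPointers above.
  void VisitPointers(Obj** start, Obj** end) {
    for (Obj** p = start; p < end; p++) UpdatePointer(p);
  }

 private:
  void UpdatePointer(Obj** p) {
    std::map<Obj*, Obj*>::const_iterator it = forwarding_.find(*p);
    if (it == forwarding_.end()) return;  // object did not move
    *p = it->second;                      // point the slot at the new location
  }

  const std::map<Obj*, Obj*>& forwarding_;
};

}  // namespace sketch

int main() {
  sketch::Obj old_obj = {42}, new_obj = {42};
  sketch::Obj* slots[2] = {&old_obj, &old_obj};

  std::map<sketch::Obj*, sketch::Obj*> forwarding;
  forwarding[&old_obj] = &new_obj;

  sketch::ForwardingVisitor visitor(forwarding);
  visitor.VisitPointers(slots, slots + 2);

  assert(slots[0] == &new_obj && slots[1] == &new_obj);
  std::printf("slots rewritten, payload: %d\n", slots[0]->payload);  // 42
  return 0;
}
// ----------------------------------------------------------------------------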
void MarkCompactCollector::UpdatePointers() {
#ifdef DEBUG
  ASSERT(state_ == ENCODE_FORWARDING_ADDRESSES);
  state_ = UPDATE_POINTERS;
#endif
  UpdatingVisitor updating_visitor;
  Heap::IterateRoots(&updating_visitor, VISIT_ONLY_STRONG);
  GlobalHandles::IterateWeakRoots(&updating_visitor);

  int live_maps = IterateLiveObjects(Heap::map_space(),
                                     &UpdatePointersInOldObject);
  int live_pointer_olds = IterateLiveObjects(Heap::old_pointer_space(),
                                             &UpdatePointersInOldObject);
  int live_data_olds = IterateLiveObjects(Heap::old_data_space(),
                                          &UpdatePointersInOldObject);
  int live_codes = IterateLiveObjects(Heap::code_space(),
                                      &UpdatePointersInOldObject);
  int live_cells = IterateLiveObjects(Heap::cell_space(),
                                      &UpdatePointersInOldObject);
  int live_news = IterateLiveObjects(Heap::new_space(),
                                     &UpdatePointersInNewObject);

  // Large objects do not move, the map word can be updated directly.
  LargeObjectIterator it(Heap::lo_space());
  while (it.has_next()) UpdatePointersInNewObject(it.next());

  USE(live_maps);
  USE(live_pointer_olds);
  USE(live_data_olds);
  USE(live_codes);
  USE(live_cells);
  USE(live_news);
  ASSERT(live_maps == live_map_objects_);
  ASSERT(live_data_olds == live_old_data_objects_);
  ASSERT(live_pointer_olds == live_old_pointer_objects_);
  ASSERT(live_codes == live_code_objects_);
  ASSERT(live_cells == live_cell_objects_);
  ASSERT(live_news == live_young_objects_);
}


int MarkCompactCollector::UpdatePointersInNewObject(HeapObject* obj) {
  // Keep old map pointers
  Map* old_map = obj->map();
  ASSERT(old_map->IsHeapObject());

  Address forwarded = GetForwardingAddressInOldSpace(old_map);

  ASSERT(Heap::map_space()->Contains(old_map));
  ASSERT(Heap::map_space()->Contains(forwarded));
#ifdef DEBUG
  if (FLAG_gc_verbose) {
    PrintF("update %p : %p -> %p\n", obj->address(), old_map->address(),
           forwarded);
  }
#endif
  // Update the map pointer.
  obj->set_map(reinterpret_cast<Map*>(HeapObject::FromAddress(forwarded)));

  // We have to compute the object size relying on the old map because
  // map objects are not relocated yet.
  int obj_size = obj->SizeFromMap(old_map);

  // Update pointers in the object body.
  UpdatingVisitor updating_visitor;
  obj->IterateBody(old_map->instance_type(), obj_size, &updating_visitor);
  return obj_size;
}


int MarkCompactCollector::UpdatePointersInOldObject(HeapObject* obj) {
  // Decode the map pointer.
  MapWord encoding = obj->map_word();
  Address map_addr = encoding.DecodeMapAddress(Heap::map_space());
  ASSERT(Heap::map_space()->Contains(HeapObject::FromAddress(map_addr)));

  // At this point, the first word of map_addr is also encoded, cannot
  // cast it to Map* using Map::cast.
  Map* map = reinterpret_cast<Map*>(HeapObject::FromAddress(map_addr));
  int obj_size = obj->SizeFromMap(map);
  InstanceType type = map->instance_type();

  // Update map pointer.
  Address new_map_addr = GetForwardingAddressInOldSpace(map);
  int offset = encoding.DecodeOffset();
  obj->set_map_word(MapWord::EncodeAddress(new_map_addr, offset));

#ifdef DEBUG
  if (FLAG_gc_verbose) {
    PrintF("update %p : %p -> %p\n", obj->address(),
           map_addr, new_map_addr);
  }
#endif

  // Update pointers in the object body.
  UpdatingVisitor updating_visitor;
  obj->IterateBody(type, obj_size, &updating_visitor);
  return obj_size;
}
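// --- Illustrative sketch (not part of the diffed file) ----------------------
// UpdatePointersInOldObject keeps the map word "encoded": a map address and a
// forwarding offset are packed into a single word and unpacked on demand.
// This sketch packs an index and an offset into one uintptr_t to show the
// idea; the 16-bit offset field is an arbitrary assumption, not the actual
// MapWord layout used by the file above.
#include <cassert>
#include <cstdint>

namespace sketch {

const unsigned kOffsetBits = 16;
const uintptr_t kOffsetMask = (uintptr_t(1) << kOffsetBits) - 1;

// Pack a map index and a byte offset into one word.
uintptr_t Encode(uintptr_t map_index, uintptr_t offset) {
  assert(offset <= kOffsetMask);
  return (map_index << kOffsetBits) | offset;
}

uintptr_t DecodeMapIndex(uintptr_t word) { return word >> kOffsetBits; }
uintptr_t DecodeOffset(uintptr_t word) { return word & kOffsetMask; }

}  // namespace sketch

int main() {
  uintptr_t word = sketch::Encode(/*map_index=*/123, /*offset=*/4096);
  assert(sketch::DecodeMapIndex(word) == 123);
  assert(sketch::DecodeOffset(word) == 4096);
  return 0;
}
// ----------------------------------------------------------------------------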
Address MarkCompactCollector::GetForwardingAddressInOldSpace(HeapObject* obj) {
  // Object should either in old or map space.
  MapWord encoding = obj->map_word();

  // Offset to the first live object's forwarding address.
  int offset = encoding.DecodeOffset();
  Address obj_addr = obj->address();

  // Find the first live object's forwarding address.
  Page* p = Page::FromAddress(obj_addr);
  Address first_forwarded = p->mc_first_forwarded;

  // Page start address of forwarded address.
  Page* forwarded_page = Page::FromAddress(first_forwarded);
  int forwarded_offset = forwarded_page->Offset(first_forwarded);

  // Find end of allocation of in the page of first_forwarded.
  Address mc_top = forwarded_page->mc_relocation_top;
  int mc_top_offset = forwarded_page->Offset(mc_top);

  // Check if current object's forward pointer is in the same page
  // as the first live object's forwarding pointer
  if (forwarded_offset + offset < mc_top_offset) {
    // In the same page.
    return first_forwarded + offset;
  }

  // Must be in the next page, NOTE: this may cross chunks.
  Page* next_page = forwarded_page->next_page();
  ASSERT(next_page->is_valid());

  offset -= (mc_top_offset - forwarded_offset);
  offset += Page::kObjectStartOffset;

  ASSERT_PAGE_OFFSET(offset);
  ASSERT(next_page->OffsetToAddress(offset) < next_page->mc_relocation_top);

  return next_page->OffsetToAddress(offset);
}
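// --- Illustrative sketch (not part of the diffed file) ----------------------
// The function above turns "offset from the first live object's forwarding
// address" into an actual address, rolling over into the next page when the
// offset passes that page's relocation top. This is a simplified, single-
// rollover model with flat buffers standing in for pages; page headers,
// chunks and the object-start offset are omitted on purpose.
#include <cassert>
#include <cstddef>

namespace sketch {

struct Page {
  char* start;           // first usable byte of the page
  char* relocation_top;  // end of the area already claimed by relocation
  Page* next;            // next page in the space (may be NULL)
};

// Resolve first_forwarded + offset, spilling into page->next if needed.
char* ResolveForwarded(Page* page, char* first_forwarded, std::size_t offset) {
  std::size_t room =
      static_cast<std::size_t>(page->relocation_top - first_forwarded);
  if (offset < room) return first_forwarded + offset;  // same page
  assert(page->next != NULL);                          // must continue somewhere
  return page->next->start + (offset - room);          // next page
}

}  // namespace sketch

int main() {
  char page_a[64], page_b[64];
  sketch::Page b = {page_b, page_b + 64, NULL};
  sketch::Page a = {page_a, page_a + 32, &b};  // only 32 bytes claimed on page A

  // 40 bytes past the first forwarded address: 32 fit on page A, 8 spill to B.
  char* addr = sketch::ResolveForwarded(&a, page_a, 40);
  assert(addr == page_b + 8);
  return 0;
}
// ----------------------------------------------------------------------------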
// -------------------------------------------------------------------------
// Phase 4: Relocate objects

void MarkCompactCollector::RelocateObjects() {
#ifdef DEBUG
  ASSERT(state_ == UPDATE_POINTERS);
  state_ = RELOCATE_OBJECTS;
#endif
  // Relocates objects, always relocate map objects first. Relocating
  // objects in other space relies on map objects to get object size.
  int live_maps = IterateLiveObjects(Heap::map_space(), &RelocateMapObject);
  int live_pointer_olds = IterateLiveObjects(Heap::old_pointer_space(),
                                             &RelocateOldPointerObject);
  int live_data_olds = IterateLiveObjects(Heap::old_data_space(),
                                          &RelocateOldDataObject);
  int live_codes = IterateLiveObjects(Heap::code_space(), &RelocateCodeObject);
  int live_cells = IterateLiveObjects(Heap::cell_space(), &RelocateCellObject);
  int live_news = IterateLiveObjects(Heap::new_space(), &RelocateNewObject);

  USE(live_maps);
  USE(live_data_olds);
  USE(live_pointer_olds);
  USE(live_codes);
  USE(live_cells);
  USE(live_news);
  ASSERT(live_maps == live_map_objects_);
  ASSERT(live_data_olds == live_old_data_objects_);
  ASSERT(live_pointer_olds == live_old_pointer_objects_);
  ASSERT(live_codes == live_code_objects_);
  ASSERT(live_cells == live_cell_objects_);
  ASSERT(live_news == live_young_objects_);

  // Flip from and to spaces
  Heap::new_space()->Flip();

  // Set age_mark to bottom in to space
  Address mark = Heap::new_space()->bottom();
  Heap::new_space()->set_age_mark(mark);

  Heap::new_space()->MCCommitRelocationInfo();
#ifdef DEBUG
  // It is safe to write to the remembered sets as remembered sets on a
  // page-by-page basis after committing the m-c forwarding pointer.
  Page::set_rset_state(Page::IN_USE);
#endif
  PagedSpaces spaces;
  while (PagedSpace* space = spaces.next()) space->MCCommitRelocationInfo();
}


int MarkCompactCollector::RelocateMapObject(HeapObject* obj) {
  // Recover map pointer.
  MapWord encoding = obj->map_word();
  Address map_addr = encoding.DecodeMapAddress(Heap::map_space());
  ASSERT(Heap::map_space()->Contains(HeapObject::FromAddress(map_addr)));

  // Get forwarding address before resetting map pointer
  Address new_addr = GetForwardingAddressInOldSpace(obj);

  // Reset map pointer. The meta map object may not be copied yet so
  // Map::cast does not yet work.
  obj->set_map(reinterpret_cast<Map*>(HeapObject::FromAddress(map_addr)));

  Address old_addr = obj->address();

  if (new_addr != old_addr) {
    memmove(new_addr, old_addr, Map::kSize);  // copy contents
  }

#ifdef DEBUG
  if (FLAG_gc_verbose) {
    PrintF("relocate %p -> %p\n", old_addr, new_addr);
  }
#endif

  return Map::kSize;
}


static inline int RestoreMap(HeapObject* obj,
                             PagedSpace* space,
                             Address new_addr,
                             Address map_addr) {
  // This must be a non-map object, and the function relies on the
  // assumption that the Map space is compacted before the other paged
  // spaces (see RelocateObjects).

  // Reset map pointer.
  obj->set_map(Map::cast(HeapObject::FromAddress(map_addr)));

  int obj_size = obj->Size();
  ASSERT_OBJECT_SIZE(obj_size);

  ASSERT(space->MCSpaceOffsetForAddress(new_addr) <=
         space->MCSpaceOffsetForAddress(obj->address()));

#ifdef DEBUG
  if (FLAG_gc_verbose) {
    PrintF("relocate %p -> %p\n", obj->address(), new_addr);
  }
#endif

  return obj_size;
}


int MarkCompactCollector::RelocateOldNonCodeObject(HeapObject* obj,
                                                   PagedSpace* space) {
  // Recover map pointer.
  MapWord encoding = obj->map_word();
  Address map_addr = encoding.DecodeMapAddress(Heap::map_space());
  ASSERT(Heap::map_space()->Contains(map_addr));

  // Get forwarding address before resetting map pointer.
  Address new_addr = GetForwardingAddressInOldSpace(obj);

  // Reset the map pointer.
  int obj_size = RestoreMap(obj, space, new_addr, map_addr);

  Address old_addr = obj->address();

  if (new_addr != old_addr) {
    memmove(new_addr, old_addr, obj_size);  // Copy contents
  }

  ASSERT(!HeapObject::FromAddress(new_addr)->IsCode());

  return obj_size;
}


int MarkCompactCollector::RelocateOldPointerObject(HeapObject* obj) {
  return RelocateOldNonCodeObject(obj, Heap::old_pointer_space());
}


int MarkCompactCollector::RelocateOldDataObject(HeapObject* obj) {
  return RelocateOldNonCodeObject(obj, Heap::old_data_space());
}


int MarkCompactCollector::RelocateCellObject(HeapObject* obj) {
  return RelocateOldNonCodeObject(obj, Heap::cell_space());
}


int MarkCompactCollector::RelocateCodeObject(HeapObject* obj) {
  // Recover map pointer.
  MapWord encoding = obj->map_word();
  Address map_addr = encoding.DecodeMapAddress(Heap::map_space());
  ASSERT(Heap::map_space()->Contains(HeapObject::FromAddress(map_addr)));

  // Get forwarding address before resetting map pointer
  Address new_addr = GetForwardingAddressInOldSpace(obj);

  // Reset the map pointer.
  int obj_size = RestoreMap(obj, Heap::code_space(), new_addr, map_addr);

  Address old_addr = obj->address();

  if (new_addr != old_addr) {
    memmove(new_addr, old_addr, obj_size);  // Copy contents.
  }

  HeapObject* copied_to = HeapObject::FromAddress(new_addr);
  if (copied_to->IsCode()) {
    // May also update inline cache target.
    Code::cast(copied_to)->Relocate(new_addr - old_addr);
    // Notify the logger that compiled code has moved.
    LOG(CodeMoveEvent(old_addr, new_addr));
  }

  return obj_size;
}


int MarkCompactCollector::RelocateNewObject(HeapObject* obj) {
  int obj_size = obj->Size();

  // Get forwarding address
  Address old_addr = obj->address();
  int offset = Heap::new_space()->ToSpaceOffsetForAddress(old_addr);

  Address new_addr =
      Memory::Address_at(Heap::new_space()->FromSpaceLow() + offset);

#ifdef DEBUG
  if (Heap::new_space()->FromSpaceContains(new_addr)) {
    ASSERT(Heap::new_space()->FromSpaceOffsetForAddress(new_addr) <=
           Heap::new_space()->ToSpaceOffsetForAddress(old_addr));
  } else {
    ASSERT(Heap::TargetSpace(obj) == Heap::old_pointer_space() ||
           Heap::TargetSpace(obj) == Heap::old_data_space());
  }
#endif

  // New and old addresses cannot overlap.
  memcpy(reinterpret_cast<void*>(new_addr),
         reinterpret_cast<void*>(old_addr),
         obj_size);

#ifdef DEBUG
  if (FLAG_gc_verbose) {
    PrintF("relocate %p -> %p\n", old_addr, new_addr);
  }
#endif

  return obj_size;
}
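// --- Illustrative sketch (not part of the diffed file) ----------------------
// Note the asymmetry above: objects compacted within a paged space are moved
// with memmove (when objects slide down inside the same page, the destination
// can overlap the source), while RelocateNewObject uses memcpy because, as its
// comment says, the new and old addresses cannot overlap. The toy below shows
// why memmove is the safe choice for the overlapping case.
#include <cassert>
#include <cstdio>
#include <cstring>

int main() {
  // "Slide" a 6-byte object 2 bytes lower in the same buffer: the ranges
  // [2, 8) and [0, 6) overlap, so memmove (not memcpy) must be used.
  char page[8] = {'x', 'x', 'o', 'b', 'j', 'e', 'c', 't'};
  std::memmove(page, page + 2, 6);
  assert(std::memcmp(page, "object", 6) == 0);

  // Promotion out of new space copies between disjoint buffers, where plain
  // memcpy is sufficient (and is what the listing uses).
  char from[6] = {'o', 'b', 'j', 'e', 'c', 't'};
  char to[6];
  std::memcpy(to, from, 6);
  assert(std::memcmp(to, "object", 6) == 0);

  std::puts("overlapping move handled by memmove");
  return 0;
}
// ----------------------------------------------------------------------------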
// -------------------------------------------------------------------------
// Phase 5: rebuild remembered sets

void MarkCompactCollector::RebuildRSets() {
#ifdef DEBUG
  ASSERT(state_ == RELOCATE_OBJECTS);
  state_ = REBUILD_RSETS;
#endif
  Heap::RebuildRSets();
}

} }  // namespace v8::internal
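// --- Illustrative sketch (not part of the diffed file) ----------------------
// The DEBUG-only ASSERT/state_ pairs scattered through the listing form a
// simple phase state machine: each phase checks that the expected previous
// phase ran, then records itself. The enumerator names below mirror the
// states referenced in the listing; the IDLE state, the Collector class and
// the omission of intermediate phases are simplifications for illustration.
#include <cassert>

namespace sketch {

enum CollectorState {
  IDLE,
  MARK_LIVE_OBJECTS,
  ENCODE_FORWARDING_ADDRESSES,
  UPDATE_POINTERS,
  RELOCATE_OBJECTS,
  REBUILD_RSETS
};

class Collector {
 public:
  Collector() : state_(IDLE) {}

  // Enter a phase only if the expected previous phase has completed.
  void Enter(CollectorState expected_previous, CollectorState next) {
    assert(state_ == expected_previous);
    state_ = next;
  }

  CollectorState state() const { return state_; }

 private:
  CollectorState state_;
};

}  // namespace sketch

int main() {
  sketch::Collector collector;
  collector.Enter(sketch::IDLE, sketch::MARK_LIVE_OBJECTS);
  collector.Enter(sketch::MARK_LIVE_OBJECTS, sketch::ENCODE_FORWARDING_ADDRESSES);
  collector.Enter(sketch::ENCODE_FORWARDING_ADDRESSES, sketch::UPDATE_POINTERS);
  collector.Enter(sketch::UPDATE_POINTERS, sketch::RELOCATE_OBJECTS);
  collector.Enter(sketch::RELOCATE_OBJECTS, sketch::REBUILD_RSETS);
  assert(collector.state() == sketch::REBUILD_RSETS);
  return 0;
}
// ----------------------------------------------------------------------------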