libv8 3.11.8.17 → 3.16.14.0
- checksums.yaml +4 -4
- data/.travis.yml +1 -2
- data/Gemfile +1 -1
- data/Rakefile +6 -7
- data/lib/libv8/version.rb +1 -1
- data/vendor/v8/.gitignore +24 -3
- data/vendor/v8/AUTHORS +7 -0
- data/vendor/v8/ChangeLog +839 -0
- data/vendor/v8/DEPS +1 -1
- data/vendor/v8/Makefile.android +92 -0
- data/vendor/v8/OWNERS +11 -0
- data/vendor/v8/PRESUBMIT.py +71 -0
- data/vendor/v8/SConstruct +34 -39
- data/vendor/v8/build/android.gypi +56 -37
- data/vendor/v8/build/common.gypi +112 -30
- data/vendor/v8/build/gyp_v8 +1 -1
- data/vendor/v8/build/standalone.gypi +15 -11
- data/vendor/v8/include/v8-debug.h +9 -1
- data/vendor/v8/include/v8-preparser.h +4 -3
- data/vendor/v8/include/v8-profiler.h +25 -25
- data/vendor/v8/include/v8-testing.h +4 -3
- data/vendor/v8/include/v8.h +994 -540
- data/vendor/v8/preparser/preparser-process.cc +3 -3
- data/vendor/v8/samples/lineprocessor.cc +20 -27
- data/vendor/v8/samples/process.cc +18 -14
- data/vendor/v8/samples/shell.cc +16 -15
- data/vendor/v8/src/SConscript +15 -14
- data/vendor/v8/src/accessors.cc +169 -77
- data/vendor/v8/src/accessors.h +4 -0
- data/vendor/v8/src/allocation-inl.h +2 -2
- data/vendor/v8/src/allocation.h +7 -7
- data/vendor/v8/src/api.cc +810 -497
- data/vendor/v8/src/api.h +85 -60
- data/vendor/v8/src/arm/assembler-arm-inl.h +179 -22
- data/vendor/v8/src/arm/assembler-arm.cc +633 -264
- data/vendor/v8/src/arm/assembler-arm.h +264 -197
- data/vendor/v8/src/arm/builtins-arm.cc +117 -27
- data/vendor/v8/src/arm/code-stubs-arm.cc +1241 -700
- data/vendor/v8/src/arm/code-stubs-arm.h +35 -138
- data/vendor/v8/src/arm/codegen-arm.cc +285 -16
- data/vendor/v8/src/arm/codegen-arm.h +22 -0
- data/vendor/v8/src/arm/constants-arm.cc +5 -3
- data/vendor/v8/src/arm/constants-arm.h +24 -11
- data/vendor/v8/src/arm/debug-arm.cc +3 -3
- data/vendor/v8/src/arm/deoptimizer-arm.cc +382 -92
- data/vendor/v8/src/arm/disasm-arm.cc +61 -12
- data/vendor/v8/src/arm/frames-arm.h +0 -14
- data/vendor/v8/src/arm/full-codegen-arm.cc +332 -304
- data/vendor/v8/src/arm/ic-arm.cc +180 -259
- data/vendor/v8/src/arm/lithium-arm.cc +364 -316
- data/vendor/v8/src/arm/lithium-arm.h +512 -275
- data/vendor/v8/src/arm/lithium-codegen-arm.cc +1768 -809
- data/vendor/v8/src/arm/lithium-codegen-arm.h +97 -35
- data/vendor/v8/src/arm/lithium-gap-resolver-arm.cc +12 -5
- data/vendor/v8/src/arm/macro-assembler-arm.cc +439 -228
- data/vendor/v8/src/arm/macro-assembler-arm.h +116 -70
- data/vendor/v8/src/arm/regexp-macro-assembler-arm.cc +54 -44
- data/vendor/v8/src/arm/regexp-macro-assembler-arm.h +3 -10
- data/vendor/v8/src/arm/simulator-arm.cc +272 -238
- data/vendor/v8/src/arm/simulator-arm.h +38 -8
- data/vendor/v8/src/arm/stub-cache-arm.cc +522 -895
- data/vendor/v8/src/array.js +101 -70
- data/vendor/v8/src/assembler.cc +270 -19
- data/vendor/v8/src/assembler.h +110 -15
- data/vendor/v8/src/ast.cc +79 -69
- data/vendor/v8/src/ast.h +255 -301
- data/vendor/v8/src/atomicops.h +7 -1
- data/vendor/v8/src/atomicops_internals_tsan.h +335 -0
- data/vendor/v8/src/bootstrapper.cc +481 -418
- data/vendor/v8/src/bootstrapper.h +4 -4
- data/vendor/v8/src/builtins.cc +498 -311
- data/vendor/v8/src/builtins.h +75 -47
- data/vendor/v8/src/checks.cc +2 -1
- data/vendor/v8/src/checks.h +8 -0
- data/vendor/v8/src/code-stubs-hydrogen.cc +253 -0
- data/vendor/v8/src/code-stubs.cc +249 -84
- data/vendor/v8/src/code-stubs.h +501 -169
- data/vendor/v8/src/codegen.cc +36 -18
- data/vendor/v8/src/codegen.h +25 -3
- data/vendor/v8/src/collection.js +54 -17
- data/vendor/v8/src/compilation-cache.cc +24 -16
- data/vendor/v8/src/compilation-cache.h +15 -6
- data/vendor/v8/src/compiler.cc +497 -195
- data/vendor/v8/src/compiler.h +246 -38
- data/vendor/v8/src/contexts.cc +64 -24
- data/vendor/v8/src/contexts.h +60 -29
- data/vendor/v8/src/conversions-inl.h +24 -14
- data/vendor/v8/src/conversions.h +7 -4
- data/vendor/v8/src/counters.cc +21 -12
- data/vendor/v8/src/counters.h +44 -16
- data/vendor/v8/src/cpu-profiler.h +1 -1
- data/vendor/v8/src/d8-debug.cc +2 -2
- data/vendor/v8/src/d8-readline.cc +13 -2
- data/vendor/v8/src/d8.cc +681 -273
- data/vendor/v8/src/d8.gyp +4 -4
- data/vendor/v8/src/d8.h +38 -18
- data/vendor/v8/src/d8.js +0 -617
- data/vendor/v8/src/data-flow.h +55 -0
- data/vendor/v8/src/date.js +1 -42
- data/vendor/v8/src/dateparser-inl.h +5 -1
- data/vendor/v8/src/debug-agent.cc +10 -15
- data/vendor/v8/src/debug-debugger.js +147 -149
- data/vendor/v8/src/debug.cc +323 -164
- data/vendor/v8/src/debug.h +26 -14
- data/vendor/v8/src/deoptimizer.cc +765 -290
- data/vendor/v8/src/deoptimizer.h +130 -28
- data/vendor/v8/src/disassembler.cc +10 -4
- data/vendor/v8/src/elements-kind.cc +7 -2
- data/vendor/v8/src/elements-kind.h +19 -0
- data/vendor/v8/src/elements.cc +607 -285
- data/vendor/v8/src/elements.h +36 -13
- data/vendor/v8/src/execution.cc +52 -31
- data/vendor/v8/src/execution.h +4 -4
- data/vendor/v8/src/extensions/externalize-string-extension.cc +5 -4
- data/vendor/v8/src/extensions/gc-extension.cc +5 -1
- data/vendor/v8/src/extensions/statistics-extension.cc +153 -0
- data/vendor/v8/src/{inspector.h → extensions/statistics-extension.h} +12 -23
- data/vendor/v8/src/factory.cc +101 -134
- data/vendor/v8/src/factory.h +36 -31
- data/vendor/v8/src/flag-definitions.h +102 -25
- data/vendor/v8/src/flags.cc +9 -5
- data/vendor/v8/src/frames-inl.h +10 -0
- data/vendor/v8/src/frames.cc +116 -26
- data/vendor/v8/src/frames.h +96 -12
- data/vendor/v8/src/full-codegen.cc +219 -74
- data/vendor/v8/src/full-codegen.h +63 -21
- data/vendor/v8/src/func-name-inferrer.cc +8 -7
- data/vendor/v8/src/func-name-inferrer.h +5 -3
- data/vendor/v8/src/gdb-jit.cc +71 -57
- data/vendor/v8/src/global-handles.cc +230 -101
- data/vendor/v8/src/global-handles.h +26 -27
- data/vendor/v8/src/globals.h +17 -19
- data/vendor/v8/src/handles-inl.h +59 -12
- data/vendor/v8/src/handles.cc +180 -200
- data/vendor/v8/src/handles.h +80 -11
- data/vendor/v8/src/hashmap.h +60 -40
- data/vendor/v8/src/heap-inl.h +107 -45
- data/vendor/v8/src/heap-profiler.cc +38 -19
- data/vendor/v8/src/heap-profiler.h +24 -14
- data/vendor/v8/src/heap.cc +1123 -738
- data/vendor/v8/src/heap.h +385 -146
- data/vendor/v8/src/hydrogen-instructions.cc +700 -217
- data/vendor/v8/src/hydrogen-instructions.h +1158 -472
- data/vendor/v8/src/hydrogen.cc +3319 -1662
- data/vendor/v8/src/hydrogen.h +411 -170
- data/vendor/v8/src/ia32/assembler-ia32-inl.h +46 -16
- data/vendor/v8/src/ia32/assembler-ia32.cc +131 -61
- data/vendor/v8/src/ia32/assembler-ia32.h +115 -57
- data/vendor/v8/src/ia32/builtins-ia32.cc +99 -5
- data/vendor/v8/src/ia32/code-stubs-ia32.cc +787 -495
- data/vendor/v8/src/ia32/code-stubs-ia32.h +10 -100
- data/vendor/v8/src/ia32/codegen-ia32.cc +227 -23
- data/vendor/v8/src/ia32/codegen-ia32.h +14 -0
- data/vendor/v8/src/ia32/deoptimizer-ia32.cc +428 -87
- data/vendor/v8/src/ia32/disasm-ia32.cc +28 -1
- data/vendor/v8/src/ia32/frames-ia32.h +6 -16
- data/vendor/v8/src/ia32/full-codegen-ia32.cc +280 -272
- data/vendor/v8/src/ia32/ic-ia32.cc +150 -250
- data/vendor/v8/src/ia32/lithium-codegen-ia32.cc +1600 -517
- data/vendor/v8/src/ia32/lithium-codegen-ia32.h +90 -24
- data/vendor/v8/src/ia32/lithium-gap-resolver-ia32.cc +10 -6
- data/vendor/v8/src/ia32/lithium-gap-resolver-ia32.h +2 -2
- data/vendor/v8/src/ia32/lithium-ia32.cc +405 -302
- data/vendor/v8/src/ia32/lithium-ia32.h +526 -271
- data/vendor/v8/src/ia32/macro-assembler-ia32.cc +378 -119
- data/vendor/v8/src/ia32/macro-assembler-ia32.h +62 -28
- data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.cc +43 -30
- data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.h +2 -10
- data/vendor/v8/src/ia32/stub-cache-ia32.cc +492 -678
- data/vendor/v8/src/ic-inl.h +9 -4
- data/vendor/v8/src/ic.cc +836 -923
- data/vendor/v8/src/ic.h +228 -247
- data/vendor/v8/src/incremental-marking-inl.h +26 -30
- data/vendor/v8/src/incremental-marking.cc +276 -248
- data/vendor/v8/src/incremental-marking.h +29 -37
- data/vendor/v8/src/interface.cc +34 -25
- data/vendor/v8/src/interface.h +69 -25
- data/vendor/v8/src/interpreter-irregexp.cc +2 -2
- data/vendor/v8/src/isolate.cc +382 -76
- data/vendor/v8/src/isolate.h +109 -56
- data/vendor/v8/src/json-parser.h +217 -104
- data/vendor/v8/src/json-stringifier.h +745 -0
- data/vendor/v8/src/json.js +10 -132
- data/vendor/v8/src/jsregexp-inl.h +106 -0
- data/vendor/v8/src/jsregexp.cc +517 -285
- data/vendor/v8/src/jsregexp.h +145 -117
- data/vendor/v8/src/list-inl.h +35 -22
- data/vendor/v8/src/list.h +46 -19
- data/vendor/v8/src/lithium-allocator-inl.h +22 -2
- data/vendor/v8/src/lithium-allocator.cc +85 -70
- data/vendor/v8/src/lithium-allocator.h +21 -39
- data/vendor/v8/src/lithium.cc +259 -5
- data/vendor/v8/src/lithium.h +131 -32
- data/vendor/v8/src/liveedit-debugger.js +52 -3
- data/vendor/v8/src/liveedit.cc +393 -113
- data/vendor/v8/src/liveedit.h +7 -3
- data/vendor/v8/src/log-utils.cc +4 -2
- data/vendor/v8/src/log.cc +170 -140
- data/vendor/v8/src/log.h +62 -11
- data/vendor/v8/src/macro-assembler.h +17 -0
- data/vendor/v8/src/macros.py +2 -0
- data/vendor/v8/src/mark-compact-inl.h +3 -23
- data/vendor/v8/src/mark-compact.cc +801 -830
- data/vendor/v8/src/mark-compact.h +154 -47
- data/vendor/v8/src/marking-thread.cc +85 -0
- data/vendor/v8/src/{inspector.cc → marking-thread.h} +32 -24
- data/vendor/v8/src/math.js +12 -18
- data/vendor/v8/src/messages.cc +18 -8
- data/vendor/v8/src/messages.js +314 -261
- data/vendor/v8/src/mips/assembler-mips-inl.h +58 -6
- data/vendor/v8/src/mips/assembler-mips.cc +92 -75
- data/vendor/v8/src/mips/assembler-mips.h +54 -60
- data/vendor/v8/src/mips/builtins-mips.cc +116 -17
- data/vendor/v8/src/mips/code-stubs-mips.cc +919 -556
- data/vendor/v8/src/mips/code-stubs-mips.h +22 -131
- data/vendor/v8/src/mips/codegen-mips.cc +281 -6
- data/vendor/v8/src/mips/codegen-mips.h +22 -0
- data/vendor/v8/src/mips/constants-mips.cc +2 -0
- data/vendor/v8/src/mips/constants-mips.h +12 -2
- data/vendor/v8/src/mips/deoptimizer-mips.cc +286 -50
- data/vendor/v8/src/mips/disasm-mips.cc +13 -0
- data/vendor/v8/src/mips/full-codegen-mips.cc +297 -284
- data/vendor/v8/src/mips/ic-mips.cc +182 -263
- data/vendor/v8/src/mips/lithium-codegen-mips.cc +1208 -556
- data/vendor/v8/src/mips/lithium-codegen-mips.h +72 -19
- data/vendor/v8/src/mips/lithium-gap-resolver-mips.cc +9 -2
- data/vendor/v8/src/mips/lithium-mips.cc +290 -302
- data/vendor/v8/src/mips/lithium-mips.h +463 -266
- data/vendor/v8/src/mips/macro-assembler-mips.cc +208 -115
- data/vendor/v8/src/mips/macro-assembler-mips.h +67 -24
- data/vendor/v8/src/mips/regexp-macro-assembler-mips.cc +40 -25
- data/vendor/v8/src/mips/regexp-macro-assembler-mips.h +3 -9
- data/vendor/v8/src/mips/simulator-mips.cc +112 -40
- data/vendor/v8/src/mips/simulator-mips.h +5 -0
- data/vendor/v8/src/mips/stub-cache-mips.cc +502 -884
- data/vendor/v8/src/mirror-debugger.js +157 -30
- data/vendor/v8/src/mksnapshot.cc +88 -14
- data/vendor/v8/src/object-observe.js +235 -0
- data/vendor/v8/src/objects-debug.cc +178 -176
- data/vendor/v8/src/objects-inl.h +1333 -486
- data/vendor/v8/src/objects-printer.cc +125 -43
- data/vendor/v8/src/objects-visiting-inl.h +578 -6
- data/vendor/v8/src/objects-visiting.cc +2 -2
- data/vendor/v8/src/objects-visiting.h +172 -79
- data/vendor/v8/src/objects.cc +3533 -2885
- data/vendor/v8/src/objects.h +1352 -1131
- data/vendor/v8/src/optimizing-compiler-thread.cc +152 -0
- data/vendor/v8/src/optimizing-compiler-thread.h +111 -0
- data/vendor/v8/src/parser.cc +390 -500
- data/vendor/v8/src/parser.h +45 -33
- data/vendor/v8/src/platform-cygwin.cc +10 -21
- data/vendor/v8/src/platform-freebsd.cc +36 -41
- data/vendor/v8/src/platform-linux.cc +160 -124
- data/vendor/v8/src/platform-macos.cc +30 -27
- data/vendor/v8/src/platform-nullos.cc +17 -1
- data/vendor/v8/src/platform-openbsd.cc +19 -50
- data/vendor/v8/src/platform-posix.cc +14 -0
- data/vendor/v8/src/platform-solaris.cc +20 -53
- data/vendor/v8/src/platform-win32.cc +49 -26
- data/vendor/v8/src/platform.h +40 -1
- data/vendor/v8/src/preparser.cc +8 -5
- data/vendor/v8/src/preparser.h +2 -2
- data/vendor/v8/src/prettyprinter.cc +16 -0
- data/vendor/v8/src/prettyprinter.h +2 -0
- data/vendor/v8/src/profile-generator-inl.h +1 -0
- data/vendor/v8/src/profile-generator.cc +209 -147
- data/vendor/v8/src/profile-generator.h +15 -12
- data/vendor/v8/src/property-details.h +46 -31
- data/vendor/v8/src/property.cc +27 -46
- data/vendor/v8/src/property.h +163 -83
- data/vendor/v8/src/proxy.js +7 -2
- data/vendor/v8/src/regexp-macro-assembler-irregexp.cc +4 -13
- data/vendor/v8/src/regexp-macro-assembler-irregexp.h +1 -2
- data/vendor/v8/src/regexp-macro-assembler-tracer.cc +1 -11
- data/vendor/v8/src/regexp-macro-assembler-tracer.h +0 -1
- data/vendor/v8/src/regexp-macro-assembler.cc +31 -14
- data/vendor/v8/src/regexp-macro-assembler.h +14 -11
- data/vendor/v8/src/regexp-stack.cc +1 -0
- data/vendor/v8/src/regexp.js +9 -8
- data/vendor/v8/src/rewriter.cc +18 -7
- data/vendor/v8/src/runtime-profiler.cc +52 -43
- data/vendor/v8/src/runtime-profiler.h +0 -25
- data/vendor/v8/src/runtime.cc +2006 -2023
- data/vendor/v8/src/runtime.h +56 -49
- data/vendor/v8/src/safepoint-table.cc +12 -18
- data/vendor/v8/src/safepoint-table.h +11 -8
- data/vendor/v8/src/scanner.cc +1 -0
- data/vendor/v8/src/scanner.h +4 -10
- data/vendor/v8/src/scopeinfo.cc +35 -9
- data/vendor/v8/src/scopeinfo.h +64 -3
- data/vendor/v8/src/scopes.cc +251 -156
- data/vendor/v8/src/scopes.h +61 -27
- data/vendor/v8/src/serialize.cc +348 -396
- data/vendor/v8/src/serialize.h +125 -114
- data/vendor/v8/src/small-pointer-list.h +11 -11
- data/vendor/v8/src/{smart-array-pointer.h → smart-pointers.h} +64 -15
- data/vendor/v8/src/snapshot-common.cc +64 -15
- data/vendor/v8/src/snapshot-empty.cc +7 -1
- data/vendor/v8/src/snapshot.h +9 -2
- data/vendor/v8/src/spaces-inl.h +17 -0
- data/vendor/v8/src/spaces.cc +477 -183
- data/vendor/v8/src/spaces.h +238 -58
- data/vendor/v8/src/splay-tree-inl.h +8 -7
- data/vendor/v8/src/splay-tree.h +24 -10
- data/vendor/v8/src/store-buffer.cc +12 -5
- data/vendor/v8/src/store-buffer.h +2 -4
- data/vendor/v8/src/string-search.h +22 -6
- data/vendor/v8/src/string-stream.cc +11 -8
- data/vendor/v8/src/string.js +47 -15
- data/vendor/v8/src/stub-cache.cc +461 -224
- data/vendor/v8/src/stub-cache.h +164 -102
- data/vendor/v8/src/sweeper-thread.cc +105 -0
- data/vendor/v8/src/sweeper-thread.h +81 -0
- data/vendor/v8/src/token.h +1 -0
- data/vendor/v8/src/transitions-inl.h +220 -0
- data/vendor/v8/src/transitions.cc +160 -0
- data/vendor/v8/src/transitions.h +207 -0
- data/vendor/v8/src/type-info.cc +182 -181
- data/vendor/v8/src/type-info.h +31 -19
- data/vendor/v8/src/unicode-inl.h +62 -106
- data/vendor/v8/src/unicode.cc +57 -67
- data/vendor/v8/src/unicode.h +45 -91
- data/vendor/v8/src/uri.js +57 -29
- data/vendor/v8/src/utils.h +105 -5
- data/vendor/v8/src/v8-counters.cc +54 -11
- data/vendor/v8/src/v8-counters.h +134 -19
- data/vendor/v8/src/v8.cc +29 -29
- data/vendor/v8/src/v8.h +1 -0
- data/vendor/v8/src/v8conversions.cc +26 -22
- data/vendor/v8/src/v8globals.h +56 -43
- data/vendor/v8/src/v8natives.js +83 -30
- data/vendor/v8/src/v8threads.cc +42 -21
- data/vendor/v8/src/v8threads.h +4 -1
- data/vendor/v8/src/v8utils.cc +9 -93
- data/vendor/v8/src/v8utils.h +37 -33
- data/vendor/v8/src/variables.cc +6 -3
- data/vendor/v8/src/variables.h +6 -13
- data/vendor/v8/src/version.cc +2 -2
- data/vendor/v8/src/vm-state-inl.h +11 -0
- data/vendor/v8/src/x64/assembler-x64-inl.h +39 -8
- data/vendor/v8/src/x64/assembler-x64.cc +78 -64
- data/vendor/v8/src/x64/assembler-x64.h +38 -33
- data/vendor/v8/src/x64/builtins-x64.cc +105 -7
- data/vendor/v8/src/x64/code-stubs-x64.cc +790 -413
- data/vendor/v8/src/x64/code-stubs-x64.h +10 -106
- data/vendor/v8/src/x64/codegen-x64.cc +210 -8
- data/vendor/v8/src/x64/codegen-x64.h +20 -1
- data/vendor/v8/src/x64/deoptimizer-x64.cc +336 -75
- data/vendor/v8/src/x64/disasm-x64.cc +15 -0
- data/vendor/v8/src/x64/frames-x64.h +0 -14
- data/vendor/v8/src/x64/full-codegen-x64.cc +293 -270
- data/vendor/v8/src/x64/ic-x64.cc +153 -251
- data/vendor/v8/src/x64/lithium-codegen-x64.cc +1379 -531
- data/vendor/v8/src/x64/lithium-codegen-x64.h +67 -23
- data/vendor/v8/src/x64/lithium-gap-resolver-x64.cc +2 -2
- data/vendor/v8/src/x64/lithium-x64.cc +349 -289
- data/vendor/v8/src/x64/lithium-x64.h +460 -250
- data/vendor/v8/src/x64/macro-assembler-x64.cc +350 -177
- data/vendor/v8/src/x64/macro-assembler-x64.h +67 -49
- data/vendor/v8/src/x64/regexp-macro-assembler-x64.cc +46 -33
- data/vendor/v8/src/x64/regexp-macro-assembler-x64.h +2 -3
- data/vendor/v8/src/x64/stub-cache-x64.cc +484 -653
- data/vendor/v8/src/zone-inl.h +9 -27
- data/vendor/v8/src/zone.cc +5 -5
- data/vendor/v8/src/zone.h +53 -27
- data/vendor/v8/test/benchmarks/testcfg.py +5 -0
- data/vendor/v8/test/cctest/cctest.cc +4 -0
- data/vendor/v8/test/cctest/cctest.gyp +3 -1
- data/vendor/v8/test/cctest/cctest.h +57 -9
- data/vendor/v8/test/cctest/cctest.status +15 -15
- data/vendor/v8/test/cctest/test-accessors.cc +26 -0
- data/vendor/v8/test/cctest/test-alloc.cc +22 -30
- data/vendor/v8/test/cctest/test-api.cc +1943 -314
- data/vendor/v8/test/cctest/test-assembler-arm.cc +133 -13
- data/vendor/v8/test/cctest/test-assembler-ia32.cc +1 -1
- data/vendor/v8/test/cctest/test-assembler-mips.cc +12 -0
- data/vendor/v8/test/cctest/test-ast.cc +4 -2
- data/vendor/v8/test/cctest/test-compiler.cc +61 -29
- data/vendor/v8/test/cctest/test-dataflow.cc +2 -2
- data/vendor/v8/test/cctest/test-debug.cc +212 -33
- data/vendor/v8/test/cctest/test-decls.cc +257 -11
- data/vendor/v8/test/cctest/test-dictionary.cc +24 -10
- data/vendor/v8/test/cctest/test-disasm-arm.cc +118 -1
- data/vendor/v8/test/cctest/test-disasm-ia32.cc +3 -2
- data/vendor/v8/test/cctest/test-flags.cc +14 -1
- data/vendor/v8/test/cctest/test-func-name-inference.cc +7 -4
- data/vendor/v8/test/cctest/test-global-object.cc +51 -0
- data/vendor/v8/test/cctest/test-hashing.cc +32 -23
- data/vendor/v8/test/cctest/test-heap-profiler.cc +131 -77
- data/vendor/v8/test/cctest/test-heap.cc +1084 -143
- data/vendor/v8/test/cctest/test-list.cc +1 -1
- data/vendor/v8/test/cctest/test-liveedit.cc +3 -2
- data/vendor/v8/test/cctest/test-lockers.cc +12 -13
- data/vendor/v8/test/cctest/test-log.cc +10 -8
- data/vendor/v8/test/cctest/test-macro-assembler-x64.cc +2 -2
- data/vendor/v8/test/cctest/test-mark-compact.cc +44 -22
- data/vendor/v8/test/cctest/test-object-observe.cc +434 -0
- data/vendor/v8/test/cctest/test-parsing.cc +86 -39
- data/vendor/v8/test/cctest/test-platform-linux.cc +6 -0
- data/vendor/v8/test/cctest/test-platform-win32.cc +7 -0
- data/vendor/v8/test/cctest/test-random.cc +5 -4
- data/vendor/v8/test/cctest/test-regexp.cc +137 -101
- data/vendor/v8/test/cctest/test-serialize.cc +150 -230
- data/vendor/v8/test/cctest/test-sockets.cc +1 -1
- data/vendor/v8/test/cctest/test-spaces.cc +139 -0
- data/vendor/v8/test/cctest/test-strings.cc +736 -74
- data/vendor/v8/test/cctest/test-thread-termination.cc +10 -11
- data/vendor/v8/test/cctest/test-threads.cc +4 -4
- data/vendor/v8/test/cctest/test-utils.cc +16 -0
- data/vendor/v8/test/cctest/test-weakmaps.cc +7 -3
- data/vendor/v8/test/cctest/testcfg.py +64 -5
- data/vendor/v8/test/es5conform/testcfg.py +5 -0
- data/vendor/v8/test/message/message.status +1 -1
- data/vendor/v8/test/message/overwritten-builtins.out +3 -0
- data/vendor/v8/test/message/testcfg.py +89 -8
- data/vendor/v8/test/message/try-catch-finally-no-message.out +26 -26
- data/vendor/v8/test/mjsunit/accessor-map-sharing.js +18 -2
- data/vendor/v8/test/mjsunit/allocation-site-info.js +126 -0
- data/vendor/v8/test/mjsunit/array-bounds-check-removal.js +62 -1
- data/vendor/v8/test/mjsunit/array-iteration.js +1 -1
- data/vendor/v8/test/mjsunit/array-literal-transitions.js +2 -0
- data/vendor/v8/test/mjsunit/array-natives-elements.js +317 -0
- data/vendor/v8/test/mjsunit/array-reduce.js +8 -8
- data/vendor/v8/test/mjsunit/array-slice.js +12 -0
- data/vendor/v8/test/mjsunit/array-store-and-grow.js +4 -1
- data/vendor/v8/test/mjsunit/assert-opt-and-deopt.js +1 -1
- data/vendor/v8/test/mjsunit/bugs/bug-2337.js +53 -0
- data/vendor/v8/test/mjsunit/compare-known-objects-slow.js +69 -0
- data/vendor/v8/test/mjsunit/compiler/alloc-object-huge.js +3 -1
- data/vendor/v8/test/mjsunit/compiler/inline-accessors.js +368 -0
- data/vendor/v8/test/mjsunit/compiler/inline-arguments.js +87 -1
- data/vendor/v8/test/mjsunit/compiler/inline-closures.js +49 -0
- data/vendor/v8/test/mjsunit/compiler/inline-construct.js +55 -43
- data/vendor/v8/test/mjsunit/compiler/inline-literals.js +39 -0
- data/vendor/v8/test/mjsunit/compiler/multiply-add.js +69 -0
- data/vendor/v8/test/mjsunit/compiler/optimized-closures.js +57 -0
- data/vendor/v8/test/mjsunit/compiler/parallel-proto-change.js +44 -0
- data/vendor/v8/test/mjsunit/compiler/property-static.js +69 -0
- data/vendor/v8/test/mjsunit/compiler/proto-chain-constant.js +55 -0
- data/vendor/v8/test/mjsunit/compiler/proto-chain-load.js +44 -0
- data/vendor/v8/test/mjsunit/compiler/regress-gvn.js +3 -2
- data/vendor/v8/test/mjsunit/compiler/regress-or.js +6 -2
- data/vendor/v8/test/mjsunit/compiler/rotate.js +224 -0
- data/vendor/v8/test/mjsunit/compiler/uint32.js +173 -0
- data/vendor/v8/test/mjsunit/count-based-osr.js +2 -1
- data/vendor/v8/test/mjsunit/d8-os.js +3 -3
- data/vendor/v8/test/mjsunit/date-parse.js +3 -0
- data/vendor/v8/test/mjsunit/date.js +22 -0
- data/vendor/v8/test/mjsunit/debug-break-inline.js +1 -0
- data/vendor/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js +22 -12
- data/vendor/v8/test/mjsunit/debug-evaluate-locals-optimized.js +21 -10
- data/vendor/v8/test/mjsunit/debug-liveedit-compile-error.js +60 -0
- data/vendor/v8/test/mjsunit/debug-liveedit-double-call.js +142 -0
- data/vendor/v8/test/mjsunit/debug-liveedit-literals.js +94 -0
- data/vendor/v8/test/mjsunit/debug-liveedit-restart-frame.js +153 -0
- data/vendor/v8/test/mjsunit/debug-multiple-breakpoints.js +1 -1
- data/vendor/v8/test/mjsunit/debug-script-breakpoints-closure.js +67 -0
- data/vendor/v8/test/mjsunit/debug-script-breakpoints-nested.js +82 -0
- data/vendor/v8/test/mjsunit/debug-script.js +4 -2
- data/vendor/v8/test/mjsunit/debug-set-variable-value.js +308 -0
- data/vendor/v8/test/mjsunit/debug-stepout-scope-part1.js +190 -0
- data/vendor/v8/test/mjsunit/debug-stepout-scope-part2.js +83 -0
- data/vendor/v8/test/mjsunit/debug-stepout-scope-part3.js +80 -0
- data/vendor/v8/test/mjsunit/debug-stepout-scope-part4.js +80 -0
- data/vendor/v8/test/mjsunit/debug-stepout-scope-part5.js +77 -0
- data/vendor/v8/test/mjsunit/debug-stepout-scope-part6.js +79 -0
- data/vendor/v8/test/mjsunit/debug-stepout-scope-part7.js +79 -0
- data/vendor/v8/test/mjsunit/{debug-stepout-scope.js → debug-stepout-scope-part8.js} +0 -189
- data/vendor/v8/test/mjsunit/delete-non-configurable.js +74 -0
- data/vendor/v8/test/mjsunit/deopt-minus-zero.js +56 -0
- data/vendor/v8/test/mjsunit/elements-kind.js +6 -4
- data/vendor/v8/test/mjsunit/elements-length-no-holey.js +33 -0
- data/vendor/v8/test/mjsunit/elements-transition-hoisting.js +46 -19
- data/vendor/v8/test/mjsunit/error-accessors.js +54 -0
- data/vendor/v8/test/mjsunit/error-constructors.js +1 -14
- data/vendor/v8/test/mjsunit/error-tostring.js +8 -0
- data/vendor/v8/test/mjsunit/eval-stack-trace.js +204 -0
- data/vendor/v8/test/mjsunit/external-array.js +364 -1
- data/vendor/v8/test/mjsunit/fast-array-length.js +37 -0
- data/vendor/v8/test/mjsunit/fast-non-keyed.js +113 -0
- data/vendor/v8/test/mjsunit/fast-prototype.js +117 -0
- data/vendor/v8/test/mjsunit/function-call.js +14 -18
- data/vendor/v8/test/mjsunit/fuzz-natives-part1.js +230 -0
- data/vendor/v8/test/mjsunit/fuzz-natives-part2.js +229 -0
- data/vendor/v8/test/mjsunit/fuzz-natives-part3.js +229 -0
- data/vendor/v8/test/mjsunit/{fuzz-natives.js → fuzz-natives-part4.js} +12 -2
- data/vendor/v8/test/mjsunit/generated-transition-stub.js +218 -0
- data/vendor/v8/test/mjsunit/greedy.js +1 -1
- data/vendor/v8/test/mjsunit/harmony/block-conflicts.js +2 -1
- data/vendor/v8/test/mjsunit/harmony/block-let-crankshaft.js +1 -1
- data/vendor/v8/test/mjsunit/harmony/collections.js +69 -11
- data/vendor/v8/test/mjsunit/harmony/debug-blockscopes.js +2 -2
- data/vendor/v8/test/mjsunit/harmony/module-linking.js +180 -3
- data/vendor/v8/test/mjsunit/harmony/module-parsing.js +31 -0
- data/vendor/v8/test/mjsunit/harmony/module-recompile.js +87 -0
- data/vendor/v8/test/mjsunit/harmony/module-resolution.js +15 -2
- data/vendor/v8/test/mjsunit/harmony/object-observe.js +1056 -0
- data/vendor/v8/test/mjsunit/harmony/proxies-json.js +178 -0
- data/vendor/v8/test/mjsunit/harmony/proxies.js +25 -10
- data/vendor/v8/test/mjsunit/json-parser-recursive.js +33 -0
- data/vendor/v8/test/mjsunit/json-stringify-recursive.js +52 -0
- data/vendor/v8/test/mjsunit/json.js +38 -2
- data/vendor/v8/test/mjsunit/json2.js +153 -0
- data/vendor/v8/test/mjsunit/limit-locals.js +5 -4
- data/vendor/v8/test/mjsunit/manual-parallel-recompile.js +79 -0
- data/vendor/v8/test/mjsunit/math-exp-precision.js +64 -0
- data/vendor/v8/test/mjsunit/math-floor-negative.js +59 -0
- data/vendor/v8/test/mjsunit/math-floor-of-div-minus-zero.js +41 -0
- data/vendor/v8/test/mjsunit/math-floor-of-div-nosudiv.js +288 -0
- data/vendor/v8/test/mjsunit/math-floor-of-div.js +81 -9
- data/vendor/v8/test/mjsunit/{math-floor.js → math-floor-part1.js} +1 -72
- data/vendor/v8/test/mjsunit/math-floor-part2.js +76 -0
- data/vendor/v8/test/mjsunit/math-floor-part3.js +78 -0
- data/vendor/v8/test/mjsunit/math-floor-part4.js +76 -0
- data/vendor/v8/test/mjsunit/mirror-object.js +43 -9
- data/vendor/v8/test/mjsunit/mjsunit.js +1 -1
- data/vendor/v8/test/mjsunit/mjsunit.status +52 -27
- data/vendor/v8/test/mjsunit/mul-exhaustive-part1.js +491 -0
- data/vendor/v8/test/mjsunit/mul-exhaustive-part10.js +470 -0
- data/vendor/v8/test/mjsunit/mul-exhaustive-part2.js +525 -0
- data/vendor/v8/test/mjsunit/mul-exhaustive-part3.js +532 -0
- data/vendor/v8/test/mjsunit/mul-exhaustive-part4.js +509 -0
- data/vendor/v8/test/mjsunit/mul-exhaustive-part5.js +505 -0
- data/vendor/v8/test/mjsunit/mul-exhaustive-part6.js +554 -0
- data/vendor/v8/test/mjsunit/mul-exhaustive-part7.js +497 -0
- data/vendor/v8/test/mjsunit/mul-exhaustive-part8.js +526 -0
- data/vendor/v8/test/mjsunit/mul-exhaustive-part9.js +533 -0
- data/vendor/v8/test/mjsunit/new-function.js +34 -0
- data/vendor/v8/test/mjsunit/numops-fuzz-part1.js +1172 -0
- data/vendor/v8/test/mjsunit/numops-fuzz-part2.js +1178 -0
- data/vendor/v8/test/mjsunit/numops-fuzz-part3.js +1178 -0
- data/vendor/v8/test/mjsunit/numops-fuzz-part4.js +1177 -0
- data/vendor/v8/test/mjsunit/object-define-property.js +107 -2
- data/vendor/v8/test/mjsunit/override-read-only-property.js +6 -4
- data/vendor/v8/test/mjsunit/packed-elements.js +2 -2
- data/vendor/v8/test/mjsunit/parse-int-float.js +4 -4
- data/vendor/v8/test/mjsunit/pixel-array-rounding.js +1 -1
- data/vendor/v8/test/mjsunit/readonly.js +228 -0
- data/vendor/v8/test/mjsunit/regexp-capture-3.js +16 -18
- data/vendor/v8/test/mjsunit/regexp-capture.js +2 -0
- data/vendor/v8/test/mjsunit/regexp-global.js +122 -0
- data/vendor/v8/test/mjsunit/regexp-results-cache.js +78 -0
- data/vendor/v8/test/mjsunit/regress/regress-1117.js +12 -3
- data/vendor/v8/test/mjsunit/regress/regress-1118.js +1 -1
- data/vendor/v8/test/mjsunit/regress/regress-115100.js +36 -0
- data/vendor/v8/test/mjsunit/regress/regress-1199637.js +1 -3
- data/vendor/v8/test/mjsunit/regress/regress-121407.js +1 -1
- data/vendor/v8/test/mjsunit/regress/regress-131923.js +30 -0
- data/vendor/v8/test/mjsunit/regress/regress-131994.js +70 -0
- data/vendor/v8/test/mjsunit/regress/regress-133211.js +35 -0
- data/vendor/v8/test/mjsunit/regress/regress-133211b.js +39 -0
- data/vendor/v8/test/mjsunit/regress/regress-136048.js +34 -0
- data/vendor/v8/test/mjsunit/regress/regress-137768.js +73 -0
- data/vendor/v8/test/mjsunit/regress/regress-143967.js +34 -0
- data/vendor/v8/test/mjsunit/regress/regress-145201.js +107 -0
- data/vendor/v8/test/mjsunit/regress/regress-147497.js +45 -0
- data/vendor/v8/test/mjsunit/regress/regress-148378.js +38 -0
- data/vendor/v8/test/mjsunit/regress/regress-1563.js +1 -1
- data/vendor/v8/test/mjsunit/regress/regress-1591.js +48 -0
- data/vendor/v8/test/mjsunit/regress/regress-164442.js +45 -0
- data/vendor/v8/test/mjsunit/regress/regress-165637.js +61 -0
- data/vendor/v8/test/mjsunit/regress/regress-166379.js +39 -0
- data/vendor/v8/test/mjsunit/regress/regress-166553.js +33 -0
- data/vendor/v8/test/mjsunit/regress/regress-1692.js +1 -1
- data/vendor/v8/test/mjsunit/regress/regress-171641.js +40 -0
- data/vendor/v8/test/mjsunit/regress/regress-1980.js +1 -1
- data/vendor/v8/test/mjsunit/regress/regress-2073.js +99 -0
- data/vendor/v8/test/mjsunit/regress/regress-2119.js +36 -0
- data/vendor/v8/test/mjsunit/regress/regress-2156.js +39 -0
- data/vendor/v8/test/mjsunit/regress/regress-2163.js +70 -0
- data/vendor/v8/test/mjsunit/regress/regress-2170.js +58 -0
- data/vendor/v8/test/mjsunit/regress/regress-2172.js +35 -0
- data/vendor/v8/test/mjsunit/regress/regress-2185-2.js +145 -0
- data/vendor/v8/test/mjsunit/regress/regress-2185.js +38 -0
- data/vendor/v8/test/mjsunit/regress/regress-2186.js +49 -0
- data/vendor/v8/test/mjsunit/regress/regress-2193.js +58 -0
- data/vendor/v8/test/mjsunit/regress/regress-2219.js +32 -0
- data/vendor/v8/test/mjsunit/regress/regress-2225.js +65 -0
- data/vendor/v8/test/mjsunit/regress/regress-2226.js +36 -0
- data/vendor/v8/test/mjsunit/regress/regress-2234.js +41 -0
- data/vendor/v8/test/mjsunit/regress/regress-2243.js +31 -0
- data/vendor/v8/test/mjsunit/regress/regress-2249.js +33 -0
- data/vendor/v8/test/mjsunit/regress/regress-2250.js +68 -0
- data/vendor/v8/test/mjsunit/regress/regress-2261.js +113 -0
- data/vendor/v8/test/mjsunit/regress/regress-2263.js +30 -0
- data/vendor/v8/test/mjsunit/regress/regress-2284.js +32 -0
- data/vendor/v8/test/mjsunit/regress/regress-2285.js +32 -0
- data/vendor/v8/test/mjsunit/regress/regress-2286.js +32 -0
- data/vendor/v8/test/mjsunit/regress/regress-2289.js +34 -0
- data/vendor/v8/test/mjsunit/regress/regress-2291.js +36 -0
- data/vendor/v8/test/mjsunit/regress/regress-2294.js +70 -0
- data/vendor/v8/test/mjsunit/regress/regress-2296.js +40 -0
- data/vendor/v8/test/mjsunit/regress/regress-2315.js +40 -0
- data/vendor/v8/test/mjsunit/regress/regress-2318.js +66 -0
- data/vendor/v8/test/mjsunit/regress/regress-2322.js +36 -0
- data/vendor/v8/test/mjsunit/regress/regress-2326.js +54 -0
- data/vendor/v8/test/mjsunit/regress/regress-2336.js +53 -0
- data/vendor/v8/test/mjsunit/regress/regress-2339.js +59 -0
- data/vendor/v8/test/mjsunit/regress/regress-2346.js +123 -0
- data/vendor/v8/test/mjsunit/regress/regress-2373.js +29 -0
- data/vendor/v8/test/mjsunit/regress/regress-2374.js +33 -0
- data/vendor/v8/test/mjsunit/regress/regress-2398.js +41 -0
- data/vendor/v8/test/mjsunit/regress/regress-2410.js +36 -0
- data/vendor/v8/test/mjsunit/regress/regress-2416.js +75 -0
- data/vendor/v8/test/mjsunit/regress/regress-2419.js +37 -0
- data/vendor/v8/test/mjsunit/regress/regress-2433.js +36 -0
- data/vendor/v8/test/mjsunit/regress/regress-2437.js +156 -0
- data/vendor/v8/test/mjsunit/regress/regress-2438.js +52 -0
- data/vendor/v8/test/mjsunit/regress/regress-2443.js +129 -0
- data/vendor/v8/test/mjsunit/regress/regress-2444.js +120 -0
- data/vendor/v8/test/mjsunit/regress/regress-2489.js +50 -0
- data/vendor/v8/test/mjsunit/regress/regress-2499.js +40 -0
- data/vendor/v8/test/mjsunit/regress/regress-334.js +1 -1
- data/vendor/v8/test/mjsunit/regress/regress-492.js +39 -1
- data/vendor/v8/test/mjsunit/regress/regress-builtin-array-op.js +38 -0
- data/vendor/v8/test/mjsunit/regress/regress-cnlt-elements.js +43 -0
- data/vendor/v8/test/mjsunit/regress/regress-cnlt-enum-indices.js +45 -0
- data/vendor/v8/test/mjsunit/regress/regress-cntl-descriptors-enum.js +46 -0
- data/vendor/v8/test/mjsunit/regress/regress-convert-enum.js +60 -0
- data/vendor/v8/test/mjsunit/regress/regress-convert-enum2.js +46 -0
- data/vendor/v8/test/mjsunit/regress/regress-convert-transition.js +40 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-119926.js +3 -1
- data/vendor/v8/test/mjsunit/regress/regress-crbug-125148.js +90 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-134055.js +63 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-134609.js +59 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-135008.js +45 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-135066.js +55 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-137689.js +47 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-138887.js +48 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-140083.js +44 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-142087.js +38 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-142218.js +44 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-145961.js +39 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-146910.js +33 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-147475.js +48 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-148376.js +35 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-150545.js +53 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-150729.js +39 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-157019.js +54 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-157520.js +38 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-158185.js +39 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-160010.js +35 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-162085.js +71 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-168545.js +34 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-170856.js +33 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-172345.js +34 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-173974.js +36 -0
- data/vendor/v8/test/mjsunit/regress/regress-crbug-18639.js +9 -5
- data/vendor/v8/test/mjsunit/regress/regress-debug-code-recompilation.js +2 -1
- data/vendor/v8/test/mjsunit/regress/regress-deep-proto.js +45 -0
- data/vendor/v8/test/mjsunit/regress/regress-delete-empty-double.js +40 -0
- data/vendor/v8/test/mjsunit/regress/regress-iteration-order.js +42 -0
- data/vendor/v8/test/mjsunit/regress/regress-json-stringify-gc.js +41 -0
- data/vendor/v8/test/mjsunit/regress/regress-latin-1.js +78 -0
- data/vendor/v8/test/mjsunit/regress/regress-load-elements.js +49 -0
- data/vendor/v8/test/mjsunit/regress/regress-observe-empty-double-array.js +38 -0
- data/vendor/v8/test/mjsunit/regress/regress-undefined-store-keyed-fast-element.js +37 -0
- data/vendor/v8/test/mjsunit/shift-for-integer-div.js +59 -0
- data/vendor/v8/test/mjsunit/stack-traces-gc.js +119 -0
- data/vendor/v8/test/mjsunit/stack-traces-overflow.js +122 -0
- data/vendor/v8/test/mjsunit/stack-traces.js +39 -1
- data/vendor/v8/test/mjsunit/str-to-num.js +7 -2
- data/vendor/v8/test/mjsunit/strict-mode.js +36 -11
- data/vendor/v8/test/mjsunit/string-charcodeat.js +3 -0
- data/vendor/v8/test/mjsunit/string-natives.js +72 -0
- data/vendor/v8/test/mjsunit/string-split.js +17 -0
- data/vendor/v8/test/mjsunit/testcfg.py +76 -6
- data/vendor/v8/test/mjsunit/tools/tickprocessor.js +4 -1
- data/vendor/v8/test/mjsunit/try-finally-continue.js +72 -0
- data/vendor/v8/test/mjsunit/typed-array-slice.js +61 -0
- data/vendor/v8/test/mjsunit/unbox-double-arrays.js +2 -0
- data/vendor/v8/test/mjsunit/uri.js +12 -0
- data/vendor/v8/test/mjsunit/with-readonly.js +4 -2
- data/vendor/v8/test/mozilla/mozilla.status +19 -113
- data/vendor/v8/test/mozilla/testcfg.py +122 -3
- data/vendor/v8/test/preparser/preparser.status +5 -0
- data/vendor/v8/test/preparser/strict-identifiers.pyt +1 -1
- data/vendor/v8/test/preparser/testcfg.py +101 -5
- data/vendor/v8/test/sputnik/sputnik.status +1 -1
- data/vendor/v8/test/sputnik/testcfg.py +5 -0
- data/vendor/v8/test/test262/README +2 -2
- data/vendor/v8/test/test262/test262.status +13 -36
- data/vendor/v8/test/test262/testcfg.py +102 -8
- data/vendor/v8/tools/android-build.sh +0 -0
- data/vendor/v8/tools/android-ll-prof.sh +69 -0
- data/vendor/v8/tools/android-run.py +109 -0
- data/vendor/v8/tools/android-sync.sh +105 -0
- data/vendor/v8/tools/bash-completion.sh +0 -0
- data/vendor/v8/tools/check-static-initializers.sh +0 -0
- data/vendor/v8/tools/common-includes.sh +15 -22
- data/vendor/v8/tools/disasm.py +4 -4
- data/vendor/v8/tools/fuzz-harness.sh +0 -0
- data/vendor/v8/tools/gen-postmortem-metadata.py +6 -8
- data/vendor/v8/tools/grokdump.py +404 -129
- data/vendor/v8/tools/gyp/v8.gyp +105 -43
- data/vendor/v8/tools/linux-tick-processor +5 -5
- data/vendor/v8/tools/ll_prof.py +75 -15
- data/vendor/v8/tools/merge-to-branch.sh +2 -2
- data/vendor/v8/tools/plot-timer-events +70 -0
- data/vendor/v8/tools/plot-timer-events.js +510 -0
- data/vendor/v8/tools/presubmit.py +1 -0
- data/vendor/v8/tools/push-to-trunk.sh +14 -4
- data/vendor/v8/tools/run-llprof.sh +69 -0
- data/vendor/v8/tools/run-tests.py +372 -0
- data/vendor/v8/tools/run-valgrind.py +1 -1
- data/vendor/v8/tools/status-file-converter.py +39 -0
- data/vendor/v8/tools/test-server.py +224 -0
- data/vendor/v8/tools/test-wrapper-gypbuild.py +13 -16
- data/vendor/v8/tools/test.py +10 -19
- data/vendor/v8/tools/testrunner/README +174 -0
- data/vendor/v8/tools/testrunner/__init__.py +26 -0
- data/vendor/v8/tools/testrunner/local/__init__.py +26 -0
- data/vendor/v8/tools/testrunner/local/commands.py +153 -0
- data/vendor/v8/tools/testrunner/local/execution.py +182 -0
- data/vendor/v8/tools/testrunner/local/old_statusfile.py +460 -0
- data/vendor/v8/tools/testrunner/local/progress.py +238 -0
- data/vendor/v8/tools/testrunner/local/statusfile.py +145 -0
- data/vendor/v8/tools/testrunner/local/testsuite.py +187 -0
- data/vendor/v8/tools/testrunner/local/utils.py +108 -0
- data/vendor/v8/tools/testrunner/local/verbose.py +99 -0
- data/vendor/v8/tools/testrunner/network/__init__.py +26 -0
- data/vendor/v8/tools/testrunner/network/distro.py +90 -0
- data/vendor/v8/tools/testrunner/network/endpoint.py +124 -0
- data/vendor/v8/tools/testrunner/network/network_execution.py +253 -0
- data/vendor/v8/tools/testrunner/network/perfdata.py +120 -0
- data/vendor/v8/tools/testrunner/objects/__init__.py +26 -0
- data/vendor/v8/tools/testrunner/objects/context.py +50 -0
- data/vendor/v8/tools/testrunner/objects/output.py +60 -0
- data/vendor/v8/tools/testrunner/objects/peer.py +80 -0
- data/vendor/v8/tools/testrunner/objects/testcase.py +83 -0
- data/vendor/v8/tools/testrunner/objects/workpacket.py +90 -0
- data/vendor/v8/tools/testrunner/server/__init__.py +26 -0
- data/vendor/v8/tools/testrunner/server/compression.py +111 -0
- data/vendor/v8/tools/testrunner/server/constants.py +51 -0
- data/vendor/v8/tools/testrunner/server/daemon.py +147 -0
- data/vendor/v8/tools/testrunner/server/local_handler.py +119 -0
- data/vendor/v8/tools/testrunner/server/main.py +245 -0
- data/vendor/v8/tools/testrunner/server/presence_handler.py +120 -0
- data/vendor/v8/tools/testrunner/server/signatures.py +63 -0
- data/vendor/v8/tools/testrunner/server/status_handler.py +112 -0
- data/vendor/v8/tools/testrunner/server/work_handler.py +150 -0
- data/vendor/v8/tools/tick-processor.html +168 -0
- data/vendor/v8/tools/tickprocessor-driver.js +5 -3
- data/vendor/v8/tools/tickprocessor.js +58 -15
- metadata +534 -30
- data/patches/add-freebsd9-and-freebsd10-to-gyp-GetFlavor.patch +0 -11
- data/patches/do-not-imply-vfp3-and-armv7.patch +0 -44
- data/patches/fPIC-on-x64.patch +0 -14
- data/vendor/v8/src/liveobjectlist-inl.h +0 -126
- data/vendor/v8/src/liveobjectlist.cc +0 -2631
- data/vendor/v8/src/liveobjectlist.h +0 -319
- data/vendor/v8/test/mjsunit/mul-exhaustive.js +0 -4629
- data/vendor/v8/test/mjsunit/numops-fuzz.js +0 -4609
- data/vendor/v8/test/mjsunit/regress/regress-1969.js +0 -5045
data/vendor/v8/src/{smart-array-pointer.h → smart-pointers.h}
RENAMED
@@ -25,45 +25,49 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

-#ifndef
-#define
+#ifndef V8_SMART_POINTERS_H_
+#define V8_SMART_POINTERS_H_

 namespace v8 {
 namespace internal {


-
-
-template<typename T>
-class SmartArrayPointer {
+template<typename Deallocator, typename T>
+class SmartPointerBase {
  public:
   // Default constructor. Constructs an empty scoped pointer.
-  inline
+  inline SmartPointerBase() : p_(NULL) {}

   // Constructs a scoped pointer from a plain one.
-  explicit inline
+  explicit inline SmartPointerBase(T* ptr) : p_(ptr) {}

   // Copy constructor removes the pointer from the original to avoid double
   // freeing.
-  inline
-
+  inline SmartPointerBase(const SmartPointerBase<Deallocator, T>& rhs)
+      : p_(rhs.p_) {
+    const_cast<SmartPointerBase<Deallocator, T>&>(rhs).p_ = NULL;
   }

   // When the destructor of the scoped pointer is executed the plain pointer
   // is deleted using DeleteArray. This implies that you must allocate with
   // NewArray.
-  inline ~
+  inline ~SmartPointerBase() { if (p_) Deallocator::Delete(p_); }

   inline T* operator->() const { return p_; }

   // You can get the underlying pointer out with the * operator.
   inline T* operator*() { return p_; }

-  // You can use [n] to index as if it was a plain pointer
+  // You can use [n] to index as if it was a plain pointer.
   inline T& operator[](size_t i) {
     return p_[i];
   }

+  // You can use [n] to index as if it was a plain pointer.
+  const inline T& operator[](size_t i) const {
+    return p_[i];
+  }
+
   // We don't have implicit conversion to a T* since that hinders migration:
   // You would not be able to change a method from returning a T* to
   // returning an SmartArrayPointer<T> and then get errors wherever it is used.
@@ -78,13 +82,19 @@ class SmartArrayPointer {
     return temp;
   }

+  inline void Reset(T* new_value) {
+    if (p_) Deallocator::Delete(p_);
+    p_ = new_value;
+  }
+
   // Assignment requires an empty (NULL) SmartArrayPointer as the receiver. Like
   // the copy constructor it removes the pointer in the original to avoid
   // double freeing.
-  inline
+  inline SmartPointerBase<Deallocator, T>& operator=(
+      const SmartPointerBase<Deallocator, T>& rhs) {
     ASSERT(is_empty());
     T* tmp = rhs.p_;  // swap to handle self-assignment
-    const_cast<
+    const_cast<SmartPointerBase<Deallocator, T>&>(rhs).p_ = NULL;
     p_ = tmp;
     return *this;
   }
@@ -95,6 +105,45 @@ class SmartArrayPointer {
   T* p_;
 };

+// A 'scoped array pointer' that calls DeleteArray on its pointer when the
+// destructor is called.
+
+template<typename T>
+struct ArrayDeallocator {
+  static void Delete(T* array) {
+    DeleteArray(array);
+  }
+};
+
+
+template<typename T>
+class SmartArrayPointer: public SmartPointerBase<ArrayDeallocator<T>, T> {
+ public:
+  inline SmartArrayPointer() { }
+  explicit inline SmartArrayPointer(T* ptr)
+      : SmartPointerBase<ArrayDeallocator<T>, T>(ptr) { }
+  inline SmartArrayPointer(const SmartArrayPointer<T>& rhs)
+      : SmartPointerBase<ArrayDeallocator<T>, T>(rhs) { }
+};
+
+
+template<typename T>
+struct ObjectDeallocator {
+  static void Delete(T* array) {
+    Malloced::Delete(array);
+  }
+};
+
+template<typename T>
+class SmartPointer: public SmartPointerBase<ObjectDeallocator<T>, T> {
+ public:
+  inline SmartPointer() { }
+  explicit inline SmartPointer(T* ptr)
+      : SmartPointerBase<ObjectDeallocator<T>, T>(ptr) { }
+  inline SmartPointer(const SmartPointer<T>& rhs)
+      : SmartPointerBase<ObjectDeallocator<T>, T>(rhs) { }
+};
+
 } } // namespace v8::internal

-#endif //
+#endif // V8_SMART_POINTERS_H_
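The rename above folds the single SmartArrayPointer class into a SmartPointerBase template that takes its cleanup routine as a Deallocator policy, so array-backed and Malloced-backed pointers share one implementation. Below is a minimal standalone sketch of that policy split, not the V8 code itself; the class names and the demo main are illustrative only.

// Standalone sketch of the deallocator-policy pattern (illustrative names).
#include <cstddef>
#include <cstdio>
#include <cstdlib>

template <typename Deallocator, typename T>
class ScopedBase {
 public:
  ScopedBase() : p_(NULL) {}
  explicit ScopedBase(T* ptr) : p_(ptr) {}
  // The policy supplies the one static hook the base needs.
  ~ScopedBase() { if (p_) Deallocator::Delete(p_); }
  T* get() const { return p_; }
  T& operator[](size_t i) { return p_[i]; }
 private:
  // Copying is disabled in this sketch; V8's version instead transfers
  // ownership in the copy constructor and assignment operator.
  ScopedBase(const ScopedBase&);
  ScopedBase& operator=(const ScopedBase&);
  T* p_;
};

template <typename T>
struct ArrayDelete {                 // plays the role of ArrayDeallocator
  static void Delete(T* p) { delete[] p; }
};

template <typename T>
struct ObjectDelete {                // plays the role of ObjectDeallocator
  static void Delete(T* p) { std::free(p); }
};

int main() {
  ScopedBase<ArrayDelete<char>, char> buffer(new char[16]);  // freed via delete[]
  buffer[0] = '\0';
  ScopedBase<ObjectDelete<int>, int>
      value(static_cast<int*>(std::malloc(sizeof(int))));    // freed via free()
  *value.get() = 42;
  std::printf("%d\n", *value.get());
  return 0;  // both destructors call their policy's Delete()
}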
data/vendor/v8/src/snapshot-common.cc
CHANGED
@@ -37,10 +37,47 @@
 namespace v8 {
 namespace internal {

-
-
-
-
+
+static void ReserveSpaceForSnapshot(Deserializer* deserializer,
+                                    const char* file_name) {
+  int file_name_length = StrLength(file_name) + 10;
+  Vector<char> name = Vector<char>::New(file_name_length + 1);
+  OS::SNPrintF(name, "%s.size", file_name);
+  FILE* fp = OS::FOpen(name.start(), "r");
+  CHECK_NE(NULL, fp);
+  int new_size, pointer_size, data_size, code_size, map_size, cell_size;
+#ifdef _MSC_VER
+  // Avoid warning about unsafe fscanf from MSVC.
+  // Please note that this is only fine if %c and %s are not being used.
+#define fscanf fscanf_s
+#endif
+  CHECK_EQ(1, fscanf(fp, "new %d\n", &new_size));
+  CHECK_EQ(1, fscanf(fp, "pointer %d\n", &pointer_size));
+  CHECK_EQ(1, fscanf(fp, "data %d\n", &data_size));
+  CHECK_EQ(1, fscanf(fp, "code %d\n", &code_size));
+  CHECK_EQ(1, fscanf(fp, "map %d\n", &map_size));
+  CHECK_EQ(1, fscanf(fp, "cell %d\n", &cell_size));
+#ifdef _MSC_VER
+#undef fscanf
+#endif
+  fclose(fp);
+  deserializer->set_reservation(NEW_SPACE, new_size);
+  deserializer->set_reservation(OLD_POINTER_SPACE, pointer_size);
+  deserializer->set_reservation(OLD_DATA_SPACE, data_size);
+  deserializer->set_reservation(CODE_SPACE, code_size);
+  deserializer->set_reservation(MAP_SPACE, map_size);
+  deserializer->set_reservation(CELL_SPACE, cell_size);
+  name.Dispose();
+}
+
+
+void Snapshot::ReserveSpaceForLinkedInSnapshot(Deserializer* deserializer) {
+  deserializer->set_reservation(NEW_SPACE, new_space_used_);
+  deserializer->set_reservation(OLD_POINTER_SPACE, pointer_space_used_);
+  deserializer->set_reservation(OLD_DATA_SPACE, data_space_used_);
+  deserializer->set_reservation(CODE_SPACE, code_space_used_);
+  deserializer->set_reservation(MAP_SPACE, map_space_used_);
+  deserializer->set_reservation(CELL_SPACE, cell_space_used_);
 }


@@ -49,32 +86,44 @@ bool Snapshot::Initialize(const char* snapshot_file) {
     int len;
     byte* str = ReadBytes(snapshot_file, &len);
     if (!str) return false;
-
+    bool success;
+    {
+      SnapshotByteSource source(str, len);
+      Deserializer deserializer(&source);
+      ReserveSpaceForSnapshot(&deserializer, snapshot_file);
+      success = V8::Initialize(&deserializer);
+    }
     DeleteArray(str);
-    return
+    return success;
   } else if (size_ > 0) {
-
-
+    SnapshotByteSource source(raw_data_, raw_size_);
+    Deserializer deserializer(&source);
+    ReserveSpaceForLinkedInSnapshot(&deserializer);
+    return V8::Initialize(&deserializer);
   }
   return false;
 }


+bool Snapshot::HaveASnapshotToStartFrom() {
+  return size_ != 0;
+}
+
+
 Handle<Context> Snapshot::NewContextFromSnapshot() {
   if (context_size_ == 0) {
     return Handle<Context>();
   }
-  HEAP->ReserveSpace(new_space_used_,
-                     pointer_space_used_,
-                     data_space_used_,
-                     code_space_used_,
-                     map_space_used_,
-                     cell_space_used_,
-                     large_space_used_);
   SnapshotByteSource source(context_raw_data_,
                             context_raw_size_);
   Deserializer deserializer(&source);
   Object* root;
+  deserializer.set_reservation(NEW_SPACE, context_new_space_used_);
+  deserializer.set_reservation(OLD_POINTER_SPACE, context_pointer_space_used_);
+  deserializer.set_reservation(OLD_DATA_SPACE, context_data_space_used_);
+  deserializer.set_reservation(CODE_SPACE, context_code_space_used_);
+  deserializer.set_reservation(MAP_SPACE, context_map_space_used_);
+  deserializer.set_reservation(CELL_SPACE, context_cell_space_used_);
   deserializer.DeserializePartial(&root);
   CHECK(root->IsContext());
   return Handle<Context>(Context::cast(root));
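ReserveSpaceForSnapshot above takes its per-space reservations from a "<snapshot_file>.size" sidecar containing six labeled integers, one per space. The self-contained sketch below shows that sidecar format; the file name and sizes are invented for illustration, and error handling is reduced to return codes instead of V8's CHECK macros.

// Write and read back an illustrative "<snapshot>.size" sidecar.
#include <cstdio>

int main() {
  const char* name = "snapshot.bin.size";  // hypothetical path
  FILE* out = std::fopen(name, "w");
  if (out == NULL) return 1;
  // One "label value" pair per space, in the order the reader expects.
  std::fprintf(out, "new %d\npointer %d\ndata %d\ncode %d\nmap %d\ncell %d\n",
               1024, 2048, 512, 4096, 256, 128);
  std::fclose(out);

  int new_size = 0, pointer_size = 0, data_size = 0;
  int code_size = 0, map_size = 0, cell_size = 0;
  FILE* in = std::fopen(name, "r");
  if (in == NULL) return 1;
  int fields = std::fscanf(in,
                           "new %d\npointer %d\ndata %d\ncode %d\nmap %d\ncell %d\n",
                           &new_size, &pointer_size, &data_size,
                           &code_size, &map_size, &cell_size);
  std::fclose(in);
  if (fields != 6) return 1;  // V8 instead CHECKs each field as it is read
  std::printf("new=%d pointer=%d data=%d code=%d map=%d cell=%d\n",
              new_size, pointer_size, data_size, code_size, map_size, cell_size);
  return 0;
}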
data/vendor/v8/src/snapshot-empty.cc
CHANGED
@@ -49,6 +49,12 @@ const int Snapshot::data_space_used_ = 0;
 const int Snapshot::code_space_used_ = 0;
 const int Snapshot::map_space_used_ = 0;
 const int Snapshot::cell_space_used_ = 0;
-
+
+const int Snapshot::context_new_space_used_ = 0;
+const int Snapshot::context_pointer_space_used_ = 0;
+const int Snapshot::context_data_space_used_ = 0;
+const int Snapshot::context_code_space_used_ = 0;
+const int Snapshot::context_map_space_used_ = 0;
+const int Snapshot::context_cell_space_used_ = 0;

 } } // namespace v8::internal
data/vendor/v8/src/snapshot.h
CHANGED
@@ -40,6 +40,8 @@ class Snapshot {
   // could be found.
   static bool Initialize(const char* snapshot_file = NULL);

+  static bool HaveASnapshotToStartFrom();
+
   // Create a new context using the internal partial snapshot.
   static Handle<Context> NewContextFromSnapshot();

@@ -75,13 +77,18 @@ class Snapshot {
   static const int code_space_used_;
   static const int map_space_used_;
   static const int cell_space_used_;
-  static const int
+  static const int context_new_space_used_;
+  static const int context_pointer_space_used_;
+  static const int context_data_space_used_;
+  static const int context_code_space_used_;
+  static const int context_map_space_used_;
+  static const int context_cell_space_used_;
   static const int size_;
   static const int raw_size_;
   static const int context_size_;
   static const int context_raw_size_;

-  static
+  static void ReserveSpaceForLinkedInSnapshot(Deserializer* deserializer);

   DISALLOW_IMPLICIT_CONSTRUCTORS(Snapshot);
 };
data/vendor/v8/src/spaces-inl.h
CHANGED
@@ -214,6 +214,19 @@ MemoryChunk* MemoryChunk::FromAnyPointerAddress(Address addr) {
 }


+void MemoryChunk::UpdateHighWaterMark(Address mark) {
+  if (mark == NULL) return;
+  // Need to subtract one from the mark because when a chunk is full the
+  // top points to the next address after the chunk, which effectively belongs
+  // to another chunk. See the comment to Page::FromAllocationTop.
+  MemoryChunk* chunk = MemoryChunk::FromAddress(mark - 1);
+  int new_mark = static_cast<int>(mark - chunk->address());
+  if (new_mark > chunk->high_water_mark_) {
+    chunk->high_water_mark_ = new_mark;
+  }
+}
+
+
 PointerChunkIterator::PointerChunkIterator(Heap* heap)
     : state_(kOldPointerState),
       old_pointer_iterator_(heap->old_pointer_space()),
@@ -269,6 +282,10 @@ MaybeObject* PagedSpace::AllocateRaw(int size_in_bytes) {
     return object;
   }

+  ASSERT(!heap()->linear_allocation() ||
+         (anchor_.next_chunk() == &anchor_ &&
+          anchor_.prev_chunk() == &anchor_));
+
   object = free_list_.Allocate(size_in_bytes);
   if (object != NULL) {
     if (identity() == CODE_SPACE) {
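A side note on UpdateHighWaterMark above: it maps mark - 1, not mark, back to its chunk, because a completely full chunk's allocation top is the first address past the chunk. The standalone sketch below shows the off-by-one, with plain alignment masking standing in for MemoryChunk::FromAddress and a made-up chunk size.

// Why the high-water-mark update looks at (mark - 1): sketch with a fake
// 4 KB "chunk" granularity and simple alignment masking.
#include <cstdint>
#include <cstdio>

int main() {
  const uintptr_t kChunk = 0x1000;                // illustrative granularity
  const uintptr_t base = 0x40000000;              // start of the full chunk
  const uintptr_t top = base + kChunk;            // allocation top when full

  uintptr_t from_top = top & ~(kChunk - 1);             // lands in the *next* chunk
  uintptr_t from_top_minus_1 = (top - 1) & ~(kChunk - 1);  // lands back in ours

  std::printf("top maps to %#lx, top-1 maps to %#lx (chunk starts at %#lx)\n",
              (unsigned long)from_top, (unsigned long)from_top_minus_1,
              (unsigned long)base);
  return 0;
}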
data/vendor/v8/src/spaces.cc
CHANGED
@@ -27,7 +27,6 @@
|
|
27
27
|
|
28
28
|
#include "v8.h"
|
29
29
|
|
30
|
-
#include "liveobjectlist-inl.h"
|
31
30
|
#include "macro-assembler.h"
|
32
31
|
#include "mark-compact.h"
|
33
32
|
#include "platform.h"
|
@@ -207,17 +206,18 @@ void CodeRange::GetNextAllocationBlock(size_t requested) {
|
|
207
206
|
}
|
208
207
|
|
209
208
|
|
210
|
-
|
211
|
-
|
209
|
+
Address CodeRange::AllocateRawMemory(const size_t requested_size,
|
210
|
+
const size_t commit_size,
|
212
211
|
size_t* allocated) {
|
212
|
+
ASSERT(commit_size <= requested_size);
|
213
213
|
ASSERT(current_allocation_block_index_ < allocation_list_.length());
|
214
|
-
if (
|
214
|
+
if (requested_size > allocation_list_[current_allocation_block_index_].size) {
|
215
215
|
// Find an allocation block large enough. This function call may
|
216
216
|
// call V8::FatalProcessOutOfMemory if it cannot find a large enough block.
|
217
|
-
GetNextAllocationBlock(
|
217
|
+
GetNextAllocationBlock(requested_size);
|
218
218
|
}
|
219
219
|
// Commit the requested memory at the start of the current allocation block.
|
220
|
-
size_t aligned_requested = RoundUp(
|
220
|
+
size_t aligned_requested = RoundUp(requested_size, MemoryChunk::kAlignment);
|
221
221
|
FreeBlock current = allocation_list_[current_allocation_block_index_];
|
222
222
|
if (aligned_requested >= (current.size - Page::kPageSize)) {
|
223
223
|
// Don't leave a small free block, useless for a large object or chunk.
|
@@ -227,9 +227,10 @@ Address CodeRange::AllocateRawMemory(const size_t requested,
|
|
227
227
|
}
|
228
228
|
ASSERT(*allocated <= current.size);
|
229
229
|
ASSERT(IsAddressAligned(current.start, MemoryChunk::kAlignment));
|
230
|
-
if (!MemoryAllocator::
|
231
|
-
|
232
|
-
|
230
|
+
if (!MemoryAllocator::CommitExecutableMemory(code_range_,
|
231
|
+
current.start,
|
232
|
+
commit_size,
|
233
|
+
*allocated)) {
|
233
234
|
*allocated = 0;
|
234
235
|
return NULL;
|
235
236
|
}
|
@@ -242,6 +243,16 @@ Address CodeRange::AllocateRawMemory(const size_t requested,
|
|
242
243
|
}
|
243
244
|
|
244
245
|
|
246
|
+
bool CodeRange::CommitRawMemory(Address start, size_t length) {
|
247
|
+
return code_range_->Commit(start, length, true);
|
248
|
+
}
|
249
|
+
|
250
|
+
|
251
|
+
bool CodeRange::UncommitRawMemory(Address start, size_t length) {
|
252
|
+
return code_range_->Uncommit(start, length);
|
253
|
+
}
|
254
|
+
|
255
|
+
|
245
256
|
void CodeRange::FreeRawMemory(Address address, size_t length) {
|
246
257
|
ASSERT(IsAddressAligned(address, MemoryChunk::kAlignment));
|
247
258
|
free_list_.Add(FreeBlock(address, length));
|
@@ -353,20 +364,25 @@ Address MemoryAllocator::ReserveAlignedMemory(size_t size,
|
|
353
364
|
}
|
354
365
|
|
355
366
|
|
356
|
-
Address MemoryAllocator::AllocateAlignedMemory(size_t
|
367
|
+
Address MemoryAllocator::AllocateAlignedMemory(size_t reserve_size,
|
368
|
+
size_t commit_size,
|
357
369
|
size_t alignment,
|
358
370
|
Executability executable,
|
359
371
|
VirtualMemory* controller) {
|
372
|
+
ASSERT(commit_size <= reserve_size);
|
360
373
|
VirtualMemory reservation;
|
361
|
-
Address base = ReserveAlignedMemory(
|
374
|
+
Address base = ReserveAlignedMemory(reserve_size, alignment, &reservation);
|
362
375
|
if (base == NULL) return NULL;
|
363
376
|
|
364
377
|
if (executable == EXECUTABLE) {
|
365
|
-
if (!
|
378
|
+
if (!CommitExecutableMemory(&reservation,
|
379
|
+
base,
|
380
|
+
commit_size,
|
381
|
+
reserve_size)) {
|
366
382
|
base = NULL;
|
367
383
|
}
|
368
384
|
} else {
|
369
|
-
if (!reservation.Commit(base,
|
385
|
+
if (!reservation.Commit(base, commit_size, false)) {
|
370
386
|
base = NULL;
|
371
387
|
}
|
372
388
|
}
|
@@ -447,6 +463,10 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap,
|
|
447
463
|
chunk->InitializeReservedMemory();
|
448
464
|
chunk->slots_buffer_ = NULL;
|
449
465
|
chunk->skip_list_ = NULL;
|
466
|
+
chunk->write_barrier_counter_ = kWriteBarrierCounterGranularity;
|
467
|
+
chunk->progress_bar_ = 0;
|
468
|
+
chunk->high_water_mark_ = static_cast<int>(area_start - base);
|
469
|
+
chunk->parallel_sweeping_ = 0;
|
450
470
|
chunk->ResetLiveBytes();
|
451
471
|
Bitmap::Clear(chunk);
|
452
472
|
chunk->initialize_scan_on_scavenge(false);
|
@@ -467,6 +487,53 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap,
 }
 
 
+// Commit MemoryChunk area to the requested size.
+bool MemoryChunk::CommitArea(size_t requested) {
+  size_t guard_size = IsFlagSet(IS_EXECUTABLE) ?
+      MemoryAllocator::CodePageGuardSize() : 0;
+  size_t header_size = area_start() - address() - guard_size;
+  size_t commit_size = RoundUp(header_size + requested, OS::CommitPageSize());
+  size_t committed_size = RoundUp(header_size + (area_end() - area_start()),
+                                  OS::CommitPageSize());
+
+  if (commit_size > committed_size) {
+    // Commit size should be less or equal than the reserved size.
+    ASSERT(commit_size <= size() - 2 * guard_size);
+    // Append the committed area.
+    Address start = address() + committed_size + guard_size;
+    size_t length = commit_size - committed_size;
+    if (reservation_.IsReserved()) {
+      if (!reservation_.Commit(start, length, IsFlagSet(IS_EXECUTABLE))) {
+        return false;
+      }
+    } else {
+      CodeRange* code_range = heap_->isolate()->code_range();
+      ASSERT(code_range->exists() && IsFlagSet(IS_EXECUTABLE));
+      if (!code_range->CommitRawMemory(start, length)) return false;
+    }
+
+    if (Heap::ShouldZapGarbage()) {
+      heap_->isolate()->memory_allocator()->ZapBlock(start, length);
+    }
+  } else if (commit_size < committed_size) {
+    ASSERT(commit_size > 0);
+    // Shrink the committed area.
+    size_t length = committed_size - commit_size;
+    Address start = address() + committed_size + guard_size - length;
+    if (reservation_.IsReserved()) {
+      if (!reservation_.Uncommit(start, length)) return false;
+    } else {
+      CodeRange* code_range = heap_->isolate()->code_range();
+      ASSERT(code_range->exists() && IsFlagSet(IS_EXECUTABLE));
+      if (!code_range->UncommitRawMemory(start, length)) return false;
+    }
+  }
+
+  area_end_ = area_start_ + requested;
+  return true;
+}
+
+
 void MemoryChunk::InsertAfter(MemoryChunk* other) {
   next_chunk_ = other->next_chunk_;
   prev_chunk_ = other;
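To make the RoundUp arithmetic in the new CommitArea concrete, here is a small, self-contained sketch of the same grow/shrink decision. The constants (a 4 KB commit page and a 256-byte header) are illustrative assumptions, not values taken from this diff.

    #include <cstddef>
    #include <cstdio>

    // Round x up to the next multiple of a power-of-two granularity.
    static size_t RoundUp(size_t x, size_t granularity) {
      return (x + granularity - 1) & ~(granularity - 1);
    }

    int main() {
      const size_t kCommitPageSize = 4096;   // assumed OS commit page size
      const size_t header_size = 256;        // assumed chunk header size
      const size_t currently_used = 10000;   // area_end() - area_start()
      const size_t requested = 20000;        // new area size asked of CommitArea

      size_t committed_size = RoundUp(header_size + currently_used, kCommitPageSize);
      size_t commit_size = RoundUp(header_size + requested, kCommitPageSize);

      if (commit_size > committed_size) {
        // Grow: commit the delta at the end of the already-committed region.
        std::printf("commit %zu more bytes\n", commit_size - committed_size);
      } else if (commit_size < committed_size) {
        // Shrink: uncommit the tail that is no longer needed.
        std::printf("uncommit %zu bytes\n", committed_size - commit_size);
      } else {
        std::printf("no change\n");
      }
      return 0;
    }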
@@ -487,17 +554,51 @@ void MemoryChunk::Unlink() {
 }
 
 
-MemoryChunk* MemoryAllocator::AllocateChunk(intptr_t
+MemoryChunk* MemoryAllocator::AllocateChunk(intptr_t reserve_area_size,
+                                            intptr_t commit_area_size,
                                             Executability executable,
                                             Space* owner) {
+  ASSERT(commit_area_size <= reserve_area_size);
+
   size_t chunk_size;
   Heap* heap = isolate_->heap();
   Address base = NULL;
   VirtualMemory reservation;
   Address area_start = NULL;
   Address area_end = NULL;
+
+  //
+  // MemoryChunk layout:
+  //
+  //             Executable
+  // +----------------------------+<- base aligned with MemoryChunk::kAlignment
+  // |           Header           |
+  // +----------------------------+<- base + CodePageGuardStartOffset
+  // |           Guard            |
+  // +----------------------------+<- area_start_
+  // |           Area             |
+  // +----------------------------+<- area_end_ (area_start + commit_area_size)
+  // |  Committed but not used    |
+  // +----------------------------+<- aligned at OS page boundary
+  // | Reserved but not committed |
+  // +----------------------------+<- aligned at OS page boundary
+  // |           Guard            |
+  // +----------------------------+<- base + chunk_size
+  //
+  //           Non-executable
+  // +----------------------------+<- base aligned with MemoryChunk::kAlignment
+  // |           Header           |
+  // +----------------------------+<- area_start_ (base + kObjectStartOffset)
+  // |           Area             |
+  // +----------------------------+<- area_end_ (area_start + commit_area_size)
+  // |  Committed but not used    |
+  // +----------------------------+<- aligned at OS page boundary
+  // | Reserved but not committed |
+  // +----------------------------+<- base + chunk_size
+  //
+
   if (executable == EXECUTABLE) {
-    chunk_size = RoundUp(CodePageAreaStartOffset() +
+    chunk_size = RoundUp(CodePageAreaStartOffset() + reserve_area_size,
                          OS::CommitPageSize()) + CodePageGuardSize();
 
     // Check executable memory limit.
@@ -508,10 +609,15 @@ MemoryChunk* MemoryAllocator::AllocateChunk(intptr_t body_size,
       return NULL;
     }
 
+    // Size of header (not executable) plus area (executable).
+    size_t commit_size = RoundUp(CodePageGuardStartOffset() + commit_area_size,
+                                 OS::CommitPageSize());
     // Allocate executable memory either from code range or from the
     // OS.
     if (isolate_->code_range()->exists()) {
-      base = isolate_->code_range()->AllocateRawMemory(chunk_size,
+      base = isolate_->code_range()->AllocateRawMemory(chunk_size,
+                                                       commit_size,
+                                                       &chunk_size);
       ASSERT(IsAligned(reinterpret_cast<intptr_t>(base),
                        MemoryChunk::kAlignment));
       if (base == NULL) return NULL;
@@ -520,6 +626,7 @@ MemoryChunk* MemoryAllocator::AllocateChunk(intptr_t body_size,
       size_executable_ += chunk_size;
     } else {
       base = AllocateAlignedMemory(chunk_size,
+                                   commit_size,
                                    MemoryChunk::kAlignment,
                                    executable,
                                    &reservation);
@@ -528,29 +635,36 @@ MemoryChunk* MemoryAllocator::AllocateChunk(intptr_t body_size,
       size_executable_ += reservation.size();
     }
 
-
-
-
-
+    if (Heap::ShouldZapGarbage()) {
+      ZapBlock(base, CodePageGuardStartOffset());
+      ZapBlock(base + CodePageAreaStartOffset(), commit_area_size);
+    }
+
     area_start = base + CodePageAreaStartOffset();
-    area_end = area_start +
+    area_end = area_start + commit_area_size;
   } else {
-    chunk_size = MemoryChunk::kObjectStartOffset +
+    chunk_size = RoundUp(MemoryChunk::kObjectStartOffset + reserve_area_size,
+                         OS::CommitPageSize());
+    size_t commit_size = RoundUp(MemoryChunk::kObjectStartOffset +
+                                 commit_area_size, OS::CommitPageSize());
     base = AllocateAlignedMemory(chunk_size,
+                                 commit_size,
                                  MemoryChunk::kAlignment,
                                  executable,
                                  &reservation);
 
     if (base == NULL) return NULL;
 
-
-
-
+    if (Heap::ShouldZapGarbage()) {
+      ZapBlock(base, Page::kObjectStartOffset + commit_area_size);
+    }
 
     area_start = base + Page::kObjectStartOffset;
-    area_end =
+    area_end = area_start + commit_area_size;
   }
 
+  // Use chunk_size for statistics and callbacks because we assume that they
+  // treat reserved but not-yet committed memory regions of chunks as allocated.
   isolate_->counters()->memory_allocated()->
       Increment(static_cast<int>(chunk_size));
 
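For a rough feel of the two sizing formulas above (executable and non-executable), here is a tiny sketch that plugs assumed values into the same expressions; the real offsets come from CodePageAreaStartOffset(), CodePageGuardStartOffset(), CodePageGuardSize(), and MemoryChunk::kObjectStartOffset, which are defined elsewhere and not shown in this hunk.

    #include <cstddef>
    #include <cstdio>

    static size_t RoundUp(size_t x, size_t g) { return (x + g - 1) & ~(g - 1); }

    int main() {
      // Assumed, illustrative values only.
      const size_t kCommitPageSize = 4096;
      const size_t kGuardSize = 4096;         // stand-in for CodePageGuardSize()
      const size_t kAreaStartOffset = 8192;   // stand-in for CodePageAreaStartOffset()
      const size_t kGuardStartOffset = 4096;  // stand-in for CodePageGuardStartOffset()
      const size_t kObjectStartOffset = 256;  // stand-in for MemoryChunk::kObjectStartOffset
      const size_t reserve_area_size = 100 * 1024;
      const size_t commit_area_size = 40 * 1024;

      // Executable chunk: reserve header + guard + full area plus a trailing guard,
      // but commit only the header plus the initially used part of the area.
      size_t exec_chunk_size =
          RoundUp(kAreaStartOffset + reserve_area_size, kCommitPageSize) + kGuardSize;
      size_t exec_commit_size =
          RoundUp(kGuardStartOffset + commit_area_size, kCommitPageSize);

      // Non-executable chunk: no guard pages, just header + area.
      size_t chunk_size =
          RoundUp(kObjectStartOffset + reserve_area_size, kCommitPageSize);
      size_t commit_size =
          RoundUp(kObjectStartOffset + commit_area_size, kCommitPageSize);

      std::printf("executable: reserve %zu, commit %zu\n", exec_chunk_size, exec_commit_size);
      std::printf("non-executable: reserve %zu, commit %zu\n", chunk_size, commit_size);
      return 0;
    }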
@@ -575,7 +689,7 @@ MemoryChunk* MemoryAllocator::AllocateChunk(intptr_t body_size,
 Page* MemoryAllocator::AllocatePage(intptr_t size,
                                     PagedSpace* owner,
                                     Executability executable) {
-  MemoryChunk* chunk = AllocateChunk(size, executable, owner);
+  MemoryChunk* chunk = AllocateChunk(size, size, executable, owner);
 
   if (chunk == NULL) return NULL;
 
@@ -586,7 +700,10 @@ Page* MemoryAllocator::AllocatePage(intptr_t size,
 LargePage* MemoryAllocator::AllocateLargePage(intptr_t object_size,
                                               Space* owner,
                                               Executability executable) {
-  MemoryChunk* chunk = AllocateChunk(object_size,
+  MemoryChunk* chunk = AllocateChunk(object_size,
+                                     object_size,
+                                     executable,
+                                     owner);
   if (chunk == NULL) return NULL;
   return LargePage::Initialize(isolate_->heap(), chunk);
 }
@@ -621,9 +738,11 @@ bool MemoryAllocator::CommitBlock(Address start,
                                   size_t size,
                                   Executability executable) {
   if (!VirtualMemory::CommitRegion(start, size, executable)) return false;
-
-
-
+
+  if (Heap::ShouldZapGarbage()) {
+    ZapBlock(start, size);
+  }
+
   isolate_->counters()->memory_allocated()->Increment(static_cast<int>(size));
   return true;
 }
@@ -726,9 +845,10 @@ int MemoryAllocator::CodePageAreaEndOffset() {
 }
 
 
-bool MemoryAllocator::
-
-
+bool MemoryAllocator::CommitExecutableMemory(VirtualMemory* vm,
+                                             Address start,
+                                             size_t commit_size,
+                                             size_t reserved_size) {
   // Commit page header (not executable).
   if (!vm->Commit(start,
                   CodePageGuardStartOffset(),
@@ -742,15 +862,14 @@ bool MemoryAllocator::CommitCodePage(VirtualMemory* vm,
   }
 
   // Commit page body (executable).
-  size_t area_size = size - CodePageAreaStartOffset() - CodePageGuardSize();
   if (!vm->Commit(start + CodePageAreaStartOffset(),
-
+                  commit_size - CodePageGuardStartOffset(),
                   true)) {
     return false;
   }
 
-  // Create guard page
-  if (!vm->Guard(start +
+  // Create guard page before the end.
+  if (!vm->Guard(start + reserved_size - CodePageGuardSize())) {
     return false;
   }
 
@@ -819,6 +938,18 @@ void PagedSpace::TearDown() {
 }
 
 
+size_t PagedSpace::CommittedPhysicalMemory() {
+  if (!VirtualMemory::HasLazyCommits()) return CommittedMemory();
+  MemoryChunk::UpdateHighWaterMark(allocation_info_.top);
+  size_t size = 0;
+  PageIterator it(this);
+  while (it.has_next()) {
+    size += it.next()->CommittedPhysicalMemory();
+  }
+  return size;
+}
+
+
 MaybeObject* PagedSpace::FindObject(Address addr) {
   // Note: this function can only be called on precisely swept spaces.
   ASSERT(!heap()->mark_compact_collector()->in_use());
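A minimal model of the pattern above, assuming a hypothetical page type that records a high-water mark: on platforms with lazy commits, the physically backed size of a page is taken to be the highest allocation offset seen in it, not its full committed size.

    #include <cstddef>
    #include <vector>

    // Hypothetical, simplified page: committed size vs. high-water mark.
    struct SimplePage {
      size_t committed_size;
      size_t high_water_mark;  // highest allocation offset seen in this page
      size_t CommittedPhysicalMemory(bool lazy_commits) const {
        return lazy_commits ? high_water_mark : committed_size;
      }
    };

    // Sum over all pages, mirroring the shape of PagedSpace::CommittedPhysicalMemory().
    size_t CommittedPhysicalMemory(const std::vector<SimplePage>& pages,
                                   bool lazy_commits) {
      size_t size = 0;
      for (const SimplePage& p : pages) {
        size += p.CommittedPhysicalMemory(lazy_commits);
      }
      return size;
    }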
@@ -881,10 +1012,10 @@ intptr_t PagedSpace::SizeOfFirstPage() {
       size = 192 * KB;
       break;
     case MAP_SPACE:
-      size =
+      size = 16 * kPointerSize * KB;
       break;
     case CELL_SPACE:
-      size =
+      size = 16 * kPointerSize * KB;
       break;
     case CODE_SPACE:
       if (kPointerSize == 8) {
@@ -984,8 +1115,7 @@ void PagedSpace::ReleaseAllUnusedPages() {
 void PagedSpace::Print() { }
 #endif
 
-
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
 void PagedSpace::Verify(ObjectVisitor* visitor) {
   // We can only iterate over the pages if they were swept precisely.
   if (was_swept_conservatively_) return;
@@ -995,23 +1125,23 @@ void PagedSpace::Verify(ObjectVisitor* visitor) {
   PageIterator page_iterator(this);
   while (page_iterator.has_next()) {
     Page* page = page_iterator.next();
-
+    CHECK(page->owner() == this);
     if (page == Page::FromAllocationTop(allocation_info_.top)) {
       allocation_pointer_found_in_space = true;
     }
-
+    CHECK(page->WasSweptPrecisely());
     HeapObjectIterator it(page, NULL);
     Address end_of_previous_object = page->area_start();
     Address top = page->area_end();
     int black_size = 0;
     for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
-
+      CHECK(end_of_previous_object <= object->address());
 
       // The first word should be a map, and we expect all map pointers to
       // be in map space.
       Map* map = object->map();
-
-
+      CHECK(map->IsMap());
+      CHECK(heap()->map_space()->Contains(map));
 
       // Perform space-specific object verification.
       VerifyObject(object);
@@ -1026,15 +1156,14 @@ void PagedSpace::Verify(ObjectVisitor* visitor) {
         black_size += size;
       }
 
-
+      CHECK(object->address() + size <= top);
       end_of_previous_object = object->address() + size;
     }
-
+    CHECK_LE(black_size, page->LiveBytes());
   }
-
+  CHECK(allocation_pointer_found_in_space);
 }
-#endif
-
+#endif  // VERIFY_HEAP
 
 // -----------------------------------------------------------------------------
 // NewSpace implementation
@@ -1172,6 +1301,7 @@ void NewSpace::Shrink() {
 
 
 void NewSpace::UpdateAllocationInfo() {
+  MemoryChunk::UpdateHighWaterMark(allocation_info_.top);
   allocation_info_.top = to_space_.page_low();
   allocation_info_.limit = to_space_.page_high();
 
@@ -1258,7 +1388,7 @@ MaybeObject* NewSpace::SlowAllocateRaw(int size_in_bytes) {
 }
 
 
-#ifdef
+#ifdef VERIFY_HEAP
 // We do not use the SemiSpaceIterator because verification doesn't assume
 // that it works (it depends on the invariants we are checking).
 void NewSpace::Verify() {
@@ -1307,8 +1437,8 @@ void NewSpace::Verify() {
   }
 
   // Check semi-spaces.
-
-
+  CHECK_EQ(from_space_.id(), kFromSpace);
+  CHECK_EQ(to_space_.id(), kToSpace);
   from_space_.Verify();
   to_space_.Verify();
 }
@@ -1384,6 +1514,17 @@ bool SemiSpace::Uncommit() {
 }
 
 
+size_t SemiSpace::CommittedPhysicalMemory() {
+  if (!is_committed()) return 0;
+  size_t size = 0;
+  NewSpacePageIterator it(this);
+  while (it.has_next()) {
+    size += it.next()->CommittedPhysicalMemory();
+  }
+  return size;
+}
+
+
 bool SemiSpace::GrowTo(int new_capacity) {
   if (!is_committed()) {
     if (!Commit()) return false;
@@ -1524,8 +1665,9 @@ void SemiSpace::set_age_mark(Address mark) {
 
 #ifdef DEBUG
 void SemiSpace::Print() { }
+#endif
 
-
+#ifdef VERIFY_HEAP
 void SemiSpace::Verify() {
   bool is_from_space = (id_ == kFromSpace);
   NewSpacePage* page = anchor_.next_page();
@@ -1555,8 +1697,9 @@ void SemiSpace::Verify() {
     page = page->next_page();
   }
 }
+#endif
 
-
+#ifdef DEBUG
 void SemiSpace::AssertValidRange(Address start, Address end) {
   // Addresses belong to same semi-space
   NewSpacePage* page = NewSpacePage::FromLimit(start);
@@ -1649,6 +1792,7 @@ static void ReportCodeKindStatistics() {
     CASE(FUNCTION);
     CASE(OPTIMIZED_FUNCTION);
     CASE(STUB);
+    CASE(COMPILED_STUB);
     CASE(BUILTIN);
     CASE(LOAD_IC);
     CASE(KEYED_LOAD_IC);
@@ -1816,6 +1960,17 @@ void NewSpace::RecordPromotion(HeapObject* obj) {
   promoted_histogram_[type].increment_bytes(obj->Size());
 }
 
+
+size_t NewSpace::CommittedPhysicalMemory() {
+  if (!VirtualMemory::HasLazyCommits()) return CommittedMemory();
+  MemoryChunk::UpdateHighWaterMark(allocation_info_.top);
+  size_t size = to_space_.CommittedPhysicalMemory();
+  if (from_space_.is_committed()) {
+    size += from_space_.CommittedPhysicalMemory();
+  }
+  return size;
+}
+
 // -----------------------------------------------------------------------------
 // Free lists for old object spaces implementation
 
@@ -1887,52 +2042,72 @@ void FreeListNode::set_next(FreeListNode* next) {
 }
 
 
-
-
-
+intptr_t FreeListCategory::Concatenate(FreeListCategory* category) {
+  intptr_t free_bytes = 0;
+  if (category->top_ != NULL) {
+    ASSERT(category->end_ != NULL);
+    // This is safe (not going to deadlock) since Concatenate operations
+    // are never performed on the same free lists at the same time in
+    // reverse order.
+    ScopedLock lock_target(mutex_);
+    ScopedLock lock_source(category->mutex());
+    free_bytes = category->available();
+    if (end_ == NULL) {
+      end_ = category->end();
+    } else {
+      category->end()->set_next(top_);
+    }
+    top_ = category->top();
+    available_ += category->available();
+    category->Reset();
+  }
+  return free_bytes;
 }
 
 
-void
+void FreeListCategory::Reset() {
+  top_ = NULL;
+  end_ = NULL;
   available_ = 0;
-  small_list_ = NULL;
-  medium_list_ = NULL;
-  large_list_ = NULL;
-  huge_list_ = NULL;
 }
 
 
-
-
-FreeListNode*
-
+intptr_t FreeListCategory::CountFreeListItemsInList(Page* p) {
+  int sum = 0;
+  FreeListNode* n = top_;
+  while (n != NULL) {
+    if (Page::FromAddress(n->address()) == p) {
+      FreeSpace* free_space = reinterpret_cast<FreeSpace*>(n);
+      sum += free_space->Size();
+    }
+    n = n->next();
+  }
+  return sum;
+}
 
-  // Early return to drop too-small blocks on the floor.
-  if (size_in_bytes < kSmallListMin) return size_in_bytes;
 
-
-
-
-
-
-
-
-
-
-
-
-  } else {
-    node->set_next(huge_list_);
-    huge_list_ = node;
+intptr_t FreeListCategory::EvictFreeListItemsInList(Page* p) {
+  int sum = 0;
+  FreeListNode** n = &top_;
+  while (*n != NULL) {
+    if (Page::FromAddress((*n)->address()) == p) {
+      FreeSpace* free_space = reinterpret_cast<FreeSpace*>(*n);
+      sum += free_space->Size();
+      *n = (*n)->next();
+    } else {
+      n = (*n)->next_address();
+    }
   }
-
-
-
+  if (top_ == NULL) {
+    end_ = NULL;
+  }
+  available_ -= sum;
+  return sum;
 }
 
 
-FreeListNode*
-FreeListNode* node =
+FreeListNode* FreeListCategory::PickNodeFromList(int *node_size) {
+  FreeListNode* node = top_;
 
   if (node == NULL) return NULL;
 
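The Concatenate logic above is essentially an O(1) splice of one singly linked list onto another while tracking the head, tail, and a byte count. A simplified, single-threaded model (no ScopedLock, hypothetical node type) looks like this:

    #include <cstddef>

    struct Node {
      Node* next;
      size_t size;
    };

    // Simplified free-list category: head, tail, and total bytes available.
    struct Category {
      Node* top = nullptr;
      Node* end = nullptr;
      size_t available = 0;

      // Splice `other` onto the front of this list in O(1) and empty it.
      size_t Concatenate(Category* other) {
        size_t free_bytes = 0;
        if (other->top != nullptr) {
          free_bytes = other->available;
          if (end == nullptr) {
            end = other->end;          // this list was empty: adopt the tail
          } else {
            other->end->next = top;    // link other's tail to our old head
          }
          top = other->top;            // other's head becomes the new head
          available += other->available;
          other->top = other->end = nullptr;
          other->available = 0;
        }
        return free_bytes;
      }
    };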
@@ -1943,46 +2118,129 @@ FreeListNode* FreeList::PickNodeFromList(FreeListNode** list, int* node_size) {
 }
 
   if (node != NULL) {
+    set_top(node->next());
    *node_size = node->Size();
-
+    available_ -= *node_size;
   } else {
-
+    set_top(NULL);
+  }
+
+  if (top() == NULL) {
+    set_end(NULL);
   }
 
   return node;
 }
 
 
+void FreeListCategory::Free(FreeListNode* node, int size_in_bytes) {
+  node->set_next(top_);
+  top_ = node;
+  if (end_ == NULL) {
+    end_ = node;
+  }
+  available_ += size_in_bytes;
+}
+
+
+void FreeListCategory::RepairFreeList(Heap* heap) {
+  FreeListNode* n = top_;
+  while (n != NULL) {
+    Map** map_location = reinterpret_cast<Map**>(n->address());
+    if (*map_location == NULL) {
+      *map_location = heap->free_space_map();
+    } else {
+      ASSERT(*map_location == heap->free_space_map());
+    }
+    n = n->next();
+  }
+}
+
+
+FreeList::FreeList(PagedSpace* owner)
+    : owner_(owner), heap_(owner->heap()) {
+  Reset();
+}
+
+
+intptr_t FreeList::Concatenate(FreeList* free_list) {
+  intptr_t free_bytes = 0;
+  free_bytes += small_list_.Concatenate(free_list->small_list());
+  free_bytes += medium_list_.Concatenate(free_list->medium_list());
+  free_bytes += large_list_.Concatenate(free_list->large_list());
+  free_bytes += huge_list_.Concatenate(free_list->huge_list());
+  return free_bytes;
+}
+
+
+void FreeList::Reset() {
+  small_list_.Reset();
+  medium_list_.Reset();
+  large_list_.Reset();
+  huge_list_.Reset();
+}
+
+
+int FreeList::Free(Address start, int size_in_bytes) {
+  if (size_in_bytes == 0) return 0;
+
+  FreeListNode* node = FreeListNode::FromAddress(start);
+  node->set_size(heap_, size_in_bytes);
+
+  // Early return to drop too-small blocks on the floor.
+  if (size_in_bytes < kSmallListMin) return size_in_bytes;
+
+  // Insert other blocks at the head of a free list of the appropriate
+  // magnitude.
+  if (size_in_bytes <= kSmallListMax) {
+    small_list_.Free(node, size_in_bytes);
+  } else if (size_in_bytes <= kMediumListMax) {
+    medium_list_.Free(node, size_in_bytes);
+  } else if (size_in_bytes <= kLargeListMax) {
+    large_list_.Free(node, size_in_bytes);
+  } else {
+    huge_list_.Free(node, size_in_bytes);
+  }
+
+  ASSERT(IsVeryLong() || available() == SumFreeLists());
+  return 0;
+}
+
+
 FreeListNode* FreeList::FindNodeFor(int size_in_bytes, int* node_size) {
   FreeListNode* node = NULL;
 
   if (size_in_bytes <= kSmallAllocationMax) {
-    node = PickNodeFromList(
+    node = small_list_.PickNodeFromList(node_size);
     if (node != NULL) return node;
   }
 
   if (size_in_bytes <= kMediumAllocationMax) {
-    node = PickNodeFromList(
+    node = medium_list_.PickNodeFromList(node_size);
     if (node != NULL) return node;
   }
 
   if (size_in_bytes <= kLargeAllocationMax) {
-    node = PickNodeFromList(
+    node = large_list_.PickNodeFromList(node_size);
     if (node != NULL) return node;
   }
 
-
+  int huge_list_available = huge_list_.available();
+  for (FreeListNode** cur = huge_list_.GetTopAddress();
        *cur != NULL;
        cur = (*cur)->next_address()) {
     FreeListNode* cur_node = *cur;
     while (cur_node != NULL &&
            Page::FromAddress(cur_node->address())->IsEvacuationCandidate()) {
-
+      huge_list_available -= reinterpret_cast<FreeSpace*>(cur_node)->Size();
       cur_node = cur_node->next();
     }
 
     *cur = cur_node;
-    if (cur_node == NULL)
+    if (cur_node == NULL) {
+      huge_list_.set_end(NULL);
+      break;
+    }
 
     ASSERT((*cur)->map() == HEAP->raw_unchecked_free_space_map());
     FreeSpace* cur_as_free_space = reinterpret_cast<FreeSpace*>(*cur);
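The "free list of the appropriate magnitude" comment above boils down to a threshold dispatch into four size classes. A tiny sketch of that idea, with made-up byte thresholds rather than V8's actual kSmallListMin/kSmallListMax/kMediumListMax/kLargeListMax values:

    #include <cstddef>

    enum class SizeClass { kTooSmall, kSmall, kMedium, kLarge, kHuge };

    // Illustrative thresholds only; the real constants live elsewhere in the heap code.
    constexpr size_t kSmallListMinBytes = 32;
    constexpr size_t kSmallListMaxBytes = 256;
    constexpr size_t kMediumListMaxBytes = 2048;
    constexpr size_t kLargeListMaxBytes = 16384;

    SizeClass Classify(size_t size_in_bytes) {
      if (size_in_bytes < kSmallListMinBytes) return SizeClass::kTooSmall;  // dropped on the floor
      if (size_in_bytes <= kSmallListMaxBytes) return SizeClass::kSmall;
      if (size_in_bytes <= kMediumListMaxBytes) return SizeClass::kMedium;
      if (size_in_bytes <= kLargeListMaxBytes) return SizeClass::kLarge;
      return SizeClass::kHuge;
    }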
@@ -1990,12 +2248,20 @@ FreeListNode* FreeList::FindNodeFor(int size_in_bytes, int* node_size) {
     if (size >= size_in_bytes) {
       // Large enough node found. Unlink it from the list.
       node = *cur;
-      *node_size = size;
       *cur = node->next();
+      *node_size = size;
+      huge_list_available -= size;
       break;
     }
   }
 
+  if (huge_list_.top() == NULL) {
+    huge_list_.set_end(NULL);
+  }
+
+  huge_list_.set_available(huge_list_available);
+  ASSERT(IsVeryLong() || available() == SumFreeLists());
+
   return node;
 }
 
@@ -2015,8 +2281,6 @@ HeapObject* FreeList::Allocate(int size_in_bytes) {
   FreeListNode* new_node = FindNodeFor(size_in_bytes, &new_node_size);
   if (new_node == NULL) return NULL;
 
-  available_ -= new_node_size;
-  ASSERT(IsVeryLong() || available_ == SumFreeLists());
 
   int bytes_left = new_node_size - size_in_bytes;
   ASSERT(bytes_left >= 0);
@@ -2027,15 +2291,16 @@ HeapObject* FreeList::Allocate(int size_in_bytes) {
   // if it is big enough.
   owner_->Free(owner_->top(), old_linear_size);
 
+  owner_->heap()->incremental_marking()->OldSpaceStep(
+      size_in_bytes - old_linear_size);
+
 #ifdef DEBUG
   for (int i = 0; i < size_in_bytes / kPointerSize; i++) {
-    reinterpret_cast<Object**>(new_node->address())[i] =
+    reinterpret_cast<Object**>(new_node->address())[i] =
+        Smi::FromInt(kCodeZapValue);
   }
 #endif
 
-  owner_->heap()->incremental_marking()->OldSpaceStep(
-      size_in_bytes - old_linear_size);
-
   // The old-space-step might have finished sweeping and restarted marking.
   // Verify that it did not turn the page of the new node into an evacuation
   // candidate.
@@ -2073,25 +2338,12 @@ HeapObject* FreeList::Allocate(int size_in_bytes) {
 }
 
 
-static intptr_t CountFreeListItemsInList(FreeListNode* n, Page* p) {
-  intptr_t sum = 0;
-  while (n != NULL) {
-    if (Page::FromAddress(n->address()) == p) {
-      FreeSpace* free_space = reinterpret_cast<FreeSpace*>(n);
-      sum += free_space->Size();
-    }
-    n = n->next();
-  }
-  return sum;
-}
-
-
 void FreeList::CountFreeListItems(Page* p, SizeStats* sizes) {
-  sizes->huge_size_ = CountFreeListItemsInList(
+  sizes->huge_size_ = huge_list_.CountFreeListItemsInList(p);
   if (sizes->huge_size_ < p->area_size()) {
-    sizes->small_size_ = CountFreeListItemsInList(
-    sizes->medium_size_ = CountFreeListItemsInList(
-    sizes->large_size_ = CountFreeListItemsInList(
+    sizes->small_size_ = small_list_.CountFreeListItemsInList(p);
+    sizes->medium_size_ = medium_list_.CountFreeListItemsInList(p);
+    sizes->large_size_ = large_list_.CountFreeListItemsInList(p);
   } else {
     sizes->small_size_ = 0;
     sizes->medium_size_ = 0;
@@ -2100,39 +2352,31 @@ void FreeList::CountFreeListItems(Page* p, SizeStats* sizes) {
 }
 
 
-static intptr_t EvictFreeListItemsInList(FreeListNode** n, Page* p) {
-  intptr_t sum = 0;
-  while (*n != NULL) {
-    if (Page::FromAddress((*n)->address()) == p) {
-      FreeSpace* free_space = reinterpret_cast<FreeSpace*>(*n);
-      sum += free_space->Size();
-      *n = (*n)->next();
-    } else {
-      n = (*n)->next_address();
-    }
-  }
-  return sum;
-}
-
-
 intptr_t FreeList::EvictFreeListItems(Page* p) {
-  intptr_t sum = EvictFreeListItemsInList(
+  intptr_t sum = huge_list_.EvictFreeListItemsInList(p);
 
   if (sum < p->area_size()) {
-    sum += EvictFreeListItemsInList(
-           EvictFreeListItemsInList(
-           EvictFreeListItemsInList(
+    sum += small_list_.EvictFreeListItemsInList(p) +
+           medium_list_.EvictFreeListItemsInList(p) +
+           large_list_.EvictFreeListItemsInList(p);
   }
 
-  available_ -= static_cast<int>(sum);
-
   return sum;
 }
 
 
+void FreeList::RepairLists(Heap* heap) {
+  small_list_.RepairFreeList(heap);
+  medium_list_.RepairFreeList(heap);
+  large_list_.RepairFreeList(heap);
+  huge_list_.RepairFreeList(heap);
+}
+
+
 #ifdef DEBUG
-intptr_t
+intptr_t FreeListCategory::SumFreeList() {
   intptr_t sum = 0;
+  FreeListNode* cur = top_;
   while (cur != NULL) {
     ASSERT(cur->map() == HEAP->raw_unchecked_free_space_map());
     FreeSpace* cur_as_free_space = reinterpret_cast<FreeSpace*>(cur);
@@ -2146,8 +2390,9 @@ intptr_t FreeList::SumFreeList(FreeListNode* cur) {
 static const int kVeryLongFreeList = 500;
 
 
-int
+int FreeListCategory::FreeListLength() {
   int length = 0;
+  FreeListNode* cur = top_;
   while (cur != NULL) {
     length++;
     cur = cur->next();
@@ -2158,10 +2403,10 @@ int FreeList::FreeListLength(FreeListNode* cur) {
 
 
 bool FreeList::IsVeryLong() {
-  if (FreeListLength(
-  if (FreeListLength(
-  if (FreeListLength(
-  if (FreeListLength(
+  if (small_list_.FreeListLength() == kVeryLongFreeList) return true;
+  if (medium_list_.FreeListLength() == kVeryLongFreeList) return true;
+  if (large_list_.FreeListLength() == kVeryLongFreeList) return true;
+  if (huge_list_.FreeListLength() == kVeryLongFreeList) return true;
   return false;
 }
 
@@ -2170,10 +2415,10 @@ bool FreeList::IsVeryLong() {
 // on the free list, so it should not be called if FreeListLength returns
 // kVeryLongFreeList.
 intptr_t FreeList::SumFreeLists() {
-  intptr_t sum = SumFreeList(
-  sum += SumFreeList(
-  sum += SumFreeList(
-  sum += SumFreeList(
+  intptr_t sum = small_list_.SumFreeList();
+  sum += medium_list_.SumFreeList();
+  sum += large_list_.SumFreeList();
+  sum += huge_list_.SumFreeList();
   return sum;
 }
 #endif
@@ -2257,11 +2502,19 @@ bool PagedSpace::ReserveSpace(int size_in_bytes) {
   Free(top(), old_linear_size);
 
   SetTop(new_area->address(), new_area->address() + size_in_bytes);
-  Allocate(size_in_bytes);
   return true;
 }
 
 
+// After we have booted, we have created a map which represents free space
+// on the heap. If there was already a free list then the elements on it
+// were created with the wrong FreeSpaceMap (normally NULL), so we need to
+// fix them.
+void PagedSpace::RepairFreeListsAfterBoot() {
+  free_list_.RepairLists(heap());
+}
+
+
 // You have to call this last, since the implementation from PagedSpace
 // doesn't know that memory was 'promised' to large object space.
 bool LargeObjectSpace::ReserveSpace(int bytes) {
@@ -2272,7 +2525,7 @@ bool LargeObjectSpace::ReserveSpace(int bytes) {
 
 
 bool PagedSpace::AdvanceSweeper(intptr_t bytes_to_sweep) {
-  if (
+  if (IsLazySweepingComplete()) return true;
 
   intptr_t freed_bytes = 0;
   Page* p = first_unswept_page_;
@@ -2284,7 +2537,10 @@ bool PagedSpace::AdvanceSweeper(intptr_t bytes_to_sweep) {
                reinterpret_cast<intptr_t>(p));
       }
       DecreaseUnsweptFreeBytes(p);
-      freed_bytes +=
+      freed_bytes +=
+          MarkCompactCollector::
+              SweepConservatively<MarkCompactCollector::SWEEP_SEQUENTIALLY>(
+                  this, NULL, p);
     }
     p = next_page;
   } while (p != anchor() && freed_bytes < bytes_to_sweep);
@@ -2297,7 +2553,7 @@ bool PagedSpace::AdvanceSweeper(intptr_t bytes_to_sweep) {
 
   heap()->FreeQueuedChunks();
 
-  return
+  return IsLazySweepingComplete();
 }
 
 
@@ -2316,13 +2572,33 @@ void PagedSpace::EvictEvacuationCandidatesFromFreeLists() {
 }
 
 
+bool PagedSpace::EnsureSweeperProgress(intptr_t size_in_bytes) {
+  MarkCompactCollector* collector = heap()->mark_compact_collector();
+  if (collector->AreSweeperThreadsActivated()) {
+    if (FLAG_concurrent_sweeping) {
+      if (collector->StealMemoryFromSweeperThreads(this) < size_in_bytes) {
+        collector->WaitUntilSweepingCompleted();
+        return true;
+      }
+      return false;
+    }
+    return true;
+  } else {
+    return AdvanceSweeper(size_in_bytes);
+  }
+}
+
+
 HeapObject* PagedSpace::SlowAllocateRaw(int size_in_bytes) {
   // Allocation in this space has failed.
 
-  // If there are unswept pages advance lazy sweeper
-  //
-
-
+  // If there are unswept pages advance lazy sweeper a bounded number of times
+  // until we find a size_in_bytes contiguous piece of memory
+  const int kMaxSweepingTries = 5;
+  bool sweeping_complete = false;
+
+  for (int i = 0; i < kMaxSweepingTries && !sweeping_complete; i++) {
+    sweeping_complete = EnsureSweeperProgress(size_in_bytes);
 
     // Retry the free list allocation.
     HeapObject* object = free_list_.Allocate(size_in_bytes);
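The bounded retry described in the comment above is a generic "make progress, then retry the allocation, at most N times" loop. A minimal, stand-alone sketch of that control flow (the try_allocate/make_progress callbacks are placeholders for this sketch, not V8 APIs):

    #include <functional>

    // Retry an allocation a bounded number of times, making sweeping progress
    // between attempts. Returns true if the allocation eventually succeeded.
    bool AllocateWithBoundedRetries(const std::function<bool()>& try_allocate,
                                    const std::function<bool()>& make_progress,
                                    int max_tries = 5) {
      bool progress_exhausted = false;
      for (int i = 0; i < max_tries && !progress_exhausted; i++) {
        // make_progress() returns true when there is nothing left to sweep.
        progress_exhausted = make_progress();
        if (try_allocate()) return true;  // retry after each progress step
      }
      return false;  // caller falls back to growing the space or a full sweep
    }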
@@ -2344,8 +2620,8 @@ HeapObject* PagedSpace::SlowAllocateRaw(int size_in_bytes) {
 
   // Last ditch, sweep all the remaining pages to try to find space. This may
   // cause a pause.
-  if (!
-
+  if (!IsLazySweepingComplete()) {
+    EnsureSweeperProgress(kMaxInt);
 
     // Retry the free list allocation.
     HeapObject* object = free_list_.Allocate(size_in_bytes);
@@ -2520,25 +2796,27 @@ void FixedSpace::PrepareForMarkCompact() {
 
 // -----------------------------------------------------------------------------
 // MapSpace implementation
+// TODO(mvstanton): this is weird...the compiler can't make a vtable unless
+// there is at least one non-inlined virtual function. I would prefer to hide
+// the VerifyObject definition behind VERIFY_HEAP.
 
-#ifdef DEBUG
 void MapSpace::VerifyObject(HeapObject* object) {
   // The object should be a map or a free-list node.
-
+  CHECK(object->IsMap() || object->IsFreeSpace());
 }
-#endif
 
 
 // -----------------------------------------------------------------------------
 // GlobalPropertyCellSpace implementation
+// TODO(mvstanton): this is weird...the compiler can't make a vtable unless
+// there is at least one non-inlined virtual function. I would prefer to hide
+// the VerifyObject definition behind VERIFY_HEAP.
 
-#ifdef DEBUG
 void CellSpace::VerifyObject(HeapObject* object) {
   // The object should be a global object property cell or a free-list node.
-
+  CHECK(object->IsJSGlobalPropertyCell() ||
         object->map() == heap()->two_pointer_filler_map());
 }
-#endif
 
 
 // -----------------------------------------------------------------------------
@@ -2648,18 +2926,31 @@ MaybeObject* LargeObjectSpace::AllocateRaw(int object_size,
 
   HeapObject* object = page->GetObject();
 
-
-
-
-
-
-
+  if (Heap::ShouldZapGarbage()) {
+    // Make the object consistent so the heap can be verified in OldSpaceStep.
+    // We only need to do this in debug builds or if verify_heap is on.
+    reinterpret_cast<Object**>(object->address())[0] =
+        heap()->fixed_array_map();
+    reinterpret_cast<Object**>(object->address())[1] = Smi::FromInt(0);
+  }
 
   heap()->incremental_marking()->OldSpaceStep(object_size);
   return object;
 }
 
 
+size_t LargeObjectSpace::CommittedPhysicalMemory() {
+  if (!VirtualMemory::HasLazyCommits()) return CommittedMemory();
+  size_t size = 0;
+  LargePage* current = first_page_;
+  while (current != NULL) {
+    size += current->CommittedPhysicalMemory();
+    current = current->next_page();
+  }
+  return size;
+}
+
+
 // GC support
 MaybeObject* LargeObjectSpace::FindObject(Address a) {
   LargePage* page = FindPage(a);
@@ -2698,7 +2989,8 @@ void LargeObjectSpace::FreeUnmarkedObjects() {
     MarkBit mark_bit = Marking::MarkBitFrom(object);
     if (mark_bit.Get()) {
       mark_bit.Clear();
-
+      Page::FromAddress(object->address())->ResetProgressBar();
+      Page::FromAddress(object->address())->ResetLiveBytes();
       previous = current;
       current = current->next_page();
     } else {
@@ -2752,7 +3044,7 @@ bool LargeObjectSpace::Contains(HeapObject* object) {
 }
 
 
-#ifdef
+#ifdef VERIFY_HEAP
 // We do not assume that the large object iterator works, because it depends
 // on the invariants we are checking during verification.
 void LargeObjectSpace::Verify() {
@@ -2763,18 +3055,18 @@ void LargeObjectSpace::Verify() {
     // object area start.
     HeapObject* object = chunk->GetObject();
     Page* page = Page::FromAddress(object->address());
-
+    CHECK(object->address() == page->area_start());
 
     // The first word should be a map, and we expect all map pointers to be
    // in map space.
     Map* map = object->map();
-
-
+    CHECK(map->IsMap());
+    CHECK(heap()->map_space()->Contains(map));
 
     // We have only code, sequential strings, external strings
     // (sequential strings that have been morphed into external
     // strings), fixed arrays, and byte arrays in large object space.
-
+    CHECK(object->IsCode() || object->IsSeqString() ||
           object->IsExternalString() || object->IsFixedArray() ||
           object->IsFixedDoubleArray() || object->IsByteArray());
 
@@ -2793,15 +3085,17 @@ void LargeObjectSpace::Verify() {
         Object* element = array->get(j);
         if (element->IsHeapObject()) {
           HeapObject* element_object = HeapObject::cast(element);
-
-
+          CHECK(heap()->Contains(element_object));
+          CHECK(element_object->map()->IsMap());
         }
       }
     }
   }
 }
+#endif
 
 
+#ifdef DEBUG
 void LargeObjectSpace::Print() {
   LargeObjectIterator it(this);
   for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {