therubyracer 0.8.1.pre2 → 0.8.1
Potentially problematic release: this version of therubyracer might be problematic.
- data/Changelog.md +2 -1
- data/README.md +6 -3
- data/ext/v8/upstream/3.1.8/.gitignore +31 -0
- data/ext/v8/upstream/3.1.8/AUTHORS +40 -0
- data/ext/v8/upstream/3.1.8/ChangeLog +2566 -0
- data/ext/v8/upstream/3.1.8/LICENSE +52 -0
- data/ext/v8/upstream/3.1.8/LICENSE.strongtalk +29 -0
- data/ext/v8/upstream/3.1.8/LICENSE.v8 +26 -0
- data/ext/v8/upstream/3.1.8/LICENSE.valgrind +45 -0
- data/ext/v8/upstream/3.1.8/SConstruct +1192 -0
- data/ext/v8/upstream/3.1.8/build/README.txt +25 -0
- data/ext/v8/upstream/3.1.8/build/all.gyp +18 -0
- data/ext/v8/upstream/3.1.8/build/armu.gypi +32 -0
- data/ext/v8/upstream/3.1.8/build/common.gypi +82 -0
- data/ext/v8/upstream/3.1.8/build/gyp_v8 +145 -0
- data/ext/v8/upstream/3.1.8/include/v8-debug.h +384 -0
- data/ext/v8/upstream/3.1.8/include/v8-preparser.h +116 -0
- data/ext/v8/upstream/3.1.8/include/v8-profiler.h +426 -0
- data/ext/v8/upstream/3.1.8/include/v8-testing.h +99 -0
- data/ext/v8/upstream/3.1.8/include/v8.h +3846 -0
- data/ext/v8/upstream/3.1.8/include/v8stdint.h +53 -0
- data/ext/v8/upstream/3.1.8/preparser/preparser-process.cc +206 -0
- data/ext/v8/upstream/3.1.8/src/SConscript +356 -0
- data/ext/v8/upstream/3.1.8/src/accessors.cc +907 -0
- data/ext/v8/upstream/3.1.8/src/accessors.h +121 -0
- data/ext/v8/upstream/3.1.8/src/allocation.cc +204 -0
- data/ext/v8/upstream/3.1.8/src/allocation.h +176 -0
- data/ext/v8/upstream/3.1.8/src/api.cc +5191 -0
- data/ext/v8/upstream/3.1.8/src/api.h +508 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/apinatives.js +0 -0
- data/ext/v8/upstream/3.1.8/src/apiutils.h +80 -0
- data/ext/v8/upstream/3.1.8/src/arguments.h +105 -0
- data/ext/v8/upstream/3.1.8/src/arm/assembler-arm-inl.h +352 -0
- data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.cc +2756 -0
- data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.h +1294 -0
- data/ext/v8/upstream/3.1.8/src/arm/builtins-arm.cc +1628 -0
- data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.cc +6783 -0
- data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.h +657 -0
- data/ext/v8/upstream/3.1.8/src/arm/codegen-arm-inl.h +48 -0
- data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.cc +7403 -0
- data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.h +595 -0
- data/ext/v8/upstream/3.1.8/src/arm/constants-arm.cc +152 -0
- data/ext/v8/upstream/3.1.8/src/arm/constants-arm.h +769 -0
- data/ext/v8/upstream/3.1.8/src/arm/cpu-arm.cc +147 -0
- data/ext/v8/upstream/3.1.8/src/arm/debug-arm.cc +315 -0
- data/ext/v8/upstream/3.1.8/src/arm/deoptimizer-arm.cc +700 -0
- data/ext/v8/upstream/3.1.8/src/arm/disasm-arm.cc +1439 -0
- data/ext/v8/upstream/3.1.8/src/arm/frames-arm.cc +45 -0
- data/ext/v8/upstream/3.1.8/src/arm/frames-arm.h +168 -0
- data/ext/v8/upstream/3.1.8/src/arm/full-codegen-arm.cc +4230 -0
- data/ext/v8/upstream/3.1.8/src/arm/ic-arm.cc +1799 -0
- data/ext/v8/upstream/3.1.8/src/arm/jump-target-arm.cc +174 -0
- data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.cc +2041 -0
- data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.h +2046 -0
- data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.cc +3822 -0
- data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.h +312 -0
- data/ext/v8/upstream/3.1.8/src/arm/lithium-gap-resolver-arm.cc +303 -0
- data/ext/v8/upstream/3.1.8/src/arm/lithium-gap-resolver-arm.h +84 -0
- data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.cc +2701 -0
- data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.h +1015 -0
- data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.cc +1280 -0
- data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.h +252 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/register-allocator-arm-inl.h +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/register-allocator-arm.cc +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/register-allocator-arm.h +0 -0
- data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.cc +3165 -0
- data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.h +402 -0
- data/ext/v8/upstream/3.1.8/src/arm/stub-cache-arm.cc +4077 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/virtual-frame-arm-inl.h +0 -0
- data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.cc +843 -0
- data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.h +520 -0
- data/ext/v8/upstream/3.1.8/src/array.js +1231 -0
- data/ext/v8/upstream/3.1.8/src/assembler.cc +973 -0
- data/ext/v8/upstream/3.1.8/src/assembler.h +787 -0
- data/ext/v8/upstream/3.1.8/src/ast-inl.h +107 -0
- data/ext/v8/upstream/3.1.8/src/ast.cc +1067 -0
- data/ext/v8/upstream/3.1.8/src/ast.h +2177 -0
- data/ext/v8/upstream/3.1.8/src/atomicops.h +165 -0
- data/ext/v8/upstream/3.1.8/src/atomicops_internals_arm_gcc.h +145 -0
- data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_gcc.cc +126 -0
- data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_gcc.h +287 -0
- data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_macosx.h +301 -0
- data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_msvc.h +203 -0
- data/ext/v8/upstream/3.1.8/src/bignum-dtoa.cc +655 -0
- data/ext/v8/upstream/3.1.8/src/bignum-dtoa.h +81 -0
- data/ext/v8/upstream/3.1.8/src/bignum.cc +768 -0
- data/ext/v8/upstream/3.1.8/src/bignum.h +140 -0
- data/ext/v8/upstream/3.1.8/src/bootstrapper.cc +1888 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/bootstrapper.h +0 -0
- data/ext/v8/upstream/3.1.8/src/builtins.cc +1586 -0
- data/ext/v8/upstream/3.1.8/src/builtins.h +339 -0
- data/ext/v8/upstream/3.1.8/src/bytecodes-irregexp.h +105 -0
- data/ext/v8/upstream/3.1.8/src/cached-powers.cc +177 -0
- data/ext/v8/upstream/3.1.8/src/cached-powers.h +65 -0
- data/ext/v8/upstream/3.1.8/src/char-predicates-inl.h +94 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/char-predicates.h +0 -0
- data/ext/v8/upstream/3.1.8/src/checks.cc +110 -0
- data/ext/v8/upstream/3.1.8/src/checks.h +292 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/circular-queue-inl.h +0 -0
- data/ext/v8/upstream/3.1.8/src/circular-queue.cc +122 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/circular-queue.h +0 -0
- data/ext/v8/upstream/3.1.8/src/code-stubs.cc +230 -0
- data/ext/v8/upstream/3.1.8/src/code-stubs.h +950 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/code.h +0 -0
- data/ext/v8/upstream/3.1.8/src/codegen-inl.h +64 -0
- data/ext/v8/upstream/3.1.8/src/codegen.cc +495 -0
- data/ext/v8/upstream/3.1.8/src/codegen.h +245 -0
- data/ext/v8/upstream/3.1.8/src/compilation-cache.cc +654 -0
- data/ext/v8/upstream/3.1.8/src/compilation-cache.h +112 -0
- data/ext/v8/upstream/3.1.8/src/compiler.cc +806 -0
- data/ext/v8/upstream/3.1.8/src/compiler.h +290 -0
- data/ext/v8/upstream/3.1.8/src/contexts.cc +320 -0
- data/ext/v8/upstream/3.1.8/src/contexts.h +376 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/conversions-inl.h +0 -0
- data/ext/v8/upstream/3.1.8/src/conversions.cc +1069 -0
- data/ext/v8/upstream/3.1.8/src/conversions.h +122 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/counters.cc +0 -0
- data/ext/v8/upstream/3.1.8/src/counters.h +242 -0
- data/ext/v8/upstream/3.1.8/src/cpu-profiler-inl.h +100 -0
- data/ext/v8/upstream/3.1.8/src/cpu-profiler.cc +554 -0
- data/ext/v8/upstream/3.1.8/src/cpu-profiler.h +291 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/cpu.h +0 -0
- data/ext/v8/upstream/3.1.8/src/d8-debug.cc +367 -0
- data/ext/v8/upstream/3.1.8/src/d8-debug.h +157 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/d8-posix.cc +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/d8-readline.cc +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/d8-windows.cc +0 -0
- data/ext/v8/upstream/3.1.8/src/d8.cc +792 -0
- data/ext/v8/upstream/3.1.8/src/d8.gyp +85 -0
- data/ext/v8/upstream/3.1.8/src/d8.h +231 -0
- data/ext/v8/upstream/3.1.8/src/d8.js +2798 -0
- data/ext/v8/upstream/3.1.8/src/data-flow.cc +545 -0
- data/ext/v8/upstream/3.1.8/src/data-flow.h +379 -0
- data/ext/v8/upstream/3.1.8/src/date.js +1103 -0
- data/ext/v8/upstream/3.1.8/src/dateparser-inl.h +125 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/dateparser.cc +0 -0
- data/ext/v8/upstream/3.1.8/src/dateparser.h +263 -0
- data/ext/v8/upstream/3.1.8/src/debug-agent.cc +446 -0
- data/ext/v8/upstream/3.1.8/src/debug-agent.h +131 -0
- data/ext/v8/upstream/3.1.8/src/debug-debugger.js +2569 -0
- data/ext/v8/upstream/3.1.8/src/debug.cc +3085 -0
- data/ext/v8/upstream/3.1.8/src/debug.h +1025 -0
- data/ext/v8/upstream/3.1.8/src/deoptimizer.cc +1185 -0
- data/ext/v8/upstream/3.1.8/src/deoptimizer.h +529 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/disasm.h +0 -0
- data/ext/v8/upstream/3.1.8/src/disassembler.cc +338 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/disassembler.h +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/diy-fp.cc +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/diy-fp.h +0 -0
- data/ext/v8/upstream/3.1.8/src/double.h +238 -0
- data/ext/v8/upstream/3.1.8/src/dtoa.cc +103 -0
- data/ext/v8/upstream/3.1.8/src/dtoa.h +85 -0
- data/ext/v8/upstream/3.1.8/src/execution.cc +735 -0
- data/ext/v8/upstream/3.1.8/src/execution.h +322 -0
- data/ext/v8/upstream/3.1.8/src/extensions/experimental/experimental.gyp +53 -0
- data/ext/v8/upstream/3.1.8/src/extensions/experimental/i18n-extension.cc +264 -0
- data/ext/v8/upstream/3.1.8/src/extensions/experimental/i18n-extension.h +64 -0
- data/ext/v8/upstream/3.1.8/src/extensions/externalize-string-extension.cc +141 -0
- data/ext/v8/upstream/3.1.8/src/extensions/externalize-string-extension.h +50 -0
- data/ext/v8/upstream/3.1.8/src/extensions/gc-extension.cc +58 -0
- data/ext/v8/upstream/3.1.8/src/extensions/gc-extension.h +49 -0
- data/ext/v8/upstream/3.1.8/src/factory.cc +1087 -0
- data/ext/v8/upstream/3.1.8/src/factory.h +432 -0
- data/ext/v8/upstream/3.1.8/src/fast-dtoa.cc +736 -0
- data/ext/v8/upstream/3.1.8/src/fast-dtoa.h +83 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/fixed-dtoa.cc +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/fixed-dtoa.h +0 -0
- data/ext/v8/upstream/3.1.8/src/flag-definitions.h +552 -0
- data/ext/v8/upstream/3.1.8/src/flags.cc +551 -0
- data/ext/v8/upstream/3.1.8/src/flags.h +79 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/frame-element.cc +0 -0
- data/ext/v8/upstream/3.1.8/src/frame-element.h +277 -0
- data/ext/v8/upstream/3.1.8/src/frames-inl.h +210 -0
- data/ext/v8/upstream/3.1.8/src/frames.cc +1232 -0
- data/ext/v8/upstream/3.1.8/src/frames.h +826 -0
- data/ext/v8/upstream/3.1.8/src/full-codegen.cc +1382 -0
- data/ext/v8/upstream/3.1.8/src/full-codegen.h +751 -0
- data/ext/v8/upstream/3.1.8/src/func-name-inferrer.cc +90 -0
- data/ext/v8/upstream/3.1.8/src/func-name-inferrer.h +111 -0
- data/ext/v8/upstream/3.1.8/src/gdb-jit.cc +1547 -0
- data/ext/v8/upstream/3.1.8/src/gdb-jit.h +138 -0
- data/ext/v8/upstream/3.1.8/src/global-handles.cc +534 -0
- data/ext/v8/upstream/3.1.8/src/global-handles.h +181 -0
- data/ext/v8/upstream/3.1.8/src/globals.h +325 -0
- data/ext/v8/upstream/3.1.8/src/handles-inl.h +80 -0
- data/ext/v8/upstream/3.1.8/src/handles.cc +910 -0
- data/ext/v8/upstream/3.1.8/src/handles.h +424 -0
- data/ext/v8/upstream/3.1.8/src/hashmap.cc +230 -0
- data/ext/v8/upstream/3.1.8/src/hashmap.h +121 -0
- data/ext/v8/upstream/3.1.8/src/heap-inl.h +587 -0
- data/ext/v8/upstream/3.1.8/src/heap-profiler.cc +1128 -0
- data/ext/v8/upstream/3.1.8/src/heap-profiler.h +381 -0
- data/ext/v8/upstream/3.1.8/src/heap.cc +5610 -0
- data/ext/v8/upstream/3.1.8/src/heap.h +2218 -0
- data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.cc +1490 -0
- data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.h +3493 -0
- data/ext/v8/upstream/3.1.8/src/hydrogen.cc +6056 -0
- data/ext/v8/upstream/3.1.8/src/hydrogen.h +1091 -0
- data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32-inl.h +429 -0
- data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.cc +2800 -0
- data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.h +1093 -0
- data/ext/v8/upstream/3.1.8/src/ia32/builtins-ia32.cc +1590 -0
- data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.cc +6624 -0
- data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.h +536 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/codegen-ia32-inl.h +0 -0
- data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.cc +10354 -0
- data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.h +798 -0
- data/ext/v8/upstream/3.1.8/src/ia32/cpu-ia32.cc +87 -0
- data/ext/v8/upstream/3.1.8/src/ia32/debug-ia32.cc +309 -0
- data/ext/v8/upstream/3.1.8/src/ia32/deoptimizer-ia32.cc +664 -0
- data/ext/v8/upstream/3.1.8/src/ia32/disasm-ia32.cc +1597 -0
- data/ext/v8/upstream/3.1.8/src/ia32/frames-ia32.cc +45 -0
- data/ext/v8/upstream/3.1.8/src/ia32/frames-ia32.h +140 -0
- data/ext/v8/upstream/3.1.8/src/ia32/full-codegen-ia32.cc +4278 -0
- data/ext/v8/upstream/3.1.8/src/ia32/ic-ia32.cc +1786 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/jump-target-ia32.cc +0 -0
- data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.cc +3880 -0
- data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.h +309 -0
- data/ext/v8/upstream/3.1.8/src/ia32/lithium-gap-resolver-ia32.cc +460 -0
- data/ext/v8/upstream/3.1.8/src/ia32/lithium-gap-resolver-ia32.h +110 -0
- data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.cc +2095 -0
- data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.h +2127 -0
- data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.cc +2031 -0
- data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.h +798 -0
- data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.cc +1253 -0
- data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.h +215 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/register-allocator-ia32-inl.h +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/register-allocator-ia32.cc +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/register-allocator-ia32.h +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/simulator-ia32.cc +0 -0
- data/ext/v8/upstream/3.1.8/src/ia32/simulator-ia32.h +72 -0
- data/ext/v8/upstream/3.1.8/src/ia32/stub-cache-ia32.cc +3732 -0
- data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.cc +1360 -0
- data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.h +646 -0
- data/ext/v8/upstream/3.1.8/src/ic-inl.h +129 -0
- data/ext/v8/upstream/3.1.8/src/ic.cc +2333 -0
- data/ext/v8/upstream/3.1.8/src/ic.h +639 -0
- data/ext/v8/upstream/3.1.8/src/inspector.cc +63 -0
- data/ext/v8/upstream/3.1.8/src/inspector.h +62 -0
- data/ext/v8/upstream/3.1.8/src/interpreter-irregexp.cc +655 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/interpreter-irregexp.h +0 -0
- data/ext/v8/upstream/3.1.8/src/json.js +342 -0
- data/ext/v8/upstream/3.1.8/src/jsregexp.cc +5340 -0
- data/ext/v8/upstream/3.1.8/src/jsregexp.h +1484 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target-heavy-inl.h +0 -0
- data/ext/v8/upstream/3.1.8/src/jump-target-heavy.cc +430 -0
- data/ext/v8/upstream/3.1.8/src/jump-target-heavy.h +244 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target-inl.h +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target-light-inl.h +0 -0
- data/ext/v8/upstream/3.1.8/src/jump-target-light.cc +111 -0
- data/ext/v8/upstream/3.1.8/src/jump-target-light.h +193 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target.cc +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target.h +0 -0
- data/ext/v8/upstream/3.1.8/src/list-inl.h +206 -0
- data/ext/v8/upstream/3.1.8/src/list.h +164 -0
- data/ext/v8/upstream/3.1.8/src/lithium-allocator-inl.h +140 -0
- data/ext/v8/upstream/3.1.8/src/lithium-allocator.cc +2093 -0
- data/ext/v8/upstream/3.1.8/src/lithium-allocator.h +644 -0
- data/ext/v8/upstream/3.1.8/src/lithium.cc +168 -0
- data/ext/v8/upstream/3.1.8/src/lithium.h +592 -0
- data/ext/v8/upstream/3.1.8/src/liveedit-debugger.js +1082 -0
- data/ext/v8/upstream/3.1.8/src/liveedit.cc +1650 -0
- data/ext/v8/upstream/3.1.8/src/liveedit.h +174 -0
- data/ext/v8/upstream/3.1.8/src/liveobjectlist-inl.h +126 -0
- data/ext/v8/upstream/3.1.8/src/liveobjectlist.cc +2527 -0
- data/ext/v8/upstream/3.1.8/src/liveobjectlist.h +322 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/log-inl.h +0 -0
- data/ext/v8/upstream/3.1.8/src/log-utils.cc +336 -0
- data/ext/v8/upstream/3.1.8/src/log-utils.h +232 -0
- data/ext/v8/upstream/3.1.8/src/log.cc +1608 -0
- data/ext/v8/upstream/3.1.8/src/log.h +379 -0
- data/ext/v8/upstream/3.1.8/src/macro-assembler.h +120 -0
- data/ext/v8/upstream/3.1.8/src/macros.py +178 -0
- data/ext/v8/upstream/3.1.8/src/mark-compact.cc +2957 -0
- data/ext/v8/upstream/3.1.8/src/mark-compact.h +433 -0
- data/ext/v8/upstream/3.1.8/src/math.js +264 -0
- data/ext/v8/upstream/3.1.8/src/memory.h +82 -0
- data/ext/v8/upstream/3.1.8/src/messages.cc +164 -0
- data/ext/v8/upstream/3.1.8/src/messages.h +114 -0
- data/ext/v8/upstream/3.1.8/src/messages.js +1071 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/assembler-mips-inl.h +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/assembler-mips.cc +0 -0
- data/ext/v8/upstream/3.1.8/src/mips/assembler-mips.h +667 -0
- data/ext/v8/upstream/3.1.8/src/mips/builtins-mips.cc +205 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/codegen-mips-inl.h +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/codegen-mips.cc +0 -0
- data/ext/v8/upstream/3.1.8/src/mips/codegen-mips.h +431 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/constants-mips.cc +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/constants-mips.h +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/cpu-mips.cc +0 -0
- data/ext/v8/upstream/3.1.8/src/mips/debug-mips.cc +127 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/disasm-mips.cc +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/fast-codegen-mips.cc +0 -0
- data/ext/v8/upstream/3.1.8/src/mips/frames-mips.cc +96 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/frames-mips.h +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/full-codegen-mips.cc +0 -0
- data/ext/v8/upstream/3.1.8/src/mips/ic-mips.cc +208 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/jump-target-mips.cc +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/macro-assembler-mips.cc +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/macro-assembler-mips.h +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/register-allocator-mips-inl.h +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/register-allocator-mips.cc +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/register-allocator-mips.h +0 -0
- data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.cc +1650 -0
- data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.h +311 -0
- data/ext/v8/upstream/3.1.8/src/mips/stub-cache-mips.cc +418 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/virtual-frame-mips.cc +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/virtual-frame-mips.h +0 -0
- data/ext/v8/upstream/3.1.8/src/mirror-debugger.js +2380 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mksnapshot.cc +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/natives.h +0 -0
- data/ext/v8/upstream/3.1.8/src/objects-debug.cc +722 -0
- data/ext/v8/upstream/3.1.8/src/objects-inl.h +3946 -0
- data/ext/v8/upstream/3.1.8/src/objects-printer.cc +801 -0
- data/ext/v8/upstream/3.1.8/src/objects-visiting.cc +142 -0
- data/ext/v8/upstream/3.1.8/src/objects-visiting.h +401 -0
- data/ext/v8/upstream/3.1.8/src/objects.cc +10044 -0
- data/ext/v8/upstream/3.1.8/src/objects.h +6571 -0
- data/ext/v8/upstream/3.1.8/src/parser.cc +5165 -0
- data/ext/v8/upstream/3.1.8/src/parser.h +802 -0
- data/ext/v8/upstream/3.1.8/src/platform-cygwin.cc +745 -0
- data/ext/v8/upstream/3.1.8/src/platform-freebsd.cc +702 -0
- data/ext/v8/upstream/3.1.8/src/platform-linux.cc +981 -0
- data/ext/v8/upstream/3.1.8/src/platform-macos.cc +732 -0
- data/ext/v8/upstream/3.1.8/src/platform-nullos.cc +498 -0
- data/ext/v8/upstream/3.1.8/src/platform-openbsd.cc +657 -0
- data/ext/v8/upstream/3.1.8/src/platform-posix.cc +399 -0
- data/ext/v8/upstream/3.1.8/src/platform-solaris.cc +714 -0
- data/ext/v8/upstream/3.1.8/src/platform-win32.cc +1974 -0
- data/ext/v8/upstream/3.1.8/src/platform.h +636 -0
- data/ext/v8/upstream/3.1.8/src/preparse-data.cc +183 -0
- data/ext/v8/upstream/3.1.8/src/preparse-data.h +249 -0
- data/ext/v8/upstream/3.1.8/src/preparser-api.cc +213 -0
- data/ext/v8/upstream/3.1.8/src/preparser.cc +1205 -0
- data/ext/v8/upstream/3.1.8/src/preparser.h +278 -0
- data/ext/v8/upstream/3.1.8/src/prettyprinter.cc +1539 -0
- data/ext/v8/upstream/3.1.8/src/prettyprinter.h +223 -0
- data/ext/v8/upstream/3.1.8/src/profile-generator-inl.h +128 -0
- data/ext/v8/upstream/3.1.8/src/profile-generator.cc +2899 -0
- data/ext/v8/upstream/3.1.8/src/profile-generator.h +1151 -0
- data/ext/v8/upstream/3.1.8/src/property.cc +96 -0
- data/ext/v8/upstream/3.1.8/src/property.h +337 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/regexp-macro-assembler-irregexp-inl.h +0 -0
- data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-irregexp.cc +470 -0
- data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-irregexp.h +142 -0
- data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-tracer.cc +373 -0
- data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-tracer.h +104 -0
- data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.cc +257 -0
- data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.h +231 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/regexp-stack.cc +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/regexp-stack.h +0 -0
- data/ext/v8/upstream/3.1.8/src/regexp.js +483 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/register-allocator-inl.h +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/register-allocator.cc +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/register-allocator.h +0 -0
- data/ext/v8/upstream/3.1.8/src/rewriter.cc +1023 -0
- data/ext/v8/upstream/3.1.8/src/rewriter.h +59 -0
- data/ext/v8/upstream/3.1.8/src/runtime-profiler.cc +443 -0
- data/ext/v8/upstream/3.1.8/src/runtime-profiler.h +77 -0
- data/ext/v8/upstream/3.1.8/src/runtime.cc +11592 -0
- data/ext/v8/upstream/3.1.8/src/runtime.h +582 -0
- data/ext/v8/upstream/3.1.8/src/runtime.js +643 -0
- data/ext/v8/upstream/3.1.8/src/safepoint-table.cc +253 -0
- data/ext/v8/upstream/3.1.8/src/safepoint-table.h +263 -0
- data/ext/v8/upstream/3.1.8/src/scanner-base.cc +971 -0
- data/ext/v8/upstream/3.1.8/src/scanner-base.h +653 -0
- data/ext/v8/upstream/3.1.8/src/scanner.cc +586 -0
- data/ext/v8/upstream/3.1.8/src/scanner.h +194 -0
- data/ext/v8/upstream/3.1.8/src/scopeinfo.cc +636 -0
- data/ext/v8/upstream/3.1.8/src/scopeinfo.h +238 -0
- data/ext/v8/upstream/3.1.8/src/scopes.cc +1063 -0
- data/ext/v8/upstream/3.1.8/src/scopes.h +494 -0
- data/ext/v8/upstream/3.1.8/src/serialize.cc +1535 -0
- data/ext/v8/upstream/3.1.8/src/serialize.h +584 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/shell.h +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/simulator.h +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/smart-pointer.h +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/snapshot-common.cc +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/snapshot-empty.cc +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/snapshot.h +0 -0
- data/ext/v8/upstream/3.1.8/src/spaces-inl.h +524 -0
- data/ext/v8/upstream/3.1.8/src/spaces.cc +3254 -0
- data/ext/v8/upstream/3.1.8/src/spaces.h +2362 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/splay-tree-inl.h +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/splay-tree.h +0 -0
- data/ext/v8/upstream/3.1.8/src/string-search.cc +40 -0
- data/ext/v8/upstream/3.1.8/src/string-search.h +567 -0
- data/ext/v8/upstream/3.1.8/src/string-stream.cc +584 -0
- data/ext/v8/upstream/3.1.8/src/string-stream.h +191 -0
- data/ext/v8/upstream/3.1.8/src/string.js +915 -0
- data/ext/v8/upstream/3.1.8/src/strtod.cc +440 -0
- data/ext/v8/upstream/3.1.8/src/strtod.h +40 -0
- data/ext/v8/upstream/3.1.8/src/stub-cache.cc +1878 -0
- data/ext/v8/upstream/3.1.8/src/stub-cache.h +849 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/third_party/valgrind/valgrind.h +0 -0
- data/ext/v8/upstream/3.1.8/src/token.cc +63 -0
- data/ext/v8/upstream/3.1.8/src/token.h +288 -0
- data/ext/v8/upstream/3.1.8/src/top.cc +1152 -0
- data/ext/v8/upstream/3.1.8/src/top.h +608 -0
- data/ext/v8/upstream/3.1.8/src/type-info.cc +406 -0
- data/ext/v8/upstream/3.1.8/src/type-info.h +283 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/unbound-queue-inl.h +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/unbound-queue.h +0 -0
- data/ext/v8/upstream/3.1.8/src/unicode-inl.h +238 -0
- data/ext/v8/upstream/3.1.8/src/unicode.cc +1624 -0
- data/ext/v8/upstream/3.1.8/src/unicode.h +280 -0
- data/ext/v8/upstream/3.1.8/src/uri.js +402 -0
- data/ext/v8/upstream/3.1.8/src/utils.cc +371 -0
- data/ext/v8/upstream/3.1.8/src/utils.h +793 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/v8-counters.cc +0 -0
- data/ext/v8/upstream/3.1.8/src/v8-counters.h +290 -0
- data/ext/v8/upstream/3.1.8/src/v8.cc +270 -0
- data/ext/v8/upstream/3.1.8/src/v8.h +127 -0
- data/ext/v8/upstream/3.1.8/src/v8checks.h +64 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/v8dll-main.cc +0 -0
- data/ext/v8/upstream/3.1.8/src/v8globals.h +480 -0
- data/ext/v8/upstream/3.1.8/src/v8natives.js +1252 -0
- data/ext/v8/upstream/3.1.8/src/v8preparserdll-main.cc +39 -0
- data/ext/v8/upstream/3.1.8/src/v8threads.cc +440 -0
- data/ext/v8/upstream/3.1.8/src/v8threads.h +157 -0
- data/ext/v8/upstream/3.1.8/src/v8utils.h +354 -0
- data/ext/v8/upstream/3.1.8/src/variables.cc +132 -0
- data/ext/v8/upstream/3.1.8/src/variables.h +212 -0
- data/ext/v8/upstream/3.1.8/src/version.cc +95 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/version.h +0 -0
- data/ext/v8/upstream/3.1.8/src/virtual-frame-heavy-inl.h +190 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-heavy.cc +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-inl.h +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-light-inl.h +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-light.cc +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame.cc +0 -0
- data/ext/v8/upstream/3.1.8/src/virtual-frame.h +59 -0
- data/ext/v8/upstream/3.1.8/src/vm-state-inl.h +134 -0
- data/ext/v8/upstream/3.1.8/src/vm-state.h +68 -0
- data/ext/v8/upstream/3.1.8/src/win32-headers.h +95 -0
- data/ext/v8/upstream/3.1.8/src/x64/assembler-x64-inl.h +455 -0
- data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.cc +3162 -0
- data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.h +1584 -0
- data/ext/v8/upstream/3.1.8/src/x64/builtins-x64.cc +1492 -0
- data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.cc +5150 -0
- data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.h +519 -0
- data/ext/v8/upstream/3.1.8/src/x64/codegen-x64-inl.h +46 -0
- data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.cc +8835 -0
- data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.h +750 -0
- data/ext/v8/upstream/3.1.8/src/x64/cpu-x64.cc +86 -0
- data/ext/v8/upstream/3.1.8/src/x64/debug-x64.cc +316 -0
- data/ext/v8/upstream/3.1.8/src/x64/deoptimizer-x64.cc +781 -0
- data/ext/v8/upstream/3.1.8/src/x64/disasm-x64.cc +1737 -0
- data/ext/v8/upstream/3.1.8/src/x64/frames-x64.cc +45 -0
- data/ext/v8/upstream/3.1.8/src/x64/frames-x64.h +130 -0
- data/ext/v8/upstream/3.1.8/src/x64/full-codegen-x64.cc +3984 -0
- data/ext/v8/upstream/3.1.8/src/x64/ic-x64.cc +1761 -0
- data/ext/v8/upstream/3.1.8/src/x64/jump-target-x64.cc +437 -0
- data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.cc +3639 -0
- data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.h +305 -0
- data/ext/v8/upstream/3.1.8/src/x64/lithium-gap-resolver-x64.cc +320 -0
- data/ext/v8/upstream/3.1.8/src/x64/lithium-gap-resolver-x64.h +74 -0
- data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.cc +2044 -0
- data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.h +2052 -0
- data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.cc +2660 -0
- data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.h +1852 -0
- data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.cc +1382 -0
- data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.h +278 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/register-allocator-x64-inl.h +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/register-allocator-x64.cc +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/register-allocator-x64.h +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/simulator-x64.cc +0 -0
- data/ext/v8/upstream/3.1.8/src/x64/simulator-x64.h +71 -0
- data/ext/v8/upstream/3.1.8/src/x64/stub-cache-x64.cc +3509 -0
- data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.cc +1292 -0
- data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.h +593 -0
- data/ext/v8/upstream/3.1.8/src/zone-inl.h +83 -0
- data/ext/v8/upstream/3.1.8/src/zone.cc +195 -0
- data/ext/v8/upstream/3.1.8/src/zone.h +233 -0
- data/ext/v8/upstream/3.1.8/tools/codemap.js +265 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/consarray.js +0 -0
- data/ext/v8/upstream/3.1.8/tools/csvparser.js +78 -0
- data/ext/v8/upstream/3.1.8/tools/disasm.py +92 -0
- data/ext/v8/upstream/3.1.8/tools/gc-nvp-trace-processor.py +328 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/generate-ten-powers.scm +0 -0
- data/ext/v8/upstream/3.1.8/tools/grokdump.py +840 -0
- data/ext/v8/upstream/3.1.8/tools/gyp/v8.gyp +869 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/js2c.py +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/jsmin.py +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/linux-tick-processor +0 -0
- data/ext/v8/upstream/3.1.8/tools/ll_prof.py +919 -0
- data/ext/v8/upstream/3.1.8/tools/logreader.js +185 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/mac-nm +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/mac-tick-processor +0 -0
- data/ext/v8/upstream/3.1.8/tools/oom_dump/README +31 -0
- data/ext/v8/upstream/3.1.8/tools/oom_dump/SConstruct +42 -0
- data/ext/v8/upstream/3.1.8/tools/oom_dump/oom_dump.cc +288 -0
- data/ext/v8/upstream/3.1.8/tools/presubmit.py +305 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/process-heap-prof.py +0 -0
- data/ext/v8/upstream/3.1.8/tools/profile.js +751 -0
- data/ext/v8/upstream/3.1.8/tools/profile_view.js +219 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/run-valgrind.py +0 -0
- data/ext/v8/upstream/3.1.8/tools/splaytree.js +316 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/stats-viewer.py +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/tickprocessor-driver.js +0 -0
- data/ext/v8/upstream/3.1.8/tools/tickprocessor.js +863 -0
- data/ext/v8/upstream/3.1.8/tools/utils.py +96 -0
- data/ext/v8/upstream/3.1.8/tools/visual_studio/README.txt +70 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/arm.vsprops +0 -0
- data/ext/v8/upstream/3.1.8/tools/visual_studio/common.vsprops +34 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8.vcproj +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8_arm.vcproj +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8_x64.vcproj +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8js2c.cmd +0 -0
- data/ext/v8/upstream/3.1.8/tools/visual_studio/debug.vsprops +17 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/ia32.vsprops +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/js2c.cmd +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/release.vsprops +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8.sln +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8.vcproj +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_arm.sln +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_arm.vcproj +0 -0
- data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base.vcproj +1296 -0
- data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_arm.vcproj +1234 -0
- data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_x64.vcproj +1296 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_cctest.vcproj +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_cctest_arm.vcproj +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_cctest_x64.vcproj +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_mksnapshot.vcproj +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_mksnapshot_x64.vcproj +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_process_sample.vcproj +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_process_sample_arm.vcproj +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_process_sample_x64.vcproj +0 -0
- data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_shell_sample.vcproj +147 -0
- data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_shell_sample_arm.vcproj +147 -0
- data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_shell_sample_x64.vcproj +163 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot.vcproj +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot_cc.vcproj +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot_cc_x64.vcproj +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot_x64.vcproj +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_x64.sln +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_x64.vcproj +0 -0
- data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/x64.vsprops +0 -0
- data/ext/v8/upstream/3.1.8/tools/windows-tick-processor.bat +30 -0
- data/ext/v8/upstream/Makefile +2 -1
- data/ext/v8/v8_template.cpp +2 -2
- data/lib/v8/version.rb +1 -1
- data/spec/redjs/jsapi_spec.rb +2 -2
- metadata +552 -490
- data/ext/v8/upstream/2.3.3/.gitignore +0 -26
- data/ext/v8/upstream/2.3.3/AUTHORS +0 -31
- data/ext/v8/upstream/2.3.3/ChangeLog +0 -1916
- data/ext/v8/upstream/2.3.3/LICENSE +0 -55
- data/ext/v8/upstream/2.3.3/SConstruct +0 -1154
- data/ext/v8/upstream/2.3.3/include/v8-debug.h +0 -381
- data/ext/v8/upstream/2.3.3/include/v8-profiler.h +0 -353
- data/ext/v8/upstream/2.3.3/include/v8.h +0 -3616
- data/ext/v8/upstream/2.3.3/src/SConscript +0 -330
- data/ext/v8/upstream/2.3.3/src/accessors.cc +0 -661
- data/ext/v8/upstream/2.3.3/src/accessors.h +0 -114
- data/ext/v8/upstream/2.3.3/src/allocation.cc +0 -198
- data/ext/v8/upstream/2.3.3/src/allocation.h +0 -169
- data/ext/v8/upstream/2.3.3/src/api.cc +0 -4795
- data/ext/v8/upstream/2.3.3/src/api.h +0 -485
- data/ext/v8/upstream/2.3.3/src/apiutils.h +0 -69
- data/ext/v8/upstream/2.3.3/src/arguments.h +0 -96
- data/ext/v8/upstream/2.3.3/src/arm/assembler-arm-inl.h +0 -305
- data/ext/v8/upstream/2.3.3/src/arm/assembler-arm.cc +0 -2580
- data/ext/v8/upstream/2.3.3/src/arm/assembler-arm.h +0 -1275
- data/ext/v8/upstream/2.3.3/src/arm/builtins-arm.cc +0 -1320
- data/ext/v8/upstream/2.3.3/src/arm/codegen-arm-inl.h +0 -48
- data/ext/v8/upstream/2.3.3/src/arm/codegen-arm.cc +0 -11398
- data/ext/v8/upstream/2.3.3/src/arm/codegen-arm.h +0 -1102
- data/ext/v8/upstream/2.3.3/src/arm/constants-arm.cc +0 -154
- data/ext/v8/upstream/2.3.3/src/arm/constants-arm.h +0 -388
- data/ext/v8/upstream/2.3.3/src/arm/cpu-arm.cc +0 -142
- data/ext/v8/upstream/2.3.3/src/arm/debug-arm.cc +0 -309
- data/ext/v8/upstream/2.3.3/src/arm/disasm-arm.cc +0 -1459
- data/ext/v8/upstream/2.3.3/src/arm/fast-codegen-arm.cc +0 -241
- data/ext/v8/upstream/2.3.3/src/arm/frames-arm.cc +0 -123
- data/ext/v8/upstream/2.3.3/src/arm/frames-arm.h +0 -162
- data/ext/v8/upstream/2.3.3/src/arm/full-codegen-arm.cc +0 -3178
- data/ext/v8/upstream/2.3.3/src/arm/ic-arm.cc +0 -2258
- data/ext/v8/upstream/2.3.3/src/arm/jump-target-arm.cc +0 -164
- data/ext/v8/upstream/2.3.3/src/arm/macro-assembler-arm.cc +0 -1892
- data/ext/v8/upstream/2.3.3/src/arm/macro-assembler-arm.h +0 -727
- data/ext/v8/upstream/2.3.3/src/arm/regexp-macro-assembler-arm.cc +0 -1261
- data/ext/v8/upstream/2.3.3/src/arm/regexp-macro-assembler-arm.h +0 -266
- data/ext/v8/upstream/2.3.3/src/arm/simulator-arm.cc +0 -2822
- data/ext/v8/upstream/2.3.3/src/arm/simulator-arm.h +0 -361
- data/ext/v8/upstream/2.3.3/src/arm/stub-cache-arm.cc +0 -2387
- data/ext/v8/upstream/2.3.3/src/arm/virtual-frame-arm.cc +0 -834
- data/ext/v8/upstream/2.3.3/src/arm/virtual-frame-arm.h +0 -519
- data/ext/v8/upstream/2.3.3/src/array.js +0 -1127
- data/ext/v8/upstream/2.3.3/src/assembler.cc +0 -801
- data/ext/v8/upstream/2.3.3/src/assembler.h +0 -573
- data/ext/v8/upstream/2.3.3/src/ast-inl.h +0 -81
- data/ext/v8/upstream/2.3.3/src/ast.cc +0 -1152
- data/ext/v8/upstream/2.3.3/src/ast.h +0 -2106
- data/ext/v8/upstream/2.3.3/src/bootstrapper.cc +0 -1819
- data/ext/v8/upstream/2.3.3/src/builtins.cc +0 -1529
- data/ext/v8/upstream/2.3.3/src/builtins.h +0 -263
- data/ext/v8/upstream/2.3.3/src/bytecodes-irregexp.h +0 -104
- data/ext/v8/upstream/2.3.3/src/cached-powers.h +0 -119
- data/ext/v8/upstream/2.3.3/src/char-predicates-inl.h +0 -86
- data/ext/v8/upstream/2.3.3/src/checks.cc +0 -100
- data/ext/v8/upstream/2.3.3/src/checks.h +0 -310
- data/ext/v8/upstream/2.3.3/src/circular-queue.cc +0 -121
- data/ext/v8/upstream/2.3.3/src/code-stubs.cc +0 -177
- data/ext/v8/upstream/2.3.3/src/code-stubs.h +0 -177
- data/ext/v8/upstream/2.3.3/src/codegen-inl.h +0 -60
- data/ext/v8/upstream/2.3.3/src/codegen.cc +0 -516
- data/ext/v8/upstream/2.3.3/src/codegen.h +0 -897
- data/ext/v8/upstream/2.3.3/src/compilation-cache.cc +0 -562
- data/ext/v8/upstream/2.3.3/src/compilation-cache.h +0 -102
- data/ext/v8/upstream/2.3.3/src/compiler.cc +0 -654
- data/ext/v8/upstream/2.3.3/src/compiler.h +0 -299
- data/ext/v8/upstream/2.3.3/src/contexts.cc +0 -256
- data/ext/v8/upstream/2.3.3/src/contexts.h +0 -342
- data/ext/v8/upstream/2.3.3/src/conversions.cc +0 -1119
- data/ext/v8/upstream/2.3.3/src/conversions.h +0 -123
- data/ext/v8/upstream/2.3.3/src/counters.h +0 -239
- data/ext/v8/upstream/2.3.3/src/cpu-profiler-inl.h +0 -100
- data/ext/v8/upstream/2.3.3/src/cpu-profiler.cc +0 -538
- data/ext/v8/upstream/2.3.3/src/cpu-profiler.h +0 -285
- data/ext/v8/upstream/2.3.3/src/d8-debug.cc +0 -356
- data/ext/v8/upstream/2.3.3/src/d8-debug.h +0 -155
- data/ext/v8/upstream/2.3.3/src/d8.cc +0 -783
- data/ext/v8/upstream/2.3.3/src/d8.h +0 -227
- data/ext/v8/upstream/2.3.3/src/d8.js +0 -1683
- data/ext/v8/upstream/2.3.3/src/data-flow.cc +0 -758
- data/ext/v8/upstream/2.3.3/src/data-flow.h +0 -278
- data/ext/v8/upstream/2.3.3/src/date.js +0 -1059
- data/ext/v8/upstream/2.3.3/src/dateparser-inl.h +0 -123
- data/ext/v8/upstream/2.3.3/src/dateparser.h +0 -244
- data/ext/v8/upstream/2.3.3/src/debug-agent.cc +0 -427
- data/ext/v8/upstream/2.3.3/src/debug-agent.h +0 -129
- data/ext/v8/upstream/2.3.3/src/debug-debugger.js +0 -2227
- data/ext/v8/upstream/2.3.3/src/debug.cc +0 -3005
- data/ext/v8/upstream/2.3.3/src/debug.h +0 -993
- data/ext/v8/upstream/2.3.3/src/disassembler.cc +0 -312
- data/ext/v8/upstream/2.3.3/src/double.h +0 -169
- data/ext/v8/upstream/2.3.3/src/dtoa-config.c +0 -92
- data/ext/v8/upstream/2.3.3/src/dtoa.cc +0 -77
- data/ext/v8/upstream/2.3.3/src/dtoa.h +0 -81
- data/ext/v8/upstream/2.3.3/src/execution.cc +0 -809
- data/ext/v8/upstream/2.3.3/src/execution.h +0 -336
- data/ext/v8/upstream/2.3.3/src/factory.cc +0 -1003
- data/ext/v8/upstream/2.3.3/src/factory.h +0 -410
- data/ext/v8/upstream/2.3.3/src/fast-codegen.cc +0 -746
- data/ext/v8/upstream/2.3.3/src/fast-codegen.h +0 -161
- data/ext/v8/upstream/2.3.3/src/fast-dtoa.cc +0 -505
- data/ext/v8/upstream/2.3.3/src/fast-dtoa.h +0 -58
- data/ext/v8/upstream/2.3.3/src/flag-definitions.h +0 -455
- data/ext/v8/upstream/2.3.3/src/flags.cc +0 -551
- data/ext/v8/upstream/2.3.3/src/flags.h +0 -81
- data/ext/v8/upstream/2.3.3/src/flow-graph.cc +0 -763
- data/ext/v8/upstream/2.3.3/src/flow-graph.h +0 -180
- data/ext/v8/upstream/2.3.3/src/frame-element.h +0 -273
- data/ext/v8/upstream/2.3.3/src/frames-inl.h +0 -217
- data/ext/v8/upstream/2.3.3/src/frames.cc +0 -826
- data/ext/v8/upstream/2.3.3/src/frames.h +0 -682
- data/ext/v8/upstream/2.3.3/src/full-codegen.cc +0 -1443
- data/ext/v8/upstream/2.3.3/src/full-codegen.h +0 -548
- data/ext/v8/upstream/2.3.3/src/func-name-inferrer.cc +0 -76
- data/ext/v8/upstream/2.3.3/src/func-name-inferrer.h +0 -135
- data/ext/v8/upstream/2.3.3/src/global-handles.cc +0 -520
- data/ext/v8/upstream/2.3.3/src/global-handles.h +0 -180
- data/ext/v8/upstream/2.3.3/src/globals.h +0 -669
- data/ext/v8/upstream/2.3.3/src/handles-inl.h +0 -76
- data/ext/v8/upstream/2.3.3/src/handles.cc +0 -825
- data/ext/v8/upstream/2.3.3/src/handles.h +0 -393
- data/ext/v8/upstream/2.3.3/src/hashmap.cc +0 -226
- data/ext/v8/upstream/2.3.3/src/hashmap.h +0 -120
- data/ext/v8/upstream/2.3.3/src/heap-inl.h +0 -493
- data/ext/v8/upstream/2.3.3/src/heap-profiler.cc +0 -779
- data/ext/v8/upstream/2.3.3/src/heap-profiler.h +0 -323
- data/ext/v8/upstream/2.3.3/src/heap.cc +0 -4994
- data/ext/v8/upstream/2.3.3/src/heap.h +0 -1984
- data/ext/v8/upstream/2.3.3/src/ia32/assembler-ia32-inl.h +0 -360
- data/ext/v8/upstream/2.3.3/src/ia32/assembler-ia32.cc +0 -2600
- data/ext/v8/upstream/2.3.3/src/ia32/assembler-ia32.h +0 -969
- data/ext/v8/upstream/2.3.3/src/ia32/builtins-ia32.cc +0 -1261
- data/ext/v8/upstream/2.3.3/src/ia32/codegen-ia32.cc +0 -13968
- data/ext/v8/upstream/2.3.3/src/ia32/codegen-ia32.h +0 -1097
- data/ext/v8/upstream/2.3.3/src/ia32/cpu-ia32.cc +0 -83
- data/ext/v8/upstream/2.3.3/src/ia32/debug-ia32.cc +0 -309
- data/ext/v8/upstream/2.3.3/src/ia32/disasm-ia32.cc +0 -1471
- data/ext/v8/upstream/2.3.3/src/ia32/fast-codegen-ia32.cc +0 -954
- data/ext/v8/upstream/2.3.3/src/ia32/fast-codegen-ia32.h +0 -155
- data/ext/v8/upstream/2.3.3/src/ia32/frames-ia32.cc +0 -115
- data/ext/v8/upstream/2.3.3/src/ia32/frames-ia32.h +0 -135
- data/ext/v8/upstream/2.3.3/src/ia32/full-codegen-ia32.cc +0 -3281
- data/ext/v8/upstream/2.3.3/src/ia32/ic-ia32.cc +0 -1966
- data/ext/v8/upstream/2.3.3/src/ia32/macro-assembler-ia32.cc +0 -1610
- data/ext/v8/upstream/2.3.3/src/ia32/macro-assembler-ia32.h +0 -610
- data/ext/v8/upstream/2.3.3/src/ia32/regexp-macro-assembler-ia32.cc +0 -1247
- data/ext/v8/upstream/2.3.3/src/ia32/regexp-macro-assembler-ia32.h +0 -214
- data/ext/v8/upstream/2.3.3/src/ia32/simulator-ia32.h +0 -62
- data/ext/v8/upstream/2.3.3/src/ia32/stub-cache-ia32.cc +0 -2750
- data/ext/v8/upstream/2.3.3/src/ia32/virtual-frame-ia32.cc +0 -1334
- data/ext/v8/upstream/2.3.3/src/ia32/virtual-frame-ia32.h +0 -627
- data/ext/v8/upstream/2.3.3/src/ic-inl.h +0 -120
- data/ext/v8/upstream/2.3.3/src/ic.cc +0 -1827
- data/ext/v8/upstream/2.3.3/src/ic.h +0 -515
- data/ext/v8/upstream/2.3.3/src/interpreter-irregexp.cc +0 -646
- data/ext/v8/upstream/2.3.3/src/json.js +0 -268
- data/ext/v8/upstream/2.3.3/src/jsregexp.cc +0 -5283
- data/ext/v8/upstream/2.3.3/src/jsregexp.h +0 -1463
- data/ext/v8/upstream/2.3.3/src/jump-target-heavy.cc +0 -429
- data/ext/v8/upstream/2.3.3/src/jump-target-heavy.h +0 -244
- data/ext/v8/upstream/2.3.3/src/jump-target-light.cc +0 -110
- data/ext/v8/upstream/2.3.3/src/jump-target-light.h +0 -192
- data/ext/v8/upstream/2.3.3/src/list-inl.h +0 -166
- data/ext/v8/upstream/2.3.3/src/list.h +0 -159
- data/ext/v8/upstream/2.3.3/src/liveedit-debugger.js +0 -1057
- data/ext/v8/upstream/2.3.3/src/liveedit.cc +0 -1480
- data/ext/v8/upstream/2.3.3/src/liveedit.h +0 -170
- data/ext/v8/upstream/2.3.3/src/log-utils.cc +0 -497
- data/ext/v8/upstream/2.3.3/src/log-utils.h +0 -289
- data/ext/v8/upstream/2.3.3/src/log.cc +0 -1561
- data/ext/v8/upstream/2.3.3/src/log.h +0 -384
- data/ext/v8/upstream/2.3.3/src/macro-assembler.h +0 -86
- data/ext/v8/upstream/2.3.3/src/macros.py +0 -177
- data/ext/v8/upstream/2.3.3/src/mark-compact.cc +0 -2330
- data/ext/v8/upstream/2.3.3/src/mark-compact.h +0 -451
- data/ext/v8/upstream/2.3.3/src/math.js +0 -264
- data/ext/v8/upstream/2.3.3/src/memory.h +0 -74
- data/ext/v8/upstream/2.3.3/src/messages.cc +0 -183
- data/ext/v8/upstream/2.3.3/src/messages.h +0 -113
- data/ext/v8/upstream/2.3.3/src/messages.js +0 -982
- data/ext/v8/upstream/2.3.3/src/mips/assembler-mips.h +0 -668
- data/ext/v8/upstream/2.3.3/src/mips/builtins-mips.cc +0 -205
- data/ext/v8/upstream/2.3.3/src/mips/codegen-mips.h +0 -434
- data/ext/v8/upstream/2.3.3/src/mips/debug-mips.cc +0 -131
- data/ext/v8/upstream/2.3.3/src/mips/frames-mips.cc +0 -102
- data/ext/v8/upstream/2.3.3/src/mips/ic-mips.cc +0 -220
- data/ext/v8/upstream/2.3.3/src/mips/simulator-mips.cc +0 -1651
- data/ext/v8/upstream/2.3.3/src/mips/simulator-mips.h +0 -311
- data/ext/v8/upstream/2.3.3/src/mips/stub-cache-mips.cc +0 -403
- data/ext/v8/upstream/2.3.3/src/mirror-debugger.js +0 -2380
- data/ext/v8/upstream/2.3.3/src/objects-debug.cc +0 -1366
- data/ext/v8/upstream/2.3.3/src/objects-inl.h +0 -3333
- data/ext/v8/upstream/2.3.3/src/objects.cc +0 -8820
- data/ext/v8/upstream/2.3.3/src/objects.h +0 -5373
- data/ext/v8/upstream/2.3.3/src/oprofile-agent.cc +0 -108
- data/ext/v8/upstream/2.3.3/src/oprofile-agent.h +0 -77
- data/ext/v8/upstream/2.3.3/src/parser.cc +0 -5207
- data/ext/v8/upstream/2.3.3/src/parser.h +0 -197
- data/ext/v8/upstream/2.3.3/src/platform-freebsd.cc +0 -667
- data/ext/v8/upstream/2.3.3/src/platform-linux.cc +0 -862
- data/ext/v8/upstream/2.3.3/src/platform-macos.cc +0 -665
- data/ext/v8/upstream/2.3.3/src/platform-nullos.cc +0 -454
- data/ext/v8/upstream/2.3.3/src/platform-openbsd.cc +0 -622
- data/ext/v8/upstream/2.3.3/src/platform-posix.cc +0 -362
- data/ext/v8/upstream/2.3.3/src/platform-solaris.cc +0 -653
- data/ext/v8/upstream/2.3.3/src/platform-win32.cc +0 -1911
- data/ext/v8/upstream/2.3.3/src/platform.h +0 -577
- data/ext/v8/upstream/2.3.3/src/powers-ten.h +0 -2461
- data/ext/v8/upstream/2.3.3/src/prettyprinter.cc +0 -1531
- data/ext/v8/upstream/2.3.3/src/prettyprinter.h +0 -221
- data/ext/v8/upstream/2.3.3/src/profile-generator-inl.h +0 -148
- data/ext/v8/upstream/2.3.3/src/profile-generator.cc +0 -1830
- data/ext/v8/upstream/2.3.3/src/profile-generator.h +0 -853
- data/ext/v8/upstream/2.3.3/src/property.cc +0 -96
- data/ext/v8/upstream/2.3.3/src/property.h +0 -315
- data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-irregexp.cc +0 -464
- data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-irregexp.h +0 -141
- data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-tracer.cc +0 -356
- data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-tracer.h +0 -103
- data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler.cc +0 -261
- data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler.h +0 -228
- data/ext/v8/upstream/2.3.3/src/regexp.js +0 -549
- data/ext/v8/upstream/2.3.3/src/rewriter.cc +0 -1038
- data/ext/v8/upstream/2.3.3/src/rewriter.h +0 -54
- data/ext/v8/upstream/2.3.3/src/runtime.cc +0 -10599
- data/ext/v8/upstream/2.3.3/src/runtime.h +0 -459
- data/ext/v8/upstream/2.3.3/src/runtime.js +0 -629
- data/ext/v8/upstream/2.3.3/src/scanner.cc +0 -1346
- data/ext/v8/upstream/2.3.3/src/scanner.h +0 -503
- data/ext/v8/upstream/2.3.3/src/scopeinfo.cc +0 -637
- data/ext/v8/upstream/2.3.3/src/scopeinfo.h +0 -233
- data/ext/v8/upstream/2.3.3/src/scopes.cc +0 -962
- data/ext/v8/upstream/2.3.3/src/scopes.h +0 -400
- data/ext/v8/upstream/2.3.3/src/serialize.cc +0 -1461
- data/ext/v8/upstream/2.3.3/src/serialize.h +0 -581
- data/ext/v8/upstream/2.3.3/src/spaces-inl.h +0 -483
- data/ext/v8/upstream/2.3.3/src/spaces.cc +0 -2901
- data/ext/v8/upstream/2.3.3/src/spaces.h +0 -2197
- data/ext/v8/upstream/2.3.3/src/string-stream.cc +0 -584
- data/ext/v8/upstream/2.3.3/src/string-stream.h +0 -189
- data/ext/v8/upstream/2.3.3/src/string.js +0 -1006
- data/ext/v8/upstream/2.3.3/src/stub-cache.cc +0 -1379
- data/ext/v8/upstream/2.3.3/src/stub-cache.h +0 -756
- data/ext/v8/upstream/2.3.3/src/third_party/dtoa/COPYING +0 -15
- data/ext/v8/upstream/2.3.3/src/third_party/dtoa/dtoa.c +0 -3334
- data/ext/v8/upstream/2.3.3/src/token.cc +0 -56
- data/ext/v8/upstream/2.3.3/src/token.h +0 -270
- data/ext/v8/upstream/2.3.3/src/top.cc +0 -1067
- data/ext/v8/upstream/2.3.3/src/top.h +0 -463
- data/ext/v8/upstream/2.3.3/src/type-info.cc +0 -53
- data/ext/v8/upstream/2.3.3/src/type-info.h +0 -244
- data/ext/v8/upstream/2.3.3/src/unicode-inl.h +0 -238
- data/ext/v8/upstream/2.3.3/src/unicode.cc +0 -749
- data/ext/v8/upstream/2.3.3/src/unicode.h +0 -279
- data/ext/v8/upstream/2.3.3/src/uri.js +0 -415
- data/ext/v8/upstream/2.3.3/src/utils.cc +0 -285
- data/ext/v8/upstream/2.3.3/src/utils.h +0 -745
- data/ext/v8/upstream/2.3.3/src/v8-counters.h +0 -250
- data/ext/v8/upstream/2.3.3/src/v8.cc +0 -228
- data/ext/v8/upstream/2.3.3/src/v8.h +0 -121
- data/ext/v8/upstream/2.3.3/src/v8natives.js +0 -1188
- data/ext/v8/upstream/2.3.3/src/v8threads.cc +0 -461
- data/ext/v8/upstream/2.3.3/src/v8threads.h +0 -159
- data/ext/v8/upstream/2.3.3/src/variables.cc +0 -119
- data/ext/v8/upstream/2.3.3/src/variables.h +0 -205
- data/ext/v8/upstream/2.3.3/src/version.cc +0 -88
- data/ext/v8/upstream/2.3.3/src/virtual-frame-heavy-inl.h +0 -192
- data/ext/v8/upstream/2.3.3/src/virtual-frame.h +0 -46
- data/ext/v8/upstream/2.3.3/src/vm-state-inl.h +0 -137
- data/ext/v8/upstream/2.3.3/src/vm-state.cc +0 -39
- data/ext/v8/upstream/2.3.3/src/vm-state.h +0 -77
- data/ext/v8/upstream/2.3.3/src/x64/assembler-x64-inl.h +0 -400
- data/ext/v8/upstream/2.3.3/src/x64/assembler-x64.cc +0 -2963
- data/ext/v8/upstream/2.3.3/src/x64/assembler-x64.h +0 -1438
- data/ext/v8/upstream/2.3.3/src/x64/builtins-x64.cc +0 -1296
- data/ext/v8/upstream/2.3.3/src/x64/codegen-x64-inl.h +0 -46
- data/ext/v8/upstream/2.3.3/src/x64/codegen-x64.cc +0 -12491
- data/ext/v8/upstream/2.3.3/src/x64/codegen-x64.h +0 -1090
- data/ext/v8/upstream/2.3.3/src/x64/cpu-x64.cc +0 -83
- data/ext/v8/upstream/2.3.3/src/x64/debug-x64.cc +0 -267
- data/ext/v8/upstream/2.3.3/src/x64/disasm-x64.cc +0 -1696
- data/ext/v8/upstream/2.3.3/src/x64/fast-codegen-x64.cc +0 -250
- data/ext/v8/upstream/2.3.3/src/x64/frames-x64.cc +0 -113
- data/ext/v8/upstream/2.3.3/src/x64/frames-x64.h +0 -125
- data/ext/v8/upstream/2.3.3/src/x64/full-codegen-x64.cc +0 -3270
- data/ext/v8/upstream/2.3.3/src/x64/ic-x64.cc +0 -1907
- data/ext/v8/upstream/2.3.3/src/x64/jump-target-x64.cc +0 -437
- data/ext/v8/upstream/2.3.3/src/x64/macro-assembler-x64.cc +0 -2793
- data/ext/v8/upstream/2.3.3/src/x64/macro-assembler-x64.h +0 -916
- data/ext/v8/upstream/2.3.3/src/x64/regexp-macro-assembler-x64.cc +0 -1374
- data/ext/v8/upstream/2.3.3/src/x64/regexp-macro-assembler-x64.h +0 -277
- data/ext/v8/upstream/2.3.3/src/x64/simulator-x64.h +0 -63
- data/ext/v8/upstream/2.3.3/src/x64/stub-cache-x64.cc +0 -2560
- data/ext/v8/upstream/2.3.3/src/x64/virtual-frame-x64.cc +0 -1264
- data/ext/v8/upstream/2.3.3/src/x64/virtual-frame-x64.h +0 -590
- data/ext/v8/upstream/2.3.3/src/zone-inl.h +0 -82
- data/ext/v8/upstream/2.3.3/src/zone.cc +0 -194
- data/ext/v8/upstream/2.3.3/src/zone.h +0 -221
- data/ext/v8/upstream/2.3.3/tools/codemap.js +0 -270
- data/ext/v8/upstream/2.3.3/tools/csvparser.js +0 -83
- data/ext/v8/upstream/2.3.3/tools/gc-nvp-trace-processor.py +0 -317
- data/ext/v8/upstream/2.3.3/tools/gyp/v8.gyp +0 -749
- data/ext/v8/upstream/2.3.3/tools/linux-tick-processor.py +0 -78
- data/ext/v8/upstream/2.3.3/tools/logreader.js +0 -338
- data/ext/v8/upstream/2.3.3/tools/oprofile/annotate +0 -7
- data/ext/v8/upstream/2.3.3/tools/oprofile/common +0 -19
- data/ext/v8/upstream/2.3.3/tools/oprofile/dump +0 -7
- data/ext/v8/upstream/2.3.3/tools/oprofile/report +0 -7
- data/ext/v8/upstream/2.3.3/tools/oprofile/reset +0 -7
- data/ext/v8/upstream/2.3.3/tools/oprofile/run +0 -14
- data/ext/v8/upstream/2.3.3/tools/oprofile/shutdown +0 -7
- data/ext/v8/upstream/2.3.3/tools/oprofile/start +0 -7
- data/ext/v8/upstream/2.3.3/tools/presubmit.py +0 -299
- data/ext/v8/upstream/2.3.3/tools/profile.js +0 -691
- data/ext/v8/upstream/2.3.3/tools/profile_view.js +0 -224
- data/ext/v8/upstream/2.3.3/tools/splaytree.js +0 -322
- data/ext/v8/upstream/2.3.3/tools/splaytree.py +0 -226
- data/ext/v8/upstream/2.3.3/tools/tickprocessor.js +0 -862
- data/ext/v8/upstream/2.3.3/tools/tickprocessor.py +0 -571
- data/ext/v8/upstream/2.3.3/tools/utils.py +0 -88
- data/ext/v8/upstream/2.3.3/tools/visual_studio/README.txt +0 -71
- data/ext/v8/upstream/2.3.3/tools/visual_studio/common.vsprops +0 -34
- data/ext/v8/upstream/2.3.3/tools/visual_studio/debug.vsprops +0 -17
- data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_base.vcproj +0 -1143
- data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_base_arm.vcproj +0 -1115
- data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_base_x64.vcproj +0 -1096
- data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_shell_sample.vcproj +0 -145
- data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_shell_sample_arm.vcproj +0 -145
- data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_shell_sample_x64.vcproj +0 -161
- data/ext/v8/upstream/2.3.3/tools/windows-tick-processor.bat +0 -29
- data/ext/v8/upstream/2.3.3/tools/windows-tick-processor.py +0 -137
data/ext/v8/upstream/3.1.8/src/list.h
@@ -0,0 +1,164 @@
+// Copyright 2006-2009 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef V8_LIST_H_
+#define V8_LIST_H_
+
+namespace v8 {
+namespace internal {
+
+
+// ----------------------------------------------------------------------------
+// The list is a template for very light-weight lists. We are not
+// using the STL because we want full control over space and speed of
+// the code. This implementation is based on code by Robert Griesemer
+// and Rob Pike.
+//
+// The list is parameterized by the type of its elements (T) and by an
+// allocation policy (P). The policy is used for allocating lists in
+// the C free store or the zone; see zone.h.
+
+// Forward defined as
+// template <typename T, class P = FreeStoreAllocationPolicy> class List;
+template <typename T, class P>
+class List {
+ public:
+
+  List() { Initialize(0); }
+  INLINE(explicit List(int capacity)) { Initialize(capacity); }
+  INLINE(~List()) { DeleteData(data_); }
+
+  // Deallocates memory used by the list and leaves the list in a consistent
+  // empty state.
+  void Free() {
+    DeleteData(data_);
+    Initialize(0);
+  }
+
+  INLINE(void* operator new(size_t size)) {
+    return P::New(static_cast<int>(size));
+  }
+  INLINE(void operator delete(void* p, size_t)) { return P::Delete(p); }
+
+  // Returns a reference to the element at index i. This reference is
+  // not safe to use after operations that can change the list's
+  // backing store (eg, Add).
+  inline T& operator[](int i) const {
+    ASSERT(0 <= i);
+    ASSERT(i < length_);
+    return data_[i];
+  }
+  inline T& at(int i) const { return operator[](i); }
+  inline T& last() const { return at(length_ - 1); }
+  inline T& first() const { return at(0); }
+
+  INLINE(bool is_empty() const) { return length_ == 0; }
+  INLINE(int length() const) { return length_; }
+  INLINE(int capacity() const) { return capacity_; }
+
+  Vector<T> ToVector() { return Vector<T>(data_, length_); }
+
+  Vector<const T> ToConstVector() { return Vector<const T>(data_, length_); }
+
+  // Adds a copy of the given 'element' to the end of the list,
+  // expanding the list if necessary.
+  void Add(const T& element);
+
+  // Add all the elements from the argument list to this list.
+  void AddAll(const List<T, P>& other);
+
+  // Inserts the element at the specific index.
+  void InsertAt(int index, const T& element);
+
+  // Added 'count' elements with the value 'value' and returns a
+  // vector that allows access to the elements. The vector is valid
+  // until the next change is made to this list.
+  Vector<T> AddBlock(T value, int count);
+
+  // Removes the i'th element without deleting it even if T is a
+  // pointer type; moves all elements above i "down". Returns the
+  // removed element. This function's complexity is linear in the
+  // size of the list.
+  T Remove(int i);
+
+  // Remove the given element from the list. Returns whether or not
+  // the input is included in the list in the first place.
+  bool RemoveElement(const T& elm);
+
+  // Removes the last element without deleting it even if T is a
+  // pointer type. Returns the removed element.
+  INLINE(T RemoveLast()) { return Remove(length_ - 1); }
+
+  // Clears the list by setting the length to zero. Even if T is a
+  // pointer type, clearing the list doesn't delete the entries.
+  INLINE(void Clear());
+
+  // Drops all but the first 'pos' elements from the list.
+  INLINE(void Rewind(int pos));
+
+  // Drop the last 'count' elements from the list.
+  INLINE(void RewindBy(int count)) { Rewind(length_ - count); }
+
+  bool Contains(const T& elm) const;
+  int CountOccurrences(const T& elm, int start, int end) const;
+
+  // Iterate through all list entries, starting at index 0.
+  void Iterate(void (*callback)(T* x));
+  template<class Visitor>
+  void Iterate(Visitor* visitor);
+
+  // Sort all list entries (using QuickSort)
+  void Sort(int (*cmp)(const T* x, const T* y));
+  void Sort();
+
+  INLINE(void Initialize(int capacity));
+
+ private:
+  T* data_;
+  int capacity_;
+  int length_;
+
+  INLINE(T* NewData(int n)) { return static_cast<T*>(P::New(n * sizeof(T))); }
+  INLINE(void DeleteData(T* data)) { P::Delete(data); }
+
+  // Increase the capacity of a full list, and add an element.
+  // List must be full already.
+  void ResizeAdd(const T& element);
+
+  // Inlined implementation of ResizeAdd, shared by inlined and
+  // non-inlined versions of ResizeAdd.
+  void ResizeAddInternal(const T& element);
+
+  // Resize the list.
+  void Resize(int new_capacity);
+
+  DISALLOW_COPY_AND_ASSIGN(List);
+};
+
+} }  // namespace v8::internal
+
+#endif  // V8_LIST_H_
@@ -0,0 +1,140 @@
+// Copyright 2010 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef V8_LITHIUM_ALLOCATOR_INL_H_
+#define V8_LITHIUM_ALLOCATOR_INL_H_
+
+#include "lithium-allocator.h"
+
+#if V8_TARGET_ARCH_IA32
+#include "ia32/lithium-ia32.h"
+#elif V8_TARGET_ARCH_X64
+#include "x64/lithium-x64.h"
+#elif V8_TARGET_ARCH_ARM
+#include "arm/lithium-arm.h"
+#else
+#error "Unknown architecture."
+#endif
+
+namespace v8 {
+namespace internal {
+
+bool LAllocator::IsGapAt(int index) { return chunk_->IsGapAt(index); }
+
+
+LInstruction* LAllocator::InstructionAt(int index) {
+  return chunk_->instructions()->at(index);
+}
+
+
+LGap* LAllocator::GapAt(int index) {
+  return chunk_->GetGapAt(index);
+}
+
+
+TempIterator::TempIterator(LInstruction* instr)
+    : instr_(instr),
+      limit_(instr->TempCount()),
+      current_(0) {
+  current_ = AdvanceToNext(0);
+}
+
+
+bool TempIterator::HasNext() { return current_ < limit_; }
+
+
+LOperand* TempIterator::Next() {
+  ASSERT(HasNext());
+  return instr_->TempAt(current_);
+}
+
+
+int TempIterator::AdvanceToNext(int start) {
+  while (start < limit_ && instr_->TempAt(start) == NULL) start++;
+  return start;
+}
+
+
+void TempIterator::Advance() {
+  current_ = AdvanceToNext(current_ + 1);
+}
+
+
+InputIterator::InputIterator(LInstruction* instr)
+    : instr_(instr),
+      limit_(instr->InputCount()),
+      current_(0) {
+  current_ = AdvanceToNext(0);
+}
+
+
+bool InputIterator::HasNext() { return current_ < limit_; }
+
+
+LOperand* InputIterator::Next() {
+  ASSERT(HasNext());
+  return instr_->InputAt(current_);
+}
+
+
+void InputIterator::Advance() {
+  current_ = AdvanceToNext(current_ + 1);
+}
+
+
+int InputIterator::AdvanceToNext(int start) {
+  while (start < limit_ && instr_->InputAt(start)->IsConstantOperand()) start++;
+  return start;
+}
+
+
+UseIterator::UseIterator(LInstruction* instr)
+    : input_iterator_(instr), env_iterator_(instr->environment()) { }
+
+
+bool UseIterator::HasNext() {
+  return input_iterator_.HasNext() || env_iterator_.HasNext();
+}
+
+
+LOperand* UseIterator::Next() {
+  ASSERT(HasNext());
+  return input_iterator_.HasNext()
+      ? input_iterator_.Next()
+      : env_iterator_.Next();
+}
+
+
+void UseIterator::Advance() {
+  input_iterator_.HasNext()
+      ? input_iterator_.Advance()
+      : env_iterator_.Advance();
+}
+
+} } // namespace v8::internal
+
+#endif // V8_LITHIUM_ALLOCATOR_INL_H_
@@ -0,0 +1,2093 @@
|
|
1
|
+
// Copyright 2010 the V8 project authors. All rights reserved.
|
2
|
+
// Redistribution and use in source and binary forms, with or without
|
3
|
+
// modification, are permitted provided that the following conditions are
|
4
|
+
// met:
|
5
|
+
//
|
6
|
+
// * Redistributions of source code must retain the above copyright
|
7
|
+
// notice, this list of conditions and the following disclaimer.
|
8
|
+
// * Redistributions in binary form must reproduce the above
|
9
|
+
// copyright notice, this list of conditions and the following
|
10
|
+
// disclaimer in the documentation and/or other materials provided
|
11
|
+
// with the distribution.
|
12
|
+
// * Neither the name of Google Inc. nor the names of its
|
13
|
+
// contributors may be used to endorse or promote products derived
|
14
|
+
// from this software without specific prior written permission.
|
15
|
+
//
|
16
|
+
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
17
|
+
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
18
|
+
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
19
|
+
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
20
|
+
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
21
|
+
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
22
|
+
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
23
|
+
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
24
|
+
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
25
|
+
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
26
|
+
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
27
|
+
|
28
|
+
#include "lithium-allocator-inl.h"
|
29
|
+
|
30
|
+
#include "hydrogen.h"
|
31
|
+
#include "string-stream.h"
|
32
|
+
|
33
|
+
#if V8_TARGET_ARCH_IA32
|
34
|
+
#include "ia32/lithium-ia32.h"
|
35
|
+
#elif V8_TARGET_ARCH_X64
|
36
|
+
#include "x64/lithium-x64.h"
|
37
|
+
#elif V8_TARGET_ARCH_ARM
|
38
|
+
#include "arm/lithium-arm.h"
|
39
|
+
#else
|
40
|
+
#error "Unknown architecture."
|
41
|
+
#endif
|
42
|
+
|
43
|
+
namespace v8 {
|
44
|
+
namespace internal {
|
45
|
+
|
46
|
+
|
47
|
+
#define DEFINE_OPERAND_CACHE(name, type) \
|
48
|
+
name name::cache[name::kNumCachedOperands]; \
|
49
|
+
void name::SetupCache() { \
|
50
|
+
for (int i = 0; i < kNumCachedOperands; i++) { \
|
51
|
+
cache[i].ConvertTo(type, i); \
|
52
|
+
} \
|
53
|
+
}
|
54
|
+
|
55
|
+
DEFINE_OPERAND_CACHE(LConstantOperand, CONSTANT_OPERAND)
|
56
|
+
DEFINE_OPERAND_CACHE(LStackSlot, STACK_SLOT)
|
57
|
+
DEFINE_OPERAND_CACHE(LDoubleStackSlot, DOUBLE_STACK_SLOT)
|
58
|
+
DEFINE_OPERAND_CACHE(LRegister, REGISTER)
|
59
|
+
DEFINE_OPERAND_CACHE(LDoubleRegister, DOUBLE_REGISTER)
|
60
|
+
|
61
|
+
#undef DEFINE_OPERAND_CACHE
|
62
|
+
|
63
|
+
|
64
|
+
static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
|
65
|
+
return a.Value() < b.Value() ? a : b;
|
66
|
+
}
|
67
|
+
|
68
|
+
|
69
|
+
static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
|
70
|
+
return a.Value() > b.Value() ? a : b;
|
71
|
+
}
|
72
|
+
|
73
|
+
|
74
|
+
UsePosition::UsePosition(LifetimePosition pos, LOperand* operand)
|
75
|
+
: operand_(operand),
|
76
|
+
hint_(NULL),
|
77
|
+
pos_(pos),
|
78
|
+
next_(NULL),
|
79
|
+
requires_reg_(false),
|
80
|
+
register_beneficial_(true) {
|
81
|
+
if (operand_ != NULL && operand_->IsUnallocated()) {
|
82
|
+
LUnallocated* unalloc = LUnallocated::cast(operand_);
|
83
|
+
requires_reg_ = unalloc->HasRegisterPolicy();
|
84
|
+
register_beneficial_ = !unalloc->HasAnyPolicy();
|
85
|
+
}
|
86
|
+
ASSERT(pos_.IsValid());
|
87
|
+
}
|
88
|
+
|
89
|
+
|
90
|
+
bool UsePosition::HasHint() const {
|
91
|
+
return hint_ != NULL && !hint_->IsUnallocated();
|
92
|
+
}
|
93
|
+
|
94
|
+
|
95
|
+
bool UsePosition::RequiresRegister() const {
|
96
|
+
return requires_reg_;
|
97
|
+
}
|
98
|
+
|
99
|
+
|
100
|
+
bool UsePosition::RegisterIsBeneficial() const {
|
101
|
+
return register_beneficial_;
|
102
|
+
}
|
103
|
+
|
104
|
+
|
105
|
+
void UseInterval::SplitAt(LifetimePosition pos) {
|
106
|
+
ASSERT(Contains(pos) && pos.Value() != start().Value());
|
107
|
+
UseInterval* after = new UseInterval(pos, end_);
|
108
|
+
after->next_ = next_;
|
109
|
+
next_ = after;
|
110
|
+
end_ = pos;
|
111
|
+
}
|
112
|
+
|
113
|
+
|
114
|
+
#ifdef DEBUG
|
115
|
+
|
116
|
+
|
117
|
+
void LiveRange::Verify() const {
|
118
|
+
UsePosition* cur = first_pos_;
|
119
|
+
while (cur != NULL) {
|
120
|
+
ASSERT(Start().Value() <= cur->pos().Value() &&
|
121
|
+
cur->pos().Value() <= End().Value());
|
122
|
+
cur = cur->next();
|
123
|
+
}
|
124
|
+
}
|
125
|
+
|
126
|
+
|
127
|
+
bool LiveRange::HasOverlap(UseInterval* target) const {
|
128
|
+
UseInterval* current_interval = first_interval_;
|
129
|
+
while (current_interval != NULL) {
|
130
|
+
// Intervals overlap if the start of one is contained in the other.
|
131
|
+
if (current_interval->Contains(target->start()) ||
|
132
|
+
target->Contains(current_interval->start())) {
|
133
|
+
return true;
|
134
|
+
}
|
135
|
+
current_interval = current_interval->next();
|
136
|
+
}
|
137
|
+
return false;
|
138
|
+
}
|
139
|
+
|
140
|
+
|
141
|
+
#endif
|
142
|
+
|
143
|
+
|
144
|
+
LiveRange::LiveRange(int id)
|
145
|
+
: id_(id),
|
146
|
+
spilled_(false),
|
147
|
+
assigned_register_(kInvalidAssignment),
|
148
|
+
assigned_register_kind_(NONE),
|
149
|
+
last_interval_(NULL),
|
150
|
+
first_interval_(NULL),
|
151
|
+
first_pos_(NULL),
|
152
|
+
parent_(NULL),
|
153
|
+
next_(NULL),
|
154
|
+
current_interval_(NULL),
|
155
|
+
last_processed_use_(NULL),
|
156
|
+
spill_start_index_(kMaxInt) {
|
157
|
+
spill_operand_ = new LUnallocated(LUnallocated::IGNORE);
|
158
|
+
}
|
159
|
+
|
160
|
+
|
161
|
+
void LiveRange::set_assigned_register(int reg, RegisterKind register_kind) {
|
162
|
+
ASSERT(!HasRegisterAssigned() && !IsSpilled());
|
163
|
+
assigned_register_ = reg;
|
164
|
+
assigned_register_kind_ = register_kind;
|
165
|
+
ConvertOperands();
|
166
|
+
}
|
167
|
+
|
168
|
+
|
169
|
+
void LiveRange::MakeSpilled() {
|
170
|
+
ASSERT(!IsSpilled());
|
171
|
+
ASSERT(TopLevel()->HasAllocatedSpillOperand());
|
172
|
+
spilled_ = true;
|
173
|
+
assigned_register_ = kInvalidAssignment;
|
174
|
+
ConvertOperands();
|
175
|
+
}
|
176
|
+
|
177
|
+
|
178
|
+
bool LiveRange::HasAllocatedSpillOperand() const {
|
179
|
+
return spill_operand_ != NULL && !spill_operand_->IsUnallocated();
|
180
|
+
}
|
181
|
+
|
182
|
+
|
183
|
+
void LiveRange::SetSpillOperand(LOperand* operand) {
|
184
|
+
ASSERT(!operand->IsUnallocated());
|
185
|
+
ASSERT(spill_operand_ != NULL);
|
186
|
+
ASSERT(spill_operand_->IsUnallocated());
|
187
|
+
spill_operand_->ConvertTo(operand->kind(), operand->index());
|
188
|
+
}
|
189
|
+
|
190
|
+
|
191
|
+
UsePosition* LiveRange::NextUsePosition(LifetimePosition start) {
|
192
|
+
UsePosition* use_pos = last_processed_use_;
|
193
|
+
if (use_pos == NULL) use_pos = first_pos();
|
194
|
+
while (use_pos != NULL && use_pos->pos().Value() < start.Value()) {
|
195
|
+
use_pos = use_pos->next();
|
196
|
+
}
|
197
|
+
last_processed_use_ = use_pos;
|
198
|
+
return use_pos;
|
199
|
+
}
|
200
|
+
|
201
|
+
|
202
|
+
UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
|
203
|
+
LifetimePosition start) {
|
204
|
+
UsePosition* pos = NextUsePosition(start);
|
205
|
+
while (pos != NULL && !pos->RegisterIsBeneficial()) {
|
206
|
+
pos = pos->next();
|
207
|
+
}
|
208
|
+
return pos;
|
209
|
+
}
|
210
|
+
|
211
|
+
|
212
|
+
UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) {
|
213
|
+
UsePosition* pos = NextUsePosition(start);
|
214
|
+
while (pos != NULL && !pos->RequiresRegister()) {
|
215
|
+
pos = pos->next();
|
216
|
+
}
|
217
|
+
return pos;
|
218
|
+
}
|
219
|
+
|
220
|
+
|
221
|
+
bool LiveRange::CanBeSpilled(LifetimePosition pos) {
|
222
|
+
// TODO(kmillikin): Comment. Now.
|
223
|
+
if (pos.Value() <= Start().Value() && HasRegisterAssigned()) return false;
|
224
|
+
|
225
|
+
// We cannot spill a live range that has a use requiring a register
|
226
|
+
// at the current or the immediate next position.
|
227
|
+
UsePosition* use_pos = NextRegisterPosition(pos);
|
228
|
+
if (use_pos == NULL) return true;
|
229
|
+
return use_pos->pos().Value() > pos.NextInstruction().Value();
|
230
|
+
}
|
231
|
+
|
232
|
+
|
233
|
+
UsePosition* LiveRange::FirstPosWithHint() const {
|
234
|
+
UsePosition* pos = first_pos_;
|
235
|
+
while (pos != NULL && !pos->HasHint()) pos = pos->next();
|
236
|
+
return pos;
|
237
|
+
}
|
238
|
+
|
239
|
+
|
240
|
+
LOperand* LiveRange::CreateAssignedOperand() {
|
241
|
+
LOperand* op = NULL;
|
242
|
+
if (HasRegisterAssigned()) {
|
243
|
+
ASSERT(!IsSpilled());
|
244
|
+
if (IsDouble()) {
|
245
|
+
op = LDoubleRegister::Create(assigned_register());
|
246
|
+
} else {
|
247
|
+
op = LRegister::Create(assigned_register());
|
248
|
+
}
|
249
|
+
} else if (IsSpilled()) {
|
250
|
+
ASSERT(!HasRegisterAssigned());
|
251
|
+
op = TopLevel()->GetSpillOperand();
|
252
|
+
ASSERT(!op->IsUnallocated());
|
253
|
+
} else {
|
254
|
+
LUnallocated* unalloc = new LUnallocated(LUnallocated::NONE);
|
255
|
+
unalloc->set_virtual_register(id_);
|
256
|
+
op = unalloc;
|
257
|
+
}
|
258
|
+
return op;
|
259
|
+
}
|
260
|
+
|
261
|
+
|
262
|
+
UseInterval* LiveRange::FirstSearchIntervalForPosition(
|
263
|
+
LifetimePosition position) const {
|
264
|
+
if (current_interval_ == NULL) return first_interval_;
|
265
|
+
if (current_interval_->start().Value() > position.Value()) {
|
266
|
+
current_interval_ = NULL;
|
267
|
+
return first_interval_;
|
268
|
+
}
|
269
|
+
return current_interval_;
|
270
|
+
}
|
271
|
+
|
272
|
+
|
273
|
+
void LiveRange::AdvanceLastProcessedMarker(
|
274
|
+
UseInterval* to_start_of, LifetimePosition but_not_past) const {
|
275
|
+
if (to_start_of == NULL) return;
|
276
|
+
if (to_start_of->start().Value() > but_not_past.Value()) return;
|
277
|
+
LifetimePosition start =
|
278
|
+
current_interval_ == NULL ? LifetimePosition::Invalid()
|
279
|
+
: current_interval_->start();
|
280
|
+
if (to_start_of->start().Value() > start.Value()) {
|
281
|
+
current_interval_ = to_start_of;
|
282
|
+
}
|
283
|
+
}
|
284
|
+
|
285
|
+
|
286
|
+
void LiveRange::SplitAt(LifetimePosition position, LiveRange* result) {
|
287
|
+
ASSERT(Start().Value() < position.Value());
|
288
|
+
ASSERT(result->IsEmpty());
|
289
|
+
// Find the last interval that ends before the position. If the
|
290
|
+
// position is contained in one of the intervals in the chain, we
|
291
|
+
// split that interval and use the first part.
|
292
|
+
UseInterval* current = FirstSearchIntervalForPosition(position);
|
293
|
+
|
294
|
+
// If the split position coincides with the beginning of a use interval
|
295
|
+
// we need to split use positons in a special way.
|
296
|
+
bool split_at_start = false;
|
297
|
+
|
298
|
+
while (current != NULL) {
|
299
|
+
if (current->Contains(position)) {
|
300
|
+
current->SplitAt(position);
|
301
|
+
break;
|
302
|
+
}
|
303
|
+
UseInterval* next = current->next();
|
304
|
+
if (next->start().Value() >= position.Value()) {
|
305
|
+
split_at_start = (next->start().Value() == position.Value());
|
306
|
+
break;
|
307
|
+
}
|
308
|
+
current = next;
|
309
|
+
}
|
310
|
+
|
311
|
+
// Partition original use intervals to the two live ranges.
|
312
|
+
UseInterval* before = current;
|
313
|
+
UseInterval* after = before->next();
|
314
|
+
result->last_interval_ = (last_interval_ == before)
|
315
|
+
? after // Only interval in the range after split.
|
316
|
+
: last_interval_; // Last interval of the original range.
|
317
|
+
result->first_interval_ = after;
|
318
|
+
last_interval_ = before;
|
319
|
+
|
320
|
+
// Find the last use position before the split and the first use
|
321
|
+
// position after it.
|
322
|
+
UsePosition* use_after = first_pos_;
|
323
|
+
UsePosition* use_before = NULL;
|
324
|
+
if (split_at_start) {
|
325
|
+
// The split position coincides with the beginning of a use interval (the
|
326
|
+
// end of a lifetime hole). Use at this position should be attributed to
|
327
|
+
// the split child because split child owns use interval covering it.
|
328
|
+
while (use_after != NULL && use_after->pos().Value() < position.Value()) {
|
329
|
+
use_before = use_after;
|
330
|
+
use_after = use_after->next();
|
331
|
+
}
|
332
|
+
} else {
|
333
|
+
while (use_after != NULL && use_after->pos().Value() <= position.Value()) {
|
334
|
+
use_before = use_after;
|
335
|
+
use_after = use_after->next();
|
336
|
+
}
|
337
|
+
}
|
338
|
+
|
339
|
+
// Partition original use positions to the two live ranges.
|
340
|
+
if (use_before != NULL) {
|
341
|
+
use_before->next_ = NULL;
|
342
|
+
} else {
|
343
|
+
first_pos_ = NULL;
|
344
|
+
}
|
345
|
+
result->first_pos_ = use_after;
|
346
|
+
|
347
|
+
// Link the new live range in the chain before any of the other
|
348
|
+
// ranges linked from the range before the split.
|
349
|
+
result->parent_ = (parent_ == NULL) ? this : parent_;
|
350
|
+
result->next_ = next_;
|
351
|
+
next_ = result;
|
352
|
+
|
353
|
+
#ifdef DEBUG
|
354
|
+
Verify();
|
355
|
+
result->Verify();
|
356
|
+
#endif
|
357
|
+
}
|
358
|
+
|
359
|
+
|
360
|
+
// This implements an ordering on live ranges so that they are ordered by their
|
361
|
+
// start positions. This is needed for the correctness of the register
|
362
|
+
// allocation algorithm. If two live ranges start at the same offset then there
|
363
|
+
// is a tie breaker based on where the value is first used. This part of the
|
364
|
+
// ordering is merely a heuristic.
|
365
|
+
bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
|
366
|
+
LifetimePosition start = Start();
|
367
|
+
LifetimePosition other_start = other->Start();
|
368
|
+
if (start.Value() == other_start.Value()) {
|
369
|
+
UsePosition* pos = FirstPosWithHint();
|
370
|
+
if (pos == NULL) return false;
|
371
|
+
UsePosition* other_pos = other->first_pos();
|
372
|
+
if (other_pos == NULL) return true;
|
373
|
+
return pos->pos().Value() < other_pos->pos().Value();
|
374
|
+
}
|
375
|
+
return start.Value() < other_start.Value();
|
376
|
+
}
|
377
|
+
|
378
|
+
|
379
|
+
void LiveRange::ShortenTo(LifetimePosition start) {
|
380
|
+
LAllocator::TraceAlloc("Shorten live range %d to [%d\n", id_, start.Value());
|
381
|
+
ASSERT(first_interval_ != NULL);
|
382
|
+
ASSERT(first_interval_->start().Value() <= start.Value());
|
383
|
+
ASSERT(start.Value() < first_interval_->end().Value());
|
384
|
+
first_interval_->set_start(start);
|
385
|
+
}
|
386
|
+
|
387
|
+
|
388
|
+
void LiveRange::EnsureInterval(LifetimePosition start, LifetimePosition end) {
|
389
|
+
LAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n",
|
390
|
+
id_,
|
391
|
+
start.Value(),
|
392
|
+
end.Value());
|
393
|
+
LifetimePosition new_end = end;
|
394
|
+
while (first_interval_ != NULL &&
|
395
|
+
first_interval_->start().Value() <= end.Value()) {
|
396
|
+
if (first_interval_->end().Value() > end.Value()) {
|
397
|
+
new_end = first_interval_->end();
|
398
|
+
}
|
399
|
+
first_interval_ = first_interval_->next();
|
400
|
+
}
|
401
|
+
|
402
|
+
UseInterval* new_interval = new UseInterval(start, new_end);
|
403
|
+
new_interval->next_ = first_interval_;
|
404
|
+
first_interval_ = new_interval;
|
405
|
+
if (new_interval->next() == NULL) {
|
406
|
+
last_interval_ = new_interval;
|
407
|
+
}
|
408
|
+
}
|
409
|
+
|
410
|
+
|
411
|
+
void LiveRange::AddUseInterval(LifetimePosition start, LifetimePosition end) {
|
412
|
+
LAllocator::TraceAlloc("Add to live range %d interval [%d %d[\n",
|
413
|
+
id_,
|
414
|
+
start.Value(),
|
415
|
+
end.Value());
|
416
|
+
if (first_interval_ == NULL) {
|
417
|
+
UseInterval* interval = new UseInterval(start, end);
|
418
|
+
first_interval_ = interval;
|
419
|
+
last_interval_ = interval;
|
420
|
+
} else {
|
421
|
+
if (end.Value() == first_interval_->start().Value()) {
|
422
|
+
first_interval_->set_start(start);
|
423
|
+
} else if (end.Value() < first_interval_->start().Value()) {
|
424
|
+
UseInterval* interval = new UseInterval(start, end);
|
425
|
+
interval->set_next(first_interval_);
|
426
|
+
first_interval_ = interval;
|
427
|
+
} else {
|
428
|
+
// Order of instruction's processing (see ProcessInstructions) guarantees
|
429
|
+
// that each new use interval either precedes or intersects with
|
430
|
+
// last added interval.
|
431
|
+
ASSERT(start.Value() < first_interval_->end().Value());
|
432
|
+
first_interval_->start_ = Min(start, first_interval_->start_);
|
433
|
+
first_interval_->end_ = Max(end, first_interval_->end_);
|
434
|
+
}
|
435
|
+
}
|
436
|
+
}
|
437
|
+
|
438
|
+
|
439
|
+
UsePosition* LiveRange::AddUsePosition(LifetimePosition pos,
|
440
|
+
LOperand* operand) {
|
441
|
+
LAllocator::TraceAlloc("Add to live range %d use position %d\n",
|
442
|
+
id_,
|
443
|
+
pos.Value());
|
444
|
+
UsePosition* use_pos = new UsePosition(pos, operand);
|
445
|
+
UsePosition* prev = NULL;
|
446
|
+
UsePosition* current = first_pos_;
|
447
|
+
while (current != NULL && current->pos().Value() < pos.Value()) {
|
448
|
+
prev = current;
|
449
|
+
current = current->next();
|
450
|
+
}
|
451
|
+
|
452
|
+
if (prev == NULL) {
|
453
|
+
use_pos->set_next(first_pos_);
|
454
|
+
first_pos_ = use_pos;
|
455
|
+
} else {
|
456
|
+
use_pos->next_ = prev->next_;
|
457
|
+
prev->next_ = use_pos;
|
458
|
+
}
|
459
|
+
|
460
|
+
return use_pos;
|
461
|
+
}
|
462
|
+
|
463
|
+
|
464
|
+
void LiveRange::ConvertOperands() {
|
465
|
+
LOperand* op = CreateAssignedOperand();
|
466
|
+
UsePosition* use_pos = first_pos();
|
467
|
+
while (use_pos != NULL) {
|
468
|
+
ASSERT(Start().Value() <= use_pos->pos().Value() &&
|
469
|
+
use_pos->pos().Value() <= End().Value());
|
470
|
+
|
471
|
+
if (use_pos->HasOperand()) {
|
472
|
+
ASSERT(op->IsRegister() || op->IsDoubleRegister() ||
|
473
|
+
!use_pos->RequiresRegister());
|
474
|
+
use_pos->operand()->ConvertTo(op->kind(), op->index());
|
475
|
+
}
|
476
|
+
use_pos = use_pos->next();
|
477
|
+
}
|
478
|
+
}
|
479
|
+
|
480
|
+
|
481
|
+
bool LiveRange::CanCover(LifetimePosition position) const {
|
482
|
+
if (IsEmpty()) return false;
|
483
|
+
return Start().Value() <= position.Value() &&
|
484
|
+
position.Value() < End().Value();
|
485
|
+
}
|
486
|
+
|
487
|
+
|
488
|
+
bool LiveRange::Covers(LifetimePosition position) {
|
489
|
+
if (!CanCover(position)) return false;
|
490
|
+
UseInterval* start_search = FirstSearchIntervalForPosition(position);
|
491
|
+
for (UseInterval* interval = start_search;
|
492
|
+
interval != NULL;
|
493
|
+
interval = interval->next()) {
|
494
|
+
ASSERT(interval->next() == NULL ||
|
495
|
+
interval->next()->start().Value() >= interval->start().Value());
|
496
|
+
AdvanceLastProcessedMarker(interval, position);
|
497
|
+
if (interval->Contains(position)) return true;
|
498
|
+
if (interval->start().Value() > position.Value()) return false;
|
499
|
+
}
|
500
|
+
return false;
|
501
|
+
}
|
502
|
+
|
503
|
+
|
504
|
+
LifetimePosition LiveRange::FirstIntersection(LiveRange* other) {
|
505
|
+
UseInterval* b = other->first_interval();
|
506
|
+
if (b == NULL) return LifetimePosition::Invalid();
|
507
|
+
LifetimePosition advance_last_processed_up_to = b->start();
|
508
|
+
UseInterval* a = FirstSearchIntervalForPosition(b->start());
|
509
|
+
while (a != NULL && b != NULL) {
|
510
|
+
if (a->start().Value() > other->End().Value()) break;
|
511
|
+
if (b->start().Value() > End().Value()) break;
|
512
|
+
LifetimePosition cur_intersection = a->Intersect(b);
|
513
|
+
if (cur_intersection.IsValid()) {
|
514
|
+
return cur_intersection;
|
515
|
+
}
|
516
|
+
if (a->start().Value() < b->start().Value()) {
|
517
|
+
a = a->next();
|
518
|
+
if (a == NULL || a->start().Value() > other->End().Value()) break;
|
519
|
+
AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
|
520
|
+
} else {
|
521
|
+
b = b->next();
|
522
|
+
}
|
523
|
+
}
|
524
|
+
return LifetimePosition::Invalid();
|
525
|
+
}
|
526
|
+
|
527
|
+
|
528
|
+
void LAllocator::InitializeLivenessAnalysis() {
|
529
|
+
// Initialize the live_in sets for each block to NULL.
|
530
|
+
int block_count = graph_->blocks()->length();
|
531
|
+
live_in_sets_.Initialize(block_count);
|
532
|
+
live_in_sets_.AddBlock(NULL, block_count);
|
533
|
+
}
|
534
|
+
|
535
|
+
|
536
|
+
BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) {
|
537
|
+
// Compute live out for the given block, except not including backward
|
538
|
+
// successor edges.
|
539
|
+
BitVector* live_out = new BitVector(next_virtual_register_);
|
540
|
+
|
541
|
+
// Process all successor blocks.
|
542
|
+
HBasicBlock* successor = block->end()->FirstSuccessor();
|
543
|
+
while (successor != NULL) {
|
544
|
+
// Add values live on entry to the successor. Note the successor's
|
545
|
+
// live_in will not be computed yet for backwards edges.
|
546
|
+
BitVector* live_in = live_in_sets_[successor->block_id()];
|
547
|
+
if (live_in != NULL) live_out->Union(*live_in);
|
548
|
+
|
549
|
+
// All phi input operands corresponding to this successor edge are live
|
550
|
+
// out from this block.
|
551
|
+
int index = successor->PredecessorIndexOf(block);
|
552
|
+
const ZoneList<HPhi*>* phis = successor->phis();
|
553
|
+
for (int i = 0; i < phis->length(); ++i) {
|
554
|
+
HPhi* phi = phis->at(i);
|
555
|
+
if (!phi->OperandAt(index)->IsConstant()) {
|
556
|
+
live_out->Add(phi->OperandAt(index)->id());
|
557
|
+
}
|
558
|
+
}
|
559
|
+
|
560
|
+
// Check if we are done with second successor.
|
561
|
+
if (successor == block->end()->SecondSuccessor()) break;
|
562
|
+
|
563
|
+
successor = block->end()->SecondSuccessor();
|
564
|
+
}
|
565
|
+
|
566
|
+
return live_out;
|
567
|
+
}
|
568
|
+
|
569
|
+
|
570
|
+
void LAllocator::AddInitialIntervals(HBasicBlock* block,
|
571
|
+
BitVector* live_out) {
|
572
|
+
// Add an interval that includes the entire block to the live range for
|
573
|
+
// each live_out value.
|
574
|
+
LifetimePosition start = LifetimePosition::FromInstructionIndex(
|
575
|
+
block->first_instruction_index());
|
576
|
+
LifetimePosition end = LifetimePosition::FromInstructionIndex(
|
577
|
+
block->last_instruction_index()).NextInstruction();
|
578
|
+
BitVector::Iterator iterator(live_out);
|
579
|
+
while (!iterator.Done()) {
|
580
|
+
int operand_index = iterator.Current();
|
581
|
+
LiveRange* range = LiveRangeFor(operand_index);
|
582
|
+
range->AddUseInterval(start, end);
|
583
|
+
iterator.Advance();
|
584
|
+
}
|
585
|
+
}
|
586
|
+
|
587
|
+
|
588
|
+
int LAllocator::FixedDoubleLiveRangeID(int index) {
|
589
|
+
return -index - 1 - Register::kNumAllocatableRegisters;
|
590
|
+
}
|
591
|
+
|
592
|
+
|
593
|
+
LOperand* LAllocator::AllocateFixed(LUnallocated* operand,
|
594
|
+
int pos,
|
595
|
+
bool is_tagged) {
|
596
|
+
TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register());
|
597
|
+
ASSERT(operand->HasFixedPolicy());
|
598
|
+
if (operand->policy() == LUnallocated::FIXED_SLOT) {
|
599
|
+
operand->ConvertTo(LOperand::STACK_SLOT, operand->fixed_index());
|
600
|
+
} else if (operand->policy() == LUnallocated::FIXED_REGISTER) {
|
601
|
+
int reg_index = operand->fixed_index();
|
602
|
+
operand->ConvertTo(LOperand::REGISTER, reg_index);
|
603
|
+
} else if (operand->policy() == LUnallocated::FIXED_DOUBLE_REGISTER) {
|
604
|
+
int reg_index = operand->fixed_index();
|
605
|
+
operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index);
|
606
|
+
} else {
|
607
|
+
UNREACHABLE();
|
608
|
+
}
|
609
|
+
if (is_tagged) {
|
610
|
+
TraceAlloc("Fixed reg is tagged at %d\n", pos);
|
611
|
+
LInstruction* instr = InstructionAt(pos);
|
612
|
+
if (instr->HasPointerMap()) {
|
613
|
+
instr->pointer_map()->RecordPointer(operand);
|
614
|
+
}
|
615
|
+
}
|
616
|
+
return operand;
|
617
|
+
}
|
618
|
+
|
619
|
+
|
620
|
+
LiveRange* LAllocator::FixedLiveRangeFor(int index) {
|
621
|
+
if (index >= fixed_live_ranges_.length()) {
|
622
|
+
fixed_live_ranges_.AddBlock(NULL,
|
623
|
+
index - fixed_live_ranges_.length() + 1);
|
624
|
+
}
|
625
|
+
|
626
|
+
LiveRange* result = fixed_live_ranges_[index];
|
627
|
+
if (result == NULL) {
|
628
|
+
result = new LiveRange(FixedLiveRangeID(index));
|
629
|
+
ASSERT(result->IsFixed());
|
630
|
+
result->set_assigned_register(index, GENERAL_REGISTERS);
|
631
|
+
fixed_live_ranges_[index] = result;
|
632
|
+
}
|
633
|
+
return result;
|
634
|
+
}
|
635
|
+
|
636
|
+
|
637
|
+
LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
|
638
|
+
if (index >= fixed_double_live_ranges_.length()) {
|
639
|
+
fixed_double_live_ranges_.AddBlock(NULL,
|
640
|
+
index - fixed_double_live_ranges_.length() + 1);
|
641
|
+
}
|
642
|
+
|
643
|
+
LiveRange* result = fixed_double_live_ranges_[index];
|
644
|
+
if (result == NULL) {
|
645
|
+
result = new LiveRange(FixedDoubleLiveRangeID(index));
|
646
|
+
ASSERT(result->IsFixed());
|
647
|
+
result->set_assigned_register(index, DOUBLE_REGISTERS);
|
648
|
+
fixed_double_live_ranges_[index] = result;
|
649
|
+
}
|
650
|
+
return result;
|
651
|
+
}
|
652
|
+
|
653
|
+
LiveRange* LAllocator::LiveRangeFor(int index) {
|
654
|
+
if (index >= live_ranges_.length()) {
|
655
|
+
live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1);
|
656
|
+
}
|
657
|
+
LiveRange* result = live_ranges_[index];
|
658
|
+
if (result == NULL) {
|
659
|
+
result = new LiveRange(index);
|
660
|
+
live_ranges_[index] = result;
|
661
|
+
}
|
662
|
+
return result;
|
663
|
+
}
|
664
|
+
|
665
|
+
|
666
|
+
LGap* LAllocator::GetLastGap(HBasicBlock* block) {
|
667
|
+
int last_instruction = block->last_instruction_index();
|
668
|
+
int index = chunk_->NearestGapPos(last_instruction);
|
669
|
+
return GapAt(index);
|
670
|
+
}
|
671
|
+
|
672
|
+
|
673
|
+
HPhi* LAllocator::LookupPhi(LOperand* operand) const {
|
674
|
+
if (!operand->IsUnallocated()) return NULL;
|
675
|
+
int index = operand->VirtualRegister();
|
676
|
+
HValue* instr = graph_->LookupValue(index);
|
677
|
+
if (instr != NULL && instr->IsPhi()) {
|
678
|
+
return HPhi::cast(instr);
|
679
|
+
}
|
680
|
+
return NULL;
|
681
|
+
}
|
682
|
+
|
683
|
+
|
684
|
+
LiveRange* LAllocator::LiveRangeFor(LOperand* operand) {
|
685
|
+
if (operand->IsUnallocated()) {
|
686
|
+
return LiveRangeFor(LUnallocated::cast(operand)->virtual_register());
|
687
|
+
} else if (operand->IsRegister()) {
|
688
|
+
return FixedLiveRangeFor(operand->index());
|
689
|
+
} else if (operand->IsDoubleRegister()) {
|
690
|
+
return FixedDoubleLiveRangeFor(operand->index());
|
691
|
+
} else {
|
692
|
+
return NULL;
|
693
|
+
}
|
694
|
+
}
|
695
|
+
|
696
|
+
|
697
|
+
void LAllocator::Define(LifetimePosition position,
|
698
|
+
LOperand* operand,
|
699
|
+
LOperand* hint) {
|
700
|
+
LiveRange* range = LiveRangeFor(operand);
|
701
|
+
if (range == NULL) return;
|
702
|
+
|
703
|
+
if (range->IsEmpty() || range->Start().Value() > position.Value()) {
|
704
|
+
// Can happen if there is a definition without use.
|
705
|
+
range->AddUseInterval(position, position.NextInstruction());
|
706
|
+
range->AddUsePosition(position.NextInstruction(), NULL);
|
707
|
+
} else {
|
708
|
+
range->ShortenTo(position);
|
709
|
+
}
|
710
|
+
|
711
|
+
if (operand->IsUnallocated()) {
|
712
|
+
LUnallocated* unalloc_operand = LUnallocated::cast(operand);
|
713
|
+
range->AddUsePosition(position, unalloc_operand)->set_hint(hint);
|
714
|
+
}
|
715
|
+
}
|
716
|
+
|
717
|
+
|
718
|
+
void LAllocator::Use(LifetimePosition block_start,
|
719
|
+
LifetimePosition position,
|
720
|
+
LOperand* operand,
|
721
|
+
LOperand* hint) {
|
722
|
+
LiveRange* range = LiveRangeFor(operand);
|
723
|
+
if (range == NULL) return;
|
724
|
+
if (operand->IsUnallocated()) {
|
725
|
+
LUnallocated* unalloc_operand = LUnallocated::cast(operand);
|
726
|
+
range->AddUsePosition(position, unalloc_operand)->set_hint(hint);
|
727
|
+
}
|
728
|
+
range->AddUseInterval(block_start, position);
|
729
|
+
}
|
730
|
+
|
731
|
+
|
732
|
+
void LAllocator::AddConstraintsGapMove(int index,
|
733
|
+
LOperand* from,
|
734
|
+
LOperand* to) {
|
735
|
+
LGap* gap = GapAt(index);
|
736
|
+
LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
|
737
|
+
if (from->IsUnallocated()) {
|
738
|
+
const ZoneList<LMoveOperands>* move_operands = move->move_operands();
|
739
|
+
for (int i = 0; i < move_operands->length(); ++i) {
|
740
|
+
LMoveOperands cur = move_operands->at(i);
|
741
|
+
LOperand* cur_to = cur.destination();
|
742
|
+
if (cur_to->IsUnallocated()) {
|
743
|
+
if (cur_to->VirtualRegister() == from->VirtualRegister()) {
|
744
|
+
move->AddMove(cur.source(), to);
|
745
|
+
return;
|
746
|
+
}
|
747
|
+
}
|
748
|
+
}
|
749
|
+
}
|
750
|
+
move->AddMove(from, to);
|
751
|
+
}
|
752
|
+
|
753
|
+
|
754
|
+
void LAllocator::MeetRegisterConstraints(HBasicBlock* block) {
|
755
|
+
int start = block->first_instruction_index();
|
756
|
+
int end = block->last_instruction_index();
|
757
|
+
for (int i = start; i <= end; ++i) {
|
758
|
+
if (IsGapAt(i)) {
|
759
|
+
LInstruction* instr = NULL;
|
760
|
+
LInstruction* prev_instr = NULL;
|
761
|
+
if (i < end) instr = InstructionAt(i + 1);
|
762
|
+
if (i > start) prev_instr = InstructionAt(i - 1);
|
763
|
+
MeetConstraintsBetween(prev_instr, instr, i);
|
764
|
+
}
|
765
|
+
}
|
766
|
+
}
|
767
|
+
|
768
|
+
|
769
|
+
void LAllocator::MeetConstraintsBetween(LInstruction* first,
|
770
|
+
LInstruction* second,
|
771
|
+
int gap_index) {
|
772
|
+
// Handle fixed temporaries.
|
773
|
+
if (first != NULL) {
|
774
|
+
for (TempIterator it(first); it.HasNext(); it.Advance()) {
|
775
|
+
LUnallocated* temp = LUnallocated::cast(it.Next());
|
776
|
+
if (temp->HasFixedPolicy()) {
|
777
|
+
AllocateFixed(temp, gap_index - 1, false);
|
778
|
+
}
|
779
|
+
}
|
780
|
+
}
|
781
|
+
|
782
|
+
// Handle fixed output operand.
|
783
|
+
if (first != NULL && first->Output() != NULL) {
|
784
|
+
LUnallocated* first_output = LUnallocated::cast(first->Output());
|
785
|
+
LiveRange* range = LiveRangeFor(first_output->VirtualRegister());
|
786
|
+
bool assigned = false;
|
787
|
+
if (first_output->HasFixedPolicy()) {
|
788
|
+
LUnallocated* output_copy = first_output->CopyUnconstrained();
|
789
|
+
bool is_tagged = HasTaggedValue(first_output->VirtualRegister());
|
790
|
+
AllocateFixed(first_output, gap_index, is_tagged);
|
791
|
+
|
792
|
+
// This value is produced on the stack, we never need to spill it.
|
793
|
+
if (first_output->IsStackSlot()) {
|
794
|
+
range->SetSpillOperand(first_output);
|
795
|
+
range->SetSpillStartIndex(gap_index - 1);
|
796
|
+
assigned = true;
|
797
|
+
}
|
798
|
+
chunk_->AddGapMove(gap_index, first_output, output_copy);
|
799
|
+
}
|
800
|
+
|
801
|
+
if (!assigned) {
|
802
|
+
range->SetSpillStartIndex(gap_index);
|
803
|
+
|
804
|
+
// This move to spill operand is not a real use. Liveness analysis
|
805
|
+
// and splitting of live ranges do not account for it.
|
806
|
+
// Thus it should be inserted to a lifetime position corresponding to
|
807
|
+
// the instruction end.
|
808
|
+
LGap* gap = GapAt(gap_index);
|
809
|
+
LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE);
|
810
|
+
move->AddMove(first_output, range->GetSpillOperand());
|
811
|
+
}
|
812
|
+
}
|
813
|
+
|
814
|
+
// Handle fixed input operands of second instruction.
|
815
|
+
if (second != NULL) {
|
816
|
+
for (UseIterator it(second); it.HasNext(); it.Advance()) {
|
817
|
+
LUnallocated* cur_input = LUnallocated::cast(it.Next());
|
818
|
+
if (cur_input->HasFixedPolicy()) {
|
819
|
+
LUnallocated* input_copy = cur_input->CopyUnconstrained();
|
820
|
+
bool is_tagged = HasTaggedValue(cur_input->VirtualRegister());
|
821
|
+
AllocateFixed(cur_input, gap_index + 1, is_tagged);
|
822
|
+
AddConstraintsGapMove(gap_index, input_copy, cur_input);
|
823
|
+
} else if (cur_input->policy() == LUnallocated::WRITABLE_REGISTER) {
|
824
|
+
// The live range of writable input registers always goes until the end
|
825
|
+
// of the instruction.
|
826
|
+
ASSERT(!cur_input->IsUsedAtStart());
|
827
|
+
|
828
|
+
LUnallocated* input_copy = cur_input->CopyUnconstrained();
|
829
|
+
cur_input->set_virtual_register(next_virtual_register_++);
|
830
|
+
|
831
|
+
if (RequiredRegisterKind(input_copy->virtual_register()) ==
|
832
|
+
DOUBLE_REGISTERS) {
|
833
|
+
double_artificial_registers_.Add(
|
834
|
+
cur_input->virtual_register() - first_artificial_register_);
|
835
|
+
}
|
836
|
+
|
837
|
+
AddConstraintsGapMove(gap_index, input_copy, cur_input);
|
838
|
+
}
|
839
|
+
}
|
840
|
+
}
|
841
|
+
|
842
|
+
// Handle "output same as input" for second instruction.
|
843
|
+
if (second != NULL && second->Output() != NULL) {
|
844
|
+
LUnallocated* second_output = LUnallocated::cast(second->Output());
|
845
|
+
if (second_output->HasSameAsInputPolicy()) {
|
846
|
+
LUnallocated* cur_input = LUnallocated::cast(second->FirstInput());
|
847
|
+
int output_vreg = second_output->VirtualRegister();
|
848
|
+
int input_vreg = cur_input->VirtualRegister();
|
849
|
+
|
850
|
+
LUnallocated* input_copy = cur_input->CopyUnconstrained();
|
851
|
+
cur_input->set_virtual_register(second_output->virtual_register());
|
852
|
+
AddConstraintsGapMove(gap_index, input_copy, cur_input);
|
853
|
+
|
854
|
+
if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
|
855
|
+
int index = gap_index + 1;
|
856
|
+
LInstruction* instr = InstructionAt(index);
|
857
|
+
if (instr->HasPointerMap()) {
|
858
|
+
instr->pointer_map()->RecordPointer(input_copy);
|
859
|
+
}
|
860
|
+
} else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
|
861
|
+
// The input is assumed to immediately have a tagged representation,
|
862
|
+
// before the pointer map can be used. I.e. the pointer map at the
|
863
|
+
// instruction will include the output operand (whose value at the
|
864
|
+
// beginning of the instruction is equal to the input operand). If
|
865
|
+
// this is not desired, then the pointer map at this instruction needs
|
866
|
+
// to be adjusted manually.
|
867
|
+
}
|
868
|
+
}
|
869
|
+
}
|
870
|
+
}
|
871
|
+
|
872
|
+
|
873
|
+
void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
|
874
|
+
int block_start = block->first_instruction_index();
|
875
|
+
int index = block->last_instruction_index();
|
876
|
+
|
877
|
+
LifetimePosition block_start_position =
|
878
|
+
LifetimePosition::FromInstructionIndex(block_start);
|
879
|
+
|
880
|
+
while (index >= block_start) {
|
881
|
+
LifetimePosition curr_position =
|
882
|
+
LifetimePosition::FromInstructionIndex(index);
|
883
|
+
|
884
|
+
if (IsGapAt(index)) {
|
885
|
+
// We have a gap at this position.
|
886
|
+
LGap* gap = GapAt(index);
|
887
|
+
LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
|
888
|
+
const ZoneList<LMoveOperands>* move_operands = move->move_operands();
|
889
|
+
for (int i = 0; i < move_operands->length(); ++i) {
|
890
|
+
LMoveOperands* cur = &move_operands->at(i);
|
891
|
+
if (cur->IsIgnored()) continue;
|
892
|
+
LOperand* from = cur->source();
|
893
|
+
LOperand* to = cur->destination();
|
894
|
+
HPhi* phi = LookupPhi(to);
|
895
|
+
LOperand* hint = to;
|
896
|
+
if (phi != NULL) {
|
897
|
+
// This is a phi resolving move.
|
898
|
+
if (!phi->block()->IsLoopHeader()) {
|
899
|
+
hint = LiveRangeFor(phi->id())->FirstHint();
|
900
|
+
}
|
901
|
+
} else {
|
902
|
+
if (to->IsUnallocated()) {
|
903
|
+
if (live->Contains(to->VirtualRegister())) {
|
904
|
+
Define(curr_position, to, from);
|
905
|
+
live->Remove(to->VirtualRegister());
|
906
|
+
} else {
|
907
|
+
cur->Eliminate();
|
908
|
+
continue;
|
909
|
+
}
|
910
|
+
} else {
|
911
|
+
Define(curr_position, to, from);
|
912
|
+
}
|
913
|
+
}
|
914
|
+
Use(block_start_position, curr_position, from, hint);
|
915
|
+
if (from->IsUnallocated()) {
|
916
|
+
live->Add(from->VirtualRegister());
|
917
|
+
}
|
918
|
+
}
|
919
|
+
} else {
|
920
|
+
ASSERT(!IsGapAt(index));
|
921
|
+
LInstruction* instr = InstructionAt(index);
|
922
|
+
|
923
|
+
if (instr != NULL) {
|
924
|
+
LOperand* output = instr->Output();
|
925
|
+
if (output != NULL) {
|
926
|
+
if (output->IsUnallocated()) live->Remove(output->VirtualRegister());
|
927
|
+
Define(curr_position, output, NULL);
|
928
|
+
}
|
929
|
+
|
930
|
+
if (instr->IsMarkedAsCall()) {
|
931
|
+
for (int i = 0; i < Register::kNumAllocatableRegisters; ++i) {
|
932
|
+
if (output == NULL || !output->IsRegister() ||
|
933
|
+
output->index() != i) {
|
934
|
+
LiveRange* range = FixedLiveRangeFor(i);
|
935
|
+
range->AddUseInterval(curr_position,
|
936
|
+
curr_position.InstructionEnd());
|
937
|
+
}
|
938
|
+
}
|
939
|
+
}
|
940
|
+
|
941
|
+
if (instr->IsMarkedAsCall() || instr->IsMarkedAsSaveDoubles()) {
|
942
|
+
for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; ++i) {
|
943
|
+
if (output == NULL || !output->IsDoubleRegister() ||
|
944
|
+
output->index() != i) {
|
945
|
+
LiveRange* range = FixedDoubleLiveRangeFor(i);
|
946
|
+
range->AddUseInterval(curr_position,
|
947
|
+
curr_position.InstructionEnd());
|
948
|
+
}
|
949
|
+
}
|
950
|
+
}
|
951
|
+
|
952
|
+
for (UseIterator it(instr); it.HasNext(); it.Advance()) {
|
953
|
+
LOperand* input = it.Next();
|
954
|
+
|
955
|
+
LifetimePosition use_pos;
|
956
|
+
if (input->IsUnallocated() &&
|
957
|
+
LUnallocated::cast(input)->IsUsedAtStart()) {
|
958
|
+
use_pos = curr_position;
|
959
|
+
} else {
|
960
|
+
use_pos = curr_position.InstructionEnd();
|
961
|
+
}
|
962
|
+
|
963
|
+
Use(block_start_position, use_pos, input, NULL);
|
964
|
+
if (input->IsUnallocated()) live->Add(input->VirtualRegister());
|
965
|
+
}
|
966
|
+
|
967
|
+
for (TempIterator it(instr); it.HasNext(); it.Advance()) {
|
968
|
+
LOperand* temp = it.Next();
|
969
|
+
if (instr->IsMarkedAsCall()) {
|
970
|
+
if (temp->IsRegister()) continue;
|
971
|
+
if (temp->IsUnallocated()) {
|
972
|
+
LUnallocated* temp_unalloc = LUnallocated::cast(temp);
|
973
|
+
if (temp_unalloc->HasFixedPolicy()) {
|
974
|
+
continue;
|
975
|
+
}
|
976
|
+
}
|
977
|
+
}
|
978
|
+
Use(block_start_position, curr_position.InstructionEnd(), temp, NULL);
|
979
|
+
Define(curr_position, temp, NULL);
|
980
|
+
}
|
981
|
+
}
|
982
|
+
}
|
983
|
+
|
984
|
+
index = index - 1;
|
985
|
+
}
|
986
|
+
}
|
987
|
+
|
988
|
+
|
989
|
+
void LAllocator::ResolvePhis(HBasicBlock* block) {
|
990
|
+
const ZoneList<HPhi*>* phis = block->phis();
|
991
|
+
for (int i = 0; i < phis->length(); ++i) {
|
992
|
+
HPhi* phi = phis->at(i);
|
993
|
+
LUnallocated* phi_operand = new LUnallocated(LUnallocated::NONE);
|
994
|
+
phi_operand->set_virtual_register(phi->id());
|
995
|
+
for (int j = 0; j < phi->OperandCount(); ++j) {
|
996
|
+
HValue* op = phi->OperandAt(j);
|
997
|
+
LOperand* operand = NULL;
|
998
|
+
if (op->IsConstant() && op->EmitAtUses()) {
|
999
|
+
HConstant* constant = HConstant::cast(op);
|
1000
|
+
operand = chunk_->DefineConstantOperand(constant);
|
1001
|
+
} else {
|
1002
|
+
ASSERT(!op->EmitAtUses());
|
1003
|
+
LUnallocated* unalloc = new LUnallocated(LUnallocated::NONE);
|
1004
|
+
unalloc->set_virtual_register(op->id());
|
1005
|
+
operand = unalloc;
|
1006
|
+
}
|
1007
|
+
HBasicBlock* cur_block = block->predecessors()->at(j);
|
1008
|
+
// The gap move must be added without any special processing as in
|
1009
|
+
// the AddConstraintsGapMove.
|
1010
|
+
chunk_->AddGapMove(cur_block->last_instruction_index() - 1,
|
1011
|
+
operand,
|
1012
|
+
phi_operand);
|
1013
|
+
}
|
1014
|
+
|
1015
|
+
LiveRange* live_range = LiveRangeFor(phi->id());
|
1016
|
+
LLabel* label = chunk_->GetLabel(phi->block()->block_id());
|
1017
|
+
label->GetOrCreateParallelMove(LGap::START)->
|
1018
|
+
AddMove(phi_operand, live_range->GetSpillOperand());
|
1019
|
+
live_range->SetSpillStartIndex(phi->block()->first_instruction_index());
|
1020
|
+
}
|
1021
|
+
}
|
1022
|
+
|
1023
|
+
|
1024
|
+
void LAllocator::Allocate(LChunk* chunk) {
|
1025
|
+
ASSERT(chunk_ == NULL);
|
1026
|
+
chunk_ = chunk;
|
1027
|
+
MeetRegisterConstraints();
|
1028
|
+
ResolvePhis();
|
1029
|
+
BuildLiveRanges();
|
1030
|
+
AllocateGeneralRegisters();
|
1031
|
+
AllocateDoubleRegisters();
|
1032
|
+
PopulatePointerMaps();
|
1033
|
+
if (has_osr_entry_) ProcessOsrEntry();
|
1034
|
+
ConnectRanges();
|
1035
|
+
ResolveControlFlow();
|
1036
|
+
}
|
1037
|
+
|
1038
|
+
|
1039
|
+
void LAllocator::MeetRegisterConstraints() {
|
1040
|
+
HPhase phase("Register constraints", chunk_);
|
1041
|
+
first_artificial_register_ = next_virtual_register_;
|
1042
|
+
const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
|
1043
|
+
for (int i = 0; i < blocks->length(); ++i) {
|
1044
|
+
HBasicBlock* block = blocks->at(i);
|
1045
|
+
MeetRegisterConstraints(block);
|
1046
|
+
}
|
1047
|
+
}
|
1048
|
+
|
1049
|
+
|
1050
|
+
void LAllocator::ResolvePhis() {
|
1051
|
+
HPhase phase("Resolve phis", chunk_);
|
1052
|
+
|
1053
|
+
// Process the blocks in reverse order.
|
1054
|
+
const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
|
1055
|
+
for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
|
1056
|
+
HBasicBlock* block = blocks->at(block_id);
|
1057
|
+
ResolvePhis(block);
|
1058
|
+
}
|
1059
|
+
}
|
1060
|
+
|
1061
|
+
|
1062
|
+
void LAllocator::ResolveControlFlow(LiveRange* range,
|
1063
|
+
HBasicBlock* block,
|
1064
|
+
HBasicBlock* pred) {
|
1065
|
+
LifetimePosition pred_end =
|
1066
|
+
LifetimePosition::FromInstructionIndex(pred->last_instruction_index());
|
1067
|
+
LifetimePosition cur_start =
|
1068
|
+
LifetimePosition::FromInstructionIndex(block->first_instruction_index());
|
1069
|
+
LiveRange* pred_cover = NULL;
|
1070
|
+
LiveRange* cur_cover = NULL;
|
1071
|
+
LiveRange* cur_range = range;
|
1072
|
+
while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) {
|
1073
|
+
if (cur_range->CanCover(cur_start)) {
|
1074
|
+
ASSERT(cur_cover == NULL);
|
1075
|
+
cur_cover = cur_range;
|
1076
|
+
}
|
1077
|
+
if (cur_range->CanCover(pred_end)) {
|
1078
|
+
ASSERT(pred_cover == NULL);
|
1079
|
+
pred_cover = cur_range;
|
1080
|
+
}
|
1081
|
+
cur_range = cur_range->next();
|
1082
|
+
}
|
1083
|
+
|
1084
|
+
if (cur_cover->IsSpilled()) return;
|
1085
|
+
ASSERT(pred_cover != NULL && cur_cover != NULL);
|
1086
|
+
if (pred_cover != cur_cover) {
|
1087
|
+
LOperand* pred_op = pred_cover->CreateAssignedOperand();
|
1088
|
+
LOperand* cur_op = cur_cover->CreateAssignedOperand();
|
1089
|
+
if (!pred_op->Equals(cur_op)) {
|
1090
|
+
LGap* gap = NULL;
|
1091
|
+
if (block->predecessors()->length() == 1) {
|
1092
|
+
gap = GapAt(block->first_instruction_index());
|
1093
|
+
} else {
|
1094
|
+
ASSERT(pred->end()->SecondSuccessor() == NULL);
|
1095
|
+
gap = GetLastGap(pred);
|
1096
|
+
|
1097
|
+
// We are going to insert a move before the branch instruction.
|
1098
|
+
// Some branch instructions (e.g. loops' back edges)
|
1099
|
+
// can potentially cause a GC so they have a pointer map.
|
1100
|
+
// By insterting a move we essentially create a copy of a
|
1101
|
+
// value which is invisible to PopulatePointerMaps(), because we store
|
1102
|
+
// it into a location different from the operand of a live range
|
1103
|
+
// covering a branch instruction.
|
1104
|
+
// Thus we need to manually record a pointer.
|
1105
|
+
if (HasTaggedValue(range->id())) {
|
1106
|
+
LInstruction* branch = InstructionAt(pred->last_instruction_index());
|
1107
|
+
if (branch->HasPointerMap()) {
|
1108
|
+
branch->pointer_map()->RecordPointer(cur_op);
|
1109
|
+
}
|
1110
|
+
}
|
1111
|
+
}
|
1112
|
+
gap->GetOrCreateParallelMove(LGap::START)->AddMove(pred_op, cur_op);
|
1113
|
+
}
|
1114
|
+
}
|
1115
|
+
}
|
1116
|
+
|
1117
|
+
|
1118
|
+
LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) {
|
1119
|
+
int index = pos.InstructionIndex();
|
1120
|
+
if (IsGapAt(index)) {
|
1121
|
+
LGap* gap = GapAt(index);
|
1122
|
+
return gap->GetOrCreateParallelMove(
|
1123
|
+
pos.IsInstructionStart() ? LGap::START : LGap::END);
|
1124
|
+
}
|
1125
|
+
int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
|
1126
|
+
return GapAt(gap_pos)->GetOrCreateParallelMove(
|
1127
|
+
(gap_pos < index) ? LGap::AFTER : LGap::BEFORE);
|
1128
|
+
}
|
1129
|
+
|
1130
|
+
|
1131
|
+
HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) {
|
1132
|
+
LGap* gap = GapAt(chunk_->NearestGapPos(pos.InstructionIndex()));
|
1133
|
+
return gap->block();
|
1134
|
+
}
|
1135
|
+
|
1136
|
+
|
1137
|
+
void LAllocator::ConnectRanges() {
|
1138
|
+
HPhase phase("Connect ranges", this);
|
1139
|
+
for (int i = 0; i < live_ranges()->length(); ++i) {
|
1140
|
+
LiveRange* first_range = live_ranges()->at(i);
|
1141
|
+
if (first_range == NULL || first_range->parent() != NULL) continue;
|
1142
|
+
|
1143
|
+
LiveRange* second_range = first_range->next();
|
1144
|
+
while (second_range != NULL) {
|
1145
|
+
LifetimePosition pos = second_range->Start();
|
1146
|
+
|
1147
|
+
if (!second_range->IsSpilled()) {
|
1148
|
+
// Add gap move if the two live ranges touch and there is no block
|
1149
|
+
// boundary.
|
1150
|
+
if (first_range->End().Value() == pos.Value()) {
|
1151
|
+
bool should_insert = true;
|
1152
|
+
if (IsBlockBoundary(pos)) {
|
1153
|
+
should_insert = CanEagerlyResolveControlFlow(GetBlock(pos));
|
1154
|
+
}
|
1155
|
+
if (should_insert) {
|
1156
|
+
LParallelMove* move = GetConnectingParallelMove(pos);
|
1157
|
+
LOperand* prev_operand = first_range->CreateAssignedOperand();
|
1158
|
+
LOperand* cur_operand = second_range->CreateAssignedOperand();
|
1159
|
+
move->AddMove(prev_operand, cur_operand);
|
1160
|
+
}
|
1161
|
+
}
|
1162
|
+
}
|
1163
|
+
|
1164
|
+
first_range = second_range;
|
1165
|
+
second_range = second_range->next();
|
1166
|
+
}
|
1167
|
+
}
|
1168
|
+
}
|
1169
|
+
|
1170
|
+
|
1171
|
+
bool LAllocator::CanEagerlyResolveControlFlow(HBasicBlock* block) const {
|
1172
|
+
if (block->predecessors()->length() != 1) return false;
|
1173
|
+
return block->predecessors()->first()->block_id() == block->block_id() - 1;
|
1174
|
+
}
|
1175
|
+
|
1176
|
+
|
1177
|
+
void LAllocator::ResolveControlFlow() {
|
1178
|
+
HPhase phase("Resolve control flow", this);
|
1179
|
+
const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
|
1180
|
+
for (int block_id = 1; block_id < blocks->length(); ++block_id) {
|
1181
|
+
HBasicBlock* block = blocks->at(block_id);
|
1182
|
+
if (CanEagerlyResolveControlFlow(block)) continue;
|
1183
|
+
BitVector* live = live_in_sets_[block->block_id()];
|
1184
|
+
BitVector::Iterator iterator(live);
|
1185
|
+
while (!iterator.Done()) {
|
1186
|
+
int operand_index = iterator.Current();
|
1187
|
+
for (int i = 0; i < block->predecessors()->length(); ++i) {
|
1188
|
+
HBasicBlock* cur = block->predecessors()->at(i);
|
1189
|
+
LiveRange* cur_range = LiveRangeFor(operand_index);
|
1190
|
+
ResolveControlFlow(cur_range, block, cur);
|
1191
|
+
}
|
1192
|
+
iterator.Advance();
|
1193
|
+
}
|
1194
|
+
}
|
1195
|
+
}
|
1196
|
+
|
1197
|
+
|
1198
|
+
void LAllocator::BuildLiveRanges() {
  HPhase phase("Build live ranges", this);
  InitializeLivenessAnalysis();
  // Process the blocks in reverse order.
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
    HBasicBlock* block = blocks->at(block_id);
    BitVector* live = ComputeLiveOut(block);
    // Initially consider all live_out values live for the entire block. We
    // will shorten these intervals if necessary.
    AddInitialIntervals(block, live);

    // Process the instructions in reverse order, generating and killing
    // live values.
    ProcessInstructions(block, live);
    // All phi output operands are killed by this block.
    const ZoneList<HPhi*>* phis = block->phis();
    for (int i = 0; i < phis->length(); ++i) {
      // The live range interval already ends at the first instruction of the
      // block.
      HPhi* phi = phis->at(i);
      live->Remove(phi->id());

      LOperand* hint = NULL;
      LOperand* phi_operand = NULL;
      LGap* gap = GetLastGap(phi->block()->predecessors()->at(0));
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
      for (int j = 0; j < move->move_operands()->length(); ++j) {
        LOperand* to = move->move_operands()->at(j).destination();
        if (to->IsUnallocated() && to->VirtualRegister() == phi->id()) {
          hint = move->move_operands()->at(j).source();
          phi_operand = to;
          break;
        }
      }
      ASSERT(hint != NULL);

      LifetimePosition block_start = LifetimePosition::FromInstructionIndex(
          block->first_instruction_index());
      Define(block_start, phi_operand, hint);
    }

    // Now live is live_in for this block except not including values live
    // out on backward successor edges.
    live_in_sets_[block_id] = live;

    // If this block is a loop header go back and patch up the necessary
    // predecessor blocks.
    if (block->IsLoopHeader()) {
      // TODO(kmillikin): Need to be able to get the last block of the loop
      // in the loop information. Add a live range stretching from the first
      // loop instruction to the last for each value live on entry to the
      // header.
      HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
      BitVector::Iterator iterator(live);
      LifetimePosition start = LifetimePosition::FromInstructionIndex(
          block->first_instruction_index());
      LifetimePosition end = LifetimePosition::FromInstructionIndex(
          back_edge->last_instruction_index()).NextInstruction();
      while (!iterator.Done()) {
        int operand_index = iterator.Current();
        LiveRange* range = LiveRangeFor(operand_index);
        range->EnsureInterval(start, end);
        iterator.Advance();
      }

      for (int i = block->block_id() + 1; i <= back_edge->block_id(); ++i) {
        live_in_sets_[i]->Union(*live);
      }
    }

#ifdef DEBUG
    if (block_id == 0) {
      BitVector::Iterator iterator(live);
      bool found = false;
      while (!iterator.Done()) {
        found = true;
        int operand_index = iterator.Current();
        PrintF("Function: %s\n",
               *graph_->info()->function()->debug_name()->ToCString());
        PrintF("Value %d used before first definition!\n", operand_index);
        LiveRange* range = LiveRangeFor(operand_index);
        PrintF("First use is at %d\n", range->first_pos()->pos().Value());
        iterator.Advance();
      }
      ASSERT(!found);
    }
#endif
  }
}


bool LAllocator::SafePointsAreInOrder() const {
  const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
  int safe_point = 0;
  for (int i = 0; i < pointer_maps->length(); ++i) {
    LPointerMap* map = pointer_maps->at(i);
    if (safe_point > map->lithium_position()) return false;
    safe_point = map->lithium_position();
  }
  return true;
}

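// For every safe point, record which stack slots and registers hold tagged
// pointers at that point, so the garbage collector can find and update them.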
void LAllocator::PopulatePointerMaps() {
  HPhase phase("Populate pointer maps", this);
  const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();

  ASSERT(SafePointsAreInOrder());

  // Iterate over all safe point positions and record a pointer
  // for all spilled live ranges at this point.
  int first_safe_point_index = 0;
  int last_range_start = 0;
  for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) {
    LiveRange* range = live_ranges()->at(range_idx);
    if (range == NULL) continue;
    // Iterate over the first parts of multi-part live ranges.
    if (range->parent() != NULL) continue;
    // Skip non-pointer values.
    if (!HasTaggedValue(range->id())) continue;
    // Skip empty live ranges.
    if (range->IsEmpty()) continue;

    // Find the extent of the range and its children.
    int start = range->Start().InstructionIndex();
    int end = 0;
    for (LiveRange* cur = range; cur != NULL; cur = cur->next()) {
      LifetimePosition this_end = cur->End();
      if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex();
      ASSERT(cur->Start().InstructionIndex() >= start);
    }

    // Most of the ranges are in order, but not all. Keep an eye on when
    // they step backwards and reset the first_safe_point_index so we don't
    // miss any safe points.
    if (start < last_range_start) {
      first_safe_point_index = 0;
    }
    last_range_start = start;

    // Step across all the safe points that are before the start of this range,
    // recording how far we step in order to save doing this for the next range.
    while (first_safe_point_index < pointer_maps->length()) {
      LPointerMap* map = pointer_maps->at(first_safe_point_index);
      int safe_point = map->lithium_position();
      if (safe_point >= start) break;
      first_safe_point_index++;
    }

    // Step through the safe points to see whether they are in the range.
    for (int safe_point_index = first_safe_point_index;
         safe_point_index < pointer_maps->length();
         ++safe_point_index) {
      LPointerMap* map = pointer_maps->at(safe_point_index);
      int safe_point = map->lithium_position();

      // The safe points are sorted so we can stop searching here.
      if (safe_point - 1 > end) break;

      // Advance to the next active range that covers the current
      // safe point position.
      LifetimePosition safe_point_pos =
          LifetimePosition::FromInstructionIndex(safe_point);
      LiveRange* cur = range;
      while (cur != NULL && !cur->Covers(safe_point_pos.PrevInstruction())) {
        cur = cur->next();
      }
      if (cur == NULL) continue;

      // Check if the live range is spilled and the safe point is after
      // the spill position.
      if (range->HasAllocatedSpillOperand() &&
          safe_point >= range->spill_start_index()) {
        TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
                   range->id(), range->spill_start_index(), safe_point);
        map->RecordPointer(range->GetSpillOperand());
      }

      if (!cur->IsSpilled()) {
        TraceAlloc("Pointer in register for range %d (start at %d) "
                   "at safe point %d\n",
                   cur->id(), cur->Start().Value(), safe_point);
        LOperand* operand = cur->CreateAssignedOperand();
        ASSERT(!operand->IsStackSlot());
        map->RecordPointer(operand);
      }
    }
  }
}


void LAllocator::ProcessOsrEntry() {
  const ZoneList<LInstruction*>* instrs = chunk_->instructions();

  // Linear search for the OSR entry instruction in the chunk.
  int index = -1;
  while (++index < instrs->length() &&
         !instrs->at(index)->IsOsrEntry()) {
  }
  ASSERT(index < instrs->length());
  LOsrEntry* instruction = LOsrEntry::cast(instrs->at(index));

  LifetimePosition position = LifetimePosition::FromInstructionIndex(index);
  for (int i = 0; i < live_ranges()->length(); ++i) {
    LiveRange* range = live_ranges()->at(i);
    if (range != NULL) {
      if (range->Covers(position) &&
          range->HasRegisterAssigned() &&
          range->TopLevel()->HasAllocatedSpillOperand()) {
        int reg_index = range->assigned_register();
        LOperand* spill_operand = range->TopLevel()->GetSpillOperand();
        if (range->IsDouble()) {
          instruction->MarkSpilledDoubleRegister(reg_index, spill_operand);
        } else {
          instruction->MarkSpilledRegister(reg_index, spill_operand);
        }
      }
    }
  }
}


void LAllocator::AllocateGeneralRegisters() {
  HPhase phase("Allocate general registers", this);
  num_registers_ = Register::kNumAllocatableRegisters;
  mode_ = GENERAL_REGISTERS;
  AllocateRegisters();
}


void LAllocator::AllocateDoubleRegisters() {
  HPhase phase("Allocate double registers", this);
  num_registers_ = DoubleRegister::kNumAllocatableRegisters;
  mode_ = DOUBLE_REGISTERS;
  AllocateRegisters();
}

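// Core linear-scan loop: live ranges of the current register kind are
// processed in order of increasing start position; each gets a free register
// when one is available for its whole lifetime, otherwise a register is
// obtained by splitting and spilling the ranges that currently block it.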
void LAllocator::AllocateRegisters() {
  ASSERT(mode_ != NONE);
  reusable_slots_.Clear();

  for (int i = 0; i < live_ranges_.length(); ++i) {
    if (live_ranges_[i] != NULL) {
      if (RequiredRegisterKind(live_ranges_[i]->id()) == mode_) {
        AddToUnhandledUnsorted(live_ranges_[i]);
      }
    }
  }
  SortUnhandled();
  ASSERT(UnhandledIsSorted());

  ASSERT(active_live_ranges_.is_empty());
  ASSERT(inactive_live_ranges_.is_empty());

  if (mode_ == DOUBLE_REGISTERS) {
    for (int i = 0; i < fixed_double_live_ranges_.length(); ++i) {
      LiveRange* current = fixed_double_live_ranges_.at(i);
      if (current != NULL) {
        AddToInactive(current);
      }
    }
  } else {
    for (int i = 0; i < fixed_live_ranges_.length(); ++i) {
      LiveRange* current = fixed_live_ranges_.at(i);
      if (current != NULL) {
        AddToInactive(current);
      }
    }
  }

  while (!unhandled_live_ranges_.is_empty()) {
    ASSERT(UnhandledIsSorted());
    LiveRange* current = unhandled_live_ranges_.RemoveLast();
    ASSERT(UnhandledIsSorted());
    LifetimePosition position = current->Start();
    TraceAlloc("Processing interval %d start=%d\n",
               current->id(),
               position.Value());

    if (current->HasAllocatedSpillOperand()) {
      TraceAlloc("Live range %d already has a spill operand\n", current->id());
      LifetimePosition next_pos = position;
      if (IsGapAt(next_pos.InstructionIndex())) {
        next_pos = next_pos.NextInstruction();
      }
      UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos);
      // If the range already has a spill operand and it doesn't need a
      // register immediately, split it and spill the first part of the range.
      if (pos == NULL) {
        Spill(current);
        continue;
      } else if (pos->pos().Value() >
                 current->Start().NextInstruction().Value()) {
        // Do not spill live range eagerly if use position that can benefit from
        // the register is too close to the start of live range.
        SpillBetween(current, current->Start(), pos->pos());
        ASSERT(UnhandledIsSorted());
        continue;
      }
    }

    for (int i = 0; i < active_live_ranges_.length(); ++i) {
      LiveRange* cur_active = active_live_ranges_.at(i);
      if (cur_active->End().Value() <= position.Value()) {
        ActiveToHandled(cur_active);
        --i;  // The live range was removed from the list of active live ranges.
      } else if (!cur_active->Covers(position)) {
        ActiveToInactive(cur_active);
        --i;  // The live range was removed from the list of active live ranges.
      }
    }

    for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
      LiveRange* cur_inactive = inactive_live_ranges_.at(i);
      if (cur_inactive->End().Value() <= position.Value()) {
        InactiveToHandled(cur_inactive);
        --i;  // Live range was removed from the list of inactive live ranges.
      } else if (cur_inactive->Covers(position)) {
        InactiveToActive(cur_inactive);
        --i;  // Live range was removed from the list of inactive live ranges.
      }
    }

    ASSERT(!current->HasRegisterAssigned() && !current->IsSpilled());

    bool result = TryAllocateFreeReg(current);
    if (!result) {
      AllocateBlockedReg(current);
    }

    if (current->HasRegisterAssigned()) {
      AddToActive(current);
    }
  }

  active_live_ranges_.Clear();
  inactive_live_ranges_.Clear();
}


void LAllocator::Setup() {
  LConstantOperand::SetupCache();
  LStackSlot::SetupCache();
  LDoubleStackSlot::SetupCache();
  LRegister::SetupCache();
  LDoubleRegister::SetupCache();
}

const char* LAllocator::RegisterName(int allocation_index) {
  ASSERT(mode_ != NONE);
  if (mode_ == GENERAL_REGISTERS) {
    return Register::AllocationIndexToString(allocation_index);
  } else {
    return DoubleRegister::AllocationIndexToString(allocation_index);
  }
}


void LAllocator::TraceAlloc(const char* msg, ...) {
  if (FLAG_trace_alloc) {
    va_list arguments;
    va_start(arguments, msg);
    OS::VPrint(msg, arguments);
    va_end(arguments);
  }
}


bool LAllocator::HasTaggedValue(int virtual_register) const {
  HValue* value = graph_->LookupValue(virtual_register);
  if (value == NULL) return false;
  return value->representation().IsTagged();
}


RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const {
  if (virtual_register < first_artificial_register_) {
    HValue* value = graph_->LookupValue(virtual_register);
    if (value != NULL && value->representation().IsDouble()) {
      return DOUBLE_REGISTERS;
    }
  } else if (double_artificial_registers_.Contains(
      virtual_register - first_artificial_register_)) {
    return DOUBLE_REGISTERS;
  }

  return GENERAL_REGISTERS;
}


void LAllocator::RecordDefinition(HInstruction* instr, LUnallocated* operand) {
  operand->set_virtual_register(instr->id());
}


void LAllocator::RecordTemporary(LUnallocated* operand) {
  ASSERT(next_virtual_register_ < LUnallocated::kMaxVirtualRegisters);
  if (!operand->HasFixedPolicy()) {
    operand->set_virtual_register(next_virtual_register_++);
  }
}


void LAllocator::RecordUse(HValue* value, LUnallocated* operand) {
  operand->set_virtual_register(value->id());
}


int LAllocator::max_initial_value_ids() {
  return LUnallocated::kMaxVirtualRegisters / 32;
}


void LAllocator::AddToActive(LiveRange* range) {
  TraceAlloc("Add live range %d to active\n", range->id());
  active_live_ranges_.Add(range);
}


void LAllocator::AddToInactive(LiveRange* range) {
  TraceAlloc("Add live range %d to inactive\n", range->id());
  inactive_live_ranges_.Add(range);
}


void LAllocator::AddToUnhandledSorted(LiveRange* range) {
  if (range == NULL || range->IsEmpty()) return;
  ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
  for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) {
    LiveRange* cur_range = unhandled_live_ranges_.at(i);
    if (range->ShouldBeAllocatedBefore(cur_range)) {
      TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1);
      unhandled_live_ranges_.InsertAt(i + 1, range);
      ASSERT(UnhandledIsSorted());
      return;
    }
  }
  TraceAlloc("Add live range %d to unhandled at start\n", range->id());
  unhandled_live_ranges_.InsertAt(0, range);
  ASSERT(UnhandledIsSorted());
}


void LAllocator::AddToUnhandledUnsorted(LiveRange* range) {
  if (range == NULL || range->IsEmpty()) return;
  ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
  TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id());
  unhandled_live_ranges_.Add(range);
}


static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) {
  ASSERT(!(*a)->ShouldBeAllocatedBefore(*b) ||
         !(*b)->ShouldBeAllocatedBefore(*a));
  if ((*a)->ShouldBeAllocatedBefore(*b)) return 1;
  if ((*b)->ShouldBeAllocatedBefore(*a)) return -1;
  return (*a)->id() - (*b)->id();
}


// Sort the unhandled live ranges so that the ranges to be processed first are
// at the end of the array list. This is convenient for the register allocation
// algorithm because it is efficient to remove elements from the end.
void LAllocator::SortUnhandled() {
  TraceAlloc("Sort unhandled\n");
  unhandled_live_ranges_.Sort(&UnhandledSortHelper);
}


bool LAllocator::UnhandledIsSorted() {
  int len = unhandled_live_ranges_.length();
  for (int i = 1; i < len; i++) {
    LiveRange* a = unhandled_live_ranges_.at(i - 1);
    LiveRange* b = unhandled_live_ranges_.at(i);
    if (a->Start().Value() < b->Start().Value()) return false;
  }
  return true;
}


void LAllocator::FreeSpillSlot(LiveRange* range) {
  // Check that we are the last range.
  if (range->next() != NULL) return;

  if (!range->TopLevel()->HasAllocatedSpillOperand()) return;

  int index = range->TopLevel()->GetSpillOperand()->index();
  if (index >= 0) {
    reusable_slots_.Add(range);
  }
}


LOperand* LAllocator::TryReuseSpillSlot(LiveRange* range) {
  if (reusable_slots_.is_empty()) return NULL;
  if (reusable_slots_.first()->End().Value() >
      range->TopLevel()->Start().Value()) {
    return NULL;
  }
  LOperand* result = reusable_slots_.first()->TopLevel()->GetSpillOperand();
  reusable_slots_.Remove(0);
  return result;
}


void LAllocator::ActiveToHandled(LiveRange* range) {
  ASSERT(active_live_ranges_.Contains(range));
  active_live_ranges_.RemoveElement(range);
  TraceAlloc("Moving live range %d from active to handled\n", range->id());
  FreeSpillSlot(range);
}


void LAllocator::ActiveToInactive(LiveRange* range) {
  ASSERT(active_live_ranges_.Contains(range));
  active_live_ranges_.RemoveElement(range);
  inactive_live_ranges_.Add(range);
  TraceAlloc("Moving live range %d from active to inactive\n", range->id());
}


void LAllocator::InactiveToHandled(LiveRange* range) {
  ASSERT(inactive_live_ranges_.Contains(range));
  inactive_live_ranges_.RemoveElement(range);
  TraceAlloc("Moving live range %d from inactive to handled\n", range->id());
  FreeSpillSlot(range);
}


void LAllocator::InactiveToActive(LiveRange* range) {
  ASSERT(inactive_live_ranges_.Contains(range));
  inactive_live_ranges_.RemoveElement(range);
  active_live_ranges_.Add(range);
  TraceAlloc("Moving live range %d from inactive to active\n", range->id());
}


// TryAllocateFreeReg and AllocateBlockedReg assume this
// when allocating local arrays.
STATIC_ASSERT(DoubleRegister::kNumAllocatableRegisters >=
              Register::kNumAllocatableRegisters);

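// Try to give the current range a register that is free for its entire
// lifetime, preferring the register hinted by its first use; if a register
// is only free for a prefix of the range, split the range at that point and
// re-queue the tail.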
bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
  LifetimePosition free_until_pos[DoubleRegister::kNumAllocatableRegisters];

  for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
    free_until_pos[i] = LifetimePosition::MaxPosition();
  }

  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* cur_active = active_live_ranges_.at(i);
    free_until_pos[cur_active->assigned_register()] =
        LifetimePosition::FromInstructionIndex(0);
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* cur_inactive = inactive_live_ranges_.at(i);
    ASSERT(cur_inactive->End().Value() > current->Start().Value());
    LifetimePosition next_intersection =
        cur_inactive->FirstIntersection(current);
    if (!next_intersection.IsValid()) continue;
    int cur_reg = cur_inactive->assigned_register();
    free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
  }

  UsePosition* hinted_use = current->FirstPosWithHint();
  if (hinted_use != NULL) {
    LOperand* hint = hinted_use->hint();
    if (hint->IsRegister() || hint->IsDoubleRegister()) {
      int register_index = hint->index();
      TraceAlloc(
          "Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
          RegisterName(register_index),
          free_until_pos[register_index].Value(),
          current->id(),
          current->End().Value());

      // The desired register is free until the end of the current live range.
      if (free_until_pos[register_index].Value() >= current->End().Value()) {
        TraceAlloc("Assigning preferred reg %s to live range %d\n",
                   RegisterName(register_index),
                   current->id());
        current->set_assigned_register(register_index, mode_);
        return true;
      }
    }
  }

  // Find the register which stays free for the longest time.
  int reg = 0;
  for (int i = 1; i < RegisterCount(); ++i) {
    if (free_until_pos[i].Value() > free_until_pos[reg].Value()) {
      reg = i;
    }
  }

  LifetimePosition pos = free_until_pos[reg];

  if (pos.Value() <= current->Start().Value()) {
    // All registers are blocked.
    return false;
  }

  if (pos.Value() < current->End().Value()) {
    // Register reg is available at the range start but becomes blocked before
    // the range end. Split current at position where it becomes blocked.
    LiveRange* tail = SplitAt(current, pos);
    AddToUnhandledSorted(tail);
  }


  // Register reg is available at the range start and is free until
  // the range end.
  ASSERT(pos.Value() >= current->End().Value());
  TraceAlloc("Assigning free reg %s to live range %d\n",
             RegisterName(reg),
             current->id());
  current->set_assigned_register(reg, mode_);

  return true;
}

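// No register is free for the whole range: pick the register whose next use
// by the ranges holding it is furthest away. If even that use comes before
// the first use of the current range that actually needs a register, spill
// the current range up to that use instead; otherwise take the register and
// split and spill the intersecting ranges.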
void LAllocator::AllocateBlockedReg(LiveRange* current) {
  UsePosition* register_use = current->NextRegisterPosition(current->Start());
  if (register_use == NULL) {
    // There is no use in the current live range that requires a register.
    // We can just spill it.
    Spill(current);
    return;
  }


  LifetimePosition use_pos[DoubleRegister::kNumAllocatableRegisters];
  LifetimePosition block_pos[DoubleRegister::kNumAllocatableRegisters];

  for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
    use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
  }

  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* range = active_live_ranges_[i];
    int cur_reg = range->assigned_register();
    if (range->IsFixed() || !range->CanBeSpilled(current->Start())) {
      block_pos[cur_reg] = use_pos[cur_reg] =
          LifetimePosition::FromInstructionIndex(0);
    } else {
      UsePosition* next_use = range->NextUsePositionRegisterIsBeneficial(
          current->Start());
      if (next_use == NULL) {
        use_pos[cur_reg] = range->End();
      } else {
        use_pos[cur_reg] = next_use->pos();
      }
    }
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* range = inactive_live_ranges_.at(i);
    ASSERT(range->End().Value() > current->Start().Value());
    LifetimePosition next_intersection = range->FirstIntersection(current);
    if (!next_intersection.IsValid()) continue;
    int cur_reg = range->assigned_register();
    if (range->IsFixed()) {
      block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
      use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
    } else {
      use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
    }
  }

  int reg = 0;
  for (int i = 1; i < RegisterCount(); ++i) {
    if (use_pos[i].Value() > use_pos[reg].Value()) {
      reg = i;
    }
  }

  LifetimePosition pos = use_pos[reg];

  if (pos.Value() < register_use->pos().Value()) {
    // All registers are blocked before the first use that requires a register.
    // Spill starting part of live range up to that use.
    //
    // Corner case: the first use position is equal to the start of the range.
    // In this case we have nothing to spill and SpillBetween will just return
    // this range to the list of unhandled ones. This will lead to the infinite
    // loop.
    ASSERT(current->Start().Value() < register_use->pos().Value());
    SpillBetween(current, current->Start(), register_use->pos());
    return;
  }

  if (block_pos[reg].Value() < current->End().Value()) {
    // Register becomes blocked before the current range end. Split before that
    // position.
    LiveRange* tail = SplitBetween(current,
                                   current->Start(),
                                   block_pos[reg].InstructionStart());
    AddToUnhandledSorted(tail);
  }

  // Register reg is not blocked for the whole range.
  ASSERT(block_pos[reg].Value() >= current->End().Value());
  TraceAlloc("Assigning blocked reg %s to live range %d\n",
             RegisterName(reg),
             current->id());
  current->set_assigned_register(reg, mode_);

  // This register was not free. Thus we need to find and spill
  // parts of active and inactive live regions that use the same register
  // at the same lifetime positions as current.
  SplitAndSpillIntersecting(current);
}

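// After a blocked register has been assigned to the current range, split and
// spill the active and inactive ranges that hold the same register where
// they overlap with it.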
void LAllocator::SplitAndSpillIntersecting(LiveRange* current) {
  ASSERT(current->HasRegisterAssigned());
  int reg = current->assigned_register();
  LifetimePosition split_pos = current->Start();
  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* range = active_live_ranges_[i];
    if (range->assigned_register() == reg) {
      UsePosition* next_pos = range->NextRegisterPosition(current->Start());
      if (next_pos == NULL) {
        SpillAfter(range, split_pos);
      } else {
        SpillBetween(range, split_pos, next_pos->pos());
      }
      ActiveToHandled(range);
      --i;
    }
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* range = inactive_live_ranges_[i];
    ASSERT(range->End().Value() > current->Start().Value());
    if (range->assigned_register() == reg && !range->IsFixed()) {
      LifetimePosition next_intersection = range->FirstIntersection(current);
      if (next_intersection.IsValid()) {
        UsePosition* next_pos = range->NextRegisterPosition(current->Start());
        if (next_pos == NULL) {
          SpillAfter(range, split_pos);
        } else {
          next_intersection = Min(next_intersection, next_pos->pos());
          SpillBetween(range, split_pos, next_intersection);
        }
        InactiveToHandled(range);
        --i;
      }
    }
  }
}


bool LAllocator::IsBlockBoundary(LifetimePosition pos) {
  return pos.IsInstructionStart() &&
         InstructionAt(pos.InstructionIndex())->IsLabel();
}


LiveRange* LAllocator::SplitAt(LiveRange* range, LifetimePosition pos) {
  ASSERT(!range->IsFixed());
  TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());

  if (pos.Value() <= range->Start().Value()) return range;

  // We can't properly connect liveranges if split occured at the end
  // of control instruction.
  ASSERT(pos.IsInstructionStart() ||
         !chunk_->instructions()->at(pos.InstructionIndex())->IsControl());

  LiveRange* result = LiveRangeFor(next_virtual_register_++);
  range->SplitAt(pos, result);
  return result;
}


LiveRange* LAllocator::SplitBetween(LiveRange* range,
                                    LifetimePosition start,
                                    LifetimePosition end) {
  ASSERT(!range->IsFixed());
  TraceAlloc("Splitting live range %d in position between [%d, %d]\n",
             range->id(),
             start.Value(),
             end.Value());

  LifetimePosition split_pos = FindOptimalSplitPos(start, end);
  ASSERT(split_pos.Value() >= start.Value());
  return SplitAt(range, split_pos);
}


LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start,
                                                 LifetimePosition end) {
  int start_instr = start.InstructionIndex();
  int end_instr = end.InstructionIndex();
  ASSERT(start_instr <= end_instr);

  // We have no choice
  if (start_instr == end_instr) return end;

  HBasicBlock* end_block = GetBlock(start);
  HBasicBlock* start_block = GetBlock(end);

  if (end_block == start_block) {
    // The interval is split in the same basic block. Split at latest possible
    // position.
    return end;
  }

  HBasicBlock* block = end_block;
  // Find header of outermost loop.
  while (block->parent_loop_header() != NULL &&
         block->parent_loop_header()->block_id() > start_block->block_id()) {
    block = block->parent_loop_header();
  }

  if (block == end_block) return end;

  return LifetimePosition::FromInstructionIndex(
      block->first_instruction_index());
}


void LAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
  LiveRange* second_part = SplitAt(range, pos);
  Spill(second_part);
}


void LAllocator::SpillBetween(LiveRange* range,
                              LifetimePosition start,
                              LifetimePosition end) {
  ASSERT(start.Value() < end.Value());
  LiveRange* second_part = SplitAt(range, start);

  if (second_part->Start().Value() < end.Value()) {
    // The split result intersects with [start, end[.
    // Split it at position between ]start+1, end[, spill the middle part
    // and put the rest to unhandled.
    LiveRange* third_part = SplitBetween(
        second_part,
        second_part->Start().InstructionEnd(),
        end.PrevInstruction().InstructionEnd());

    ASSERT(third_part != second_part);

    Spill(second_part);
    AddToUnhandledSorted(third_part);
  } else {
    // The split result does not intersect with [start, end[.
    // Nothing to spill. Just put it to unhandled as whole.
    AddToUnhandledSorted(second_part);
  }
}


void LAllocator::Spill(LiveRange* range) {
  ASSERT(!range->IsSpilled());
  TraceAlloc("Spilling live range %d\n", range->id());
  LiveRange* first = range->TopLevel();

  if (!first->HasAllocatedSpillOperand()) {
    LOperand* op = TryReuseSpillSlot(range);
    if (op == NULL) op = chunk_->GetNextSpillSlot(mode_ == DOUBLE_REGISTERS);
    first->SetSpillOperand(op);
  }
  range->MakeSpilled();
}


int LAllocator::RegisterCount() const {
  return num_registers_;
}


#ifdef DEBUG


void LAllocator::Verify() const {
  for (int i = 0; i < live_ranges()->length(); ++i) {
    LiveRange* current = live_ranges()->at(i);
    if (current != NULL) current->Verify();
  }
}


#endif


} }  // namespace v8::internal