therubyracer 0.4.9-x86-linux
Sign up to get free protection for your applications and to get access to all the features.
Potentially problematic release.
This version of therubyracer might be problematic. Click here for more details.
- data/.gitignore +11 -0
- data/.gitmodules +3 -0
- data/Doxyfile +1514 -0
- data/History.txt +43 -0
- data/README.rdoc +157 -0
- data/Rakefile +51 -0
- data/docs/data_conversion.txt +18 -0
- data/ext/v8/callbacks.cpp +160 -0
- data/ext/v8/callbacks.h +14 -0
- data/ext/v8/convert_ruby.cpp +8 -0
- data/ext/v8/convert_ruby.h +99 -0
- data/ext/v8/convert_string.cpp +10 -0
- data/ext/v8/convert_string.h +73 -0
- data/ext/v8/convert_v8.cpp +9 -0
- data/ext/v8/convert_v8.h +121 -0
- data/ext/v8/converters.cpp +83 -0
- data/ext/v8/converters.h +23 -0
- data/ext/v8/extconf.rb +36 -0
- data/ext/v8/upstream/2.0.6/.gitignore +26 -0
- data/ext/v8/upstream/2.0.6/AUTHORS +23 -0
- data/ext/v8/upstream/2.0.6/ChangeLog +1479 -0
- data/ext/v8/upstream/2.0.6/LICENSE +55 -0
- data/ext/v8/upstream/2.0.6/SConstruct +1028 -0
- data/ext/v8/upstream/2.0.6/include/v8-debug.h +275 -0
- data/ext/v8/upstream/2.0.6/include/v8.h +3236 -0
- data/ext/v8/upstream/2.0.6/src/SConscript +283 -0
- data/ext/v8/upstream/2.0.6/src/accessors.cc +695 -0
- data/ext/v8/upstream/2.0.6/src/accessors.h +114 -0
- data/ext/v8/upstream/2.0.6/src/allocation.cc +198 -0
- data/ext/v8/upstream/2.0.6/src/allocation.h +169 -0
- data/ext/v8/upstream/2.0.6/src/api.cc +3831 -0
- data/ext/v8/upstream/2.0.6/src/api.h +479 -0
- data/ext/v8/upstream/2.0.6/src/apinatives.js +110 -0
- data/ext/v8/upstream/2.0.6/src/apiutils.h +69 -0
- data/ext/v8/upstream/2.0.6/src/arguments.h +97 -0
- data/ext/v8/upstream/2.0.6/src/arm/assembler-arm-inl.h +277 -0
- data/ext/v8/upstream/2.0.6/src/arm/assembler-arm.cc +1821 -0
- data/ext/v8/upstream/2.0.6/src/arm/assembler-arm.h +1027 -0
- data/ext/v8/upstream/2.0.6/src/arm/assembler-thumb2-inl.h +267 -0
- data/ext/v8/upstream/2.0.6/src/arm/assembler-thumb2.cc +1821 -0
- data/ext/v8/upstream/2.0.6/src/arm/assembler-thumb2.h +1027 -0
- data/ext/v8/upstream/2.0.6/src/arm/builtins-arm.cc +1271 -0
- data/ext/v8/upstream/2.0.6/src/arm/codegen-arm-inl.h +74 -0
- data/ext/v8/upstream/2.0.6/src/arm/codegen-arm.cc +6682 -0
- data/ext/v8/upstream/2.0.6/src/arm/codegen-arm.h +535 -0
- data/ext/v8/upstream/2.0.6/src/arm/constants-arm.cc +112 -0
- data/ext/v8/upstream/2.0.6/src/arm/constants-arm.h +347 -0
- data/ext/v8/upstream/2.0.6/src/arm/cpu-arm.cc +132 -0
- data/ext/v8/upstream/2.0.6/src/arm/debug-arm.cc +213 -0
- data/ext/v8/upstream/2.0.6/src/arm/disasm-arm.cc +1166 -0
- data/ext/v8/upstream/2.0.6/src/arm/fast-codegen-arm.cc +1698 -0
- data/ext/v8/upstream/2.0.6/src/arm/frames-arm.cc +123 -0
- data/ext/v8/upstream/2.0.6/src/arm/frames-arm.h +162 -0
- data/ext/v8/upstream/2.0.6/src/arm/ic-arm.cc +849 -0
- data/ext/v8/upstream/2.0.6/src/arm/jump-target-arm.cc +238 -0
- data/ext/v8/upstream/2.0.6/src/arm/macro-assembler-arm.cc +1259 -0
- data/ext/v8/upstream/2.0.6/src/arm/macro-assembler-arm.h +423 -0
- data/ext/v8/upstream/2.0.6/src/arm/regexp-macro-assembler-arm.cc +1266 -0
- data/ext/v8/upstream/2.0.6/src/arm/regexp-macro-assembler-arm.h +282 -0
- data/ext/v8/upstream/2.0.6/src/arm/register-allocator-arm-inl.h +103 -0
- data/ext/v8/upstream/2.0.6/src/arm/register-allocator-arm.cc +59 -0
- data/ext/v8/upstream/2.0.6/src/arm/register-allocator-arm.h +43 -0
- data/ext/v8/upstream/2.0.6/src/arm/simulator-arm.cc +2264 -0
- data/ext/v8/upstream/2.0.6/src/arm/simulator-arm.h +306 -0
- data/ext/v8/upstream/2.0.6/src/arm/stub-cache-arm.cc +1516 -0
- data/ext/v8/upstream/2.0.6/src/arm/virtual-frame-arm.cc +412 -0
- data/ext/v8/upstream/2.0.6/src/arm/virtual-frame-arm.h +532 -0
- data/ext/v8/upstream/2.0.6/src/array.js +1154 -0
- data/ext/v8/upstream/2.0.6/src/assembler.cc +772 -0
- data/ext/v8/upstream/2.0.6/src/assembler.h +525 -0
- data/ext/v8/upstream/2.0.6/src/ast.cc +512 -0
- data/ext/v8/upstream/2.0.6/src/ast.h +1820 -0
- data/ext/v8/upstream/2.0.6/src/bootstrapper.cc +1680 -0
- data/ext/v8/upstream/2.0.6/src/bootstrapper.h +103 -0
- data/ext/v8/upstream/2.0.6/src/builtins.cc +851 -0
- data/ext/v8/upstream/2.0.6/src/builtins.h +245 -0
- data/ext/v8/upstream/2.0.6/src/bytecodes-irregexp.h +104 -0
- data/ext/v8/upstream/2.0.6/src/char-predicates-inl.h +86 -0
- data/ext/v8/upstream/2.0.6/src/char-predicates.h +65 -0
- data/ext/v8/upstream/2.0.6/src/checks.cc +100 -0
- data/ext/v8/upstream/2.0.6/src/checks.h +284 -0
- data/ext/v8/upstream/2.0.6/src/code-stubs.cc +164 -0
- data/ext/v8/upstream/2.0.6/src/code-stubs.h +164 -0
- data/ext/v8/upstream/2.0.6/src/code.h +68 -0
- data/ext/v8/upstream/2.0.6/src/codegen-inl.h +88 -0
- data/ext/v8/upstream/2.0.6/src/codegen.cc +504 -0
- data/ext/v8/upstream/2.0.6/src/codegen.h +522 -0
- data/ext/v8/upstream/2.0.6/src/compilation-cache.cc +490 -0
- data/ext/v8/upstream/2.0.6/src/compilation-cache.h +98 -0
- data/ext/v8/upstream/2.0.6/src/compiler.cc +1132 -0
- data/ext/v8/upstream/2.0.6/src/compiler.h +107 -0
- data/ext/v8/upstream/2.0.6/src/contexts.cc +256 -0
- data/ext/v8/upstream/2.0.6/src/contexts.h +345 -0
- data/ext/v8/upstream/2.0.6/src/conversions-inl.h +95 -0
- data/ext/v8/upstream/2.0.6/src/conversions.cc +709 -0
- data/ext/v8/upstream/2.0.6/src/conversions.h +118 -0
- data/ext/v8/upstream/2.0.6/src/counters.cc +78 -0
- data/ext/v8/upstream/2.0.6/src/counters.h +239 -0
- data/ext/v8/upstream/2.0.6/src/cpu.h +65 -0
- data/ext/v8/upstream/2.0.6/src/d8-debug.cc +345 -0
- data/ext/v8/upstream/2.0.6/src/d8-debug.h +155 -0
- data/ext/v8/upstream/2.0.6/src/d8-posix.cc +675 -0
- data/ext/v8/upstream/2.0.6/src/d8-readline.cc +128 -0
- data/ext/v8/upstream/2.0.6/src/d8-windows.cc +42 -0
- data/ext/v8/upstream/2.0.6/src/d8.cc +776 -0
- data/ext/v8/upstream/2.0.6/src/d8.h +225 -0
- data/ext/v8/upstream/2.0.6/src/d8.js +1625 -0
- data/ext/v8/upstream/2.0.6/src/date-delay.js +1138 -0
- data/ext/v8/upstream/2.0.6/src/dateparser-inl.h +114 -0
- data/ext/v8/upstream/2.0.6/src/dateparser.cc +186 -0
- data/ext/v8/upstream/2.0.6/src/dateparser.h +240 -0
- data/ext/v8/upstream/2.0.6/src/debug-agent.cc +425 -0
- data/ext/v8/upstream/2.0.6/src/debug-agent.h +129 -0
- data/ext/v8/upstream/2.0.6/src/debug-delay.js +2073 -0
- data/ext/v8/upstream/2.0.6/src/debug.cc +2751 -0
- data/ext/v8/upstream/2.0.6/src/debug.h +866 -0
- data/ext/v8/upstream/2.0.6/src/disasm.h +77 -0
- data/ext/v8/upstream/2.0.6/src/disassembler.cc +318 -0
- data/ext/v8/upstream/2.0.6/src/disassembler.h +56 -0
- data/ext/v8/upstream/2.0.6/src/dtoa-config.c +91 -0
- data/ext/v8/upstream/2.0.6/src/execution.cc +701 -0
- data/ext/v8/upstream/2.0.6/src/execution.h +312 -0
- data/ext/v8/upstream/2.0.6/src/factory.cc +957 -0
- data/ext/v8/upstream/2.0.6/src/factory.h +393 -0
- data/ext/v8/upstream/2.0.6/src/fast-codegen.cc +725 -0
- data/ext/v8/upstream/2.0.6/src/fast-codegen.h +371 -0
- data/ext/v8/upstream/2.0.6/src/flag-definitions.h +426 -0
- data/ext/v8/upstream/2.0.6/src/flags.cc +555 -0
- data/ext/v8/upstream/2.0.6/src/flags.h +81 -0
- data/ext/v8/upstream/2.0.6/src/frame-element.cc +45 -0
- data/ext/v8/upstream/2.0.6/src/frame-element.h +235 -0
- data/ext/v8/upstream/2.0.6/src/frames-inl.h +215 -0
- data/ext/v8/upstream/2.0.6/src/frames.cc +749 -0
- data/ext/v8/upstream/2.0.6/src/frames.h +659 -0
- data/ext/v8/upstream/2.0.6/src/func-name-inferrer.cc +76 -0
- data/ext/v8/upstream/2.0.6/src/func-name-inferrer.h +135 -0
- data/ext/v8/upstream/2.0.6/src/global-handles.cc +516 -0
- data/ext/v8/upstream/2.0.6/src/global-handles.h +180 -0
- data/ext/v8/upstream/2.0.6/src/globals.h +608 -0
- data/ext/v8/upstream/2.0.6/src/handles-inl.h +76 -0
- data/ext/v8/upstream/2.0.6/src/handles.cc +811 -0
- data/ext/v8/upstream/2.0.6/src/handles.h +367 -0
- data/ext/v8/upstream/2.0.6/src/hashmap.cc +226 -0
- data/ext/v8/upstream/2.0.6/src/hashmap.h +120 -0
- data/ext/v8/upstream/2.0.6/src/heap-inl.h +407 -0
- data/ext/v8/upstream/2.0.6/src/heap-profiler.cc +695 -0
- data/ext/v8/upstream/2.0.6/src/heap-profiler.h +277 -0
- data/ext/v8/upstream/2.0.6/src/heap.cc +4204 -0
- data/ext/v8/upstream/2.0.6/src/heap.h +1704 -0
- data/ext/v8/upstream/2.0.6/src/ia32/assembler-ia32-inl.h +325 -0
- data/ext/v8/upstream/2.0.6/src/ia32/assembler-ia32.cc +2375 -0
- data/ext/v8/upstream/2.0.6/src/ia32/assembler-ia32.h +914 -0
- data/ext/v8/upstream/2.0.6/src/ia32/builtins-ia32.cc +1222 -0
- data/ext/v8/upstream/2.0.6/src/ia32/codegen-ia32-inl.h +46 -0
- data/ext/v8/upstream/2.0.6/src/ia32/codegen-ia32.cc +9770 -0
- data/ext/v8/upstream/2.0.6/src/ia32/codegen-ia32.h +834 -0
- data/ext/v8/upstream/2.0.6/src/ia32/cpu-ia32.cc +79 -0
- data/ext/v8/upstream/2.0.6/src/ia32/debug-ia32.cc +208 -0
- data/ext/v8/upstream/2.0.6/src/ia32/disasm-ia32.cc +1357 -0
- data/ext/v8/upstream/2.0.6/src/ia32/fast-codegen-ia32.cc +1813 -0
- data/ext/v8/upstream/2.0.6/src/ia32/frames-ia32.cc +111 -0
- data/ext/v8/upstream/2.0.6/src/ia32/frames-ia32.h +135 -0
- data/ext/v8/upstream/2.0.6/src/ia32/ic-ia32.cc +1490 -0
- data/ext/v8/upstream/2.0.6/src/ia32/jump-target-ia32.cc +432 -0
- data/ext/v8/upstream/2.0.6/src/ia32/macro-assembler-ia32.cc +1517 -0
- data/ext/v8/upstream/2.0.6/src/ia32/macro-assembler-ia32.h +528 -0
- data/ext/v8/upstream/2.0.6/src/ia32/regexp-macro-assembler-ia32.cc +1219 -0
- data/ext/v8/upstream/2.0.6/src/ia32/regexp-macro-assembler-ia32.h +230 -0
- data/ext/v8/upstream/2.0.6/src/ia32/register-allocator-ia32-inl.h +82 -0
- data/ext/v8/upstream/2.0.6/src/ia32/register-allocator-ia32.cc +99 -0
- data/ext/v8/upstream/2.0.6/src/ia32/register-allocator-ia32.h +43 -0
- data/ext/v8/upstream/2.0.6/src/ia32/simulator-ia32.cc +30 -0
- data/ext/v8/upstream/2.0.6/src/ia32/simulator-ia32.h +62 -0
- data/ext/v8/upstream/2.0.6/src/ia32/stub-cache-ia32.cc +1961 -0
- data/ext/v8/upstream/2.0.6/src/ia32/virtual-frame-ia32.cc +1105 -0
- data/ext/v8/upstream/2.0.6/src/ia32/virtual-frame-ia32.h +580 -0
- data/ext/v8/upstream/2.0.6/src/ic-inl.h +93 -0
- data/ext/v8/upstream/2.0.6/src/ic.cc +1426 -0
- data/ext/v8/upstream/2.0.6/src/ic.h +443 -0
- data/ext/v8/upstream/2.0.6/src/interpreter-irregexp.cc +646 -0
- data/ext/v8/upstream/2.0.6/src/interpreter-irregexp.h +48 -0
- data/ext/v8/upstream/2.0.6/src/json-delay.js +254 -0
- data/ext/v8/upstream/2.0.6/src/jsregexp.cc +5234 -0
- data/ext/v8/upstream/2.0.6/src/jsregexp.h +1439 -0
- data/ext/v8/upstream/2.0.6/src/jump-target-inl.h +49 -0
- data/ext/v8/upstream/2.0.6/src/jump-target.cc +383 -0
- data/ext/v8/upstream/2.0.6/src/jump-target.h +280 -0
- data/ext/v8/upstream/2.0.6/src/list-inl.h +166 -0
- data/ext/v8/upstream/2.0.6/src/list.h +158 -0
- data/ext/v8/upstream/2.0.6/src/log-inl.h +126 -0
- data/ext/v8/upstream/2.0.6/src/log-utils.cc +503 -0
- data/ext/v8/upstream/2.0.6/src/log-utils.h +292 -0
- data/ext/v8/upstream/2.0.6/src/log.cc +1457 -0
- data/ext/v8/upstream/2.0.6/src/log.h +371 -0
- data/ext/v8/upstream/2.0.6/src/macro-assembler.h +93 -0
- data/ext/v8/upstream/2.0.6/src/macros.py +137 -0
- data/ext/v8/upstream/2.0.6/src/mark-compact.cc +2007 -0
- data/ext/v8/upstream/2.0.6/src/mark-compact.h +442 -0
- data/ext/v8/upstream/2.0.6/src/math.js +263 -0
- data/ext/v8/upstream/2.0.6/src/memory.h +74 -0
- data/ext/v8/upstream/2.0.6/src/messages.cc +177 -0
- data/ext/v8/upstream/2.0.6/src/messages.h +112 -0
- data/ext/v8/upstream/2.0.6/src/messages.js +937 -0
- data/ext/v8/upstream/2.0.6/src/mirror-delay.js +2332 -0
- data/ext/v8/upstream/2.0.6/src/mksnapshot.cc +169 -0
- data/ext/v8/upstream/2.0.6/src/natives.h +63 -0
- data/ext/v8/upstream/2.0.6/src/objects-debug.cc +1317 -0
- data/ext/v8/upstream/2.0.6/src/objects-inl.h +3044 -0
- data/ext/v8/upstream/2.0.6/src/objects.cc +8306 -0
- data/ext/v8/upstream/2.0.6/src/objects.h +4960 -0
- data/ext/v8/upstream/2.0.6/src/oprofile-agent.cc +116 -0
- data/ext/v8/upstream/2.0.6/src/oprofile-agent.h +69 -0
- data/ext/v8/upstream/2.0.6/src/parser.cc +4810 -0
- data/ext/v8/upstream/2.0.6/src/parser.h +195 -0
- data/ext/v8/upstream/2.0.6/src/platform-freebsd.cc +645 -0
- data/ext/v8/upstream/2.0.6/src/platform-linux.cc +808 -0
- data/ext/v8/upstream/2.0.6/src/platform-macos.cc +643 -0
- data/ext/v8/upstream/2.0.6/src/platform-nullos.cc +454 -0
- data/ext/v8/upstream/2.0.6/src/platform-openbsd.cc +597 -0
- data/ext/v8/upstream/2.0.6/src/platform-posix.cc +380 -0
- data/ext/v8/upstream/2.0.6/src/platform-win32.cc +1908 -0
- data/ext/v8/upstream/2.0.6/src/platform.h +556 -0
- data/ext/v8/upstream/2.0.6/src/prettyprinter.cc +1511 -0
- data/ext/v8/upstream/2.0.6/src/prettyprinter.h +219 -0
- data/ext/v8/upstream/2.0.6/src/property.cc +96 -0
- data/ext/v8/upstream/2.0.6/src/property.h +327 -0
- data/ext/v8/upstream/2.0.6/src/regexp-delay.js +406 -0
- data/ext/v8/upstream/2.0.6/src/regexp-macro-assembler-irregexp-inl.h +78 -0
- data/ext/v8/upstream/2.0.6/src/regexp-macro-assembler-irregexp.cc +464 -0
- data/ext/v8/upstream/2.0.6/src/regexp-macro-assembler-irregexp.h +141 -0
- data/ext/v8/upstream/2.0.6/src/regexp-macro-assembler-tracer.cc +356 -0
- data/ext/v8/upstream/2.0.6/src/regexp-macro-assembler-tracer.h +103 -0
- data/ext/v8/upstream/2.0.6/src/regexp-macro-assembler.cc +240 -0
- data/ext/v8/upstream/2.0.6/src/regexp-macro-assembler.h +220 -0
- data/ext/v8/upstream/2.0.6/src/regexp-stack.cc +103 -0
- data/ext/v8/upstream/2.0.6/src/regexp-stack.h +123 -0
- data/ext/v8/upstream/2.0.6/src/register-allocator-inl.h +74 -0
- data/ext/v8/upstream/2.0.6/src/register-allocator.cc +100 -0
- data/ext/v8/upstream/2.0.6/src/register-allocator.h +295 -0
- data/ext/v8/upstream/2.0.6/src/rewriter.cc +855 -0
- data/ext/v8/upstream/2.0.6/src/rewriter.h +54 -0
- data/ext/v8/upstream/2.0.6/src/runtime.cc +8163 -0
- data/ext/v8/upstream/2.0.6/src/runtime.h +432 -0
- data/ext/v8/upstream/2.0.6/src/runtime.js +626 -0
- data/ext/v8/upstream/2.0.6/src/scanner.cc +1098 -0
- data/ext/v8/upstream/2.0.6/src/scanner.h +425 -0
- data/ext/v8/upstream/2.0.6/src/scopeinfo.cc +649 -0
- data/ext/v8/upstream/2.0.6/src/scopeinfo.h +236 -0
- data/ext/v8/upstream/2.0.6/src/scopes.cc +963 -0
- data/ext/v8/upstream/2.0.6/src/scopes.h +401 -0
- data/ext/v8/upstream/2.0.6/src/serialize.cc +1260 -0
- data/ext/v8/upstream/2.0.6/src/serialize.h +404 -0
- data/ext/v8/upstream/2.0.6/src/shell.h +55 -0
- data/ext/v8/upstream/2.0.6/src/simulator.h +41 -0
- data/ext/v8/upstream/2.0.6/src/smart-pointer.h +109 -0
- data/ext/v8/upstream/2.0.6/src/snapshot-common.cc +97 -0
- data/ext/v8/upstream/2.0.6/src/snapshot-empty.cc +40 -0
- data/ext/v8/upstream/2.0.6/src/snapshot.h +59 -0
- data/ext/v8/upstream/2.0.6/src/spaces-inl.h +372 -0
- data/ext/v8/upstream/2.0.6/src/spaces.cc +2864 -0
- data/ext/v8/upstream/2.0.6/src/spaces.h +2072 -0
- data/ext/v8/upstream/2.0.6/src/string-stream.cc +584 -0
- data/ext/v8/upstream/2.0.6/src/string-stream.h +189 -0
- data/ext/v8/upstream/2.0.6/src/string.js +901 -0
- data/ext/v8/upstream/2.0.6/src/stub-cache.cc +1108 -0
- data/ext/v8/upstream/2.0.6/src/stub-cache.h +578 -0
- data/ext/v8/upstream/2.0.6/src/third_party/dtoa/COPYING +15 -0
- data/ext/v8/upstream/2.0.6/src/third_party/dtoa/dtoa.c +3330 -0
- data/ext/v8/upstream/2.0.6/src/third_party/valgrind/valgrind.h +3925 -0
- data/ext/v8/upstream/2.0.6/src/token.cc +56 -0
- data/ext/v8/upstream/2.0.6/src/token.h +270 -0
- data/ext/v8/upstream/2.0.6/src/top.cc +991 -0
- data/ext/v8/upstream/2.0.6/src/top.h +459 -0
- data/ext/v8/upstream/2.0.6/src/unicode-inl.h +238 -0
- data/ext/v8/upstream/2.0.6/src/unicode.cc +749 -0
- data/ext/v8/upstream/2.0.6/src/unicode.h +279 -0
- data/ext/v8/upstream/2.0.6/src/uri.js +415 -0
- data/ext/v8/upstream/2.0.6/src/usage-analyzer.cc +426 -0
- data/ext/v8/upstream/2.0.6/src/usage-analyzer.h +40 -0
- data/ext/v8/upstream/2.0.6/src/utils.cc +322 -0
- data/ext/v8/upstream/2.0.6/src/utils.h +592 -0
- data/ext/v8/upstream/2.0.6/src/v8-counters.cc +55 -0
- data/ext/v8/upstream/2.0.6/src/v8-counters.h +198 -0
- data/ext/v8/upstream/2.0.6/src/v8.cc +193 -0
- data/ext/v8/upstream/2.0.6/src/v8.h +119 -0
- data/ext/v8/upstream/2.0.6/src/v8natives.js +846 -0
- data/ext/v8/upstream/2.0.6/src/v8threads.cc +450 -0
- data/ext/v8/upstream/2.0.6/src/v8threads.h +144 -0
- data/ext/v8/upstream/2.0.6/src/variables.cc +163 -0
- data/ext/v8/upstream/2.0.6/src/variables.h +235 -0
- data/ext/v8/upstream/2.0.6/src/version.cc +88 -0
- data/ext/v8/upstream/2.0.6/src/version.h +64 -0
- data/ext/v8/upstream/2.0.6/src/virtual-frame.cc +381 -0
- data/ext/v8/upstream/2.0.6/src/virtual-frame.h +44 -0
- data/ext/v8/upstream/2.0.6/src/x64/assembler-x64-inl.h +352 -0
- data/ext/v8/upstream/2.0.6/src/x64/assembler-x64.cc +2539 -0
- data/ext/v8/upstream/2.0.6/src/x64/assembler-x64.h +1399 -0
- data/ext/v8/upstream/2.0.6/src/x64/builtins-x64.cc +1255 -0
- data/ext/v8/upstream/2.0.6/src/x64/codegen-x64-inl.h +46 -0
- data/ext/v8/upstream/2.0.6/src/x64/codegen-x64.cc +8223 -0
- data/ext/v8/upstream/2.0.6/src/x64/codegen-x64.h +785 -0
- data/ext/v8/upstream/2.0.6/src/x64/cpu-x64.cc +79 -0
- data/ext/v8/upstream/2.0.6/src/x64/debug-x64.cc +202 -0
- data/ext/v8/upstream/2.0.6/src/x64/disasm-x64.cc +1596 -0
- data/ext/v8/upstream/2.0.6/src/x64/fast-codegen-x64.cc +1820 -0
- data/ext/v8/upstream/2.0.6/src/x64/frames-x64.cc +109 -0
- data/ext/v8/upstream/2.0.6/src/x64/frames-x64.h +121 -0
- data/ext/v8/upstream/2.0.6/src/x64/ic-x64.cc +1392 -0
- data/ext/v8/upstream/2.0.6/src/x64/jump-target-x64.cc +432 -0
- data/ext/v8/upstream/2.0.6/src/x64/macro-assembler-x64.cc +2409 -0
- data/ext/v8/upstream/2.0.6/src/x64/macro-assembler-x64.h +765 -0
- data/ext/v8/upstream/2.0.6/src/x64/regexp-macro-assembler-x64.cc +1337 -0
- data/ext/v8/upstream/2.0.6/src/x64/regexp-macro-assembler-x64.h +295 -0
- data/ext/v8/upstream/2.0.6/src/x64/register-allocator-x64-inl.h +86 -0
- data/ext/v8/upstream/2.0.6/src/x64/register-allocator-x64.cc +84 -0
- data/ext/v8/upstream/2.0.6/src/x64/register-allocator-x64.h +43 -0
- data/ext/v8/upstream/2.0.6/src/x64/simulator-x64.cc +27 -0
- data/ext/v8/upstream/2.0.6/src/x64/simulator-x64.h +63 -0
- data/ext/v8/upstream/2.0.6/src/x64/stub-cache-x64.cc +1884 -0
- data/ext/v8/upstream/2.0.6/src/x64/virtual-frame-x64.cc +1089 -0
- data/ext/v8/upstream/2.0.6/src/x64/virtual-frame-x64.h +560 -0
- data/ext/v8/upstream/2.0.6/src/zone-inl.h +297 -0
- data/ext/v8/upstream/2.0.6/src/zone.cc +193 -0
- data/ext/v8/upstream/2.0.6/src/zone.h +305 -0
- data/ext/v8/upstream/2.0.6/tools/codemap.js +258 -0
- data/ext/v8/upstream/2.0.6/tools/consarray.js +93 -0
- data/ext/v8/upstream/2.0.6/tools/csvparser.js +98 -0
- data/ext/v8/upstream/2.0.6/tools/gyp/v8.gyp +620 -0
- data/ext/v8/upstream/2.0.6/tools/js2c.py +376 -0
- data/ext/v8/upstream/2.0.6/tools/jsmin.py +280 -0
- data/ext/v8/upstream/2.0.6/tools/linux-tick-processor +24 -0
- data/ext/v8/upstream/2.0.6/tools/linux-tick-processor.py +78 -0
- data/ext/v8/upstream/2.0.6/tools/logreader.js +320 -0
- data/ext/v8/upstream/2.0.6/tools/mac-nm +18 -0
- data/ext/v8/upstream/2.0.6/tools/mac-tick-processor +6 -0
- data/ext/v8/upstream/2.0.6/tools/oprofile/annotate +7 -0
- data/ext/v8/upstream/2.0.6/tools/oprofile/common +19 -0
- data/ext/v8/upstream/2.0.6/tools/oprofile/dump +7 -0
- data/ext/v8/upstream/2.0.6/tools/oprofile/report +7 -0
- data/ext/v8/upstream/2.0.6/tools/oprofile/reset +7 -0
- data/ext/v8/upstream/2.0.6/tools/oprofile/run +14 -0
- data/ext/v8/upstream/2.0.6/tools/oprofile/shutdown +7 -0
- data/ext/v8/upstream/2.0.6/tools/oprofile/start +7 -0
- data/ext/v8/upstream/2.0.6/tools/presubmit.py +299 -0
- data/ext/v8/upstream/2.0.6/tools/process-heap-prof.py +120 -0
- data/ext/v8/upstream/2.0.6/tools/profile.js +621 -0
- data/ext/v8/upstream/2.0.6/tools/profile_view.js +224 -0
- data/ext/v8/upstream/2.0.6/tools/run-valgrind.py +77 -0
- data/ext/v8/upstream/2.0.6/tools/splaytree.js +322 -0
- data/ext/v8/upstream/2.0.6/tools/splaytree.py +226 -0
- data/ext/v8/upstream/2.0.6/tools/stats-viewer.py +456 -0
- data/ext/v8/upstream/2.0.6/tools/test.py +1370 -0
- data/ext/v8/upstream/2.0.6/tools/tickprocessor-driver.js +53 -0
- data/ext/v8/upstream/2.0.6/tools/tickprocessor.js +731 -0
- data/ext/v8/upstream/2.0.6/tools/tickprocessor.py +535 -0
- data/ext/v8/upstream/2.0.6/tools/utils.py +82 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/README.txt +71 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/arm.vsprops +14 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/common.vsprops +35 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/d8.vcproj +199 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/d8_arm.vcproj +199 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/d8_x64.vcproj +201 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/d8js2c.cmd +6 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/debug.vsprops +17 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/ia32.vsprops +13 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/js2c.cmd +6 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/release.vsprops +24 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8.sln +101 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8.vcproj +223 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_arm.sln +74 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_arm.vcproj +223 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_base.vcproj +971 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_base_arm.vcproj +983 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_base_x64.vcproj +959 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_cctest.vcproj +255 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_cctest_arm.vcproj +243 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_cctest_x64.vcproj +257 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_mksnapshot.vcproj +151 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_mksnapshot_x64.vcproj +151 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_process_sample.vcproj +151 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_process_sample_arm.vcproj +151 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_process_sample_x64.vcproj +151 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_shell_sample.vcproj +151 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_shell_sample_arm.vcproj +151 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_shell_sample_x64.vcproj +153 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_snapshot.vcproj +142 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_snapshot_cc.vcproj +92 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_snapshot_cc_x64.vcproj +92 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_snapshot_x64.vcproj +142 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_x64.sln +101 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_x64.vcproj +223 -0
- data/ext/v8/upstream/2.0.6/tools/visual_studio/x64.vsprops +13 -0
- data/ext/v8/upstream/2.0.6/tools/windows-tick-processor.bat +5 -0
- data/ext/v8/upstream/2.0.6/tools/windows-tick-processor.py +137 -0
- data/ext/v8/upstream/Makefile +31 -0
- data/ext/v8/upstream/no-strict-aliasing.patch +13 -0
- data/ext/v8/upstream/scons/CHANGES.txt +5183 -0
- data/ext/v8/upstream/scons/LICENSE.txt +20 -0
- data/ext/v8/upstream/scons/MANIFEST +202 -0
- data/ext/v8/upstream/scons/PKG-INFO +13 -0
- data/ext/v8/upstream/scons/README.txt +273 -0
- data/ext/v8/upstream/scons/RELEASE.txt +1040 -0
- data/ext/v8/upstream/scons/engine/SCons/Action.py +1256 -0
- data/ext/v8/upstream/scons/engine/SCons/Builder.py +868 -0
- data/ext/v8/upstream/scons/engine/SCons/CacheDir.py +217 -0
- data/ext/v8/upstream/scons/engine/SCons/Conftest.py +794 -0
- data/ext/v8/upstream/scons/engine/SCons/Debug.py +237 -0
- data/ext/v8/upstream/scons/engine/SCons/Defaults.py +485 -0
- data/ext/v8/upstream/scons/engine/SCons/Environment.py +2327 -0
- data/ext/v8/upstream/scons/engine/SCons/Errors.py +207 -0
- data/ext/v8/upstream/scons/engine/SCons/Executor.py +636 -0
- data/ext/v8/upstream/scons/engine/SCons/Job.py +435 -0
- data/ext/v8/upstream/scons/engine/SCons/Memoize.py +292 -0
- data/ext/v8/upstream/scons/engine/SCons/Node/Alias.py +153 -0
- data/ext/v8/upstream/scons/engine/SCons/Node/FS.py +3220 -0
- data/ext/v8/upstream/scons/engine/SCons/Node/Python.py +128 -0
- data/ext/v8/upstream/scons/engine/SCons/Node/__init__.py +1341 -0
- data/ext/v8/upstream/scons/engine/SCons/Options/BoolOption.py +50 -0
- data/ext/v8/upstream/scons/engine/SCons/Options/EnumOption.py +50 -0
- data/ext/v8/upstream/scons/engine/SCons/Options/ListOption.py +50 -0
- data/ext/v8/upstream/scons/engine/SCons/Options/PackageOption.py +50 -0
- data/ext/v8/upstream/scons/engine/SCons/Options/PathOption.py +76 -0
- data/ext/v8/upstream/scons/engine/SCons/Options/__init__.py +74 -0
- data/ext/v8/upstream/scons/engine/SCons/PathList.py +232 -0
- data/ext/v8/upstream/scons/engine/SCons/Platform/__init__.py +236 -0
- data/ext/v8/upstream/scons/engine/SCons/Platform/aix.py +70 -0
- data/ext/v8/upstream/scons/engine/SCons/Platform/cygwin.py +55 -0
- data/ext/v8/upstream/scons/engine/SCons/Platform/darwin.py +46 -0
- data/ext/v8/upstream/scons/engine/SCons/Platform/hpux.py +46 -0
- data/ext/v8/upstream/scons/engine/SCons/Platform/irix.py +44 -0
- data/ext/v8/upstream/scons/engine/SCons/Platform/os2.py +58 -0
- data/ext/v8/upstream/scons/engine/SCons/Platform/posix.py +264 -0
- data/ext/v8/upstream/scons/engine/SCons/Platform/sunos.py +50 -0
- data/ext/v8/upstream/scons/engine/SCons/Platform/win32.py +386 -0
- data/ext/v8/upstream/scons/engine/SCons/SConf.py +1038 -0
- data/ext/v8/upstream/scons/engine/SCons/SConsign.py +381 -0
- data/ext/v8/upstream/scons/engine/SCons/Scanner/C.py +132 -0
- data/ext/v8/upstream/scons/engine/SCons/Scanner/D.py +74 -0
- data/ext/v8/upstream/scons/engine/SCons/Scanner/Dir.py +111 -0
- data/ext/v8/upstream/scons/engine/SCons/Scanner/Fortran.py +320 -0
- data/ext/v8/upstream/scons/engine/SCons/Scanner/IDL.py +48 -0
- data/ext/v8/upstream/scons/engine/SCons/Scanner/LaTeX.py +378 -0
- data/ext/v8/upstream/scons/engine/SCons/Scanner/Prog.py +103 -0
- data/ext/v8/upstream/scons/engine/SCons/Scanner/RC.py +55 -0
- data/ext/v8/upstream/scons/engine/SCons/Scanner/__init__.py +415 -0
- data/ext/v8/upstream/scons/engine/SCons/Script/Interactive.py +386 -0
- data/ext/v8/upstream/scons/engine/SCons/Script/Main.py +1360 -0
- data/ext/v8/upstream/scons/engine/SCons/Script/SConsOptions.py +944 -0
- data/ext/v8/upstream/scons/engine/SCons/Script/SConscript.py +642 -0
- data/ext/v8/upstream/scons/engine/SCons/Script/__init__.py +414 -0
- data/ext/v8/upstream/scons/engine/SCons/Sig.py +63 -0
- data/ext/v8/upstream/scons/engine/SCons/Subst.py +911 -0
- data/ext/v8/upstream/scons/engine/SCons/Taskmaster.py +1030 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/386asm.py +61 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/BitKeeper.py +65 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/CVS.py +73 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/FortranCommon.py +247 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/JavaCommon.py +324 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/__init__.py +56 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/arch.py +61 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/common.py +210 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/netframework.py +84 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/sdk.py +321 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/vc.py +367 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/vs.py +497 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/Perforce.py +104 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/PharLapCommon.py +138 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/RCS.py +64 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/SCCS.py +64 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/Subversion.py +71 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/__init__.py +675 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/aixc++.py +82 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/aixcc.py +74 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/aixf77.py +80 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/aixlink.py +76 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/applelink.py +71 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/ar.py +63 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/as.py +78 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/bcc32.py +82 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/c++.py +99 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/cc.py +114 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/cvf.py +58 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/default.py +50 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/dmd.py +224 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/dvi.py +64 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/dvipdf.py +125 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/dvips.py +94 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/f77.py +62 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/f90.py +62 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/f95.py +63 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/filesystem.py +98 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/fortran.py +63 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/g++.py +90 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/g77.py +73 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/gas.py +53 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/gcc.py +80 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/gfortran.py +64 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/gnulink.py +63 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/gs.py +81 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/hpc++.py +85 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/hpcc.py +53 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/hplink.py +77 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/icc.py +59 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/icl.py +52 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/ifl.py +72 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/ifort.py +90 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/ilink.py +59 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/ilink32.py +60 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/install.py +229 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/intelc.py +490 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/ipkg.py +71 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/jar.py +110 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/javac.py +234 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/javah.py +138 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/latex.py +79 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/lex.py +99 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/link.py +121 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/linkloc.py +112 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/m4.py +63 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/masm.py +77 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/midl.py +90 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/mingw.py +159 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/mslib.py +64 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/mslink.py +266 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/mssdk.py +50 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/msvc.py +269 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/msvs.py +1439 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/mwcc.py +208 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/mwld.py +107 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/nasm.py +72 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/__init__.py +314 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/ipk.py +185 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/msi.py +526 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/rpm.py +367 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/src_tarbz2.py +43 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/src_targz.py +43 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/src_zip.py +43 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/tarbz2.py +44 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/targz.py +44 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/zip.py +44 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/pdf.py +78 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/pdflatex.py +83 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/pdftex.py +108 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/qt.py +336 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/rmic.py +121 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/rpcgen.py +70 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/rpm.py +132 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/sgiar.py +68 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/sgic++.py +58 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/sgicc.py +53 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/sgilink.py +63 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/sunar.py +67 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/sunc++.py +142 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/suncc.py +58 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/sunf77.py +63 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/sunf90.py +64 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/sunf95.py +64 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/sunlink.py +77 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/swig.py +186 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/tar.py +73 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/tex.py +805 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/textfile.py +175 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/tlib.py +53 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/wix.py +100 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/yacc.py +131 -0
- data/ext/v8/upstream/scons/engine/SCons/Tool/zip.py +100 -0
- data/ext/v8/upstream/scons/engine/SCons/Util.py +1645 -0
- data/ext/v8/upstream/scons/engine/SCons/Variables/BoolVariable.py +91 -0
- data/ext/v8/upstream/scons/engine/SCons/Variables/EnumVariable.py +107 -0
- data/ext/v8/upstream/scons/engine/SCons/Variables/ListVariable.py +139 -0
- data/ext/v8/upstream/scons/engine/SCons/Variables/PackageVariable.py +109 -0
- data/ext/v8/upstream/scons/engine/SCons/Variables/PathVariable.py +147 -0
- data/ext/v8/upstream/scons/engine/SCons/Variables/__init__.py +317 -0
- data/ext/v8/upstream/scons/engine/SCons/Warnings.py +228 -0
- data/ext/v8/upstream/scons/engine/SCons/__init__.py +49 -0
- data/ext/v8/upstream/scons/engine/SCons/compat/__init__.py +302 -0
- data/ext/v8/upstream/scons/engine/SCons/compat/_scons_UserString.py +98 -0
- data/ext/v8/upstream/scons/engine/SCons/compat/_scons_hashlib.py +91 -0
- data/ext/v8/upstream/scons/engine/SCons/compat/_scons_itertools.py +124 -0
- data/ext/v8/upstream/scons/engine/SCons/compat/_scons_optparse.py +1725 -0
- data/ext/v8/upstream/scons/engine/SCons/compat/_scons_sets.py +583 -0
- data/ext/v8/upstream/scons/engine/SCons/compat/_scons_sets15.py +176 -0
- data/ext/v8/upstream/scons/engine/SCons/compat/_scons_shlex.py +325 -0
- data/ext/v8/upstream/scons/engine/SCons/compat/_scons_subprocess.py +1296 -0
- data/ext/v8/upstream/scons/engine/SCons/compat/_scons_textwrap.py +382 -0
- data/ext/v8/upstream/scons/engine/SCons/compat/builtins.py +187 -0
- data/ext/v8/upstream/scons/engine/SCons/cpp.py +598 -0
- data/ext/v8/upstream/scons/engine/SCons/dblite.py +248 -0
- data/ext/v8/upstream/scons/engine/SCons/exitfuncs.py +77 -0
- data/ext/v8/upstream/scons/os_spawnv_fix.diff +83 -0
- data/ext/v8/upstream/scons/scons-time.1 +1017 -0
- data/ext/v8/upstream/scons/scons.1 +15179 -0
- data/ext/v8/upstream/scons/sconsign.1 +208 -0
- data/ext/v8/upstream/scons/script/scons +184 -0
- data/ext/v8/upstream/scons/script/scons-time +1529 -0
- data/ext/v8/upstream/scons/script/scons.bat +31 -0
- data/ext/v8/upstream/scons/script/sconsign +508 -0
- data/ext/v8/upstream/scons/setup.cfg +6 -0
- data/ext/v8/upstream/scons/setup.py +427 -0
- data/ext/v8/v8.cpp +89 -0
- data/ext/v8/v8_cxt.cpp +92 -0
- data/ext/v8/v8_cxt.h +20 -0
- data/ext/v8/v8_func.cpp +10 -0
- data/ext/v8/v8_func.h +11 -0
- data/ext/v8/v8_msg.cpp +54 -0
- data/ext/v8/v8_msg.h +18 -0
- data/ext/v8/v8_obj.cpp +52 -0
- data/ext/v8/v8_obj.h +13 -0
- data/ext/v8/v8_ref.cpp +26 -0
- data/ext/v8/v8_ref.h +31 -0
- data/ext/v8/v8_script.cpp +20 -0
- data/ext/v8/v8_script.h +8 -0
- data/ext/v8/v8_standalone.cpp +69 -0
- data/ext/v8/v8_standalone.h +31 -0
- data/ext/v8/v8_str.cpp +17 -0
- data/ext/v8/v8_str.h +9 -0
- data/ext/v8/v8_template.cpp +53 -0
- data/ext/v8/v8_template.h +13 -0
- data/lib/v8.rb +10 -0
- data/lib/v8/context.rb +101 -0
- data/lib/v8/object.rb +38 -0
- data/lib/v8/to.rb +33 -0
- data/lib/v8/v8.so +0 -0
- data/script/console +10 -0
- data/script/destroy +14 -0
- data/script/generate +14 -0
- data/spec/ext/cxt_spec.rb +25 -0
- data/spec/ext/obj_spec.rb +13 -0
- data/spec/redjs/jsapi_spec.rb +405 -0
- data/spec/redjs/tap.rb +8 -0
- data/spec/redjs_helper.rb +3 -0
- data/spec/spec.opts +1 -0
- data/spec/spec_helper.rb +14 -0
- data/spec/v8/to_spec.rb +15 -0
- data/tasks/rspec.rake +21 -0
- data/therubyracer.gemspec +680 -0
- metadata +697 -0
@@ -0,0 +1,432 @@
|
|
1
|
+
// Copyright 2008 the V8 project authors. All rights reserved.
|
2
|
+
// Redistribution and use in source and binary forms, with or without
|
3
|
+
// modification, are permitted provided that the following conditions are
|
4
|
+
// met:
|
5
|
+
//
|
6
|
+
// * Redistributions of source code must retain the above copyright
|
7
|
+
// notice, this list of conditions and the following disclaimer.
|
8
|
+
// * Redistributions in binary form must reproduce the above
|
9
|
+
// copyright notice, this list of conditions and the following
|
10
|
+
// disclaimer in the documentation and/or other materials provided
|
11
|
+
// with the distribution.
|
12
|
+
// * Neither the name of Google Inc. nor the names of its
|
13
|
+
// contributors may be used to endorse or promote products derived
|
14
|
+
// from this software without specific prior written permission.
|
15
|
+
//
|
16
|
+
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
17
|
+
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
18
|
+
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
19
|
+
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
20
|
+
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
21
|
+
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
22
|
+
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
23
|
+
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
24
|
+
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
25
|
+
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
26
|
+
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
27
|
+
|
28
|
+
#include "v8.h"
|
29
|
+
|
30
|
+
#include "codegen-inl.h"
|
31
|
+
#include "jump-target-inl.h"
|
32
|
+
#include "register-allocator-inl.h"
|
33
|
+
|
34
|
+
namespace v8 {
|
35
|
+
namespace internal {
|
36
|
+
|
37
|
+
// -------------------------------------------------------------------------
|
38
|
+
// JumpTarget implementation.
|
39
|
+
|
40
|
+
#define __ ACCESS_MASM(cgen()->masm())
|
41
|
+
|
42
|
+
void JumpTarget::DoJump() {
|
43
|
+
ASSERT(cgen()->has_valid_frame());
|
44
|
+
// Live non-frame registers are not allowed at unconditional jumps
|
45
|
+
// because we have no way of invalidating the corresponding results
|
46
|
+
// which are still live in the C++ code.
|
47
|
+
ASSERT(cgen()->HasValidEntryRegisters());
|
48
|
+
|
49
|
+
if (is_bound()) {
|
50
|
+
// Backward jump. There is an expected frame to merge to.
|
51
|
+
ASSERT(direction_ == BIDIRECTIONAL);
|
52
|
+
cgen()->frame()->PrepareMergeTo(entry_frame_);
|
53
|
+
cgen()->frame()->MergeTo(entry_frame_);
|
54
|
+
cgen()->DeleteFrame();
|
55
|
+
__ jmp(&entry_label_);
|
56
|
+
} else if (entry_frame_ != NULL) {
|
57
|
+
// Forward jump with a preconfigured entry frame. Assert the
|
58
|
+
// current frame matches the expected one and jump to the block.
|
59
|
+
ASSERT(cgen()->frame()->Equals(entry_frame_));
|
60
|
+
cgen()->DeleteFrame();
|
61
|
+
__ jmp(&entry_label_);
|
62
|
+
} else {
|
63
|
+
// Forward jump. Remember the current frame and emit a jump to
|
64
|
+
// its merge code.
|
65
|
+
AddReachingFrame(cgen()->frame());
|
66
|
+
RegisterFile empty;
|
67
|
+
cgen()->SetFrame(NULL, &empty);
|
68
|
+
__ jmp(&merge_labels_.last());
|
69
|
+
}
|
70
|
+
}
|
71
|
+
|
72
|
+
|
73
|
+
void JumpTarget::DoBranch(Condition cc, Hint hint) {
|
74
|
+
ASSERT(cgen() != NULL);
|
75
|
+
ASSERT(cgen()->has_valid_frame());
|
76
|
+
|
77
|
+
if (is_bound()) {
|
78
|
+
ASSERT(direction_ == BIDIRECTIONAL);
|
79
|
+
// Backward branch. We have an expected frame to merge to on the
|
80
|
+
// backward edge.
|
81
|
+
|
82
|
+
// Swap the current frame for a copy (we do the swapping to get
|
83
|
+
// the off-frame registers off the fall through) to use for the
|
84
|
+
// branch.
|
85
|
+
VirtualFrame* fall_through_frame = cgen()->frame();
|
86
|
+
VirtualFrame* branch_frame = new VirtualFrame(fall_through_frame);
|
87
|
+
RegisterFile non_frame_registers;
|
88
|
+
cgen()->SetFrame(branch_frame, &non_frame_registers);
|
89
|
+
|
90
|
+
// Check if we can avoid merge code.
|
91
|
+
cgen()->frame()->PrepareMergeTo(entry_frame_);
|
92
|
+
if (cgen()->frame()->Equals(entry_frame_)) {
|
93
|
+
// Branch right in to the block.
|
94
|
+
cgen()->DeleteFrame();
|
95
|
+
__ j(cc, &entry_label_, hint);
|
96
|
+
cgen()->SetFrame(fall_through_frame, &non_frame_registers);
|
97
|
+
return;
|
98
|
+
}
|
99
|
+
|
100
|
+
// Check if we can reuse existing merge code.
|
101
|
+
for (int i = 0; i < reaching_frames_.length(); i++) {
|
102
|
+
if (reaching_frames_[i] != NULL &&
|
103
|
+
cgen()->frame()->Equals(reaching_frames_[i])) {
|
104
|
+
// Branch to the merge code.
|
105
|
+
cgen()->DeleteFrame();
|
106
|
+
__ j(cc, &merge_labels_[i], hint);
|
107
|
+
cgen()->SetFrame(fall_through_frame, &non_frame_registers);
|
108
|
+
return;
|
109
|
+
}
|
110
|
+
}
|
111
|
+
|
112
|
+
// To emit the merge code here, we negate the condition and branch
|
113
|
+
// around the merge code on the fall through path.
|
114
|
+
Label original_fall_through;
|
115
|
+
__ j(NegateCondition(cc), &original_fall_through, NegateHint(hint));
|
116
|
+
cgen()->frame()->MergeTo(entry_frame_);
|
117
|
+
cgen()->DeleteFrame();
|
118
|
+
__ jmp(&entry_label_);
|
119
|
+
cgen()->SetFrame(fall_through_frame, &non_frame_registers);
|
120
|
+
__ bind(&original_fall_through);
|
121
|
+
|
122
|
+
} else if (entry_frame_ != NULL) {
|
123
|
+
// Forward branch with a preconfigured entry frame. Assert the
|
124
|
+
// current frame matches the expected one and branch to the block.
|
125
|
+
ASSERT(cgen()->frame()->Equals(entry_frame_));
|
126
|
+
// Explicitly use the macro assembler instead of __ as forward
|
127
|
+
// branches are expected to be a fixed size (no inserted
|
128
|
+
// coverage-checking instructions please). This is used in
|
129
|
+
// Reference::GetValue.
|
130
|
+
cgen()->masm()->j(cc, &entry_label_, hint);
|
131
|
+
|
132
|
+
} else {
|
133
|
+
// Forward branch. A copy of the current frame is remembered and
|
134
|
+
// a branch to the merge code is emitted. Explicitly use the
|
135
|
+
// macro assembler instead of __ as forward branches are expected
|
136
|
+
// to be a fixed size (no inserted coverage-checking instructions
|
137
|
+
// please). This is used in Reference::GetValue.
|
138
|
+
AddReachingFrame(new VirtualFrame(cgen()->frame()));
|
139
|
+
cgen()->masm()->j(cc, &merge_labels_.last(), hint);
|
140
|
+
}
|
141
|
+
}
|
142
|
+
|
143
|
+
|
144
|
+
void JumpTarget::Call() {
|
145
|
+
// Call is used to push the address of the catch block on the stack as
|
146
|
+
// a return address when compiling try/catch and try/finally. We
|
147
|
+
// fully spill the frame before making the call. The expected frame
|
148
|
+
// at the label (which should be the only one) is the spilled current
|
149
|
+
// frame plus an in-memory return address. The "fall-through" frame
|
150
|
+
// at the return site is the spilled current frame.
|
151
|
+
ASSERT(cgen() != NULL);
|
152
|
+
ASSERT(cgen()->has_valid_frame());
|
153
|
+
// There are no non-frame references across the call.
|
154
|
+
ASSERT(cgen()->HasValidEntryRegisters());
|
155
|
+
ASSERT(!is_linked());
|
156
|
+
|
157
|
+
cgen()->frame()->SpillAll();
|
158
|
+
VirtualFrame* target_frame = new VirtualFrame(cgen()->frame());
|
159
|
+
target_frame->Adjust(1);
|
160
|
+
// We do not expect a call with a preconfigured entry frame.
|
161
|
+
ASSERT(entry_frame_ == NULL);
|
162
|
+
AddReachingFrame(target_frame);
|
163
|
+
__ call(&merge_labels_.last());
|
164
|
+
}
|
165
|
+
|
166
|
+
|
167
|
+
void JumpTarget::DoBind() {
|
168
|
+
ASSERT(cgen() != NULL);
|
169
|
+
ASSERT(!is_bound());
|
170
|
+
|
171
|
+
// Live non-frame registers are not allowed at the start of a basic
|
172
|
+
// block.
|
173
|
+
ASSERT(!cgen()->has_valid_frame() || cgen()->HasValidEntryRegisters());
|
174
|
+
|
175
|
+
// Fast case: the jump target was manually configured with an entry
|
176
|
+
// frame to use.
|
177
|
+
if (entry_frame_ != NULL) {
|
178
|
+
// Assert no reaching frames to deal with.
|
179
|
+
ASSERT(reaching_frames_.is_empty());
|
180
|
+
ASSERT(!cgen()->has_valid_frame());
|
181
|
+
|
182
|
+
RegisterFile empty;
|
183
|
+
if (direction_ == BIDIRECTIONAL) {
|
184
|
+
// Copy the entry frame so the original can be used for a
|
185
|
+
// possible backward jump.
|
186
|
+
cgen()->SetFrame(new VirtualFrame(entry_frame_), &empty);
|
187
|
+
} else {
|
188
|
+
// Take ownership of the entry frame.
|
189
|
+
cgen()->SetFrame(entry_frame_, &empty);
|
190
|
+
entry_frame_ = NULL;
|
191
|
+
}
|
192
|
+
__ bind(&entry_label_);
|
193
|
+
return;
|
194
|
+
}
|
195
|
+
|
196
|
+
if (!is_linked()) {
|
197
|
+
ASSERT(cgen()->has_valid_frame());
|
198
|
+
if (direction_ == FORWARD_ONLY) {
|
199
|
+
// Fast case: no forward jumps and no possible backward jumps.
|
200
|
+
// The stack pointer can be floating above the top of the
|
201
|
+
// virtual frame before the bind. Afterward, it should not.
|
202
|
+
VirtualFrame* frame = cgen()->frame();
|
203
|
+
int difference = frame->stack_pointer_ - (frame->element_count() - 1);
|
204
|
+
if (difference > 0) {
|
205
|
+
frame->stack_pointer_ -= difference;
|
206
|
+
__ add(Operand(esp), Immediate(difference * kPointerSize));
|
207
|
+
}
|
208
|
+
} else {
|
209
|
+
ASSERT(direction_ == BIDIRECTIONAL);
|
210
|
+
// Fast case: no forward jumps, possible backward ones. Remove
|
211
|
+
// constants and copies above the watermark on the fall-through
|
212
|
+
// frame and use it as the entry frame.
|
213
|
+
cgen()->frame()->MakeMergable();
|
214
|
+
entry_frame_ = new VirtualFrame(cgen()->frame());
|
215
|
+
}
|
216
|
+
__ bind(&entry_label_);
|
217
|
+
return;
|
218
|
+
}
|
219
|
+
|
220
|
+
if (direction_ == FORWARD_ONLY &&
|
221
|
+
!cgen()->has_valid_frame() &&
|
222
|
+
reaching_frames_.length() == 1) {
|
223
|
+
// Fast case: no fall-through, a single forward jump, and no
|
224
|
+
// possible backward jumps. Pick up the only reaching frame, take
|
225
|
+
// ownership of it, and use it for the block about to be emitted.
|
226
|
+
VirtualFrame* frame = reaching_frames_[0];
|
227
|
+
RegisterFile empty;
|
228
|
+
cgen()->SetFrame(frame, &empty);
|
229
|
+
reaching_frames_[0] = NULL;
|
230
|
+
__ bind(&merge_labels_[0]);
|
231
|
+
|
232
|
+
// The stack pointer can be floating above the top of the
|
233
|
+
// virtual frame before the bind. Afterward, it should not.
|
234
|
+
int difference = frame->stack_pointer_ - (frame->element_count() - 1);
|
235
|
+
if (difference > 0) {
|
236
|
+
frame->stack_pointer_ -= difference;
|
237
|
+
__ add(Operand(esp), Immediate(difference * kPointerSize));
|
238
|
+
}
|
239
|
+
|
240
|
+
__ bind(&entry_label_);
|
241
|
+
return;
|
242
|
+
}
|
243
|
+
|
244
|
+
// If there is a current frame, record it as the fall-through. It
|
245
|
+
// is owned by the reaching frames for now.
|
246
|
+
bool had_fall_through = false;
|
247
|
+
if (cgen()->has_valid_frame()) {
|
248
|
+
had_fall_through = true;
|
249
|
+
AddReachingFrame(cgen()->frame()); // Return value ignored.
|
250
|
+
RegisterFile empty;
|
251
|
+
cgen()->SetFrame(NULL, &empty);
|
252
|
+
}
|
253
|
+
|
254
|
+
// Compute the frame to use for entry to the block.
|
255
|
+
ComputeEntryFrame();
|
256
|
+
|
257
|
+
// Some moves required to merge to an expected frame require purely
|
258
|
+
// frame state changes, and do not require any code generation.
|
259
|
+
// Perform those first to increase the possibility of finding equal
|
260
|
+
// frames below.
|
261
|
+
for (int i = 0; i < reaching_frames_.length(); i++) {
|
262
|
+
if (reaching_frames_[i] != NULL) {
|
263
|
+
reaching_frames_[i]->PrepareMergeTo(entry_frame_);
|
264
|
+
}
|
265
|
+
}
|
266
|
+
|
267
|
+
if (is_linked()) {
|
268
|
+
// There were forward jumps. Handle merging the reaching frames
|
269
|
+
// to the entry frame.
|
270
|
+
|
271
|
+
// Loop over the (non-null) reaching frames and process any that
|
272
|
+
// need merge code. Iterate backwards through the list to handle
|
273
|
+
// the fall-through frame first. Set frames that will be
|
274
|
+
// processed after 'i' to NULL if we want to avoid processing
|
275
|
+
// them.
|
276
|
+
for (int i = reaching_frames_.length() - 1; i >= 0; i--) {
|
277
|
+
VirtualFrame* frame = reaching_frames_[i];
|
278
|
+
|
279
|
+
if (frame != NULL) {
|
280
|
+
// Does the frame (probably) need merge code?
|
281
|
+
if (!frame->Equals(entry_frame_)) {
|
282
|
+
// We could have a valid frame as the fall through to the
|
283
|
+
// binding site or as the fall through from a previous merge
|
284
|
+
// code block. Jump around the code we are about to
|
285
|
+
// generate.
|
286
|
+
if (cgen()->has_valid_frame()) {
|
287
|
+
cgen()->DeleteFrame();
|
288
|
+
__ jmp(&entry_label_);
|
289
|
+
}
|
290
|
+
// Pick up the frame for this block. Assume ownership if
|
291
|
+
// there cannot be backward jumps.
|
292
|
+
RegisterFile empty;
|
293
|
+
if (direction_ == BIDIRECTIONAL) {
|
294
|
+
cgen()->SetFrame(new VirtualFrame(frame), &empty);
|
295
|
+
} else {
|
296
|
+
cgen()->SetFrame(frame, &empty);
|
297
|
+
reaching_frames_[i] = NULL;
|
298
|
+
}
|
299
|
+
__ bind(&merge_labels_[i]);
|
300
|
+
|
301
|
+
// Loop over the remaining (non-null) reaching frames,
|
302
|
+
// looking for any that can share merge code with this one.
|
303
|
+
for (int j = 0; j < i; j++) {
|
304
|
+
VirtualFrame* other = reaching_frames_[j];
|
305
|
+
if (other != NULL && other->Equals(cgen()->frame())) {
|
306
|
+
// Set the reaching frame element to null to avoid
|
307
|
+
// processing it later, and then bind its entry label.
|
308
|
+
reaching_frames_[j] = NULL;
|
309
|
+
__ bind(&merge_labels_[j]);
|
310
|
+
}
|
311
|
+
}
|
312
|
+
|
313
|
+
// Emit the merge code.
|
314
|
+
cgen()->frame()->MergeTo(entry_frame_);
|
315
|
+
} else if (i == reaching_frames_.length() - 1 && had_fall_through) {
|
316
|
+
// If this is the fall through frame, and it didn't need
|
317
|
+
// merge code, we need to pick up the frame so we can jump
|
318
|
+
// around subsequent merge blocks if necessary.
|
319
|
+
RegisterFile empty;
|
320
|
+
cgen()->SetFrame(frame, &empty);
|
321
|
+
reaching_frames_[i] = NULL;
|
322
|
+
}
|
323
|
+
}
|
324
|
+
}
|
325
|
+
|
326
|
+
// The code generator may not have a current frame if there was no
|
327
|
+
// fall through and none of the reaching frames needed merging.
|
328
|
+
// In that case, clone the entry frame as the current frame.
|
329
|
+
if (!cgen()->has_valid_frame()) {
|
330
|
+
RegisterFile empty;
|
331
|
+
cgen()->SetFrame(new VirtualFrame(entry_frame_), &empty);
|
332
|
+
}
|
333
|
+
|
334
|
+
// There may be unprocessed reaching frames that did not need
|
335
|
+
// merge code. They will have unbound merge labels. Bind their
|
336
|
+
// merge labels to be the same as the entry label and deallocate
|
337
|
+
// them.
|
338
|
+
for (int i = 0; i < reaching_frames_.length(); i++) {
|
339
|
+
if (!merge_labels_[i].is_bound()) {
|
340
|
+
reaching_frames_[i] = NULL;
|
341
|
+
__ bind(&merge_labels_[i]);
|
342
|
+
}
|
343
|
+
}
|
344
|
+
|
345
|
+
// There are non-NULL reaching frames with bound labels for each
|
346
|
+
// merge block, but only on backward targets.
|
347
|
+
} else {
|
348
|
+
// There were no forward jumps. There must be a current frame and
|
349
|
+
// this must be a bidirectional target.
|
350
|
+
ASSERT(reaching_frames_.length() == 1);
|
351
|
+
ASSERT(reaching_frames_[0] != NULL);
|
352
|
+
ASSERT(direction_ == BIDIRECTIONAL);
|
353
|
+
|
354
|
+
// Use a copy of the reaching frame so the original can be saved
|
355
|
+
// for possible reuse as a backward merge block.
|
356
|
+
RegisterFile empty;
|
357
|
+
cgen()->SetFrame(new VirtualFrame(reaching_frames_[0]), &empty);
|
358
|
+
__ bind(&merge_labels_[0]);
|
359
|
+
cgen()->frame()->MergeTo(entry_frame_);
|
360
|
+
}
|
361
|
+
|
362
|
+
__ bind(&entry_label_);
|
363
|
+
}
|
364
|
+
|
365
|
+
|
366
|
+
void BreakTarget::Jump() {
|
367
|
+
// Drop leftover statement state from the frame before merging, without
|
368
|
+
// emitting code.
|
369
|
+
ASSERT(cgen()->has_valid_frame());
|
370
|
+
int count = cgen()->frame()->height() - expected_height_;
|
371
|
+
cgen()->frame()->ForgetElements(count);
|
372
|
+
DoJump();
|
373
|
+
}
|
374
|
+
|
375
|
+
|
376
|
+
void BreakTarget::Jump(Result* arg) {
|
377
|
+
// Drop leftover statement state from the frame before merging, without
|
378
|
+
// emitting code.
|
379
|
+
ASSERT(cgen()->has_valid_frame());
|
380
|
+
int count = cgen()->frame()->height() - expected_height_;
|
381
|
+
cgen()->frame()->ForgetElements(count);
|
382
|
+
cgen()->frame()->Push(arg);
|
383
|
+
DoJump();
|
384
|
+
}
|
385
|
+
|
386
|
+
|
387
|
+
void BreakTarget::Bind() {
|
388
|
+
#ifdef DEBUG
|
389
|
+
// All the forward-reaching frames should have been adjusted at the
|
390
|
+
// jumps to this target.
|
391
|
+
for (int i = 0; i < reaching_frames_.length(); i++) {
|
392
|
+
ASSERT(reaching_frames_[i] == NULL ||
|
393
|
+
reaching_frames_[i]->height() == expected_height_);
|
394
|
+
}
|
395
|
+
#endif
|
396
|
+
// Drop leftover statement state from the frame before merging, even on
|
397
|
+
// the fall through. This is so we can bind the return target with state
|
398
|
+
// on the frame.
|
399
|
+
if (cgen()->has_valid_frame()) {
|
400
|
+
int count = cgen()->frame()->height() - expected_height_;
|
401
|
+
cgen()->frame()->ForgetElements(count);
|
402
|
+
}
|
403
|
+
DoBind();
|
404
|
+
}
|
405
|
+
|
406
|
+
|
407
|
+
void BreakTarget::Bind(Result* arg) {
|
408
|
+
#ifdef DEBUG
|
409
|
+
// All the forward-reaching frames should have been adjusted at the
|
410
|
+
// jumps to this target.
|
411
|
+
for (int i = 0; i < reaching_frames_.length(); i++) {
|
412
|
+
ASSERT(reaching_frames_[i] == NULL ||
|
413
|
+
reaching_frames_[i]->height() == expected_height_ + 1);
|
414
|
+
}
|
415
|
+
#endif
|
416
|
+
// Drop leftover statement state from the frame before merging, even on
|
417
|
+
// the fall through. This is so we can bind the return target with state
|
418
|
+
// on the frame.
|
419
|
+
if (cgen()->has_valid_frame()) {
|
420
|
+
int count = cgen()->frame()->height() - expected_height_;
|
421
|
+
cgen()->frame()->ForgetElements(count);
|
422
|
+
cgen()->frame()->Push(arg);
|
423
|
+
}
|
424
|
+
DoBind();
|
425
|
+
*arg = cgen()->frame()->Pop();
|
426
|
+
}
|
427
|
+
|
428
|
+
|
429
|
+
#undef __
|
430
|
+
|
431
|
+
|
432
|
+
} } // namespace v8::internal
|
@@ -0,0 +1,1517 @@
|
|
1
|
+
// Copyright 2006-2009 the V8 project authors. All rights reserved.
|
2
|
+
// Redistribution and use in source and binary forms, with or without
|
3
|
+
// modification, are permitted provided that the following conditions are
|
4
|
+
// met:
|
5
|
+
//
|
6
|
+
// * Redistributions of source code must retain the above copyright
|
7
|
+
// notice, this list of conditions and the following disclaimer.
|
8
|
+
// * Redistributions in binary form must reproduce the above
|
9
|
+
// copyright notice, this list of conditions and the following
|
10
|
+
// disclaimer in the documentation and/or other materials provided
|
11
|
+
// with the distribution.
|
12
|
+
// * Neither the name of Google Inc. nor the names of its
|
13
|
+
// contributors may be used to endorse or promote products derived
|
14
|
+
// from this software without specific prior written permission.
|
15
|
+
//
|
16
|
+
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
17
|
+
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
18
|
+
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
19
|
+
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
20
|
+
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
21
|
+
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
22
|
+
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
23
|
+
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
24
|
+
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
25
|
+
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
26
|
+
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
27
|
+
|
28
|
+
#include "v8.h"
|
29
|
+
|
30
|
+
#include "bootstrapper.h"
|
31
|
+
#include "codegen-inl.h"
|
32
|
+
#include "debug.h"
|
33
|
+
#include "runtime.h"
|
34
|
+
#include "serialize.h"
|
35
|
+
|
36
|
+
namespace v8 {
|
37
|
+
namespace internal {
|
38
|
+
|
39
|
+
// -------------------------------------------------------------------------
|
40
|
+
// MacroAssembler implementation.
|
41
|
+
|
42
|
+
MacroAssembler::MacroAssembler(void* buffer, int size)
|
43
|
+
: Assembler(buffer, size),
|
44
|
+
unresolved_(0),
|
45
|
+
generating_stub_(false),
|
46
|
+
allow_stub_calls_(true),
|
47
|
+
code_object_(Heap::undefined_value()) {
|
48
|
+
}
|
49
|
+
|
50
|
+
|
51
|
+
static void RecordWriteHelper(MacroAssembler* masm,
|
52
|
+
Register object,
|
53
|
+
Register addr,
|
54
|
+
Register scratch) {
|
55
|
+
Label fast;
|
56
|
+
|
57
|
+
// Compute the page start address from the heap object pointer, and reuse
|
58
|
+
// the 'object' register for it.
|
59
|
+
masm->and_(object, ~Page::kPageAlignmentMask);
|
60
|
+
Register page_start = object;
|
61
|
+
|
62
|
+
// Compute the bit addr in the remembered set/index of the pointer in the
|
63
|
+
// page. Reuse 'addr' as pointer_offset.
|
64
|
+
masm->sub(addr, Operand(page_start));
|
65
|
+
masm->shr(addr, kObjectAlignmentBits);
|
66
|
+
Register pointer_offset = addr;
|
67
|
+
|
68
|
+
// If the bit offset lies beyond the normal remembered set range, it is in
|
69
|
+
// the extra remembered set area of a large object.
|
70
|
+
masm->cmp(pointer_offset, Page::kPageSize / kPointerSize);
|
71
|
+
masm->j(less, &fast);
|
72
|
+
|
73
|
+
// Adjust 'page_start' so that addressing using 'pointer_offset' hits the
|
74
|
+
// extra remembered set after the large object.
|
75
|
+
|
76
|
+
// Find the length of the large object (FixedArray).
|
77
|
+
masm->mov(scratch, Operand(page_start, Page::kObjectStartOffset
|
78
|
+
+ FixedArray::kLengthOffset));
|
79
|
+
Register array_length = scratch;
|
80
|
+
|
81
|
+
// Extra remembered set starts right after the large object (a FixedArray), at
|
82
|
+
// page_start + kObjectStartOffset + objectSize
|
83
|
+
// where objectSize is FixedArray::kHeaderSize + kPointerSize * array_length.
|
84
|
+
// Add the delta between the end of the normal RSet and the start of the
|
85
|
+
// extra RSet to 'page_start', so that addressing the bit using
|
86
|
+
// 'pointer_offset' hits the extra RSet words.
|
87
|
+
masm->lea(page_start,
|
88
|
+
Operand(page_start, array_length, times_pointer_size,
|
89
|
+
Page::kObjectStartOffset + FixedArray::kHeaderSize
|
90
|
+
- Page::kRSetEndOffset));
|
91
|
+
|
92
|
+
// NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
|
93
|
+
// to limit code size. We should probably evaluate this decision by
|
94
|
+
// measuring the performance of an equivalent implementation using
|
95
|
+
// "simpler" instructions
|
96
|
+
masm->bind(&fast);
|
97
|
+
masm->bts(Operand(page_start, Page::kRSetOffset), pointer_offset);
|
98
|
+
}
|
99
|
+
|
100
|
+
|
101
|
+
class RecordWriteStub : public CodeStub {
|
102
|
+
public:
|
103
|
+
RecordWriteStub(Register object, Register addr, Register scratch)
|
104
|
+
: object_(object), addr_(addr), scratch_(scratch) { }
|
105
|
+
|
106
|
+
void Generate(MacroAssembler* masm);
|
107
|
+
|
108
|
+
private:
|
109
|
+
Register object_;
|
110
|
+
Register addr_;
|
111
|
+
Register scratch_;
|
112
|
+
|
113
|
+
#ifdef DEBUG
|
114
|
+
void Print() {
|
115
|
+
PrintF("RecordWriteStub (object reg %d), (addr reg %d), (scratch reg %d)\n",
|
116
|
+
object_.code(), addr_.code(), scratch_.code());
|
117
|
+
}
|
118
|
+
#endif
|
119
|
+
|
120
|
+
// Minor key encoding in 12 bits of three registers (object, address and
|
121
|
+
// scratch) OOOOAAAASSSS.
|
122
|
+
class ScratchBits: public BitField<uint32_t, 0, 4> {};
|
123
|
+
class AddressBits: public BitField<uint32_t, 4, 4> {};
|
124
|
+
class ObjectBits: public BitField<uint32_t, 8, 4> {};
|
125
|
+
|
126
|
+
Major MajorKey() { return RecordWrite; }
|
127
|
+
|
128
|
+
int MinorKey() {
|
129
|
+
// Encode the registers.
|
130
|
+
return ObjectBits::encode(object_.code()) |
|
131
|
+
AddressBits::encode(addr_.code()) |
|
132
|
+
ScratchBits::encode(scratch_.code());
|
133
|
+
}
|
134
|
+
};
|
135
|
+
|
136
|
+
|
137
|
+
void RecordWriteStub::Generate(MacroAssembler* masm) {
|
138
|
+
RecordWriteHelper(masm, object_, addr_, scratch_);
|
139
|
+
masm->ret(0);
|
140
|
+
}
|
141
|
+
|
142
|
+
|
143
|
+
// Set the remembered set bit for [object+offset].
// object is the object being stored into, value is the object being stored.
// If offset is zero, then the scratch register contains the array index into
// the elements array represented as a Smi.
// All registers are clobbered by the operation.
void MacroAssembler::RecordWrite(Register object, int offset,
                                 Register value, Register scratch) {
  // First, check if a remembered set write is even needed. The tests below
  // catch stores of Smis and stores into young gen (which does not have space
  // for the remembered set bits).
  Label done;

  // Skip barrier if writing a smi.
  ASSERT_EQ(0, kSmiTag);
  test(value, Immediate(kSmiTagMask));
  j(zero, &done);

  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    mov(value, Operand(object));
    and_(value, Heap::NewSpaceMask());
    cmp(Operand(value), Immediate(ExternalReference::new_space_start()));
    j(equal, &done);
  } else {
    int32_t new_space_start = reinterpret_cast<int32_t>(
        ExternalReference::new_space_start().address());
    lea(value, Operand(object, -new_space_start));
    and_(value, Heap::NewSpaceMask());
    // and_ sets the zero flag: a zero result means the address lies in new
    // space, so no remembered set update is needed.
    j(equal, &done);
  }

  if ((offset > 0) && (offset < Page::kMaxHeapObjectSize)) {
    // Compute the bit offset in the remembered set, leave it in 'value'.
    lea(value, Operand(object, offset));
    and_(value, Page::kPageAlignmentMask);
    shr(value, kPointerSizeLog2);

    // Compute the page address from the heap object pointer, leave it in
    // 'object'.
    and_(object, ~Page::kPageAlignmentMask);

    // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
    // to limit code size. We should probably evaluate this decision by
    // measuring the performance of an equivalent implementation using
    // "simpler" instructions
    bts(Operand(object, Page::kRSetOffset), value);
  } else {
    Register dst = scratch;
    if (offset != 0) {
      lea(dst, Operand(object, offset));
    } else {
      // array access: calculate the destination address in the same manner as
      // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
      // into an array of words.
      ASSERT_EQ(1, kSmiTagSize);
      ASSERT_EQ(0, kSmiTag);
      lea(dst, Operand(object, dst, times_half_pointer_size,
                       FixedArray::kHeaderSize - kHeapObjectTag));
    }
    // If we are already generating a shared stub, not inlining the
    // record write code isn't going to save us any memory.
    if (generating_stub()) {
      RecordWriteHelper(this, object, dst, value);
    } else {
      RecordWriteStub stub(object, dst, value);
      CallStub(&stub);
    }
  }

  bind(&done);
}
|
214
|
+
|
215
|
+
|
216
|
+
// Compare esp against the VM's stack limit and branch to on_stack_overflow
// when the stack pointer has dropped below it.
void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  ExternalReference stack_limit = ExternalReference::address_of_stack_limit();
  cmp(esp, Operand::StaticVariable(stack_limit));
  j(below, on_stack_overflow);
}
|
221
|
+
|
222
|
+
|
223
|
+
#ifdef ENABLE_DEBUGGER_SUPPORT
|
224
|
+
// Store every JS caller-saved register selected in regs to its dedicated
// debugger memory slot (Debug_Address::Register(i)). Used by the debugger
// to snapshot register state.
void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of registers to memory location.
  for (int index = 0; index < kNumJSCallerSaved; index++) {
    int code = JSCallerSavedCode(index);
    if ((regs & (1 << code)) == 0) continue;  // register not requested
    Register reg = { code };
    ExternalReference reg_addr =
        ExternalReference(Debug_Address::Register(index));
    mov(Operand::StaticVariable(reg_addr), reg);
  }
}
|
237
|
+
|
238
|
+
|
239
|
+
// Inverse of SaveRegistersToMemory: reload each JS caller-saved register
// selected in regs from its debugger memory slot. Iterates in reverse order
// to mirror the save order.
void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of memory location to registers.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(reg, Operand::StaticVariable(reg_addr));
    }
  }
}
|
252
|
+
|
253
|
+
|
254
|
+
// Push the debugger memory slot contents for each JS caller-saved register
// selected in regs onto the stack (forward order).
void MacroAssembler::PushRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Push the content of the memory location to the stack.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      push(Operand::StaticVariable(reg_addr));
    }
  }
}
|
266
|
+
|
267
|
+
|
268
|
+
// Inverse of PushRegistersFromMemory: pop stack values back into the
// debugger memory slots, in reverse order to match the pushes.
void MacroAssembler::PopRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Pop the content from the stack to the memory location.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      pop(Operand::StaticVariable(reg_addr));
    }
  }
}
|
280
|
+
|
281
|
+
|
282
|
+
// Copy register values that were spilled on the stack (starting at base)
// into the debugger memory slots, advancing base one word per copied
// register. scratch is clobbered as a transfer register.
void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the stack to the memory location and adjust base.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(scratch, Operand(base, 0));
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), scratch);
      lea(base, Operand(base, kPointerSize));
    }
  }
}
|
298
|
+
#endif
|
299
|
+
|
300
|
+
// Load the immediate x into dst. Zero is materialized with xor, which has
// a shorter encoding than mov reg, imm32.
void MacroAssembler::Set(Register dst, const Immediate& x) {
  if (!x.is_zero()) {
    mov(dst, x);
  } else {
    xor_(dst, Operand(dst));  // shorter than mov
  }
}
|
307
|
+
|
308
|
+
|
309
|
+
// Store the immediate x to the memory operand dst. No xor shortcut is
// possible for a memory destination.
void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}
|
312
|
+
|
313
|
+
|
314
|
+
// Load heap_object's map into 'map' and compare its instance type against
// 'type'. Leaves the condition flags set for the caller to branch on.
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
|
320
|
+
|
321
|
+
|
322
|
+
// Compare the instance-type byte stored in the given map against 'type'.
// Sets the condition flags; byte-sized compare since instance types fit
// in 8 bits.
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}
|
326
|
+
|
327
|
+
|
328
|
+
// Load heap_object's map and instance type, test the not-string mask, and
// return the condition (zero) under which the object is a string. Caller
// branches on the returned condition; map and instance_type are clobbered.
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  // String types must have a zero kIsNotStringMask bit.
  ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}
|
337
|
+
|
338
|
+
|
339
|
+
// Compare the two values on top of the FPU stack and move the result into
// the EFLAGS register, popping both operands. Uses fucomip when CMOV (and
// hence fcomi-family instructions) is available; otherwise falls back to
// fnstsw/sahf, preserving eax around the status-word transfer.
void MacroAssembler::FCmp() {
  if (CpuFeatures::IsSupported(CMOV)) {
    fucomip();
    // fucomip pops only one operand; free and pop the remaining one.
    ffree(0);
    fincstp();
  } else {
    fucompp();
    push(eax);
    fnstsw_ax();
    sahf();
    pop(eax);
  }
}
|
352
|
+
|
353
|
+
|
354
|
+
// Build a standard internal frame: saved ebp, context (esi), a Smi frame
// type marker, and the code object. The code object slot is patched later;
// in debug builds we verify it is not left as undefined.
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, Operand(esp));
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (FLAG_debug_code) {
    cmp(Operand(esp, 0), Immediate(Factory::undefined_value()));
    Check(not_equal, "code object not properly patched");
  }
}
|
365
|
+
|
366
|
+
|
367
|
+
// Tear down a frame built by EnterFrame. In debug builds, first verify the
// frame marker matches the expected type so mismatched enter/leave pairs
// are caught early.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();  // mov esp, ebp; pop ebp
}
|
375
|
+
|
376
|
+
// First half of exit-frame construction: lay out the fixed part of the
// frame (saved ebp, entry-sp slot, code object or debug marker) and record
// the frame pointer and context in the Top thread-local state.
void MacroAssembler::EnterExitFramePrologue(ExitFrame::Mode mode) {
  // Setup the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, Operand(esp));

  // Reserve room for entry stack pointer and push the debug marker.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // saved entry sp, patched before call
  if (mode == ExitFrame::MODE_DEBUG) {
    push(Immediate(0));
  } else {
    push(Immediate(CodeObject()));
  }

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}
|
399
|
+
|
400
|
+
// Second half of exit-frame construction: optionally spill debugger
// registers, reserve argc argument slots, align esp to the OS frame
// alignment, and patch the saved entry sp slot in the frame.
void MacroAssembler::EnterExitFrameEpilogue(ExitFrame::Mode mode, int argc) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // TODO(1243899): This should be symmetric to
    // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
    // correct here, but computed for the other call. Very error
    // prone! FIX THIS. Actually there are deeper problems with
    // register saving than this asymmetry (see the bug report
    // associated with this issue).
    PushRegistersFromMemory(kJSCallerSaved);
  }
#endif

  // Reserve space for arguments.
  sub(Operand(esp), Immediate(argc * kPointerSize));

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    // Align esp downwards; power-of-two alignment lets us use a mask.
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}
|
428
|
+
|
429
|
+
|
430
|
+
// Enter an exit frame for a runtime call. On entry eax holds the argument
// count; edi receives argc and esi the address of the first argument
// (callee-saved registers, so they survive the C call).
void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode) {
  EnterExitFramePrologue(mode);

  // Setup argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, Operand(eax));
  lea(esi, Operand(ebp, eax, times_4, offset));

  EnterExitFrameEpilogue(mode, 2);
}
|
440
|
+
|
441
|
+
|
442
|
+
// Enter an exit frame for an API callback: like EnterExitFrame but the
// argument count (stack_space) is a compile-time constant, and argc slots
// are reserved for the outgoing C call.
void MacroAssembler::EnterApiExitFrame(ExitFrame::Mode mode,
                                       int stack_space,
                                       int argc) {
  EnterExitFramePrologue(mode);

  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(esi, Operand(ebp, (stack_space * kPointerSize) + offset));

  EnterExitFrameEpilogue(mode, argc);
}
|
452
|
+
|
453
|
+
|
454
|
+
// Tear down an exit frame: optionally restore debugger register state,
// pop the frame and caller arguments (esi still points at the first
// argument), restore the context from Top, and clear the c_entry_fp slot.
void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // It's okay to clobber register ebx below because we don't need
    // the function pointer after this.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize;
    lea(ebx, Operand(ebp, kOffset));
    CopyRegistersFromStackToMemory(ebx, ecx, kJSCallerSaved);
  }
#endif

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Push the return address to get ready to return.
  push(ecx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}
|
489
|
+
|
490
|
+
|
491
|
+
// Push a new stack handler (try frame) and link it into the handler chain
// held in Top::k_handler_address. The handler layout must match
// StackHandlerConstants: [next handler, state, ebp, pc], pc already pushed.
void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(ebp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for ebp. We expect the code throwing an exception to check ebp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler as the next handler.
  push(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  // Link this handler as the new current one.
  mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
}
|
516
|
+
|
517
|
+
|
518
|
+
// Unlink the current stack handler: restore the previous handler from the
// 'next' slot (which must be at offset 0) and drop the rest of the handler
// except the return address, which stays on top of the stack.
void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
}
|
523
|
+
|
524
|
+
|
525
|
+
// Generate map checks along the prototype chain from 'object' to 'holder',
// branching to 'miss' on any mismatch or failed global-proxy access check.
// Returns the register that ends up holding the holder object. scratch is
// clobbered; must not alias object_reg or holder_reg.
Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 1;

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // Get the map of the current object.
      mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      cmp(Operand(scratch), Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);

        // Restore scratch register to be the map of the object.
        // We load the prototype from the map in the scratch register.
        mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      reg = holder_reg;  // from now the object is in holder_reg
      mov(reg, FieldOperand(scratch, Map::kPrototypeOffset));

    } else {
      // Check the map of the current object.
      cmp(FieldOperand(reg, HeapObject::kMapOffset),
          Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // from now the object is in holder_reg
      mov(reg, Handle<JSObject>(prototype));
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  cmp(FieldOperand(reg, HeapObject::kMapOffset),
      Immediate(Handle<Map>(holder->map())));
  j(not_equal, miss, not_taken);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth));

  // Perform security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalProxy()) {
    CheckAccessGlobalProxy(reg, scratch, miss);
  }
  return reg;
}
|
606
|
+
|
607
|
+
|
608
|
+
// Verify that the current lexical context may access the global proxy in
// holder_reg: fast path when both share the same global context, otherwise
// compare security tokens. Branches to 'miss' on failure; scratch is
// clobbered and must differ from holder_reg.
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));

  // Load current lexical context from the stack frame.
  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    push(scratch);
    // Read the first word and compare to global_context_map.
    mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
    cmp(scratch, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(scratch);
  }

  // Check if both contexts are the same.
  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts, taken);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // TODO(119): avoid push(holder_reg)/pop(holder_reg)
  push(holder_reg);
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    cmp(holder_reg, Factory::null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    push(holder_reg);
    // Read the first word and compare to global_context_map(),
    mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, token_offset));
  cmp(scratch, FieldOperand(holder_reg, token_offset));
  pop(holder_reg);
  j(not_equal, miss, not_taken);

  bind(&same_contexts);
}
|
674
|
+
|
675
|
+
|
676
|
+
// Load the new-space allocation top into 'result'. If RESULT_CONTAINS_TOP
// is set, result already holds the top (verified in debug builds) and no
// load is emitted. When a scratch register is supplied it is left holding
// the address of the allocation-top variable for later reuse.
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register result_end,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(new_space_allocation_top));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(new_space_allocation_top));
  } else {
    ASSERT(!scratch.is(result_end));
    mov(Operand(scratch), Immediate(new_space_allocation_top));
    mov(result, Operand(scratch, 0));
  }
}
|
704
|
+
|
705
|
+
|
706
|
+
// Write the new allocation top (result_end) back to the new-space
// allocation-top variable. If scratch already holds the variable's address
// (set up by LoadAllocationTopHelper) store through it; otherwise store to
// the static variable directly. Debug builds verify object alignment.
void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (FLAG_debug_code) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(new_space_allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}
|
723
|
+
|
724
|
+
|
725
|
+
// Bump-pointer allocate object_size bytes in new space. On success 'result'
// holds the new object (tagged if TAG_OBJECT) and result_end the new top;
// jumps to gc_required if new space is exhausted.
void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, object_size));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
|
751
|
+
|
752
|
+
|
753
|
+
// Bump-pointer allocate a variable-sized object in new space, of
// header_size + element_count * element_size bytes (the size computed with
// a single lea). Semantics otherwise match the fixed-size overload.
void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, element_count, element_size, header_size));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
|
781
|
+
|
782
|
+
|
783
|
+
// Bump-pointer allocate object_size (a register, in bytes) in new space.
// object_size may alias result_end; otherwise it is copied there first.
// Semantics otherwise match the fixed-size overload.
void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, Operand(result));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
|
812
|
+
|
813
|
+
|
814
|
+
// Roll back the most recent new-space allocation by resetting the
// allocation top to 'object' (after stripping the heap-object tag).
// Only valid when 'object' was the last object allocated.
void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(Operand(object), Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, "Undo allocation of non allocated memory");
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}
|
826
|
+
|
827
|
+
|
828
|
+
// Allocate a HeapNumber in new space and install its map. The value field
// is left uninitialized for the caller to fill in. Jumps to gc_required on
// allocation failure; scratch1/scratch2 are clobbered.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::heap_number_map()));
}
|
844
|
+
|
845
|
+
|
846
|
+
// Allocate a sequential two-byte string of the given length (in
// characters) in new space and initialize its map, length, and hash field.
// The character payload is left uninitialized. Jumps to gc_required on
// failure; scratch1-3 are clobbered.
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask, then round down to
  // alignment: lea computes length + length + mask in one instruction.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::string_map()));
  mov(FieldOperand(result, String::kLengthOffset), length);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
|
877
|
+
|
878
|
+
|
879
|
+
// Allocate a sequential ASCII string of the given length (in characters)
// in new space and initialize its map, length, and hash field. The
// character payload is left uninitialized. Jumps to gc_required on
// failure; scratch1-3 are clobbered.
void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  ASSERT(kCharSize == 1);
  // Round the byte count up to the object alignment.
  add(Operand(scratch1), Immediate(kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::ascii_string_map()));
  mov(FieldOperand(result, String::kLengthOffset), length);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
|
910
|
+
|
911
|
+
|
912
|
+
// Allocate a (two-byte) cons string cell in new space and install its map.
// The first/second/length/hash fields are left for the caller to set.
// Jumps to gc_required on failure; scratch1/scratch2 are clobbered.
void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate the cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_string_map()));
}
|
928
|
+
|
929
|
+
|
930
|
+
// Allocate an ASCII cons string cell in new space and install its map.
// The first/second/length/hash fields are left for the caller to set.
// Jumps to gc_required on failure; scratch1/scratch2 are clobbered.
void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate the cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_ascii_string_map()));
}
|
946
|
+
|
947
|
+
|
948
|
+
// JumpTarget variant of the negative-zero check: if 'result' is zero and
// the operand 'op' is negative, the product was -0 and control branches to
// then_target; otherwise falls through.
void MacroAssembler::NegativeZeroTest(CodeGenerator* cgen,
                                      Register result,
                                      Register op,
                                      JumpTarget* then_target) {
  JumpTarget ok;
  test(result, Operand(result));
  ok.Branch(not_zero, taken);
  // result == 0: a negative operand means the true result was -0.
  test(op, Operand(op));
  then_target->Branch(sign, not_taken);
  ok.Bind();
}
|
959
|
+
|
960
|
+
|
961
|
+
// Label variant: jump to then_label when 'result' is zero and 'op' is
// negative (i.e. the arithmetic result should have been -0).
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  // result == 0: a negative operand means the true result was -0.
  test(op, Operand(op));
  j(sign, then_label, not_taken);
  bind(&ok);
}
|
971
|
+
|
972
|
+
|
973
|
+
// Two-operand variant: jump to then_label when 'result' is zero and the
// sign bit of (op1 | op2) is set, i.e. either operand was negative and the
// true result should have been -0. scratch is clobbered.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  // or_ sets the sign flag from the combined sign bits of op1 and op2.
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));
  j(sign, then_label, not_taken);
  bind(&ok);
}
|
986
|
+
|
987
|
+
|
988
|
+
// Load the instance prototype of 'function' into 'result', branching to
// 'miss' when function is a smi, not a JSFunction, or the prototype slot
// holds the hole (not yet allocated). Handles both the initial-map case
// and the non-instance-prototype case. scratch is clobbered.
void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  test(function, Immediate(kSmiTagMask));
  j(zero, miss, not_taken);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss, not_taken);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance, not_taken);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(Operand(result), Immediate(Factory::the_hole_value()));
  j(equal, miss, not_taken);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}
|
1033
|
+
|
1034
|
+
|
1035
|
+
// Emits a call to the (lazily compiled) code object of |stub|.
void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET);
}
|
1039
|
+
|
1040
|
+
|
1041
|
+
// Like CallStub, but uses TryGetCode so a failed code allocation is
// reported via the returned Failure object instead of aborting.
Object* MacroAssembler::TryCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    // Only emit the call when code generation succeeded.
    call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  }
  return result;
}
|
1049
|
+
|
1050
|
+
|
1051
|
+
// Emits a tail call (jump) to the code object of |stub|.
void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}
|
1055
|
+
|
1056
|
+
|
1057
|
+
// Like TailCallStub, but propagates a code-allocation Failure through the
// return value instead of aborting.
Object* MacroAssembler::TryTailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    // Only emit the jump when code generation succeeded.
    jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  }
  return result;
}
|
1065
|
+
|
1066
|
+
|
1067
|
+
// Returns from a stub, popping (argc - 1) arguments off the caller's stack
// (the receiver slot is accounted for by the -1).
void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}
|
1071
|
+
|
1072
|
+
|
1073
|
+
// Handles a runtime call with a mismatched argument count: drops the
// arguments from the stack and leaves undefined in eax as the result.
void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(Factory::undefined_value()));
}
|
1079
|
+
|
1080
|
+
|
1081
|
+
// Convenience overload: resolves the FunctionId and delegates.
void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}
|
1084
|
+
|
1085
|
+
|
1086
|
+
// Convenience overload of TryCallRuntime taking a FunctionId.
Object* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                       int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}
|
1090
|
+
|
1091
|
+
|
1092
|
+
// Calls runtime function |f| with |num_arguments| arguments already on the
// stack, via a RuntimeStub. Mismatched fixed-arity calls degrade to
// IllegalOperation (undefined result) rather than crashing.
void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  Runtime::FunctionId function_id =
      static_cast<Runtime::FunctionId>(f->stub_id);
  RuntimeStub stub(function_id, num_arguments);
  CallStub(&stub);
}
|
1106
|
+
|
1107
|
+
|
1108
|
+
// As CallRuntime(Function*, int), but reports stub-code allocation failure
// through the return value (see TryCallStub).
Object* MacroAssembler::TryCallRuntime(Runtime::Function* f,
                                       int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    // Since we did not call the stub, there was no allocation failure.
    // Return some non-failure object.
    return Heap::undefined_value();
  }

  Runtime::FunctionId function_id =
      static_cast<Runtime::FunctionId>(f->stub_id);
  RuntimeStub stub(function_id, num_arguments);
  return TryCallStub(&stub);
}
|
1122
|
+
|
1123
|
+
|
1124
|
+
// Tail-calls an external (runtime) function: puts the argument count in
// eax and jumps through the C entry stub. |result_size| is unused on ia32.
void MacroAssembler::TailCallRuntime(const ExternalReference& ext,
                                     int num_arguments,
                                     int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToRuntime(ext);
}
|
1134
|
+
|
1135
|
+
|
1136
|
+
// Saves the current handle-scope state (extensions count, next and limit
// pointers) on the stack and resets the extensions count. |scratch| is
// clobbered. Paired with PopHandleScope / PopHandleScopeHelper.
void MacroAssembler::PushHandleScope(Register scratch) {
  // Push the number of extensions, smi-tagged so the gc will ignore it.
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  mov(scratch, Operand::StaticVariable(extensions_address));
  ASSERT_EQ(0, kSmiTag);
  shl(scratch, kSmiTagSize);
  push(scratch);
  mov(Operand::StaticVariable(extensions_address), Immediate(0));
  // Push next and limit pointers which will be wordsize aligned and
  // hence automatically smi tagged.
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  push(Operand::StaticVariable(next_address));
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  push(Operand::StaticVariable(limit_address));
}
|
1154
|
+
|
1155
|
+
|
1156
|
+
// Restores handle-scope state pushed by PushHandleScope, deleting any
// scope extensions first via the runtime. When |gc_allowed| is false the
// runtime call goes through TryCallRuntime and an allocation Failure is
// returned; otherwise returns NULL. |saved| (if valid) is preserved across
// the runtime call; |scratch| is clobbered.
Object* MacroAssembler::PopHandleScopeHelper(Register saved,
                                             Register scratch,
                                             bool gc_allowed) {
  Object* result = NULL;
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  Label write_back;
  mov(scratch, Operand::StaticVariable(extensions_address));
  cmp(Operand(scratch), Immediate(0));
  j(equal, &write_back);  // No extensions: skip the runtime call entirely.
  // Calling a runtime function messes with registers so we save and
  // restore any one we're asked not to change
  if (saved.is_valid()) push(saved);
  if (gc_allowed) {
    CallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
  } else {
    result = TryCallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
    if (result->IsFailure()) return result;
  }
  if (saved.is_valid()) pop(saved);

  bind(&write_back);
  // Pop in reverse order of PushHandleScope: limit, next, then the
  // smi-tagged extensions count (untagged before writing back).
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  pop(Operand::StaticVariable(limit_address));
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  pop(Operand::StaticVariable(next_address));
  pop(scratch);
  shr(scratch, kSmiTagSize);
  mov(Operand::StaticVariable(extensions_address), scratch);

  return result;
}
|
1190
|
+
|
1191
|
+
|
1192
|
+
// GC-allowed variant of handle-scope restoration (cannot fail).
void MacroAssembler::PopHandleScope(Register saved, Register scratch) {
  PopHandleScopeHelper(saved, scratch, true);
}
|
1195
|
+
|
1196
|
+
|
1197
|
+
// Non-GC variant: returns a Failure object if the runtime call's code
// could not be allocated.
Object* MacroAssembler::TryPopHandleScope(Register saved, Register scratch) {
  return PopHandleScopeHelper(saved, scratch, false);
}
|
1200
|
+
|
1201
|
+
|
1202
|
+
// Jumps into the runtime: loads the external entry point into ebx (the
// register the CEntryStub expects) and jumps to the C entry stub.
void MacroAssembler::JumpToRuntime(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
|
1208
|
+
|
1209
|
+
|
1210
|
+
// Shared prologue for InvokeCode/InvokeFunction: compares expected vs.
// actual argument counts and, on mismatch, routes the call through the
// ArgumentsAdaptorTrampoline. On the matching path falls through (or
// branches) to |done|. Register protocol (per the adaptor's contract):
// eax = actual count, ebx = expected count, edx = code to invoke.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      // Both counts known at compile time and equal: no adaption needed.
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      // The adaptor expects the callee's code entry in edx; convert the
      // code object handle to an entry address (skip the Code header).
      mov(edx, Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call(adaptor, RelocInfo::CODE_TARGET);
      jmp(done);  // The adaptor returns here; skip the direct invoke.
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
|
1273
|
+
|
1274
|
+
|
1275
|
+
// Invokes code at |code| (an Operand, e.g. a register holding the entry
// address) with argument-count adaption via InvokePrologue.
void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}
|
1289
|
+
|
1290
|
+
|
1291
|
+
// Invokes a code object given as a Handle<Code>, with argument-count
// adaption. |rmode| selects the relocation mode for the call/jump.
void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;
  // The operand argument is unused on this path (the code constant is
  // passed instead); eax serves as a placeholder.
  Operand dummy(eax);
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code, rmode);
  }
  bind(&done);
}
|
1307
|
+
|
1308
|
+
|
1309
|
+
// Invokes the JSFunction in |fun| (must be edi, per calling convention):
// loads its context into esi, its formal parameter count into ebx, and its
// code entry into edx, then dispatches through InvokeCode.
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
  // Compute the code entry point: skip past the Code object header.
  lea(edx, FieldOperand(edx, Code::kHeaderSize));

  ParameterCount expected(ebx);
  InvokeCode(Operand(edx), expected, actual, flag);
}
|
1322
|
+
|
1323
|
+
|
1324
|
+
// Invokes a JavaScript builtin. If the builtin's code is not yet resolved
// (during bootstrapping), records an Unresolved entry so the call site can
// be fixed up later by the bootstrapper.
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  InvokeCode(Handle<Code>(code), expected, expected,
             RelocInfo::CODE_TARGET, flag);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  if (!resolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsUseCodeObject::encode(false);
    // Record the position of the code-target word just emitted so the
    // bootstrapper can patch it once the builtin exists.
    Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
    unresolved_.Add(entry);
  }
}
|
1349
|
+
|
1350
|
+
|
1351
|
+
// Loads the entry address of a JavaScript builtin's code into |target|.
// Unresolved builtins are recorded for later fixup, as in InvokeBuiltin
// (but with UseCodeObject = true, since the code object itself is stored).
void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  mov(Operand(target), Immediate(code));
  if (!resolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsUseCodeObject::encode(true);
    Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
    unresolved_.Add(entry);
  }
  // Convert the code object pointer into a code entry address.
  add(Operand(target), Immediate(Code::kHeaderSize - kHeapObjectTag));
}
|
1368
|
+
|
1369
|
+
|
1370
|
+
// Loads the builtin function for |id| into edi (via the global context's
// builtins object) and returns its code handle; *resolved reports whether
// the code is available yet. Clobbers edx and edi.
Handle<Code> MacroAssembler::ResolveBuiltin(Builtins::JavaScript id,
                                            bool* resolved) {
  // Move the builtin function into the temporary function slot by
  // reading it from the builtins object. NOTE: We should be able to
  // reduce this to two instructions by putting the function table in
  // the global object instead of the "builtins" object and by using a
  // real register for the function.
  mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(edx, FieldOperand(edx, GlobalObject::kBuiltinsOffset));
  int builtins_offset =
      JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize);
  mov(edi, FieldOperand(edx, builtins_offset));

  return Builtins::GetCode(id, resolved);
}
|
1385
|
+
|
1386
|
+
|
1387
|
+
// Loads into |dst| the function context found |context_chain_length|
// levels up from the current context (esi), following closure links and
// normalizing through FCONTEXT_INDEX at each end.
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {  // Slot is in the current function context.
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(esi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  }
}
|
1404
|
+
|
1405
|
+
|
1406
|
+
|
1407
|
+
// Plain return with no stack arguments popped.
void MacroAssembler::Ret() {
  ret(0);
}
|
1410
|
+
|
1411
|
+
|
1412
|
+
// Removes |stack_elements| words from the top of the stack (no-op for 0).
void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    add(Operand(esp), Immediate(stack_elements * kPointerSize));
  }
}
|
1417
|
+
|
1418
|
+
|
1419
|
+
// Loads a heap-object handle into |dst|.
void MacroAssembler::Move(Register dst, Handle<Object> value) {
  mov(dst, value);
}
|
1422
|
+
|
1423
|
+
|
1424
|
+
// Sets a stats counter to |value|; emits nothing unless native code
// counters are enabled and the counter is active.
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}
|
1429
|
+
|
1430
|
+
|
1431
|
+
// Adds |value| (> 0) to a stats counter, using inc for the common +1 case.
void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);  // Shorter encoding than add-by-immediate.
    } else {
      add(operand, Immediate(value));
    }
  }
}
|
1442
|
+
|
1443
|
+
|
1444
|
+
// Subtracts |value| (> 0) from a stats counter, mirroring IncrementCounter.
void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);  // Shorter encoding than sub-by-immediate.
    } else {
      sub(operand, Immediate(value));
    }
  }
}
|
1455
|
+
|
1456
|
+
|
1457
|
+
// Emits a runtime check only in debug-code builds (FLAG_debug_code).
void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}
|
1460
|
+
|
1461
|
+
|
1462
|
+
// Emits code that aborts with |msg| unless condition |cc| holds.
void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, taken);
  Abort(msg);
  // will not return here
  bind(&L);
}
|
1469
|
+
|
1470
|
+
|
1471
|
+
// Emits a call to Runtime::kAbort carrying |msg|. The message pointer is
// smuggled past the GC as two smis: an aligned base pointer plus the
// alignment remainder.
void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(eax);
  push(Immediate(p0));
  // The low bits stripped off p0 are passed separately, smi-tagged.
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}
|
1496
|
+
|
1497
|
+
|
1498
|
+
// RAII helper for patching already-generated code in place.
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap on order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
|
1505
|
+
|
1506
|
+
|
1507
|
+
// Flushes the instruction cache over the patched region and verifies that
// exactly |size_| bytes were emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
|
1515
|
+
|
1516
|
+
|
1517
|
+
} } // namespace v8::internal
|