lounge_lizard 0.1.0
- checksums.yaml +7 -0
- data/.gitignore +11 -0
- data/.gitmodules +3 -0
- data/.rspec +2 -0
- data/CODE_OF_CONDUCT.md +74 -0
- data/Gemfile +4 -0
- data/Guardfile +33 -0
- data/LICENSE.txt +21 -0
- data/README.md +50 -0
- data/Rakefile +43 -0
- data/bin/console +14 -0
- data/bin/setup +8 -0
- data/circle.yml +14 -0
- data/ext/drafter/CHANGELOG.md +278 -0
- data/ext/drafter/Dockerfile +17 -0
- data/ext/drafter/Makefile +62 -0
- data/ext/drafter/appveyor.yml +17 -0
- data/ext/drafter/bin/drafter +0 -0
- data/ext/drafter/build/Makefile +387 -0
- data/ext/drafter/build/drafter.Makefile +6 -0
- data/ext/drafter/build/drafter.target.mk +159 -0
- data/ext/drafter/build/ext/snowcrash/libmarkdownparser.target.mk +141 -0
- data/ext/drafter/build/ext/snowcrash/libsnowcrash.target.mk +154 -0
- data/ext/drafter/build/ext/snowcrash/libsundown.target.mk +149 -0
- data/ext/drafter/build/ext/snowcrash/perf-libsnowcrash.target.mk +147 -0
- data/ext/drafter/build/ext/snowcrash/snowcrash.Makefile +6 -0
- data/ext/drafter/build/gyp-mac-tool +606 -0
- data/ext/drafter/build/libdrafter.target.mk +186 -0
- data/ext/drafter/build/libsos.target.mk +137 -0
- data/ext/drafter/build/out/Release/drafter +0 -0
- data/ext/drafter/build/out/Release/libdrafter.dylib +0 -0
- data/ext/drafter/build/out/Release/libmarkdownparser.a +0 -0
- data/ext/drafter/build/out/Release/libsnowcrash.a +0 -0
- data/ext/drafter/build/out/Release/libsos.a +0 -0
- data/ext/drafter/build/out/Release/libsundown.a +0 -0
- data/ext/drafter/build/out/Release/obj.target/drafter/src/config.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/drafter/src/main.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/drafter/src/reporting.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libdrafter/src/ConversionContext.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libdrafter/src/NamedTypesRegistry.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libdrafter/src/RefractAPI.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libdrafter/src/RefractDataStructure.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libdrafter/src/RefractElementFactory.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libdrafter/src/RefractSourceMap.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libdrafter/src/Render.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libdrafter/src/Serialize.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libdrafter/src/SerializeAST.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libdrafter/src/SerializeResult.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libdrafter/src/SerializeSourcemap.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libdrafter/src/drafter.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libdrafter/src/drafter_private.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libdrafter/src/refract/ComparableVisitor.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libdrafter/src/refract/Element.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libdrafter/src/refract/ExpandVisitor.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libdrafter/src/refract/IsExpandableVisitor.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libdrafter/src/refract/JSONSchemaVisitor.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libdrafter/src/refract/PrintVisitor.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libdrafter/src/refract/Query.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libdrafter/src/refract/Registry.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libdrafter/src/refract/RenderJSONVisitor.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libdrafter/src/refract/SerializeCompactVisitor.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libdrafter/src/refract/SerializeVisitor.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libdrafter/src/refract/TypeQueryVisitor.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libdrafter/src/refract/VisitorUtils.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libmarkdownparser/ext/snowcrash/ext/markdown-parser/src/ByteBuffer.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libmarkdownparser/ext/snowcrash/ext/markdown-parser/src/MarkdownNode.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libmarkdownparser/ext/snowcrash/ext/markdown-parser/src/MarkdownParser.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libsnowcrash/ext/snowcrash/src/Blueprint.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libsnowcrash/ext/snowcrash/src/BlueprintSourcemap.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libsnowcrash/ext/snowcrash/src/HTTP.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libsnowcrash/ext/snowcrash/src/HeadersParser.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libsnowcrash/ext/snowcrash/src/MSON.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libsnowcrash/ext/snowcrash/src/MSONOneOfParser.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libsnowcrash/ext/snowcrash/src/MSONSourcemap.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libsnowcrash/ext/snowcrash/src/MSONTypeSectionParser.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libsnowcrash/ext/snowcrash/src/MSONValueMemberParser.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libsnowcrash/ext/snowcrash/src/Section.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libsnowcrash/ext/snowcrash/src/Signature.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libsnowcrash/ext/snowcrash/src/UriTemplateParser.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libsnowcrash/ext/snowcrash/src/posix/RegexMatch.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libsnowcrash/ext/snowcrash/src/snowcrash.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libsos/ext/sos/src/sos.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libsundown/ext/snowcrash/ext/markdown-parser/ext/sundown/html/houdini_href_e.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libsundown/ext/snowcrash/ext/markdown-parser/ext/sundown/html/houdini_html_e.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libsundown/ext/snowcrash/ext/markdown-parser/ext/sundown/html/html.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libsundown/ext/snowcrash/ext/markdown-parser/ext/sundown/html/html_smartypants.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libsundown/ext/snowcrash/ext/markdown-parser/ext/sundown/src/autolink.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libsundown/ext/snowcrash/ext/markdown-parser/ext/sundown/src/buffer.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libsundown/ext/snowcrash/ext/markdown-parser/ext/sundown/src/markdown.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libsundown/ext/snowcrash/ext/markdown-parser/ext/sundown/src/src_map.o +0 -0
- data/ext/drafter/build/out/Release/obj.target/libsundown/ext/snowcrash/ext/markdown-parser/ext/sundown/src/stack.o +0 -0
- data/ext/drafter/circle.yml +32 -0
- data/ext/drafter/config.gypi +10 -0
- data/ext/drafter/config.mk +5 -0
- data/ext/drafter/configure +224 -0
- data/ext/drafter/drafter.gyp +189 -0
- data/ext/drafter/drafter.xcworkspace/contents.xcworkspacedata +13 -0
- data/ext/drafter/ext/snowcrash/Makefile +58 -0
- data/ext/drafter/ext/snowcrash/appveyor.yml +7 -0
- data/ext/drafter/ext/snowcrash/common.gypi +165 -0
- data/ext/drafter/ext/snowcrash/configure +197 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/Makefile +90 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/ext/sundown/CONTRIBUTING.md +10 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/ext/sundown/Makefile +84 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/ext/sundown/Makefile.win +33 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/ext/sundown/examples/smartypants.c +72 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/ext/sundown/examples/sundown.c +80 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/ext/sundown/html/houdini.h +37 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/ext/sundown/html/houdini_href_e.c +108 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/ext/sundown/html/houdini_html_e.c +84 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/ext/sundown/html/html.c +647 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/ext/sundown/html/html.h +77 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/ext/sundown/html/html_smartypants.c +389 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/ext/sundown/html_block_names.txt +25 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/ext/sundown/src/autolink.c +297 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/ext/sundown/src/autolink.h +51 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/ext/sundown/src/buffer.c +225 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/ext/sundown/src/buffer.h +96 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/ext/sundown/src/html_blocks.h +206 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/ext/sundown/src/markdown.c +2726 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/ext/sundown/src/markdown.h +147 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/ext/sundown/src/src_map.c +204 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/ext/sundown/src/src_map.h +58 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/ext/sundown/src/stack.c +81 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/ext/sundown/src/stack.h +29 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/ext/sundown/sundown.def +20 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/msvc/markdown/markdown.vcproj +188 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/msvc/msvc.sln +38 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/msvc/sundown/sundown.vcproj +206 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/src/ByteBuffer.cc +160 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/src/ByteBuffer.h +90 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/src/MarkdownNode.cc +152 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/src/MarkdownNode.h +103 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/src/MarkdownParser.cc +388 -0
- data/ext/drafter/ext/snowcrash/ext/markdown-parser/src/MarkdownParser.h +106 -0
- data/ext/drafter/ext/snowcrash/snowcrash.gyp +196 -0
- data/ext/drafter/ext/snowcrash/src/ActionParser.h +560 -0
- data/ext/drafter/ext/snowcrash/src/AssetParser.h +123 -0
- data/ext/drafter/ext/snowcrash/src/AttributesParser.h +123 -0
- data/ext/drafter/ext/snowcrash/src/Blueprint.cc +90 -0
- data/ext/drafter/ext/snowcrash/src/Blueprint.h +489 -0
- data/ext/drafter/ext/snowcrash/src/BlueprintParser.h +845 -0
- data/ext/drafter/ext/snowcrash/src/BlueprintSourcemap.cc +81 -0
- data/ext/drafter/ext/snowcrash/src/BlueprintSourcemap.h +345 -0
- data/ext/drafter/ext/snowcrash/src/BlueprintUtility.h +111 -0
- data/ext/drafter/ext/snowcrash/src/CodeBlockUtility.h +276 -0
- data/ext/drafter/ext/snowcrash/src/DataStructureGroupParser.h +157 -0
- data/ext/drafter/ext/snowcrash/src/HTTP.cc +49 -0
- data/ext/drafter/ext/snowcrash/src/HTTP.h +108 -0
- data/ext/drafter/ext/snowcrash/src/HeadersParser.cc +117 -0
- data/ext/drafter/ext/snowcrash/src/HeadersParser.h +377 -0
- data/ext/drafter/ext/snowcrash/src/MSON.cc +272 -0
- data/ext/drafter/ext/snowcrash/src/MSON.h +405 -0
- data/ext/drafter/ext/snowcrash/src/MSONMixinParser.h +103 -0
- data/ext/drafter/ext/snowcrash/src/MSONNamedTypeParser.h +135 -0
- data/ext/drafter/ext/snowcrash/src/MSONOneOfParser.cc +132 -0
- data/ext/drafter/ext/snowcrash/src/MSONOneOfParser.h +80 -0
- data/ext/drafter/ext/snowcrash/src/MSONParameterParser.h +166 -0
- data/ext/drafter/ext/snowcrash/src/MSONPropertyMemberParser.h +106 -0
- data/ext/drafter/ext/snowcrash/src/MSONSourcemap.cc +141 -0
- data/ext/drafter/ext/snowcrash/src/MSONSourcemap.h +181 -0
- data/ext/drafter/ext/snowcrash/src/MSONTypeSectionParser.cc +209 -0
- data/ext/drafter/ext/snowcrash/src/MSONTypeSectionParser.h +213 -0
- data/ext/drafter/ext/snowcrash/src/MSONUtility.h +506 -0
- data/ext/drafter/ext/snowcrash/src/MSONValueMemberParser.cc +214 -0
- data/ext/drafter/ext/snowcrash/src/MSONValueMemberParser.h +390 -0
- data/ext/drafter/ext/snowcrash/src/ModelTable.h +87 -0
- data/ext/drafter/ext/snowcrash/src/ParameterParser.h +516 -0
- data/ext/drafter/ext/snowcrash/src/ParametersParser.h +222 -0
- data/ext/drafter/ext/snowcrash/src/PayloadParser.h +733 -0
- data/ext/drafter/ext/snowcrash/src/Platform.h +33 -0
- data/ext/drafter/ext/snowcrash/src/RegexMatch.h +32 -0
- data/ext/drafter/ext/snowcrash/src/RelationParser.h +87 -0
- data/ext/drafter/ext/snowcrash/src/ResourceGroupParser.h +297 -0
- data/ext/drafter/ext/snowcrash/src/ResourceParser.h +536 -0
- data/ext/drafter/ext/snowcrash/src/Section.cc +48 -0
- data/ext/drafter/ext/snowcrash/src/Section.h +60 -0
- data/ext/drafter/ext/snowcrash/src/SectionParser.h +246 -0
- data/ext/drafter/ext/snowcrash/src/SectionParserData.h +109 -0
- data/ext/drafter/ext/snowcrash/src/SectionProcessor.h +299 -0
- data/ext/drafter/ext/snowcrash/src/Signature.cc +75 -0
- data/ext/drafter/ext/snowcrash/src/Signature.h +103 -0
- data/ext/drafter/ext/snowcrash/src/SignatureSectionProcessor.h +442 -0
- data/ext/drafter/ext/snowcrash/src/SourceAnnotation.h +166 -0
- data/ext/drafter/ext/snowcrash/src/StringUtility.h +323 -0
- data/ext/drafter/ext/snowcrash/src/UriTemplateParser.cc +195 -0
- data/ext/drafter/ext/snowcrash/src/UriTemplateParser.h +240 -0
- data/ext/drafter/ext/snowcrash/src/ValuesParser.h +111 -0
- data/ext/drafter/ext/snowcrash/src/posix/RegexMatch.cc +99 -0
- data/ext/drafter/ext/snowcrash/src/snowcrash.cc +90 -0
- data/ext/drafter/ext/snowcrash/src/snowcrash.h +44 -0
- data/ext/drafter/ext/snowcrash/src/win/RegexMatch.cc +78 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/AUTHORS +12 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/DEPS +23 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/OWNERS +1 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/PRESUBMIT.py +137 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/buildbot/buildbot_run.py +136 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/buildbot/commit_queue/OWNERS +6 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/buildbot/commit_queue/cq_config.json +15 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/codereview.settings +10 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/data/win/large-pdb-shim.cc +12 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/gyp +8 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/gyp.bat +5 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/gyp_main.py +16 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/MSVSNew.py +340 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/MSVSProject.py +208 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/MSVSSettings.py +1096 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/MSVSToolFile.py +58 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/MSVSUserFile.py +147 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/MSVSUtil.py +270 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/MSVSUtil.pyc +0 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/MSVSVersion.py +445 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/MSVSVersion.pyc +0 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/__init__.py +548 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/__init__.pyc +0 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/common.py +608 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/common.pyc +0 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/easy_xml.py +157 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/flock_tool.py +54 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/generator/__init__.py +0 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/generator/__init__.pyc +0 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/generator/analyzer.py +741 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/generator/android.py +1069 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/generator/cmake.py +1248 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/generator/dump_dependency_json.py +99 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/generator/eclipse.py +425 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/generator/gypd.py +94 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/generator/gypsh.py +56 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/generator/make.py +2218 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/generator/make.pyc +0 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/generator/msvs.py +3467 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/generator/ninja.py +2427 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/generator/ninja.pyc +0 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/generator/xcode.py +1300 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/generator/xcode.pyc +0 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/input.py +2899 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/input.pyc +0 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/mac_tool.py +605 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/msvs_emulation.py +1093 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/msvs_emulation.pyc +0 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/ninja_syntax.py +160 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/ninja_syntax.pyc +0 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/ordered_dict.py +289 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/simple_copy.py +46 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/simple_copy.pyc +0 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/win_tool.py +314 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/xcode_emulation.py +1664 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/xcode_emulation.pyc +0 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/xcode_ninja.py +276 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/xcode_ninja.pyc +0 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/xcodeproj_file.py +2927 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/xcodeproj_file.pyc +0 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/xml_fix.py +69 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/pylintrc +307 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/samples/samples +81 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/samples/samples.bat +5 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/setup.py +19 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/tools/Xcode/Specifications/gyp.pbfilespec +27 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/tools/Xcode/Specifications/gyp.xclangspec +226 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/tools/emacs/gyp.el +275 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/tools/graphviz.py +100 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/tools/pretty_gyp.py +155 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/tools/pretty_sln.py +169 -0
- data/ext/drafter/ext/snowcrash/tools/gyp/tools/pretty_vcproj.py +329 -0
- data/ext/drafter/ext/snowcrash/vcbuild.bat +139 -0
- data/ext/drafter/ext/sos/Makefile +62 -0
- data/ext/drafter/ext/sos/src/sos.cc +235 -0
- data/ext/drafter/ext/sos/src/sos.h +188 -0
- data/ext/drafter/ext/sos/src/sosJSON.h +121 -0
- data/ext/drafter/ext/sos/src/sosYAML.h +105 -0
- data/ext/drafter/src/ConversionContext.cc +39 -0
- data/ext/drafter/src/ConversionContext.h +34 -0
- data/ext/drafter/src/NamedTypesRegistry.cc +405 -0
- data/ext/drafter/src/NamedTypesRegistry.h +28 -0
- data/ext/drafter/src/NodeInfo.h +143 -0
- data/ext/drafter/src/RefractAPI.cc +579 -0
- data/ext/drafter/src/RefractAPI.h +28 -0
- data/ext/drafter/src/RefractDataStructure.cc +1199 -0
- data/ext/drafter/src/RefractDataStructure.h +26 -0
- data/ext/drafter/src/RefractElementFactory.cc +107 -0
- data/ext/drafter/src/RefractElementFactory.h +67 -0
- data/ext/drafter/src/RefractSourceMap.cc +29 -0
- data/ext/drafter/src/RefractSourceMap.h +57 -0
- data/ext/drafter/src/Render.cc +157 -0
- data/ext/drafter/src/Render.h +40 -0
- data/ext/drafter/src/Serialize.cc +160 -0
- data/ext/drafter/src/Serialize.h +289 -0
- data/ext/drafter/src/SerializeAST.cc +507 -0
- data/ext/drafter/src/SerializeAST.h +29 -0
- data/ext/drafter/src/SerializeResult.cc +170 -0
- data/ext/drafter/src/SerializeResult.h +34 -0
- data/ext/drafter/src/SerializeSourcemap.cc +331 -0
- data/ext/drafter/src/SerializeSourcemap.h +21 -0
- data/ext/drafter/src/Version.h +40 -0
- data/ext/drafter/src/config.cc +91 -0
- data/ext/drafter/src/config.h +38 -0
- data/ext/drafter/src/drafter.cc +137 -0
- data/ext/drafter/src/drafter.h +102 -0
- data/ext/drafter/src/drafter_private.cc +85 -0
- data/ext/drafter/src/drafter_private.h +34 -0
- data/ext/drafter/src/main.cc +137 -0
- data/ext/drafter/src/refract/AppendDecorator.h +58 -0
- data/ext/drafter/src/refract/Build.h +67 -0
- data/ext/drafter/src/refract/ComparableVisitor.cc +43 -0
- data/ext/drafter/src/refract/ComparableVisitor.h +62 -0
- data/ext/drafter/src/refract/Element.cc +409 -0
- data/ext/drafter/src/refract/Element.h +656 -0
- data/ext/drafter/src/refract/ElementFwd.h +37 -0
- data/ext/drafter/src/refract/ElementInserter.h +59 -0
- data/ext/drafter/src/refract/Exception.h +31 -0
- data/ext/drafter/src/refract/ExpandVisitor.cc +359 -0
- data/ext/drafter/src/refract/ExpandVisitor.h +58 -0
- data/ext/drafter/src/refract/FilterVisitor.h +52 -0
- data/ext/drafter/src/refract/IsExpandableVisitor.cc +140 -0
- data/ext/drafter/src/refract/IsExpandableVisitor.h +31 -0
- data/ext/drafter/src/refract/Iterate.h +160 -0
- data/ext/drafter/src/refract/JSONSchemaVisitor.cc +675 -0
- data/ext/drafter/src/refract/JSONSchemaVisitor.h +73 -0
- data/ext/drafter/src/refract/PrintVisitor.cc +164 -0
- data/ext/drafter/src/refract/PrintVisitor.h +50 -0
- data/ext/drafter/src/refract/Query.cc +13 -0
- data/ext/drafter/src/refract/Query.h +38 -0
- data/ext/drafter/src/refract/Registry.cc +114 -0
- data/ext/drafter/src/refract/Registry.h +43 -0
- data/ext/drafter/src/refract/RenderJSONVisitor.cc +255 -0
- data/ext/drafter/src/refract/RenderJSONVisitor.h +51 -0
- data/ext/drafter/src/refract/SerializeCompactVisitor.cc +167 -0
- data/ext/drafter/src/refract/SerializeCompactVisitor.h +56 -0
- data/ext/drafter/src/refract/SerializeVisitor.cc +214 -0
- data/ext/drafter/src/refract/SerializeVisitor.h +55 -0
- data/ext/drafter/src/refract/TypeQueryVisitor.cc +46 -0
- data/ext/drafter/src/refract/TypeQueryVisitor.h +110 -0
- data/ext/drafter/src/refract/Visitor.h +126 -0
- data/ext/drafter/src/refract/VisitorUtils.cc +63 -0
- data/ext/drafter/src/refract/VisitorUtils.h +231 -0
- data/ext/drafter/src/reporting.cc +263 -0
- data/ext/drafter/src/reporting.h +39 -0
- data/ext/drafter/src/stream.h +148 -0
- data/ext/drafter/tools/homebrew/drafter.rb +18 -0
- data/ext/drafter/tools/make-tarball.sh +39 -0
- data/ext/drafter/tools/refract-filter.py +96 -0
- data/ext/drafter/tools/release.sh +17 -0
- data/ext/drafter/vcbuild.bat +203 -0
- data/lib/lounge_lizard/binding.rb +29 -0
- data/lib/lounge_lizard/version.rb +3 -0
- data/lib/lounge_lizard.rb +18 -0
- data/lounge_lizard.gemspec +37 -0
- metadata +547 -0
data/ext/drafter/ext/snowcrash/tools/gyp/pylib/gyp/input.py ADDED
@@ -0,0 +1,2899 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from compiler.ast import Const
from compiler.ast import Dict
from compiler.ast import Discard
from compiler.ast import List
from compiler.ast import Module
from compiler.ast import Node
from compiler.ast import Stmt
import compiler
import gyp.common
import gyp.simple_copy
import multiprocessing
import optparse
import os.path
import re
import shlex
import signal
import subprocess
import sys
import threading
import time
import traceback
from gyp.common import GypError
from gyp.common import OrderedSet


# A list of types that are treated as linkable.
linkable_types = [
  'executable',
  'shared_library',
  'loadable_module',
  'mac_kernel_extension',
]

# A list of sections that contain links to other targets.
dependency_sections = ['dependencies', 'export_dependent_settings']

# base_path_sections is a list of sections defined by GYP that contain
# pathnames.  The generators can provide more keys, the two lists are merged
# into path_sections, but you should call IsPathSection instead of using either
# list directly.
base_path_sections = [
  'destination',
  'files',
  'include_dirs',
  'inputs',
  'libraries',
  'outputs',
  'sources',
]
path_sections = set()

# These per-process dictionaries are used to cache build file data when loading
# in parallel mode.
per_process_data = {}
per_process_aux_data = {}

def IsPathSection(section):
  # If section ends in one of the '=+?!' characters, it's applied to a section
  # without the trailing characters.  '/' is notably absent from this list,
  # because there's no way for a regular expression to be treated as a path.
  while section and section[-1:] in '=+?!':
    section = section[:-1]

  if section in path_sections:
    return True

  # Sections matching the regexp '_(dir|file|path)s?$' are also
  # considered PathSections. Using manual string matching since that
  # is much faster than the regexp and this can be called hundreds of
  # thousands of times so micro performance matters.
  if "_" in section:
    tail = section[-6:]
    if tail[-1] == 's':
      tail = tail[:-1]
    if tail[-5:] in ('_file', '_path'):
      return True
    return tail[-4:] == '_dir'

  return False

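The suffix rules above are easiest to see with concrete section names. A minimal standalone sketch of the same checks (the helper name is invented, and the path_sections membership test is omitted):

def is_path_section_sketch(section):
  section = section.rstrip('=+?!')        # drop merge-operator suffixes
  if '_' in section:
    tail = section[-6:]
    if tail.endswith('s'):
      tail = tail[:-1]                    # treat plurals like singulars
    return tail[-5:] in ('_file', '_path') or tail[-4:] == '_dir'
  return False

assert is_path_section_sketch('include_dirs')   # '_dir' after stripping 's'
assert is_path_section_sketch('map_file')       # '_file'
assert is_path_section_sketch('some_path=')     # '=' stripped first
assert not is_path_section_sketch('defines')    # no matching suffix
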
# base_non_configuration_keys is a list of key names that belong in the target
# itself and should not be propagated into its configurations. It is merged
# with a list that can come from the generator to
# create non_configuration_keys.
base_non_configuration_keys = [
  # Sections that must exist inside targets and not configurations.
  'actions',
  'configurations',
  'copies',
  'default_configuration',
  'dependencies',
  'dependencies_original',
  'libraries',
  'postbuilds',
  'product_dir',
  'product_extension',
  'product_name',
  'product_prefix',
  'rules',
  'run_as',
  'sources',
  'standalone_static_library',
  'suppress_wildcard',
  'target_name',
  'toolset',
  'toolsets',
  'type',

  # Sections that can be found inside targets or configurations, but that
  # should not be propagated from targets into their configurations.
  'variables',
]
non_configuration_keys = []

# Keys that do not belong inside a configuration dictionary.
invalid_configuration_keys = [
  'actions',
  'all_dependent_settings',
  'configurations',
  'dependencies',
  'direct_dependent_settings',
  'libraries',
  'link_settings',
  'sources',
  'standalone_static_library',
  'target_name',
  'type',
]

# Controls whether or not the generator supports multiple toolsets.
multiple_toolsets = False

# Paths for converting filelist paths to output paths: {
#   toplevel,
#   qualified_output_dir,
# }
generator_filelist_paths = None

def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
  """Return a list of all build files included into build_file_path.

  The returned list will contain build_file_path as well as all other files
  that it included, either directly or indirectly.  Note that the list may
  contain files that were included into a conditional section that evaluated
  to false and was not merged into build_file_path's dict.

  aux_data is a dict containing a key for each build file or included build
  file.  Those keys provide access to dicts whose "included" keys contain
  lists of all other files included by the build file.

  included should be left at its default None value by external callers.  It
  is used for recursion.

  The returned list will not contain any duplicate entries.  Each build file
  in the list will be relative to the current directory.
  """

  if included == None:
    included = []

  if build_file_path in included:
    return included

  included.append(build_file_path)

  for included_build_file in aux_data[build_file_path].get('included', []):
    GetIncludedBuildFiles(included_build_file, aux_data, included)

  return included

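The aux_data shape described by the docstring looks like this in practice (file names invented for illustration):

aux_data = {
  'a.gyp':       {'included': ['common.gypi']},
  'common.gypi': {'included': ['arch.gypi']},
  'arch.gypi':   {},
}
# GetIncludedBuildFiles('a.gyp', aux_data) follows the 'included' chains
# and returns ['a.gyp', 'common.gypi', 'arch.gypi'].
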
def CheckedEval(file_contents):
  """Return the eval of a gyp file.

  The gyp file is restricted to dictionaries and lists only, and
  repeated keys are not allowed.

  Note that this is slower than eval() is.
  """

  ast = compiler.parse(file_contents)
  assert isinstance(ast, Module)
  c1 = ast.getChildren()
  assert c1[0] is None
  assert isinstance(c1[1], Stmt)
  c2 = c1[1].getChildren()
  assert isinstance(c2[0], Discard)
  c3 = c2[0].getChildren()
  assert len(c3) == 1
  return CheckNode(c3[0], [])


def CheckNode(node, keypath):
  if isinstance(node, Dict):
    c = node.getChildren()
    dict = {}
    for n in range(0, len(c), 2):
      assert isinstance(c[n], Const)
      key = c[n].getChildren()[0]
      if key in dict:
        raise GypError("Key '" + key + "' repeated at level " +
                       repr(len(keypath) + 1) + " with key path '" +
                       '.'.join(keypath) + "'")
      kp = list(keypath)  # Make a copy of the list for descending this node.
      kp.append(key)
      dict[key] = CheckNode(c[n + 1], kp)
    return dict
  elif isinstance(node, List):
    c = node.getChildren()
    children = []
    for index, child in enumerate(c):
      kp = list(keypath)  # Copy list.
      kp.append(repr(index))
      children.append(CheckNode(child, kp))
    return children
  elif isinstance(node, Const):
    return node.getChildren()[0]
  else:
    raise TypeError("Unknown AST node at key path '" + '.'.join(keypath) +
                    "': " + repr(node))


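Because the file is parsed into an AST and walked node by node, a duplicated key raises instead of silently winning as it would under plain eval(). For example (inputs invented; the compiler module ties this to Python 2):

CheckedEval("{'targets': [{'target_name': 'foo'}]}")
# => {'targets': [{'target_name': 'foo'}]}
CheckedEval("{'defines': [], 'defines': []}")
# => GypError: Key 'defines' repeated at level 1 with key path ''
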
def LoadOneBuildFile(build_file_path, data, aux_data, includes,
                     is_target, check):
  if build_file_path in data:
    return data[build_file_path]

  if os.path.exists(build_file_path):
    build_file_contents = open(build_file_path).read()
  else:
    raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd()))

  build_file_data = None
  try:
    if check:
      build_file_data = CheckedEval(build_file_contents)
    else:
      build_file_data = eval(build_file_contents, {'__builtins__': None},
                             None)
  except SyntaxError, e:
    e.filename = build_file_path
    raise
  except Exception, e:
    gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path)
    raise

  if type(build_file_data) is not dict:
    raise GypError("%s does not evaluate to a dictionary." % build_file_path)

  data[build_file_path] = build_file_data
  aux_data[build_file_path] = {}

  # Scan for includes and merge them in.
  if ('skip_includes' not in build_file_data or
      not build_file_data['skip_includes']):
    try:
      if is_target:
        LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
                                      aux_data, includes, check)
      else:
        LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
                                      aux_data, None, check)
    except Exception, e:
      gyp.common.ExceptionAppend(e,
                                 'while reading includes of ' + build_file_path)
      raise

  return build_file_data


def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
                                  includes, check):
  includes_list = []
  if includes != None:
    includes_list.extend(includes)
  if 'includes' in subdict:
    for include in subdict['includes']:
      # "include" is specified relative to subdict_path, so compute the real
      # path to include by appending the provided "include" to the directory
      # in which subdict_path resides.
      relative_include = \
          os.path.normpath(os.path.join(os.path.dirname(subdict_path), include))
      includes_list.append(relative_include)
    # Unhook the includes list, it's no longer needed.
    del subdict['includes']

  # Merge in the included files.
  for include in includes_list:
    if not 'included' in aux_data[subdict_path]:
      aux_data[subdict_path]['included'] = []
    aux_data[subdict_path]['included'].append(include)

    gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include)

    MergeDicts(subdict,
               LoadOneBuildFile(include, data, aux_data, None, False, check),
               subdict_path, include)

  # Recurse into subdictionaries.
  for k, v in subdict.iteritems():
    if type(v) is dict:
      LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data,
                                    None, check)
    elif type(v) is list:
      LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data,
                                    check)


# This recurses into lists so that it can look for dicts.
def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data, check):
  for item in sublist:
    if type(item) is dict:
      LoadBuildFileIncludesIntoDict(item, sublist_path, data, aux_data,
                                    None, check)
    elif type(item) is list:
      LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data, check)

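The relative-include computation reduces to a normpath over the including file's directory, e.g. (paths invented):

os.path.normpath(os.path.join(os.path.dirname('src/app/app.gyp'),
                              '../../build/common.gypi'))
# => 'build/common.gypi'
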
# Processes toolsets in all the targets. This recurses into condition entries
# since they can contain toolsets as well.
def ProcessToolsetsInDict(data):
  if 'targets' in data:
    target_list = data['targets']
    new_target_list = []
    for target in target_list:
      # If this target already has an explicit 'toolset', and no 'toolsets'
      # list, don't modify it further.
      if 'toolset' in target and 'toolsets' not in target:
        new_target_list.append(target)
        continue
      if multiple_toolsets:
        toolsets = target.get('toolsets', ['target'])
      else:
        toolsets = ['target']
      # Make sure this 'toolsets' definition is only processed once.
      if 'toolsets' in target:
        del target['toolsets']
      if len(toolsets) > 0:
        # Optimization: only do copies if more than one toolset is specified.
        for build in toolsets[1:]:
          new_target = gyp.simple_copy.deepcopy(target)
          new_target['toolset'] = build
          new_target_list.append(new_target)
        target['toolset'] = toolsets[0]
        new_target_list.append(target)
    data['targets'] = new_target_list
  if 'conditions' in data:
    for condition in data['conditions']:
      if type(condition) is list:
        for condition_dict in condition[1:]:
          if type(condition_dict) is dict:
            ProcessToolsetsInDict(condition_dict)

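With multiple_toolsets enabled, one target carrying a 'toolsets' list fans out into one copy per toolset, roughly (input invented):

data = {'targets': [{'target_name': 'foo', 'toolsets': ['target', 'host']}]}
ProcessToolsetsInDict(data)   # assumes multiple_toolsets = True
# data['targets'] is now:
#   [{'target_name': 'foo', 'toolset': 'host'},
#    {'target_name': 'foo', 'toolset': 'target'}]
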
# TODO(mark): I don't love this name.  It just means that it's going to load
# a build file that contains targets and is expected to provide a targets dict
# that contains the targets...
def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
                        depth, check, load_dependencies):
  # If depth is set, predefine the DEPTH variable to be a relative path from
  # this build file's directory to the directory identified by depth.
  if depth:
    # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
    # temporary measure. This should really be addressed by keeping all paths
    # in POSIX until actual project generation.
    d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path))
    if d == '':
      variables['DEPTH'] = '.'
    else:
      variables['DEPTH'] = d.replace('\\', '/')

  # The 'target_build_files' key is only set when loading target build files in
  # the non-parallel code path, where LoadTargetBuildFile is called
  # recursively.  In the parallel code path, we don't need to check whether the
  # |build_file_path| has already been loaded, because the 'scheduled' set in
  # ParallelState guarantees that we never load the same |build_file_path|
  # twice.
  if 'target_build_files' in data:
    if build_file_path in data['target_build_files']:
      # Already loaded.
      return False
    data['target_build_files'].add(build_file_path)

  gyp.DebugOutput(gyp.DEBUG_INCLUDES,
                  "Loading Target Build File '%s'", build_file_path)

  build_file_data = LoadOneBuildFile(build_file_path, data, aux_data,
                                     includes, True, check)

  # Store DEPTH for later use in generators.
  build_file_data['_DEPTH'] = depth

  # Set up the included_files key indicating which .gyp files contributed to
  # this target dict.
  if 'included_files' in build_file_data:
    raise GypError(build_file_path + ' must not contain included_files key')

  included = GetIncludedBuildFiles(build_file_path, aux_data)
  build_file_data['included_files'] = []
  for included_file in included:
    # included_file is relative to the current directory, but it needs to
    # be made relative to build_file_path's directory.
    included_relative = \
        gyp.common.RelativePath(included_file,
                                os.path.dirname(build_file_path))
    build_file_data['included_files'].append(included_relative)

  # Do a first round of toolsets expansion so that conditions can be defined
  # per toolset.
  ProcessToolsetsInDict(build_file_data)

  # Apply "pre"/"early" variable expansions and condition evaluations.
  ProcessVariablesAndConditionsInDict(
      build_file_data, PHASE_EARLY, variables, build_file_path)

  # Since some toolsets might have been defined conditionally, perform
  # a second round of toolsets expansion now.
  ProcessToolsetsInDict(build_file_data)

  # Look at each project's target_defaults dict, and merge settings into
  # targets.
  if 'target_defaults' in build_file_data:
    if 'targets' not in build_file_data:
      raise GypError("Unable to find targets in build file %s" %
                     build_file_path)

    index = 0
    while index < len(build_file_data['targets']):
      # This procedure needs to give the impression that target_defaults is
      # used as defaults, and the individual targets inherit from that.
      # The individual targets need to be merged into the defaults.  Make
      # a deep copy of the defaults for each target, merge the target dict
      # as found in the input file into that copy, and then hook up the
      # copy with the target-specific data merged into it as the replacement
      # target dict.
      old_target_dict = build_file_data['targets'][index]
      new_target_dict = gyp.simple_copy.deepcopy(
          build_file_data['target_defaults'])
      MergeDicts(new_target_dict, old_target_dict,
                 build_file_path, build_file_path)
      build_file_data['targets'][index] = new_target_dict
      index += 1

    # No longer needed.
    del build_file_data['target_defaults']

  # Look for dependencies.  This means that dependency resolution occurs
  # after "pre" conditionals and variable expansion, but before "post" -
  # in other words, you can't put a "dependencies" section inside a "post"
  # conditional within a target.

  dependencies = []
  if 'targets' in build_file_data:
    for target_dict in build_file_data['targets']:
      if 'dependencies' not in target_dict:
        continue
      for dependency in target_dict['dependencies']:
        dependencies.append(
            gyp.common.ResolveTarget(build_file_path, dependency, None)[0])

  if load_dependencies:
    for dependency in dependencies:
      try:
        LoadTargetBuildFile(dependency, data, aux_data, variables,
                            includes, depth, check, load_dependencies)
      except Exception, e:
        gyp.common.ExceptionAppend(
            e, 'while loading dependencies of %s' % build_file_path)
        raise
  else:
    return (build_file_path, dependencies)

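The target_defaults pass above means every target starts from a deep copy of the defaults, with its own settings merged over the top. Schematically (settings invented; exact merge semantics come from MergeDicts, defined later in this file):

# target_defaults:    {'type': 'static_library', 'defines': ['COMMON']}
# target as written:  {'target_name': 'foo', 'type': 'executable'}
# replacement target after MergeDicts:
#   {'type': 'executable', 'defines': ['COMMON'], 'target_name': 'foo'}
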
def CallLoadTargetBuildFile(global_flags,
                            build_file_path, variables,
                            includes, depth, check,
                            generator_input_info):
  """Wrapper around LoadTargetBuildFile for parallel processing.

     This wrapper is used when LoadTargetBuildFile is executed in
     a worker process.
  """

  try:
    signal.signal(signal.SIGINT, signal.SIG_IGN)

    # Apply globals so that the worker process behaves the same.
    for key, value in global_flags.iteritems():
      globals()[key] = value

    SetGeneratorGlobals(generator_input_info)
    result = LoadTargetBuildFile(build_file_path, per_process_data,
                                 per_process_aux_data, variables,
                                 includes, depth, check, False)
    if not result:
      return result

    (build_file_path, dependencies) = result

    # We can safely pop the build_file_data from per_process_data because it
    # will never be referenced by this process again, so we don't need to keep
    # it in the cache.
    build_file_data = per_process_data.pop(build_file_path)

    # This gets serialized and sent back to the main process via a pipe.
    # It's handled in LoadTargetBuildFileCallback.
    return (build_file_path,
            build_file_data,
            dependencies)
  except GypError, e:
    sys.stderr.write("gyp: %s\n" % e)
    return None
  except Exception, e:
    print >>sys.stderr, 'Exception:', e
    print >>sys.stderr, traceback.format_exc()
    return None


class ParallelProcessingError(Exception):
  pass


class ParallelState(object):
  """Class to keep track of state when processing input files in parallel.

  If build files are loaded in parallel, use this to keep track of
  state during farming out and processing parallel jobs. It's stored
  in a global so that the callback function can have access to it.
  """

  def __init__(self):
    # The multiprocessing pool.
    self.pool = None
    # The condition variable used to protect this object and notify
    # the main loop when there might be more data to process.
    self.condition = None
    # The "data" dict that was passed to LoadTargetBuildFileParallel
    self.data = None
    # The number of parallel calls outstanding; decremented when a response
    # was received.
    self.pending = 0
    # The set of all build files that have been scheduled, so we don't
    # schedule the same one twice.
    self.scheduled = set()
    # A list of dependency build file paths that haven't been scheduled yet.
    self.dependencies = []
    # Flag to indicate if there was an error in a child process.
    self.error = False

  def LoadTargetBuildFileCallback(self, result):
    """Handle the results of running LoadTargetBuildFile in another process.
    """
    self.condition.acquire()
    if not result:
      self.error = True
      self.condition.notify()
      self.condition.release()
      return
    (build_file_path0, build_file_data0, dependencies0) = result
    self.data[build_file_path0] = build_file_data0
    self.data['target_build_files'].add(build_file_path0)
    for new_dependency in dependencies0:
      if new_dependency not in self.scheduled:
        self.scheduled.add(new_dependency)
        self.dependencies.append(new_dependency)
    self.pending -= 1
    self.condition.notify()
    self.condition.release()


def LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth,
                                 check, generator_input_info):
  parallel_state = ParallelState()
  parallel_state.condition = threading.Condition()
  # Make copies of the build_files argument that we can modify while working.
  parallel_state.dependencies = list(build_files)
  parallel_state.scheduled = set(build_files)
  parallel_state.pending = 0
  parallel_state.data = data

  try:
    parallel_state.condition.acquire()
    while parallel_state.dependencies or parallel_state.pending:
      if parallel_state.error:
        break
      if not parallel_state.dependencies:
        parallel_state.condition.wait()
        continue

      dependency = parallel_state.dependencies.pop()

      parallel_state.pending += 1
      global_flags = {
        'path_sections': globals()['path_sections'],
        'non_configuration_keys': globals()['non_configuration_keys'],
        'multiple_toolsets': globals()['multiple_toolsets']}

      if not parallel_state.pool:
        parallel_state.pool = multiprocessing.Pool(multiprocessing.cpu_count())
      parallel_state.pool.apply_async(
          CallLoadTargetBuildFile,
          args = (global_flags, dependency,
                  variables, includes, depth, check, generator_input_info),
          callback = parallel_state.LoadTargetBuildFileCallback)
  except KeyboardInterrupt, e:
    parallel_state.pool.terminate()
    raise e

  parallel_state.condition.release()

  parallel_state.pool.close()
  parallel_state.pool.join()
  parallel_state.pool = None

  if parallel_state.error:
    sys.exit(1)

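The loop above is a classic fan-out scheme: a Condition guards the shared state, apply_async hands one build file to a worker, and the callback feeds newly discovered dependencies back into the queue. A stripped-down sketch of the same pattern with generic work items, not gyp's actual types (on Windows this would also need an `if __name__ == '__main__'` guard):

import multiprocessing
import threading

def work(item):
  # Runs in a worker process; returns the result plus new items it found.
  return (item.upper(), [])

class State(object):
  def __init__(self, items):
    self.condition = threading.Condition()
    self.queue = list(items)   # items not yet scheduled
    self.pending = 0           # async calls still outstanding

  def callback(self, result):
    # Runs in the pool's result-handler thread, like
    # LoadTargetBuildFileCallback above.
    self.condition.acquire()
    self.queue.extend(result[1])  # schedule discovered dependencies
    self.pending -= 1
    self.condition.notify()
    self.condition.release()

state = State(['a.gyp', 'b.gyp'])
pool = multiprocessing.Pool(2)
state.condition.acquire()
while state.queue or state.pending:
  if not state.queue:
    state.condition.wait()     # releases the lock until notify()
    continue
  state.pending += 1
  pool.apply_async(work, args=(state.queue.pop(),), callback=state.callback)
state.condition.release()
pool.close()
pool.join()
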
# Look for the bracket that matches the first bracket seen in a
# string, and return the start and end as a tuple.  For example, if
# the input is something like "<(foo <(bar)) blah", then it would
# return (1, 13), indicating the entire string except for the leading
# "<" and trailing " blah".
LBRACKETS = set('{[(')
BRACKETS = {'}': '{', ']': '[', ')': '('}
def FindEnclosingBracketGroup(input_str):
  stack = []
  start = -1
  for index, char in enumerate(input_str):
    if char in LBRACKETS:
      stack.append(char)
      if start == -1:
        start = index
    elif char in BRACKETS:
      if not stack:
        return (-1, -1)
      if stack.pop() != BRACKETS[char]:
        return (-1, -1)
      if not stack:
        return (start, index + 1)
  return (-1, -1)

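Tracing the comment's example through the function:

FindEnclosingBracketGroup('<(foo <(bar)) blah')
# '(' at index 1 opens the group and sets start; the nested '(' at 7
# is pushed and popped by ')' at 11; the ')' at 12 empties the stack,
# so the function returns (1, 13) -- the span '(foo <(bar))'.
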
def IsStrCanonicalInt(string):
  """Returns True if |string| is in its canonical integer form.

  The canonical form is such that str(int(string)) == string.
  """
  if type(string) is str:
    # This function is called a lot so for maximum performance, avoid
    # involving regexps which would otherwise make the code much
    # shorter. Regexps would need twice the time of this function.
    if string:
      if string == "0":
        return True
      if string[0] == "-":
        string = string[1:]
        if not string:
          return False
      if '1' <= string[0] <= '9':
        return string.isdigit()

  return False

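A few concrete cases of the canonical-form rule:

IsStrCanonicalInt('10')    # True:  str(int('10')) == '10'
IsStrCanonicalInt('0')     # True:  special-cased
IsStrCanonicalInt('-5')    # True
IsStrCanonicalInt('010')   # False: leading zero is not canonical
IsStrCanonicalInt('-0')    # False: str(int('-0')) == '0'
IsStrCanonicalInt(10)      # False: only str instances qualify
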
# This matches things like "<(asdf)", "<!(cmd)", "<!@(cmd)", "<|(list)",
# "<!interpreter(arguments)", "<([list])", and even "<([)" and "<(<())".
# In the last case, the inner "<()" is captured in match['content'].
early_variable_re = re.compile(
    r'(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)'
    r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
    r'\((?P<is_array>\s*\[?)'
    r'(?P<content>.*?)(\]?)\))')

# This matches the same as early_variable_re, but with '>' instead of '<'.
late_variable_re = re.compile(
    r'(?P<replace>(?P<type>>(?:(?:!?@?)|\|)?)'
    r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
    r'\((?P<is_array>\s*\[?)'
    r'(?P<content>.*?)(\]?)\))')

# This matches the same as early_variable_re, but with '^' instead of '<'.
latelate_variable_re = re.compile(
    r'(?P<replace>(?P<type>[\^](?:(?:!?@?)|\|)?)'
    r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
    r'\((?P<is_array>\s*\[?)'
    r'(?P<content>.*?)(\]?)\))')

# Global cache of results from running commands so they don't have to be run
# more than once.
cached_command_results = {}

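Running early_variable_re over a typical command reference shows what lands in each named group (example string invented):

m = early_variable_re.search('<!@(python -c "import sys")')
m.group('type')             # '<!@': '!' runs a command, '@' expands to a list
m.group('command_string')   # None:  no named command such as pymod_do_main
m.group('is_array')         # '':    no '[', so the content is not a literal list
m.group('content')          # 'python -c "import sys"'
m.group('replace')          # the full '<!@(...)' span that gets substituted
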
def FixupPlatformCommand(cmd):
  if sys.platform == 'win32':
    if type(cmd) is list:
      cmd = [re.sub('^cat ', 'type ', cmd[0])] + cmd[1:]
    else:
      cmd = re.sub('^cat ', 'type ', cmd)
  return cmd


PHASE_EARLY = 0
PHASE_LATE = 1
PHASE_LATELATE = 2


def ExpandVariables(input, phase, variables, build_file):
  # Look for the pattern that gets expanded into variables
  if phase == PHASE_EARLY:
    variable_re = early_variable_re
    expansion_symbol = '<'
  elif phase == PHASE_LATE:
    variable_re = late_variable_re
    expansion_symbol = '>'
  elif phase == PHASE_LATELATE:
    variable_re = latelate_variable_re
    expansion_symbol = '^'
  else:
    assert False

  input_str = str(input)
  if IsStrCanonicalInt(input_str):
    return int(input_str)

  # Do a quick scan to determine if an expensive regex search is warranted.
  if expansion_symbol not in input_str:
    return input_str

  # Get the entire list of matches as a list of MatchObject instances.
  # (using findall here would return strings instead of MatchObjects).
  matches = list(variable_re.finditer(input_str))
  if not matches:
    return input_str

  output = input_str
  # Reverse the list of matches so that replacements are done right-to-left.
  # That ensures that earlier replacements won't mess up the string in a
  # way that causes later calls to find the earlier substituted text instead
  # of what's intended for replacement.
  matches.reverse()
  for match_group in matches:
    match = match_group.groupdict()
    gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Matches: %r", match)
    # match['replace'] is the substring to look for, match['type']
    # is the character code for the replacement type (< > <! >! <| >| <@
    # >@ <!@ >!@), match['is_array'] contains a '[' for command
    # arrays, and match['content'] is the name of the variable (< >)
    # or command to run (<! >!). match['command_string'] is an optional
    # command string. Currently, only 'pymod_do_main' is supported.

    # run_command is true if a ! variant is used.
    run_command = '!' in match['type']
    command_string = match['command_string']

    # file_list is true if a | variant is used.
    file_list = '|' in match['type']

    # Capture these now so we can adjust them later.
    replace_start = match_group.start('replace')
    replace_end = match_group.end('replace')

    # Find the ending paren, and re-evaluate the contained string.
    (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:])

    # Adjust the replacement range to match the entire command
    # found by FindEnclosingBracketGroup (since the variable_re
    # probably doesn't match the entire command if it contained
    # nested variables).
    replace_end = replace_start + c_end

    # Find the "real" replacement, matching the appropriate closing
    # paren, and adjust the replacement start and end.
    replacement = input_str[replace_start:replace_end]

    # Figure out what the contents of the variable parens are.
    contents_start = replace_start + c_start + 1
    contents_end = replace_end - 1
    contents = input_str[contents_start:contents_end]

    # Do filter substitution now for <|().
    # Admittedly, this is different than the evaluation order in other
    # contexts. However, since filtration has no chance to run on <|(),
    # this seems like the only obvious way to give them access to filters.
    if file_list:
      processed_variables = gyp.simple_copy.deepcopy(variables)
      ProcessListFiltersInDict(contents, processed_variables)
      # Recurse to expand variables in the contents
      contents = ExpandVariables(contents, phase,
                                 processed_variables, build_file)
    else:
      # Recurse to expand variables in the contents
      contents = ExpandVariables(contents, phase, variables, build_file)

    # Strip off leading/trailing whitespace so that variable matches are
    # simpler below (and because they are rarely needed).
    contents = contents.strip()

    # expand_to_list is true if an @ variant is used.  In that case,
    # the expansion should result in a list.  Note that the caller
    # is to be expecting a list in return, and not all callers do
    # because not all are working in list context.  Also, for list
    # expansions, there can be no other text besides the variable
    # expansion in the input string.
    expand_to_list = '@' in match['type'] and input_str == replacement

    if run_command or file_list:
      # Find the build file's directory, so commands can be run or file lists
      # generated relative to it.
      build_file_dir = os.path.dirname(build_file)
      if build_file_dir == '' and not file_list:
        # If build_file is just a leaf filename indicating a file in the
        # current directory, build_file_dir might be an empty string.  Set
        # it to None to signal to subprocess.Popen that it should run the
        # command in the current directory.
        build_file_dir = None

      # Support <|(listfile.txt ...) which generates a file
      # containing items from a gyp list, generated at gyp time.
      # This works around actions/rules which have more inputs than will
      # fit on the command line.
      if file_list:
        if type(contents) is list:
          contents_list = contents
        else:
          contents_list = contents.split(' ')
        replacement = contents_list[0]
        if os.path.isabs(replacement):
          raise GypError('| cannot handle absolute paths, got "%s"' % replacement)

        if not generator_filelist_paths:
          path = os.path.join(build_file_dir, replacement)
        else:
          if os.path.isabs(build_file_dir):
            toplevel = generator_filelist_paths['toplevel']
            rel_build_file_dir = gyp.common.RelativePath(build_file_dir, toplevel)
          else:
            rel_build_file_dir = build_file_dir
          qualified_out_dir = generator_filelist_paths['qualified_out_dir']
          path = os.path.join(qualified_out_dir, rel_build_file_dir, replacement)
          gyp.common.EnsureDirExists(path)

        replacement = gyp.common.RelativePath(path, build_file_dir)
        f = gyp.common.WriteOnDiff(path)
        for i in contents_list[1:]:
          f.write('%s\n' % i)
        f.close()

      elif run_command:
        use_shell = True
        if match['is_array']:
          contents = eval(contents)
          use_shell = False

        # Check for a cached value to avoid executing commands, or generating
        # file lists more than once.  The cache key contains the command to be
        # run as well as the directory to run it from, to account for commands
        # that depend on their current directory.
        # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory,
        # someone could author a set of GYP files where each time the command
        # is invoked it produces different output by design.  When the need
        # arises, the syntax should be extended to support no caching off a
        # command's output so it is run every time.
        cache_key = (str(contents), build_file_dir)
        cached_value = cached_command_results.get(cache_key, None)
        if cached_value is None:
          gyp.DebugOutput(gyp.DEBUG_VARIABLES,
                          "Executing command '%s' in directory '%s'",
                          contents, build_file_dir)

          replacement = ''

          if command_string == 'pymod_do_main':
            # <!pymod_do_main(modulename param eters) loads |modulename| as a
            # python module and then calls that module's DoMain() function,
            # passing ["param", "eters"] as a single list argument.  For modules
            # that don't load quickly, this can be faster than
            # <!(python modulename param eters). Do this in |build_file_dir|.
            oldwd = os.getcwd()  # Python doesn't like os.open('.'): no fchdir.
            if build_file_dir:  # build_file_dir may be None (see above).
|
+
os.chdir(build_file_dir)
|
883
|
+
try:
|
884
|
+
|
885
|
+
parsed_contents = shlex.split(contents)
|
886
|
+
try:
|
887
|
+
py_module = __import__(parsed_contents[0])
|
888
|
+
except ImportError as e:
|
889
|
+
raise GypError("Error importing pymod_do_main"
|
890
|
+
"module (%s): %s" % (parsed_contents[0], e))
|
891
|
+
replacement = str(py_module.DoMain(parsed_contents[1:])).rstrip()
|
892
|
+
finally:
|
893
|
+
os.chdir(oldwd)
|
894
|
+
assert replacement != None
|
895
|
+
elif command_string:
|
896
|
+
raise GypError("Unknown command string '%s' in '%s'." %
|
897
|
+
(command_string, contents))
|
898
|
+
else:
|
899
|
+
# Fix up command with platform specific workarounds.
|
900
|
+
contents = FixupPlatformCommand(contents)
|
901
|
+
try:
|
902
|
+
p = subprocess.Popen(contents, shell=use_shell,
|
903
|
+
stdout=subprocess.PIPE,
|
904
|
+
stderr=subprocess.PIPE,
|
905
|
+
stdin=subprocess.PIPE,
|
906
|
+
cwd=build_file_dir)
|
907
|
+
except Exception, e:
|
908
|
+
raise GypError("%s while executing command '%s' in %s" %
|
909
|
+
(e, contents, build_file))
|
910
|
+
|
911
|
+
p_stdout, p_stderr = p.communicate('')
|
912
|
+
|
913
|
+
if p.wait() != 0 or p_stderr:
|
914
|
+
sys.stderr.write(p_stderr)
|
915
|
+
# Simulate check_call behavior, since check_call only exists
|
916
|
+
# in python 2.5 and later.
|
917
|
+
raise GypError("Call to '%s' returned exit status %d while in %s." %
|
918
|
+
(contents, p.returncode, build_file))
|
919
|
+
replacement = p_stdout.rstrip()
|
920
|
+
|
921
|
+
cached_command_results[cache_key] = replacement
|
922
|
+
else:
|
923
|
+
gyp.DebugOutput(gyp.DEBUG_VARIABLES,
|
924
|
+
"Had cache value for command '%s' in directory '%s'",
|
925
|
+
contents,build_file_dir)
|
926
|
+
replacement = cached_value
|
927
|
+
|
928
|
+
else:
|
929
|
+
if not contents in variables:
|
930
|
+
if contents[-1] in ['!', '/']:
|
931
|
+
# In order to allow cross-compiles (nacl) to happen more naturally,
|
932
|
+
# we will allow references to >(sources/) etc. to resolve to
|
933
|
+
# and empty list if undefined. This allows actions to:
|
934
|
+
# 'action!': [
|
935
|
+
# '>@(_sources!)',
|
936
|
+
# ],
|
937
|
+
# 'action/': [
|
938
|
+
# '>@(_sources/)',
|
939
|
+
# ],
|
940
|
+
replacement = []
|
941
|
+
else:
|
942
|
+
raise GypError('Undefined variable ' + contents +
|
943
|
+
' in ' + build_file)
|
944
|
+
else:
|
945
|
+
replacement = variables[contents]
|
946
|
+
|
947
|
+
if type(replacement) is list:
|
948
|
+
for item in replacement:
|
949
|
+
if not contents[-1] == '/' and type(item) not in (str, int):
|
950
|
+
raise GypError('Variable ' + contents +
|
951
|
+
' must expand to a string or list of strings; ' +
|
952
|
+
'list contains a ' +
|
953
|
+
item.__class__.__name__)
|
954
|
+
# Run through the list and handle variable expansions in it. Since
|
955
|
+
# the list is guaranteed not to contain dicts, this won't do anything
|
956
|
+
# with conditions sections.
|
957
|
+
ProcessVariablesAndConditionsInList(replacement, phase, variables,
|
958
|
+
build_file)
|
959
|
+
elif type(replacement) not in (str, int):
|
960
|
+
raise GypError('Variable ' + contents +
|
961
|
+
' must expand to a string or list of strings; ' +
|
962
|
+
'found a ' + replacement.__class__.__name__)
|
963
|
+
|
964
|
+
if expand_to_list:
|
965
|
+
# Expanding in list context. It's guaranteed that there's only one
|
966
|
+
# replacement to do in |input_str| and that it's this replacement. See
|
967
|
+
# above.
|
968
|
+
if type(replacement) is list:
|
969
|
+
# If it's already a list, make a copy.
|
970
|
+
output = replacement[:]
|
971
|
+
else:
|
972
|
+
# Split it the same way sh would split arguments.
|
973
|
+
output = shlex.split(str(replacement))
|
974
|
+
else:
|
975
|
+
# Expanding in string context.
|
976
|
+
encoded_replacement = ''
|
977
|
+
if type(replacement) is list:
|
978
|
+
# When expanding a list into string context, turn the list items
|
979
|
+
# into a string in a way that will work with a subprocess call.
|
980
|
+
#
|
981
|
+
# TODO(mark): This isn't completely correct. This should
|
982
|
+
# call a generator-provided function that observes the
|
983
|
+
# proper list-to-argument quoting rules on a specific
|
984
|
+
# platform instead of just calling the POSIX encoding
|
985
|
+
# routine.
|
986
|
+
encoded_replacement = gyp.common.EncodePOSIXShellList(replacement)
|
987
|
+
else:
|
988
|
+
encoded_replacement = replacement
|
989
|
+
|
990
|
+
output = output[:replace_start] + str(encoded_replacement) + \
|
991
|
+
output[replace_end:]
|
992
|
+
# Prepare for the next match iteration.
|
993
|
+
input_str = output
|
994
|
+
|
995
|
+
if output == input:
|
996
|
+
gyp.DebugOutput(gyp.DEBUG_VARIABLES,
|
997
|
+
"Found only identity matches on %r, avoiding infinite "
|
998
|
+
"recursion.",
|
999
|
+
output)
|
1000
|
+
else:
|
1001
|
+
# Look for more matches now that we've replaced some, to deal with
|
1002
|
+
# expanding local variables (variables defined in the same
|
1003
|
+
# variables block as this one).
|
1004
|
+
gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output)
|
1005
|
+
if type(output) is list:
|
1006
|
+
if output and type(output[0]) is list:
|
1007
|
+
# Leave output alone if it's a list of lists.
|
1008
|
+
# We don't want such lists to be stringified.
|
1009
|
+
pass
|
1010
|
+
else:
|
1011
|
+
new_output = []
|
1012
|
+
for item in output:
|
1013
|
+
new_output.append(
|
1014
|
+
ExpandVariables(item, phase, variables, build_file))
|
1015
|
+
output = new_output
|
1016
|
+
else:
|
1017
|
+
output = ExpandVariables(output, phase, variables, build_file)
|
1018
|
+
|
1019
|
+
# Convert all strings that are canonically-represented integers into integers.
|
1020
|
+
if type(output) is list:
|
1021
|
+
for index in xrange(0, len(output)):
|
1022
|
+
if IsStrCanonicalInt(output[index]):
|
1023
|
+
output[index] = int(output[index])
|
1024
|
+
elif IsStrCanonicalInt(output):
|
1025
|
+
output = int(output)
|
1026
|
+
|
1027
|
+
return output
|
1028
|
+
|
1029
|
+
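
# Illustrative sketch (editorial example, not part of the original module):
# assuming PHASE_EARLY and variables = {'foo': 'bar', 'names': ['a', 'b']},
# expansion behaves roughly as follows:
#   ExpandVariables('lib<(foo).a', PHASE_EARLY, variables, 'x.gyp')
#     -> 'libbar.a'    (string context)
#   ExpandVariables('<@(names)', PHASE_EARLY, variables, 'x.gyp')
#     -> ['a', 'b']    (list context: an @ variant where the input string is
#                       exactly the replacement)
# A result such as '42' would additionally be converted to the integer 42 by
# the canonical-int pass above.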
# The same condition is often evaluated over and over again so it
# makes sense to cache as much as possible between evaluations.
cached_conditions_asts = {}

def EvalCondition(condition, conditions_key, phase, variables, build_file):
  """Returns the dict that should be used or None if the result was
  that nothing should be used."""
  if type(condition) is not list:
    raise GypError(conditions_key + ' must be a list')
  if len(condition) < 2:
    # It's possible that condition[0] won't work in which case this
    # attempt will raise its own IndexError. That's probably fine.
    raise GypError(conditions_key + ' ' + condition[0] +
                   ' must be at least length 2, not ' + str(len(condition)))

  i = 0
  result = None
  while i < len(condition):
    cond_expr = condition[i]
    true_dict = condition[i + 1]
    if type(true_dict) is not dict:
      raise GypError('{} {} must be followed by a dictionary, not {}'.format(
          conditions_key, cond_expr, type(true_dict)))
    if len(condition) > i + 2 and type(condition[i + 2]) is dict:
      false_dict = condition[i + 2]
      i = i + 3
      if i != len(condition):
        raise GypError('{} {} has {} unexpected trailing items'.format(
            conditions_key, cond_expr, len(condition) - i))
    else:
      false_dict = None
      i = i + 2
    if result == None:
      result = EvalSingleCondition(
          cond_expr, true_dict, false_dict, phase, variables, build_file)

  return result
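
# Illustrative sketch (editorial example, not part of the original module):
# a conditions entry is a flat list of cond_expr/true_dict pairs, optionally
# ending with a false_dict:
#   ['OS=="mac"', {'defines': ['MAC']}, {'defines': ['NOT_MAC']}]
# EvalCondition returns the dict chosen by the first pair that yields one
# (true_dict on a true expression, the trailing false_dict otherwise), or
# None when nothing applies.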
def EvalSingleCondition(
    cond_expr, true_dict, false_dict, phase, variables, build_file):
  """Returns true_dict if cond_expr evaluates to true, and false_dict
  otherwise."""
  # Do expansions on the condition itself. Since the condition can naturally
  # contain variable references without needing to resort to GYP expansion
  # syntax, this is of dubious value for variables, but someone might want to
  # use a command expansion directly inside a condition.
  cond_expr_expanded = ExpandVariables(cond_expr, phase, variables,
                                       build_file)
  if type(cond_expr_expanded) not in (str, int):
    raise ValueError(
        'Variable expansion in this context permits str and int ' + \
        'only, found ' + cond_expr_expanded.__class__.__name__)

  try:
    if cond_expr_expanded in cached_conditions_asts:
      ast_code = cached_conditions_asts[cond_expr_expanded]
    else:
      ast_code = compile(cond_expr_expanded, '<string>', 'eval')
      cached_conditions_asts[cond_expr_expanded] = ast_code
    if eval(ast_code, {'__builtins__': None}, variables):
      return true_dict
    return false_dict
  except SyntaxError, e:
    syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
                               'at character %d.' %
                               (str(e.args[0]), e.text, build_file, e.offset),
                               e.filename, e.lineno, e.offset, e.text)
    raise syntax_error
  except NameError, e:
    gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
                               (cond_expr_expanded, build_file))
    raise GypError(e)
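
# Illustrative sketch (editorial example, not part of the original module):
# the condition string is compiled once, cached, and evaluated with the
# variables dict as locals and builtins disabled, so something like
#   EvalSingleCondition('OS=="mac" and use_foo==1', {'a': 1}, None,
#                       PHASE_EARLY, {'OS': 'mac', 'use_foo': 1}, 'x.gyp')
# would return {'a': 1}.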
def ProcessConditionsInDict(the_dict, phase, variables, build_file):
  # Process a 'conditions' or 'target_conditions' section in the_dict,
  # depending on phase.
  # early -> conditions
  # late -> target_conditions
  # latelate -> no conditions
  #
  # Each item in a conditions list consists of cond_expr, a string expression
  # evaluated as the condition, and true_dict, a dict that will be merged
  # into the_dict if cond_expr evaluates to true. Optionally, a third item,
  # false_dict, may be present. false_dict is merged into the_dict if
  # cond_expr evaluates to false.
  #
  # Any dict merged into the_dict will be recursively processed for nested
  # conditionals and other expansions, also according to phase, immediately
  # prior to being merged.

  if phase == PHASE_EARLY:
    conditions_key = 'conditions'
  elif phase == PHASE_LATE:
    conditions_key = 'target_conditions'
  elif phase == PHASE_LATELATE:
    return
  else:
    assert False

  if not conditions_key in the_dict:
    return

  conditions_list = the_dict[conditions_key]
  # Unhook the conditions list, it's no longer needed.
  del the_dict[conditions_key]

  for condition in conditions_list:
    merge_dict = EvalCondition(condition, conditions_key, phase, variables,
                               build_file)

    if merge_dict != None:
      # Expand variables and nested conditionals in the merge_dict before
      # merging it.
      ProcessVariablesAndConditionsInDict(merge_dict, phase,
                                          variables, build_file)

      MergeDicts(the_dict, merge_dict, build_file, build_file)
def LoadAutomaticVariablesFromDict(variables, the_dict):
  # Any keys with plain string values in the_dict become automatic variables.
  # The variable name is the key name with a "_" character prepended.
  for key, value in the_dict.iteritems():
    if type(value) in (str, int, list):
      variables['_' + key] = value
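
# Illustrative sketch (editorial example, not part of the original module):
# a target dict such as
#   {'target_name': 'foo', 'type': 'static_library', 'sources': ['a.c']}
# contributes the automatic variables '_target_name', '_type' and '_sources',
# so '<(_target_name)' expands to 'foo' within that target's scope.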
def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
  # Any key in the_dict's "variables" dict, if it has one, becomes a
  # variable. The variable name is the key name in the "variables" dict.
  # Variables that end with the % character are set only if they are unset in
  # the variables dict. the_dict_key is the name of the key that accesses
  # the_dict in the_dict's parent dict. If the_dict's parent is not a dict
  # (it could be a list or it could be parentless because it is a root dict),
  # the_dict_key will be None.
  for key, value in the_dict.get('variables', {}).iteritems():
    if type(value) not in (str, int, list):
      continue

    if key.endswith('%'):
      variable_name = key[:-1]
      if variable_name in variables:
        # If the variable is already set, don't set it.
        continue
      if the_dict_key == 'variables' and variable_name in the_dict:
        # If the variable is set without a % in the_dict, and the_dict is a
        # variables dict (making |variables| a variables sub-dict of a
        # variables dict), use the_dict's definition.
        value = the_dict[variable_name]
    else:
      variable_name = key

    variables[variable_name] = value
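
# Illustrative sketch (editorial example, not part of the original module):
# the % suffix gives default semantics. With
#   the_dict = {'variables': {'use_foo%': 0}}
# 'use_foo' becomes 0 only if the incoming variables dict doesn't already
# define 'use_foo'; a definition supplied by an enclosing scope wins
# otherwise.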
def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
                                        build_file, the_dict_key=None):
  """Handle all variable and command expansion and conditional evaluation.

  This function is the public entry point for all variable expansions and
  conditional evaluations. The variables_in dictionary will not be modified
  by this function.
  """

  # Make a copy of the variables_in dict that can be modified during the
  # loading of automatics and the loading of the variables dict.
  variables = variables_in.copy()
  LoadAutomaticVariablesFromDict(variables, the_dict)

  if 'variables' in the_dict:
    # Make sure all the local variables are added to the variables
    # list before we process them so that you can reference one
    # variable from another. They will be fully expanded by recursion
    # in ExpandVariables.
    for key, value in the_dict['variables'].iteritems():
      variables[key] = value

    # Handle the associated variables dict first, so that any variable
    # references within can be resolved prior to using them as variables.
    # Pass a copy of the variables dict to avoid having it be tainted.
    # Otherwise, it would have extra automatics added for everything that
    # should just be an ordinary variable in this scope.
    ProcessVariablesAndConditionsInDict(the_dict['variables'], phase,
                                        variables, build_file, 'variables')

  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)

  for key, value in the_dict.iteritems():
    # Skip "variables", which was already processed if present.
    if key != 'variables' and type(value) is str:
      expanded = ExpandVariables(value, phase, variables, build_file)
      if type(expanded) not in (str, int):
        raise ValueError(
            'Variable expansion in this context permits str and int ' + \
            'only, found ' + expanded.__class__.__name__ + ' for ' + key)
      the_dict[key] = expanded

  # Variable expansion may have resulted in changes to automatics. Reload.
  # TODO(mark): Optimization: only reload if no changes were made.
  variables = variables_in.copy()
  LoadAutomaticVariablesFromDict(variables, the_dict)
  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)

  # Process conditions in this dict. This is done after variable expansion
  # so that conditions may take advantage of expanded variables. For example,
  # if the_dict contains:
  #   {'type':       '<(library_type)',
  #    'conditions': [['_type=="static_library"', { ... }]]},
  # _type, as used in the condition, will only be set to the value of
  # library_type if variable expansion is performed before condition
  # processing. However, condition processing should occur prior to recursion
  # so that variables (both automatic and "variables" dict type) may be
  # adjusted by conditions sections, merged into the_dict, and have the
  # intended impact on contained dicts.
  #
  # This arrangement means that a "conditions" section containing a
  # "variables" section will only have those variables effective in subdicts,
  # not in the_dict. The workaround is to put a "conditions" section within a
  # "variables" section. For example:
  #   {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]],
  #    'defines':    ['<(define)'],
  #    'my_subdict': {'defines': ['<(define)']}},
  # will not result in "IS_MAC" being appended to the "defines" list in the
  # current scope but would result in it being appended to the "defines" list
  # within "my_subdict". By comparison:
  #   {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]},
  #    'defines':    ['<(define)'],
  #    'my_subdict': {'defines': ['<(define)']}},
  # will append "IS_MAC" to both "defines" lists.

  # Evaluate conditions sections, allowing variable expansions within them
  # as well as nested conditionals. This will process a 'conditions' or
  # 'target_conditions' section, perform appropriate merging and recursive
  # conditional and variable processing, and then remove the conditions
  # section from the_dict if it is present.
  ProcessConditionsInDict(the_dict, phase, variables, build_file)

  # Conditional processing may have resulted in changes to automatics or the
  # variables dict. Reload.
  variables = variables_in.copy()
  LoadAutomaticVariablesFromDict(variables, the_dict)
  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)

  # Recurse into child dicts, or process child lists which may result in
  # further recursion into descendant dicts.
  for key, value in the_dict.iteritems():
    # Skip "variables" and string values, which were already processed if
    # present.
    if key == 'variables' or type(value) is str:
      continue
    if type(value) is dict:
      # Pass a copy of the variables dict so that subdicts can't influence
      # parents.
      ProcessVariablesAndConditionsInDict(value, phase, variables,
                                          build_file, key)
    elif type(value) is list:
      # The list itself can't influence the variables dict, and
      # ProcessVariablesAndConditionsInList will make copies of the variables
      # dict if it needs to pass it to something that can influence it. No
      # copy is necessary here.
      ProcessVariablesAndConditionsInList(value, phase, variables,
                                          build_file)
    elif type(value) is not int:
      raise TypeError('Unknown type ' + value.__class__.__name__ + \
                      ' for ' + key)
def ProcessVariablesAndConditionsInList(the_list, phase, variables,
                                        build_file):
  # Iterate using an index so that new values can be assigned into the_list.
  index = 0
  while index < len(the_list):
    item = the_list[index]
    if type(item) is dict:
      # Make a copy of the variables dict so that it won't influence anything
      # outside of its own scope.
      ProcessVariablesAndConditionsInDict(item, phase, variables, build_file)
    elif type(item) is list:
      ProcessVariablesAndConditionsInList(item, phase, variables, build_file)
    elif type(item) is str:
      expanded = ExpandVariables(item, phase, variables, build_file)
      if type(expanded) in (str, int):
        the_list[index] = expanded
      elif type(expanded) is list:
        the_list[index:index+1] = expanded
        index += len(expanded)

        # index now identifies the next item to examine. Continue right now
        # without falling into the index increment below.
        continue
      else:
        raise ValueError(
            'Variable expansion in this context permits strings and ' + \
            'lists only, found ' + expanded.__class__.__name__ + ' at ' + \
            str(index))
    elif type(item) is not int:
      raise TypeError('Unknown type ' + item.__class__.__name__ + \
                      ' at index ' + str(index))
    index = index + 1
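
# Illustrative sketch (editorial example, not part of the original module):
# when a string item expands to a list, it is spliced in place rather than
# nested. Assuming variables = {'extra': ['b.c', 'c.c']}:
#   the_list = ['a.c', '<@(extra)', 'd.c']
#   ProcessVariablesAndConditionsInList(the_list, PHASE_EARLY, variables,
#                                       'x.gyp')
#   # the_list is now ['a.c', 'b.c', 'c.c', 'd.c']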
def BuildTargetsDict(data):
  """Builds a dict mapping fully-qualified target names to their target dicts.

  |data| is a dict mapping loaded build files by pathname relative to the
  current directory. Values in |data| are build file contents. For each
  |data| value with a "targets" key, the value of the "targets" key is taken
  as a list containing target dicts. Each target's fully-qualified name is
  constructed from the pathname of the build file (the |data| key) and its
  "target_name" property. These fully-qualified names are used as the keys
  in the returned dict. These keys provide access to the target dicts,
  the dicts in the "targets" lists.
  """

  targets = {}
  for build_file in data['target_build_files']:
    for target in data[build_file].get('targets', []):
      target_name = gyp.common.QualifiedTarget(build_file,
                                               target['target_name'],
                                               target['toolset'])
      if target_name in targets:
        raise GypError('Duplicate target definitions for ' + target_name)
      targets[target_name] = target

  return targets
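
# Illustrative sketch (editorial example, not part of the original module):
# a fully-qualified name combines the build file path, target name, and
# toolset, so a target 'base' with toolset 'target' defined in base/base.gyp
# is keyed roughly as 'base/base.gyp:base#target'.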
def QualifyDependencies(targets):
  """Make dependency links fully-qualified relative to the current directory.

  |targets| is a dict mapping fully-qualified target names to their target
  dicts. For each target in this dict, keys known to contain dependency
  links are examined, and any dependencies referenced will be rewritten
  so that they are fully-qualified and relative to the current directory.
  All rewritten dependencies are suitable for use as keys to |targets| or a
  similar dict.
  """

  all_dependency_sections = [dep + op
                             for dep in dependency_sections
                             for op in ('', '!', '/')]

  for target, target_dict in targets.iteritems():
    target_build_file = gyp.common.BuildFile(target)
    toolset = target_dict['toolset']
    for dependency_key in all_dependency_sections:
      dependencies = target_dict.get(dependency_key, [])
      for index in xrange(0, len(dependencies)):
        dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
            target_build_file, dependencies[index], toolset)
        if not multiple_toolsets:
          # Ignore toolset specification in the dependency if it is
          # specified.
          dep_toolset = toolset
        dependency = gyp.common.QualifiedTarget(dep_file,
                                                dep_target,
                                                dep_toolset)
        dependencies[index] = dependency

        # Make sure anything appearing in a list other than "dependencies"
        # also appears in the "dependencies" list.
        if dependency_key != 'dependencies' and \
           dependency not in target_dict['dependencies']:
          raise GypError('Found ' + dependency + ' in ' + dependency_key +
                         ' of ' + target + ', but not in dependencies')
def ExpandWildcardDependencies(targets, data):
  """Expands dependencies specified as build_file:*.

  For each target in |targets|, examines sections containing links to other
  targets. If any such section contains a link of the form build_file:*, it
  is taken as a wildcard link, and is expanded to list each target in
  build_file. The |data| dict provides access to build file dicts.

  Any target that does not wish to be included by wildcard can provide an
  optional "suppress_wildcard" key in its target dict. When present and
  true, a wildcard dependency link will not include such targets.

  All dependency names, including the keys to |targets| and the values in
  each dependency list, must be qualified when this function is called.
  """

  for target, target_dict in targets.iteritems():
    toolset = target_dict['toolset']
    target_build_file = gyp.common.BuildFile(target)
    for dependency_key in dependency_sections:
      dependencies = target_dict.get(dependency_key, [])

      # Loop this way instead of "for dependency in" or "for index in xrange"
      # because the dependencies list will be modified within the loop body.
      index = 0
      while index < len(dependencies):
        (dependency_build_file, dependency_target, dependency_toolset) = \
            gyp.common.ParseQualifiedTarget(dependencies[index])
        if dependency_target != '*' and dependency_toolset != '*':
          # Not a wildcard. Keep it moving.
          index = index + 1
          continue

        if dependency_build_file == target_build_file:
          # It's an error for a target to depend on all other targets in
          # the same file, because a target cannot depend on itself.
          raise GypError('Found wildcard in ' + dependency_key + ' of ' +
                         target + ' referring to same build file')

        # Take the wildcard out and adjust the index so that the next
        # dependency in the list will be processed the next time through the
        # loop.
        del dependencies[index]
        index = index - 1

        # Loop through the targets in the other build file, adding them to
        # this target's list of dependencies in place of the removed
        # wildcard.
        dependency_target_dicts = data[dependency_build_file]['targets']
        for dependency_target_dict in dependency_target_dicts:
          if int(dependency_target_dict.get('suppress_wildcard', False)):
            continue
          dependency_target_name = dependency_target_dict['target_name']
          if (dependency_target != '*' and
              dependency_target != dependency_target_name):
            continue
          dependency_target_toolset = dependency_target_dict['toolset']
          if (dependency_toolset != '*' and
              dependency_toolset != dependency_target_toolset):
            continue
          dependency = gyp.common.QualifiedTarget(dependency_build_file,
                                                  dependency_target_name,
                                                  dependency_target_toolset)
          index = index + 1
          dependencies.insert(index, dependency)

        index = index + 1
def Unify(l):
  """Removes duplicate elements from l, keeping the first element."""
  seen = {}
  return [seen.setdefault(e, e) for e in l if e not in seen]
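
# Illustrative sketch (editorial example, not part of the original module):
#   Unify(['a', 'b', 'a', 'c', 'b'])  ->  ['a', 'b', 'c']
# Order of first occurrence is preserved.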
def RemoveDuplicateDependencies(targets):
  """Makes sure every dependency appears only once in all targets'
  dependency lists."""
  for target_name, target_dict in targets.iteritems():
    for dependency_key in dependency_sections:
      dependencies = target_dict.get(dependency_key, [])
      if dependencies:
        target_dict[dependency_key] = Unify(dependencies)
def Filter(l, item):
  """Removes item from l."""
  res = {}
  return [res.setdefault(e, e) for e in l if e != item]
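
# Illustrative sketch (editorial example, not part of the original module):
#   Filter(['a', 'b', 'a', 'c'], 'a')  ->  ['b', 'c']
# Unlike Unify, this only drops the given item; other duplicates survive.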
def RemoveSelfDependencies(targets):
  """Remove self dependencies from targets that have the
  prune_self_dependency variable set."""
  for target_name, target_dict in targets.iteritems():
    for dependency_key in dependency_sections:
      dependencies = target_dict.get(dependency_key, [])
      if dependencies:
        for t in dependencies:
          if t == target_name:
            if targets[t].get('variables', {}).get('prune_self_dependency',
                                                   0):
              target_dict[dependency_key] = Filter(dependencies, target_name)
def RemoveLinkDependenciesFromNoneTargets(targets):
  """Remove dependencies having the 'link_dependency' attribute from the
  'none' targets."""
  for target_name, target_dict in targets.iteritems():
    for dependency_key in dependency_sections:
      dependencies = target_dict.get(dependency_key, [])
      if dependencies:
        for t in dependencies:
          if target_dict.get('type', None) == 'none':
            if targets[t].get('variables', {}).get('link_dependency', 0):
              target_dict[dependency_key] = \
                  Filter(target_dict[dependency_key], t)
class DependencyGraphNode(object):
  """

  Attributes:
    ref: A reference to an object that this DependencyGraphNode represents.
    dependencies: List of DependencyGraphNodes on which this one depends.
    dependents: List of DependencyGraphNodes that depend on this one.
  """

  class CircularException(GypError):
    pass

  def __init__(self, ref):
    self.ref = ref
    self.dependencies = []
    self.dependents = []

  def __repr__(self):
    return '<DependencyGraphNode: %r>' % self.ref

  def FlattenToList(self):
    # flat_list is the sorted list of dependencies - actually, the list items
    # are the "ref" attributes of DependencyGraphNodes. Every target will
    # appear in flat_list after all of its dependencies, and before all of
    # its dependents.
    flat_list = OrderedSet()

    def ExtractNodeRef(node):
      """Extracts the object that the node represents from the given node."""
      return node.ref

    # in_degree_zeros is the list of DependencyGraphNodes that have no
    # dependencies not in flat_list. Initially, it is a copy of the children
    # of this node, because when the graph was built, nodes with no
    # dependencies were made implicit dependents of the root node.
    in_degree_zeros = sorted(self.dependents[:], key=ExtractNodeRef)

    while in_degree_zeros:
      # Nodes in in_degree_zeros have no dependencies not in flat_list, so
      # they can be appended to flat_list. Take these nodes out of
      # in_degree_zeros as work progresses, so that the next node to process
      # from the list can always be accessed at a consistent position.
      node = in_degree_zeros.pop()
      flat_list.add(node.ref)

      # Look at dependents of the node just added to flat_list. Some of them
      # may now belong in in_degree_zeros.
      for node_dependent in sorted(node.dependents, key=ExtractNodeRef):
        is_in_degree_zero = True
        # TODO: We want to check through the
        # node_dependent.dependencies list but if it's long and we
        # always start at the beginning, then we get O(n^2) behaviour.
        for node_dependent_dependency in (sorted(node_dependent.dependencies,
                                                 key=ExtractNodeRef)):
          if not node_dependent_dependency.ref in flat_list:
            # The dependent has one or more dependencies not in flat_list.
            # There will be more chances to add it to flat_list when
            # examining it again as a dependent of those other dependencies,
            # provided that there are no cycles.
            is_in_degree_zero = False
            break

        if is_in_degree_zero:
          # All of the dependent's dependencies are already in flat_list.
          # Add it to in_degree_zeros where it will be processed in a future
          # iteration of the outer loop.
          in_degree_zeros += [node_dependent]

    return list(flat_list)

  def FindCycles(self):
    """
    Returns a list of cycles in the graph, where each cycle is its own list.
    """
    results = []
    visited = set()

    def Visit(node, path):
      for child in node.dependents:
        if child in path:
          results.append([child] + path[:path.index(child) + 1])
        elif not child in visited:
          visited.add(child)
          Visit(child, [child] + path)

    visited.add(self)
    Visit(self, [self])

    return results

  def DirectDependencies(self, dependencies=None):
    """Returns a list of just direct dependencies."""
    if dependencies == None:
      dependencies = []

    for dependency in self.dependencies:
      # Check for None, corresponding to the root node.
      if dependency.ref != None and dependency.ref not in dependencies:
        dependencies.append(dependency.ref)

    return dependencies

  def _AddImportedDependencies(self, targets, dependencies=None):
    """Given a list of direct dependencies, adds indirect dependencies that
    other dependencies have declared to export their settings.

    This method does not operate on self. Rather, it operates on the list
    of dependencies in the |dependencies| argument. For each dependency in
    that list, if any declares that it exports the settings of one of its
    own dependencies, those dependencies whose settings are "passed through"
    are added to the list. As new items are added to the list, they too will
    be processed, so it is possible to import settings through multiple
    levels of dependencies.

    This method is not terribly useful on its own, it depends on being
    "primed" with a list of direct dependencies such as one provided by
    DirectDependencies. DirectAndImportedDependencies is intended to be the
    public entry point.
    """

    if dependencies == None:
      dependencies = []

    index = 0
    while index < len(dependencies):
      dependency = dependencies[index]
      dependency_dict = targets[dependency]
      # Add any dependencies whose settings should be imported to the list
      # if not already present. Newly-added items will be checked for
      # their own imports when the list iteration reaches them.
      # Rather than simply appending new items, insert them after the
      # dependency that exported them. This is done to more closely match
      # the depth-first method used by DeepDependencies.
      add_index = 1
      for imported_dependency in \
          dependency_dict.get('export_dependent_settings', []):
        if imported_dependency not in dependencies:
          dependencies.insert(index + add_index, imported_dependency)
          add_index = add_index + 1
      index = index + 1

    return dependencies

  def DirectAndImportedDependencies(self, targets, dependencies=None):
    """Returns a list of a target's direct dependencies and all indirect
    dependencies that a dependency has advertised settings should be exported
    through the dependency for.
    """

    dependencies = self.DirectDependencies(dependencies)
    return self._AddImportedDependencies(targets, dependencies)

  def DeepDependencies(self, dependencies=None):
    """Returns an OrderedSet of all of a target's dependencies,
    recursively."""
    if dependencies is None:
      # Using a list to get ordered output and a set to do fast "is it
      # already added" checks.
      dependencies = OrderedSet()

    for dependency in self.dependencies:
      # Check for None, corresponding to the root node.
      if dependency.ref is None:
        continue
      if dependency.ref not in dependencies:
        dependency.DeepDependencies(dependencies)
        dependencies.add(dependency.ref)

    return dependencies

  def _LinkDependenciesInternal(self, targets, include_shared_libraries,
                                dependencies=None, initial=True):
    """Returns an OrderedSet of dependency targets that are linked
    into this target.

    This function has a split personality, depending on the setting of
    |initial|. Outside callers should always leave |initial| at its default
    setting.

    When adding a target to the list of dependencies, this function will
    recurse into itself with |initial| set to False, to collect dependencies
    that are linked into the linkable target for which the list is being
    built.

    If |include_shared_libraries| is False, the resulting dependencies will
    not include shared_library targets that are linked into this target.
    """
    if dependencies is None:
      # Using a list to get ordered output and a set to do fast "is it
      # already added" checks.
      dependencies = OrderedSet()

    # Check for None, corresponding to the root node.
    if self.ref is None:
      return dependencies

    # It's kind of sucky that |targets| has to be passed into this function,
    # but that's presently the easiest way to access the target dicts so that
    # this function can find target types.

    if 'target_name' not in targets[self.ref]:
      raise GypError("Missing 'target_name' field in target.")

    if 'type' not in targets[self.ref]:
      raise GypError("Missing 'type' field in target %s" %
                     targets[self.ref]['target_name'])

    target_type = targets[self.ref]['type']

    is_linkable = target_type in linkable_types

    if initial and not is_linkable:
      # If this is the first target being examined and it's not linkable,
      # return an empty list of link dependencies, because the link
      # dependencies are intended to apply to the target itself (initial is
      # True) and this target won't be linked.
      return dependencies

    # Don't traverse 'none' targets if explicitly excluded.
    if (target_type == 'none' and
        not targets[self.ref].get('dependencies_traverse', True)):
      dependencies.add(self.ref)
      return dependencies

    # Executables, mac kernel extensions and loadable modules are already
    # fully and finally linked. Nothing else can be a link dependency of
    # them, there can only be dependencies in the sense that a dependent
    # target might run an executable or load the loadable_module.
    if not initial and target_type in ('executable', 'loadable_module',
                                       'mac_kernel_extension'):
      return dependencies

    # Shared libraries are already fully linked. They should only be included
    # in |dependencies| when adjusting static library dependencies (in order
    # to link against the shared_library's import lib), but should not be
    # included in |dependencies| when propagating link_settings.
    # The |include_shared_libraries| flag controls which of these two cases
    # we are handling.
    if (not initial and target_type == 'shared_library' and
        not include_shared_libraries):
      return dependencies

    # The target is linkable, add it to the list of link dependencies.
    if self.ref not in dependencies:
      dependencies.add(self.ref)
      if initial or not is_linkable:
        # If this is a subsequent target and it's linkable, don't look any
        # further for linkable dependencies, as they'll already be linked
        # into this linkable target. Always look at dependencies of the
        # initial target, and always look at dependencies of non-linkables.
        for dependency in self.dependencies:
          dependency._LinkDependenciesInternal(targets,
                                               include_shared_libraries,
                                               dependencies, False)

    return dependencies

  def DependenciesForLinkSettings(self, targets):
    """
    Returns a list of dependency targets whose link_settings should be merged
    into this target.
    """

    # TODO(sbaig) Currently, chrome depends on the bug that shared libraries'
    # link_settings are propagated. So for now, we will allow it, unless the
    # 'allow_sharedlib_linksettings_propagation' flag is explicitly set to
    # False. Once chrome is fixed, we can remove this flag.
    include_shared_libraries = \
        targets[self.ref].get('allow_sharedlib_linksettings_propagation',
                              True)
    return self._LinkDependenciesInternal(targets, include_shared_libraries)

  def DependenciesToLinkAgainst(self, targets):
    """
    Returns a list of dependency targets that are linked into this target.
    """
    return self._LinkDependenciesInternal(targets, True)
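
# Illustrative sketch (editorial example, not part of the original module):
# FlattenToList is a Kahn-style topological sort run from the synthetic root
# node. For a graph where app depends on lib and lib depends on base, the
# flattened order is ['base', 'lib', 'app']: every ref appears after all of
# its dependencies and before all of its dependents.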
def BuildDependencyList(targets):
  # Create a DependencyGraphNode for each target. Put it into a dict for
  # easy access.
  dependency_nodes = {}
  for target, spec in targets.iteritems():
    if target not in dependency_nodes:
      dependency_nodes[target] = DependencyGraphNode(target)

  # Set up the dependency links. Targets that have no dependencies are
  # treated as dependent on root_node.
  root_node = DependencyGraphNode(None)
  for target, spec in targets.iteritems():
    target_node = dependency_nodes[target]
    target_build_file = gyp.common.BuildFile(target)
    dependencies = spec.get('dependencies')
    if not dependencies:
      target_node.dependencies = [root_node]
      root_node.dependents.append(target_node)
    else:
      for dependency in dependencies:
        dependency_node = dependency_nodes.get(dependency)
        if not dependency_node:
          raise GypError("Dependency '%s' not found while "
                         "trying to load target %s" % (dependency, target))
        target_node.dependencies.append(dependency_node)
        dependency_node.dependents.append(target_node)

  flat_list = root_node.FlattenToList()

  # If there's anything left unvisited, there must be a circular dependency
  # (cycle).
  if len(flat_list) != len(targets):
    if not root_node.dependents:
      # If all targets have dependencies, add the first target as a
      # dependent of root_node so that the cycle can be discovered from
      # root_node.
      target = targets.keys()[0]
      target_node = dependency_nodes[target]
      target_node.dependencies.append(root_node)
      root_node.dependents.append(target_node)

    cycles = []
    for cycle in root_node.FindCycles():
      paths = [node.ref for node in cycle]
      cycles.append('Cycle: %s' % ' -> '.join(paths))
    raise DependencyGraphNode.CircularException(
        'Cycles in dependency graph detected:\n' + '\n'.join(cycles))

  return [dependency_nodes, flat_list]
def VerifyNoGYPFileCircularDependencies(targets):
  # Create a DependencyGraphNode for each gyp file containing a target. Put
  # it into a dict for easy access.
  dependency_nodes = {}
  for target in targets.iterkeys():
    build_file = gyp.common.BuildFile(target)
    if not build_file in dependency_nodes:
      dependency_nodes[build_file] = DependencyGraphNode(build_file)

  # Set up the dependency links.
  for target, spec in targets.iteritems():
    build_file = gyp.common.BuildFile(target)
    build_file_node = dependency_nodes[build_file]
    target_dependencies = spec.get('dependencies', [])
    for dependency in target_dependencies:
      try:
        dependency_build_file = gyp.common.BuildFile(dependency)
      except GypError, e:
        gyp.common.ExceptionAppend(
            e, 'while computing dependencies of .gyp file %s' % build_file)
        raise

      if dependency_build_file == build_file:
        # A .gyp file is allowed to refer back to itself.
        continue
      dependency_node = dependency_nodes.get(dependency_build_file)
      if not dependency_node:
        raise GypError("Dependency '%s' not found" % dependency_build_file)
      if dependency_node not in build_file_node.dependencies:
        build_file_node.dependencies.append(dependency_node)
        dependency_node.dependents.append(build_file_node)

  # Files that have no dependencies are treated as dependent on root_node.
  root_node = DependencyGraphNode(None)
  for build_file_node in dependency_nodes.itervalues():
    if len(build_file_node.dependencies) == 0:
      build_file_node.dependencies.append(root_node)
      root_node.dependents.append(build_file_node)

  flat_list = root_node.FlattenToList()

  # If there's anything left unvisited, there must be a circular dependency
  # (cycle).
  if len(flat_list) != len(dependency_nodes):
    if not root_node.dependents:
      # If all files have dependencies, add the first file as a dependent
      # of root_node so that the cycle can be discovered from root_node.
      file_node = dependency_nodes.values()[0]
      file_node.dependencies.append(root_node)
      root_node.dependents.append(file_node)
    cycles = []
    for cycle in root_node.FindCycles():
      paths = [node.ref for node in cycle]
      cycles.append('Cycle: %s' % ' -> '.join(paths))
    raise DependencyGraphNode.CircularException(
        'Cycles in .gyp file dependency graph detected:\n' +
        '\n'.join(cycles))
def DoDependentSettings(key, flat_list, targets, dependency_nodes):
  # key should be one of all_dependent_settings, direct_dependent_settings,
  # or link_settings.

  for target in flat_list:
    target_dict = targets[target]
    build_file = gyp.common.BuildFile(target)

    if key == 'all_dependent_settings':
      dependencies = dependency_nodes[target].DeepDependencies()
    elif key == 'direct_dependent_settings':
      dependencies = \
          dependency_nodes[target].DirectAndImportedDependencies(targets)
    elif key == 'link_settings':
      dependencies = \
          dependency_nodes[target].DependenciesForLinkSettings(targets)
    else:
      raise GypError("DoDependentSettings doesn't know how to determine "
                     'dependencies for ' + key)

    for dependency in dependencies:
      dependency_dict = targets[dependency]
      if not key in dependency_dict:
        continue
      dependency_build_file = gyp.common.BuildFile(dependency)
      MergeDicts(target_dict, dependency_dict[key],
                 build_file, dependency_build_file)
def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
                                    sort_dependencies):
  # Recompute target "dependencies" properties. For each static library
  # target, remove "dependencies" entries referring to other static
  # libraries, unless the dependency has the "hard_dependency" attribute set.
  # For each linkable target, add a "dependencies" entry referring to all of
  # the target's computed list of link dependencies (including static
  # libraries) if no such entry is already present.
  for target in flat_list:
    target_dict = targets[target]
    target_type = target_dict['type']

    if target_type == 'static_library':
      if not 'dependencies' in target_dict:
        continue

      target_dict['dependencies_original'] = target_dict.get(
          'dependencies', [])[:]

      # A static library should not depend on another static library unless
      # the dependency relationship is "hard," which should only be done
      # when a dependent relies on some side effect other than just the
      # build product, like a rule or action output. Further, if a target
      # has a non-hard dependency, but that dependency exports a hard
      # dependency, the non-hard dependency can safely be removed, but the
      # exported hard dependency must be added to the target to keep the
      # same dependency ordering.
      dependencies = \
          dependency_nodes[target].DirectAndImportedDependencies(targets)
      index = 0
      while index < len(dependencies):
        dependency = dependencies[index]
        dependency_dict = targets[dependency]

        # Remove every non-hard static library dependency and remove every
        # non-static library dependency that isn't a direct dependency.
        if (dependency_dict['type'] == 'static_library' and \
            not dependency_dict.get('hard_dependency', False)) or \
           (dependency_dict['type'] != 'static_library' and \
            not dependency in target_dict['dependencies']):
          # Take the dependency out of the list, and don't increment index
          # because the next dependency to analyze will shift into the index
          # formerly occupied by the one being removed.
          del dependencies[index]
        else:
          index = index + 1

      # Update the dependencies. If the dependencies list is empty, it's not
      # needed, so unhook it.
      if len(dependencies) > 0:
        target_dict['dependencies'] = dependencies
      else:
        del target_dict['dependencies']

    elif target_type in linkable_types:
      # Get a list of dependency targets that should be linked into this
      # target. Add them to the dependencies list if they're not already
      # present.

      link_dependencies = \
          dependency_nodes[target].DependenciesToLinkAgainst(targets)
      for dependency in link_dependencies:
        if dependency == target:
          continue
        if not 'dependencies' in target_dict:
          target_dict['dependencies'] = []
        if not dependency in target_dict['dependencies']:
          target_dict['dependencies'].append(dependency)
      # Sort the dependencies list in the order from dependents to
      # dependencies. e.g. If A and B depend on C and C depends on D, sort
      # them in A, B, C, D. Note: flat_list is already sorted in the order
      # from dependencies to dependents.
      if sort_dependencies and 'dependencies' in target_dict:
        target_dict['dependencies'] = [dep for dep in reversed(flat_list)
                                       if dep in target_dict['dependencies']]
# Initialize this here to speed up MakePathRelative.
exception_re = re.compile(r'''["']?[-/$<>^]''')


def MakePathRelative(to_file, fro_file, item):
  # If item is a relative path, it's relative to the build file dict that it's
  # coming from. Fix it up to make it relative to the build file dict that
  # it's going into.
  # Exception: any |item| that begins with these special characters is
  # returned without modification.
  #   /   Used when a path is already absolute (shortcut optimization;
  #       such paths would be returned as absolute anyway)
  #   $   Used for build environment variables
  #   -   Used for some build environment flags (such as -lapr-1 in a
  #       "libraries" section)
  #   <   Used for our own variable and command expansions (see ExpandVariables)
  #   >   Used for our own variable and command expansions (see ExpandVariables)
  #   ^   Used for our own variable and command expansions (see ExpandVariables)
  #
  #   "/' Used when a value is quoted. If these are present, then we
  #       check the second character instead.
  #
  if to_file == fro_file or exception_re.match(item):
    return item
  else:
    # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
    # temporary measure. This should really be addressed by keeping all paths
    # in POSIX until actual project generation.
    ret = os.path.normpath(os.path.join(
        gyp.common.RelativePath(os.path.dirname(fro_file),
                                os.path.dirname(to_file)),
        item)).replace('\\', '/')
    if item[-1] == '/':
      ret += '/'
    return ret
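
# A minimal illustration of the relativization above, using hypothetical
# paths (not part of gyp): an item 'include/foo.h' coming from 'c/d/fro.gyp'
# and going into 'a/b/to.gyp' comes out as '../../c/d/include/foo.h', while
# items such as '$(VAR)/foo' or '-lapr-1' match |exception_re| and are
# returned unmodified.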


def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
  # Python documentation recommends objects which do not support hash
  # set this value to None. Python library objects follow this rule.
  is_hashable = lambda val: val.__hash__

  # If x is hashable, returns whether x is in s. Else returns whether x is in l.
  def is_in_set_or_list(x, s, l):
    if is_hashable(x):
      return x in s
    return x in l

  prepend_index = 0

  # Make membership testing of hashables in |to| (in particular, strings)
  # faster.
  hashable_to_set = set(x for x in to if is_hashable(x))
  for item in fro:
    singleton = False
    if type(item) in (str, int):
      # The cheap and easy case.
      if is_paths:
        to_item = MakePathRelative(to_file, fro_file, item)
      else:
        to_item = item

      if not (type(item) is str and item.startswith('-')):
        # Any string that doesn't begin with a "-" is a singleton - it can
        # only appear once in a list, to be enforced by the list merge append
        # or prepend.
        singleton = True
    elif type(item) is dict:
      # Make a copy of the dictionary, continuing to look for paths to fix.
      # The other intelligent aspects of merge processing won't apply because
      # item is being merged into an empty dict.
      to_item = {}
      MergeDicts(to_item, item, to_file, fro_file)
    elif type(item) is list:
      # Recurse, making a copy of the list. If the list contains any
      # descendant dicts, path fixing will occur. Note that here, custom
      # values for is_paths and append are dropped; those are only to be
      # applied to |to| and |fro|, not sublists of |fro|. append shouldn't
      # matter anyway because the new |to_item| list is empty.
      to_item = []
      MergeLists(to_item, item, to_file, fro_file)
    else:
      raise TypeError(
          'Attempt to merge list item of unsupported type ' + \
          item.__class__.__name__)

    if append:
      # If appending a singleton that's already in the list, don't append.
      # This ensures that the earliest occurrence of the item will stay put.
      if not singleton or not is_in_set_or_list(to_item, hashable_to_set, to):
        to.append(to_item)
        if is_hashable(to_item):
          hashable_to_set.add(to_item)
    else:
      # If prepending a singleton that's already in the list, remove the
      # existing instance and proceed with the prepend. This ensures that the
      # item appears at the earliest possible position in the list.
      while singleton and to_item in to:
        to.remove(to_item)

      # Don't just insert everything at index 0. That would prepend the new
      # items to the list in reverse order, which would be an unwelcome
      # surprise.
      to.insert(prepend_index, to_item)
      if is_hashable(to_item):
        hashable_to_set.add(to_item)
      prepend_index = prepend_index + 1
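
# A minimal illustration of the singleton semantics above, with assumed
# inputs (not part of gyp): appending fro=['a', 'b'] into to=['b', 'c']
# yields ['b', 'c', 'a'], because the existing singleton 'b' keeps its
# earlier slot; prepending the same fro (append=False) yields
# ['a', 'b', 'c'], because the existing 'b' is removed and re-prepended.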


def MergeDicts(to, fro, to_file, fro_file):
  # I wanted to name the parameter "from" but it's a Python keyword...
  for k, v in fro.iteritems():
    # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give
    # copy semantics. Something else may want to merge from the |fro| dict
    # later, and having the same dict ref pointed to twice in the tree isn't
    # what anyone wants considering that the dicts may subsequently be
    # modified.
    if k in to:
      bad_merge = False
      if type(v) in (str, int):
        if type(to[k]) not in (str, int):
          bad_merge = True
      elif type(v) is not type(to[k]):
        bad_merge = True

      if bad_merge:
        raise TypeError(
            'Attempt to merge dict value of type ' + v.__class__.__name__ + \
            ' into incompatible type ' + to[k].__class__.__name__ + \
            ' for key ' + k)
    if type(v) in (str, int):
      # Overwrite the existing value, if any. Cheap and easy.
      is_path = IsPathSection(k)
      if is_path:
        to[k] = MakePathRelative(to_file, fro_file, v)
      else:
        to[k] = v
    elif type(v) is dict:
      # Recurse, guaranteeing copies will be made of objects that require it.
      if not k in to:
        to[k] = {}
      MergeDicts(to[k], v, to_file, fro_file)
    elif type(v) is list:
      # Lists in dicts can be merged with different policies, depending on
      # how the key in the "from" dict (k, the from-key) is written.
      #
      # If the from-key has          ...the to-list will have this action
      # this character appended:...     applied when receiving the from-list:
      #                           =  replace
      #                           +  prepend
      #                           ?  set, only if to-list does not yet exist
      #                      (none)  append
      #
      # This logic is list-specific, but since it relies on the associated
      # dict key, it's checked in this dict-oriented function.
      ext = k[-1]
      append = True
      if ext == '=':
        list_base = k[:-1]
        lists_incompatible = [list_base, list_base + '?']
        to[list_base] = []
      elif ext == '+':
        list_base = k[:-1]
        lists_incompatible = [list_base + '=', list_base + '?']
        append = False
      elif ext == '?':
        list_base = k[:-1]
        lists_incompatible = [list_base, list_base + '=', list_base + '+']
      else:
        list_base = k
        lists_incompatible = [list_base + '=', list_base + '?']

      # Some combinations of merge policies appearing together are meaningless.
      # It's stupid to replace and append simultaneously, for example. Append
      # and prepend are the only policies that can coexist.
      for list_incompatible in lists_incompatible:
        if list_incompatible in fro:
          raise GypError('Incompatible list policies ' + k + ' and ' +
                         list_incompatible)

      if list_base in to:
        if ext == '?':
          # If the key ends in "?", the list will only be merged if it doesn't
          # already exist.
          continue
        elif type(to[list_base]) is not list:
          # This may not have been checked above if merging in a list with an
          # extension character.
          raise TypeError(
              'Attempt to merge dict value of type ' + v.__class__.__name__ + \
              ' into incompatible type ' + to[list_base].__class__.__name__ + \
              ' for key ' + list_base + '(' + k + ')')
      else:
        to[list_base] = []

      # Call MergeLists, which will make copies of objects that require it.
      # MergeLists can recurse back into MergeDicts, although only to make
      # copies of dicts (with paths fixed); there will be no subsequent dict
      # "merging" once entering a list, because lists are always replaced,
      # appended to, or prepended to.
      is_paths = IsPathSection(list_base)
      MergeLists(to[list_base], v, to_file, fro_file, is_paths, append)
    else:
      raise TypeError(
          'Attempt to merge dict value of unsupported type ' + \
          v.__class__.__name__ + ' for key ' + k)
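
# A minimal illustration of the from-key suffixes above, with assumed inputs
# (not part of gyp): merging fro = {'defines=': ['B'], 'sources': ['b.cc']}
# into to = {'defines': ['A'], 'sources': ['a.cc']} with to_file == fro_file
# leaves to == {'defines': ['B'], 'sources': ['a.cc', 'b.cc']}; the '=' key
# replaced its to-list while the bare key appended to its to-list.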


def MergeConfigWithInheritance(new_configuration_dict, build_file,
                               target_dict, configuration, visited):
  # Skip if previously visited.
  if configuration in visited:
    return

  # Look at this configuration.
  configuration_dict = target_dict['configurations'][configuration]

  # Merge in parents.
  for parent in configuration_dict.get('inherit_from', []):
    MergeConfigWithInheritance(new_configuration_dict, build_file,
                               target_dict, parent, visited + [configuration])

  # Merge it into the new config.
  MergeDicts(new_configuration_dict, configuration_dict,
             build_file, build_file)

  # Drop abstract.
  if 'abstract' in new_configuration_dict:
    del new_configuration_dict['abstract']
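
# A minimal illustration of the inheritance above, with an assumed target
# (not part of gyp) whose configurations are
#   {'Base': {'abstract': 1, 'defines': ['COMMON']},
#    'Debug': {'inherit_from': ['Base'], 'defines': ['DEBUG']}}:
# merging 'Debug' into an empty new_configuration_dict leaves its 'defines'
# as ['COMMON', 'DEBUG'] and drops the 'abstract' key inherited from 'Base'.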


def SetUpConfigurations(target, target_dict):
  # key_suffixes is a list of key suffixes that might appear on key names.
  # These suffixes are handled in conditional evaluations (for =, +, and ?)
  # and rules/exclude processing (for ! and /). Keys with these suffixes
  # should be treated the same as keys without.
  key_suffixes = ['=', '+', '?', '!', '/']

  build_file = gyp.common.BuildFile(target)

  # Provide a single configuration by default if none exists.
  # TODO(mark): Signal an error if default_configurations exists but
  # configurations does not.
  if not 'configurations' in target_dict:
    target_dict['configurations'] = {'Default': {}}
  if not 'default_configuration' in target_dict:
    concrete = [i for (i, config) in target_dict['configurations'].iteritems()
                if not config.get('abstract')]
    target_dict['default_configuration'] = sorted(concrete)[0]

  merged_configurations = {}
  configs = target_dict['configurations']
  for (configuration, old_configuration_dict) in configs.iteritems():
    # Skip abstract configurations (saves work only).
    if old_configuration_dict.get('abstract'):
      continue
    # Configurations inherit (most) settings from the enclosing target scope.
    # Get the inheritance relationship right by making a copy of the target
    # dict.
    new_configuration_dict = {}
    for (key, target_val) in target_dict.iteritems():
      key_ext = key[-1:]
      if key_ext in key_suffixes:
        key_base = key[:-1]
      else:
        key_base = key
      if not key_base in non_configuration_keys:
        new_configuration_dict[key] = gyp.simple_copy.deepcopy(target_val)

    # Merge in configuration (with all its parents first).
    MergeConfigWithInheritance(new_configuration_dict, build_file,
                               target_dict, configuration, [])

    merged_configurations[configuration] = new_configuration_dict

  # Put the new configurations back into the target dict as a configuration.
  for configuration in merged_configurations.keys():
    target_dict['configurations'][configuration] = (
        merged_configurations[configuration])

  # Now drop all the abstract ones.
  for configuration in target_dict['configurations'].keys():
    old_configuration_dict = target_dict['configurations'][configuration]
    if old_configuration_dict.get('abstract'):
      del target_dict['configurations'][configuration]

  # Now that all of the target's configurations have been built, go through
  # the target dict's keys and remove everything that's been moved into a
  # "configurations" section.
  delete_keys = []
  for key in target_dict:
    key_ext = key[-1:]
    if key_ext in key_suffixes:
      key_base = key[:-1]
    else:
      key_base = key
    if not key_base in non_configuration_keys:
      delete_keys.append(key)
  for key in delete_keys:
    del target_dict[key]

  # Check the configurations to see if they contain invalid keys.
  for configuration in target_dict['configurations'].keys():
    configuration_dict = target_dict['configurations'][configuration]
    for key in configuration_dict.keys():
      if key in invalid_configuration_keys:
        raise GypError('%s not allowed in the %s configuration, found in '
                       'target %s' % (key, configuration, target))


def ProcessListFiltersInDict(name, the_dict):
  """Process regular expression and exclusion-based filters on lists.

  An exclusion list is in a dict key named with a trailing "!", like
  "sources!". Every item in such a list is removed from the associated
  main list, which in this example would be "sources". Removed items are
  placed into a "sources_excluded" list in the dict.

  Regular expression (regex) filters are contained in dict keys named with a
  trailing "/", such as "sources/" to operate on the "sources" list. Regex
  filters in a dict take the form:
    'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'],
                  ['include', '_mac\\.cc$'] ],
  The first filter says to exclude all files ending in _linux.cc, _mac.cc, and
  _win.cc. The second filter then includes all files ending in _mac.cc that
  are now or were once in the "sources" list. Items matching an "exclude"
  filter are subject to the same processing as would occur if they were listed
  by name in an exclusion list (ending in "!"). Items matching an "include"
  filter are brought back into the main list if previously excluded by an
  exclusion list or exclusion regex filter. Subsequent matching "exclude"
  patterns can still cause items to be excluded after matching an "include".
  """

  # Look through the dictionary for any lists whose keys end in "!" or "/".
  # These are lists that will be treated as exclude lists and regular
  # expression-based exclude/include lists. Collect the lists that are
  # needed first, looking for the lists that they operate on, and assemble
  # them into |lists|. This is done in a separate loop up front, because
  # the _included and _excluded keys need to be added to the_dict, and that
  # can't be done while iterating through it.

  lists = []
  del_lists = []
  for key, value in the_dict.iteritems():
    operation = key[-1]
    if operation != '!' and operation != '/':
      continue

    if type(value) is not list:
      raise ValueError(name + ' key ' + key + ' must be list, not ' + \
                       value.__class__.__name__)

    list_key = key[:-1]
    if list_key not in the_dict:
      # This happens when there's a list like "sources!" but no corresponding
      # "sources" list. Since there's nothing for it to operate on, queue up
      # the "sources!" list for deletion now.
      del_lists.append(key)
      continue

    if type(the_dict[list_key]) is not list:
      value = the_dict[list_key]
      raise ValueError(name + ' key ' + list_key + \
                       ' must be list, not ' + \
                       value.__class__.__name__ + ' when applying ' + \
                       {'!': 'exclusion', '/': 'regex'}[operation])

    if not list_key in lists:
      lists.append(list_key)

  # Delete the lists that are known to be unneeded at this point.
  for del_list in del_lists:
    del the_dict[del_list]

  for list_key in lists:
    the_list = the_dict[list_key]

    # Initialize the list_actions list, which is parallel to the_list. Each
    # item in list_actions identifies whether the corresponding item in
    # the_list should be excluded, unconditionally preserved (included), or
    # whether no exclusion or inclusion has been applied. Items for which
    # no exclusion or inclusion has been applied (yet) have value -1, items
    # excluded have value 0, and items included have value 1. Includes and
    # excludes override previous actions. All items in list_actions are
    # initialized to -1 because no excludes or includes have been processed
    # yet.
    list_actions = list((-1,) * len(the_list))

    exclude_key = list_key + '!'
    if exclude_key in the_dict:
      for exclude_item in the_dict[exclude_key]:
        for index in xrange(0, len(the_list)):
          if exclude_item == the_list[index]:
            # This item matches the exclude_item, so set its action to 0
            # (exclude).
            list_actions[index] = 0

      # The "whatever!" list is no longer needed, dump it.
      del the_dict[exclude_key]

    regex_key = list_key + '/'
    if regex_key in the_dict:
      for regex_item in the_dict[regex_key]:
        [action, pattern] = regex_item
        pattern_re = re.compile(pattern)

        if action == 'exclude':
          # This item matches an exclude regex, so set its value to 0 (exclude).
          action_value = 0
        elif action == 'include':
          # This item matches an include regex, so set its value to 1 (include).
          action_value = 1
        else:
          # This is an action that doesn't make any sense.
          raise ValueError('Unrecognized action ' + action + ' in ' + name + \
                           ' key ' + regex_key)

        for index in xrange(0, len(the_list)):
          list_item = the_list[index]
          if list_actions[index] == action_value:
            # Even if the regex matches, nothing will change so continue (regex
            # searches are expensive).
            continue
          if pattern_re.search(list_item):
            # Regular expression match.
            list_actions[index] = action_value

      # The "whatever/" list is no longer needed, dump it.
      del the_dict[regex_key]

    # Add excluded items to the excluded list.
    #
    # Note that exclude_key ("sources!") is different from excluded_key
    # ("sources_excluded"). The exclude_key list is input and it was already
    # processed and deleted; the excluded_key list is output and it's about
    # to be created.
    excluded_key = list_key + '_excluded'
    if excluded_key in the_dict:
      raise GypError(name + ' key ' + excluded_key +
                     ' must not be present prior '
                     'to applying exclusion/regex filters for ' + list_key)

    excluded_list = []

    # Go backwards through the list_actions list so that as items are deleted,
    # the indices of items that haven't been seen yet don't shift. That means
    # that things need to be prepended to excluded_list to maintain them in the
    # same order that they existed in the_list.
    for index in xrange(len(list_actions) - 1, -1, -1):
      if list_actions[index] == 0:
        # Dump anything with action 0 (exclude). Keep anything with action 1
        # (include) or -1 (no include or exclude seen for the item).
        excluded_list.insert(0, the_list[index])
        del the_list[index]

    # If anything was excluded, put the excluded list into the_dict at
    # excluded_key.
    if len(excluded_list) > 0:
      the_dict[excluded_key] = excluded_list

  # Now recurse into subdicts and lists that may contain dicts.
  for key, value in the_dict.iteritems():
    if type(value) is dict:
      ProcessListFiltersInDict(key, value)
    elif type(value) is list:
      ProcessListFiltersInList(key, value)
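
# A minimal illustration of the filters above, with an assumed input (not
# part of gyp):
#   the_dict = {'sources': ['a_linux.cc', 'a_mac.cc', 'a.cc'],
#               'sources!': ['a.cc'],
#               'sources/': [['exclude', '_(linux|mac)\\.cc$'],
#                            ['include', '_mac\\.cc$']]}
# ProcessListFiltersInDict('target', the_dict) leaves
#   the_dict == {'sources': ['a_mac.cc'],
#                'sources_excluded': ['a_linux.cc', 'a.cc']}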


def ProcessListFiltersInList(name, the_list):
  for item in the_list:
    if type(item) is dict:
      ProcessListFiltersInDict(name, item)
    elif type(item) is list:
      ProcessListFiltersInList(name, item)


def ValidateTargetType(target, target_dict):
  """Ensures the 'type' field on the target is one of the known types.

  Arguments:
    target: string, name of target.
    target_dict: dict, target spec.

  Raises an exception on error.
  """
  VALID_TARGET_TYPES = ('executable', 'loadable_module',
                        'static_library', 'shared_library',
                        'mac_kernel_extension', 'none')
  target_type = target_dict.get('type', None)
  if target_type not in VALID_TARGET_TYPES:
    raise GypError("Target %s has an invalid target type '%s'. "
                   "Must be one of %s." %
                   (target, target_type, '/'.join(VALID_TARGET_TYPES)))
  if (target_dict.get('standalone_static_library', 0) and
      not target_type == 'static_library'):
    raise GypError('Target %s has type %s but standalone_static_library flag is'
                   ' only valid for static_library type.' % (target,
                                                             target_type))


def ValidateSourcesInTarget(target, target_dict, build_file,
                            duplicate_basename_check):
  if not duplicate_basename_check:
    return
  if target_dict.get('type', None) != 'static_library':
    return
  sources = target_dict.get('sources', [])
  basenames = {}
  for source in sources:
    name, ext = os.path.splitext(source)
    is_compiled_file = ext in [
        '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
    if not is_compiled_file:
      continue
    basename = os.path.basename(name)  # Don't include extension.
    basenames.setdefault(basename, []).append(source)

  error = ''
  for basename, files in basenames.iteritems():
    if len(files) > 1:
      error += '  %s: %s\n' % (basename, ' '.join(files))

  if error:
    print('static library %s has several files with the same basename:\n' %
          target + error + 'libtool on Mac cannot handle that. Use '
          '--no-duplicate-basename-check to disable this validation.')
    raise GypError('Duplicate basenames in sources section, see list above')
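
# A minimal illustration, with an assumed target (not part of gyp): a
# static_library whose sources contain both 'a/util.cc' and 'b/util.cc'
# trips this check, since both entries share the compiled basename 'util'.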


def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
  """Ensures that the rules sections in target_dict are valid and consistent,
  and determines which sources they apply to.

  Arguments:
    target: string, name of target.
    target_dict: dict, target spec containing "rules" and "sources" lists.
    extra_sources_for_rules: a list of keys to scan for rule matches in
        addition to 'sources'.
  """

  # Dicts to map between values found in rules' 'rule_name' and 'extension'
  # keys and the rule dicts themselves.
  rule_names = {}
  rule_extensions = {}

  rules = target_dict.get('rules', [])
  for rule in rules:
    # Make sure that there's no conflict among rule names and extensions.
    rule_name = rule['rule_name']
    if rule_name in rule_names:
      raise GypError('rule %s exists in duplicate, target %s' %
                     (rule_name, target))
    rule_names[rule_name] = rule

    rule_extension = rule['extension']
    if rule_extension.startswith('.'):
      rule_extension = rule_extension[1:]
    if rule_extension in rule_extensions:
      raise GypError(('extension %s associated with multiple rules, ' +
                      'target %s rules %s and %s') %
                     (rule_extension, target,
                      rule_extensions[rule_extension]['rule_name'],
                      rule_name))
    rule_extensions[rule_extension] = rule

    # Make sure rule_sources isn't already there. It's going to be
    # created below if needed.
    if 'rule_sources' in rule:
      raise GypError(
          'rule_sources must not exist in input, target %s rule %s' %
          (target, rule_name))

    rule_sources = []
    source_keys = ['sources']
    source_keys.extend(extra_sources_for_rules)
    for source_key in source_keys:
      for source in target_dict.get(source_key, []):
        (source_root, source_extension) = os.path.splitext(source)
        if source_extension.startswith('.'):
          source_extension = source_extension[1:]
        if source_extension == rule_extension:
          rule_sources.append(source)

    if len(rule_sources) > 0:
      rule['rule_sources'] = rule_sources
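
# A minimal illustration, with an assumed rule (not part of gyp): a rule
# with 'rule_name': 'idl' and 'extension': 'idl' in a target whose sources
# are ['api.idl', 'main.cc'] ends up with rule['rule_sources'] == ['api.idl'].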


def ValidateRunAsInTarget(target, target_dict, build_file):
  target_name = target_dict.get('target_name')
  run_as = target_dict.get('run_as')
  if not run_as:
    return
  if type(run_as) is not dict:
    raise GypError("The 'run_as' in target %s from file %s should be a "
                   "dictionary." %
                   (target_name, build_file))
  action = run_as.get('action')
  if not action:
    raise GypError("The 'run_as' in target %s from file %s must have an "
                   "'action' section." %
                   (target_name, build_file))
  if type(action) is not list:
    raise GypError("The 'action' for 'run_as' in target %s from file %s "
                   "must be a list." %
                   (target_name, build_file))
  working_directory = run_as.get('working_directory')
  if working_directory and type(working_directory) is not str:
    raise GypError("The 'working_directory' for 'run_as' in target %s "
                   "in file %s should be a string." %
                   (target_name, build_file))
  environment = run_as.get('environment')
  if environment and type(environment) is not dict:
    raise GypError("The 'environment' for 'run_as' in target %s "
                   "in file %s should be a dictionary." %
                   (target_name, build_file))


def ValidateActionsInTarget(target, target_dict, build_file):
  '''Validates the inputs to the actions in a target.'''
  target_name = target_dict.get('target_name')
  actions = target_dict.get('actions', [])
  for action in actions:
    action_name = action.get('action_name')
    if not action_name:
      raise GypError("Anonymous action in target %s. "
                     "An action must have an 'action_name' field." %
                     target_name)
    inputs = action.get('inputs', None)
    if inputs is None:
      raise GypError('Action in target %s has no inputs.' % target_name)
    action_command = action.get('action')
    if action_command and not action_command[0]:
      raise GypError("Empty action as command in target %s." % target_name)


def TurnIntIntoStrInDict(the_dict):
  """Given dict the_dict, recursively converts all integers into strings.
  """
  # Use items instead of iteritems because there's no need to try to look at
  # reinserted keys and their associated values.
  for k, v in the_dict.items():
    if type(v) is int:
      v = str(v)
      the_dict[k] = v
    elif type(v) is dict:
      TurnIntIntoStrInDict(v)
    elif type(v) is list:
      TurnIntIntoStrInList(v)

    if type(k) is int:
      del the_dict[k]
      the_dict[str(k)] = v


def TurnIntIntoStrInList(the_list):
  """Given list the_list, recursively converts all integers into strings.
  """
  for index in xrange(0, len(the_list)):
    item = the_list[index]
    if type(item) is int:
      the_list[index] = str(item)
    elif type(item) is dict:
      TurnIntIntoStrInDict(item)
    elif type(item) is list:
      TurnIntIntoStrInList(item)
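
# A minimal illustration, with an assumed input (not part of gyp):
# TurnIntIntoStrInDict({'mac_bundle': 1, 'sources': ['a.cc', 2]}) mutates
# the dict in place to {'mac_bundle': '1', 'sources': ['a.cc', '2']}.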


def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets,
                         data):
  """Return only the targets that are deep dependencies of |root_targets|."""
  qualified_root_targets = []
  for target in root_targets:
    target = target.strip()
    qualified_targets = gyp.common.FindQualifiedTargets(target, flat_list)
    if not qualified_targets:
      raise GypError("Could not find target %s" % target)
    qualified_root_targets.extend(qualified_targets)

  wanted_targets = {}
  for target in qualified_root_targets:
    wanted_targets[target] = targets[target]
    for dependency in dependency_nodes[target].DeepDependencies():
      wanted_targets[dependency] = targets[dependency]

  wanted_flat_list = [t for t in flat_list if t in wanted_targets]

  # Prune unwanted targets from each build_file's data dict.
  for build_file in data['target_build_files']:
    if not 'targets' in data[build_file]:
      continue
    new_targets = []
    for target in data[build_file]['targets']:
      qualified_name = gyp.common.QualifiedTarget(build_file,
                                                  target['target_name'],
                                                  target['toolset'])
      if qualified_name in wanted_targets:
        new_targets.append(target)
    data[build_file]['targets'] = new_targets

  return wanted_targets, wanted_flat_list


def VerifyNoCollidingTargets(targets):
  """Verify that no two targets in the same directory share the same name.

  Arguments:
    targets: A list of targets in the form 'path/to/file.gyp:target_name'.
  """
  # Keep a dict going from 'subdirectory:target_name' to 'foo.gyp'.
  used = {}
  for target in targets:
    # Separate out 'path/to/file.gyp', 'target_name' from
    # 'path/to/file.gyp:target_name'.
    path, name = target.rsplit(':', 1)
    # Separate out 'path/to', 'file.gyp' from 'path/to/file.gyp'.
    subdir, gyp = os.path.split(path)
    # Use '.' for the current directory '', so that the error messages make
    # more sense.
    if not subdir:
      subdir = '.'
    # Prepare a key like 'path/to:target_name'.
    key = subdir + ':' + name
    if key in used:
      # Complain if this target is already used.
      raise GypError('Duplicate target name "%s" in directory "%s" used both '
                     'in "%s" and "%s".' % (name, subdir, gyp, used[key]))
    used[key] = gyp
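
# A minimal illustration, with assumed inputs (not part of gyp): passing
# ['dir/one.gyp:lib', 'dir/two.gyp:lib'] raises GypError, because both
# targets flatten to the same key 'dir:lib' even though they come from
# different .gyp files.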


def SetGeneratorGlobals(generator_input_info):
  # Set up path_sections and non_configuration_keys with the default data plus
  # the generator-specific data.
  global path_sections
  path_sections = set(base_path_sections)
  path_sections.update(generator_input_info['path_sections'])

  global non_configuration_keys
  non_configuration_keys = base_non_configuration_keys[:]
  non_configuration_keys.extend(generator_input_info['non_configuration_keys'])

  global multiple_toolsets
  multiple_toolsets = generator_input_info[
      'generator_supports_multiple_toolsets']

  global generator_filelist_paths
  generator_filelist_paths = generator_input_info['generator_filelist_paths']


def Load(build_files, variables, includes, depth, generator_input_info, check,
         circular_check, duplicate_basename_check, parallel, root_targets):
  SetGeneratorGlobals(generator_input_info)
  # A generator can have other lists (in addition to sources) be processed
  # for rules.
  extra_sources_for_rules = generator_input_info['extra_sources_for_rules']

  # Load build files. This loads every target-containing build file into
  # the |data| dictionary such that the keys to |data| are build file names,
  # and the values are the entire build file contents after "early" or "pre"
  # processing has been done and includes have been resolved.
  # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as
  # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps
  # track of the keys corresponding to "target" files.
  data = {'target_build_files': set()}
  # Normalize paths everywhere. This is important because paths will be
  # used as keys to the data dict and for references between input files.
  build_files = set(map(os.path.normpath, build_files))
  if parallel:
    LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth,
                                 check, generator_input_info)
  else:
    aux_data = {}
    for build_file in build_files:
      try:
        LoadTargetBuildFile(build_file, data, aux_data,
                            variables, includes, depth, check, True)
      except Exception, e:
        gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file)
        raise

  # Build a dict to access each target's subdict by qualified name.
  targets = BuildTargetsDict(data)

  # Fully qualify all dependency links.
  QualifyDependencies(targets)

  # Remove self-dependencies from targets that have 'prune_self_dependencies'
  # set to 1.
  RemoveSelfDependencies(targets)

  # Expand dependencies specified as build_file:*.
  ExpandWildcardDependencies(targets, data)

  # Remove all dependencies marked as 'link_dependency' from the targets of
  # type 'none'.
  RemoveLinkDependenciesFromNoneTargets(targets)

  # Apply exclude (!) and regex (/) list filters only for dependency_sections.
  for target_name, target_dict in targets.iteritems():
    tmp_dict = {}
    for key_base in dependency_sections:
      for op in ('', '!', '/'):
        key = key_base + op
        if key in target_dict:
          tmp_dict[key] = target_dict[key]
          del target_dict[key]
    ProcessListFiltersInDict(target_name, tmp_dict)
    # Write the results back to |target_dict|.
    for key in tmp_dict:
      target_dict[key] = tmp_dict[key]

  # Make sure every dependency appears at most once.
  RemoveDuplicateDependencies(targets)

  if circular_check:
    # Make sure that any targets in a.gyp don't contain dependencies in other
    # .gyp files that further depend on a.gyp.
    VerifyNoGYPFileCircularDependencies(targets)

  [dependency_nodes, flat_list] = BuildDependencyList(targets)

  if root_targets:
    # Remove, from |targets| and |flat_list|, the targets that are not deep
    # dependencies of the targets specified in |root_targets|.
    targets, flat_list = PruneUnwantedTargets(
        targets, flat_list, dependency_nodes, root_targets, data)

  # Check that no two targets in the same directory have the same name.
  VerifyNoCollidingTargets(flat_list)

  # Handle dependent settings of various types.
  for settings_type in ['all_dependent_settings',
                        'direct_dependent_settings',
                        'link_settings']:
    DoDependentSettings(settings_type, flat_list, targets, dependency_nodes)

    # Take out the dependent settings now that they've been published to all
    # of the targets that require them.
    for target in flat_list:
      if settings_type in targets[target]:
        del targets[target][settings_type]

  # Make sure static libraries don't declare dependencies on other static
  # libraries, but that linkables depend on all unlinked static libraries
  # that they need so that their link steps will be correct.
  gii = generator_input_info
  if gii['generator_wants_static_library_dependencies_adjusted']:
    AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
                                    gii['generator_wants_sorted_dependencies'])

  # Apply "post"/"late"/"target" variable expansions and condition evaluations.
  for target in flat_list:
    target_dict = targets[target]
    build_file = gyp.common.BuildFile(target)
    ProcessVariablesAndConditionsInDict(
        target_dict, PHASE_LATE, variables, build_file)

  # Move everything that can go into a "configurations" section into one.
  for target in flat_list:
    target_dict = targets[target]
    SetUpConfigurations(target, target_dict)

  # Apply exclude (!) and regex (/) list filters.
  for target in flat_list:
    target_dict = targets[target]
    ProcessListFiltersInDict(target, target_dict)

  # Apply "latelate" variable expansions and condition evaluations.
  for target in flat_list:
    target_dict = targets[target]
    build_file = gyp.common.BuildFile(target)
    ProcessVariablesAndConditionsInDict(
        target_dict, PHASE_LATELATE, variables, build_file)

  # Make sure that the rules make sense, and build up rule_sources lists as
  # needed. Not all generators will need to use the rule_sources lists, but
  # some may, and it seems best to build the list in a common spot.
  # Also validate actions and run_as elements in targets.
  for target in flat_list:
    target_dict = targets[target]
    build_file = gyp.common.BuildFile(target)
    ValidateTargetType(target, target_dict)
    ValidateSourcesInTarget(target, target_dict, build_file,
                            duplicate_basename_check)
    ValidateRulesInTarget(target, target_dict, extra_sources_for_rules)
    ValidateRunAsInTarget(target, target_dict, build_file)
    ValidateActionsInTarget(target, target_dict, build_file)

  # Generators might not expect ints. Turn them into strs.
  TurnIntIntoStrInDict(data)

  # TODO(mark): Return |data| for now because the generator needs a list of
  # build files that came in. In the future, maybe it should just accept
  # a list, and not the whole data dict.
  return [flat_list, targets, data]