redsnow 0.0.8

Files changed (174)
  1. checksums.yaml +7 -0
  2. data/.gitignore +34 -0
  3. data/.gitmodules +3 -0
  4. data/.travis.yml +20 -0
  5. data/CHANGELOG.md +4 -0
  6. data/Gemfile +4 -0
  7. data/LICENSE +21 -0
  8. data/README.md +62 -0
  9. data/Rakefile +36 -0
  10. data/Vagrantfile +20 -0
  11. data/ext/snowcrash/Makefile +64 -0
  12. data/ext/snowcrash/Vagrantfile +20 -0
  13. data/ext/snowcrash/bin/snowcrash +0 -0
  14. data/ext/snowcrash/common.gypi +163 -0
  15. data/ext/snowcrash/config.gypi +10 -0
  16. data/ext/snowcrash/config.mk +5 -0
  17. data/ext/snowcrash/configure +213 -0
  18. data/ext/snowcrash/provisioning.sh +15 -0
  19. data/ext/snowcrash/snowcrash.gyp +141 -0
  20. data/ext/snowcrash/src/ActionParser.h +503 -0
  21. data/ext/snowcrash/src/AssetParser.h +215 -0
  22. data/ext/snowcrash/src/BlockUtility.h +186 -0
  23. data/ext/snowcrash/src/Blueprint.h +283 -0
  24. data/ext/snowcrash/src/BlueprintParser.h +347 -0
  25. data/ext/snowcrash/src/BlueprintParserCore.h +190 -0
  26. data/ext/snowcrash/src/BlueprintSection.h +140 -0
  27. data/ext/snowcrash/src/BlueprintUtility.h +126 -0
  28. data/ext/snowcrash/src/CBlueprint.cc +600 -0
  29. data/ext/snowcrash/src/CBlueprint.h +354 -0
  30. data/ext/snowcrash/src/CSourceAnnotation.cc +140 -0
  31. data/ext/snowcrash/src/CSourceAnnotation.h +106 -0
  32. data/ext/snowcrash/src/CodeBlockUtility.h +189 -0
  33. data/ext/snowcrash/src/DescriptionSectionUtility.h +156 -0
  34. data/ext/snowcrash/src/HTTP.cc +46 -0
  35. data/ext/snowcrash/src/HTTP.h +105 -0
  36. data/ext/snowcrash/src/HeaderParser.h +289 -0
  37. data/ext/snowcrash/src/ListBlockUtility.h +273 -0
  38. data/ext/snowcrash/src/ListUtility.h +95 -0
  39. data/ext/snowcrash/src/MarkdownBlock.cc +176 -0
  40. data/ext/snowcrash/src/MarkdownBlock.h +93 -0
  41. data/ext/snowcrash/src/MarkdownParser.cc +266 -0
  42. data/ext/snowcrash/src/MarkdownParser.h +88 -0
  43. data/ext/snowcrash/src/ParameterDefinitonParser.h +570 -0
  44. data/ext/snowcrash/src/ParametersParser.h +252 -0
  45. data/ext/snowcrash/src/Parser.cc +71 -0
  46. data/ext/snowcrash/src/Parser.h +29 -0
  47. data/ext/snowcrash/src/ParserCore.cc +120 -0
  48. data/ext/snowcrash/src/ParserCore.h +82 -0
  49. data/ext/snowcrash/src/PayloadParser.h +672 -0
  50. data/ext/snowcrash/src/Platform.h +54 -0
  51. data/ext/snowcrash/src/RegexMatch.h +32 -0
  52. data/ext/snowcrash/src/ResourceGroupParser.h +195 -0
  53. data/ext/snowcrash/src/ResourceParser.h +584 -0
  54. data/ext/snowcrash/src/SectionUtility.h +142 -0
  55. data/ext/snowcrash/src/Serialize.cc +52 -0
  56. data/ext/snowcrash/src/Serialize.h +69 -0
  57. data/ext/snowcrash/src/SerializeJSON.cc +601 -0
  58. data/ext/snowcrash/src/SerializeJSON.h +21 -0
  59. data/ext/snowcrash/src/SerializeYAML.cc +336 -0
  60. data/ext/snowcrash/src/SerializeYAML.h +21 -0
  61. data/ext/snowcrash/src/SourceAnnotation.h +177 -0
  62. data/ext/snowcrash/src/StringUtility.h +109 -0
  63. data/ext/snowcrash/src/SymbolTable.h +83 -0
  64. data/ext/snowcrash/src/UriTemplateParser.cc +195 -0
  65. data/ext/snowcrash/src/UriTemplateParser.h +243 -0
  66. data/ext/snowcrash/src/Version.h +39 -0
  67. data/ext/snowcrash/src/csnowcrash.cc +23 -0
  68. data/ext/snowcrash/src/csnowcrash.h +38 -0
  69. data/ext/snowcrash/src/posix/RegexMatch.cc +99 -0
  70. data/ext/snowcrash/src/snowcrash.cc +18 -0
  71. data/ext/snowcrash/src/snowcrash.h +41 -0
  72. data/ext/snowcrash/src/snowcrash/snowcrash.cc +170 -0
  73. data/ext/snowcrash/src/win/RegexMatch.cc +78 -0
  74. data/ext/snowcrash/sundown/CONTRIBUTING.md +10 -0
  75. data/ext/snowcrash/sundown/Makefile +83 -0
  76. data/ext/snowcrash/sundown/Makefile.win +33 -0
  77. data/ext/snowcrash/sundown/examples/smartypants.c +72 -0
  78. data/ext/snowcrash/sundown/examples/sundown.c +80 -0
  79. data/ext/snowcrash/sundown/html/houdini.h +37 -0
  80. data/ext/snowcrash/sundown/html/houdini_href_e.c +108 -0
  81. data/ext/snowcrash/sundown/html/houdini_html_e.c +84 -0
  82. data/ext/snowcrash/sundown/html/html.c +647 -0
  83. data/ext/snowcrash/sundown/html/html.h +77 -0
  84. data/ext/snowcrash/sundown/html/html_smartypants.c +389 -0
  85. data/ext/snowcrash/sundown/html_block_names.txt +25 -0
  86. data/ext/snowcrash/sundown/src/autolink.c +297 -0
  87. data/ext/snowcrash/sundown/src/autolink.h +51 -0
  88. data/ext/snowcrash/sundown/src/buffer.c +225 -0
  89. data/ext/snowcrash/sundown/src/buffer.h +96 -0
  90. data/ext/snowcrash/sundown/src/html_blocks.h +206 -0
  91. data/ext/snowcrash/sundown/src/markdown.c +2701 -0
  92. data/ext/snowcrash/sundown/src/markdown.h +147 -0
  93. data/ext/snowcrash/sundown/src/src_map.c +200 -0
  94. data/ext/snowcrash/sundown/src/src_map.h +58 -0
  95. data/ext/snowcrash/sundown/src/stack.c +81 -0
  96. data/ext/snowcrash/sundown/src/stack.h +29 -0
  97. data/ext/snowcrash/sundown/sundown.def +20 -0
  98. data/ext/snowcrash/tools/gyp/AUTHORS +11 -0
  99. data/ext/snowcrash/tools/gyp/DEPS +24 -0
  100. data/ext/snowcrash/tools/gyp/OWNERS +1 -0
  101. data/ext/snowcrash/tools/gyp/PRESUBMIT.py +120 -0
  102. data/ext/snowcrash/tools/gyp/buildbot/buildbot_run.py +190 -0
  103. data/ext/snowcrash/tools/gyp/codereview.settings +10 -0
  104. data/ext/snowcrash/tools/gyp/data/win/large-pdb-shim.cc +12 -0
  105. data/ext/snowcrash/tools/gyp/gyp +8 -0
  106. data/ext/snowcrash/tools/gyp/gyp.bat +5 -0
  107. data/ext/snowcrash/tools/gyp/gyp_main.py +18 -0
  108. data/ext/snowcrash/tools/gyp/pylib/gyp/MSVSNew.py +340 -0
  109. data/ext/snowcrash/tools/gyp/pylib/gyp/MSVSProject.py +208 -0
  110. data/ext/snowcrash/tools/gyp/pylib/gyp/MSVSSettings.py +1063 -0
  111. data/ext/snowcrash/tools/gyp/pylib/gyp/MSVSToolFile.py +58 -0
  112. data/ext/snowcrash/tools/gyp/pylib/gyp/MSVSUserFile.py +147 -0
  113. data/ext/snowcrash/tools/gyp/pylib/gyp/MSVSUtil.py +267 -0
  114. data/ext/snowcrash/tools/gyp/pylib/gyp/MSVSVersion.py +409 -0
  115. data/ext/snowcrash/tools/gyp/pylib/gyp/__init__.py +537 -0
  116. data/ext/snowcrash/tools/gyp/pylib/gyp/__init__.pyc +0 -0
  117. data/ext/snowcrash/tools/gyp/pylib/gyp/common.py +521 -0
  118. data/ext/snowcrash/tools/gyp/pylib/gyp/common.pyc +0 -0
  119. data/ext/snowcrash/tools/gyp/pylib/gyp/easy_xml.py +157 -0
  120. data/ext/snowcrash/tools/gyp/pylib/gyp/flock_tool.py +49 -0
  121. data/ext/snowcrash/tools/gyp/pylib/gyp/generator/__init__.py +0 -0
  122. data/ext/snowcrash/tools/gyp/pylib/gyp/generator/__init__.pyc +0 -0
  123. data/ext/snowcrash/tools/gyp/pylib/gyp/generator/android.py +1069 -0
  124. data/ext/snowcrash/tools/gyp/pylib/gyp/generator/cmake.py +1143 -0
  125. data/ext/snowcrash/tools/gyp/pylib/gyp/generator/dump_dependency_json.py +81 -0
  126. data/ext/snowcrash/tools/gyp/pylib/gyp/generator/eclipse.py +335 -0
  127. data/ext/snowcrash/tools/gyp/pylib/gyp/generator/gypd.py +87 -0
  128. data/ext/snowcrash/tools/gyp/pylib/gyp/generator/gypsh.py +56 -0
  129. data/ext/snowcrash/tools/gyp/pylib/gyp/generator/make.py +2181 -0
  130. data/ext/snowcrash/tools/gyp/pylib/gyp/generator/make.pyc +0 -0
  131. data/ext/snowcrash/tools/gyp/pylib/gyp/generator/msvs.py +3335 -0
  132. data/ext/snowcrash/tools/gyp/pylib/gyp/generator/ninja.py +2156 -0
  133. data/ext/snowcrash/tools/gyp/pylib/gyp/generator/xcode.py +1224 -0
  134. data/ext/snowcrash/tools/gyp/pylib/gyp/generator/xcode.pyc +0 -0
  135. data/ext/snowcrash/tools/gyp/pylib/gyp/input.py +2809 -0
  136. data/ext/snowcrash/tools/gyp/pylib/gyp/input.pyc +0 -0
  137. data/ext/snowcrash/tools/gyp/pylib/gyp/mac_tool.py +510 -0
  138. data/ext/snowcrash/tools/gyp/pylib/gyp/msvs_emulation.py +972 -0
  139. data/ext/snowcrash/tools/gyp/pylib/gyp/ninja_syntax.py +160 -0
  140. data/ext/snowcrash/tools/gyp/pylib/gyp/ordered_dict.py +289 -0
  141. data/ext/snowcrash/tools/gyp/pylib/gyp/win_tool.py +292 -0
  142. data/ext/snowcrash/tools/gyp/pylib/gyp/xcode_emulation.py +1440 -0
  143. data/ext/snowcrash/tools/gyp/pylib/gyp/xcode_emulation.pyc +0 -0
  144. data/ext/snowcrash/tools/gyp/pylib/gyp/xcodeproj_file.py +2889 -0
  145. data/ext/snowcrash/tools/gyp/pylib/gyp/xcodeproj_file.pyc +0 -0
  146. data/ext/snowcrash/tools/gyp/pylib/gyp/xml_fix.py +69 -0
  147. data/ext/snowcrash/tools/gyp/pylintrc +307 -0
  148. data/ext/snowcrash/tools/gyp/samples/samples +81 -0
  149. data/ext/snowcrash/tools/gyp/samples/samples.bat +5 -0
  150. data/ext/snowcrash/tools/gyp/setup.py +19 -0
  151. data/ext/snowcrash/tools/gyp/tools/Xcode/Specifications/gyp.pbfilespec +27 -0
  152. data/ext/snowcrash/tools/gyp/tools/Xcode/Specifications/gyp.xclangspec +226 -0
  153. data/ext/snowcrash/tools/gyp/tools/emacs/gyp.el +252 -0
  154. data/ext/snowcrash/tools/gyp/tools/graphviz.py +100 -0
  155. data/ext/snowcrash/tools/gyp/tools/pretty_gyp.py +155 -0
  156. data/ext/snowcrash/tools/gyp/tools/pretty_sln.py +168 -0
  157. data/ext/snowcrash/tools/gyp/tools/pretty_vcproj.py +329 -0
  158. data/ext/snowcrash/tools/homebrew/snowcrash.rb +11 -0
  159. data/ext/snowcrash/vcbuild.bat +184 -0
  160. data/lib/redsnow.rb +31 -0
  161. data/lib/redsnow/binding.rb +132 -0
  162. data/lib/redsnow/blueprint.rb +365 -0
  163. data/lib/redsnow/object.rb +18 -0
  164. data/lib/redsnow/parseresult.rb +107 -0
  165. data/lib/redsnow/version.rb +4 -0
  166. data/provisioning.sh +20 -0
  167. data/redsnow.gemspec +35 -0
  168. data/test/_helper.rb +15 -0
  169. data/test/fixtures/sample-api-ast.json +97 -0
  170. data/test/fixtures/sample-api.apib +20 -0
  171. data/test/redsnow_binding_test.rb +35 -0
  172. data/test/redsnow_parseresult_test.rb +50 -0
  173. data/test/redsnow_test.rb +285 -0
  174. metadata +358 -0
@@ -0,0 +1,2809 @@
+ # Copyright (c) 2012 Google Inc. All rights reserved.
+ # Use of this source code is governed by a BSD-style license that can be
+ # found in the LICENSE file.
+
+ from compiler.ast import Const
+ from compiler.ast import Dict
+ from compiler.ast import Discard
+ from compiler.ast import List
+ from compiler.ast import Module
+ from compiler.ast import Node
+ from compiler.ast import Stmt
+ import compiler
+ import copy
+ import gyp.common
+ import multiprocessing
+ import optparse
+ import os.path
+ import re
+ import shlex
+ import signal
+ import subprocess
+ import sys
+ import threading
+ import time
+ import traceback
+ from gyp.common import GypError
+
+
+ # A list of types that are treated as linkable.
+ linkable_types = ['executable', 'shared_library', 'loadable_module']
+
+ # A list of sections that contain links to other targets.
+ dependency_sections = ['dependencies', 'export_dependent_settings']
+
+ # base_path_sections is a list of sections defined by GYP that contain
+ # pathnames. The generators can provide more keys, the two lists are merged
+ # into path_sections, but you should call IsPathSection instead of using
+ # either list directly.
+ base_path_sections = [
+   'destination',
+   'files',
+   'include_dirs',
+   'inputs',
+   'libraries',
+   'outputs',
+   'sources',
+ ]
+ path_sections = []
+
+ is_path_section_charset = set('=+?!')
+ is_path_section_match_re = re.compile('_(dir|file|path)s?$')
+
+ def IsPathSection(section):
+   # If section ends in one of these characters, it's applied to a section
+   # without the trailing characters. '/' is notably absent from this list,
+   # because there's no way for a regular expression to be treated as a path.
+   while section[-1:] in is_path_section_charset:
+     section = section[:-1]
+   return section in path_sections or is_path_section_match_re.search(section)
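
As a sketch of the behavior, assuming the definitions above are in scope under Python 2 (importing this file as a module is an assumption): trailing '=', '+', '?' and '!' markers are stripped before the name is checked against path_sections and the _dir/_file/_path suffix pattern.

    print bool(IsPathSection('library_dirs'))   # True: matches _(dir|file|path)s?$
    print bool(IsPathSection('include_dirs!'))  # True: the trailing '!' is stripped first
    print bool(IsPathSection('defines'))        # False: no path-like suffix, and
                                                # path_sections stays empty until a
                                                # generator merges in base_path_sections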
+
+ # base_non_configuration_keys is a list of key names that belong in the target
+ # itself and should not be propagated into its configurations. It is merged
+ # with a list that can come from the generator to
+ # create non_configuration_keys.
+ base_non_configuration_keys = [
+   # Sections that must exist inside targets and not configurations.
+   'actions',
+   'configurations',
+   'copies',
+   'default_configuration',
+   'dependencies',
+   'dependencies_original',
+   'libraries',
+   'postbuilds',
+   'product_dir',
+   'product_extension',
+   'product_name',
+   'product_prefix',
+   'rules',
+   'run_as',
+   'sources',
+   'standalone_static_library',
+   'suppress_wildcard',
+   'target_name',
+   'toolset',
+   'toolsets',
+   'type',
+
+   # Sections that can be found inside targets or configurations, but that
+   # should not be propagated from targets into their configurations.
+   'variables',
+ ]
+ non_configuration_keys = []
+
+ # Keys that do not belong inside a configuration dictionary.
+ invalid_configuration_keys = [
+   'actions',
+   'all_dependent_settings',
+   'configurations',
+   'dependencies',
+   'direct_dependent_settings',
+   'libraries',
+   'link_settings',
+   'sources',
+   'standalone_static_library',
+   'target_name',
+   'type',
+ ]
+
+ # Controls whether or not the generator supports multiple toolsets.
+ multiple_toolsets = False
+
+ # Paths for converting filelist paths to output paths: {
+ #   toplevel,
+ #   qualified_output_dir,
+ # }
+ generator_filelist_paths = None
+
+ def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
+   """Return a list of all build files included into build_file_path.
+
+   The returned list will contain build_file_path as well as all other files
+   that it included, either directly or indirectly. Note that the list may
+   contain files that were included into a conditional section that evaluated
+   to false and was not merged into build_file_path's dict.
+
+   aux_data is a dict containing a key for each build file or included build
+   file. Those keys provide access to dicts whose "included" keys contain
+   lists of all other files included by the build file.
+
+   included should be left at its default None value by external callers. It
+   is used for recursion.
+
+   The returned list will not contain any duplicate entries. Each build file
+   in the list will be relative to the current directory.
+   """
+
+   if included == None:
+     included = []
+
+   if build_file_path in included:
+     return included
+
+   included.append(build_file_path)
+
+   for included_build_file in aux_data[build_file_path].get('included', []):
+     GetIncludedBuildFiles(included_build_file, aux_data, included)
+
+   return included
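
A small sketch of the traversal, assuming the definitions above are in scope (the file names and aux_data contents are hypothetical, shaped per the docstring):

    # a.gyp includes b.gypi, which in turn includes c.gypi.
    aux_data = {'a.gyp': {'included': ['b.gypi']},
                'b.gypi': {'included': ['c.gypi']},
                'c.gypi': {}}
    print GetIncludedBuildFiles('a.gyp', aux_data)
    # ['a.gyp', 'b.gypi', 'c.gypi'] -- depth-first, no duplicates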
+
+
+ def CheckedEval(file_contents):
+   """Return the eval of a gyp file.
+
+   The gyp file is restricted to dictionaries and lists only, and
+   repeated keys are not allowed.
+
+   Note that this is slower than eval() is.
+   """
+
+   ast = compiler.parse(file_contents)
+   assert isinstance(ast, Module)
+   c1 = ast.getChildren()
+   assert c1[0] is None
+   assert isinstance(c1[1], Stmt)
+   c2 = c1[1].getChildren()
+   assert isinstance(c2[0], Discard)
+   c3 = c2[0].getChildren()
+   assert len(c3) == 1
+   return CheckNode(c3[0], [])
+
+
+ def CheckNode(node, keypath):
+   if isinstance(node, Dict):
+     c = node.getChildren()
+     dict = {}
+     for n in range(0, len(c), 2):
+       assert isinstance(c[n], Const)
+       key = c[n].getChildren()[0]
+       if key in dict:
+         raise GypError("Key '" + key + "' repeated at level " +
+                        repr(len(keypath) + 1) + " with key path '" +
+                        '.'.join(keypath) + "'")
+       kp = list(keypath)  # Make a copy of the list for descending this node.
+       kp.append(key)
+       dict[key] = CheckNode(c[n + 1], kp)
+     return dict
+   elif isinstance(node, List):
+     c = node.getChildren()
+     children = []
+     for index, child in enumerate(c):
+       kp = list(keypath)  # Copy list.
+       kp.append(repr(index))
+       children.append(CheckNode(child, kp))
+     return children
+   elif isinstance(node, Const):
+     return node.getChildren()[0]
+   else:
+     raise TypeError, "Unknown AST node at key path '" + '.'.join(keypath) + \
+                      "': " + repr(node)
+
+
+ def LoadOneBuildFile(build_file_path, data, aux_data, variables, includes,
+                      is_target, check):
+   if build_file_path in data:
+     return data[build_file_path]
+
+   if os.path.exists(build_file_path):
+     build_file_contents = open(build_file_path).read()
+   else:
+     raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd()))
+
+   build_file_data = None
+   try:
+     if check:
+       build_file_data = CheckedEval(build_file_contents)
+     else:
+       build_file_data = eval(build_file_contents, {'__builtins__': None},
+                              None)
+   except SyntaxError, e:
+     e.filename = build_file_path
+     raise
+   except Exception, e:
+     gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path)
+     raise
+
+   if not isinstance(build_file_data, dict):
+     raise GypError("%s does not evaluate to a dictionary." % build_file_path)
+
+   data[build_file_path] = build_file_data
+   aux_data[build_file_path] = {}
+
+   # Scan for includes and merge them in.
+   if ('skip_includes' not in build_file_data or
+       not build_file_data['skip_includes']):
+     try:
+       if is_target:
+         LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
+                                       aux_data, variables, includes, check)
+       else:
+         LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
+                                       aux_data, variables, None, check)
+     except Exception, e:
+       gyp.common.ExceptionAppend(e,
+                                  'while reading includes of ' + build_file_path)
+       raise
+
+   return build_file_data
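
The {'__builtins__': None} globals dict in the unchecked path is what keeps eval() from reaching arbitrary code; a minimal self-contained sketch of that sandboxing (Python 2):

    data = eval("{'targets': [{'target_name': 'foo', 'type': 'none'}]}",
                {'__builtins__': None}, None)
    print data['targets'][0]['target_name']   # foo

    eval("__import__('os').system('true')", {'__builtins__': None}, None)
    # raises NameError: builtins such as __import__ are unavailable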
+
+
+ def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
+                                   variables, includes, check):
+   includes_list = []
+   if includes != None:
+     includes_list.extend(includes)
+   if 'includes' in subdict:
+     for include in subdict['includes']:
+       # "include" is specified relative to subdict_path, so compute the real
+       # path to include by appending the provided "include" to the directory
+       # in which subdict_path resides.
+       relative_include = \
+           os.path.normpath(os.path.join(os.path.dirname(subdict_path), include))
+       includes_list.append(relative_include)
+     # Unhook the includes list, it's no longer needed.
+     del subdict['includes']
+
+   # Merge in the included files.
+   for include in includes_list:
+     if not 'included' in aux_data[subdict_path]:
+       aux_data[subdict_path]['included'] = []
+     aux_data[subdict_path]['included'].append(include)
+
+     gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include)
+
+     MergeDicts(subdict,
+                LoadOneBuildFile(include, data, aux_data, variables, None,
+                                 False, check),
+                subdict_path, include)
+
+   # Recurse into subdictionaries.
+   for k, v in subdict.iteritems():
+     if v.__class__ == dict:
+       LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, variables,
+                                     None, check)
+     elif v.__class__ == list:
+       LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data, variables,
+                                     check)
+
+
+ # This recurses into lists so that it can look for dicts.
+ def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data,
+                                   variables, check):
+   for item in sublist:
+     if item.__class__ == dict:
+       LoadBuildFileIncludesIntoDict(item, sublist_path, data, aux_data,
+                                     variables, None, check)
+     elif item.__class__ == list:
+       LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data,
+                                     variables, check)
+
+ # Processes toolsets in all the targets. This recurses into condition entries
+ # since they can contain toolsets as well.
+ def ProcessToolsetsInDict(data):
+   if 'targets' in data:
+     target_list = data['targets']
+     new_target_list = []
+     for target in target_list:
+       # If this target already has an explicit 'toolset', and no 'toolsets'
+       # list, don't modify it further.
+       if 'toolset' in target and 'toolsets' not in target:
+         new_target_list.append(target)
+         continue
+       if multiple_toolsets:
+         toolsets = target.get('toolsets', ['target'])
+       else:
+         toolsets = ['target']
+       # Make sure this 'toolsets' definition is only processed once.
+       if 'toolsets' in target:
+         del target['toolsets']
+       if len(toolsets) > 0:
+         # Optimization: only do copies if more than one toolset is specified.
+         for build in toolsets[1:]:
+           new_target = copy.deepcopy(target)
+           new_target['toolset'] = build
+           new_target_list.append(new_target)
+         target['toolset'] = toolsets[0]
+         new_target_list.append(target)
+     data['targets'] = new_target_list
+   if 'conditions' in data:
+     for condition in data['conditions']:
+       if isinstance(condition, list):
+         for condition_dict in condition[1:]:
+           ProcessToolsetsInDict(condition_dict)
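
A sketch of the toolsets expansion, assuming this file is importable as gyp.input under Python 2 and that a generator has enabled multiple toolsets (both assumptions here):

    import gyp.input as gyp_input
    gyp_input.multiple_toolsets = True

    data = {'targets': [{'target_name': 'foo',
                         'toolsets': ['target', 'host']}]}
    gyp_input.ProcessToolsetsInDict(data)
    print [(t['target_name'], t['toolset']) for t in data['targets']]
    # [('foo', 'host'), ('foo', 'target')] -- one deep copy per extra toolset,
    # with the 'toolsets' key consumed so it is not expanded twice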
+
+
+ # TODO(mark): I don't love this name.  It just means that it's going to load
+ # a build file that contains targets and is expected to provide a targets dict
+ # that contains the targets...
+ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
+                         depth, check, load_dependencies):
+   # If depth is set, predefine the DEPTH variable to be a relative path from
+   # this build file's directory to the directory identified by depth.
+   if depth:
+     # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
+     # temporary measure. This should really be addressed by keeping all paths
+     # in POSIX until actual project generation.
+     d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path))
+     if d == '':
+       variables['DEPTH'] = '.'
+     else:
+       variables['DEPTH'] = d.replace('\\', '/')
+
+   if build_file_path in data['target_build_files']:
+     # Already loaded.
+     return False
+   data['target_build_files'].add(build_file_path)
+
+   gyp.DebugOutput(gyp.DEBUG_INCLUDES,
+                   "Loading Target Build File '%s'", build_file_path)
+
+   build_file_data = LoadOneBuildFile(build_file_path, data, aux_data, variables,
+                                      includes, True, check)
+
+   # Store DEPTH for later use in generators.
+   build_file_data['_DEPTH'] = depth
+
+   # Set up the included_files key indicating which .gyp files contributed to
+   # this target dict.
+   if 'included_files' in build_file_data:
+     raise GypError(build_file_path + ' must not contain included_files key')
+
+   included = GetIncludedBuildFiles(build_file_path, aux_data)
+   build_file_data['included_files'] = []
+   for included_file in included:
+     # included_file is relative to the current directory, but it needs to
+     # be made relative to build_file_path's directory.
+     included_relative = \
+         gyp.common.RelativePath(included_file,
+                                 os.path.dirname(build_file_path))
+     build_file_data['included_files'].append(included_relative)
+
+   # Do a first round of toolsets expansion so that conditions can be defined
+   # per toolset.
+   ProcessToolsetsInDict(build_file_data)
+
+   # Apply "pre"/"early" variable expansions and condition evaluations.
+   ProcessVariablesAndConditionsInDict(
+       build_file_data, PHASE_EARLY, variables, build_file_path)
+
+   # Since some toolsets might have been defined conditionally, perform
+   # a second round of toolsets expansion now.
+   ProcessToolsetsInDict(build_file_data)
+
+   # Look at each project's target_defaults dict, and merge settings into
+   # targets.
+   if 'target_defaults' in build_file_data:
+     if 'targets' not in build_file_data:
+       raise GypError("Unable to find targets in build file %s" %
+                      build_file_path)
+
+     index = 0
+     while index < len(build_file_data['targets']):
+       # This procedure needs to give the impression that target_defaults is
+       # used as defaults, and the individual targets inherit from that.
+       # The individual targets need to be merged into the defaults.  Make
+       # a deep copy of the defaults for each target, merge the target dict
+       # as found in the input file into that copy, and then hook up the
+       # copy with the target-specific data merged into it as the replacement
+       # target dict.
+       old_target_dict = build_file_data['targets'][index]
+       new_target_dict = copy.deepcopy(build_file_data['target_defaults'])
+       MergeDicts(new_target_dict, old_target_dict,
+                  build_file_path, build_file_path)
+       build_file_data['targets'][index] = new_target_dict
+       index += 1
+
+     # No longer needed.
+     del build_file_data['target_defaults']
+
+   # Look for dependencies.  This means that dependency resolution occurs
+   # after "pre" conditionals and variable expansion, but before "post" -
+   # in other words, you can't put a "dependencies" section inside a "post"
+   # conditional within a target.
+
+   dependencies = []
+   if 'targets' in build_file_data:
+     for target_dict in build_file_data['targets']:
+       if 'dependencies' not in target_dict:
+         continue
+       for dependency in target_dict['dependencies']:
+         dependencies.append(
+             gyp.common.ResolveTarget(build_file_path, dependency, None)[0])
+
+   if load_dependencies:
+     for dependency in dependencies:
+       try:
+         LoadTargetBuildFile(dependency, data, aux_data, variables,
+                             includes, depth, check, load_dependencies)
+       except Exception, e:
+         gyp.common.ExceptionAppend(
+             e, 'while loading dependencies of %s' % build_file_path)
+         raise
+   else:
+     return (build_file_path, dependencies)
+
+
+ def CallLoadTargetBuildFile(global_flags,
+                             build_file_path, data,
+                             aux_data, variables,
+                             includes, depth, check,
+                             generator_input_info):
+   """Wrapper around LoadTargetBuildFile for parallel processing.
+
+   This wrapper is used when LoadTargetBuildFile is executed in
+   a worker process.
+   """
+
+   try:
+     signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+     # Apply globals so that the worker process behaves the same.
+     for key, value in global_flags.iteritems():
+       globals()[key] = value
+
+     # Save the keys so we can return data that changed.
+     data_keys = set(data)
+     aux_data_keys = set(aux_data)
+
+     SetGeneratorGlobals(generator_input_info)
+     result = LoadTargetBuildFile(build_file_path, data,
+                                  aux_data, variables,
+                                  includes, depth, check, False)
+     if not result:
+       return result
+
+     (build_file_path, dependencies) = result
+
+     data_out = {}
+     for key in data:
+       if key == 'target_build_files':
+         continue
+       if key not in data_keys:
+         data_out[key] = data[key]
+     aux_data_out = {}
+     for key in aux_data:
+       if key not in aux_data_keys:
+         aux_data_out[key] = aux_data[key]
+
+     # This gets serialized and sent back to the main process via a pipe.
+     # It's handled in LoadTargetBuildFileCallback.
+     return (build_file_path,
+             data_out,
+             aux_data_out,
+             dependencies)
+   except GypError, e:
+     sys.stderr.write("gyp: %s\n" % e)
+     return None
+   except Exception, e:
+     print >>sys.stderr, 'Exception:', e
+     print >>sys.stderr, traceback.format_exc()
+     return None
+
+
+ class ParallelProcessingError(Exception):
+   pass
+
+
+ class ParallelState(object):
+   """Class to keep track of state when processing input files in parallel.
+
+   If build files are loaded in parallel, use this to keep track of
+   state during farming out and processing parallel jobs. It's stored
+   in a global so that the callback function can have access to it.
+   """
+
+   def __init__(self):
+     # The multiprocessing pool.
+     self.pool = None
+     # The condition variable used to protect this object and notify
+     # the main loop when there might be more data to process.
+     self.condition = None
+     # The "data" dict that was passed to LoadTargetBuildFileParallel
+     self.data = None
+     # The "aux_data" dict that was passed to LoadTargetBuildFileParallel
+     self.aux_data = None
+     # The number of parallel calls outstanding; decremented when a response
+     # was received.
+     self.pending = 0
+     # The set of all build files that have been scheduled, so we don't
+     # schedule the same one twice.
+     self.scheduled = set()
+     # A list of dependency build file paths that haven't been scheduled yet.
+     self.dependencies = []
+     # Flag to indicate if there was an error in a child process.
+     self.error = False
+
+   def LoadTargetBuildFileCallback(self, result):
+     """Handle the results of running LoadTargetBuildFile in another process.
+     """
+     self.condition.acquire()
+     if not result:
+       self.error = True
+       self.condition.notify()
+       self.condition.release()
+       return
+     (build_file_path0, data0, aux_data0, dependencies0) = result
+     self.data['target_build_files'].add(build_file_path0)
+     for key in data0:
+       self.data[key] = data0[key]
+     for key in aux_data0:
+       self.aux_data[key] = aux_data0[key]
+     for new_dependency in dependencies0:
+       if new_dependency not in self.scheduled:
+         self.scheduled.add(new_dependency)
+         self.dependencies.append(new_dependency)
+     self.pending -= 1
+     self.condition.notify()
+     self.condition.release()
+
+
+ def LoadTargetBuildFilesParallel(build_files, data, aux_data,
+                                  variables, includes, depth, check,
+                                  generator_input_info):
+   parallel_state = ParallelState()
+   parallel_state.condition = threading.Condition()
+   # Make copies of the build_files argument that we can modify while working.
+   parallel_state.dependencies = list(build_files)
+   parallel_state.scheduled = set(build_files)
+   parallel_state.pending = 0
+   parallel_state.data = data
+   parallel_state.aux_data = aux_data
+
+   try:
+     parallel_state.condition.acquire()
+     while parallel_state.dependencies or parallel_state.pending:
+       if parallel_state.error:
+         break
+       if not parallel_state.dependencies:
+         parallel_state.condition.wait()
+         continue
+
+       dependency = parallel_state.dependencies.pop()
+
+       parallel_state.pending += 1
+       data_in = {}
+       data_in['target_build_files'] = data['target_build_files']
+       aux_data_in = {}
+       global_flags = {
+           'path_sections': globals()['path_sections'],
+           'non_configuration_keys': globals()['non_configuration_keys'],
+           'multiple_toolsets': globals()['multiple_toolsets']}
+
+       if not parallel_state.pool:
+         parallel_state.pool = multiprocessing.Pool(8)
+       parallel_state.pool.apply_async(
+           CallLoadTargetBuildFile,
+           args = (global_flags, dependency,
+                   data_in, aux_data_in,
+                   variables, includes, depth, check, generator_input_info),
+           callback = parallel_state.LoadTargetBuildFileCallback)
+   except KeyboardInterrupt, e:
+     parallel_state.pool.terminate()
+     raise e
+
+   parallel_state.condition.release()
+
+   parallel_state.pool.close()
+   parallel_state.pool.join()
+   parallel_state.pool = None
+
+   if parallel_state.error:
+     sys.exit(1)
+
+
+ # Look for the bracket that matches the first bracket seen in a
+ # string, and return the start and end as a tuple.  For example, if
+ # the input is something like "<(foo <(bar)) blah", then it would
+ # return (1, 13), indicating the entire string except for the leading
+ # "<" and trailing " blah".
+ LBRACKETS = set('{[(')
+ BRACKETS = {'}': '{', ']': '[', ')': '('}
+ def FindEnclosingBracketGroup(input_str):
+   stack = []
+   start = -1
+   for index, char in enumerate(input_str):
+     if char in LBRACKETS:
+       stack.append(char)
+       if start == -1:
+         start = index
+     elif char in BRACKETS:
+       if not stack:
+         return (-1, -1)
+       if stack.pop() != BRACKETS[char]:
+         return (-1, -1)
+       if not stack:
+         return (start, index + 1)
+   return (-1, -1)
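
The example from the comment above, runnable as-is once the definitions above are in scope:

    print FindEnclosingBracketGroup('<(foo <(bar)) blah')
    # (1, 13): spans '(foo <(bar))', skipping the leading '<' and trailing ' blah'
    print FindEnclosingBracketGroup('no brackets here')
    # (-1, -1): nothing to match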
+
+
+ canonical_int_re = re.compile('(0|-?[1-9][0-9]*)$')
+
+
+ def IsStrCanonicalInt(string):
+   """Returns True if |string| is in its canonical integer form.
+
+   The canonical form is such that str(int(string)) == string.
+   """
+   return isinstance(string, str) and canonical_int_re.match(string)
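
The docstring's invariant, str(int(string)) == string, is what rules out forms like '011' and '-0'; a quick sketch (note the function actually returns a truthy match object rather than the literal True):

    print bool(IsStrCanonicalInt('42'))    # True
    print bool(IsStrCanonicalInt('011'))   # False: leading zero
    print bool(IsStrCanonicalInt('-0'))    # False: str(int('-0')) == '0'
    print bool(IsStrCanonicalInt(42))      # False: not a str at all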
+
+
+ # This matches things like "<(asdf)", "<!(cmd)", "<!@(cmd)", "<|(list)",
+ # "<!interpreter(arguments)", "<([list])", and even "<([)" and "<(<())".
+ # In the last case, the inner "<()" is captured in match['content'].
+ early_variable_re = re.compile(
+     '(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)'
+     '(?P<command_string>[-a-zA-Z0-9_.]+)?'
+     '\((?P<is_array>\s*\[?)'
+     '(?P<content>.*?)(\]?)\))')
+
+ # This matches the same as early_variable_re, but with '>' instead of '<'.
+ late_variable_re = re.compile(
+     '(?P<replace>(?P<type>>(?:(?:!?@?)|\|)?)'
+     '(?P<command_string>[-a-zA-Z0-9_.]+)?'
+     '\((?P<is_array>\s*\[?)'
+     '(?P<content>.*?)(\]?)\))')
+
+ # This matches the same as early_variable_re, but with '^' instead of '<'.
+ latelate_variable_re = re.compile(
+     '(?P<replace>(?P<type>[\^](?:(?:!?@?)|\|)?)'
+     '(?P<command_string>[-a-zA-Z0-9_.]+)?'
+     '\((?P<is_array>\s*\[?)'
+     '(?P<content>.*?)(\]?)\))')
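
To make the named groups concrete, a self-contained sketch (the pattern is copied verbatim from early_variable_re above; the sample string is hypothetical):

    import re
    early_variable_re = re.compile(
        '(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)'
        '(?P<command_string>[-a-zA-Z0-9_.]+)?'
        '\((?P<is_array>\s*\[?)'
        '(?P<content>.*?)(\]?)\))')

    m = early_variable_re.search('x <!@(python -c "print 42") y')
    print m.group('type')            # '<!@': command expansion in list context
    print m.group('content')         # 'python -c "print 42"'
    print m.group('command_string')  # None: no pymod_do_main-style prefix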
+
+ # Global cache of results from running commands so they don't have to be run
+ # more than once.
+ cached_command_results = {}
+
+
+ def FixupPlatformCommand(cmd):
+   if sys.platform == 'win32':
+     if type(cmd) == list:
+       cmd = [re.sub('^cat ', 'type ', cmd[0])] + cmd[1:]
+     else:
+       cmd = re.sub('^cat ', 'type ', cmd)
+   return cmd
+
+
+ PHASE_EARLY = 0
+ PHASE_LATE = 1
+ PHASE_LATELATE = 2
+
+
+ def ExpandVariables(input, phase, variables, build_file):
+   # Look for the pattern that gets expanded into variables
+   if phase == PHASE_EARLY:
+     variable_re = early_variable_re
+     expansion_symbol = '<'
+   elif phase == PHASE_LATE:
+     variable_re = late_variable_re
+     expansion_symbol = '>'
+   elif phase == PHASE_LATELATE:
+     variable_re = latelate_variable_re
+     expansion_symbol = '^'
+   else:
+     assert False
+
+   input_str = str(input)
+   if IsStrCanonicalInt(input_str):
+     return int(input_str)
+
+   # Do a quick scan to determine if an expensive regex search is warranted.
+   if expansion_symbol not in input_str:
+     return input_str
+
+   # Get the entire list of matches as a list of MatchObject instances.
+   # (using findall here would return strings instead of MatchObjects).
+   matches = list(variable_re.finditer(input_str))
+   if not matches:
+     return input_str
+
+   output = input_str
+   # Reverse the list of matches so that replacements are done right-to-left.
+   # That ensures that earlier replacements won't mess up the string in a
+   # way that causes later calls to find the earlier substituted text instead
+   # of what's intended for replacement.
+   matches.reverse()
+   for match_group in matches:
+     match = match_group.groupdict()
+     gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Matches: %r", match)
+     # match['replace'] is the substring to look for, match['type']
+     # is the character code for the replacement type (< > <! >! <| >| <@
+     # >@ <!@ >!@), match['is_array'] contains a '[' for command
+     # arrays, and match['content'] is the name of the variable (< >)
+     # or command to run (<! >!). match['command_string'] is an optional
+     # command string. Currently, only 'pymod_do_main' is supported.
+
+     # run_command is true if a ! variant is used.
+     run_command = '!' in match['type']
+     command_string = match['command_string']
+
+     # file_list is true if a | variant is used.
+     file_list = '|' in match['type']
+
+     # Capture these now so we can adjust them later.
+     replace_start = match_group.start('replace')
+     replace_end = match_group.end('replace')
+
+     # Find the ending paren, and re-evaluate the contained string.
+     (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:])
+
+     # Adjust the replacement range to match the entire command
+     # found by FindEnclosingBracketGroup (since the variable_re
+     # probably doesn't match the entire command if it contained
+     # nested variables).
+     replace_end = replace_start + c_end
+
+     # Find the "real" replacement, matching the appropriate closing
+     # paren, and adjust the replacement start and end.
+     replacement = input_str[replace_start:replace_end]
+
+     # Figure out what the contents of the variable parens are.
+     contents_start = replace_start + c_start + 1
+     contents_end = replace_end - 1
+     contents = input_str[contents_start:contents_end]
+
+     # Do filter substitution now for <|().
+     # Admittedly, this is different than the evaluation order in other
+     # contexts. However, since filtration has no chance to run on <|(),
+     # this seems like the only obvious way to give them access to filters.
+     if file_list:
+       processed_variables = copy.deepcopy(variables)
+       ProcessListFiltersInDict(contents, processed_variables)
+       # Recurse to expand variables in the contents
+       contents = ExpandVariables(contents, phase,
+                                  processed_variables, build_file)
+     else:
+       # Recurse to expand variables in the contents
+       contents = ExpandVariables(contents, phase, variables, build_file)
+
+     # Strip off leading/trailing whitespace so that variable matches are
+     # simpler below (and because they are rarely needed).
+     contents = contents.strip()
+
+     # expand_to_list is true if an @ variant is used.  In that case,
+     # the expansion should result in a list.  Note that the caller
+     # is to be expecting a list in return, and not all callers do
+     # because not all are working in list context.  Also, for list
+     # expansions, there can be no other text besides the variable
+     # expansion in the input string.
+     expand_to_list = '@' in match['type'] and input_str == replacement
+
+     if run_command or file_list:
+       # Find the build file's directory, so commands can be run or file lists
+       # generated relative to it.
+       build_file_dir = os.path.dirname(build_file)
+       if build_file_dir == '' and not file_list:
+         # If build_file is just a leaf filename indicating a file in the
+         # current directory, build_file_dir might be an empty string.  Set
+         # it to None to signal to subprocess.Popen that it should run the
+         # command in the current directory.
+         build_file_dir = None
+
+       # Support <|(listfile.txt ...) which generates a file
+       # containing items from a gyp list, generated at gyp time.
+       # This works around actions/rules which have more inputs than will
+       # fit on the command line.
+       if file_list:
+         if type(contents) == list:
+           contents_list = contents
+         else:
+           contents_list = contents.split(' ')
+         replacement = contents_list[0]
+         if os.path.isabs(replacement):
+           raise GypError('| cannot handle absolute paths, got "%s"' % replacement)
+
+         if not generator_filelist_paths:
+           path = os.path.join(build_file_dir, replacement)
+         else:
+           if os.path.isabs(build_file_dir):
+             toplevel = generator_filelist_paths['toplevel']
+             rel_build_file_dir = gyp.common.RelativePath(build_file_dir, toplevel)
+           else:
+             rel_build_file_dir = build_file_dir
+           qualified_out_dir = generator_filelist_paths['qualified_out_dir']
+           path = os.path.join(qualified_out_dir, rel_build_file_dir, replacement)
+           gyp.common.EnsureDirExists(path)
+
+         replacement = gyp.common.RelativePath(path, build_file_dir)
+         f = gyp.common.WriteOnDiff(path)
+         for i in contents_list[1:]:
+           f.write('%s\n' % i)
+         f.close()
+
+       elif run_command:
+         use_shell = True
+         if match['is_array']:
+           contents = eval(contents)
+           use_shell = False
+
+         # Check for a cached value to avoid executing commands, or generating
+         # file lists more than once.
+         # TODO(http://code.google.com/p/gyp/issues/detail?id=112): It is
+         # possible that the command being invoked depends on the current
+         # directory. For that case the syntax needs to be extended so that the
+         # directory is also used in cache_key (it becomes a tuple).
+         # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory,
+         # someone could author a set of GYP files where each time the command
+         # is invoked it produces different output by design. When the need
+         # arises, the syntax should be extended to support no caching off a
+         # command's output so it is run every time.
+         cache_key = str(contents)
+         cached_value = cached_command_results.get(cache_key, None)
+         if cached_value is None:
+           gyp.DebugOutput(gyp.DEBUG_VARIABLES,
+                           "Executing command '%s' in directory '%s'",
+                           contents, build_file_dir)
+
+           replacement = ''
+
+           if command_string == 'pymod_do_main':
+             # <!pymod_do_main(modulename param eters) loads |modulename| as a
+             # python module and then calls that module's DoMain() function,
+             # passing ["param", "eters"] as a single list argument.  For
+             # modules that don't load quickly, this can be faster than
+             # <!(python modulename param eters). Do this in |build_file_dir|.
+             oldwd = os.getcwd()  # Python doesn't like os.open('.'): no fchdir.
+             if build_file_dir:  # build_file_dir may be None (see above).
+               os.chdir(build_file_dir)
+             try:
+
+               parsed_contents = shlex.split(contents)
+               try:
+                 py_module = __import__(parsed_contents[0])
+               except ImportError as e:
+                 raise GypError("Error importing pymod_do_main "
+                                "module (%s): %s" % (parsed_contents[0], e))
+               replacement = str(py_module.DoMain(parsed_contents[1:])).rstrip()
+             finally:
+               os.chdir(oldwd)
+             assert replacement != None
+           elif command_string:
+             raise GypError("Unknown command string '%s' in '%s'." %
+                            (command_string, contents))
+           else:
+             # Fix up command with platform specific workarounds.
+             contents = FixupPlatformCommand(contents)
+             p = subprocess.Popen(contents, shell=use_shell,
+                                  stdout=subprocess.PIPE,
+                                  stderr=subprocess.PIPE,
+                                  stdin=subprocess.PIPE,
+                                  cwd=build_file_dir)
+
+             p_stdout, p_stderr = p.communicate('')
+
+             if p.wait() != 0 or p_stderr:
+               sys.stderr.write(p_stderr)
+               # Simulate check_call behavior, since check_call only exists
+               # in python 2.5 and later.
+               raise GypError("Call to '%s' returned exit status %d." %
+                              (contents, p.returncode))
+             replacement = p_stdout.rstrip()
+
+           cached_command_results[cache_key] = replacement
+         else:
+           gyp.DebugOutput(gyp.DEBUG_VARIABLES,
+                           "Had cache value for command '%s' in directory '%s'",
+                           contents, build_file_dir)
+           replacement = cached_value
+
+     else:
+       if not contents in variables:
+         if contents[-1] in ['!', '/']:
+           # In order to allow cross-compiles (nacl) to happen more naturally,
+           # we will allow references to >(sources/) etc. to resolve to
+           # an empty list if undefined. This allows actions to:
+           # 'action!': [
+           #   '>@(_sources!)',
+           # ],
+           # 'action/': [
+           #   '>@(_sources/)',
+           # ],
+           replacement = []
+         else:
+           raise GypError('Undefined variable ' + contents +
+                          ' in ' + build_file)
+       else:
+         replacement = variables[contents]
+
+     if isinstance(replacement, list):
+       for item in replacement:
+         if (not contents[-1] == '/' and
+             not isinstance(item, str) and not isinstance(item, int)):
+           raise GypError('Variable ' + contents +
+                          ' must expand to a string or list of strings; ' +
+                          'list contains a ' +
+                          item.__class__.__name__)
+       # Run through the list and handle variable expansions in it.  Since
+       # the list is guaranteed not to contain dicts, this won't do anything
+       # with conditions sections.
+       ProcessVariablesAndConditionsInList(replacement, phase, variables,
+                                           build_file)
+     elif not isinstance(replacement, str) and \
+          not isinstance(replacement, int):
+       raise GypError('Variable ' + contents +
+                      ' must expand to a string or list of strings; ' +
+                      'found a ' + replacement.__class__.__name__)
+
+     if expand_to_list:
+       # Expanding in list context.  It's guaranteed that there's only one
+       # replacement to do in |input_str| and that it's this replacement.  See
+       # above.
+       if isinstance(replacement, list):
+         # If it's already a list, make a copy.
+         output = replacement[:]
+       else:
+         # Split it the same way sh would split arguments.
+         output = shlex.split(str(replacement))
+     else:
+       # Expanding in string context.
+       encoded_replacement = ''
+       if isinstance(replacement, list):
+         # When expanding a list into string context, turn the list items
+         # into a string in a way that will work with a subprocess call.
+         #
+         # TODO(mark): This isn't completely correct.  This should
+         # call a generator-provided function that observes the
+         # proper list-to-argument quoting rules on a specific
+         # platform instead of just calling the POSIX encoding
+         # routine.
+         encoded_replacement = gyp.common.EncodePOSIXShellList(replacement)
+       else:
+         encoded_replacement = replacement
+
+       output = output[:replace_start] + str(encoded_replacement) + \
+                output[replace_end:]
+     # Prepare for the next match iteration.
+     input_str = output
+
+   # Look for more matches now that we've replaced some, to deal with
+   # expanding local variables (variables defined in the same
+   # variables block as this one).
+   gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output)
+   if isinstance(output, list):
+     if output and isinstance(output[0], list):
+       # Leave output alone if it's a list of lists.
+       # We don't want such lists to be stringified.
+       pass
+     else:
+       new_output = []
+       for item in output:
+         new_output.append(
+             ExpandVariables(item, phase, variables, build_file))
+       output = new_output
+   else:
+     output = ExpandVariables(output, phase, variables, build_file)
+
+   # Convert all strings that are canonically-represented integers into
+   # integers.
+   if isinstance(output, list):
+     for index in xrange(0, len(output)):
+       if IsStrCanonicalInt(output[index]):
+         output[index] = int(output[index])
+   elif IsStrCanonicalInt(output):
+     output = int(output)
+
+   return output
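
A sketch of plain early-phase expansion, assuming this file is importable as gyp.input (the module name, variables, and build file path are assumptions; no command expansion is involved, so nothing is executed):

    import gyp.input as gyp_input

    variables = {'depth': '..', 'sources': ['a.cc', 'b.cc']}
    print gyp_input.ExpandVariables('<(depth)/out', gyp_input.PHASE_EARLY,
                                    variables, 'dummy.gyp')
    # '../out' -- simple string substitution

    print gyp_input.ExpandVariables('<@(sources)', gyp_input.PHASE_EARLY,
                                    variables, 'dummy.gyp')
    # ['a.cc', 'b.cc'] -- the '@' variant expands to a list when the
    # expansion is the entire input string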
+
+
+ def ProcessConditionsInDict(the_dict, phase, variables, build_file):
+   # Process a 'conditions' or 'target_conditions' section in the_dict,
+   # depending on phase.
+   # early -> conditions
+   # late -> target_conditions
+   # latelate -> no conditions
+   #
+   # Each item in a conditions list consists of cond_expr, a string expression
+   # evaluated as the condition, and true_dict, a dict that will be merged into
+   # the_dict if cond_expr evaluates to true.  Optionally, a third item,
+   # false_dict, may be present.  false_dict is merged into the_dict if
+   # cond_expr evaluates to false.
+   #
+   # Any dict merged into the_dict will be recursively processed for nested
+   # conditionals and other expansions, also according to phase, immediately
+   # prior to being merged.
+
+   if phase == PHASE_EARLY:
+     conditions_key = 'conditions'
+   elif phase == PHASE_LATE:
+     conditions_key = 'target_conditions'
+   elif phase == PHASE_LATELATE:
+     return
+   else:
+     assert False
+
+   if not conditions_key in the_dict:
+     return
+
+   conditions_list = the_dict[conditions_key]
+   # Unhook the conditions list, it's no longer needed.
+   del the_dict[conditions_key]
+
+   for condition in conditions_list:
+     if not isinstance(condition, list):
+       raise GypError(conditions_key + ' must be a list')
+     if len(condition) != 2 and len(condition) != 3:
+       # It's possible that condition[0] won't work in which case this
+       # attempt will raise its own IndexError.  That's probably fine.
+       raise GypError(conditions_key + ' ' + condition[0] +
+                      ' must be length 2 or 3, not ' + str(len(condition)))
+
+     [cond_expr, true_dict] = condition[0:2]
+     false_dict = None
+     if len(condition) == 3:
+       false_dict = condition[2]
+
+     # Do expansions on the condition itself.  Since the condition can
+     # naturally contain variable references without needing to resort to GYP
+     # expansion syntax, this is of dubious value for variables, but someone
+     # might want to use a command expansion directly inside a condition.
+     cond_expr_expanded = ExpandVariables(cond_expr, phase, variables,
+                                          build_file)
+     if not isinstance(cond_expr_expanded, str) and \
+        not isinstance(cond_expr_expanded, int):
+       raise ValueError, \
+             'Variable expansion in this context permits str and int ' + \
+             'only, found ' + cond_expr_expanded.__class__.__name__
+
+     try:
+       ast_code = compile(cond_expr_expanded, '<string>', 'eval')
+
+       if eval(ast_code, {'__builtins__': None}, variables):
+         merge_dict = true_dict
+       else:
+         merge_dict = false_dict
+     except SyntaxError, e:
+       syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
+                                  'at character %d.' %
+                                  (str(e.args[0]), e.text, build_file, e.offset),
+                                  e.filename, e.lineno, e.offset, e.text)
+       raise syntax_error
+     except NameError, e:
+       gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
+                                  (cond_expr_expanded, build_file))
+       raise GypError(e)
+
+     if merge_dict != None:
+       # Expand variables and nested conditionals in the merge_dict before
+       # merging it.
+       ProcessVariablesAndConditionsInDict(merge_dict, phase,
+                                           variables, build_file)
+
+       MergeDicts(the_dict, merge_dict, build_file, build_file)
+
+
+ def LoadAutomaticVariablesFromDict(variables, the_dict):
+   # Any keys with plain string values in the_dict become automatic variables.
+   # The variable name is the key name with a "_" character prepended.
+   for key, value in the_dict.iteritems():
+     if isinstance(value, str) or isinstance(value, int) or \
+        isinstance(value, list):
+       variables['_' + key] = value
+
+
+ def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
+   # Any keys in the_dict's "variables" dict, if it has one, become
+   # variables.  The variable name is the key name in the "variables" dict.
+   # Variables that end with the % character are set only if they are unset in
+   # the variables dict.  the_dict_key is the name of the key that accesses
+   # the_dict in the_dict's parent dict.  If the_dict's parent is not a dict
+   # (it could be a list or it could be parentless because it is a root dict),
+   # the_dict_key will be None.
+   for key, value in the_dict.get('variables', {}).iteritems():
+     if not isinstance(value, str) and not isinstance(value, int) and \
+        not isinstance(value, list):
+       continue
+
+     if key.endswith('%'):
+       variable_name = key[:-1]
+       if variable_name in variables:
+         # If the variable is already set, don't set it.
+         continue
+       if the_dict_key == 'variables' and variable_name in the_dict:
+         # If the variable is set without a % in the_dict, and the_dict is a
+         # variables dict (making |variables| a variables sub-dict of a
+         # variables dict), use the_dict's definition.
+         value = the_dict[variable_name]
+     else:
+       variable_name = key
+
+     variables[variable_name] = value
1130
+
1131
+ def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
1132
+ build_file, the_dict_key=None):
1133
+ """Handle all variable and command expansion and conditional evaluation.
1134
+
1135
+ This function is the public entry point for all variable expansions and
1136
+ conditional evaluations. The variables_in dictionary will not be modified
1137
+ by this function.
1138
+ """
1139
+
1140
+   # Make a copy of the variables_in dict that can be modified during the
+   # loading of automatics and the loading of the variables dict.
+   variables = variables_in.copy()
+   LoadAutomaticVariablesFromDict(variables, the_dict)
+
+   if 'variables' in the_dict:
+     # Make sure all the local variables are added to the variables
+     # list before we process them so that you can reference one
+     # variable from another.  They will be fully expanded by recursion
+     # in ExpandVariables.
+     for key, value in the_dict['variables'].iteritems():
+       variables[key] = value
+
+     # Handle the associated variables dict first, so that any variable
+     # references within can be resolved prior to using them as variables.
+     # Pass a copy of the variables dict to avoid having it be tainted.
+     # Otherwise, it would have extra automatics added for everything that
+     # should just be an ordinary variable in this scope.
+     ProcessVariablesAndConditionsInDict(the_dict['variables'], phase,
+                                         variables, build_file, 'variables')
+
+   LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
+
+   for key, value in the_dict.iteritems():
+     # Skip "variables", which was already processed if present.
+     if key != 'variables' and isinstance(value, str):
+       expanded = ExpandVariables(value, phase, variables, build_file)
+       if not isinstance(expanded, str) and not isinstance(expanded, int):
+         raise ValueError, \
+               'Variable expansion in this context permits str and int ' + \
+               'only, found ' + expanded.__class__.__name__ + ' for ' + key
+       the_dict[key] = expanded
+
+   # Variable expansion may have resulted in changes to automatics.  Reload.
+   # TODO(mark): Optimization: only reload if no changes were made.
+   variables = variables_in.copy()
+   LoadAutomaticVariablesFromDict(variables, the_dict)
+   LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
+
+   # Process conditions in this dict.  This is done after variable expansion
+   # so that conditions may take advantage of expanded variables.  For
+   # example, if the_dict contains:
+   #   {'type':       '<(library_type)',
+   #    'conditions': [['_type=="static_library"', { ... }]]},
+   # _type, as used in the condition, will only be set to the value of
+   # library_type if variable expansion is performed before condition
+   # processing.  However, condition processing should occur prior to
+   # recursion so that variables (both automatic and "variables" dict type)
+   # may be adjusted by conditions sections, merged into the_dict, and have
+   # the intended impact on contained dicts.
+   #
+   # This arrangement means that a "conditions" section containing a
+   # "variables" section will only have those variables effective in
+   # subdicts, not in the_dict.  The workaround is to put a "conditions"
+   # section within a "variables" section.  For example:
+   #   {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]],
+   #    'defines':    ['<(define)'],
+   #    'my_subdict': {'defines': ['<(define)']}},
+   # will not result in "IS_MAC" being appended to the "defines" list in the
+   # current scope but would result in it being appended to the "defines"
+   # list within "my_subdict".  By comparison:
+   #   {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]},
+   #    'defines':    ['<(define)'],
+   #    'my_subdict': {'defines': ['<(define)']}},
+   # will append "IS_MAC" to both "defines" lists.
+
+   # Evaluate conditions sections, allowing variable expansions within them
+   # as well as nested conditionals.  This will process a 'conditions' or
+   # 'target_conditions' section, perform appropriate merging and recursive
+   # conditional and variable processing, and then remove the conditions
+   # section from the_dict if it is present.
+   ProcessConditionsInDict(the_dict, phase, variables, build_file)
+
+   # Conditional processing may have resulted in changes to automatics or
+   # the variables dict.  Reload.
+   variables = variables_in.copy()
+   LoadAutomaticVariablesFromDict(variables, the_dict)
+   LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
+
+   # Recurse into child dicts, or process child lists which may result in
+   # further recursion into descendant dicts.
+   for key, value in the_dict.iteritems():
+     # Skip "variables" and string values, which were already processed if
+     # present.
+     if key == 'variables' or isinstance(value, str):
+       continue
+     if isinstance(value, dict):
+       # Pass a copy of the variables dict so that subdicts can't influence
+       # parents.
+       ProcessVariablesAndConditionsInDict(value, phase, variables,
+                                           build_file, key)
+     elif isinstance(value, list):
+       # The list itself can't influence the variables dict, and
+       # ProcessVariablesAndConditionsInList will make copies of the
+       # variables dict if it needs to pass it to something that can
+       # influence it.  No copy is necessary here.
+       ProcessVariablesAndConditionsInList(value, phase, variables,
+                                           build_file)
+     elif not isinstance(value, int):
+       raise TypeError, 'Unknown type ' + value.__class__.__name__ + \
+                        ' for ' + key
+
+
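For context, here is a minimal sketch of the scoping rule described in the comment above, using hypothetical dict literals rather than a real .gyp file:

    # Variables set inside 'conditions' only reach subdicts:
    only_subdicts = {
        'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]],
        'defines': ['<(define)'],                  # NOT expanded to IS_MAC
        'my_subdict': {'defines': ['<(define)']},  # expanded to IS_MAC
    }
    # The workaround nests 'conditions' inside 'variables' instead:
    everywhere = {
        'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]},
        'defines': ['<(define)'],                  # expanded to IS_MAC
        'my_subdict': {'defines': ['<(define)']},  # expanded to IS_MAC
    }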
1243
+ def ProcessVariablesAndConditionsInList(the_list, phase, variables,
+                                         build_file):
+   # Iterate using an index so that new values can be assigned into the_list.
+   index = 0
+   while index < len(the_list):
+     item = the_list[index]
+     if isinstance(item, dict):
+       # Make a copy of the variables dict so that it won't influence
+       # anything outside of its own scope.
+       ProcessVariablesAndConditionsInDict(item, phase, variables, build_file)
+     elif isinstance(item, list):
+       ProcessVariablesAndConditionsInList(item, phase, variables, build_file)
+     elif isinstance(item, str):
+       expanded = ExpandVariables(item, phase, variables, build_file)
+       if isinstance(expanded, str) or isinstance(expanded, int):
+         the_list[index] = expanded
+       elif isinstance(expanded, list):
+         the_list[index:index+1] = expanded
+         index += len(expanded)
+
+         # index now identifies the next item to examine.  Continue right
+         # now without falling into the index increment below.
+         continue
+       else:
+         raise ValueError, \
+               'Variable expansion in this context permits strings and ' + \
+               'lists only, found ' + expanded.__class__.__name__ + \
+               ' at index ' + str(index)
+     elif not isinstance(item, int):
+       raise TypeError, 'Unknown type ' + item.__class__.__name__ + \
+                        ' at index ' + str(index)
+     index = index + 1
+
+
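The in-place splice above is the subtle part: when ExpandVariables returns a list, the expanded items replace the original string and the index jumps past them. A self-contained sketch of just that mechanics (the variable name is made up):

    the_list = ['a', '<(two_items)', 'b']
    index = 1
    expanded = ['x', 'y']                  # pretend ExpandVariables returned this
    the_list[index:index + 1] = expanded   # splice in place of the original item
    index += len(expanded)                 # next item to examine is 'b'
    assert the_list == ['a', 'x', 'y', 'b'] and the_list[index] == 'b'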
1277
+ def BuildTargetsDict(data):
+   """Builds a dict mapping fully-qualified target names to their target dicts.
+
+   |data| is a dict mapping loaded build file pathnames, relative to the
+   current directory, to build file contents.  For each |data| value with a
+   "targets" key, the value of the "targets" key is taken as a list
+   containing target dicts.  Each target's fully-qualified name is
+   constructed from the pathname of the build file (the |data| key) and its
+   "target_name" property.  These fully-qualified names are used as the keys
+   in the returned dict.  These keys provide access to the target dicts,
+   the dicts in the "targets" lists.
+   """
+
+   targets = {}
+   for build_file in data['target_build_files']:
+     for target in data[build_file].get('targets', []):
+       target_name = gyp.common.QualifiedTarget(build_file,
+                                                target['target_name'],
+                                                target['toolset'])
+       if target_name in targets:
+         raise GypError('Duplicate target definitions for ' + target_name)
+       targets[target_name] = target
+
+   return targets
+
+
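A small illustration of the fully-qualified key format this builds. The helper below mirrors what gyp.common.QualifiedTarget is understood to produce (an assumption for illustration; the real helper lives in gyp/common.py):

    def qualified_target(build_file, target_name, toolset):
        return '%s:%s#%s' % (build_file, target_name, toolset)

    targets = {}
    targets[qualified_target('base/base.gyp', 'base', 'target')] = \
        {'type': 'static_library'}
    # targets == {'base/base.gyp:base#target': {'type': 'static_library'}}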
1303
+ def QualifyDependencies(targets):
+   """Make dependency links fully-qualified relative to the current directory.
+
+   |targets| is a dict mapping fully-qualified target names to their target
+   dicts.  For each target in this dict, keys known to contain dependency
+   links are examined, and any dependencies referenced will be rewritten
+   so that they are fully-qualified and relative to the current directory.
+   All rewritten dependencies are suitable for use as keys to |targets| or a
+   similar dict.
+   """
+
+   all_dependency_sections = [dep + op
+                              for dep in dependency_sections
+                              for op in ('', '!', '/')]
+
+   for target, target_dict in targets.iteritems():
+     target_build_file = gyp.common.BuildFile(target)
+     toolset = target_dict['toolset']
+     for dependency_key in all_dependency_sections:
+       dependencies = target_dict.get(dependency_key, [])
+       for index in xrange(0, len(dependencies)):
+         dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
+             target_build_file, dependencies[index], toolset)
+         if not multiple_toolsets:
+           # Ignore toolset specification in the dependency if it is
+           # specified.
+           dep_toolset = toolset
+         dependency = gyp.common.QualifiedTarget(dep_file,
+                                                 dep_target,
+                                                 dep_toolset)
+         dependencies[index] = dependency
+
+         # Make sure anything appearing in a list other than "dependencies"
+         # also appears in the "dependencies" list.
+         if dependency_key != 'dependencies' and \
+            dependency not in target_dict['dependencies']:
+           raise GypError('Found ' + dependency + ' in ' + dependency_key +
+                          ' of ' + target + ', but not in dependencies')
+
+
1342
+ def ExpandWildcardDependencies(targets, data):
+   """Expands dependencies specified as build_file:*.
+
+   For each target in |targets|, examines sections containing links to other
+   targets.  If any such section contains a link of the form build_file:*,
+   it is taken as a wildcard link, and is expanded to list each target in
+   build_file.  The |data| dict provides access to build file dicts.
+
+   Any target that does not wish to be included by wildcard can provide an
+   optional "suppress_wildcard" key in its target dict.  When present and
+   true, a wildcard dependency link will not include such targets.
+
+   All dependency names, including the keys to |targets| and the values in
+   each dependency list, must be qualified when this function is called.
+   """
+
+   for target, target_dict in targets.iteritems():
+     toolset = target_dict['toolset']
+     target_build_file = gyp.common.BuildFile(target)
+     for dependency_key in dependency_sections:
+       dependencies = target_dict.get(dependency_key, [])
+
+       # Loop this way instead of "for dependency in" or "for index in
+       # xrange" because the dependencies list will be modified within the
+       # loop body.
+       index = 0
+       while index < len(dependencies):
+         (dependency_build_file, dependency_target, dependency_toolset) = \
+             gyp.common.ParseQualifiedTarget(dependencies[index])
+         if dependency_target != '*' and dependency_toolset != '*':
+           # Not a wildcard.  Keep it moving.
+           index = index + 1
+           continue
+
+         if dependency_build_file == target_build_file:
+           # It's an error for a target to depend on all other targets in
+           # the same file, because a target cannot depend on itself.
+           raise GypError('Found wildcard in ' + dependency_key + ' of ' +
+                          target + ' referring to same build file')
+
+         # Take the wildcard out and adjust the index so that the next
+         # dependency in the list will be processed the next time through
+         # the loop.
+         del dependencies[index]
+         index = index - 1
+
+         # Loop through the targets in the other build file, adding them to
+         # this target's list of dependencies in place of the removed
+         # wildcard.
+         dependency_target_dicts = data[dependency_build_file]['targets']
+         for dependency_target_dict in dependency_target_dicts:
+           if int(dependency_target_dict.get('suppress_wildcard', False)):
+             continue
+           dependency_target_name = dependency_target_dict['target_name']
+           if (dependency_target != '*' and
+               dependency_target != dependency_target_name):
+             continue
+           dependency_target_toolset = dependency_target_dict['toolset']
+           if (dependency_toolset != '*' and
+               dependency_toolset != dependency_target_toolset):
+             continue
+           dependency = gyp.common.QualifiedTarget(dependency_build_file,
+                                                   dependency_target_name,
+                                                   dependency_target_toolset)
+           index = index + 1
+           dependencies.insert(index, dependency)
+
+         index = index + 1
+
+
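The del/insert bookkeeping above keeps the wildcard's replacements in list order. A standalone sketch with made-up qualified names, mimicking one pass of the loop for a wildcard that matches targets a and b in other.gyp:

    dependencies = ['other.gyp:*#target', 'base.gyp:base#target']
    index = 0
    del dependencies[index]          # remove the wildcard entry
    index -= 1
    for name in ('a', 'b'):          # eligible targets found in other.gyp
        index += 1
        dependencies.insert(index, 'other.gyp:%s#target' % name)
    index += 1                       # move past the inserted entries
    assert dependencies == ['other.gyp:a#target', 'other.gyp:b#target',
                            'base.gyp:base#target']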
1411
+ def Unify(l):
+   """Removes duplicate elements from l, keeping the first element."""
+   seen = {}
+   return [seen.setdefault(e, e) for e in l if e not in seen]
+
+
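Unify deduplicates while preserving the first occurrence of each element, e.g.:

    assert Unify(['a', 'b', 'a', 'c', 'b']) == ['a', 'b', 'c']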
1417
+ def RemoveDuplicateDependencies(targets):
+   """Makes sure every dependency appears only once in each target's
+   dependency lists."""
+   for target_name, target_dict in targets.iteritems():
+     for dependency_key in dependency_sections:
+       dependencies = target_dict.get(dependency_key, [])
+       if dependencies:
+         target_dict[dependency_key] = Unify(dependencies)
+
+
1427
+ def Filter(l, item):
+   """Removes every occurrence of item from l, preserving order."""
+   res = {}
+   return [res.setdefault(e, e) for e in l if e != item]
+
+
1433
+ def RemoveSelfDependencies(targets):
+   """Removes self dependencies from targets that have the
+   prune_self_dependency variable set."""
+   for target_name, target_dict in targets.iteritems():
+     for dependency_key in dependency_sections:
+       dependencies = target_dict.get(dependency_key, [])
+       if dependencies:
+         for t in dependencies:
+           if t == target_name:
+             if targets[t].get('variables', {}).get('prune_self_dependency',
+                                                    0):
+               target_dict[dependency_key] = Filter(dependencies, target_name)
+
+
1446
+ class DependencyGraphNode(object):
+   """A node in a dependency graph.
+
+   Attributes:
+     ref: A reference to an object that this DependencyGraphNode represents.
+     dependencies: List of DependencyGraphNodes on which this one depends.
+     dependents: List of DependencyGraphNodes that depend on this one.
+   """
+
+   class CircularException(GypError):
+     pass
+
+   def __init__(self, ref):
+     self.ref = ref
+     self.dependencies = []
+     self.dependents = []
+
+   def __repr__(self):
+     return '<DependencyGraphNode: %r>' % self.ref
+
1466
+   def FlattenToList(self):
+     # flat_list is the sorted list of dependencies - actually, the list
+     # items are the "ref" attributes of DependencyGraphNodes.  Every target
+     # will appear in flat_list after all of its dependencies, and before
+     # all of its dependents.
+     flat_list = []
+
+     # in_degree_zeros is the list of DependencyGraphNodes that have no
+     # dependencies not in flat_list.  Initially, it is a copy of the
+     # children of this node, because when the graph was built, nodes with
+     # no dependencies were made implicit dependents of the root node.
+     in_degree_zeros = set(self.dependents[:])
+
+     while in_degree_zeros:
+       # Nodes in in_degree_zeros have no dependencies not in flat_list, so
+       # they can be appended to flat_list.  Take these nodes out of
+       # in_degree_zeros as work progresses, so that the next node to
+       # process from the list can always be accessed at a consistent
+       # position.
+       node = in_degree_zeros.pop()
+       flat_list.append(node.ref)
+
+       # Look at dependents of the node just added to flat_list.  Some of
+       # them may now belong in in_degree_zeros.
+       for node_dependent in node.dependents:
+         is_in_degree_zero = True
+         for node_dependent_dependency in node_dependent.dependencies:
+           if not node_dependent_dependency.ref in flat_list:
+             # The dependent has one or more dependencies not in flat_list.
+             # There will be more chances to add it to flat_list when
+             # examining it again as a dependent of those other
+             # dependencies, provided that there are no cycles.
+             is_in_degree_zero = False
+             break
+
+         if is_in_degree_zero:
+           # All of the dependent's dependencies are already in flat_list.
+           # Add it to in_degree_zeros where it will be processed in a
+           # future iteration of the outer loop.
+           in_degree_zeros.add(node_dependent)
+
+     return flat_list
+
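FlattenToList is a standard topological sort (Kahn's algorithm) driven from the root node. A tiny sketch, assuming the DependencyGraphNode class defined above is in scope:

    root = DependencyGraphNode(None)
    a = DependencyGraphNode('a')
    b = DependencyGraphNode('b')
    # a has no dependencies, so it hangs off the root; b depends on a.
    a.dependencies = [root]; root.dependents.append(a)
    b.dependencies = [a];    a.dependents.append(b)
    assert root.FlattenToList() == ['a', 'b']  # dependencies come first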
1508
+   def FindCycles(self, path=None):
+     """
+     Returns a list of cycles in the graph, where each cycle is its own list.
+     """
+     if path is None:
+       path = [self]
+
+     results = []
+     for node in self.dependents:
+       if node in path:
+         cycle = [node]
+         for part in path:
+           cycle.append(part)
+           if part == node:
+             break
+         results.append(tuple(cycle))
+       else:
+         results.extend(node.FindCycles([node] + path))
+
+     return list(set(results))
+
1529
+   def DirectDependencies(self, dependencies=None):
+     """Returns a list of just direct dependencies."""
+     if dependencies is None:
+       dependencies = []
+
+     for dependency in self.dependencies:
+       # Check for None, corresponding to the root node.
+       if dependency.ref is not None and dependency.ref not in dependencies:
+         dependencies.append(dependency.ref)
+
+     return dependencies
+
1541
+   def _AddImportedDependencies(self, targets, dependencies=None):
+     """Given a list of direct dependencies, adds indirect dependencies that
+     other dependencies have declared to export their settings.
+
+     This method does not operate on self.  Rather, it operates on the list
+     of dependencies in the |dependencies| argument.  For each dependency in
+     that list, if any declares that it exports the settings of one of its
+     own dependencies, those dependencies whose settings are "passed
+     through" are added to the list.  As new items are added to the list,
+     they too will be processed, so it is possible to import settings
+     through multiple levels of dependencies.
+
+     This method is not terribly useful on its own; it depends on being
+     "primed" with a list of direct dependencies such as one provided by
+     DirectDependencies.  DirectAndImportedDependencies is intended to be
+     the public entry point.
+     """
+
+     if dependencies is None:
+       dependencies = []
+
+     index = 0
+     while index < len(dependencies):
+       dependency = dependencies[index]
+       dependency_dict = targets[dependency]
+       # Add any dependencies whose settings should be imported to the list
+       # if not already present.  Newly-added items will be checked for
+       # their own imports when the list iteration reaches them.
+       # Rather than simply appending new items, insert them after the
+       # dependency that exported them.  This is done to more closely match
+       # the depth-first method used by DeepDependencies.
+       add_index = 1
+       for imported_dependency in \
+           dependency_dict.get('export_dependent_settings', []):
+         if imported_dependency not in dependencies:
+           dependencies.insert(index + add_index, imported_dependency)
+           add_index = add_index + 1
+       index = index + 1
+
+     return dependencies
+
1582
+   def DirectAndImportedDependencies(self, targets, dependencies=None):
+     """Returns a list of a target's direct dependencies, plus any
+     dependencies whose settings a direct dependency has advertised should
+     be exported through it.
+     """
+
+     dependencies = self.DirectDependencies(dependencies)
+     return self._AddImportedDependencies(targets, dependencies)
+
1591
+   def DeepDependencies(self, dependencies=None):
+     """Returns a list of all of a target's dependencies, recursively."""
+     if dependencies is None:
+       dependencies = []
+
+     for dependency in self.dependencies:
+       # Check for None, corresponding to the root node.
+       if dependency.ref is not None and dependency.ref not in dependencies:
+         dependencies.append(dependency.ref)
+         dependency.DeepDependencies(dependencies)
+
+     return dependencies
+
1604
+   def _LinkDependenciesInternal(self, targets, include_shared_libraries,
+                                 dependencies=None, initial=True):
+     """Returns a list of dependency targets that are linked into this
+     target.
+
+     This function has a split personality, depending on the setting of
+     |initial|.  Outside callers should always leave |initial| at its
+     default setting.
+
+     When adding a target to the list of dependencies, this function will
+     recurse into itself with |initial| set to False, to collect
+     dependencies that are linked into the linkable target for which the
+     list is being built.
+
+     If |include_shared_libraries| is False, the resulting dependencies will
+     not include shared_library targets that are linked into this target.
+     """
+     if dependencies is None:
+       dependencies = []
+
+     # Check for None, corresponding to the root node.
+     if self.ref is None:
+       return dependencies
+
+     # It's kind of sucky that |targets| has to be passed into this
+     # function, but that's presently the easiest way to access the target
+     # dicts so that this function can find target types.
+
+     if 'target_name' not in targets[self.ref]:
+       raise GypError("Missing 'target_name' field in target.")
+
+     if 'type' not in targets[self.ref]:
+       raise GypError("Missing 'type' field in target %s" %
+                      targets[self.ref]['target_name'])
+
+     target_type = targets[self.ref]['type']
+
+     is_linkable = target_type in linkable_types
+
+     if initial and not is_linkable:
+       # If this is the first target being examined and it's not linkable,
+       # return an empty list of link dependencies, because the link
+       # dependencies are intended to apply to the target itself (initial is
+       # True) and this target won't be linked.
+       return dependencies
+
+     # Don't traverse 'none' targets if explicitly excluded.
+     if (target_type == 'none' and
+         not targets[self.ref].get('dependencies_traverse', True)):
+       if self.ref not in dependencies:
+         dependencies.append(self.ref)
+       return dependencies
+
+     # Executables and loadable modules are already fully and finally
+     # linked.  Nothing else can be a link dependency of them; there can
+     # only be dependencies in the sense that a dependent target might run
+     # an executable or load the loadable_module.
+     if not initial and target_type in ('executable', 'loadable_module'):
+       return dependencies
+
+     # Shared libraries are already fully linked.  They should only be
+     # included in |dependencies| when adjusting static library dependencies
+     # (in order to link against the shared_library's import lib), but
+     # should not be included in |dependencies| when propagating
+     # link_settings.  The |include_shared_libraries| flag controls which of
+     # these two cases we are handling.
+     if (not initial and target_type == 'shared_library' and
+         not include_shared_libraries):
+       return dependencies
+
+     # The target is linkable, add it to the list of link dependencies.
+     if self.ref not in dependencies:
+       dependencies.append(self.ref)
+       if initial or not is_linkable:
+         # If this is a subsequent target and it's linkable, don't look any
+         # further for linkable dependencies, as they'll already be linked
+         # into this target.  Always look at dependencies of the initial
+         # target, and always look at dependencies of non-linkables.
+         for dependency in self.dependencies:
+           dependency._LinkDependenciesInternal(targets,
+                                                include_shared_libraries,
+                                                dependencies, False)
+
+     return dependencies
+
1687
+   def DependenciesForLinkSettings(self, targets):
+     """
+     Returns a list of dependency targets whose link_settings should be
+     merged into this target.
+     """
+
+     # TODO(sbaig) Currently, chrome depends on the bug that shared
+     # libraries' link_settings are propagated.  So for now, we will allow
+     # it, unless the 'allow_sharedlib_linksettings_propagation' flag is
+     # explicitly set to False.  Once chrome is fixed, we can remove this
+     # flag.
+     include_shared_libraries = \
+         targets[self.ref].get('allow_sharedlib_linksettings_propagation',
+                               True)
+     return self._LinkDependenciesInternal(targets, include_shared_libraries)
+
1701
+   def DependenciesToLinkAgainst(self, targets):
+     """
+     Returns a list of dependency targets that are linked into this target.
+     """
+     return self._LinkDependenciesInternal(targets, True)
+
+
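To see how "imported" dependencies ride along, consider a hypothetical graph where a depends on b, and b exports c's settings; c is spliced in directly after b in the direct-and-imported list:

    targets = {
        'x.gyp:a#target': {'dependencies': ['x.gyp:b#target']},
        'x.gyp:b#target': {'export_dependent_settings': ['x.gyp:c#target']},
        'x.gyp:c#target': {},
    }
    # With node_a.dependencies holding the node for b:
    # node_a.DirectAndImportedDependencies(targets)
    #   -> ['x.gyp:b#target', 'x.gyp:c#target']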
1708
+ def BuildDependencyList(targets):
+   # Create a DependencyGraphNode for each target.  Put it into a dict for
+   # easy access.
+   dependency_nodes = {}
+   for target, spec in targets.iteritems():
+     if target not in dependency_nodes:
+       dependency_nodes[target] = DependencyGraphNode(target)
+
+   # Set up the dependency links.  Targets that have no dependencies are
+   # treated as dependent on root_node.
+   root_node = DependencyGraphNode(None)
+   for target, spec in targets.iteritems():
+     target_node = dependency_nodes[target]
+     target_build_file = gyp.common.BuildFile(target)
+     dependencies = spec.get('dependencies')
+     if not dependencies:
+       target_node.dependencies = [root_node]
+       root_node.dependents.append(target_node)
+     else:
+       for dependency in dependencies:
+         dependency_node = dependency_nodes.get(dependency)
+         if not dependency_node:
+           raise GypError("Dependency '%s' not found while "
+                          "trying to load target %s" % (dependency, target))
+         target_node.dependencies.append(dependency_node)
+         dependency_node.dependents.append(target_node)
+
+   flat_list = root_node.FlattenToList()
+
+   # If there's anything left unvisited, there must be a circular dependency
+   # (cycle).  If you need to figure out what's wrong, look for elements of
+   # targets that are not in flat_list.
+   if len(flat_list) != len(targets):
+     raise DependencyGraphNode.CircularException(
+         'Some targets not reachable, cycle in dependency graph detected: ' +
+         ' '.join(set(flat_list) ^ set(targets)))
+
+   return [dependency_nodes, flat_list]
+
+
1748
+ def VerifyNoGYPFileCircularDependencies(targets):
+   # Create a DependencyGraphNode for each gyp file containing a target.
+   # Put it into a dict for easy access.
+   dependency_nodes = {}
+   for target in targets.iterkeys():
+     build_file = gyp.common.BuildFile(target)
+     if not build_file in dependency_nodes:
+       dependency_nodes[build_file] = DependencyGraphNode(build_file)
+
+   # Set up the dependency links.
+   for target, spec in targets.iteritems():
+     build_file = gyp.common.BuildFile(target)
+     build_file_node = dependency_nodes[build_file]
+     target_dependencies = spec.get('dependencies', [])
+     for dependency in target_dependencies:
+       try:
+         dependency_build_file = gyp.common.BuildFile(dependency)
+       except GypError, e:
+         gyp.common.ExceptionAppend(
+             e, 'while computing dependencies of .gyp file %s' % build_file)
+         raise
+
+       if dependency_build_file == build_file:
+         # A .gyp file is allowed to refer back to itself.
+         continue
+       dependency_node = dependency_nodes.get(dependency_build_file)
+       if not dependency_node:
+         raise GypError("Dependency '%s' not found" % dependency_build_file)
+       if dependency_node not in build_file_node.dependencies:
+         build_file_node.dependencies.append(dependency_node)
+         dependency_node.dependents.append(build_file_node)
+
+   # Files that have no dependencies are treated as dependent on root_node.
+   root_node = DependencyGraphNode(None)
+   for build_file_node in dependency_nodes.itervalues():
+     if len(build_file_node.dependencies) == 0:
+       build_file_node.dependencies.append(root_node)
+       root_node.dependents.append(build_file_node)
+
+   flat_list = root_node.FlattenToList()
+
+   # If there's anything left unvisited, there must be a circular dependency
+   # (cycle).
+   if len(flat_list) != len(dependency_nodes):
+     bad_files = []
+     for file in dependency_nodes.iterkeys():
+       if not file in flat_list:
+         bad_files.append(file)
+     common_path_prefix = os.path.commonprefix(dependency_nodes)
+     cycles = []
+     for cycle in root_node.FindCycles():
+       simplified_paths = []
+       for node in cycle:
+         assert(node.ref.startswith(common_path_prefix))
+         simplified_paths.append(node.ref[len(common_path_prefix):])
+       cycles.append('Cycle: %s' % ' -> '.join(simplified_paths))
+     raise DependencyGraphNode.CircularException, \
+         'Cycles in .gyp file dependency graph detected:\n' + '\n'.join(cycles)
+
+
1809
+ def DoDependentSettings(key, flat_list, targets, dependency_nodes):
+   # key should be one of all_dependent_settings, direct_dependent_settings,
+   # or link_settings.
+
+   for target in flat_list:
+     target_dict = targets[target]
+     build_file = gyp.common.BuildFile(target)
+
+     if key == 'all_dependent_settings':
+       dependencies = dependency_nodes[target].DeepDependencies()
+     elif key == 'direct_dependent_settings':
+       dependencies = \
+           dependency_nodes[target].DirectAndImportedDependencies(targets)
+     elif key == 'link_settings':
+       dependencies = \
+           dependency_nodes[target].DependenciesForLinkSettings(targets)
+     else:
+       raise GypError("DoDependentSettings doesn't know how to determine "
+                      'dependencies for ' + key)
+
+     for dependency in dependencies:
+       dependency_dict = targets[dependency]
+       if not key in dependency_dict:
+         continue
+       dependency_build_file = gyp.common.BuildFile(dependency)
+       MergeDicts(target_dict, dependency_dict[key],
+                  build_file, dependency_build_file)
+
+
1838
+ def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
+                                     sort_dependencies):
+   # Recompute target "dependencies" properties.  For each static library
+   # target, remove "dependencies" entries referring to other static
+   # libraries, unless the dependency has the "hard_dependency" attribute
+   # set.  For each linkable target, add a "dependencies" entry referring to
+   # all of the target's computed list of link dependencies (including
+   # static libraries) if no such entry is already present.
+   for target in flat_list:
+     target_dict = targets[target]
+     target_type = target_dict['type']
+
+     if target_type == 'static_library':
+       if not 'dependencies' in target_dict:
+         continue
+
+       target_dict['dependencies_original'] = target_dict.get(
+           'dependencies', [])[:]
+
+       # A static library should not depend on another static library unless
+       # the dependency relationship is "hard," which should only be done
+       # when a dependent relies on some side effect other than just the
+       # build product, like a rule or action output.  Further, if a target
+       # has a non-hard dependency, but that dependency exports a hard
+       # dependency, the non-hard dependency can safely be removed, but the
+       # exported hard dependency must be added to the target to keep the
+       # same dependency ordering.
+       dependencies = \
+           dependency_nodes[target].DirectAndImportedDependencies(targets)
+       index = 0
+       while index < len(dependencies):
+         dependency = dependencies[index]
+         dependency_dict = targets[dependency]
+
+         # Remove every non-hard static library dependency and remove every
+         # non-static library dependency that isn't a direct dependency.
+         if (dependency_dict['type'] == 'static_library' and \
+             not dependency_dict.get('hard_dependency', False)) or \
+            (dependency_dict['type'] != 'static_library' and \
+             not dependency in target_dict['dependencies']):
+           # Take the dependency out of the list, and don't increment index
+           # because the next dependency to analyze will shift into the
+           # index formerly occupied by the one being removed.
+           del dependencies[index]
+         else:
+           index = index + 1
+
+       # Update the dependencies.  If the dependencies list is empty, it's
+       # not needed, so unhook it.
+       if len(dependencies) > 0:
+         target_dict['dependencies'] = dependencies
+       else:
+         del target_dict['dependencies']
+
+     elif target_type in linkable_types:
+       # Get a list of dependency targets that should be linked into this
+       # target.  Add them to the dependencies list if they're not already
+       # present.
+
+       link_dependencies = \
+           dependency_nodes[target].DependenciesToLinkAgainst(targets)
+       for dependency in link_dependencies:
+         if dependency == target:
+           continue
+         if not 'dependencies' in target_dict:
+           target_dict['dependencies'] = []
+         if not dependency in target_dict['dependencies']:
+           target_dict['dependencies'].append(dependency)
+       # Sort the dependencies list in the order from dependents to
+       # dependencies.  e.g. If A and B depend on C and C depends on D,
+       # sort them in A, B, C, D.
+       # Note: flat_list is already sorted in the order from dependencies
+       # to dependents.
+       if sort_dependencies and 'dependencies' in target_dict:
+         target_dict['dependencies'] = [dep for dep in reversed(flat_list)
+                                        if dep in target_dict['dependencies']]
+
+
1915
+ # Initialize this here to speed up MakePathRelative.
+ exception_re = re.compile(r'''["']?[-/$<>^]''')
+
+
+ def MakePathRelative(to_file, fro_file, item):
+   # If item is a relative path, it's relative to the build file dict that
+   # it's coming from.  Fix it up to make it relative to the build file dict
+   # that it's going into.
+   # Exception: any |item| that begins with these special characters is
+   # returned without modification.
+   #   /   Used when a path is already absolute (shortcut optimization;
+   #       such paths would be returned as absolute anyway)
+   #   $   Used for build environment variables
+   #   -   Used for some build environment flags (such as -lapr-1 in a
+   #       "libraries" section)
+   #   <   Used for our own variable and command expansions (see
+   #       ExpandVariables)
+   #   >   Used for our own variable and command expansions (see
+   #       ExpandVariables)
+   #   ^   Used for our own variable and command expansions (see
+   #       ExpandVariables)
+   #
+   #   "/' Used when a value is quoted.  If these are present, then we
+   #       check the second character instead.
+   #
+   if to_file == fro_file or exception_re.match(item):
+     return item
+   else:
+     # TODO(dglazkov) The backslash/forward-slash replacement at the end is
+     # a temporary measure.  This should really be addressed by keeping all
+     # paths in POSIX until actual project generation.
+     ret = os.path.normpath(os.path.join(
+         gyp.common.RelativePath(os.path.dirname(fro_file),
+                                 os.path.dirname(to_file)),
+         item)).replace('\\', '/')
+     if item[-1] == '/':
+       ret += '/'
+     return ret
+
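The core of MakePathRelative is a rebase of |item| from fro_file's directory to to_file's. A rough standalone equivalent using os.path.relpath (an approximation; the real code uses gyp.common.RelativePath):

    import os.path

    to_file, fro_file, item = 'top.gyp', 'a/child.gyp', 'icons/app.png'
    rebased = os.path.normpath(os.path.join(
        os.path.relpath(os.path.dirname(fro_file) or '.',
                        os.path.dirname(to_file) or '.'),
        item)).replace('\\', '/')
    assert rebased == 'a/icons/app.png'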
1951
+ def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
+   # The Python documentation recommends that objects which do not support
+   # hashing set this value to None.  Python library objects follow this
+   # rule.
+   is_hashable = lambda val: val.__hash__
+
+   # If x is hashable, returns whether x is in s.  Else returns whether x is
+   # in l.
+   def is_in_set_or_list(x, s, l):
+     if is_hashable(x):
+       return x in s
+     return x in l
+
+   prepend_index = 0
+
+   # Make membership testing of hashables in |to| (in particular, strings)
+   # faster.
+   hashable_to_set = set(x for x in to if is_hashable(x))
+   for item in fro:
+     singleton = False
+     if isinstance(item, str) or isinstance(item, int):
+       # The cheap and easy case.
+       if is_paths:
+         to_item = MakePathRelative(to_file, fro_file, item)
+       else:
+         to_item = item
+
+       if not isinstance(item, str) or not item.startswith('-'):
+         # Any string that doesn't begin with a "-" is a singleton - it can
+         # only appear once in a list, to be enforced by the list merge
+         # append or prepend.
+         singleton = True
+     elif isinstance(item, dict):
+       # Make a copy of the dictionary, continuing to look for paths to fix.
+       # The other intelligent aspects of merge processing won't apply
+       # because item is being merged into an empty dict.
+       to_item = {}
+       MergeDicts(to_item, item, to_file, fro_file)
+     elif isinstance(item, list):
+       # Recurse, making a copy of the list.  If the list contains any
+       # descendant dicts, path fixing will occur.  Note that here, custom
+       # values for is_paths and append are dropped; those are only to be
+       # applied to |to| and |fro|, not sublists of |fro|.  append shouldn't
+       # matter anyway because the new |to_item| list is empty.
+       to_item = []
+       MergeLists(to_item, item, to_file, fro_file)
+     else:
+       raise TypeError, \
+             'Attempt to merge list item of unsupported type ' + \
+             item.__class__.__name__
+
+     if append:
+       # If appending a singleton that's already in the list, don't append.
+       # This ensures that the earliest occurrence of the item will stay
+       # put.
+       if not singleton or not is_in_set_or_list(to_item, hashable_to_set,
+                                                 to):
+         to.append(to_item)
+         if is_hashable(to_item):
+           hashable_to_set.add(to_item)
+     else:
+       # If prepending a singleton that's already in the list, remove the
+       # existing instance and proceed with the prepend.  This ensures that
+       # the item appears at the earliest possible position in the list.
+       while singleton and to_item in to:
+         to.remove(to_item)
+
+       # Don't just insert everything at index 0.  That would prepend the
+       # new items to the list in reverse order, which would be an
+       # unwelcome surprise.
+       to.insert(prepend_index, to_item)
+       if is_hashable(to_item):
+         hashable_to_set.add(to_item)
+       prepend_index = prepend_index + 1
+
+
2023
+ def MergeDicts(to, fro, to_file, fro_file):
+   # I wanted to name the parameter "from" but it's a Python keyword...
+   for k, v in fro.iteritems():
+     # It would be nice to do "if not k in to: to[k] = v" but that wouldn't
+     # give copy semantics.  Something else may want to merge from the |fro|
+     # dict later, and having the same dict ref pointed to twice in the tree
+     # isn't what anyone wants considering that the dicts may subsequently
+     # be modified.
+     if k in to:
+       bad_merge = False
+       if isinstance(v, str) or isinstance(v, int):
+         if not (isinstance(to[k], str) or isinstance(to[k], int)):
+           bad_merge = True
+       elif v.__class__ != to[k].__class__:
+         bad_merge = True
+
+       if bad_merge:
+         raise TypeError, \
+             'Attempt to merge dict value of type ' + v.__class__.__name__ + \
+             ' into incompatible type ' + to[k].__class__.__name__ + \
+             ' for key ' + k
+     if isinstance(v, str) or isinstance(v, int):
+       # Overwrite the existing value, if any.  Cheap and easy.
+       is_path = IsPathSection(k)
+       if is_path:
+         to[k] = MakePathRelative(to_file, fro_file, v)
+       else:
+         to[k] = v
+     elif isinstance(v, dict):
+       # Recurse, guaranteeing copies will be made of objects that require
+       # it.
+       if not k in to:
+         to[k] = {}
+       MergeDicts(to[k], v, to_file, fro_file)
+     elif isinstance(v, list):
+       # Lists in dicts can be merged with different policies, depending on
+       # how the key in the "from" dict (k, the from-key) is written.
+       #
+       # If the from-key has          ...the to-list will have this action
+       # this character appended:...  applied when receiving the from-list:
+       #                           =  replace
+       #                           +  prepend
+       #                           ?  set, only if to-list does not yet exist
+       #                      (none)  append
+       #
+       # This logic is list-specific, but since it relies on the associated
+       # dict key, it's checked in this dict-oriented function.
+       ext = k[-1]
+       append = True
+       if ext == '=':
+         list_base = k[:-1]
+         lists_incompatible = [list_base, list_base + '?']
+         to[list_base] = []
+       elif ext == '+':
+         list_base = k[:-1]
+         lists_incompatible = [list_base + '=', list_base + '?']
+         append = False
+       elif ext == '?':
+         list_base = k[:-1]
+         lists_incompatible = [list_base, list_base + '=', list_base + '+']
+       else:
+         list_base = k
+         lists_incompatible = [list_base + '=', list_base + '?']
+
+       # Some combinations of merge policies appearing together are
+       # meaningless.  It's stupid to replace and append simultaneously, for
+       # example.  Append and prepend are the only policies that can
+       # coexist.
+       for list_incompatible in lists_incompatible:
+         if list_incompatible in fro:
+           raise GypError('Incompatible list policies ' + k + ' and ' +
+                          list_incompatible)
+
+       if list_base in to:
+         if ext == '?':
+           # If the key ends in "?", the list will only be merged if it
+           # doesn't already exist.
+           continue
+         if not isinstance(to[list_base], list):
+           # This may not have been checked above if merging in a list with
+           # an extension character.
+           raise TypeError, \
+               'Attempt to merge dict value of type ' + \
+               v.__class__.__name__ + \
+               ' into incompatible type ' + to[list_base].__class__.__name__ + \
+               ' for key ' + list_base + ' (' + k + ')'
+       else:
+         to[list_base] = []
+
+       # Call MergeLists, which will make copies of objects that require it.
+       # MergeLists can recurse back into MergeDicts, although this will be
+       # to make copies of dicts (with paths fixed); there will be no
+       # subsequent dict "merging" once entering a list because lists are
+       # always replaced, appended to, or prepended to.
+       is_paths = IsPathSection(list_base)
+       MergeLists(to[list_base], v, to_file, fro_file, is_paths, append)
+     else:
+       raise TypeError, \
+           'Attempt to merge dict value of unsupported type ' + \
+           v.__class__.__name__ + ' for key ' + k
+
+
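The four list policies in the table above, shown on hypothetical inputs (the section names here are made up and are not path sections):

    to  = {'defines': ['A'], 'cflags': ['-O2'], 'libs': ['m']}
    fro = {'defines=': ['B'],     # replace            -> ['B']
           'cflags+':  ['-g'],    # prepend            -> ['-g', '-O2']
           'libs':     ['dl'],    # append             -> ['m', 'dl']
           'ldflags?': ['-pie']}  # set only if absent -> ['-pie']
    # After MergeDicts(to, fro, 'a.gyp', 'a.gyp'), to would be:
    # {'defines': ['B'], 'cflags': ['-g', '-O2'],
    #  'libs': ['m', 'dl'], 'ldflags': ['-pie']}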
2122
+ def MergeConfigWithInheritance(new_configuration_dict, build_file,
+                                target_dict, configuration, visited):
+   # Skip if previously visited.
+   if configuration in visited:
+     return
+
+   # Look at this configuration.
+   configuration_dict = target_dict['configurations'][configuration]
+
+   # Merge in parents.
+   for parent in configuration_dict.get('inherit_from', []):
+     MergeConfigWithInheritance(new_configuration_dict, build_file,
+                                target_dict, parent,
+                                visited + [configuration])
+
+   # Merge it into the new config.
+   MergeDicts(new_configuration_dict, configuration_dict,
+              build_file, build_file)
+
+   # Drop abstract.
+   if 'abstract' in new_configuration_dict:
+     del new_configuration_dict['abstract']
+
+
2145
+ def SetUpConfigurations(target, target_dict):
+   # key_suffixes is a list of key suffixes that might appear on key names.
+   # These suffixes are handled in conditional evaluations (for =, +, and ?)
+   # and rules/exclude processing (for ! and /).  Keys with these suffixes
+   # should be treated the same as keys without.
+   key_suffixes = ['=', '+', '?', '!', '/']
+
+   build_file = gyp.common.BuildFile(target)
+
+   # Provide a single configuration by default if none exists.
+   # TODO(mark): Signal an error if default_configurations exists but
+   # configurations does not.
+   if not 'configurations' in target_dict:
+     target_dict['configurations'] = {'Default': {}}
+   if not 'default_configuration' in target_dict:
+     concrete = [i for i in target_dict['configurations'].iterkeys()
+                 if not target_dict['configurations'][i].get('abstract')]
+     target_dict['default_configuration'] = sorted(concrete)[0]
+
+   for configuration in target_dict['configurations'].keys():
+     old_configuration_dict = target_dict['configurations'][configuration]
+     # Skip abstract configurations (saves work only).
+     if old_configuration_dict.get('abstract'):
+       continue
+     # Configurations inherit (most) settings from the enclosing target
+     # scope.  Get the inheritance relationship right by making a copy of
+     # the target dict.
+     new_configuration_dict = copy.deepcopy(target_dict)
+
+     # Take out the bits that don't belong in a "configurations" section.
+     # Since configuration setup is done before conditional, exclude, and
+     # rules processing, be careful with handling of the suffix characters
+     # used in those phases.
+     delete_keys = []
+     for key in new_configuration_dict:
+       key_ext = key[-1:]
+       if key_ext in key_suffixes:
+         key_base = key[:-1]
+       else:
+         key_base = key
+       if key_base in non_configuration_keys:
+         delete_keys.append(key)
+
+     for key in delete_keys:
+       del new_configuration_dict[key]
+
+     # Merge in configuration (with all its parents first).
+     MergeConfigWithInheritance(new_configuration_dict, build_file,
+                                target_dict, configuration, [])
+
+     # Put the new result back into the target dict as a configuration.
+     target_dict['configurations'][configuration] = new_configuration_dict
+
+   # Now drop all the abstract ones.
+   for configuration in target_dict['configurations'].keys():
+     old_configuration_dict = target_dict['configurations'][configuration]
+     if old_configuration_dict.get('abstract'):
+       del target_dict['configurations'][configuration]
+
+   # Now that all of the target's configurations have been built, go through
+   # the target dict's keys and remove everything that's been moved into a
+   # "configurations" section.
+   delete_keys = []
+   for key in target_dict:
+     key_ext = key[-1:]
+     if key_ext in key_suffixes:
+       key_base = key[:-1]
+     else:
+       key_base = key
+     if not key_base in non_configuration_keys:
+       delete_keys.append(key)
+   for key in delete_keys:
+     del target_dict[key]
+
+   # Check the configurations to see if they contain invalid keys.
+   for configuration in target_dict['configurations'].keys():
+     configuration_dict = target_dict['configurations'][configuration]
+     for key in configuration_dict.keys():
+       if key in invalid_configuration_keys:
+         raise GypError('%s not allowed in the %s configuration, found in '
+                        'target %s' % (key, configuration, target))
+
+
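A compact example of the inheritance flattening performed here, with a hypothetical abstract parent (parents merge first, so their list entries come first):

    target_dict = {
        'configurations': {
            'Common':  {'abstract': 1, 'defines': ['NDEBUG']},
            'Release': {'inherit_from': ['Common'], 'defines': ['RELEASE']},
        },
    }
    # After SetUpConfigurations(target, target_dict), 'Common' is dropped
    # and the merged 'Release' has defines == ['NDEBUG', 'RELEASE'].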
2229
+ def ProcessListFiltersInDict(name, the_dict):
+   """Process regular expression and exclusion-based filters on lists.
+
+   An exclusion list is in a dict key named with a trailing "!", like
+   "sources!".  Every item in such a list is removed from the associated
+   main list, which in this example, would be "sources".  Removed items are
+   placed into a "sources_excluded" list in the dict.
+
+   Regular expression (regex) filters are contained in dict keys named with
+   a trailing "/", such as "sources/" to operate on the "sources" list.
+   Regex filters in a dict take the form:
+     'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'],
+                   ['include', '_mac\\.cc$'] ],
+   The first filter says to exclude all files ending in _linux.cc, _mac.cc,
+   and _win.cc.  The second filter then includes all files ending in
+   _mac.cc that are now or were once in the "sources" list.  Items matching
+   an "exclude" filter are subject to the same processing as would occur if
+   they were listed by name in an exclusion list (ending in "!").  Items
+   matching an "include" filter are brought back into the main list if
+   previously excluded by an exclusion list or exclusion regex filter.
+   Subsequent matching "exclude" patterns can still cause items to be
+   excluded after matching an "include".
+   """
+
+   # Look through the dictionary for any lists whose keys end in "!" or
+   # "/".  These are lists that will be treated as exclude lists and
+   # regular expression-based exclude/include lists.  Collect the lists
+   # that are needed first, looking for the lists that they operate on, and
+   # assemble them into |lists|.  This is done in a separate loop up front,
+   # because the _included and _excluded keys need to be added to the_dict,
+   # and that can't be done while iterating through it.
+
+   lists = []
+   del_lists = []
+   for key, value in the_dict.iteritems():
+     operation = key[-1]
+     if operation != '!' and operation != '/':
+       continue
+
+     if not isinstance(value, list):
+       raise ValueError, name + ' key ' + key + ' must be list, not ' + \
+             value.__class__.__name__
+
+     list_key = key[:-1]
+     if list_key not in the_dict:
+       # This happens when there's a list like "sources!" but no
+       # corresponding "sources" list.  Since there's nothing for it to
+       # operate on, queue up the "sources!" list for deletion now.
+       del_lists.append(key)
+       continue
+
+     if not isinstance(the_dict[list_key], list):
+       value = the_dict[list_key]
+       raise ValueError, name + ' key ' + list_key + \
+             ' must be list, not ' + \
+             value.__class__.__name__ + ' when applying ' + \
+             {'!': 'exclusion', '/': 'regex'}[operation]
+
+     if not list_key in lists:
+       lists.append(list_key)
+
+   # Delete the lists that are known to be unneeded at this point.
+   for del_list in del_lists:
+     del the_dict[del_list]
+
+   for list_key in lists:
+     the_list = the_dict[list_key]
+
+     # Initialize the list_actions list, which is parallel to the_list.
+     # Each item in list_actions identifies whether the corresponding item
+     # in the_list should be excluded, unconditionally preserved (included),
+     # or whether no exclusion or inclusion has been applied.  Items for
+     # which no exclusion or inclusion has been applied (yet) have value -1,
+     # items excluded have value 0, and items included have value 1.
+     # Includes and excludes override previous actions.  All items in
+     # list_actions are initialized to -1 because no excludes or includes
+     # have been processed yet.
+     list_actions = list((-1,) * len(the_list))
+
+     exclude_key = list_key + '!'
+     if exclude_key in the_dict:
+       for exclude_item in the_dict[exclude_key]:
+         for index in xrange(0, len(the_list)):
+           if exclude_item == the_list[index]:
+             # This item matches the exclude_item, so set its action to 0
+             # (exclude).
+             list_actions[index] = 0
+
+       # The "whatever!" list is no longer needed, dump it.
+       del the_dict[exclude_key]
+
+     regex_key = list_key + '/'
+     if regex_key in the_dict:
+       for regex_item in the_dict[regex_key]:
+         [action, pattern] = regex_item
+         pattern_re = re.compile(pattern)
+
+         if action == 'exclude':
+           # This item matches an exclude regex, so set its value to 0
+           # (exclude).
+           action_value = 0
+         elif action == 'include':
+           # This item matches an include regex, so set its value to 1
+           # (include).
+           action_value = 1
+         else:
+           # This is an action that doesn't make any sense.
+           raise ValueError, 'Unrecognized action ' + action + ' in ' + \
+                 name + ' key ' + regex_key
+
+         for index in xrange(0, len(the_list)):
+           list_item = the_list[index]
+           if list_actions[index] == action_value:
+             # Even if the regex matches, nothing will change so continue
+             # (regex searches are expensive).
+             continue
+           if pattern_re.search(list_item):
+             # Regular expression match.
+             list_actions[index] = action_value
+
+       # The "whatever/" list is no longer needed, dump it.
+       del the_dict[regex_key]
+
+     # Add excluded items to the excluded list.
+     #
+     # Note that exclude_key ("sources!") is different from excluded_key
+     # ("sources_excluded").  The exclude_key list is input and it was
+     # already processed and deleted; the excluded_key list is output and
+     # it's about to be created.
+     excluded_key = list_key + '_excluded'
+     if excluded_key in the_dict:
+       raise GypError(name + ' key ' + excluded_key +
+                      ' must not be present prior '
+                      'to applying exclusion/regex filters for ' + list_key)
+
+     excluded_list = []
+
+     # Go backwards through the list_actions list so that as items are
+     # deleted, the indices of items that haven't been seen yet don't
+     # shift.  That means that things need to be prepended to excluded_list
+     # to maintain them in the same order that they existed in the_list.
+     for index in xrange(len(list_actions) - 1, -1, -1):
+       if list_actions[index] == 0:
+         # Dump anything with action 0 (exclude).  Keep anything with
+         # action 1 (include) or -1 (no include or exclude seen for the
+         # item).
+         excluded_list.insert(0, the_list[index])
+         del the_list[index]
+
+     # If anything was excluded, put the excluded list into the_dict at
+     # excluded_key.
+     if len(excluded_list) > 0:
+       the_dict[excluded_key] = excluded_list
+
+   # Now recurse into subdicts and lists that may contain dicts.
+   for key, value in the_dict.iteritems():
+     if isinstance(value, dict):
+       ProcessListFiltersInDict(key, value)
+     elif isinstance(value, list):
+       ProcessListFiltersInList(key, value)
+
+
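Putting the exclusion list and regex filters together on a hypothetical "sources" list:

    the_dict = {
        'sources':  ['a.cc', 'a_linux.cc', 'a_mac.cc', 'a_win.cc'],
        'sources!': ['a.cc'],
        'sources/': [['exclude', '_(linux|mac|win)\\.cc$'],
                     ['include', '_mac\\.cc$']],
    }
    # After ProcessListFiltersInDict('target', the_dict):
    # the_dict == {'sources': ['a_mac.cc'],
    #              'sources_excluded': ['a.cc', 'a_linux.cc', 'a_win.cc']}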
2387
+ def ProcessListFiltersInList(name, the_list):
+   for item in the_list:
+     if isinstance(item, dict):
+       ProcessListFiltersInDict(name, item)
+     elif isinstance(item, list):
+       ProcessListFiltersInList(name, item)
+
+
2395
+ def ValidateTargetType(target, target_dict):
+   """Ensures the 'type' field on the target is one of the known types.
+
+   Arguments:
+     target: string, name of target.
+     target_dict: dict, target spec.
+
+   Raises an exception on error.
+   """
+   VALID_TARGET_TYPES = ('executable', 'loadable_module',
+                         'static_library', 'shared_library',
+                         'none')
+   target_type = target_dict.get('type', None)
+   if target_type not in VALID_TARGET_TYPES:
+     raise GypError("Target %s has an invalid target type '%s'.  "
+                    "Must be one of %s." %
+                    (target, target_type, '/'.join(VALID_TARGET_TYPES)))
+   if (target_dict.get('standalone_static_library', 0) and
+       not target_type == 'static_library'):
+     raise GypError('Target %s has type %s but standalone_static_library '
+                    'flag is only valid for static_library type.' %
+                    (target, target_type))
+
+
2419
+ def ValidateSourcesInTarget(target, target_dict, build_file):
+   # TODO: Check if MSVC allows this for loadable_module targets.
+   if target_dict.get('type', None) not in ('static_library',
+                                            'shared_library'):
+     return
+   sources = target_dict.get('sources', [])
+   basenames = {}
+   for source in sources:
+     name, ext = os.path.splitext(source)
+     is_compiled_file = ext in [
+         '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
+     if not is_compiled_file:
+       continue
+     basename = os.path.basename(name)  # Don't include extension.
+     basenames.setdefault(basename, []).append(source)
+
+   error = ''
+   for basename, files in basenames.iteritems():
+     if len(files) > 1:
+       error += '  %s: %s\n' % (basename, ' '.join(files))
+
+   if error:
+     print('static library %s has several files with the same basename:\n' %
+           target + error + 'Some build systems, e.g. MSVC08, '
+           'cannot handle that.')
+     raise GypError('Duplicate basenames in sources section, see list above')
+
+
2446
+ def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
+   """Ensures that the rules sections in target_dict are valid and
+   consistent, and determines which sources they apply to.
+
+   Arguments:
+     target: string, name of target.
+     target_dict: dict, target spec containing "rules" and "sources" lists.
+     extra_sources_for_rules: a list of keys to scan for rule matches in
+         addition to 'sources'.
+   """
+
+   # Dicts to map between values found in rules' 'rule_name' and 'extension'
+   # keys and the rule dicts themselves.
+   rule_names = {}
+   rule_extensions = {}
+
+   rules = target_dict.get('rules', [])
+   for rule in rules:
+     # Make sure that there's no conflict among rule names and extensions.
+     rule_name = rule['rule_name']
+     if rule_name in rule_names:
+       raise GypError('rule %s exists in duplicate, target %s' %
+                      (rule_name, target))
+     rule_names[rule_name] = rule
+
+     rule_extension = rule['extension']
+     if rule_extension.startswith('.'):
+       rule_extension = rule_extension[1:]
+     if rule_extension in rule_extensions:
+       raise GypError(('extension %s associated with multiple rules, ' +
+                       'target %s rules %s and %s') %
+                      (rule_extension, target,
+                       rule_extensions[rule_extension]['rule_name'],
+                       rule_name))
+     rule_extensions[rule_extension] = rule
+
+     # Make sure rule_sources isn't already there.  It's going to be
+     # created below if needed.
+     if 'rule_sources' in rule:
+       raise GypError(
+           'rule_sources must not exist in input, target %s rule %s' %
+           (target, rule_name))
+
+     rule_sources = []
+     source_keys = ['sources']
+     source_keys.extend(extra_sources_for_rules)
+     for source_key in source_keys:
+       for source in target_dict.get(source_key, []):
+         (source_root, source_extension) = os.path.splitext(source)
+         if source_extension.startswith('.'):
+           source_extension = source_extension[1:]
+         if source_extension == rule_extension:
+           rule_sources.append(source)
+
+     if len(rule_sources) > 0:
+       rule['rule_sources'] = rule_sources
+
+
2504
+ def ValidateRunAsInTarget(target, target_dict, build_file):
+   target_name = target_dict.get('target_name')
+   run_as = target_dict.get('run_as')
+   if not run_as:
+     return
+   if not isinstance(run_as, dict):
+     raise GypError("The 'run_as' in target %s from file %s should be a "
+                    "dictionary." %
+                    (target_name, build_file))
+   action = run_as.get('action')
+   if not action:
+     raise GypError("The 'run_as' in target %s from file %s must have an "
+                    "'action' section." %
+                    (target_name, build_file))
+   if not isinstance(action, list):
+     raise GypError("The 'action' for 'run_as' in target %s from file %s "
+                    "must be a list." %
+                    (target_name, build_file))
+   working_directory = run_as.get('working_directory')
+   if working_directory and not isinstance(working_directory, str):
+     raise GypError("The 'working_directory' for 'run_as' in target %s "
+                    "in file %s should be a string." %
+                    (target_name, build_file))
+   environment = run_as.get('environment')
+   if environment and not isinstance(environment, dict):
+     raise GypError("The 'environment' for 'run_as' in target %s "
+                    "in file %s should be a dictionary." %
+                    (target_name, build_file))
+
+
2534
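Collecting the constraints enforced above: 'run_as' must be a dict, its 'action' a list, 'working_directory' (if given) a string, and 'environment' (if given) a dict. A value that passes validation, with illustrative contents:

    target_dict = {
      'target_name': 'tests',  # hypothetical target
      'run_as': {
        'action': ['python', 'run_tests.py'],  # required list
        'working_directory': 'test',           # optional string
        'environment': {'LANG': 'C'},          # optional dict
      },
    }
    ValidateRunAsInTarget('tests', target_dict, 'demo.gyp')  # no error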
+ def ValidateActionsInTarget(target, target_dict, build_file):
+   '''Validates the inputs to the actions in a target.'''
+   target_name = target_dict.get('target_name')
+   actions = target_dict.get('actions', [])
+   for action in actions:
+     action_name = action.get('action_name')
+     if not action_name:
+       raise GypError("Anonymous action in target %s.  "
+                      "An action must have an 'action_name' field." %
+                      target_name)
+     inputs = action.get('inputs', None)
+     if inputs is None:
+       raise GypError('Action in target %s has no inputs.' % target_name)
+     action_command = action.get('action')
+     if action_command and not action_command[0]:
+       raise GypError("Empty action as command in target %s." % target_name)
+
+
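The action checks are analogous: an 'action_name' is mandatory, 'inputs' must be present (an empty list is fine, a missing key is not), and the command's first element must be non-empty. For example, with hypothetical values:

    target_dict = {
      'target_name': 'codegen',
      'actions': [{
        'action_name': 'generate_header',  # required
        'inputs': ['version.in'],          # key must exist
        'action': ['python', 'gen.py'],    # action[0] must be non-empty
      }],
    }
    ValidateActionsInTarget('codegen', target_dict, 'demo.gyp')  # no error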
+ def TurnIntIntoStrInDict(the_dict):
+   """Given dict the_dict, recursively converts all integers into strings.
+   """
+   # Use items instead of iteritems because there's no need to try to look at
+   # reinserted keys and their associated values.
+   for k, v in the_dict.items():
+     if isinstance(v, int):
+       v = str(v)
+       the_dict[k] = v
+     elif isinstance(v, dict):
+       TurnIntIntoStrInDict(v)
+     elif isinstance(v, list):
+       TurnIntIntoStrInList(v)
+
+     if isinstance(k, int):
+       the_dict[str(k)] = v
+       del the_dict[k]
+
+
+ def TurnIntIntoStrInList(the_list):
+   """Given list the_list, recursively converts all integers into strings.
+   """
+   for index in xrange(0, len(the_list)):
+     item = the_list[index]
+     if isinstance(item, int):
+       the_list[index] = str(item)
+     elif isinstance(item, dict):
+       TurnIntIntoStrInDict(item)
+     elif isinstance(item, list):
+       TurnIntIntoStrInList(item)
+
+
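The two converters above recurse through each other, so arbitrarily nested dicts and lists are normalized in one pass; integer keys are re-inserted as strings as well. A quick before/after, with illustrative keys:

    d = {'warnings': [4018, 4244], 1: {'level': 2}}
    TurnIntIntoStrInDict(d)
    # d == {'warnings': ['4018', '4244'], '1': {'level': '2'}}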
+ def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets,
+                          data):
+   """Return only the targets that are deep dependencies of |root_targets|."""
+   qualified_root_targets = []
+   for target in root_targets:
+     target = target.strip()
+     qualified_targets = gyp.common.FindQualifiedTargets(target, flat_list)
+     if not qualified_targets:
+       raise GypError("Could not find target %s" % target)
+     qualified_root_targets.extend(qualified_targets)
+
+   wanted_targets = {}
+   for target in qualified_root_targets:
+     wanted_targets[target] = targets[target]
+     for dependency in dependency_nodes[target].DeepDependencies():
+       wanted_targets[dependency] = targets[dependency]
+
+   wanted_flat_list = [t for t in flat_list if t in wanted_targets]
+
+   # Prune unwanted targets from each build_file's data dict.
+   for build_file in data['target_build_files']:
+     if not 'targets' in data[build_file]:
+       continue
+     new_targets = []
+     for target in data[build_file]['targets']:
+       qualified_name = gyp.common.QualifiedTarget(build_file,
+                                                   target['target_name'],
+                                                   target['toolset'])
+       if qualified_name in wanted_targets:
+         new_targets.append(target)
+     data[build_file]['targets'] = new_targets
+
+   return wanted_targets, wanted_flat_list
+
+
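Conceptually this is a transitive-closure filter: keep each root plus everything reachable through its dependency nodes, and drop the rest. A standalone sketch of the same closure over a plain edge dict (the names and the deep_deps helper are stand-ins for dependency_nodes[...].DeepDependencies()):

    deps = {'app': ['lib'], 'lib': ['base'], 'base': [], 'tool': []}

    def deep_deps(target, seen):
      for dep in deps[target]:
        if dep not in seen:
          seen.add(dep)
          deep_deps(dep, seen)
      return seen

    # Pruning to root_targets=['app'] keeps app, lib and base; 'tool' drops.
    wanted = deep_deps('app', set(['app']))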
+ def VerifyNoCollidingTargets(targets):
+   """Verify that no two targets in the same directory share the same name.
+
+   Arguments:
+     targets: A list of targets in the form 'path/to/file.gyp:target_name'.
+   """
+   # Keep a dict going from 'subdirectory:target_name' to 'foo.gyp'.
+   used = {}
+   for target in targets:
+     # Separate out 'path/to/file.gyp, 'target_name' from
+     # 'path/to/file.gyp:target_name'.
+     path, name = target.rsplit(':', 1)
+     # Separate out 'path/to', 'file.gyp' from 'path/to/file.gyp'.
+     subdir, gyp = os.path.split(path)
+     # Use '.' for the current directory '', so that the error messages make
+     # more sense.
+     if not subdir:
+       subdir = '.'
+     # Prepare a key like 'path/to:target_name'.
+     key = subdir + ':' + name
+     if key in used:
+       # Complain if this target is already used.
+       raise GypError('Duplicate target name "%s" in directory "%s" used both '
+                      'in "%s" and "%s".' % (name, subdir, gyp, used[key]))
+     used[key] = gyp
+
+
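Because the key is the containing directory plus the target name, two targets collide when their .gyp files live in the same directory even though the files differ, while the same name under different directories is fine. With hypothetical paths:

    # Raises GypError: both entries map to the key 'chrome:common'.
    VerifyNoCollidingTargets(['chrome/a.gyp:common', 'chrome/b.gyp:common'])

    # OK: the keys are 'chrome:common' and 'net:common'.
    VerifyNoCollidingTargets(['chrome/a.gyp:common', 'net/b.gyp:common'])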
+ def SetGeneratorGlobals(generator_input_info):
+   # Set up path_sections and non_configuration_keys with the default data plus
+   # the generator-specific data.
+   global path_sections
+   path_sections = base_path_sections[:]
+   path_sections.extend(generator_input_info['path_sections'])
+
+   global non_configuration_keys
+   non_configuration_keys = base_non_configuration_keys[:]
+   non_configuration_keys.extend(generator_input_info['non_configuration_keys'])
+
+   global multiple_toolsets
+   multiple_toolsets = generator_input_info[
+       'generator_supports_multiple_toolsets']
+
+   global generator_filelist_paths
+   generator_filelist_paths = generator_input_info['generator_filelist_paths']
+
+
+ def Load(build_files, variables, includes, depth, generator_input_info, check,
+          circular_check, parallel, root_targets):
+   SetGeneratorGlobals(generator_input_info)
+   # A generator can have other lists (in addition to sources) be processed
+   # for rules.
+   extra_sources_for_rules = generator_input_info['extra_sources_for_rules']
+
+   # Load build files.  This loads every target-containing build file into
+   # the |data| dictionary such that the keys to |data| are build file names,
+   # and the values are the entire build file contents after "early" or "pre"
+   # processing has been done and includes have been resolved.
+   # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as
+   # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps
+   # track of the keys corresponding to "target" files.
+   data = {'target_build_files': set()}
+   aux_data = {}
+   # Normalize paths everywhere.  This is important because paths will be
+   # used as keys to the data dict and for references between input files.
+   build_files = set(map(os.path.normpath, build_files))
+   if parallel:
+     LoadTargetBuildFilesParallel(build_files, data, aux_data,
+                                  variables, includes, depth, check,
+                                  generator_input_info)
+   else:
+     for build_file in build_files:
+       try:
+         LoadTargetBuildFile(build_file, data, aux_data,
+                             variables, includes, depth, check, True)
+       except Exception, e:
+         gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file)
+         raise
+
+   # Build a dict to access each target's subdict by qualified name.
+   targets = BuildTargetsDict(data)
+
+   # Fully qualify all dependency links.
+   QualifyDependencies(targets)
+
+   # Remove self-dependencies from targets that have 'prune_self_dependencies'
+   # set to 1.
+   RemoveSelfDependencies(targets)
+
+   # Expand dependencies specified as build_file:*.
+   ExpandWildcardDependencies(targets, data)
+
+   # Apply exclude (!) and regex (/) list filters only for dependency_sections.
+   for target_name, target_dict in targets.iteritems():
+     tmp_dict = {}
+     for key_base in dependency_sections:
+       for op in ('', '!', '/'):
+         key = key_base + op
+         if key in target_dict:
+           tmp_dict[key] = target_dict[key]
+           del target_dict[key]
+     ProcessListFiltersInDict(target_name, tmp_dict)
+     # Write the results back to |target_dict|.
+     for key in tmp_dict:
+       target_dict[key] = tmp_dict[key]
+
+   # Make sure every dependency appears at most once.
+   RemoveDuplicateDependencies(targets)
+
+   if circular_check:
+     # Make sure that any targets in a.gyp don't contain dependencies in other
+     # .gyp files that further depend on a.gyp.
+     VerifyNoGYPFileCircularDependencies(targets)
+
+   [dependency_nodes, flat_list] = BuildDependencyList(targets)
+
+   if root_targets:
+     # Remove, from |targets| and |flat_list|, the targets that are not deep
+     # dependencies of the targets specified in |root_targets|.
+     targets, flat_list = PruneUnwantedTargets(
+         targets, flat_list, dependency_nodes, root_targets, data)
+
+   # Check that no two targets in the same directory have the same name.
+   VerifyNoCollidingTargets(flat_list)
+
+   # Handle dependent settings of various types.
+   for settings_type in ['all_dependent_settings',
+                         'direct_dependent_settings',
+                         'link_settings']:
+     DoDependentSettings(settings_type, flat_list, targets, dependency_nodes)
+
+     # Take out the dependent settings now that they've been published to all
+     # of the targets that require them.
+     for target in flat_list:
+       if settings_type in targets[target]:
+         del targets[target][settings_type]
+
+   # Make sure static libraries don't declare dependencies on other static
+   # libraries, but that linkables depend on all unlinked static libraries
+   # that they need so that their link steps will be correct.
+   gii = generator_input_info
+   if gii['generator_wants_static_library_dependencies_adjusted']:
+     AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
+                                     gii['generator_wants_sorted_dependencies'])
+
+   # Apply "post"/"late"/"target" variable expansions and condition evaluations.
+   for target in flat_list:
+     target_dict = targets[target]
+     build_file = gyp.common.BuildFile(target)
+     ProcessVariablesAndConditionsInDict(
+         target_dict, PHASE_LATE, variables, build_file)
+
+   # Move everything that can go into a "configurations" section into one.
+   for target in flat_list:
+     target_dict = targets[target]
+     SetUpConfigurations(target, target_dict)
+
+   # Apply exclude (!) and regex (/) list filters.
+   for target in flat_list:
+     target_dict = targets[target]
+     ProcessListFiltersInDict(target, target_dict)
+
+   # Apply "latelate" variable expansions and condition evaluations.
+   for target in flat_list:
+     target_dict = targets[target]
+     build_file = gyp.common.BuildFile(target)
+     ProcessVariablesAndConditionsInDict(
+         target_dict, PHASE_LATELATE, variables, build_file)
+
+   # Make sure that the rules make sense, and build up rule_sources lists as
+   # needed.  Not all generators will need to use the rule_sources lists, but
+   # some may, and it seems best to build the list in a common spot.
+   # Also validate actions and run_as elements in targets.
+   for target in flat_list:
+     target_dict = targets[target]
+     build_file = gyp.common.BuildFile(target)
+     ValidateTargetType(target, target_dict)
+     # TODO(thakis): Get vpx_scale/arm/scalesystemdependent.c to be renamed to
+     #               scalesystemdependent_arm_additions.c or similar.
+     if 'arm' not in variables.get('target_arch', ''):
+       ValidateSourcesInTarget(target, target_dict, build_file)
+     ValidateRulesInTarget(target, target_dict, extra_sources_for_rules)
+     ValidateRunAsInTarget(target, target_dict, build_file)
+     ValidateActionsInTarget(target, target_dict, build_file)
+
+   # Generators might not expect ints.  Turn them into strs.
+   TurnIntIntoStrInDict(data)
+
+   # TODO(mark): Return |data| for now because the generator needs a list of
+   # build files that came in.  In the future, maybe it should just accept
+   # a list, and not the whole data dict.
+   return [flat_list, targets, data]
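Load is the single entry point that strings all of the above together: it reads exactly the generator_input_info keys referenced in this section and hands back the flattened target list, the qualified-name target dict, and the raw data dict. A hypothetical invocation with placeholder values, just to show the shapes involved:

    generator_input_info = {
      'path_sections': [],
      'non_configuration_keys': [],
      'generator_supports_multiple_toolsets': False,
      'generator_wants_static_library_dependencies_adjusted': False,
      'generator_wants_sorted_dependencies': False,
      'generator_filelist_paths': None,
      'extra_sources_for_rules': [],
    }
    flat_list, targets, data = Load(
        ['all.gyp'],           # build_files (hypothetical)
        {}, [], '.',           # variables, includes, depth
        generator_input_info,
        False,                 # check
        True,                  # circular_check
        False,                 # parallel
        None)                  # root_targets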