cocoapods-dykit 0.5.2 → 0.5.3
This diff shows the content of publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- checksums.yaml +4 -4
- data/lib/pod/command.rb +2 -0
- data/lib/pod/command/dyinstall.rb +51 -0
- data/lib/pod/command/dyupdate.rb +106 -0
- data/lib/pod/command/fmwk.rb +4 -0
- data/lib/pod/command/lib/dylint.rb +1 -0
- data/lib/pod/gem_version.rb +1 -1
- data/lib/pod/installer.rb +715 -0
- data/lib/pod/installer/analyzer.rb +934 -0
- data/lib/pod/installer/analyzer/analysis_result.rb +57 -0
- data/lib/pod/installer/analyzer/locking_dependency_analyzer.rb +95 -0
- data/lib/pod/installer/analyzer/pod_variant.rb +68 -0
- data/lib/pod/installer/analyzer/pod_variant_set.rb +157 -0
- data/lib/pod/installer/analyzer/podfile_dependency_cache.rb +54 -0
- data/lib/pod/installer/analyzer/sandbox_analyzer.rb +251 -0
- data/lib/pod/installer/analyzer/specs_state.rb +84 -0
- data/lib/pod/installer/analyzer/target_inspection_result.rb +45 -0
- data/lib/pod/installer/analyzer/target_inspector.rb +254 -0
- data/lib/pod/installer/installation_options.rb +158 -0
- data/lib/pod/installer/pod_source_installer.rb +214 -0
- data/lib/pod/installer/pod_source_preparer.rb +77 -0
- data/lib/pod/installer/podfile_validator.rb +139 -0
- data/lib/pod/installer/post_install_hooks_context.rb +107 -0
- data/lib/pod/installer/pre_install_hooks_context.rb +42 -0
- data/lib/pod/installer/source_provider_hooks_context.rb +32 -0
- data/lib/pod/installer/user_project_integrator.rb +253 -0
- data/lib/pod/installer/user_project_integrator/target_integrator.rb +462 -0
- data/lib/pod/installer/user_project_integrator/target_integrator/xcconfig_integrator.rb +146 -0
- data/lib/pod/installer/xcode.rb +8 -0
- data/lib/pod/installer/xcode/pods_project_generator.rb +353 -0
- data/lib/pod/installer/xcode/pods_project_generator/aggregate_target_installer.rb +172 -0
- data/lib/pod/installer/xcode/pods_project_generator/file_references_installer.rb +367 -0
- data/lib/pod/installer/xcode/pods_project_generator/pod_target_installer.rb +718 -0
- data/lib/pod/installer/xcode/pods_project_generator/pod_target_integrator.rb +111 -0
- data/lib/pod/installer/xcode/pods_project_generator/target_installer.rb +265 -0
- data/lib/pod/installer/xcode/target_validator.rb +141 -0
- data/lib/pod/resolver.rb +632 -0
- metadata +34 -2
data/lib/pod/installer/analyzer.rb
@@ -0,0 +1,934 @@
+require File.expand_path('../../resolver', __FILE__)
+module Pod
+  class DyInstaller
+    # Analyzes the Podfile, the Lockfile, and the sandbox manifest to generate
+    # the information relative to a CocoaPods installation.
+    #
+    class Analyzer
+      include Config::Mixin
+      include InstallationOptions::Mixin
+
+      delegate_installation_options { podfile }
+
+      autoload :AnalysisResult, File.expand_path('../analyzer/analysis_result', __FILE__)
+      autoload :LockingDependencyAnalyzer, File.expand_path('../analyzer/locking_dependency_analyzer', __FILE__)
+      autoload :PodfileDependencyCache, File.expand_path('../analyzer/podfile_dependency_cache', __FILE__)
+      autoload :PodVariant, File.expand_path('../analyzer/pod_variant', __FILE__)
+      autoload :PodVariantSet, File.expand_path('../analyzer/pod_variant_set', __FILE__)
+      autoload :SandboxAnalyzer, File.expand_path('../analyzer/sandbox_analyzer', __FILE__)
+      autoload :SpecsState, File.expand_path('../analyzer/specs_state', __FILE__)
+      autoload :TargetInspectionResult, File.expand_path('../analyzer/target_inspection_result', __FILE__)
+      autoload :TargetInspector, File.expand_path('../analyzer/target_inspector', __FILE__)
+      # autoload :DyResolver, File.expand_path('../../resolver', __FILE__)
+
+      # @return [Sandbox] The sandbox where the Pods should be installed.
+      #
+      attr_reader :sandbox
+
+      # @return [Podfile] The Podfile specification that contains the
+      #         information of the Pods that should be installed.
+      #
+      attr_reader :podfile
+
+      # @return [Lockfile] The Lockfile that stores the information about the
+      #         Pods previously installed on any machine.
+      #
+      attr_reader :lockfile
+
+      # @return [Array<Source>] Sources provided by plugins
+      #
+      attr_reader :plugin_sources
+
+      # Initialize a new instance
+      #
+      # @param [Sandbox] sandbox @see sandbox
+      # @param [Podfile] podfile @see podfile
+      # @param [Lockfile] lockfile @see lockfile
+      # @param [Array<Source>] plugin_sources @see plugin_sources
+      #
+      def initialize(sandbox, podfile, lockfile = nil, plugin_sources = nil)
+        @sandbox = sandbox
+        @podfile = podfile
+        @lockfile = lockfile
+        @plugin_sources = plugin_sources
+
+        @update = false
+        @allow_pre_downloads = true
+        @has_dependencies = true
+        @test_pod_target_analyzer_cache = {}
+        @test_pod_target_key = Struct.new(:name, :pod_targets)
+        @podfile_dependency_cache = PodfileDependencyCache.from_podfile(podfile)
+      end
+
+      # Performs the analysis.
+      #
+      # The Podfile and the Lockfile provide the information necessary to
+      # compute which specification should be installed. The manifest of the
+      # sandbox returns which specifications are installed.
+      #
+      # @param [Bool] allow_fetches
+      #        whether external sources may be fetched
+      #
+      # @return [AnalysisResult]
+      #
+      def analyze(allow_fetches = true)
+        validate_podfile!
+        validate_lockfile_version!
+        @result = AnalysisResult.new
+        @result.podfile_dependency_cache = @podfile_dependency_cache
+        if installation_options.integrate_targets?
+          @result.target_inspections = inspect_targets_to_integrate
+        else
+          verify_platforms_specified!
+        end
+        @result.podfile_state = generate_podfile_state
+
+        store_existing_checkout_options
+        fetch_external_sources if allow_fetches
+
+        @locked_dependencies = generate_version_locking_dependencies
+        resolver_specs_by_target = resolve_dependencies
+        validate_platforms(resolver_specs_by_target)
+        @result.specifications = generate_specifications(resolver_specs_by_target)
+        @result.targets = generate_targets(resolver_specs_by_target)
+        @result.sandbox_state = generate_sandbox_state
+        @result.specs_by_target = resolver_specs_by_target.each_with_object({}) do |rspecs_by_target, hash|
+          hash[rspecs_by_target[0]] = rspecs_by_target[1].map(&:spec)
+        end
+        @result.specs_by_source = Hash[resolver_specs_by_target.values.flatten(1).group_by(&:source).map { |source, specs| [source, specs.map(&:spec).uniq] }]
+        sources.each { |s| @result.specs_by_source[s] ||= [] }
+        @result
+      end
+
+      attr_accessor :result
+
+      # @return [Bool] Whether an installation should be performed or this
+      #         CocoaPods project is already up to date.
+      #
+      def needs_install?
+        analysis_result = analyze(false)
+        podfile_needs_install?(analysis_result) || sandbox_needs_install?(analysis_result)
+      end
+
+      # @param [AnalysisResult] analysis_result
+      #        the analysis result to check for changes
+      #
+      # @return [Bool] Whether the podfile has changes respect to the lockfile.
+      #
+      def podfile_needs_install?(analysis_result)
+        state = analysis_result.podfile_state
+        needing_install = state.added + state.changed + state.deleted
+        !needing_install.empty?
+      end
+
+      # @param [AnalysisResult] analysis_result
+      #        the analysis result to check for changes
+      #
+      # @return [Bool] Whether the sandbox is in synch with the lockfile.
+      #
+      def sandbox_needs_install?(analysis_result)
+        state = analysis_result.sandbox_state
+        needing_install = state.added + state.changed + state.deleted
+        !needing_install.empty?
+      end
+
+      #-----------------------------------------------------------------------#
+
+      # @!group Configuration
+
+      # @return [Hash, Boolean, nil] Pods that have been requested to be
+      #         updated or true if all Pods should be updated
+      #
+      attr_accessor :update
+
+      # @return [Bool] Whether the version of the dependencies which did not
+      #         change in the Podfile should be locked.
+      #
+      def update_mode?
+        update != nil
+      end
+
+      # @return [Symbol] Whether and how the dependencies in the Podfile
+      #         should be updated.
+      #
+      def update_mode
+        if !update
+          :none
+        elsif update == true
+          :all
+        elsif !update[:pods].nil?
+          :selected
+        end
+      end
+
+      # @return [Bool] Whether the analysis allows pre-downloads and thus
+      #         modifications to the sandbox.
+      #
+      # @note   This flag should not be used in installations.
+      #
+      # @note   This is used by the `pod outdated` command to prevent
+      #         modification of the sandbox in the resolution process.
+      #
+      attr_accessor :allow_pre_downloads
+      alias_method :allow_pre_downloads?, :allow_pre_downloads
+
+      # @return [Bool] Whether the analysis has dependencies and thus
+      #         sources must be configured.
+      #
+      # @note   This is used by the `pod lib lint` command to prevent
+      #         update of specs when not needed.
+      #
+      attr_accessor :has_dependencies
+      alias_method :has_dependencies?, :has_dependencies
+
+      #-----------------------------------------------------------------------#
+
+      private
+
+      # @return [Bool] Whether the analysis has updated sources repositories.
+      #
+      attr_accessor :specs_updated
+      alias_method :specs_updated?, :specs_updated
+
+      def validate_podfile!
+        validator = Installer::PodfileValidator.new(podfile, @podfile_dependency_cache)
+        validator.validate
+
+        unless validator.valid?
+          raise Informative, validator.message
+        end
+        validator.warnings.uniq.each { |w| UI.warn(w) }
+      end
+
+      # @!group Analysis steps
+
+      # @note The warning about the version of the Lockfile doesn't use the
+      #       `UI.warn` method because it prints the output only at the end
+      #       of the installation. At that time CocoaPods could have crashed.
+      #
+      def validate_lockfile_version!
+        if lockfile && lockfile.cocoapods_version > Version.new(VERSION)
+          STDERR.puts '[!] The version of CocoaPods used to generate ' \
+            "the lockfile (#{lockfile.cocoapods_version}) is "\
+            "higher than the version of the current executable (#{VERSION}). " \
+            'Incompatibility issues may arise.'.yellow
+        end
+      end
+
+      # Compares the {Podfile} with the {Lockfile} in order to detect which
+      # dependencies should be locked.
+      #
+      # @return [SpecsState] the states of the Podfile specs.
+      #
+      # @note As the target definitions share the same sandbox they should have
+      #       the same version of a Pod. For this reason this method returns
+      #       the name of the Pod (root name of the dependencies) and doesn't
+      #       group them by target definition.
+      #
+      # @todo [CocoaPods > 0.18] If there isn't a Lockfile all the Pods should
+      #       be marked as added.
+      #
+      def generate_podfile_state
+        if lockfile
+          pods_state = nil
+          UI.section 'Finding Podfile changes' do
+            pods_by_state = lockfile.detect_changes_with_podfile(podfile)
+            pods_state = SpecsState.new(pods_by_state)
+            pods_state.print
+          end
+          pods_state
+        else
+          state = SpecsState.new
+          state.added.merge(@podfile_dependency_cache.podfile_dependencies.map(&:root_name))
+          state
+        end
+      end
+
+      public
+
+      # Updates the git source repositories.
+      #
+      def update_repositories
+        sources.each do |source|
+          if source.git?
+            config.sources_manager.update(source.name, true)
+          else
+            UI.message "Skipping `#{source.name}` update because the repository is not a git source repository."
+          end
+        end
+        @specs_updated = true
+      end
+
+      private
+
+      # Copies the pod_targets of any of the app embedded aggregate targets into
+      # their potential host aggregate target, if that potential host aggregate target's
+      # user_target hosts any of the app embedded aggregate targets' user_targets
+      #
+      # @param [AggregateTarget] aggregate_target the aggregate target whose user_target
+      #        might host one or more of the embedded aggregate targets' user_targets
+      #
+      # @param [Array<AggregateTarget>] embedded_aggregate_targets the aggregate targets
+      #        representing the embedded targets to be integrated
+      #
+      # @param [Boolean] libraries_only if true, only library-type embedded
+      #        targets are considered, otherwise, all other types are have
+      #        their pods copied to their host targets as well (extensions, etc.)
+      #
+      def copy_embedded_target_pod_targets_to_host(aggregate_target, embedded_aggregate_targets, libraries_only)
+        return if aggregate_target.requires_host_target?
+        pod_target_names = Set.new(aggregate_target.pod_targets.map(&:name))
+        aggregate_user_target_uuids = Set.new(aggregate_target.user_targets.map(&:uuid))
+        embedded_aggregate_targets.each do |embedded_aggregate_target|
+          # Skip non libraries in library-only mode
+          next if libraries_only && !embedded_aggregate_target.library?
+          next unless embedded_aggregate_target.user_targets.any? do |embedded_user_target|
+            # You have to ask the host target's project for the host targets of
+            # the embedded target, as opposed to asking user_project for the
+            # embedded targets of the host target. The latter doesn't work when
+            # the embedded target lives in a sub-project. The lines below get
+            # the host target uuids for the embedded target and checks to see if
+            # those match to any of the user_target uuids in the aggregate_target.
+            host_target_uuids = Set.new(aggregate_target.user_project.host_targets_for_embedded_target(embedded_user_target).map(&:uuid))
+            !aggregate_user_target_uuids.intersection(host_target_uuids).empty?
+          end
+          # This embedded target is hosted by the aggregate target's user_target; copy over the non-duplicate pod_targets
+          aggregate_target.pod_targets = aggregate_target.pod_targets + embedded_aggregate_target.pod_targets.select do |pod_target|
+            !pod_target_names.include? pod_target.name
+          end
+        end
+      end
+
+      # Raises an error if there are embedded targets in the Podfile, but
+      # their host targets have not been declared in the Podfile. As it
+      # finds host targets, it collection information on host target types.
+      #
+      # @param [Array<AggregateTarget>] aggregate_targets the generated
+      #        aggregate targets
+      #
+      # @param [Array<AggregateTarget>] embedded_aggregate_targets the aggregate targets
+      #        representing the embedded targets to be integrated
+      #
+      def analyze_host_targets_in_podfile(aggregate_targets, embedded_aggregate_targets)
+        target_definitions_by_uuid = {}
+        # Collect aggregate target definitions by uuid to later lookup host target
+        # definitions and verify their compatiblity with their embedded targets
+        aggregate_targets.each do |target|
+          target.user_targets.map(&:uuid).each do |uuid|
+            target_definitions_by_uuid[uuid] = target.target_definition
+          end
+        end
+        aggregate_target_user_projects = aggregate_targets.map(&:user_project)
+        embedded_targets_missing_hosts = []
+        host_uuid_to_embedded_target_definitions = {}
+        # Search all of the known user projects for each embedded target's hosts
+        embedded_aggregate_targets.each do |target|
+          host_uuids = []
+          aggregate_target_user_projects.product(target.user_targets).each do |user_project, user_target|
+            host_uuids += user_project.host_targets_for_embedded_target(user_target).map(&:uuid)
+          end
+          # For each host, keep track of its embedded target definitions
+          # to later verify each embedded target's compatiblity with its host,
+          # ignoring the hosts that aren't known to CocoaPods (no target
+          # definitions in the Podfile)
+          host_uuids.each do |uuid|
+            (host_uuid_to_embedded_target_definitions[uuid] ||= []) << target.target_definition if target_definitions_by_uuid.key? uuid
+          end
+          # If none of the hosts are known to CocoaPods (no target definitions
+          # in the Podfile), add it to the list of targets missing hosts
+          embedded_targets_missing_hosts << target unless host_uuids.any? do |uuid|
+            target_definitions_by_uuid.key? uuid
+          end
+        end
+
+        unless embedded_targets_missing_hosts.empty?
+          embedded_targets_missing_hosts_product_types = Set.new embedded_targets_missing_hosts.flat_map(&:user_targets).map(&:symbol_type)
+          target_names = embedded_targets_missing_hosts.map do |target|
+            target.name.sub('Pods-', '') # Make the target names more recognizable to the user
+          end.join ', '
+          # If the targets missing hosts are only frameworks, then this is likely
+          # a project for doing framework development. In that case, just warn that
+          # the frameworks that these targets depend on won't be integrated anywhere
+          if embedded_targets_missing_hosts_product_types.subset?(Set.new([:framework, :static_library]))
+            UI.warn "The Podfile contains framework or static library targets (#{target_names}), for which the Podfile does not contain host targets (targets which embed the framework)." \
+              "\n" \
+              'If this project is for doing framework development, you can ignore this message. Otherwise, add a target to the Podfile that embeds these frameworks to make this message go away (e.g. a test target).'
+          else
+            raise Informative, "Unable to find host target(s) for #{target_names}. Please add the host targets for the embedded targets to the Podfile." \
+              "\n" \
+              'Certain kinds of targets require a host target. A host target is a "parent" target which embeds a "child" target. These are example types of targets that need a host target:' \
+              "\n- Framework" \
+              "\n- App Extension" \
+              "\n- Watch OS 1 Extension" \
+              "\n- Messages Extension (except when used with a Messages Application)"
+          end
+        end
+
+        target_mismatches = []
+        host_uuid_to_embedded_target_definitions.each do |uuid, target_definitions|
+          host_target_definition = target_definitions_by_uuid[uuid]
+          target_definitions.each do |target_definition|
+            unless host_target_definition.uses_frameworks? == target_definition.uses_frameworks?
+              target_mismatches << "- #{host_target_definition.name} (#{host_target_definition.uses_frameworks?}) and #{target_definition.name} (#{target_definition.uses_frameworks?}) do not both set use_frameworks!."
+            end
+          end
+        end
+
+        unless target_mismatches.empty?
+          heading = 'Unable to integrate the following embedded targets with their respective host targets (a host target is a "parent" target which embeds a "child" target like a framework or extension):'
+          raise Informative, heading + "\n\n" + target_mismatches.sort.uniq.join("\n")
+        end
+      end
+
+      # Creates the models that represent the targets generated by CocoaPods.
+      #
+      # @param [Hash{Podfile::TargetDefinition => Array<ResolvedSpecification>}] resolver_specs_by_target
+      #        mapping of targets to resolved specs (containing information about test usage)
+      #        aggregate targets
+      #
+      # @return [Array<AggregateTarget>] the list of aggregate targets generated.
+      #
+      def generate_targets(resolver_specs_by_target)
+        resolver_specs_by_target = resolver_specs_by_target.reject { |td, _| td.abstract? }
+        pod_targets = generate_pod_targets(resolver_specs_by_target)
+        aggregate_targets = resolver_specs_by_target.keys.map do |target_definition|
+          generate_target(target_definition, pod_targets, resolver_specs_by_target)
+        end
+        if installation_options.integrate_targets?
+          # Copy embedded target pods that cannot have their pods embedded as frameworks to
+          # their host targets, and ensure we properly link library pods to their host targets
+          embedded_targets = aggregate_targets.select(&:requires_host_target?)
+          analyze_host_targets_in_podfile(aggregate_targets, embedded_targets)
+
+          use_frameworks_embedded_targets, non_use_frameworks_embedded_targets = embedded_targets.partition(&:requires_frameworks?)
+          aggregate_targets.each do |target|
+            # For targets that require frameworks, we always have to copy their pods to their
+            # host targets because those frameworks will all be loaded from the host target's bundle
+            copy_embedded_target_pod_targets_to_host(target, use_frameworks_embedded_targets, false)
+
+            # For targets that don't require frameworks, we only have to consider library-type
+            # targets because their host targets will still need to link their pods
+            copy_embedded_target_pod_targets_to_host(target, non_use_frameworks_embedded_targets, true)
+          end
+        end
+        aggregate_targets.each do |target|
+          target.search_paths_aggregate_targets.concat(aggregate_targets.select do |aggregate_target|
+            target.target_definition.targets_to_inherit_search_paths.include?(aggregate_target.target_definition)
+          end).freeze
+        end
+      end
+
+      # Setup the aggregate target for a single user target
+      #
+      # @param [TargetDefinition] target_definition
+      #        the target definition for the user target.
+      #
+      # @param [Array<PodTarget>] pod_targets
+      #        the pod targets, which were generated.
+      #
+      # @param [Hash{Podfile::TargetDefinition => Array<ResolvedSpecification>}] resolver_specs_by_target
+      #        the resolved specifications grouped by target.
+      #
+      # @return [AggregateTarget]
+      #
+      def generate_target(target_definition, pod_targets, resolver_specs_by_target)
+        target = AggregateTarget.new(target_definition, sandbox)
+        target.host_requires_frameworks |= target_definition.uses_frameworks?
+
+        if installation_options.integrate_targets?
+          target_inspection = result.target_inspections[target_definition]
+          raise "missing inspection: #{target_definition.name}" unless target_inspection
+          target.user_project = target_inspection.project
+          target.client_root = target.user_project_path.dirname.realpath
+          target.user_target_uuids = target_inspection.project_target_uuids
+          target.user_build_configurations = target_inspection.build_configurations
+          target.archs = target_inspection.archs
+        else
+          target.client_root = config.installation_root.realpath
+          target.user_target_uuids = []
+          target.user_build_configurations = target_definition.build_configurations || { 'Release' => :release, 'Debug' => :debug }
+          if target_definition.platform && target_definition.platform.name == :osx
+            target.archs = '$(ARCHS_STANDARD_64_BIT)'
+          end
+        end
+
+        target.pod_targets = filter_pod_targets_for_target_definition(target_definition, pod_targets, resolver_specs_by_target)
+
+        target
+      end
+
+      # Returns a filtered list of pod targets that should or should not be part of the target definition. Pod targets
+      # used by tests only are filtered.
+      #
+      # @param [TargetDefinition] target_definition
+      #        the target definition to use as the base for filtering
+      #
+      # @param [Array<PodTarget>] pod_targets
+      #        the array of pod targets to check against
+      #
+      # @param [Hash{Podfile::TargetDefinition => Array<ResolvedSpecification>}] resolver_specs_by_target
+      #        the resolved specifications grouped by target.
+      #
+      # @return [Array<PodTarget>] the filtered list of pod targets.
+      #
+      def filter_pod_targets_for_target_definition(target_definition, pod_targets, resolver_specs_by_target)
+        pod_targets.select do |pod_target|
+          included_in_target_definition = pod_target.target_definitions.include?(target_definition)
+          used_by_tests_only = resolver_specs_by_target[target_definition].select { |resolver_spec| pod_target.specs.include?(resolver_spec.spec) }.all?(&:used_by_tests_only?)
+          included_in_target_definition && !used_by_tests_only
+        end
+      end
+
+      # Setup the pod targets for an aggregate target. Deduplicates resulting
+      # targets by grouping by platform and subspec by their root
+      # to create a {PodTarget} for each spec.
+      #
+      # @param [Hash{Podfile::TargetDefinition => Array<ResolvedSpecification>}] resolver_specs_by_target
+      #        the resolved specifications grouped by target.
+      #
+      # @return [Array<PodTarget>]
+      #
+      def generate_pod_targets(resolver_specs_by_target)
+        if installation_options.deduplicate_targets?
+          distinct_targets = resolver_specs_by_target.each_with_object({}) do |dependency, hash|
+            target_definition, dependent_specs = *dependency
+            dependent_specs.group_by(&:root).each do |root_spec, resolver_specs|
+              all_specs = resolver_specs.map(&:spec)
+              test_specs, specs = all_specs.partition(&:test_specification?)
+              pod_variant = PodVariant.new(specs, test_specs, target_definition.platform, target_definition.uses_frameworks?)
+              hash[root_spec] ||= {}
+              (hash[root_spec][pod_variant] ||= []) << target_definition
+              hash[root_spec].keys.find { |k| k == pod_variant }.test_specs.concat(test_specs).uniq!
+            end
+          end
+
+          pod_targets = distinct_targets.flat_map do |_root, target_definitions_by_variant|
+            suffixes = PodVariantSet.new(target_definitions_by_variant.keys).scope_suffixes
+            target_definitions_by_variant.flat_map do |variant, target_definitions|
+              generate_pod_target(target_definitions, variant.specs + variant.test_specs, :scope_suffix => suffixes[variant])
+            end
+          end
+
+          all_resolver_specs = resolver_specs_by_target.values.flatten.map(&:spec).uniq
+          pod_targets_by_name = pod_targets.group_by(&:pod_name).each_with_object({}) do |(name, values), hash|
+            # Sort the target by the number of activated subspecs, so that
+            # we prefer a minimal target as transitive dependency.
+            hash[name] = values.sort_by { |pt| pt.specs.count }
+          end
+          pod_targets.each do |target|
+            all_specs = all_resolver_specs.to_set
+            dependencies = transitive_dependencies_for_specs(target.non_test_specs.to_set, target.platform, all_specs).group_by(&:root)
+            test_dependencies = transitive_dependencies_for_specs(target.test_specs.to_set, target.platform, all_specs).group_by(&:root)
+            test_dependencies.delete_if { |k| dependencies.key? k }
+            target.dependent_targets = filter_dependencies(dependencies, pod_targets_by_name, target)
+            target.test_dependent_targets = filter_dependencies(test_dependencies, pod_targets_by_name, target)
+          end
+        else
+          dedupe_cache = {}
+          resolver_specs_by_target.flat_map do |target_definition, specs|
+            grouped_specs = specs.group_by(&:root).values.uniq
+            pod_targets = grouped_specs.flat_map do |pod_specs|
+              generate_pod_target([target_definition], pod_specs.map(&:spec)).scoped(dedupe_cache)
+            end
+
+            pod_targets.each do |target|
+              all_specs = specs.map(&:spec).to_set
+              dependencies = transitive_dependencies_for_specs(target.non_test_specs.to_set, target.platform, all_specs).group_by(&:root)
+              test_dependencies = transitive_dependencies_for_specs(target.test_specs.to_set, target.platform, all_specs).group_by(&:root)
+              test_dependencies.delete_if { |k| dependencies.key? k }
+              target.dependent_targets = pod_targets.reject { |t| dependencies[t.root_spec].nil? }
+              target.test_dependent_targets = pod_targets.reject { |t| test_dependencies[t.root_spec].nil? }
+            end
+          end
+        end
+      end
+
+      def filter_dependencies(dependencies, pod_targets_by_name, target)
+        dependencies.map do |root_spec, deps|
+          pod_targets_by_name[root_spec.name].find do |t|
+            next false if t.platform.symbolic_name != target.platform.symbolic_name ||
+              t.requires_frameworks? != target.requires_frameworks?
+            spec_names = t.specs.map(&:name)
+            deps.all? { |dep| spec_names.include?(dep.name) }
+          end
+        end
+      end
+
+      # Returns the specs upon which the given specs _transitively_ depend.
+      #
+      # @note: This is implemented in the analyzer, because we don't have to
+      #        care about the requirements after dependency resolution.
+      #
+      # @param [Array<Specification>] specs
+      #        The specs, whose dependencies should be returned.
+      #
+      # @param [Platform] platform
+      #        The platform for which the dependencies should be returned.
+      #
+      # @param [Array<Specification>] all_specs
+      #        All specifications which are installed alongside.
+      #
+      # @return [Array<Specification>]
+      #
+      def transitive_dependencies_for_specs(specs, platform, all_specs)
+        return [] if specs.empty? || all_specs.empty?
+
+        dependent_specs = Set.new
+        specs.each do |spec|
+          spec.consumer(platform).dependencies.each do |dependency|
+            match = all_specs.find do |s|
+              next false unless s.name == dependency.name
+              next false if specs.include?(s)
+              true
+            end
+            dependent_specs << match if match
+          end
+        end
+
+        remaining_specs = all_specs - dependent_specs
+
+        dependent_specs.union transitive_dependencies_for_specs(dependent_specs, platform, remaining_specs)
+      end
+
+      # Create a target for each spec group
+      #
+      # @param [TargetDefinitions] target_definitions
+      #        the aggregate target
+      #
+      # @param [Array<Specification>] pod_specs
+      #        the specifications of an equal root.
+      #
+      # @param [String] scope_suffix
+      #        @see PodTarget#scope_suffix
+      #
+      # @return [PodTarget]
+      #
+      def generate_pod_target(target_definitions, pod_specs, scope_suffix: nil)
+        pod_target = PodTarget.new(pod_specs, target_definitions, sandbox, scope_suffix)
+        pod_target.host_requires_frameworks = target_definitions.any?(&:uses_frameworks?)
+
+        if installation_options.integrate_targets?
+          target_inspections = result.target_inspections.select { |t, _| target_definitions.include?(t) }.values
+          pod_target.user_build_configurations = target_inspections.map(&:build_configurations).reduce({}, &:merge)
+          pod_target.archs = target_inspections.flat_map(&:archs).compact.uniq.sort
+        else
+          pod_target.user_build_configurations = {}
+          if target_definitions.first.platform.name == :osx
+            pod_target.archs = '$(ARCHS_STANDARD_64_BIT)'
+          end
+        end
+
+        pod_target
+      end
+
+      # Generates dependencies that require the specific version of the Pods
+      # that haven't changed in the {Lockfile}.
+      #
+      # These dependencies are passed to the {Resolver}, unless the installer
+      # is in update mode, to prevent it from upgrading the Pods that weren't
+      # changed in the {Podfile}.
+      #
+      # @return [Molinillo::DependencyGraph<Dependency>] the dependencies
+      #         generated by the lockfile that prevent the resolver to update
+      #         a Pod.
+      #
+      def generate_version_locking_dependencies
+        if update_mode == :all || !lockfile
+          LockingDependencyAnalyzer.unlocked_dependency_graph
+        else
+          pods_to_update = result.podfile_state.changed + result.podfile_state.deleted
+          pods_to_update += update[:pods] if update_mode == :selected
+          local_pod_names = @podfile_dependency_cache.podfile_dependencies.select(&:local?).map(&:root_name)
+          pods_to_unlock = local_pod_names.reject do |pod_name|
+            sandbox.specification(pod_name).checksum == lockfile.checksum(pod_name)
+          end
+          LockingDependencyAnalyzer.generate_version_locking_dependencies(lockfile, pods_to_update, pods_to_unlock)
+        end
+      end
+
+      # Fetches the podspecs of external sources if modifications to the
+      # sandbox are allowed.
+      #
+      # @note In update mode all the external sources are refreshed while in
+      #       normal mode they are refreshed only if added or changed in the
+      #       Podfile. Moreover, in normal specifications for unchanged Pods
+      #       which are missing or are generated from an local source are
+      #       fetched as well.
+      #
+      # @note It is possible to perform this step before the resolution
+      #       process because external sources identify a single specific
+      #       version (checkout). If the other dependencies are not
+      #       compatible with the version reported by the podspec of the
+      #       external source the resolver will raise.
+      #
+      # @return [void]
+      #
+      # TODO: Specs
+      #
+      def fetch_external_sources
+        return unless allow_pre_downloads?
+
+        verify_no_pods_with_different_sources!
+        unless dependencies_to_fetch.empty?
+          UI.section 'Fetching external sources' do
+            dependencies_to_fetch.sort.each do |dependency|
+              fetch_external_source(dependency, !pods_to_fetch.include?(dependency.root_name))
+            end
+          end
+        end
+      end
+
+      def verify_no_pods_with_different_sources!
+        deps_with_different_sources = @podfile_dependency_cache.podfile_dependencies.group_by(&:root_name).
+          select { |_root_name, dependencies| dependencies.map(&:external_source).uniq.count > 1 }
+        deps_with_different_sources.each do |root_name, dependencies|
+          raise Informative, 'There are multiple dependencies with different ' \
+            "sources for `#{root_name}` in #{UI.path podfile.defined_in_file}:" \
+            "\n\n- #{dependencies.map(&:to_s).join("\n- ")}"
+        end
+      end
+
+      def fetch_external_source(dependency, use_lockfile_options)
+        checkout_options = lockfile.checkout_options_for_pod_named(dependency.root_name) if lockfile
+        source = if checkout_options && use_lockfile_options
+                   ExternalSources.from_params(checkout_options, dependency, podfile.defined_in_file)
+                 else
+                   ExternalSources.from_dependency(dependency, podfile.defined_in_file)
+                 end
+        source.can_cache = installation_options.clean?
+        source.fetch(sandbox)
+      end
+
+      def dependencies_to_fetch
+        @deps_to_fetch ||= begin
+          deps_to_fetch = []
+          deps_with_external_source = @podfile_dependency_cache.podfile_dependencies.select(&:external_source)
+
+          if update_mode == :all
+            deps_to_fetch = deps_with_external_source
+          else
+            deps_to_fetch = deps_with_external_source.select { |dep| pods_to_fetch.include?(dep.root_name) }
+            deps_to_fetch_if_needed = deps_with_external_source.select { |dep| result.podfile_state.unchanged.include?(dep.root_name) }
+            deps_to_fetch += deps_to_fetch_if_needed.select do |dep|
+              sandbox.specification(dep.root_name).nil? ||
+                !dep.external_source[:path].nil? ||
+                !sandbox.pod_dir(dep.root_name).directory? ||
+                checkout_requires_update?(dep)
+            end
+          end
+          deps_to_fetch.uniq(&:root_name)
+        end
+      end
+
+      def checkout_requires_update?(dependency)
+        return true unless lockfile && sandbox.manifest
+        locked_checkout_options = lockfile.checkout_options_for_pod_named(dependency.root_name)
+        sandbox_checkout_options = sandbox.manifest.checkout_options_for_pod_named(dependency.root_name)
+        locked_checkout_options != sandbox_checkout_options
+      end
+
+      def pods_to_fetch
+        @pods_to_fetch ||= begin
+          pods_to_fetch = result.podfile_state.added + result.podfile_state.changed
+          if update_mode == :selected
+            pods_to_fetch += update[:pods]
+          elsif update_mode == :all
+            pods_to_fetch += result.podfile_state.unchanged + result.podfile_state.deleted
+          end
+          pods_to_fetch += @podfile_dependency_cache.podfile_dependencies.
+            select { |dep| Hash(dep.external_source).key?(:podspec) && sandbox.specification_path(dep.root_name).nil? }.
+            map(&:root_name)
+          pods_to_fetch
+        end
+      end
+
+      def store_existing_checkout_options
+        @podfile_dependency_cache.podfile_dependencies.select(&:external_source).each do |dep|
+          if checkout_options = lockfile && lockfile.checkout_options_for_pod_named(dep.root_name)
+            sandbox.store_checkout_source(dep.root_name, checkout_options)
+          end
+        end
+      end
+
+      # Converts the Podfile in a list of specifications grouped by target.
+      #
+      # @note As some dependencies might have external sources the resolver
+      #       is aware of the {Sandbox} and interacts with it to download the
+      #       podspecs of the external sources. This is necessary because the
+      #       resolver needs their specifications to analyze their
+      #       dependencies.
+      #
+      # @note The specifications of the external sources which are added,
+      #       modified or removed need to deleted from the sandbox before the
+      #       resolution process. Otherwise the resolver might use an
+      #       incorrect specification instead of pre-downloading it.
+      #
+      # @note In update mode the resolver is set to always update the specs
+      #       from external sources.
+      #
+      # @return [Hash{TargetDefinition => Array<Spec>}] the specifications
+      #         grouped by target.
+      #
+      def resolve_dependencies
+        duplicate_dependencies = @podfile_dependency_cache.podfile_dependencies.group_by(&:name).
+          select { |_name, dependencies| dependencies.count > 1 }
+        duplicate_dependencies.each do |name, dependencies|
+          UI.warn "There are duplicate dependencies on `#{name}` in #{UI.path podfile.defined_in_file}:\n\n" \
+            "- #{dependencies.map(&:to_s).join("\n- ")}"
+        end
+
+        resolver_specs_by_target = nil
+        UI.section "Resolving dependencies of #{UI.path(podfile.defined_in_file) || 'Podfile'}" do
+          resolver = DyResolver.new(sandbox, podfile, locked_dependencies, sources, specs_updated?)
+          resolver_specs_by_target = resolver.resolve
+          resolver_specs_by_target.values.flatten(1).map(&:spec).each(&:validate_cocoapods_version)
+        end
+        resolver_specs_by_target
+      end
+
+      # Warns for any specification that is incompatible with its target.
+      #
+      # @param [Hash{TargetDefinition => Array<Spec>}] resolver_specs_by_target
+      #        the specifications grouped by target.
+      #
+      # @return [Hash{TargetDefinition => Array<Spec>}] the specifications
+      #         grouped by target.
+      #
+      def validate_platforms(resolver_specs_by_target)
+        resolver_specs_by_target.each do |target, specs|
+          specs.map(&:spec).each do |spec|
+            next unless target_platform = target.platform
+            unless spec.available_platforms.any? { |p| target_platform.supports?(p) }
+              UI.warn "The platform of the target `#{target.name}` " \
+                "(#{target.platform}) may not be compatible with `#{spec}` which has " \
+                "a minimum requirement of #{spec.available_platforms.join(' - ')}."
+            end
+          end
+        end
+      end
+
+      # Returns the list of all the resolved specifications.
+      #
+      # @return [Array<Specification>] the list of the specifications.
+      #
+      def generate_specifications(resolver_specs_by_target)
+        resolver_specs_by_target.values.flatten.map(&:spec).uniq
+      end
+
+      # Computes the state of the sandbox respect to the resolved
+      # specifications.
+      #
+      # @return [SpecsState] the representation of the state of the manifest
+      #         specifications.
+      #
+      def generate_sandbox_state
+        sandbox_state = nil
+        UI.section 'Comparing resolved specification to the sandbox manifest' do
+          sandbox_analyzer = SandboxAnalyzer.new(sandbox, result.specifications, update_mode?, lockfile)
+          sandbox_state = sandbox_analyzer.analyze
+          sandbox_state.print
+        end
+        sandbox_state
+      end
+
+      #-----------------------------------------------------------------------#
+
+      # @!group Analysis internal products
+
+      # @return [Molinillo::DependencyGraph<Dependency>] the dependencies
+      #         generated by the lockfile that prevent the resolver to update a
+      #         Pod.
+      #
+      attr_reader :locked_dependencies
+
+      #-----------------------------------------------------------------------#
+
+      public
+
+      # Returns the sources used to query for specifications
+      #
+      # When no explicit Podfile sources or plugin sources are defined, this
+      # defaults to the master spec repository.
+      # available sources ({config.sources_manager.all}).
+      #
+      # @return [Array<Source>] the sources to be used in finding
+      #         specifications, as specified by the {#podfile} or all sources.
+      #
+      def sources
+        @sources ||= begin
+          sources = podfile.sources
+          plugin_sources = @plugin_sources || []
+
+          # Add any sources specified using the :source flag on individual dependencies.
+          dependency_sources = @podfile_dependency_cache.podfile_dependencies.map(&:podspec_repo).compact
+          all_dependencies_have_sources = dependency_sources.count == @podfile_dependency_cache.podfile_dependencies.count
+
+          if all_dependencies_have_sources
+            sources = dependency_sources
+          elsif has_dependencies? && sources.empty? && plugin_sources.empty?
+            sources = ['https://github.com/CocoaPods/Specs.git']
+          else
+            sources += dependency_sources
+          end
+
+          result = sources.uniq.map do |source_url|
+            config.sources_manager.find_or_create_source_with_url(source_url)
+          end
+          unless plugin_sources.empty?
+            result.insert(0, *plugin_sources)
+          end
+          result
+        end
+      end
+
+      #-----------------------------------------------------------------------#
+
+      private
+
+      # @!group Analysis sub-steps
+
+      # Checks whether the platform is specified if not integrating
+      #
+      # @return [void]
+      #
+      def verify_platforms_specified!
+        unless installation_options.integrate_targets?
+          @podfile_dependency_cache.target_definition_list.each do |target_definition|
+            if !target_definition.empty? && target_definition.platform.nil?
+              raise Informative, 'It is necessary to specify the platform in the Podfile if not integrating.'
+            end
+          end
+        end
+      end
+
+      # Precompute information for each target_definition in the Podfile
+      #
+      # @note The platforms are computed and added to each target_definition
+      #       because it might be necessary to infer the platform from the
+      #       user targets.
+      #
+      # @return [Hash{TargetDefinition => TargetInspectionResult}]
+      #
+      def inspect_targets_to_integrate
+        inspection_result = {}
+        UI.section 'Inspecting targets to integrate' do
+          inspectors = @podfile_dependency_cache.target_definition_list.map do |target_definition|
+            next if target_definition.abstract?
+            TargetInspector.new(target_definition, config.installation_root)
+          end.compact
+          inspectors.group_by(&:compute_project_path).each do |project_path, target_inspectors|
+            project = Xcodeproj::Project.open(project_path)
+            target_inspectors.each do |inspector|
+              target_definition = inspector.target_definition
+              results = inspector.compute_results(project)
+              inspection_result[target_definition] = results
+              UI.message('Using `ARCHS` setting to build architectures of ' \
+                "target `#{target_definition.label}`: (`#{results.archs.join('`, `')}`)")
+            end
+          end
+        end
+        inspection_result
+      end
+    end
+  end
+end
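
For context, the class added in this file mirrors the stock CocoaPods analyzer flow: an installer constructs the analyzer from the sandbox, Podfile, and optional lockfile, then calls `analyze`, which validates, fetches external sources, resolves with `DyResolver`, and returns an `AnalysisResult`. A minimal usage sketch follows; it is not shipped in this gem, and it assumes a standard CocoaPods setup where `Pod::Config` supplies the sandbox, Podfile, and Lockfile, with the dykit files already loaded (for example via the plugin). The pod name is hypothetical.

    # Illustrative sketch only -- assumes the cocoapods-dykit classes are loaded.
    require 'cocoapods'

    config   = Pod::Config.instance
    podfile  = config.podfile            # Podfile parsed from the project root
    lockfile = config.lockfile           # may be nil on a first install

    analyzer = Pod::DyInstaller::Analyzer.new(config.sandbox, podfile, lockfile)
    analyzer.update = { :pods => ['SomePod'] }  # hypothetical pod; true updates all, nil/false none
    result = analyzer.analyze                   # AnalysisResult with targets, specs, sandbox state

    result.specs_by_target.each do |target_definition, specs|
      puts "#{target_definition.name}: #{specs.map(&:name).join(', ')}"
    end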