cocoapods 1.10.1 → 1.11.0.rc.1
- checksums.yaml +4 -4
- data/CHANGELOG.md +224 -5
- data/README.md +11 -11
- data/lib/cocoapods/command/outdated.rb +12 -1
- data/lib/cocoapods/command/repo/push.rb +17 -0
- data/lib/cocoapods/command/spec/cat.rb +3 -1
- data/lib/cocoapods/command/spec/lint.rb +1 -1
- data/lib/cocoapods/command/spec/which.rb +3 -1
- data/lib/cocoapods/command/spec.rb +18 -9
- data/lib/cocoapods/config.rb +1 -1
- data/lib/cocoapods/downloader/cache.rb +95 -6
- data/lib/cocoapods/downloader.rb +4 -2
- data/lib/cocoapods/external_sources/podspec_source.rb +1 -1
- data/lib/cocoapods/gem_version.rb +1 -1
- data/lib/cocoapods/generator/acknowledgements.rb +1 -1
- data/lib/cocoapods/generator/app_target_helper.rb +7 -3
- data/lib/cocoapods/generator/copy_dsyms_script.rb +4 -4
- data/lib/cocoapods/generator/copy_xcframework_script.rb +4 -48
- data/lib/cocoapods/generator/embed_frameworks_script.rb +2 -1
- data/lib/cocoapods/generator/script_phase_constants.rb +1 -0
- data/lib/cocoapods/installer/analyzer/sandbox_analyzer.rb +31 -4
- data/lib/cocoapods/installer/analyzer.rb +12 -8
- data/lib/cocoapods/installer/podfile_validator.rb +2 -2
- data/lib/cocoapods/installer/pre_integrate_hooks_context.rb +9 -0
- data/lib/cocoapods/installer/project_cache/project_cache_analyzer.rb +9 -2
- data/lib/cocoapods/installer/project_cache/project_installation_cache.rb +15 -2
- data/lib/cocoapods/installer/project_cache/target_cache_key.rb +7 -4
- data/lib/cocoapods/installer/user_project_integrator/target_integrator.rb +149 -9
- data/lib/cocoapods/installer/xcode/pods_project_generator/app_host_installer.rb +10 -3
- data/lib/cocoapods/installer/xcode/pods_project_generator/file_references_installer.rb +25 -6
- data/lib/cocoapods/installer/xcode/pods_project_generator/pod_target_dependency_installer.rb +6 -19
- data/lib/cocoapods/installer/xcode/pods_project_generator/pod_target_installer.rb +70 -58
- data/lib/cocoapods/installer/xcode/pods_project_generator/pod_target_integrator.rb +48 -6
- data/lib/cocoapods/installer/xcode/pods_project_generator/target_installation_result.rb +2 -2
- data/lib/cocoapods/installer/xcode/pods_project_generator/target_installer.rb +2 -5
- data/lib/cocoapods/installer/xcode/pods_project_generator.rb +1 -1
- data/lib/cocoapods/installer.rb +52 -4
- data/lib/cocoapods/resolver.rb +4 -4
- data/lib/cocoapods/sandbox/file_accessor.rb +57 -10
- data/lib/cocoapods/sandbox/headers_store.rb +3 -1
- data/lib/cocoapods/sandbox/path_list.rb +1 -1
- data/lib/cocoapods/sandbox/pod_dir_cleaner.rb +1 -1
- data/lib/cocoapods/sources_manager.rb +14 -8
- data/lib/cocoapods/target/aggregate_target.rb +23 -1
- data/lib/cocoapods/target/build_settings.rb +45 -36
- data/lib/cocoapods/target/pod_target.rb +47 -22
- data/lib/cocoapods/user_interface.rb +4 -0
- data/lib/cocoapods/validator.rb +25 -5
- data/lib/cocoapods/version_metadata.rb +1 -1
- data/lib/cocoapods/xcode/xcframework.rb +8 -3
- metadata +28 -21
data/lib/cocoapods/config.rb
CHANGED
@@ -163,7 +163,7 @@ module Pod
     #
     def installation_root
       @installation_root ||= begin
-        current_dir = Pathname.new(
+        current_dir = Pathname.new(Dir.pwd.unicode_normalize(:nfkc))
         current_path = current_dir
         until current_path.root?
           if podfile_path_in_dir(current_path)
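The normalization added above matters when the working directory name contains non-ASCII characters that macOS reports in decomposed form; without it, Podfile lookups can compare unequal strings for the same folder. A minimal standalone Ruby sketch of the effect (not CocoaPods code):

# "Café" can arrive from the filesystem either precomposed (é as one codepoint)
# or decomposed (e + combining acute accent). They are different strings until
# both are normalized to the same form.
precomposed = "Caf\u00E9"
decomposed  = "Cafe\u0301"

puts precomposed == decomposed                                                     # => false
puts precomposed.unicode_normalize(:nfkc) == decomposed.unicode_normalize(:nfkc)   # => true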
data/lib/cocoapods/downloader/cache.rb
CHANGED
@@ -69,6 +69,91 @@ module Pod
        end
      end

+      # Convenience method for acquiring a shared lock to safely read from the
+      # cache. See `Cache.lock` for more details.
+      #
+      # @param [Pathname] location
+      #        the path to require a lock for.
+      #
+      # @param [block] &block
+      #        the block to execute inside the lock.
+      #
+      # @return [void]
+      #
+      def self.read_lock(location, &block)
+        Cache.lock(location, File::LOCK_SH, &block)
+      end
+
+      # Convenience method for acquiring an exclusive lock to safely write to
+      # the cache. See `Cache.lock` for more details.
+      #
+      # @param [Pathname] location
+      #        the path to require a lock for.
+      #
+      # @param [block] &block
+      #        the block to execute inside the lock.
+      #
+      # @return [void]
+      #
+      def self.write_lock(location, &block)
+        Cache.lock(location, File::LOCK_EX, &block)
+      end
+
+      # Creates a .lock file at `location`, aquires a lock of type
+      # `lock_type`, checks that it is valid, and executes passed block while
+      # holding on to that lock. Afterwards, the .lock file is deleted, which is
+      # why validation of the lock is necessary, as you might have a lock on a
+      # file that doesn't exist on the filesystem anymore.
+      #
+      # @param [Pathname] location
+      #        the path to require a lock for.
+      #
+      # @param [locking_constant] lock_type
+      #        the type of lock, either exclusive (File::LOCK_EX) or shared
+      #        (File::LOCK_SH).
+      #
+      # @return [void]
+      #
+      def self.lock(location, lock_type)
+        raise ArgumentError, 'no block given' unless block_given?
+        lockfile = "#{location}.lock"
+        f = nil
+        loop do
+          f.close if f
+          f = File.open(lockfile, File::CREAT, 0o644)
+          f.flock(lock_type)
+          break if Cache.valid_lock?(f, lockfile)
+        end
+        begin
+          yield location
+        ensure
+          if lock_type == File::LOCK_SH
+            f.flock(File::LOCK_EX)
+            File.delete(lockfile) if Cache.valid_lock?(f, lockfile)
+          else
+            File.delete(lockfile)
+          end
+          f.close
+        end
+      end
+
+      # Checks that the lock is on a file that still exists on the filesystem.
+      #
+      # @param [File] file
+      #        the actual file that we have a lock for.
+      #
+      # @param [String] filename
+      #        the filename of the file that we have a lock for.
+      #
+      # @return [Boolean]
+      #         true if `filename` still exists and is the same file as `file`
+      #
+      def self.valid_lock?(file, filename)
+        file.stat.ino == File.stat(filename).ino
+      rescue Errno::ENOENT
+        false
+      end
+
       private

       # Ensures the cache on disk was created with the same CocoaPods version as
@@ -112,7 +197,7 @@ module Pod
       #
       def path_for_spec(request, slug_opts = {})
         path = root + 'Specs' + request.slug(**slug_opts)
-        path.
+        Pathname.new(path.to_path + '.podspec.json')
       end

       # @param [Request] request
@@ -197,10 +282,12 @@ module Pod
       def copy_and_clean(source, destination, spec)
         specs_by_platform = group_subspecs_by_platform(spec)
         destination.parent.mkpath
-        FileUtils.rm_rf(destination)
-        FileUtils.cp_r(source, destination)
-        Pod::Installer::PodSourcePreparer.new(spec, destination).prepare!
-        Sandbox::PodDirCleaner.new(destination, specs_by_platform).clean!
+        Cache.write_lock(destination) do
+          FileUtils.rm_rf(destination)
+          FileUtils.cp_r(source, destination)
+          Pod::Installer::PodSourcePreparer.new(spec, destination).prepare!
+          Sandbox::PodDirCleaner.new(destination, specs_by_platform).clean!
+        end
       end

       def group_subspecs_by_platform(spec)
@@ -226,7 +313,9 @@ module Pod
       #
       def write_spec(spec, path)
         path.dirname.mkpath
-        path.open('w') { |f| f.write spec.to_pretty_json }
+        Cache.write_lock(path) do
+          path.open('w') { |f| f.write spec.to_pretty_json }
+        end
       end
     end
   end
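The `read_lock`/`write_lock`/`lock` helpers added above implement a create, flock, validate, delete protocol around a sibling `.lock` file so concurrent `pod` processes can share the download cache safely. A simplified standalone Ruby sketch of the same idea (not the gem's code; unlike `Cache.lock`, shared locks here are not upgraded to exclusive before the lock file is deleted):

require 'tmpdir'

# True if +lockfile+ still exists on disk and is the same inode as the open
# handle +file+, i.e. the flock we hold is still meaningful.
def valid_lock?(file, lockfile)
  file.stat.ino == File.stat(lockfile).ino
rescue Errno::ENOENT
  false
end

# Create "<path>.lock", flock it, re-validate, run the block, then clean up.
def with_lock(location, lock_type)
  lockfile = "#{location}.lock"
  f = nil
  loop do
    f.close if f
    f = File.open(lockfile, File::CREAT, 0o644)
    f.flock(lock_type)
    break if valid_lock?(f, lockfile) # retry if another process deleted the file meanwhile
  end
  begin
    yield location
  ensure
    File.delete(lockfile) if valid_lock?(f, lockfile)
    f.close
  end
end

Dir.mktmpdir do |dir|
  path = File.join(dir, 'Specs')
  with_lock(path, File::LOCK_EX) { puts "exclusive (writer) section for #{path}" }
  with_lock(path, File::LOCK_SH) { puts "shared (reader) section for #{path}" }
end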
data/lib/cocoapods/downloader.rb
CHANGED
@@ -50,8 +50,10 @@ module Pod

       if target && result.location && target != result.location
         UI.message "Copying #{request.name} from `#{result.location}` to #{UI.path target}", '> ' do
-          FileUtils.rm_rf target
-          FileUtils.cp_r(result.location, target)
+          Cache.read_lock(result.location) do
+            FileUtils.rm_rf target
+            FileUtils.cp_r(result.location, target)
+          end
         end
       end
       result
data/lib/cocoapods/external_sources/podspec_source.rb
CHANGED
@@ -16,7 +16,7 @@ module Pod
        else
          require 'cocoapods/open-uri'
          begin
-            open(podspec_uri) { |io| store_podspec(sandbox, io.read, is_json) }
+            OpenURI.open_uri(podspec_uri) { |io| store_podspec(sandbox, io.read, is_json) }
          rescue OpenURI::HTTPError => e
            status = e.io.status.join(' ')
            raise Informative, "Failed to fetch podspec for `#{name}` at `#{podspec_uri}`.\n Error: #{status}"
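The switch to `OpenURI.open_uri` above is the Ruby 3 compatible spelling: `Kernel#open` no longer opens URIs on Ruby 3.0+. A hedged sketch of the portable call (the URL is a placeholder):

require 'open-uri'

podspec_uri = 'https://example.com/Foo.podspec.json' # placeholder URL

# Kernel#open(podspec_uri) relied on open-uri patching Kernel#open, which was
# deprecated in Ruby 2.7 and removed in 3.0. OpenURI.open_uri works on both.
OpenURI.open_uri(podspec_uri) do |io|
  puts io.read.bytesize
end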
data/lib/cocoapods/generator/acknowledgements.rb
CHANGED
@@ -67,7 +67,7 @@ module Pod
      #        the specification for which license is needed.
      #
      # @return [String] The text of the license.
-      # @return [Nil] If
+      # @return [Nil] If no license text could be found.
      #
      def license_text(spec)
        return nil unless spec.license
data/lib/cocoapods/generator/app_target_helper.rb
CHANGED
@@ -9,7 +9,7 @@ module Pod
      # @param [Project] project
      #        the Xcodeproj to generate the target into.
      #
-      # @param [Symbol]
+      # @param [Symbol] platform_name
      #        the platform of the target. Can be `:ios` or `:osx`, etc.
      #
      # @param [String] deployment_target
@@ -18,10 +18,14 @@ module Pod
      # @param [String] name
      #        The name to use for the target, defaults to 'App'.
      #
+      # @param [String] product_basename
+      #        The product basename to use for the target, defaults to `name`.
+      #
      # @return [PBXNativeTarget] the new target that was created.
      #
-      def self.add_app_target(project,
-        project.new_target(:application, name,
+      def self.add_app_target(project, platform_name, deployment_target, name = 'App', product_basename = nil)
+        project.new_target(:application, name, platform_name, deployment_target, nil,
+                           nil, product_basename)
      end

      # Creates and links an import file for the given pod target and into the given native target.
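The new trailing argument is forwarded to Xcodeproj's `Project#new_target`, which takes the product basename as its final positional parameter; this lets the generated app host keep the target name `App` while customizing the product name. A hedged usage sketch (the project path and names are made up):

require 'xcodeproj'

# Hypothetical project path and names, for illustration only.
project = Xcodeproj::Project.new('/tmp/AppHost.xcodeproj')

# Same call shape as the updated add_app_target: target named 'App', product
# basename 'MyAppHost'.
target = project.new_target(:application, 'App', :ios, '14.0', nil, nil, 'MyAppHost')

puts target.name # => "App"
project.save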
data/lib/cocoapods/generator/copy_dsyms_script.rb
CHANGED
@@ -1,18 +1,18 @@
 module Pod
   module Generator
     class CopydSYMsScript
-      # @return [Array<Pathname>] dsym_paths the dSYM paths to include in the script contents.
+      # @return [Array<Pathname, String>] dsym_paths the dSYM paths to include in the script contents.
       #
       attr_reader :dsym_paths

-      # @return [Array<Pathname>] bcsymbolmap_paths the bcsymbolmap paths to include in the script contents.
+      # @return [Array<Pathname, String>] bcsymbolmap_paths the bcsymbolmap paths to include in the script contents.
       #
       attr_reader :bcsymbolmap_paths

       # Initialize a new instance
       #
-      # @param [Array<Pathname>] dsym_paths @see dsym_paths
-      # @param [Array<Pathname>] bcsymbolmap_paths @see bcsymbolmap_paths
+      # @param [Array<Pathname, String>] dsym_paths @see dsym_paths
+      # @param [Array<Pathname, String>] bcsymbolmap_paths @see bcsymbolmap_paths
       #
       def initialize(dsym_paths, bcsymbolmap_paths)
         @dsym_paths = Array(dsym_paths)
data/lib/cocoapods/generator/copy_xcframework_script.rb
CHANGED
@@ -70,8 +70,8 @@ copy_dir()
   local destination="$2"

   # Use filter instead of exclude so missing patterns don't throw errors.
-  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \\"- CVS/\\" --filter \\"- .svn/\\" --filter \\"- .git/\\" --filter \\"- .hg/\\" \\"${source}\\" \\"${destination}\\""
-  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" "${source}" "${destination}"
+  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \\"- CVS/\\" --filter \\"- .svn/\\" --filter \\"- .git/\\" --filter \\"- .hg/\\" \\"${source}*\\" \\"${destination}\\""
+  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" "${source}"/* "${destination}"
 }

 SELECT_SLICE_RETVAL=""
@@ -130,54 +130,11 @@ select_slice() {
   done
 }

-install_library() {
-  local source="$1"
-  local name="$2"
-  local destination="#{Target::BuildSettings::XCFRAMEWORKS_BUILD_DIR_VARIABLE}/${name}"
-
-  # Libraries can contain headers, module maps, and a binary, so we'll copy everything in the folder over
-
-  local source="$binary"
-  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \\"- CVS/\\" --filter \\"- .svn/\\" --filter \\"- .git/\\" --filter \\"- .hg/\\" \\"${source}/*\\" \\"${destination}\\""
-  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" "${source}/*" "${destination}"
-}
-
-# Copies a framework to derived data for use in later build phases
-install_framework()
-{
-  local source="$1"
-  local name="$2"
-  local destination="#{Pod::Target::BuildSettings::XCFRAMEWORKS_BUILD_DIR_VARIABLE}/${name}"
-
-  if [ ! -d "$destination" ]; then
-    mkdir -p "$destination"
-  fi
-
-  copy_dir "$source" "$destination"
-  echo "Copied $source to $destination"
-}
-
-install_xcframework_library() {
-  local basepath="$1"
-  local name="$2"
-  local paths=("$@")
-
-  # Locate the correct slice of the .xcframework for the current architectures
-  select_slice "${paths[@]}"
-  local target_path="$SELECT_SLICE_RETVAL"
-  if [[ -z "$target_path" ]]; then
-    echo "warning: [CP] Unable to find matching .xcframework slice in '${paths[@]}' for the current build architectures ($ARCHS)."
-    return
-  fi
-
-  install_framework "$basepath/$target_path" "$name"
-}
-
 install_xcframework() {
   local basepath="$1"
   local name="$2"
   local package_type="$3"
-  local paths=("
+  local paths=("${@:4}")

   # Locate the correct slice of the .xcframework for the current architectures
   select_slice "${paths[@]}"
@@ -195,7 +152,6 @@ install_xcframework() {
   fi

   copy_dir "$source/" "$destination"
-
   echo "Copied $source to $destination"
 }

@@ -218,7 +174,7 @@
       def install_xcframework_args(xcframework, slices)
         root = xcframework.path
         args = [shell_escape("${PODS_ROOT}/#{root.relative_path_from(sandbox_root)}")]
-        args << shell_escape(xcframework.
+        args << shell_escape(xcframework.target_name)
         is_framework = xcframework.build_type.framework?
         args << shell_escape(is_framework ? 'framework' : 'library')
         slices.each do |slice|
data/lib/cocoapods/generator/embed_frameworks_script.rb
CHANGED
@@ -159,8 +159,9 @@ code_sign_if_enabled() {
        end
        xcframeworks_by_config.each do |config, xcframeworks|
          xcframeworks.select { |xcf| xcf.build_type.dynamic_framework? }.each do |xcframework|
+            target_name = xcframework.target_name
            name = xcframework.name
-            contents_by_config[config] << %( install_framework "#{Target::BuildSettings::XCFRAMEWORKS_BUILD_DIR_VARIABLE}/#{
+            contents_by_config[config] << %( install_framework "#{Target::BuildSettings::XCFRAMEWORKS_BUILD_DIR_VARIABLE}/#{target_name}/#{name}.framework"\n)
          end
        end
        script << "\n" unless contents_by_config.empty?
data/lib/cocoapods/generator/script_phase_constants.rb
CHANGED
@@ -79,6 +79,7 @@ install_dsym() {
       rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
     else
       # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
+      mkdir -p "${DWARF_DSYM_FOLDER_PATH}"
       touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.dSYM"
     fi
   fi
data/lib/cocoapods/installer/analyzer/sandbox_analyzer.rb
CHANGED
@@ -31,6 +31,10 @@ module Pod
        #
        attr_reader :sandbox

+        # @return [Podfile] The Podfile to analyze dependencies.
+        #
+        attr_reader :podfile
+
        # @return [Array<Specifications>] The specifications returned by the
        #         resolver.
        #
@@ -45,11 +49,13 @@ module Pod
        # Init a new SandboxAnalyzer
        #
        # @param [Sandbox] sandbox @see sandbox
+        # @param [Podfile] podfile @see podfile
        # @param [Array<Specifications>] specs @see specs
        # @param [Bool] update_mode @see update_mode
        #
-        def initialize(sandbox, specs, update_mode)
+        def initialize(sandbox, podfile, specs, update_mode)
          @sandbox = sandbox
+          @podfile = podfile
          @specs = specs
          @update_mode = update_mode
        end
@@ -103,7 +109,7 @@ module Pod
        #
        def pod_added?(pod)
          return true if resolved_pods.include?(pod) && !sandbox_pods.include?(pod)
-          return true if !
+          return true if !sandbox.local?(pod) && !folder_exist?(pod)
          false
        end

@@ -139,6 +145,7 @@ module Pod
          return true if spec.version != sandbox_version(pod)
          return true if spec.checksum != sandbox_checksum(pod)
          return true if resolved_spec_names(pod) != sandbox_spec_names(pod)
+          return true if podfile_dependency(pod) != sandbox_dependency(pod)
          return true if sandbox.predownloaded?(pod)
          return true if folder_empty?(pod)
          false
@@ -161,14 +168,14 @@ module Pod
        # @return [Array<String>] The name of the resolved Pods.
        #
        def resolved_pods
-          specs.map { |spec| spec.root.name }.uniq
+          @resolved_pods ||= specs.map { |spec| spec.root.name }.uniq
        end

        # @return [Array<String>] The name of the Pods stored in the sandbox
        #         manifest.
        #
        def sandbox_pods
-          sandbox_manifest.pod_names.map { |name| Specification.root_name(name) }.uniq
+          @sandbox_pods ||= sandbox_manifest.pod_names.map { |name| Specification.root_name(name) }.uniq
        end

        # @return [Array<String>] The name of the resolved specifications
@@ -223,6 +230,26 @@ module Pod
          sandbox_manifest.checksum(pod)
        end

+        # @return [Dependency, nil] The dependency with the given name stored in the sandbox.
+        #
+        # @param [String] pod
+        #        the name of the Pod.
+        #
+        def sandbox_dependency(pod)
+          sandbox_manifest.dependencies.find { |d| d.name == pod }
+        end
+
+        #--------------------------------------#
+
+        # @return [Dependency, nil] The dependency with the given name from the podfile.
+        #
+        # @param [String] pod
+        #        the name of the Pod.
+        #
+        def podfile_dependency(pod)
+          podfile.dependencies.find { |d| d.name == pod }
+        end
+
        #--------------------------------------#

        def folder_exist?(pod)
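The new `podfile_dependency`/`sandbox_dependency` check means a pod is reinstalled when its Podfile requirement no longer matches what the sandbox manifest recorded, even if the resolved version is unchanged. A hedged sketch of the comparison, assuming `cocoapods-core`'s `Pod::Dependency` (names and requirements are made up):

require 'cocoapods-core'

podfile_dep = Pod::Dependency.new('Alamofire', '~> 5.4') # as written in the Podfile
sandbox_dep = Pod::Dependency.new('Alamofire', '~> 5.2') # as recorded in the sandbox manifest

# SandboxAnalyzer#pod_changed? now treats this mismatch as a reason to reinstall the pod.
puts podfile_dep != sandbox_dep # => true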
data/lib/cocoapods/installer/analyzer.rb
CHANGED
@@ -35,7 +35,7 @@ module Pod
      #
      attr_reader :podfile

-      # @return [Lockfile] The Lockfile, if available, that stores the information about the Pods previously installed.
+      # @return [Lockfile, nil] The Lockfile, if available, that stores the information about the Pods previously installed.
      #
      attr_reader :lockfile

@@ -67,7 +67,7 @@ module Pod
      #
      # @param [Sandbox] sandbox @see #sandbox
      # @param [Podfile] podfile @see #podfile
-      # @param [Lockfile] lockfile @see #lockfile
+      # @param [Lockfile, nil] lockfile @see #lockfile
      # @param [Array<Source>] plugin_sources @see #plugin_sources
      # @param [Boolean] has_dependencies @see #has_dependencies
      # @param [Hash, Boolean, nil] pods_to_update @see #pods_to_update
@@ -565,12 +565,11 @@ module Pod
        pod_targets_by_build_config = Hash.new([].freeze)
        build_configurations.each { |config| pod_targets_by_build_config[config] = [] }

+        dependencies_by_root_name = @podfile_dependency_cache.target_definition_dependencies(target_definition).group_by(&:root_name)
+
        pod_targets_by_target_definition[target_definition].each do |pod_target|
          pod_name = pod_target.pod_name
-
-          dependencies = @podfile_dependency_cache.target_definition_dependencies(target_definition).select do |dependency|
-            Specification.root_name(dependency.name) == pod_name
-          end
+          dependencies = dependencies_by_root_name[pod_name] || []

          build_configurations.each do |configuration_name|
            whitelists = dependencies.map do |dependency|
@@ -714,7 +713,12 @@ module Pod
        dependencies.map do |root_spec, deps|
          pod_targets_by_name[root_spec.name].find do |t|
            next false if t.platform.symbolic_name != target.platform.symbolic_name ||
-              t.build_as_framework? != target.build_as_framework?
+              # In the case of variants we must ensure that the platform this target is meant for is the same
+              # as the one we are interested in.
+              t.target_definitions.first.platform != target.target_definitions.first.platform ||
+              # rather than target type or requires_frameworks? since we want to group by what was specified in that
+              # _target definition_.
+              t.build_as_framework? != target.build_as_framework?
            spec_names = t.specs.map(&:name)
            deps.all? { |dep| spec_names.include?(dep.name) }
          end
@@ -1115,7 +1119,7 @@ module Pod
      def generate_sandbox_state(specifications)
        sandbox_state = nil
        UI.section 'Comparing resolved specification to the sandbox manifest' do
-          sandbox_analyzer = SandboxAnalyzer.new(sandbox, specifications, update_mode?)
+          sandbox_analyzer = SandboxAnalyzer.new(sandbox, podfile, specifications, update_mode?)
          sandbox_state = sandbox_analyzer.analyze
          sandbox_state.print
        end
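The `@@ -565` hunk above replaces a per-pod-target `select` over every target-definition dependency with a single `group_by(&:root_name)` pass followed by hash lookups. A generic Ruby sketch of the same refactor (the data is made up):

Dep = Struct.new(:name) do
  # Mirrors Specification.root_name: 'Firebase/Auth' and 'Firebase' share the root 'Firebase'.
  def root_name
    name.split('/').first
  end
end

deps = [Dep.new('Alamofire'), Dep.new('Firebase/Auth'), Dep.new('Firebase/Core')]
pod_names = %w[Alamofire Firebase SnapKit]

# Before: re-scan every dependency for every pod target.
slow = pod_names.map { |pod| [pod, deps.select { |d| d.root_name == pod }] }.to_h

# After: group once, then look up (falling back to an empty list).
deps_by_root = deps.group_by(&:root_name)
fast = pod_names.map { |pod| [pod, deps_by_root[pod] || []] }.to_h

puts slow == fast # => true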