carthage_cache_res 0.9.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,22 @@
+ require "carthage_cache_res/version"
+ require "carthage_cache_res/description"
+ require "carthage_cache_res/archive_builder"
+ require "carthage_cache_res/archive_installer"
+ require "carthage_cache_res/archiver"
+ require "carthage_cache_res/carthage_resolved_file"
+ require "carthage_cache_res/project"
+ require "carthage_cache_res/repository"
+ require "carthage_cache_res/terminal"
+ require "carthage_cache_res/configuration_validator"
+ require "carthage_cache_res/configuration"
+ require "carthage_cache_res/configurator"
+ require "carthage_cache_res/configurator_wizard"
+ require "carthage_cache_res/shell_command_executor"
+ require "carthage_cache_res/application"
+ require "carthage_cache_res/swift_version_resolver"
+ require "carthage_cache_res/build_collector"
+ require "carthage_cache_res/carthage_cache_res_lock"
+
+ module CarthageCacheRes
+
+ end
@@ -0,0 +1,93 @@
+ require 'yaml'
+
+ module CarthageCacheRes
+
+   class Application
+
+     CACHE_DIR_NAME = "carthage_cache_res"
+
+     attr_reader :terminal
+     attr_reader :archiver
+     attr_reader :repository
+     attr_reader :project
+     attr_reader :config
+
+     def initialize(project_path, verbose, config, repository: AWSRepository, terminal: Terminal, swift_version_resolver: SwiftVersionResolver)
+       @terminal = terminal.new(verbose)
+       @archiver = Archiver.new
+       @config = Configurator.new(@terminal, project_path, config).config
+       clazz = @config.read_only? ? HTTPRepository : repository
+       @repository = clazz.new(@config.bucket_name, @config.hash_object[:aws_s3_client_options])
+       @project = Project.new(project_path, CACHE_DIR_NAME, @config.archive_base_path, @terminal, @config.tmpdir, swift_version_resolver.new)
+     end
+
+     def archive_exist?
+       repository.archive_exist?(project.archive_path)
+     end
+
+     def install_archive
+       if archive_exist?
+         archive_installer.install
+         true
+       else
+         terminal.puts "There is no cached archive for the current Cartfile.resolved file."
+         false
+       end
+     end
+
+     def create_archive(force = false, prune = nil, prune_white_list = nil, platforms = nil)
+       prune ||= config.prune_on_publish
+       platforms ||= config.platforms
+       prune_white_list ||= config.prune_white_list
+
+       if force || !archive_exist?
+         carthage_cache_res_lock.write_lock_digest(project.archive_key)
+         prune_build_directory(prune_white_list) if prune
+         archive_builder.build(platforms)
+       end
+     end
+
+     def prune_build_directory(white_list)
+       white_list ||= config.prune_white_list
+
+       if white_list && File.exist?(white_list)
+         terminal.vputs "Pruning build directory with white list '#{white_list}' ..."
+         white_list = YAML.load(File.read(white_list))
+       else
+         white_list = {}
+         terminal.vputs "Pruning build directory ..."
+       end
+       build_collector.delete_unused_frameworks(white_list)
+     end
+
+     def validate_installation
+       if carthage_cache_res_lock.valid_digest?(project.archive_key)
+         terminal.puts "Your installation is valid."
+         true
+       else
+         terminal.puts "Your current Carthage digest '#{project.archive_key}' does not match digest '#{carthage_cache_res_lock.lock_digest}' in '#{carthage_cache_res_lock.lock_file_path}'"
+         false
+       end
+     end
+
+     private
+
+     def archive_installer
+       @archive_installer ||= ArchiveInstaller.new(terminal, repository, archiver, project)
+     end
+
+     def archive_builder
+       @archive_builder ||= ArchiveBuilder.new(terminal, repository, archiver, project)
+     end
+
+     def build_collector
+       @build_collector ||= BuildCollector.new(terminal, project.carthage_build_directory, project.all_frameworks)
+     end
+
+     def carthage_cache_res_lock
+       @carthage_cache_res_lock ||= CarthageCacheResLock.new(project.carthage_build_directory)
+     end
+
+   end
+
+ end
@@ -0,0 +1,57 @@
+ module CarthageCacheRes
+
+   class ArchiveBuilder
+
+     attr_reader :terminal
+     attr_reader :repository
+     attr_reader :archiver
+     attr_reader :project
+
+     def initialize(terminal, repository, archiver, project)
+       @terminal = terminal
+       @repository = repository
+       @archiver = archiver
+       @project = project
+     end
+
+     def build(platforms = nil)
+       archive_path = archive(platforms)
+       upload_archive(archive_path)
+       # TODO check if some old archives can be deleted
+       # I would store the last N archives and then delete
+       # the rest
+     end
+
+     private
+
+     def archive(platforms = nil)
+       archive_path = File.join(project.tmpdir, project.archive_filename)
+       if platforms
+         terminal.puts "Archiving Carthage build directory for #{platforms.join(',')} platforms."
+       else
+         terminal.puts "Archiving Carthage build directory for all platforms."
+       end
+
+       filter_block = nil
+       if platforms
+         filter_block = ->(file) do
+           lock_file?(file) || platforms.map(&:downcase).include?(file.downcase)
+         end
+       end
+
+       archiver.archive(project.carthage_build_directory, archive_path, &filter_block)
+       archive_path
+     end
+
+     def upload_archive(archive_path)
+       terminal.puts "Uploading archive with key '#{project.archive_key}'."
+       repository.upload(project.archive_path, archive_path)
+     end
+
+     def lock_file?(file)
+       file == CarthageCacheResLock::LOCK_FILE_NAME
+     end
+
+   end
+
+ end
@@ -0,0 +1,53 @@
+ module CarthageCacheRes
+
+   class ArchiveInstaller
+
+     attr_reader :terminal
+     attr_reader :repository
+     attr_reader :archiver
+     attr_reader :project
+
+     def initialize(terminal, repository, archiver, project)
+       @terminal = terminal
+       @repository = repository
+       @archiver = archiver
+       @project = project
+     end
+
+     def install
+       archive_path = download_archive
+       unarchive(archive_path)
+     end
+
+     private
+
+     def create_carthage_build_directory
+       unless File.exist?(project.carthage_build_directory)
+         terminal.vputs "Creating Carthage build directory '#{project.carthage_build_directory}'."
+         FileUtils.mkdir_p(project.carthage_build_directory)
+       end
+       project.carthage_build_directory
+     end
+
+     def download_archive
+       archive_path = File.join(project.tmpdir, project.archive_filename)
+
+       if File.exist?(archive_path)
+         terminal.puts "Archive with key '#{archive_path}' already downloaded in local cache."
+       else
+         terminal.puts "Downloading archive with key '#{archive_path}'."
+         repository.download(project.archive_path, archive_path)
+       end
+
+       archive_path
+     end
+
+     def unarchive(archive_path)
+       build_directory = create_carthage_build_directory
+       terminal.puts "Unarchiving '#{archive_path}' into '#{build_directory}'."
+       archiver.unarchive(archive_path, build_directory)
+     end
+
+   end
+
+ end
@@ -0,0 +1,24 @@
+ module CarthageCacheRes
+
+   class Archiver
+
+     attr_reader :executor
+
+     def initialize(executor = ShellCommandExecutor.new)
+       @executor = executor
+     end
+
+     def archive(archive_path, destination_path, &filter_block)
+       files = Dir.entries(archive_path).select { |x| !x.start_with?(".") }
+       files = files.select(&filter_block) if filter_block
+       files = files.sort_by(&:downcase)
+       executor.execute("cd #{archive_path} && zip -r -X #{File.expand_path(destination_path)} #{files.join(' ')} > /dev/null")
+     end
+
+     def unarchive(archive_path, destination_path)
+       executor.execute("unzip -o #{archive_path} -d #{destination_path} > /dev/null")
+     end
+
+   end
+
+ end
@@ -0,0 +1,86 @@
+ require 'fileutils'
+
+ module CarthageCacheRes
+
+   class BuildCollector
+
+     attr_reader :terminal
+     attr_reader :build_directory
+     attr_reader :required_frameworks
+     attr_reader :command_executor
+
+     def initialize(terminal, build_directory, required_frameworks, command_executor = ShellCommandExecutor.new)
+       @terminal = terminal
+       @build_directory = build_directory
+       @required_frameworks = Set.new(required_frameworks)
+       @command_executor = command_executor
+     end
+
+     def delete_unused_frameworks(white_list = {})
+       terminal.vputs "Deleting unused frameworks from '#{build_directory}' ..."
+       list_built_frameworks.each do |framework_path|
+         if delete_framework?(framework_path, white_list)
+           delete_framework_files(framework_path)
+         end
+       end
+     end
+
+     private
+
+     def delete_framework?(framework_path, white_list)
+       framework = framework_name(framework_path)
+       if required_frameworks.include?(white_list[framework])
+         false
+       else
+         ! required_frameworks.include?(framework)
+       end
+     end
+
+     def list_built_frameworks
+       Dir[File.join(build_directory, "/**/*.framework")]
+     end
+
+     def framework_name(framework_path)
+       Pathname.new(framework_path).basename(".framework").to_s
+     end
+
+     def delete_framework_files(framework_path)
+       framework_dsym_path = "#{framework_path}.dSYM"
+       terminal.vputs "Deleting #{framework_name(framework_path)} files because they are no longer needed ..."
+
+       # Deletes .framework file
+       terminal.vputs "Deleting '#{framework_path}' ..."
+       FileUtils.rm_r(framework_path) if File.exist?(framework_path)
+
+       # Deletes .bcsymbolmap files (needs .dSYM file)
+       if File.exist?(framework_dsym_path)
+         symbol_map_files(framework_dsym_path).each do |symbol_table_file|
+           terminal.vputs "Deleting '#{symbol_table_file}' ..."
+           FileUtils.rm(symbol_table_file) if File.exist?(symbol_table_file)
+         end
+       end
+
+       # Deletes .dSYM files
+       # .dSYM file MUST be deleted after .bcsymbolmap files because
+       # in order to match .bcsymbolmap files with framework file
+       # we need to use .dSYM file with dwarfdump command.
+       terminal.vputs "Deleting '#{framework_dsym_path}' ..."
+       FileUtils.rm_r(framework_dsym_path) if File.exist?(framework_dsym_path)
+
+       terminal.vputs ""
+     end
+
+     def symbol_map_files(framework_dsym_path)
+       uuid_dwarfdump(framework_dsym_path)
+         .split("\n")
+         .map { |line| line.match(/UUID: (.*) \(/)[1] }
+         .map { |uuid| File.expand_path(File.join(framework_dsym_path, "../#{uuid}.bcsymbolmap")) }
+     end
+
+     def uuid_dwarfdump(framework_dsym_path)
+       command_executor.execute("/usr/bin/xcrun dwarfdump --uuid #{framework_dsym_path}")
+     end
+
+   end
+
+ end
@@ -0,0 +1,28 @@
+
+ module CarthageCacheRes
+
+   class CarthageCacheResLock
+
+     LOCK_FILE_NAME = "CarthageCacheRes.lock"
+
+     attr_reader :lock_file_path
+
+     def initialize(build_directory)
+       @lock_file_path = File.join(build_directory, LOCK_FILE_NAME)
+     end
+
+     def lock_digest
+       File.read(lock_file_path).strip if File.exist?(lock_file_path)
+     end
+
+     def write_lock_digest(digest)
+       File.open(lock_file_path, "w") { |f| f.write(digest) }
+     end
+
+     def valid_digest?(digest)
+       lock_digest == digest
+     end
+
+   end
+
+ end
@@ -0,0 +1,49 @@
+ require "digest"
+
+ module CarthageCacheRes
+
+   class CartfileResolvedFile
+
+     attr_reader :file_path
+     attr_reader :terminal
+     attr_reader :swift_version_resolver
+
+     def initialize(file_path, terminal, swift_version_resolver = SwiftVersionResolver.new)
+       @file_path = file_path
+       @swift_version_resolver = swift_version_resolver
+       @terminal = terminal
+     end
+
+     def digest
+       @digest ||= generate_digest
+     end
+
+     def content
+       @content ||= File.read(file_path)
+     end
+
+     def swift_version
+       @swift_version ||= swift_version_resolver.swift_version
+     end
+
+     def frameworks
+       @frameworks ||= content.each_line.map { |line| extract_framework_name(line) }
+     end
+
+     private
+
+     def generate_digest
+       terminal.vputs "Generating carthage_cache_res archive digest using swift version '#{swift_version}' and " \
+         "the content of '#{file_path}'"
+       generated_digest = Digest::SHA256.hexdigest(content + "#{swift_version}")
+       terminal.vputs "Generated digest: #{generated_digest}"
+       generated_digest
+     end
+
+     def extract_framework_name(cartfile_line)
+       cartfile_line.split(" ")[1].split("/").last.gsub('"', "")
+     end
+
+   end
+
+ end
@@ -0,0 +1,122 @@
+ require "yaml"
+
+ module CarthageCacheRes
+
+   class Configuration
+
+     def self.supported_keys
+       @supported_keys ||= []
+     end
+
+     def self.config_key(name)
+       supported_keys << name
+     end
+
+     def self.valid?(config)
+       ConfigurationValidator.new(config).valid?
+     end
+
+     def self.read_only?(config)
+       ConfigurationValidator.new(config).read_only?
+     end
+
+     def self.parse(str)
+       new(YAML.load(str))
+     end
+
+     def self.default
+       @default ||= Configuration.new({
+         prune_on_publish: false,
+         platforms: nil,
+         prune_white_list: nil,
+         aws_s3_client_options: {
+           region: ENV['AWS_REGION'],
+           access_key_id: ENV['AWS_ACCESS_KEY_ID'],
+           secret_access_key: ENV['AWS_SECRET_ACCESS_KEY'],
+           profile: ENV['AWS_PROFILE'],
+           session_token: ENV['AWS_SESSION_TOKEN']
+
+         },
+         tmpdir: File.join(Dir.home, 'Library', 'Caches'),
+         archive_base_path: nil
+       })
+     end
+
+     config_key :bucket_name
+     config_key :prune_on_publish
+     config_key :prune_white_list
+     config_key :platforms
+     config_key :aws_region
+     config_key :aws_access_key_id
+     config_key :aws_secret_access_key
+     config_key :aws_profile
+     config_key :tmpdir
+     config_key :aws_session_token
+     config_key :archive_base_path
+
+     attr_reader :hash_object
+
+     def initialize(hash_object = {})
+       @hash_object = hash_object
+     end
+
+     def to_yaml
+       hash_object.to_yaml
+     end
+
+     def valid?
+       self.class.valid?(self)
+     end
+
+     def read_only?
+       self.class.read_only?(self)
+     end
+
+     def merge(c)
+       other_hash = nil
+       if c.is_a?(Hash)
+         other_hash = c
+       else
+         other_hash = c.hash_object
+       end
+
+       @hash_object = hash_object.merge(other_hash) do |key, oldval, newval|
+         oldval.is_a?(Hash) ? oldval.merge(newval) : newval
+       end
+       self
+     end
+
+     def method_missing(method_sym, *arguments, &block)
+       method_name = method_sym.to_s
+       key = method_name.chomp("=")
+       return super if !self.class.supported_keys.include?(key.to_sym)
+       config, key = extract_config_and_key(key)
+
+       if method_name.end_with?("=")
+         config[key] = arguments.first
+       else
+         config[key]
+       end
+     end
+
+     def respond_to?(method_sym, include_private = false)
+       if self.class.supported_keys.include?(method_sym)
+         true
+       else
+         super
+       end
+     end
+
+     private
+
+     def extract_config_and_key(method_name)
+       if method_name =~ /^aws_(.*)$/
+         [hash_object[:aws_s3_client_options] ||= {}, $1.to_sym]
+       else
+         [hash_object, method_name.to_sym]
+       end
+     end
+
+   end
+
+ end