gitchefsync 0.6.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,88 @@
1
+ require 'gitchefsync/opts'
2
+ require 'logger'
3
+ #Not available till ruby 2.0
4
+ #require 'syslog/logger'
5
+ require 'syslog'
6
+ require 'gitchefsync/log'
7
+
8
#Central point of configuration
module Gitchefsync
  module Configuration

    # Default release branch used when none is configured.
    REL_BRANCH = 'master'

    def initialize(opts)
      @git_bin = 'git'
    end

    # Populates configuration state from the parsed +options+ hash.
    # options[:config] is the json-based configuration hash; missing keys
    # fall back to defaults (release branch 'master', cookbook staging in
    # /tmp/cookbooks, audit trim of 20 entries).
    def configure(options)
      @options = options
      config = options[:config]
      @git_bin = config['git']
      @berks = config['berks']
      @knife = config['knife']
      @git_local = options[:git_local]
      @token = options[:private_token]
      @stage_dir = config['stage_dir']
      @rel_branch = config['release_branch']
      @rel_branch ||= 'master'
      @stage_cb_dir = options[:stage_cookbook_dir]
      @stage_cb_dir ||= '/tmp/cookbooks'
      @berks_upload = false
      @audit_keep_trim = config['audit_keep_trim']
      @audit_keep_trim ||= 20

      #backward compatibility for "sync_local" attribute: older configs
      #carried the flag as the string "true"/"false"; normalize to a boolean
      #under both the string and symbol key
      if config['sync_local'].is_a? String
        flag = config['sync_local'] == "true"
        config['sync_local'] = flag
        config[:sync_local] = flag
      end

      # single assignment instead of an assignment buried in each ternary
      # branch (the original `cond ? @log = a : @log = b` form)
      @log = options[:syslog] ? Gitchefsync::Log::SysLogger.new('gitchefsync') : Logger.new(STDOUT)

      #json based configuration
      @config = config
    end

    # Parses command-line +args+ via Parser, applies them with #configure
    # and returns the parsed options. Keeps the original globals so other
    # parts of the gem can re-read the raw arguments.
    def parseAndConfigure(args)
      include Parser

      $args = args.clone
      $opts = Parser.parseOpts args
      configure $opts

      #instantiate audit
      #@audit = Audit.new(@config['stage_dir'] )

      return $opts
    end

    # The logger chosen in #configure (syslog or STDOUT).
    def logger
      @log
    end

    def self.log
      Gitchefsync.logger()
    end

    # The json-based configuration hash.
    def configuration
      @config
    end

    # The full parsed options hash.
    def options
      @options
    end

  end
end
@@ -0,0 +1,339 @@
1
+ require 'gitchefsync/git_util'
2
+ require 'gitchefsync/errors'
3
+ require 'gitchefsync/io_util'
4
+ require 'gitchefsync/audit'
5
+ require 'gitchefsync/config'
6
+ require 'gitchefsync/common'
7
+
8
+ module Gitchefsync
9
+
10
# Hook invoked when Gitchefsync is included into another module/class:
# also extends the includer with ClassMethods (defined elsewhere in the gem).
def self.included base
  base.extend ClassMethods
end
13
+
14
# Staging checkout of a single chef environment repository.
# Knows how to clone/pull itself into the staging directory and whether the
# last sync observed new commits (git_delta).
class EnvRepo

  attr_reader :git_delta

  # +https_url_to_repo+ is the clone URL; the git group and project are the
  # last two path segments, and the staging checkout lives under
  # <stage_dir>/<group>_<project>.
  def initialize(https_url_to_repo)
    options = Gitchefsync.options
    config = Gitchefsync.configuration

    Gitlab.private_token = options[:private_token]
    @https_url_to_repo = https_url_to_repo
    @git_group, @git_project = https_url_to_repo.split(File::SEPARATOR).last(2)
    @stage_filepath = config['stage_dir']
    @stage_target_path = File.join(@stage_filepath, [@git_group, @git_project.chomp(".git")].join('_'))
    @git_default_branch = config['release_branch']
    @git_delta = true
    @git_bin = config['git']
  end

  # Clones the repository on first sight; on subsequent runs pulls the
  # release branch only when new commits exist. Raises when the release
  # branch is missing on the remote (and removes the broken checkout).
  def sync_repo
    @git_delta = true
    if Git.gitInit(@stage_target_path)
      @git_delta = Gitchefsync.gitDelta(@stage_target_path, @git_default_branch)
      pull_cmd = "cd #{@stage_target_path} && #{@git_bin} pull origin #{@git_default_branch}"
      if @git_delta
        Git.cmd pull_cmd
        Gitchefsync.logger.info "event_id=git_pull_repo_due_to_new_delta:repo=#{@stage_target_path}:git_delta=#{@git_delta}"
      else
        Gitchefsync.logger.info "event_id=skip_git_pull_repo_since_zero_delta:repo=#{@stage_target_path}:git_delta=#{@git_delta}"
      end
    else
      checkout_name = @stage_target_path.split(File::SEPARATOR).last()
      Git.cmd "cd #{@stage_filepath} && #{@git_bin} clone #{@https_url_to_repo} #{checkout_name}"
      branch_listing = Git.cmd "cd #{@stage_target_path} && #{@git_bin} ls-remote origin #{@git_default_branch}"
      Gitchefsync.logger.info "event_id=git_clone_repo_first_time:repo=#{@stage_target_path}:git_default_branch=#{@git_default_branch}"

      #remove EnvRepo project in staging directory if default_branch does not exist
      if branch_listing.empty?
        Gitchefsync.logger.fatal "event_id=rel_branch_does_not_exist=#{@git_default_branch}"
        Gitchefsync.logger.fatal "event_id=removing_env_repo=#{@https_url_to_repo}, path: #{@stage_target_path}"
        FS.cmd "rm -rf #{@stage_target_path}"
        raise("#{@git_default_branch} does not exist in env_repo: #{@https_url_to_repo}")
      end
    end
  end

  # Path of the chef-repo directory inside the staged checkout.
  def chef_path
    File.join(@stage_target_path, "chef-repo")
  end

  # Fails loudly when the checkout does not contain a chef-repo directory.
  def validate_structure
    unless File.directory?(self.chef_path)
      Gitchefsync.logger.fatal "event_id=chef_repo_structure_problem"
      raise("#{self.chef_path} is not a chef-repo path")
    end
  end
end
71
+
72
+ class EnvSync
73
# Prepares a sync run over +repo_list+ (EnvRepo instances): readies knife,
# validates each repo's chef-repo layout, and initializes audit state and
# the name accumulators used later by cleanup_json_files.
def initialize(repo_list)
  options = Gitchefsync.options
  config = Gitchefsync.configuration

  # copies the knife config into <stage_dir>/.chef and verifies connectivity
  FS.knifeReady(config['stage_dir'],options[:knife_config])

  # fail fast if any repo lacks a chef-repo directory
  repo_list.each do |repo|
    repo.validate_structure
  end

  @knife = config['knife']
  @stage_filepath = config['stage_dir']
  @force_upload = config['force_upload']
  @repo_list = repo_list
  @audit = Audit.new(config['stage_dir'], 'env' )
  @audit_keep_trim = config['audit_keep_trim']
  @audit_keep_trim ||= 20  # default audit retention when unset
  # names seen in git this run; anything on the server but absent from
  # these lists is deleted by cleanup_json_files
  @env_file_list = Array.new()
  @db_file_list = Array.new()
  @role_file_list = Array.new()
end
94
+
95
# Reads and parses +file+ as JSON, returning the parsed structure.
# On read/parse failure the error is logged, an audit entry is recorded,
# and nil is returned so the caller can skip the file.
def reject_json file
  file_json = nil
  begin
    json = File.read file
    file_json = JSON.parse json
  rescue StandardError => e
    # was `rescue Exception`, which would also swallow SystemExit/signals;
    # StandardError still covers Errno::* and JSON::ParserError
    Gitchefsync.logger.error "event_id=env_parse_error:file=#{file}"
    @audit.addEnv(file,'UPDATE', e )
  end
  file_json
end
106
+
107
# Classifies a chef-repo json file by the nearest ancestor directory:
# "env" (environments), "db" (data_bags) or "role" (roles).
# Returns nil when the file sits under none of those trees.
def json_type file
  kind = nil
  if FS.getBasePath(file, "environments")
    kind = "env"
  elsif FS.getBasePath(file, "data_bags")
    kind = "db"
  elsif FS.getBasePath(file, "roles")
    kind = "role"
  end
  kind
end
112
+
113
# Ensures the file's basename matches its json +iden+ attribute
# ('name' for environments/roles, 'id' for data bag items).
# Raises ValidationError on mismatch; logs a debug line when valid.
def validate_json(f, iden)
  unless f['basename'] == f['json'][iden]
    raise ValidationError, "The file json's #{iden} attribute does not match basename: #{f['basename']}"
  end
  Gitchefsync.logger.debug "event_id=json_is_valid:iden=#{iden}:basename=#{f['basename']}"
end
119
+
120
# Uploads one environment json to the chef server via knife.
# f: hash with 'fullpath', 'basename' and parsed 'json'; git_delta forces the
# upload when the repo changed. Records UPDATE/EXISTING audit entries.
def upload_env(f, git_delta)
  Gitchefsync.logger.debug "event_id=upload_env:filepath=#{f['fullpath']}"
  begin
    validate_json(f, 'name')
    # remember the name so cleanup_json_files won't delete it server-side
    @env_file_list << f['json']['name']
    # probe the server; knife prints "ERROR:" when the environment is absent
    show_out = FS.cmdNoError "#{@knife} environment show #{f['json']['name']}"
    if show_out.match("ERROR:") || git_delta || @force_upload
      FS.cmd "#{@knife} environment from file #{f['fullpath']} --yes"
      Gitchefsync.logger.info "event_id=environment_uploaded:file_json_name=#{f['json']['name']}:file=#{f['fullpath']}"
      @audit.addEnv(f['fullpath'],'UPDATE' )
    else
      Gitchefsync.logger.debug "event_id=environment_not_uploaded:file_json_name=#{f['json']['name']}:file=#{f['fullpath']}"
      @audit.addEnv(f['fullpath'],'EXISTING' )
    end
  rescue ValidationError => e
    Gitchefsync.logger.error("event_id=validation_error:msg=#{e.message}")
    @audit.addEnv(f['fullpath'],'UPDATE', e )
  end
end
139
+
140
# Walks +fullpath+ segment by segment and returns the data bag name: the
# first path segment encountered after both "chef-repo" and "data_bags"
# have been seen, e.g. .../chef-repo/data_bags/<bag>/<item>.json -> <bag>.
# Raises ValidationError when the path has no such structure.
def data_bag_iden fullpath
  seen_chef_repo = false
  seen_data_bags = false
  fullpath.split(File::SEPARATOR).each do |segment|
    return segment if seen_chef_repo && seen_data_bags
    seen_chef_repo = segment.eql? "chef-repo" unless seen_chef_repo
    seen_data_bags = segment.eql? "data_bags" if seen_chef_repo
  end
  raise ValidationError, "event_id=invalid_path_to_data_bag_json:path=#{fullpath}"
end
149
+
150
# Uploads one data bag item json to the chef server via knife, creating the
# bag first. f: hash with 'fullpath', 'basename' and parsed 'json';
# git_delta forces the upload when the repo changed.
def upload_db(f, git_delta)
  Gitchefsync.logger.debug "event_id=upload_data_bag:filepath=#{f['fullpath']}"
  # NOTE(review): data_bag_iden sits outside the begin/rescue, so a path
  # without chef-repo/data_bags raises out of this method — confirm intended
  db_iden = data_bag_iden(f['fullpath'])
  begin
    validate_json(f, 'id')
    # remember [bag, item] so cleanup_json_files won't delete it server-side
    @db_file_list << [db_iden, f['json']['id']]
    # probe the server; knife prints "ERROR:" when the item is absent
    show_out = FS.cmdNoError "#{@knife} data bag show #{db_iden} #{f['json']['id']}"
    if show_out.match("ERROR:") || git_delta || @force_upload
      FS.cmd "#{@knife} data bag create #{db_iden}"
      FS.cmd "#{@knife} data bag from file #{db_iden} #{f['fullpath']}"
      Gitchefsync.logger.info "event_id=databag_uploaded:file_json_name=#{f['json']['id']}:file=#{f['fullpath']}"
      @audit.addEnv(f['fullpath'],'UPDATE' )
    else
      Gitchefsync.logger.debug "event_id=data_bag_not_uploaded:file_json_name=#{f['json']['id']}:file=#{f['fullpath']}"
      @audit.addEnv(f['fullpath'],'EXISTING')
    end
  rescue ValidationError => e
    Gitchefsync.logger.error("event_id=validation_error:msg=#{e.message}")
    @audit.addEnv(f['fullpath'],'UPDATE', e )
  end
end
171
+
172
# Uploads one role json to the chef server via knife.
# f: hash with 'fullpath', 'basename' and parsed 'json'; git_delta forces the
# upload when the repo changed. Records UPDATE/EXISTING audit entries.
def upload_role(f, git_delta)
  Gitchefsync.logger.debug "event_id=upload_role:fullpath=#{f['fullpath']}"
  begin
    validate_json(f, 'name')
    # remember the name so cleanup_json_files won't delete it server-side
    @role_file_list << f['json']['name']
    # probe the server; knife prints "ERROR:" when the role is absent
    show_out = FS.cmdNoError "#{@knife} role show #{f['json']['name']}"
    if show_out.match("ERROR:") || git_delta || @force_upload
      FS.cmd "#{@knife} role from file #{f['fullpath']} --yes"
      Gitchefsync.logger.info "event_id=role_uploaded:file_json_name=#{f['json']['name']}:file=#{f['fullpath']}"
      @audit.addEnv(f['fullpath'],'UPDATE' )
    else
      Gitchefsync.logger.debug "event_id=role_not_uploaded:file_json_name=#{f['json']['name']}:file=#{f['fullpath']}"
      @audit.addEnv(f['fullpath'],'EXISTING' )
    end
  rescue ValidationError => e
    Gitchefsync.logger.error("event_id=validation_error:msg=#{e.message}")
    @audit.addEnv(f['fullpath'],'UPDATE', e )
  end
end
191
+
192
# Deletes environments, data bag items (and emptied bags) and roles that
# exist on the chef server but were not seen in git during this run (the
# *_file_list accumulators populated by the upload_* methods), then writes
# and trims the audit log.
def cleanup_json_files
  Gitchefsync.logger.info "cleanup_json_files"

  #list env and compare on the server, deleting ones that aren't in git
  knifeUtil = KnifeUtil.new(@knife, @stage_filepath)

  delta_env_list = knifeUtil.listEnv() - @env_file_list
  Gitchefsync.logger.info "event_id=env_diff:delta=#{delta_env_list}"
  # MAND-672
  delta_env_list.each do |env_name|
    # TODO: Audit file may not be correct if someone manually
    # uploaded an environment file with json 'name' variable different
    # then actual environment filename.
    a = AuditItem.new(env_name,'',nil)
    a.setAction "DEL"
    FS.cmd "#{@knife} environment delete #{env_name} --yes"
    Gitchefsync.logger.info "event_id=environment_deleted:env_name=#{env_name}"
    @audit.add(a)
  end

  # data bag items are tracked as [bag, item] pairs
  delta_db_list = knifeUtil.listDB() - @db_file_list
  Gitchefsync.logger.info "event_id=data_bag_item_diff:delta=#{delta_db_list}"
  delta_db_list.each do |bag, item|
    # TODO: Audit file may not be correct if someone manually
    # uploaded an data bag with item json 'id' variable different
    # then actual json filename.
    a = AuditItem.new("BAG: #{bag} ITEM: #{item}",'',nil)
    a.setAction "DEL"
    FS.cmd "#{@knife} data bag delete #{bag} #{item} --yes"
    Gitchefsync.logger.info "event_id=data_bag_item_deleted:bag=#{bag}:item=#{item}"
    @audit.add(a)

    # when the bag has no items left, delete the (now empty) bag as well
    items_remaining = knifeUtil.showDBItem(bag)
    if items_remaining.empty?
      a = AuditItem.new("BAG: #{bag}",'',nil)
      a.setAction "DEL"
      FS.cmd "#{@knife} data bag delete #{bag} --yes"
      Gitchefsync.logger.info "event_id=data_bag_deleted:bag=#{bag}"
      @audit.add(a)
    end
  end

  delta_role_list = knifeUtil.listRole() - @role_file_list
  Gitchefsync.logger.info "event_id=role_diff:delta=#{delta_role_list}"
  delta_role_list.each do |role_name|
    # TODO: Audit file may not be correct if someone manually
    # uploaded an role file with json 'name' variable different
    # then actual role filename.
    a = AuditItem.new(role_name,'',nil)
    a.setAction "DEL"
    FS.cmd "#{@knife} role delete #{role_name} --yes"
    Gitchefsync.logger.info "event_id=role_deleted:role_name=#{role_name}"
    @audit.add(a)
  end

  #TODO: must create audit for removal

  @audit.write
  #trim the audit file
  @audit.trim(@audit_keep_trim)
end
253
+
254
# Walks every repo's chef-repo tree for *.json files and uploads each one
# according to its type (environment, data bag item, role), then removes
# server-side leftovers via cleanup_json_files.
def update_json_files
  Gitchefsync.logger.info "event_id=update_json_files"
  @env_file_list.clear
  # seed with the built-in _default environment so it is never deleted
  @env_file_list << "_default"
  @db_file_list.clear
  @role_file_list.clear

  @repo_list.each do |repo|
    # recursive glob for json files under <checkout>/chef-repo
    env_dir = repo.chef_path + "/**/*json"

    Dir.glob(env_dir).each do |file|

      file_attr = Hash.new()
      file_attr['json'] = reject_json(file)
      # unparseable files were already logged/audited by reject_json
      next if file_attr['json'].nil?
      file_attr['type'] = json_type(file)
      file_attr['filename'] = File.basename(file)
      file_attr['basename'] = File.basename(file).chomp(".json")
      file_attr['fullpath'] = file

      if file_attr['type'].eql? "env"
        upload_env(file_attr, repo.git_delta)
      elsif file_attr['type'].eql? "db"
        upload_db(file_attr, repo.git_delta)
      elsif file_attr['type'].eql? "role"
        upload_role(file_attr, repo.git_delta)
      end
    end
  end

  self.cleanup_json_files
end
286
+ end
287
+
288
# sync all environment, data_bags and roles json in repo(s):
# stages/updates the configured environment repo, then pushes every json
# file in it to the chef server and prunes stale server-side entries.
def self.syncEnv
  logger.info "event_id=env_sync_start"

  #TODO: Auto discover `chef-repo` type repositories known by chefbot
  url = @config['git_env_repo']
  # rewrite on a copy — the original gsub! mutated the string inside @config
  url = url.gsub("http://", "https://") if url.start_with? "http://"
  envRepo = EnvRepo.new(url)
  if !@config['sync_local']
    envRepo.sync_repo
  else
    logger.info "event_id=Skip_syncing_env_repos_from_git"
  end

  envSync = EnvSync.new([envRepo])
  logger.info "event_id=start_to_update_json_files"
  envSync.update_json_files
end
306
+
307
#Adding functionality to merge environment repos together
#by introspecting the "working directory" for chef-repo
#It's been assumed that all repositories have been pulled already
def self.mergeEnvRepos
  include FS,Git

  global_env_path = @git_local + "/global_chef_env"

  # reject (not reject!): reject! returns nil when nothing is removed,
  # which would make the each below blow up
  working_dir = Dir.entries(@git_local).reject {|item| item.start_with?(".") || item.eql?("global_chef_env")}
  working_dir.each do |dir|
    path = File.join(@git_local, dir)
    chef_repo_dir = path + "/chef-repo"
    # Dir.exist? — Dir.exists? was deprecated and removed in Ruby 3.2
    if Dir.exist?(chef_repo_dir)
      logger.info("event_id=processing_child_env_repo:dir=#{dir}")
      begin
        #add this repository as a git remote of the global env repo
        Git.cmd "cd #{global_env_path} && #{@git_bin} remote add #{dir} file://#{File.join(path,dir)}"
      rescue StandardError => e
        # remote add fails when the remote already exists; that's fine
        logger.info "event_id=git_remote_already_exists:#{e.message}"
      end
      begin
        #Merge the content via pull
        logger.info"event_id=env_merge:src=#{dir}"
        output = Git.cmd "cd #{global_env_path} && #{@git_bin} pull #{dir} master"
        logger.info "event_id=env_merge_sucess:msg=#{output}"
      rescue StandardError => e
        # merge failed: log and roll the global repo back to a clean state
        logger.error "event_id=env:output=#{output}"
        Git.cmd "cd #{global_env_path} && #{@git_bin} reset --hard origin/master"
      end
    end
  end
end
339
+ end
@@ -0,0 +1,35 @@
1
+
2
#Define custom exception classes here
module Gitchefsync

  # Custom error class for rescuing from all Gitlab errors.
  class Error < StandardError; end

  # command, system error
  class CmdError < Error; end

  #A Git error has occurred
  #or "fatal/error condition"
  class GitError < Error; end

  #A knife cookbook is frozen error
  class FrozenError < CmdError; end

  # presumably a berks (Berkshelf) command failure — raised elsewhere in the gem
  class BerksError < CmdError; end

  # presumably berks is not available/installed
  class NoBerksError < BerksError; end

  # a knife command failure
  class KnifeError < CmdError; end

  # presumably cookbook metadata missing during a knife operation
  class NoMetaDataError < KnifeError; end

  # a tar archive failed validation
  class InvalidTar < Error; end

  # audit read/write problem
  class AuditError < Error; end

  # no git groups found/matched
  class NoGitGroups < Error; end

  # json content failed validation (e.g. name/basename mismatch in env sync)
  class ValidationError < Error; end

  # invalid or incomplete configuration
  class ConfigError < Error; end
end
@@ -0,0 +1,102 @@
1
require 'open3'

require 'gitchefsync/config'
require 'gitchefsync/errors'
require 'gitlab'
3
+ #Git helper module
4
+ module Gitchefsync
5
+ module Git
6
+
7
#Verifies that a git binary is available on the PATH.
#Returns the MatchData for the "git version ..." banner on success,
#raises GitError otherwise.
def self.hasGit()
  begin
    git_ver = `git --version`
    return git_ver.match('git version .*')
  rescue
    # was `raise NoGit` — no NoGit class is defined anywhere in this gem,
    # so that raise would itself fail with NameError; use GitError instead
    raise GitError, "Git must be installed"
  end
end
16
+
17
#Returns true when +path+ contains an initialized git repository,
#i.e. a .git directory exists directly beneath it.
def self.gitInit (path)
  File.directory?(File.join(path, ".git"))
end
21
+
22
#a check to determine if a repository exists
#this is an inherently dangerous operation, as network issues could
#mask the real issue
#two stage check: first a git ls-remote (if successful, return true);
#second, if that fails, consult Gitlab to decide whether to trust the failure
def self.remoteExists(path, remote)
  Gitchefsync.logger.debug "event_id=checkRemoteExists:path=#{path}"
  begin
    self.cmd("cd #{path} && git ls-remote")
    return true
  rescue GitError => e
    Gitchefsync.logger.warn "event_id=git_pull_err:msg=#{e.message}"
  end
  # we've passed the first stage: ls-remote failed
  begin
    #arbitrary gitlab command
    Gitlab.users()
    #successfully called Gitlab and not been able to pull remotely: give up
    false
  rescue StandardError
    #in the face of not being able to contact Gitlab (for whatever reason)
    #assume the repository is alive
    # (was `rescue Exception`, which also swallowed SystemExit/signals;
    #  the unreachable trailing `return true` was removed)
    true
  end
end
47
+
48
#Return all git tags (which map to a SHA-1 hash) that only exist on monitoring branch (@rel_branch)
#Solves MAND-602 "ChefSync - Tagged Cookbooks on non-targeted git branches get synced"
def self.branchTags(path, branch)
  # drop untracked/ignored files so checkout cannot fail on local noise
  self.cmd "cd #{path} && git clean -xdf"

  self.cmd "cd #{path} && git checkout #{branch}"
  # first-parent-ish walk: hashes of commits reachable on the master branch graph
  git_graph = "git log --graph --oneline --branches=master --pretty='%H'"
  # keep only commits on the mainline ('^*'), extract the hash column and
  # resolve each to an exact tag (untagged commits are silenced via 2>/dev/null)
  branch_tags = "#{git_graph} | grep '^*' |" +
    " tr -d '|' | awk '{ print $2 }' |" +
    " xargs -n1 git describe --tags --exact-match 2>/dev/null"

  # diagnostic snapshots only — not used in the computation below
  status = `cd #{path} && git status`.split(/\n/)
  graph = `cd #{path} && #{git_graph}`.split(/\n/)
  Gitchefsync.logger.debug "event_id=branchTags: path=#{path}, status=#{status}"
  Gitchefsync.logger.debug "event_id=branchTags: path=#{path}, graph=#{graph}"

  tags = self.cmd("cd #{path} && #{branch_tags}").split(/\n/)
  Gitchefsync.logger.info "event_id=branchTags: path=#{path}, tags=#{tags}"
  tags
end
68
+
69
#executes a command line process
#raises GitError when git reports an error/fatal condition
#returns the combined stdout + stderr output
def self.cmd(x)
  ret = nil
  err = nil
  Open3.popen3(x) do |stdin, stdout, stderr, wait_thr|
    ret = stdout.read
    err = stderr.read
  end
  ret << err

  # git writes failures to stderr prefixed "error:" or "fatal:"; note the
  # combined buffer starts with stderr content only when stdout was empty
  # (same detection behaviour as before, two ifs merged into one)
  if ret.start_with?("error:", "fatal:")
    raise GitError, "stderr=#{ret}:cmd=#{x}"
  end
  ret
end
89
+
90
#Runs +x+ via the shell and returns stdout concatenated with stderr;
#never raises on command failure — callers inspect the output themselves.
def self.cmdNoError(x)
  captured = Open3.popen3(x) do |_stdin, stdout, stderr, _wait_thr|
    [stdout.read, stderr.read]
  end
  captured[0] << captured[1]
end
101
+ end
102
+ end
@@ -0,0 +1,83 @@
1
+ require 'open3'
2
+ require 'gitchefsync/errors'
3
+ # raise an exception here
4
+
5
+ module Gitchefsync
6
+ module FS
7
+
8
#copy the knife file over into <working_dir>/.chef/knife.rb and verify
#knife can reach the chef server ("knife client list").
#Raises KnifeError when the source knife file is missing.
#TODO: do this the ruby way
def self.knifeReady (working_dir,knife_file)
  chef_dir = working_dir + "/" + ".chef"
  # File.exist? — File.exists? was deprecated and removed in Ruby 3.2
  if !File.exist?(chef_dir)
    self.cmd "mkdir -p #{chef_dir}"
  end
  if !File.exist?(knife_file)
    raise(KnifeError, "knife file must be defined")
  end

  self.cmd "cp -f #{knife_file} #{chef_dir}/knife.rb"
  #check for knife readiness
  self.cmd "cd #{working_dir} && knife client list"
end
23
+
24
#executes a command line process (optionally with extra environment vars)
#raises CmdError when the command writes anything to stderr
#returns the stdout output
def self.cmd(x, env={})
  ret = nil
  err = nil
  Open3.popen3(env, x) do |stdin, stdout, stderr, wait_thr|
    ret = stdout.read
    err = stderr.read
  end
  if err.to_s != ''
    # label the captured stream correctly: this is stderr, not stdout
    # (message now matches Git.cmd's "stderr=" convention)
    raise CmdError, "stderr=#{err}:cmd=#{x}"
  end
  ret
end
39
+
40
#there is a host of errors associated with berks
#"DEPRECATED: Your Berksfile contains a site location ..."
#this method is to allow filtering on the message to determine
#what is a real error versus what is just a warning - at this time
#will just have no checking (delegates to the never-raising runner)
def self.cmdBerks(x)
  self.cmdNoError(x)
end
48
+
49
#Runs +x+ and returns stdout concatenated with stderr; never raises on
#command failure (callers inspect the combined output themselves).
def self.cmdNoError(x)
  streams = Open3.popen3(x) do |_stdin, stdout, stderr, _wait_thr|
    [stdout.read, stderr.read]
  end
  streams[0] << streams[1]
end
60
+
61
#Builds a flattened identifier from +path+: the extension-stripped basename
#plus every ancestor directory name below the directory named +find+,
#joined with underscores (e.g. .../chef-repo/a/b.json, "chef-repo" -> "a_b").
#Returns nil when no ancestor named +find+ exists before the filesystem root.
def self.flatten(path, find)
  parts = [File.basename(path, ".*")]
  cursor = path
  loop do
    cursor = File.expand_path("..", cursor)
    return nil if cursor == "/"
    segment = File.basename(cursor)
    break if segment == find
    parts.unshift(segment)
  end
  parts.join("_")
end
73
+
74
#Walks up from +path+ and returns the full path of the nearest ancestor
#directory named +find+, or nil when the filesystem root is reached first.
def self.getBasePath(path, find)
  current = path
  loop do
    current = File.expand_path("..", current)
    return nil if current == "/"
    return current if File.basename(current) == find
  end
end
82
+ end
83
+ end