gitchefsync 0.6.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,516 @@
1
+ require 'optparse'
2
+ require 'gitlab'
3
+ require 'json'
4
+ require 'open3'
5
+ require 'gitchefsync/version'
6
+ require 'gitchefsync/git_util'
7
+ require 'gitchefsync/errors'
8
+ require 'gitchefsync/opts'
9
+ require 'gitchefsync/io_util'
10
+ require 'gitchefsync/env_sync'
11
+ require 'gitchefsync/audit'
12
+ require 'gitchefsync/knife_util'
13
+ require 'gitchefsync/config'
14
+ require 'gitchefsync/common'
15
+ require 'gitchefsync/notify'
16
+ require 'gitchefsync/schedule'
17
+
18
+ module Gitchefsync
19
+ # include Gitchefsync::Configuration
20
+
21
+ #A summary of actions and CLI options
22
+ def self.help
23
+ puts "Usage: gitchefsync [operation] -c config_file.json -t gitlab_token [--login=gitlabuser --password=gitlabpassword --syslog]"
24
+ puts "\tgitchefsync runMasterSync -c config_file.json -t gitlab_token"
25
+ puts "\tgitchefsync runSousSync -c config_file.json"
26
+ puts "\tgitchefsync syncCookbooks -c config_file.json -t gitlab_token"
27
+ puts "\tgitchefsync syncCookbooksLocal -c config_file.json"
28
+ puts "\tgitchefsync syncEnv -c config_file.json -t gitlab_token"
29
+ puts "\tgitchefsync stagedUpload -c config_file.json"
30
+ puts "\tgitchefsync reconcile -c config_file.json -t gitlab_token"
31
+ puts "\tgitchefsync gitCleanup -c config_file.json -t gitlab_token"
32
+ puts "\tgitchefsync trimAudit -c config_file.json"
33
+ end
34
+
35
+ #trims the environment and cookbook audits, keeping @audit_keep_trim
36
+ #number of files
37
+ def self.trimAudit
38
+ logger.debug("event_id=trim_audit_files:keep=#{@audit_keep_trim}")
39
+ audit = Audit.new(@config['stage_dir'], 'env' )
40
+ audit.trim(@audit_keep_trim)
41
+
42
+ audit = Audit.new(@config['stage_dir'], 'cb' )
43
+ audit.trim(@audit_keep_trim)
44
+
45
+ end
46
+
47
+ def self.notifyFromAudit
48
+
49
+ notification = Notification.new(@config['smtp_server'])
50
+
51
+ notification.notifyFromAudit(@config['stage_dir'], 'cb' )
52
+ notification.singleNotifyFromAudit(@config['stage_dir'],'env',@config['default_notify_email'])
53
+
54
+ notification.close
55
+ end
56
+
57
+
58
+ #performs a git synchronization of cookbooks, pulling
59
+ #in information from the configured set of gitlab groups
60
+ #from each of the configured groups in sync-config.json
61
+ #pull all the projects associated with each group
62
+ #installed in working_directory, as specified in sync-config.json
63
+ #
64
+ #Each repository will only pull from configured release branch
65
+ #And fetch tags associated with that branch
66
+ #The last process is to invoke syncCookbooksLocal
67
+ def self.syncCookbooks
68
+ include FS
69
+
70
+
71
+
72
+ FS.knifeReady(@options[:git_local], @options[:knife_config])
73
+
74
+ if !@config['sync_local']
75
+
76
+ self.pullCookbooks()
77
+ else
78
+ logger.warn "event_id=Skip_cookbook_git_sync:path=#{@git_local}"
79
+ end
80
+ #git is synchronized (other than deletion - see gitCleanup if you want to clean up)
81
+ #move to synchronization on the local file system
82
+ self.syncCookbooksLocal
83
+ end
84
+
85
+ #Pull all the cookbooks that are configured via the configuration policy
86
+ #in sync-config.json
87
+ #For auto-discovery, pull every project
88
+ #that this user has access to
89
+ def self.pullCookbooks
90
+ Gitlab.private_token = @token
91
+
92
+ group_names = (@config['gitlab_group_names'] or [])
93
+ group_ids = (@config['gitlab_group_ids'] or [])
94
+
95
+ if @config['gitlab_autodiscover']
96
+ # Find all projects known by gitlab-token
97
+ # Determine which of these projects contains .gitchefsync.yml at HEAD of default branch
98
+ self.pullAllProjects
99
+ else
100
+ logger.debug "Synchronizing group names: #{group_names}"
101
+ logger.debug "Synchronizing group ids: #{group_ids}"
102
+ self.getAllGroupIDs(group_names, group_ids).each do |groupid|
103
+ group = Gitlab.group groupid
104
+ projects = group.to_hash['projects']
105
+ projects.each do |project|
106
+ self.pullProject(project)
107
+ end
108
+ end
109
+ end
110
+
111
+ repo_list = @config['cookbook_repo_list']
112
+
113
+ #explicitly set list of cookbook repositories
114
+ if repo_list != nil
115
+ logger.info "event_id=repo_list_sync:config=#{@config['cookbook_repo_list']}"
116
+ repo_list.each do |repo|
117
+ #match the "path: full_path/repo.git"
118
+ match = repo.split('/')
119
+ if match == nil
120
+ raise GitError, "Can not parse #{repo}"
121
+ end
122
+ path = match[match.length-1]
123
+ path = path[0..path.length-5]
124
+ begin
125
+ project_path = @git_local + "/" + path
+ self.updateGit(project_path, repo)
126
+ rescue GitError => e
127
+ logger.error "event_id=git_error:msg=#{e.message}:trace=#{e.backtrace}"
128
+ logger.error "event_id=remove_project_path: #{project_path}"
129
+ FS.cmd "rm -rf #{project_path}"
130
+ end
131
+ end
132
+ end
133
+ end
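For reference, the repo_list branch above derives each local checkout directory from the repository URL by taking the last path segment and stripping its ".git" suffix. A minimal standalone sketch of that derivation (the URL and working directory are taken from the sample sync-config.json later in this diff):

```ruby
repo      = "https://gitlab.rim.net/mand-ems/iems.git"  # entry from cookbook_repo_list
git_local = "/tmp/mandolin/sync"                        # working_directory from the sample config

path = repo.split('/').last            # => "iems.git"
path = path[0..path.length - 5]        # => "iems" (drops the trailing ".git")
project_path = git_local + "/" + path  # => "/tmp/mandolin/sync/iems"
```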
134
+ #Cycle through the working directory to see if a repo was deleted,
135
+ #by checking whether its remote repository still exists
136
+ def self.gitCleanup
137
+ include Git,FS
138
+ cookbook_dirs = Dir.entries(@git_local).reject! {|item| item.start_with?(".") }
139
+ cookbook_dirs.each do |dir|
140
+ if !Git.remoteExists(dir,@rel_branch)
141
+
142
+ #delete tarballs associated with this repo (identified by directory name);
143
+ #subsequent calls to "reconcile" will clean up the chef server
144
+ cookbook = KnifeUtil.new(@knife,dir).parseMetaData(dir)
145
+ if cookbook != nil
146
+ #remove all files associated with this cookbook name
147
+ files = @stage_dir +"/" + cookbook.name() + "-*tar.gz"
148
+ FS.cmd("rm -fr #{files}")
149
+
150
+ end
151
+ end
152
+ end
153
+ end
154
+
155
+ #For each repository in the working directory (defined by sync-config.json)
156
+ #checkout each tag
157
+ # 1. upload to the configured chef server via a berks upload
158
+ # 2. package the cookbook in the stage_dir (defined in sync-config.json)
159
+ # 3. create an audit of each cookbook that was created
160
+ #
161
+ #param options - the list of options
162
+ def self.syncCookbooksLocal
163
+ include FS,Git
164
+
165
+ logger.info "event_id=stage_cookbooks:git_local=#{@git_local}"
166
+ FS.knifeReady(@options[:git_local], @options[:knife_config])
167
+ ret_status = Hash.new
168
+
169
+
170
+ #not sure if this should be globally governed?
171
+ audit = Audit.new(@config['stage_dir'], 'cb')
172
+
173
+
174
+ knifeUtil = KnifeUtil.new(@knife, @git_local)
175
+ #Have a delta point: interact with the chef server to identify delta
176
+ listCB = knifeUtil.listCookbooks
177
+
178
+ cookbook_dirs = Dir.entries(@git_local).reject! {|item| item.start_with?(".") }
179
+ cookbook_dirs.each do |dir|
180
+
181
+ path = File.join(@git_local, dir)
182
+
183
+ arr_tags = Git.branchTags(path, @rel_branch)
184
+
185
+
186
+ #match tag against version in metadata.rb
187
+ #possible error condition
188
+ arr_tags.each do |tag|
189
+
190
+
191
+ begin
192
+ logger.debug "event_id=git_checkout:path=#{path}:tag=#{tag}"
193
+ Git.cmd "cd #{path} && #{@git_bin} checkout #{tag}"
194
+
195
+ self.processCookbook(path,audit)
196
+ rescue NoMetaDataError => e
197
+ #No audit written on failure to parse metadata
198
+ logger.info "event_id=nometadata:dir=#{dir}"
199
+ next
200
+ rescue KnifeError => e
201
+ #No audit written on knife command failure
202
+ logger.error "event_id=cmd_error:#{e.message}:trace=#{e.backtrace}"
203
+ next
204
+ rescue NoBerksError => e
205
+ #No audit written when no Berksfile is present
206
+ logger.error "event_id=cmd_error:#{e.message}:trace=#{e.backtrace}"
207
+ next
208
+
209
+ rescue Exception => e
210
+
211
+ logger.error "event_id=git_error:msg=#{e.message}:trace=#{e.backtrace}"
212
+ cookbook = Cookbook.new(dir,tag) if cookbook.nil?
213
+ audit.addCookbook(cookbook,"ERROR",e)
214
+ next
215
+ end
216
+ end
217
+ end
218
+
219
+
220
+ #write out the audit file
221
+ audit.write
222
+ #clean the audit files
223
+ audit.trim(@audit_keep_trim)
224
+ end
225
+
226
+ #Process the cookbook from the working directory's path (or path specified)
227
+ #If the cookbook exists on the server, don't package or upload
228
+ #we may want to add one other condition to force this
229
+ #packaging behaviour and hence rsync
230
+ def self.processCookbook(path,audit)
231
+ knifeUtil = KnifeUtil.new(@knife, @git_local)
232
+ cookbook = knifeUtil.parseMetaData(path)
233
+ logger.debug "event_id=processing:cookbook=#{cookbook}"
234
+
235
+ if cookbook != nil
236
+ stage_tar = @config['stage_dir'] +"/" + cookbook.berksTar()
237
+ tar_exists = File.exists?(stage_tar)
238
+ end
239
+
240
+ begin
241
+ if (cookbook !=nil && (!knifeUtil.isCBinList(cookbook, self.serverCookbooks()) || !tar_exists ))
242
+ berks_tar = self.stageBerks(path , @config['stage_dir'])
243
+ #uploadBerks still writes a Berksfile if one is missing; will refactor this method
244
+ self.uploadBerks(path)
245
+ logger.debug("event_id=staging:cookbook=#{cookbook}:berks_tar=#{berks_tar}")
246
+ self.stageCBUpload(berks_tar, @stage_cb_dir, knifeUtil, self.serverCookbooks())
247
+ audit.addCookbook(cookbook) if berks_tar.nil?
248
+ logger.info "event_id=cookbook_staged:cookbook=#{cookbook}"
249
+
250
+ elsif cookbook !=nil && @config['force_package']
251
+ logger.info "event_id=cookbook_force_package:cookbook=#{cookbook}"
252
+ self.stageBerks(path, @config['stage_dir'])
253
+ else
254
+ audit.addCookbook(cookbook, "EXISTING")
255
+ logger.info "event_id=cookbook_untouched:cookbook=#{cookbook}"
256
+ end
257
+ rescue BerksError => e
258
+ logger.error "event_id=berks_package_failure:msg=#{e.message}:trace=#{e.backtrace}"
259
+ audit.addCookbook(cookbook, "ERROR", e)
260
+ end
261
+
262
+ Git.cmd "cd #{path} && git clean -xdf"
263
+ end
264
+
265
+ #do a berks upload of the path
266
+ #this will end up using sources in Berksfile
267
+ #which is not good for the production sync
268
+ def self.uploadBerks(path)
269
+ include FS
270
+
271
+ begin
272
+ if File.exists?(File.join(path, "Berksfile"))
273
+ logger.debug "Berkshelf orginally used in this tagged version of cookbook"
274
+ elsif File.exists?(File.join(path, "metadata.rb"))
275
+ logger.debug "Berkshelf was not orginally used in this tagged version of cookbook"
276
+ logger.info "event_id=create_berks:path=#{path}"
277
+ berksfile = File.new(File.join(path, "Berksfile"), "w")
278
+
279
+ version = FS.cmd "#{@berks} -v"
280
+ if version.start_with?("3.")
281
+ berksfile.puts("source \"https:\/\/api.berkshelf.com\"\nmetadata")
282
+ else
283
+ berksfile.puts("site :opscode\nmetadata")
284
+ end
285
+ berksfile.close
286
+ else
287
+ raise NoBerksError, "Unable to locate Berks file for #{path}"
288
+ end
289
+
290
+ if @berks_upload
291
+ logger.info "event_id=berks_install_upload&cookbook=#{path}"
292
+
293
+ out = FS.cmdBerks "cd #{path} && rm -f Berksfile.lock && #{@berks} install && #{@berks} upload"
294
+
295
+ logger.info "event_id=berks_upload=#{out}"
296
+ else
297
+ logger.debug "event_id=no_berks_upload&cookbook=#{path}"
298
+ end
299
+ rescue Exception => e
300
+ raise BerksError.new(e.message)
301
+ end
302
+ end
303
+
304
+
305
+ #do an install and package the berks cookbook
306
+ #in a staging directory
307
+ #returns the path to the berks tar file
308
+ def self.stageBerks(path, stage_dir)
309
+ include FS
310
+ begin
311
+ if File.exists?(File.join(path, "Berksfile"))
312
+ logger.debug "event_id=Stage_cookbook:path=#{path}"
313
+
314
+
315
+ #get the name from metadata if available
316
+ cookbook = KnifeUtil.new(@knife,path).parseMetaData(path)
317
+ if cookbook != nil
318
+ #remove residual tar - this could be problematic if there are tars in the
319
+ #cookbook
320
+ FS.cmd "rm -f #{path}/#{cookbook.berksTar}"
321
+
322
+ #Since cmdBerks doesn't raise an exception, we must provide an alternate check
323
+ out = FS.cmdBerks "cd #{path} && #{@berks} package #{cookbook.berksTar}"
324
+ logger.info "event_id=berks_package=#{out}"
325
+ if File.exists? "#{path}/#{cookbook.berksTar}"
326
+
327
+ # empty tarballs in staging produced errors in staged upload
328
+ # this can happen when Berksfile is a blank file
329
+ file_count = FS.cmd "tar tf #{path}/#{cookbook.berksTar} | wc -l"
330
+ if file_count.to_i > 1
331
+ FS.cmd "mv #{path}/#{cookbook.berksTar} #{stage_dir}"
332
+ else
333
+ logger.info "event_id=berks_package_produced_empty_tarball: #{path}/#{cookbook.berksTar}"
334
+ FS.cmd "rm -f #{path}/#{cookbook.berksTar}"
335
+ raise BerksError.new("`berks package` produced empty tarball: #{path}/#{cookbook.berksTar}")
336
+ end
337
+
338
+ else
339
+ logger.info "event_id=berks_package_failed: #{path}/#{cookbook.berksTar}"
340
+ raise BerksError.new("Something went wrong generating berks file: #{out}")
341
+ end
342
+
343
+ return "#{stage_dir}/#{cookbook.berksTar}"
344
+ end
345
+ else
346
+ raise NoBerksError, "Unable to locate Berks file for #{path}"
347
+ end
348
+ rescue NoBerksError => e
349
+ raise e
350
+ rescue Exception => e
351
+ raise BerksError.new(e.message)
352
+ end
353
+
354
+ end
355
+
356
+
357
+
358
+ #Find all versions of cookbooks from the server via knife command
359
+ #From the stage directory, do knife upload on each of the tars
360
+ #if the cookbook and version exist on the server, don't attempt the knife upload
361
+ #Each tar file is extracted to a cookbook directory
362
+ #where a knife upload -a is attempted on the entire directory
363
+ #as each tar is processed the directory is cleaned
364
+ def self.stagedUpload
365
+ include FS
366
+
367
+
368
+ #read in the latest audit and check it for errors
369
+ audit = Audit.new(@config['stage_dir'],'cb' )
370
+
371
+ json = audit.parseLatest
372
+ if json != nil && audit.hasError(json)
373
+ logger.error "event_id=audit_error:audit=#{json}"
374
+
375
+ #Do not raise AuditError because it halts entire service
376
+ #Read MAND-613 for more information
377
+ #TODO: MAND-614 - Notification needed for errors in gitchefsync audit file
378
+ #raise AuditError
379
+ end
380
+
381
+ cookbook_dir = @stage_cb_dir
382
+
383
+ FS.cmd "mkdir -p #{cookbook_dir}"
384
+ FS.knifeReady(cookbook_dir,@options[:knife_config])
385
+
386
+ #Check on what is uploaded, knife util creates a list for us
387
+ knifeUtil = KnifeUtil.new(@knife, cookbook_dir)
388
+ listCB = knifeUtil.listCookbooks
389
+ logger.debug "list: #{listCB}"
390
+ stage = @stage_dir + "/*tar.gz"
391
+
392
+ Dir.glob(stage).each do |file|
393
+ logger.debug "event_id=stage_upload:file=#{file}"
394
+ stageCBUpload(file, cookbook_dir, knifeUtil, listCB)
395
+ end
396
+ end
397
+
398
+
399
+ #Extracts and uploads via knife
400
+ #from the staging directory
401
+ #don't like that knife or list instance is passed in, for later refactoring
402
+ def self.stageCBUpload(file, cookbook_dir, knifeUtil, listCB, forceUpload = false)
403
+ begin
404
+ logger.info "knife_cookbook_upload:file=#{file}:dest=#{cookbook_dir}"
405
+ match = File.basename(file).match(/(.*)-(\d+\.\d+\.\d+)/)
406
+
407
+ if match ==nil || match.length != 3
408
+ logger.error "event_id=invalid_tar:file=#{file}"
409
+ raise InvalidTar, "Invalid tar name #{file}"
410
+ end
411
+
412
+ logger.debug "In chef server? #{knifeUtil.inList(match[1],match[2],listCB)}"
413
+
414
+ if !knifeUtil.inList(match[1],match[2],listCB) || forceUpload
415
+ logger.info "event_id=stage_upload:cookbook=#{match[1]}:ver=#{match[2]}:dir=#{cookbook_dir}"
416
+ FS.cmd "tar -xf #{file} -C #{cookbook_dir}"
417
+ out = FS.cmd "cd #{cookbook_dir} && #{@knife} cookbook upload -a --force --cookbook-path=#{cookbook_dir}/cookbooks"
418
+
419
+ logger.debug "event_id=stage_upload_output=\n#{out}"
420
+ else
421
+ logger.info"event_id=stage_no_upload:cookbook=#{match[1]}:ver=#{match[2]}"
422
+ end
423
+ rescue CmdError => e
424
+ #logger.error "event_id=cmd_err:#{e.message}"
425
+
426
+ raise KnifeError.new(e.message)
427
+ rescue InvalidTar => e
428
+ logger.error "event_id=invalid_tar:msg=Continuing on invalid tar"
429
+ ensure
430
+ if File.exists?(cookbook_dir)
431
+ FS.cmd "rm -fr #{cookbook_dir}/*"
432
+ end
433
+ end
434
+
435
+ end
436
+
437
+ def self.init(opts)
438
+ configure(opts)
439
+
440
+ end
441
+
442
+ #Compares the staging directory with what is on the chef server
443
+ #WARN: the stage directory should be populated - cookbooks on the server
444
+ #can be deleted if the staging directory is empty
445
+ #verify that we've had at least one successful run, by virtue of the
446
+ #audit file created
447
+ #
448
+ #Does a 2-way compare of the list of cookbooks on the chef server
449
+ #and the berks tar packages found in staging directory
450
+ #Adding cookbooks if they aren't found on the server,
451
+ #Deleting cookbooks from the server if they are no longer staged
452
+ #No audit file is generated here - TODO: move the cookbook audit object to module scope
453
+ def self.reconcile
454
+
455
+ #Validation
456
+ if Audit.new(@config['stage_dir'], 'cb').latest == nil
457
+ logger.warn "event_id=reconcile_no_audit_detected"
458
+ return
459
+ end
460
+
461
+ logger.info "event_id=reconcile:dir=#{@stage_dir}"
462
+ knifeUtil = KnifeUtil.new(@knife, @git_local)
463
+ #Here is what is in the server
464
+ listCB = knifeUtil.listCookbooks
465
+
466
+ list_stage = Array.new
467
+ tmp_dir = @stage_cb_dir + "/.tarxf"
468
+ FS.cmd("mkdir -p #{tmp_dir}")
469
+
470
+ #Compile what is happening in the stage directory
471
+ stage = @stage_dir + "/*tar.gz"
472
+ Dir.glob(stage).each do |file|
473
+
474
+ begin
475
+ logger.debug "event_id=reconcile_file:file=#{file}"
476
+
477
+ FS.cmd "tar -tf #{file} | grep metadata.rb | tar -xf #{file} -C #{tmp_dir}"
478
+ local_list = Array.new
479
+ files = tmp_dir + "/cookbooks/*/metadata.rb"
480
+ Dir.glob(files) do |metadata|
481
+
482
+ cookbook = knifeUtil.parseMetaData(File.expand_path("..",metadata))
483
+ if cookbook !=nil
484
+ list_stage << cookbook
485
+ local_list << cookbook
486
+ end
487
+ end
488
+ #of the local list do we have all of them in chef?
489
+ add_list = knifeUtil.subtract(local_list,listCB)
490
+ logger.debug "local_list #{local_list} delta: #{add_list}"
491
+
492
+ self.stageCBUpload(file,@stage_cb_dir,knifeUtil,listCB,true) if !add_list.empty?()
493
+ rescue KnifeError => e
494
+ logger.warn "#{e.message}"
495
+ ensure
496
+ #finally remove what was in the berks tar and in the working tarxf dir
497
+ FS.cmd("rm -fr #{tmp_dir}/*")
498
+ end
499
+
500
+ end
501
+ #From the full list of staged cookbooks (list_stage)
502
+ #we have both sides (what is local) and what is on server
503
+ del_list = knifeUtil.subtract(listCB,list_stage)
504
+
505
+ if !del_list.empty?
506
+ logger.warn "event_id=del_cb_pending:cb=#{del_list}"
507
+ del_list.each do |cb|
508
+ logger.debug "deleting: #{cb}"
509
+ #deletion of cookbook - this currently doesn't check node usage
510
+ #so could have deleterious side effects
511
+ knifeUtil.delCookbook(cb)
512
+ end
513
+ end
514
+ end
515
+
516
+ end
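As an aside on the conventions above: stagedUpload, stageCBUpload and reconcile all assume the staged berks tarballs are named <cookbook>-<major.minor.patch>*. A minimal sketch of the name parse performed in stageCBUpload (the tarball name is hypothetical; the stage_dir value comes from the sample config below):

```ruby
# Hypothetical staged tarball; the regex is the one used in stageCBUpload above.
file  = "/tmp/staging/apache2-1.10.4.tar.gz"
match = File.basename(file).match(/(.*)-(\d+\.\d+\.\d+)/)

match[1]  # => "apache2" (cookbook name, checked against the chef server list)
match[2]  # => "1.10.4"  (version)
```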
data/spec/README.md ADDED
File without changes
@@ -0,0 +1,13 @@
1
+ {
2
+ "knife":"/usr/local/bin/knife",
3
+ "git":"/usr/bin/git",
4
+ "berks":"/usr/bin/berks",
5
+ "working_directory":"/tmp/mandolin/sync",
6
+ "knife_file":"/home/marcus/tmp/.chef/knife.rb",
7
+
8
+ "git_env_repo": "git@gitlab.rim.net:mand-common/global_chef_env.git",
9
+ "gitlab_url_type" : "http",
10
+ "sync_local" : "false",
11
+ "stage_dir" : "/tmp/staging",
12
+ "cookbook_repo_list" : ["https://gitlab.rim.net/mand-ems/iems.git"]
13
+ }
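This sample sync-config.json only exercises a subset of the keys the module reads (stage_dir, sync_local, working_directory, cookbook_repo_list, and so on). A rough sketch of how such a file is presumably consumed once --config has been parsed (the real loading lives in gitchefsync/opts and gitchefsync/config, which are not part of this diff, and the file path here is an assumption):

```ruby
require 'json'

# Sketch only: load the JSON config the way the --config option presumably does.
config = JSON.parse(File.read("spec/sync-config.json"))  # path is an assumption

config['stage_dir']           # => "/tmp/staging"
config['working_directory']   # => "/tmp/mandolin/sync"
config['cookbook_repo_list']  # => ["https://gitlab.rim.net/mand-ems/iems.git"]
```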
@@ -0,0 +1,89 @@
1
+ require 'rspec'
2
+ require 'gitchefsync'
3
+ require 'gitchefsync/opts'
4
+ require 'gitlab'
5
+
6
+ class DummyClass
7
+ extend Gitchefsync
8
+ def self.included base
9
+ base.extend ClassMethods
10
+ end
11
+
12
+ end
13
+
14
+ #
15
+ #
16
+ describe "gitchefsync" do
17
+
18
+ before(:each) do
19
+ puts "before"
20
+
21
+
22
+ Gitlab.endpoint = 'https://gitlab.rim.net/api/v3'
23
+ @args = args "sync-config.json"
24
+
25
+ @opts = Gitchefsync.parseAndConfigure(@args)
26
+
27
+ #make the staging directory - TODO possibly do this in main code
28
+ `mkdir -p #{@opts[:config]['stage_dir']}`
29
+ end
30
+
31
+ after :each do
32
+ puts "Clean up..."
33
+
34
+ end
35
+
36
+ #requires the gitlab private token to be in the environment (ENV['TOKEN'])
37
+ def args (configLoc)
38
+ conf = File.dirname(__FILE__) + "/#{configLoc}"
39
+ args = Array.new
40
+ args << "--private_token=#{ENV['TOKEN']}"
41
+ args << "--config=#{conf}"
42
+ return args
43
+ end
44
+
45
+ it "should be Configuration" do
46
+ #Gitchefsync.is_a Gitchefsync::Configuration
47
+ puts Gitchefsync.ancestors()
48
+
49
+ end
50
+
51
+ it "should process environment" do
52
+
53
+ puts "parse #{@ARGS}"
54
+ #@dummy_class.parseAndConfigure( nil )
55
+
56
+ Gitchefsync.syncEnv
57
+ end
58
+
59
+
60
+ it "should sync cookbooks" do
61
+
62
+ Gitchefsync.syncCookbooks()
63
+
64
+ end
65
+
66
+ it "should stage cookbooks" do
67
+
68
+ Gitchefsync.stagedUpload()
69
+ end
70
+
71
+ it "should work with no git group" do
72
+ Gitchefsync.parseAndConfigure( args("config-nogitgroup.json") )
73
+ Gitchefsync.syncCookbooks()
74
+ end
75
+
76
+ it "should be idempotent" do
77
+
78
+ end
79
+
80
+ it "should log syslog" do
81
+ arg = args("sync-config.json")
82
+ arg << "--syslog"
83
+ opts = Gitchefsync.parseAndConfigure( arg )
84
+
85
+
86
+ end
87
+
88
+ end
89
+
@@ -0,0 +1,68 @@
1
+ require 'gitchefsync'
2
+ require 'gitlab'
3
+
4
+
5
+ class WrapSync
6
+ extend Gitchefsync
7
+
8
+ end
9
+
10
+ #
11
+ #ALL specs are failing because of inheritance of all base methods
12
+ describe Gitchefsync do
13
+
14
+
15
+ before :each do
16
+ @sync = WrapSync.new
17
+ puts "before..."
18
+ @config = File.dirname(__FILE__) + "/sync-config.json"
19
+ @ARGS = Array.new
20
+ @ARGS << "--private_token=#{ENV['TOKEN']}"
21
+ @ARGS << "--config=#{@config}"
22
+ Gitlab.endpoint = 'https://gitlab.rim.net/api/v3'
23
+
24
+ Gitchefsync.checkGit
25
+
26
+ @options = Gitchefsync.parseAndConfigure( @ARGS )
27
+
28
+ end
29
+
30
+ after :each do
31
+ puts "Clean up..."
32
+ end
33
+
34
+
35
+
36
+ it "parsing success" do
37
+ config = File.dirname(__FILE__) + "/../bin/sync-config.json"
38
+
39
+ ARGS = Array.new
40
+ ARGS << "--private_token=xxxyyyzzz"
41
+ ARGS << "--config=#{config}"
42
+
43
+ options = Gitchefsync.parseAndConfigure( ARGS )
44
+
45
+ options[:private_token].should == "xxxyyyzzz"
46
+ end
47
+
48
+ it "parsing failure" do
49
+ begin
50
+ options = Gitchefsync.parseAndConfigure( ARGS )
51
+ rescue Exception => e
52
+ puts "#{e.message}"
53
+ end
54
+
55
+ end
56
+
57
+
58
+
59
+ it "syncing" do
60
+ puts(" sync ARGS: #{@ARGS}")
61
+ Gitchefsync.parseAndConfigure( @ARGS )
62
+ Gitchefsync::Env.sync
63
+
64
+
65
+ end
66
+
67
+
68
+ end
@@ -0,0 +1,13 @@
1
+ {
2
+ "knife":"/usr/local/bin/knife",
3
+ "git":"/usr/bin/git",
4
+ "berks":"/usr/bin/berks",
5
+ "working_directory":"/tmp/mandolin/sync",
6
+ "knife_file":"/home/marcus/tmp/.chef/knife.rb",
7
+ "git_groups": [4085],
8
+ "git_env_repo": "git@gitlab.rim.net:mand-common/global_chef_env.git",
9
+ "gitlab_url_type" : "http",
10
+ "sync_local" : "false",
11
+ "stage_dir" : "/tmp/staging",
12
+ "cookbook_repo_list" : ["https://gitlab.rim.net/mand-ems/iems.git"]
13
+ }