knife-uploader 0.1.2
- checksums.yaml +7 -0
- data/Gemfile +21 -0
- data/lib/chef/knife/uploader_base.rb +239 -0
- data/lib/chef/knife/uploader_data_bag.rb +310 -0
- data/lib/chef/knife/uploader_run_list.rb +108 -0
- data/lib/knife-uploader/version.rb +7 -0
- metadata +139 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
---
SHA1:
  metadata.gz: c4e25b5056710c266d3b7708ac768794455e7a12
  data.tar.gz: 0cc7d86418deb2cf24696c8cfdca728a475c1034
SHA512:
  metadata.gz: bf67f010c4cf6321f51f810b1d1db9de19a4a9a6f1583c34b7c4d4725c8621b58393037ed3326679fbcd7b1bbb579dfac5492eab3d4b5466cf47438eee50a25d
  data.tar.gz: 71c2a5401b1d8d71e09f74568421866c6f13fab89b6d9570134c64d5f5f3235c69287b92054065098982ade64b888431f1db44ee0e570527a995e180e5d97c90
data/Gemfile
ADDED
@@ -0,0 +1,21 @@
# Copyright (C) 2013 ClearStory Data, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Ensure that we're using UTF-8
Encoding.default_external = Encoding::UTF_8
Encoding.default_internal = Encoding::UTF_8

source 'https://rubygems.org'
gemspec
data/lib/chef/knife/uploader_base.rb
ADDED
@@ -0,0 +1,239 @@
# Copyright (C) 2013 ClearStory Data, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

require 'chef/knife'
require 'hashie'
require 'logger'
require 'celluloid'
require 'varia_model'

# "Lazy select" from http://www.michaelharrison.ws/weblog/?p=163
class Enumerator
  def lazy_select(&block)
    Enumerator.new do |yielder|
      self.each do |val|
        yielder.yield(val) if block.call(val)
      end
    end
  end

  def lazy_map(&block)
    Enumerator.new do |yielder|
      self.each do |val|
        yielder.yield(block.call(val))
      end
    end
  end
end

module KnifeUploader

  class KnifeConfigParser
    attr_reader :knife

    def initialize(knife_conf_path)
      @knife = {}
      instance_eval(IO.read(knife_conf_path), knife_conf_path)
    end

    def cookbook_path(path_list)
      @cookbook_path_list = path_list
    end

    def get_cookbook_path_list
      @cookbook_path_list
    end

    def data_bag_path(path)
      @data_bag_path = path
    end

    def get_data_bag_path
      @data_bag_path
    end

    def method_missing(meth, *args, &block)
      # skip
    end
  end

  module Utils
    class << self

      def sort_hash_keys(h)
        Hash[*h.sort.flatten(1)]
      end

      def recursive_sort_hash_keys(obj)
        if [Hash, Hashie::Mash, VariaModel::Attributes].include?(obj.class)
          Hash[*obj.sort.map {|k, v| [k, recursive_sort_hash_keys(v)] }.flatten(1)]
        elsif obj.instance_of?(Array)
          obj.map {|element| recursive_sort_hash_keys(element) }
        else
          obj
        end
      end

      def json_with_sorted_keys(h)
        JSON.pretty_generate(recursive_sort_hash_keys(h)) + "\n"
      end

    end
  end

  module BaseCommandMixin
    def self.included(includer)
      includer.class_eval do
        deps do
          require 'ridley'
          Celluloid.logger.level = Logger::ERROR
          require 'diffy'
        end

        option :pattern,
          :short => '-p PATTERN',
          :long => '--pattern PATTERN',
          :description => 'A regular expression pattern to restrict the set of objects to ' +
                          'manipulate',
          :proc => Proc.new { |value| Chef::Config[:knife][:pattern] = value }

        option :debug,
          :long => '--debug',
          :description => 'Turn on debug messages',
          :proc => Proc.new { |value| Chef::Config[:knife][:debug] = value }
      end
    end
  end

  class BaseCommand < Chef::Knife

    def initialize(args)
      super
      @pattern = locate_config_value(:pattern)
      if @pattern
        @pattern = Regexp.new(@pattern)
      else
        @pattern = // # matches anything
      end
    end

    def diff(a, b)
      ::Diffy::Diff.new(a, b, :context => 2)
    end

    def diff_color(a, b)
      diff(a, b).to_s(ui.color? ? :color : :text)
    end

    def debug(msg)
      if locate_config_value(:debug)
        ui.info("DEBUG: #{msg}")
      end
    end

    def locate_config_value(key, kind = :optional)
      raise unless [:required, :optional].include?(kind)
      key = key.to_sym
      value = config[key] || Chef::Config[:knife][key]
      if kind == :required && value.nil?
        raise "#{key} not specified"
      end
      value
    end

    def get_knife_config_path
      locate_config_value(:config_file, :required)
    end

    def parsed_knife_config
      unless @parsed_knife_config
        @parsed_knife_config = KnifeConfigParser.new(get_knife_config_path)
      end

      @parsed_knife_config
    end

    def get_chef_repo_path
      unless @chef_repo_path
        path_list = parsed_knife_config.get_cookbook_path_list
        path_list.each do |cookbooks_path|
          [cookbooks_path, File.expand_path('..', cookbooks_path)].each do |path|
            if ['.git', 'data_bags', 'environments', 'roles'].map do |subdir_name|
                 File.directory?(File.join(path, subdir_name))
               end.all?
              @chef_repo_path = path
            end
          end
        end

        raise "No chef repository checkout path could be determined using " +
              "cookbook paths #{path_list}" unless @chef_repo_path

        debug("Identified Chef repo path: #{@chef_repo_path}")
      end

      @chef_repo_path
    end

    def ridley
      unless @ridley
        knife_conf_path = get_knife_config_path

        # Check file existence (Ridley will throw a confusing error).
        raise "File #{knife_conf_path} does not exist" unless File.file?(knife_conf_path)

        @ridley = Ridley.from_chef_config(knife_conf_path, :ssl => { :verify => false })
        data_bag_secret_file_path = @ridley.options[:encrypted_data_bag_secret]
        unless data_bag_secret_file_path
          raise "No encrypted data bag secret location specified in #{knife_conf_path}"
        end

        unless File.file?(data_bag_secret_file_path)
          raise "File #{data_bag_secret_file_path} does not exist"
        end

        # The encrypted data bag secret has to be the value, even though the readme in Ridley 1.5.2
        # says it can also be a file name, so we have to re-create the Ridley object.
        @ridley = Ridley.new(
          server_url: @ridley.server_url,
          client_name: @ridley.client_name,
          client_key: @ridley.client_key,
          encrypted_data_bag_secret: IO.read(data_bag_secret_file_path),
          ssl: { verify: false }
        )
      end

      @ridley
    end

    def report_errors(&block)
      begin
        yield
      rescue => exception
        ui.fatal("#{exception}: #{exception.backtrace.join("\n")}")
        raise exception
      end
    end

    def run
      begin
        run_internal
      ensure
        # Cleanup code can be added here.
      end
    end
  end

end
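The Enumerator#lazy_select and #lazy_map patches above are what later lets the run-list command stream over node lists without building intermediate arrays. A minimal standalone sketch of how they compose (the numeric values are illustrative only, not part of the gem):

# Assuming the Enumerator patch above has been loaded:
squares_of_evens = (1..10).to_enum
  .lazy_select {|n| n.even? }
  .lazy_map {|n| n * n }
puts squares_of_evens.to_a.inspect  # => [4, 16, 36, 64, 100]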
data/lib/chef/knife/uploader_data_bag.rb
ADDED
@@ -0,0 +1,310 @@
# Copyright (C) 2013 ClearStory Data, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

require 'chef/knife/uploader_base'

module KnifeUploader

  module DataBagUtils
    class << self
      def decrypted_attributes(data_bag_item)
        begin
          [
            Hash[data_bag_item.attributes.map do
              |key, value| [key, key == "id" ? value : data_bag_item.decrypt_value(value)]
            end],
            true # decryption successful
          ]
        rescue OpenSSL::Cipher::CipherError, NoMethodError, NotImplementedError, ArgumentError => ex
          [data_bag_item.attributes.clone, false]
        end
      end
    end
  end

  class UploaderDataBagCommand < BaseCommand

    def list_data_bag_item_files(bag_name)
      Dir[File.join(get_data_bag_dir(bag_name), '*.json')].select do |file_path|
        data_bag_item_id_from_path(file_path) =~ @pattern
      end
    end

    def list_data_bag_item_ids(bag_name)
      list_data_bag_item_files(bag_name).map {|item_path| data_bag_item_id_from_path(item_path) }
    end

    def data_bag_item_id_from_path(item_path)
      File::basename(item_path).gsub(/\.json$/, '')
    end

    def override_attributes(item, new_attributes)
      item.attributes.clear
      item.from_hash(new_attributes)
    end

    def diff_data_bag_item(item, item_id, old_attributes, new_attributes, diff_comment_prefix,
                           desc1, desc2)
      old_attributes_formatted = Utils.json_with_sorted_keys(old_attributes)
      new_attributes_formatted = Utils.json_with_sorted_keys(new_attributes)

      if old_attributes_formatted == new_attributes_formatted
        ui.info("#{item_id} has no differences (no decryption attempted)\n\n")
        return false
      end

      override_attributes(item, old_attributes)
      old_decrypted, old_could_decrypt = DataBagUtils.decrypted_attributes(item)

      override_attributes(item, new_attributes)
      new_decrypted, new_could_decrypt = DataBagUtils.decrypted_attributes(item)

      if old_could_decrypt != new_could_decrypt
        if old_could_decrypt
          ui.warn("Could decrypt the old version of item #{item_id} but not the new one")
        else
          ui.warn("Could decrypt the new version of item #{item_id} but not the old one")
        end
      end

      old_decrypted_formatted = Utils.json_with_sorted_keys(old_decrypted)
      new_decrypted_formatted = Utils.json_with_sorted_keys(new_decrypted)

      # Encrypted data could differ but decrypted data could still be the same.
      if old_decrypted_formatted == new_decrypted_formatted
        ui.info("#{item_id} has differences before decryption " +
                "but no differences after decryption\n\n")
        return false
      end

      ui.info("#{diff_comment_prefix} data bag item #{item_id} " +
              "(#{old_could_decrypt ? 'decrypted' : 'raw'} #{desc1} vs." +
              " #{new_could_decrypt ? 'decrypted' : 'raw'} #{desc2}):\n" +
              diff_color(old_decrypted_formatted, new_decrypted_formatted) + "\n")
      true
    end

    def set_data_bag_items(bag_name)
      ensure_data_bag_dir_exists(bag_name)

      data_bag = ridley.data_bag.find(bag_name)
      if data_bag.nil?
        if @dry_run
          ui.warn("Data bag #{bag_name} does not exist on the Chef server, skipping")
          return
        else
          ui.info("Data bag #{bag_name} does not exist on the Chef server, creating")
          ridley.data_bag.create(:name => bag_name)
          data_bag = ridley.data_bag.find(bag_name)
        end
      end

      processed_items = Set.new()
      ignored_items = Set.new()
      updated_items = Set.new()

      verb = @dry_run ? 'Would update' : 'Updating'

      data_bag.item.all.sort_by {|item| item.chef_id }.each do |item|
        item_id = item.chef_id
        unless item_id =~ @pattern
          ignored_items << item_id
          next
        end

        processed_items << item_id

        new_attributes = load_data_bag_item_file(bag_name, item_id, :can_skip)
        next unless new_attributes

        item = time("Loaded data bag item #{item_id} from server", :debug) do
          data_bag.item.find(item_id)
        end
        old_attributes = item.attributes.clone

        if diff_data_bag_item(item, item_id, old_attributes, new_attributes, verb,
                              'Chef server version', 'local')
          updated_items << item_id
          unless @dry_run
            override_attributes(item, new_attributes)
            time("Saved data bag item #{item_id} to server", :info) { item.save }
          end
        end
      end

      # Load remaining data bag files.
      list_data_bag_item_files(bag_name).each do |item_path|
        item_id = data_bag_item_id_from_path(item_path)
        next if processed_items.include?(item_id) || ignored_items.include?(item_id)

        processed_items << item_id

        new_attributes = load_data_bag_item_file(bag_name, item_id, :must_exist)
        if @dry_run
          ui.info("Would create data bag item #{item_id} from #{item_path}\n\n")
        else
          time("Created data bag item #{item_id} from #{item_path}", :info) do
            data_bag.item.create(new_attributes)
          end
        end
        updated_items << item_id
      end

      unless updated_items.empty?
        ui.info("#{@dry_run ? 'Would update' : 'Updated'} data bag items: " +
                updated_items.sort.join(', ') + "\n\n")
      end
      ui.info("Processed #{processed_items.length} data bag items")
    end

    def get_data_bag_dir(bag_name)
      File.join(
        parsed_knife_config.get_data_bag_path || File::join(get_chef_repo_path, 'data_bags'),
        bag_name
      )
    end

    def ensure_data_bag_dir_exists(bag_name)
      data_bag_dir = get_data_bag_dir(bag_name)
      unless File.directory?(data_bag_dir)
        raise "#{data_bag_dir} does not exist or is not a directory"
      end
    end

    def load_data_bag_item_file(bag_name, item_id, mode)
      raise unless [:can_skip, :must_exist].include?(mode)

      data_bag_dir = get_data_bag_dir(bag_name)
      item_file_path = File::join(data_bag_dir, item_id + '.json')
      if File.file?(item_file_path)
        contents = open(item_file_path) {|f| JSON.load(f) }
        unless contents['id'] == item_id
          raise "File #{item_file_path} contains an invalid id (expected #{item_id})"
        end
        contents
      elsif mode == :can_skip
        ui.warn("Data bag item file #{item_file_path} does not exist, skipping\n\n")
        nil
      else
        raise "File #{item_file_path} does not exist"
      end
    end

    def time(description, log_level, &block)
      raise unless [:info, :debug].include?(log_level)
      start_time = Time.now
      result = yield
      msg = "%s in %.3f seconds" % [description, Time.now - start_time]
      if log_level == :info
        ui.info(msg)
      else
        debug(msg)
      end
      result
    end
  end

  class UploaderDataBagDiff < UploaderDataBagCommand

    include BaseCommandMixin

    banner 'knife uploader data bag diff BAG [BAG2]'

    def diff_data_bag_item_files(bag_name1, bag_name2)
      ensure_data_bag_dir_exists(bag_name1)
      ensure_data_bag_dir_exists(bag_name2)

      items_to_compare = {}
      processed_items = Set.new()
      list_data_bag_item_ids(bag_name1).each do |item_id|
        item2 = load_data_bag_item_file(bag_name2, item_id, :can_skip)
        if item2
          item1 = load_data_bag_item_file(bag_name1, item_id, :must_exist)
          processed_items << item_id
          items_to_compare[item_id] = [item1, item2]
        end
      end
      list_data_bag_item_ids(bag_name2).each do |item_id|
        unless processed_items.include?(item_id)
          item1 = load_data_bag_item_file(bag_name1, item_id, :can_skip)
          if item1
            item2 = load_data_bag_item_file(bag_name2, item_id, :must_exist)
            items_to_compare[item_id] = [item1, item2]
          end
        end
      end

      if items_to_compare.empty?
        ui.error("Did not find any data bag items to compare between #{bag_name1} and #{bag_name2}")
        return
      end

      # Find at least one data bag item on the Chef server. This is necessary to be able to
      # decrypt data bags for comparison.
      data_bag1 = ridley.data_bag.find(bag_name1)
      item = nil
      items_to_compare.keys.sort.each do |item_id|
        item = data_bag1.item.find(item_id)
        break if item
      end
      unless item
        fatal_error("Could not find any of the following items in the data bag #{bag_name1}: " +
                    items_to_compare.keys.sort.join(', '))
      end

      items_to_compare.sort.each do |item_id, attributes_pair|
        item.id = item_id
        diff_data_bag_item(item, item_id, *attributes_pair, 'Differences for',
                           "local #{bag_name1}", "local #{bag_name2}")
      end
    end

    def run_internal
      if name_args.size < 1 || name_args.size > 2
        ui.fatal("One or two arguments (data bag names) expected")
        show_usage
        exit(1)
      end

      report_errors do
        if name_args.size == 1
          @dry_run = true
          report_errors { set_data_bag_items(name_args.first) }
        else
          diff_data_bag_item_files(name_args[0], name_args[1])
        end
      end
    end
  end

  class UploaderDataBagUpload < UploaderDataBagCommand
    include BaseCommandMixin

    banner 'knife uploader data bag upload BAG'

    def run_internal
      unless name_args.size == 1
        ui.fatal("Exactly one argument expected")
        show_usage
        exit 1
      end

      report_errors do
        report_errors { set_data_bag_items(name_args.first) }
      end
    end
  end

end
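Based on the banners and the --pattern option defined above, typical invocations would look like the following (bag names and the pattern are placeholders, not part of the gem):

knife uploader data bag diff mybag               # dry run: compare local mybag items against the Chef server
knife uploader data bag diff mybag other_bag     # compare two local data bag directories item by item
knife uploader data bag upload mybag -p 'prod_.*'  # upload only items whose ids match the pattern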
data/lib/chef/knife/uploader_run_list.rb
ADDED
@@ -0,0 +1,108 @@
# Copyright (C) 2013 ClearStory Data, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

require 'chef/knife/uploader_base'

module KnifeUploader

  class UploaderRunListCommand < BaseCommand

    def filtered_chef_nodes
      ridley.node.all.sort_by {|node| node.name }.to_enum
        .lazy_select {|node| node.name =~ @pattern }
        .lazy_map {|node| ridley.node.find(node.name) }
        .lazy_select {|node| node.chef_environment == @env_name }
    end

    def set_run_lists
      run_lists_file_path = File::join(get_chef_repo_path, 'run_lists', "#{@env_name}.json")
      target_run_lists = File.open(run_lists_file_path) {|f| JSON.load(f) }

      filtered_chef_nodes.each do |node|

        debug("Comparing run lists for node #{node.name}")
        old_run_list = node.run_list
        new_run_list = []

        # Concatenate all matching patterns. This allows specifying common parts of run lists
        # only once.
        target_run_lists.each do |pattern, run_list|
          if node.name =~ /\A#{pattern}\Z/
            new_run_list += run_list
          end
        end
        debug("New run list for node #{node.name}: #{new_run_list}")

        next if old_run_list == new_run_list

        unless new_run_list
          ui.warn("No new run list defined for node #{node.name}, skipping")
          next
        end

        ui.info((@dry_run ? 'Would modify' : 'Modifying') +
                " the run list for node #{node.name}:\n" +
                diff_color(old_run_list.join("\n") + "\n",
                           new_run_list.join("\n") + "\n"))

        unless @dry_run
          node.run_list = new_run_list
          node.save
        end
      end

      ui.info("Finished #{@dry_run ? 'showing differences for' : 'setting'} run lists in " +
              "environment #{@env_name}")
    end

    def validate_arguments
      if name_args.size != 1
        ui.fatal("Exactly one argument expected: environment")
        show_usage
        exit(1)
      end

      @env_name = name_args[0]
    end

  end

  class UploaderRunListDiff < UploaderRunListCommand

    include BaseCommandMixin

    banner 'knife uploader run list diff ENVIRONMENT'

    def run_internal
      validate_arguments
      @dry_run = true
      set_run_lists
    end

  end

  class UploaderRunListUpload < UploaderRunListCommand

    include BaseCommandMixin

    banner 'knife uploader run list upload ENVIRONMENT'

    def run_internal
      validate_arguments
      set_run_lists
    end
  end

end
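set_run_lists above reads run_lists/ENVIRONMENT.json from the Chef repository checkout; each key is treated as an anchored regular expression over node names, and the run-list fragments of every matching key are concatenated in file order. An illustrative file (the node-name patterns and role/recipe names are hypothetical):

{
  "web-\\d+": ["role[base]", "role[web]"],
  "web-.*": ["recipe[monitoring]"]
}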
metadata
ADDED
@@ -0,0 +1,139 @@
--- !ruby/object:Gem::Specification
name: knife-uploader
version: !ruby/object:Gem::Version
  version: 0.1.2
platform: ruby
authors:
- Mikhail Bautin
autorequire:
bindir: bin
cert_chain: []
date: 2014-02-02 00:00:00.000000000 Z
dependencies:
- !ruby/object:Gem::Dependency
  name: chef
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '11.4'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '11.4'
- !ruby/object:Gem::Dependency
  name: diffy
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '3.0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '3.0'
- !ruby/object:Gem::Dependency
  name: hashie
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '2.0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '2.0'
- !ruby/object:Gem::Dependency
  name: ridley
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: 1.5.3
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: 1.5.3
- !ruby/object:Gem::Dependency
  name: varia_model
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: 0.2.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: 0.2.0
- !ruby/object:Gem::Dependency
  name: celluloid
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: 0.14.1
    - - "<"
      - !ruby/object:Gem::Version
        version: 0.15.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: 0.14.1
    - - "<"
      - !ruby/object:Gem::Version
        version: 0.15.0
description: Knife plugin for better uploading of data bags, run lists, etc.
email:
- mbautin@gmail.com
executables: []
extensions: []
extra_rdoc_files: []
files:
- Gemfile
- lib/chef/knife/uploader_base.rb
- lib/chef/knife/uploader_data_bag.rb
- lib/chef/knife/uploader_run_list.rb
- lib/knife-uploader/version.rb
homepage: https://github.com/mbautin/knife-uploader
licenses:
- Apache
metadata: {}
post_install_message:
rdoc_options: []
require_paths:
- lib
required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
requirements: []
rubyforge_project:
rubygems_version: 2.2.1
signing_key:
specification_version: 4
summary: Knife plugin for better uploading of data bags, run lists, etc.
test_files: []