simplygenius-atmos 0.7.0

Files changed (48)
  1. checksums.yaml +7 -0
  2. data/CHANGELOG.md +2 -0
  3. data/LICENSE +13 -0
  4. data/README.md +212 -0
  5. data/exe/atmos +4 -0
  6. data/exe/atmos-docker +12 -0
  7. data/lib/atmos.rb +12 -0
  8. data/lib/atmos/cli.rb +105 -0
  9. data/lib/atmos/commands/account.rb +65 -0
  10. data/lib/atmos/commands/apply.rb +20 -0
  11. data/lib/atmos/commands/auth_exec.rb +29 -0
  12. data/lib/atmos/commands/base_command.rb +12 -0
  13. data/lib/atmos/commands/bootstrap.rb +72 -0
  14. data/lib/atmos/commands/container.rb +58 -0
  15. data/lib/atmos/commands/destroy.rb +18 -0
  16. data/lib/atmos/commands/generate.rb +90 -0
  17. data/lib/atmos/commands/init.rb +18 -0
  18. data/lib/atmos/commands/new.rb +18 -0
  19. data/lib/atmos/commands/otp.rb +54 -0
  20. data/lib/atmos/commands/plan.rb +20 -0
  21. data/lib/atmos/commands/secret.rb +87 -0
  22. data/lib/atmos/commands/terraform.rb +52 -0
  23. data/lib/atmos/commands/user.rb +74 -0
  24. data/lib/atmos/config.rb +208 -0
  25. data/lib/atmos/exceptions.rb +9 -0
  26. data/lib/atmos/generator.rb +199 -0
  27. data/lib/atmos/generator_factory.rb +93 -0
  28. data/lib/atmos/ipc.rb +132 -0
  29. data/lib/atmos/ipc_actions/notify.rb +27 -0
  30. data/lib/atmos/ipc_actions/ping.rb +19 -0
  31. data/lib/atmos/logging.rb +160 -0
  32. data/lib/atmos/otp.rb +61 -0
  33. data/lib/atmos/provider_factory.rb +19 -0
  34. data/lib/atmos/providers/aws/account_manager.rb +82 -0
  35. data/lib/atmos/providers/aws/auth_manager.rb +208 -0
  36. data/lib/atmos/providers/aws/container_manager.rb +116 -0
  37. data/lib/atmos/providers/aws/provider.rb +51 -0
  38. data/lib/atmos/providers/aws/s3_secret_manager.rb +49 -0
  39. data/lib/atmos/providers/aws/user_manager.rb +211 -0
  40. data/lib/atmos/settings_hash.rb +90 -0
  41. data/lib/atmos/terraform_executor.rb +267 -0
  42. data/lib/atmos/ui.rb +159 -0
  43. data/lib/atmos/utils.rb +50 -0
  44. data/lib/atmos/version.rb +3 -0
  45. data/templates/new/config/atmos.yml +50 -0
  46. data/templates/new/config/atmos/runtime.yml +43 -0
  47. data/templates/new/templates.yml +1 -0
  48. metadata +526 -0
data/lib/atmos/providers/aws/s3_secret_manager.rb
@@ -0,0 +1,49 @@
+ require_relative '../../../atmos'
+ require 'aws-sdk-s3'
+
+ module Atmos
+   module Providers
+     module Aws
+
+       class S3SecretManager
+         include GemLogger::LoggerSupport
+
+         def initialize(provider)
+           @provider = provider
+           logger.debug("Secrets config is: #{Atmos.config[:secret]}")
+           @bucket_name = Atmos.config[:secret][:bucket]
+           @encrypt = Atmos.config[:secret][:encrypt]
+         end
+
+         def set(key, value)
+           opts = {}
+           opts[:server_side_encryption] = "AES256" if @encrypt
+           bucket.object(key).put(body: value, **opts)
+         end
+
+         def get(key)
+           bucket.object(key).get.body.read
+         end
+
+         def delete(key)
+           bucket.object(key).delete
+         end
+
+         def to_h
+           Hash[bucket.objects.collect {|o|
+             [o.key, o.get.body.read]
+           }]
+         end
+
+         private
+
+         def bucket
+           raise ArgumentError.new("The s3 secret bucket is not set") unless @bucket_name
+           @bucket ||= ::Aws::S3::Bucket.new(@bucket_name)
+         end
+
+       end
+
+     end
+   end
+ end
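
A minimal usage sketch for the S3SecretManager above, assuming Atmos.config has already been loaded with a secret.bucket setting and AWS credentials are present in the environment; the "db_password" key is purely illustrative.

  # Hypothetical sketch -- the provider argument is only stored, so nil suffices here.
  secrets = Atmos::Providers::Aws::S3SecretManager.new(nil)
  secrets.set("db_password", "s3cr3t")   # writes the object to the configured bucket
  secrets.get("db_password")             # => "s3cr3t"
  secrets.to_h                           # => { "db_password" => "s3cr3t", ... }
  secrets.delete("db_password")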
data/lib/atmos/providers/aws/user_manager.rb
@@ -0,0 +1,211 @@
+ require_relative '../../../atmos'
+ require_relative '../../../atmos/otp'
+ require 'aws-sdk-iam'
+ require 'securerandom'
+
+ module Atmos
+   module Providers
+     module Aws
+
+       class UserManager
+         include GemLogger::LoggerSupport
+         include FileUtils
+
+         def initialize(provider)
+           @provider = provider
+         end
+
+         def create_user(user_name)
+           result = {}
+           client = ::Aws::IAM::Client.new
+           resource = ::Aws::IAM::Resource.new
+
+           user = resource.user(user_name)
+
+           if user.exists?
+             logger.info "User '#{user_name}' already exists"
+           else
+             logger.info "Creating new user '#{user_name}'"
+             user = resource.create_user(user_name: user_name)
+             client.wait_until(:user_exists, user_name: user_name)
+             logger.debug "User created, user_name=#{user_name}"
+           end
+
+           result[:user_name] = user_name
+
+           return result
+         end
+
+         def set_groups(user_name, groups, force: false)
+           result = {}
+           resource = ::Aws::IAM::Resource.new
+
+           user = resource.user(user_name)
+
+           existing_groups = user.groups.collect(&:name)
+           groups_to_add = groups - existing_groups
+           groups_to_remove = existing_groups - groups
+
+           result[:groups] = existing_groups
+
+           groups_to_add.each do |group|
+             logger.debug "Adding group: #{group}"
+             user.add_group(group_name: group)
+             result[:groups] << group
+           end
+
+           if force
+             groups_to_remove.each do |group|
+               logger.debug "Removing group: #{group}"
+               user.remove_group(group_name: group)
+               result[:groups].delete(group)
+             end
+           end
+
+           logger.info "User associated with groups=#{result[:groups]}"
+
+           return result
+         end
+
+         def enable_login(user_name, force: false)
+           result = {}
+           resource = ::Aws::IAM::Resource.new
+
+           user = resource.user(user_name)
+
+           password = ""
+           classes = [/[a-z]/, /[A-Z]/, /[0-9]/, /[!@#$%^&*()_+\-=\[\]{}|']/]
+           while ! classes.all? {|c| password =~ c }
+             password = SecureRandom.base64(15)
+           end
+
+           exists = false
+           begin
+             user.login_profile.create_date
+             exists = true
+           rescue ::Aws::IAM::Errors::NoSuchEntity
+             exists = false
+           end
+
+           if exists
+             logger.info "User login already exists"
+             if force
+               user.login_profile.update(password: password, password_reset_required: true)
+               result[:password] = password
+               logger.info "Updated user login with password=#{password}"
+             end
+           else
+             user.create_login_profile(password: password, password_reset_required: true)
+             result[:password] = password
+             logger.info "User login enabled with password=#{password}"
+           end
+
+           return result
+         end
+
+         def enable_mfa(user_name, force: false)
+           result = {}
+           client = ::Aws::IAM::Client.new
+           resource = ::Aws::IAM::Resource.new
+
+           user = resource.user(user_name)
+
+           if user.mfa_devices.first
+             logger.info "User mfa devices already exist"
+             if force
+               logger.info "Deleting old mfa devices"
+               user.mfa_devices.each do |dev|
+                 dev.disassociate
+                 client.delete_virtual_mfa_device(serial_number: dev.serial_number)
+                 Atmos::Otp.instance.remove(user_name)
+               end
+             else
+               return result
+             end
+           end
+
+           resp = client.create_virtual_mfa_device(
+               virtual_mfa_device_name: user_name
+           )
+
+           serial = resp.virtual_mfa_device.serial_number
+           seed = resp.virtual_mfa_device.base_32_string_seed
+
+           Atmos::Otp.instance.add(user_name, seed)
+           code1 = Atmos::Otp.instance.generate(user_name)
+           interval = (30 - (Time.now.to_i % 30)) + 1
+           logger.info "Waiting for #{interval}s to generate second otp key for enablement"
+           sleep interval
+           code2 = Atmos::Otp.instance.generate(user_name)
+           raise "MFA codes should not be the same" if code1 == code2
+
+           resp = client.enable_mfa_device({
+               user_name: user_name,
+               serial_number: serial,
+               authentication_code_1: code1,
+               authentication_code_2: code2,
+           })
+
+           result[:mfa_secret] = seed
+
+           return result
+         end
+
+         def enable_access_keys(user_name, force: false)
+           result = {}
+           resource = ::Aws::IAM::Resource.new
+
+           user = resource.user(user_name)
+
+           if user.access_keys.first
+             logger.info "User access keys already exist"
+             if force
+               logger.info "Deleting old access keys"
+               user.access_keys.each do |key|
+                 key.delete
+               end
+             else
+               return result
+             end
+           end
+
+           # TODO: auto add to ~/.aws/credentials and config
+           key_pair = user.create_access_key_pair
+           result[:key] = key_pair.access_key_id
+           result[:secret] = key_pair.secret
+           logger.debug "User keys generated key=#{key_pair.access_key_id}, secret=#{key_pair.secret}"
+
+           return result
+         end
+
+         def set_public_key(user_name, public_key, force: false)
+           result = {}
+           client = ::Aws::IAM::Client.new
+           resource = ::Aws::IAM::Resource.new
+
+           user = resource.user(user_name)
+           keys = client.list_ssh_public_keys(user_name: user_name).ssh_public_keys
+           if keys.size > 0
+             logger.info "User ssh public keys already exist"
+             if force
+               logger.info "Deleting old ssh public keys"
+               keys.each do |key|
+                 client.delete_ssh_public_key(user_name: user_name,
+                                              ssh_public_key_id: key.ssh_public_key_id)
+               end
+             else
+               return result
+             end
+           end
+
+           client.upload_ssh_public_key(user_name: user_name, ssh_public_key_body: public_key)
+           logger.debug "User public key assigned: #{public_key}"
+
+           return result
+         end
+
+       end
+
+     end
+   end
+ end
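
A hedged sketch of how the UserManager above might be driven end to end, assuming admin-level AWS credentials in the environment; the user name and the "developers" group are illustrative, and the group must already exist in the account.

  # Hypothetical sketch; enable_mfa is omitted since it also depends on Atmos::Otp state.
  users = Atmos::Providers::Aws::UserManager.new(nil)
  info = users.create_user("jane.doe")
  info.merge!(users.set_groups("jane.doe", ["developers"]))
  info.merge!(users.enable_login("jane.doe"))        # temp password, reset required
  info.merge!(users.enable_access_keys("jane.doe"))  # returns :key and :secret
  puts info.inspect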
data/lib/atmos/settings_hash.rb
@@ -0,0 +1,90 @@
+ require 'hashie'
+
+ module Atmos
+
+   class SettingsHash < Hashie::Mash
+     include GemLogger::LoggerSupport
+     include Hashie::Extensions::DeepMerge
+     include Hashie::Extensions::DeepFetch
+     disable_warnings
+
+     PATH_PATTERN = /[\.\[\]]/
+
+     def notation_get(key)
+       path = key.to_s.split(PATH_PATTERN).compact
+       path = path.collect {|p| p =~ /^\d+$/ ? p.to_i : p }
+       result = nil
+
+       begin
+         result = deep_fetch(*path)
+       rescue Hashie::Extensions::DeepFetch::UndefinedPathError => e
+         logger.debug("Settings missing value for key='#{key}'")
+       end
+
+       return result
+     end
+
+     def notation_put(key, value, additive: true)
+       path = key.to_s.split(PATH_PATTERN).compact
+       path = path.collect {|p| p =~ /^\d+$/ ? p.to_i : p }
+       current_level = self
+       path.each_with_index do |p, i|
+
+         if i == path.size - 1
+           if additive && current_level[p].is_a?(Array)
+             current_level[p] = current_level[p] | Array(value)
+           else
+             current_level[p] = value
+           end
+         else
+           if current_level[p].nil?
+             if path[i+1].is_a?(Integer)
+               current_level[p] = []
+             else
+               current_level[p] = {}
+             end
+           end
+         end
+
+         current_level = current_level[p]
+       end
+     end
+
+     def self.add_config(yml_file, key, value, additive: true)
+       orig_config_with_comments = File.read(yml_file)
+
+       comment_places = {}
+       comment_lines = []
+       orig_config_with_comments.each_line do |line|
+         line.gsub!(/\s+$/, "\n")
+         if line =~ /^\s*(#.*)?$/
+           comment_lines << line
+         else
+           if comment_lines.present?
+             comment_places[line.chomp] = comment_lines
+             comment_lines = []
+           end
+         end
+       end
+       comment_places["<EOF>"] = comment_lines
+
+       orig_config = SettingsHash.new((YAML.load_file(yml_file) rescue {}))
+       orig_config.notation_put(key, value, additive: additive)
+       new_config_no_comments = YAML.dump(orig_config.to_hash)
+       new_config_no_comments.sub!(/\A---\n/, "")
+
+       new_yml = ""
+       new_config_no_comments.each_line do |line|
+         line.gsub!(/\s+$/, "\n")
+         cline = comment_places.keys.find {|k| line =~ /^#{k}/ }
+         comments = comment_places[cline]
+         comments.each {|comment| new_yml << comment } if comments
+         new_yml << line
+       end
+       comment_places["<EOF>"].each {|comment| new_yml << comment }
+
+       return new_yml
+     end
+
+   end
+ end
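
A small sketch of the dotted/bracketed path notation that notation_get and notation_put handle above; the keys and values are illustrative.

  # Hypothetical sketch of path-notation access on a SettingsHash.
  settings = Atmos::SettingsHash.new(foo: { bar: [1, 2] })
  settings.notation_get("foo.bar[1]")      # => 2
  settings.notation_put("foo.baz", "qux")  # => settings["foo"]["baz"] == "qux"
  settings.notation_put("foo.bar", 3)      # additive by default => [1, 2, 3]
  settings.notation_get("no.such.key")     # => nil, logs a debug message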
data/lib/atmos/terraform_executor.rb
@@ -0,0 +1,267 @@
+ require_relative '../atmos'
+ require_relative '../atmos/ipc'
+ require_relative '../atmos/ui'
+ require 'open3'
+ require 'fileutils'
+ require 'find'
+ require 'climate_control'
+
+ module Atmos
+
+   class TerraformExecutor
+     include GemLogger::LoggerSupport
+     include FileUtils
+     include Atmos::UI
+
+     class ProcessFailed < RuntimeError; end
+
+     def initialize(process_env: ENV, working_group: 'default')
+       @process_env = process_env
+       @working_group = working_group
+       @working_dir = Atmos.config.tf_working_dir(@working_group)
+       @recipes = Atmos.config["recipes.#{@working_group}"]
+     end
+
+     def run(*terraform_args, skip_backend: false, skip_secrets: false, get_modules: false, output_io: nil)
+       setup_working_dir(skip_backend: skip_backend)
+
+       if get_modules
+         logger.debug("Getting modules")
+         get_modules_io = StringIO.new
+         begin
+           execute("get", output_io: get_modules_io)
+         rescue Atmos::TerraformExecutor::ProcessFailed => e
+           logger.info(get_modules_io.string)
+           raise
+         end
+       end
+
+       return execute(*terraform_args, skip_secrets: skip_secrets, output_io: output_io)
+     end
+
+     private
+
+     def tf_cmd(*args)
+       ['terraform'] + args
+     end
+
+     def execute(*terraform_args, skip_secrets: false, output_io: nil)
+       cmd = tf_cmd(*terraform_args)
+       logger.debug("Running terraform: #{cmd.join(' ')}")
+
+       env = Hash[@process_env]
+       if ! skip_secrets
+         begin
+           env = env.merge(secrets_env)
+         rescue => e
+           logger.debug("Secrets not available: #{e}")
+         end
+       end
+
+       # lets tempfiles created by subprocesses be easily found by users
+       env['TMPDIR'] = Atmos.config.tmp_dir
+
+       # Lets terraform communicate back to atmos, e.g. for UI notifications
+       ipc = Atmos::Ipc.new(Atmos.config.tmp_dir)
+
+       IO.pipe do |stdout, stdout_writer|
+         IO.pipe do |stderr, stderr_writer|
+
+           stdout_writer.sync = stderr_writer.sync = true
+           # TODO: more filtering on terraform output?
+           stdout_thr = pipe_stream(stdout, output_io.nil? ? $stdout : output_io) do |data|
+             if data =~ /^[\e\[\dm\s]*Enter a value:[\e\[\dm\s]*$/
+               notify(message: "Terraform is waiting for user input")
+             end
+             data
+           end
+           stderr_thr = pipe_stream(stderr, output_io.nil? ? $stderr : output_io)
+
+           ipc.listen do |sock_path|
+
+             if Atmos.config['ipc.disable']
+               # Using : as the command makes execution of ipc from the
+               # terraform side a no-op in both cases of how we call it. This
+               # way, terraform execution continues to work when IPC is disabled
+               # command = "$ATMOS_IPC_CLIENT <json_string>"
+               # program = ["sh", "-c", "$ATMOS_IPC_CLIENT"]
+               env['ATMOS_IPC_CLIENT'] = ":"
+             else
+               env['ATMOS_IPC_SOCK'] = sock_path
+               env['ATMOS_IPC_CLIENT'] = ipc.generate_client_script
+             end
+
+             # Was unable to get piping to work with stdin for some reason. It
+             # worked in the simple case, but started to fail when terraform config
+             # got more extensive. Thus, using spawn to redirect stdin from the
+             # terminal direct to terraform, with IO.pipe to copy the other
+             # streams. Maybe in the future we can completely disconnect stdin
+             # and have atmos do the output parsing and stdin prompting
+             pid = spawn(env, *cmd,
+                         chdir: tf_recipes_dir,
+                         :out=>stdout_writer, :err=> stderr_writer, :in => :in)
+
+             logger.debug("Terraform started with pid #{pid}")
+             Process.wait(pid)
+           end
+
+           stdout_writer.close
+           stderr_writer.close
+           stdout_thr.join
+           stderr_thr.join
+
+           status = $?.exitstatus
+           logger.debug("Terraform exited: #{status}")
+           if status != 0
+             raise ProcessFailed.new "Terraform exited with non-zero exit code: #{status}"
+           end
+
+         end
+       end
+
+     end
+
+     def setup_working_dir(skip_backend: false)
+       clean_links
+       link_shared_plugin_dir
+       link_support_dirs
+       link_recipes
+       write_atmos_vars
+       setup_backend(skip_backend)
+     end
+
+     def setup_backend(skip_backend=false)
+       backend_file = File.join(tf_recipes_dir, 'atmos-backend.tf.json')
+       backend_config = (Atmos.config["backend"] || {}).clone
+
+       if backend_config.present? && ! skip_backend
+         logger.debug("Writing out terraform state backend config")
+
+         # Use a different state file per group
+         if @working_group
+           backend_config['key'] = "#{@working_group}-#{backend_config['key']}"
+         end
+
+         backend_type = backend_config.delete("type")
+
+         backend = {
+             "terraform" => {
+                 "backend" => {
+                     backend_type => backend_config
+                 }
+             }
+         }
+
+         File.write(backend_file, JSON.pretty_generate(backend))
+       else
+         logger.debug("Clearing terraform state backend config")
+         File.delete(backend_file) if File.exist?(backend_file)
+       end
+     end
+
+     # terraform currently (v0.11.3) doesn't handle maps with nested maps or
+     # lists well, so flatten them - nested maps get expanded into the top level
+     # one, with their keys being appended with underscores, and lists get
+     # joined with "," so we end up with a single hash with homogeneous types
+     def homogenize_for_terraform(h, root={}, prefix="")
+       h.each do |k, v|
+         if v.is_a? Hash
+           homogenize_for_terraform(v, root, "#{k}_")
+         else
+           v = v.join(",") if v.is_a? Array
+           root["#{prefix}#{k}"] = v
+         end
+       end
+       return root
+     end
+
+     def tf_recipes_dir
+       @tf_recipes_dir ||= begin
+         dir = File.join(@working_dir, 'recipes')
+         logger.debug("Tf recipes dir: #{dir}")
+         mkdir_p(dir)
+         dir
+       end
+     end
+
+     def write_atmos_vars
+       File.open(File.join(tf_recipes_dir, 'atmos.auto.tfvars.json'), 'w') do |f|
+         atmos_var_config = atmos_config = homogenize_for_terraform(Atmos.config.to_h)
+
+         var_prefix = Atmos.config['var_prefix']
+         if var_prefix
+           atmos_var_config = Hash[atmos_var_config.collect {|k, v| ["#{var_prefix}#{k}", v]}]
+         end
+
+         var_hash = {
+             atmos_env: Atmos.config.atmos_env,
+             all_env_names: Atmos.config.all_env_names,
+             account_ids: Atmos.config.account_hash,
+             atmos_config: atmos_config
+         }
+         var_hash = var_hash.merge(atmos_var_config)
+         f.puts(JSON.pretty_generate(var_hash))
+       end
+     end
+
+     def secrets_env
+       # NOTE use an auto-deleting temp file if passing secrets through env ends
+       # up being problematic
+       # TODO fix the need for CC - TE calls for secrets which needs auth in
+       # ENV, so kinda clunky to have to do both CC and pass the env in
+       ClimateControl.modify(@process_env) do
+         secrets = Atmos.config.provider.secret_manager.to_h
+         env_secrets = Hash[secrets.collect { |k, v| ["TF_VAR_#{k}", v] }]
+         return env_secrets
+       end
+     end
+
+     def clean_links
+       Find.find(@working_dir) do |f|
+         Find.prune if f =~ /\/.terraform\/modules\//
+         File.delete(f) if File.symlink?(f)
+       end
+     end
+
+     def link_support_dirs
+       ['modules', 'templates'].each do |subdir|
+         ln_sf(File.join(Atmos.config.root_dir, subdir), @working_dir)
+       end
+     end
+
+     def link_shared_plugin_dir
+       if ! Atmos.config["terraform.disable_shared_plugins"]
+         shared_plugins_dir = File.join(Atmos.config.tmp_root, "terraform_plugins")
+         mkdir_p(shared_plugins_dir)
+         terraform_state_dir = File.join(tf_recipes_dir, '.terraform')
+         mkdir_p(terraform_state_dir)
+         terraform_plugins_dir = File.join(terraform_state_dir, 'plugins')
+         ln_sf(shared_plugins_dir, terraform_plugins_dir)
+       end
+     end
+
+     def link_recipes
+       @recipes.each do |recipe|
+         ln_sf(File.join(Atmos.config.root_dir, 'recipes', "#{recipe}.tf"), tf_recipes_dir)
+       end
+     end
+
+     def pipe_stream(src, dest)
+       Thread.new do
+         block_size = 1024
+         begin
+           while data = src.readpartial(block_size)
+             data = yield data if block_given?
+             dest.write(data)
+           end
+         rescue EOFError
+           nil
+         rescue Exception => e
+           logger.log_exception(e, "Stream failure")
+         end
+       end
+     end
+
+   end
+
+ end
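
A hedged sketch of driving the TerraformExecutor above, assuming it runs inside a generated atmos project (so Atmos.config resolves working dirs, recipes, and backend settings) and that the terraform binary is on the PATH; "plan" is just one example of the args passed through.

  # Hypothetical sketch; any terraform subcommand can be passed through as args.
  te = Atmos::TerraformExecutor.new(process_env: ENV)
  begin
    te.run("plan", get_modules: true)
  rescue Atmos::TerraformExecutor::ProcessFailed => e
    warn "terraform failed: #{e.message}"
  end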