sparkle_formation 0.4.0 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,25 +1,5 @@
1
- #
2
- # Author:: Chris Roberts <chris@hw-ops.com>
3
- # Copyright:: 2013, Heavy Water Operations, LLC
4
- # License:: Apache License, Version 2.0
5
- #
6
- # Licensed under the Apache License, Version 2.0 (the "License");
7
- # you may not use this file except in compliance with the License.
8
- # You may obtain a copy of the License at
9
- #
10
- # http://www.apache.org/licenses/LICENSE-2.0
11
- #
12
- # Unless required by applicable law or agreed to in writing, software
13
- # distributed under the License is distributed on an "AS IS" BASIS,
14
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15
- # See the License for the specific language governing permissions and
16
- # limitations under the License.
17
- #
18
-
19
1
  require 'sparkle_formation'
20
2
 
21
- SparkleFormation::SparkleStruct.camel_keys = true
22
-
23
3
  # Formation container
24
4
  class SparkleFormation
25
5
 
@@ -35,6 +15,9 @@ class SparkleFormation
35
15
  'registry'
36
16
  ]
37
17
 
18
+ # @return [String] default stack resource name
19
+ DEFAULT_STACK_RESOURCE = 'AWS::CloudFormation::Stack'
20
+
38
21
  class << self
39
22
 
40
23
  # @return [Hashish] loaded dynamics
@@ -106,12 +89,16 @@ class SparkleFormation
106
89
  # to pass through when compiling ({:state => {}})
107
90
  # @return [Hashish, SparkleStruct]
108
91
  def compile(path, *args)
92
+ opts = args.detect{|i| i.is_a?(Hash) } || {}
93
+ if(spath = (opts.delete(:sparkle_path) || SparkleFormation.sparkle_path))
94
+ container = Sparkle.new(:root => spath)
95
+ path = container.get(:template, path)[:path]
96
+ end
109
97
  formation = self.instance_eval(IO.read(path), path, 1)
110
98
  if(args.delete(:sparkle))
111
99
  formation
112
100
  else
113
- comp_arg = args.detect{|i| i.is_a?(Hash) }
114
- (comp_arg ? formation.compile(comp_arg) : formation.compile)._dump
101
+ formation.compile(opts)._dump
115
102
  end
116
103
  end
117
104
 
@@ -121,13 +108,9 @@ class SparkleFormation
121
108
  # @yield block to execute
122
109
  # @return [SparkleStruct] provided base or new struct
123
110
  def build(base=nil, &block)
124
- if(base || block.nil?)
125
- struct = base || SparkleStruct.new
126
- struct.instance_exec(&block)
127
- @_struct = struct
128
- else
129
- block
130
- end
111
+ struct = base || SparkleStruct.new
112
+ struct.instance_exec(&block)
113
+ struct
131
114
  end
132
115
 
133
116
  # Load component
@@ -136,6 +119,7 @@ class SparkleFormation
136
119
  # @return [SparkleStruct] resulting struct
137
120
  def load_component(path)
138
121
  self.instance_eval(IO.read(path), path, 1)
122
+ @_struct
139
123
  end
140
124
 
141
125
  # Load all dynamics within a directory
@@ -205,17 +189,19 @@ class SparkleFormation
205
189
  # @return [SparkleStruct]
206
190
  def insert(dynamic_name, struct, *args, &block)
207
191
  result = false
208
- if(@dynamics && @dynamics[dynamic_name])
209
- result = struct.instance_exec(*args, &@dynamics[dynamic_name][:block])
192
+ begin
193
+ dyn = struct._self.sparkle.get(:dynamic, dynamic_name)
194
+ raise dyn if dyn.is_a?(Exception)
195
+ result = struct.instance_exec(*args, &dyn[:block])
210
196
  if(block_given?)
211
197
  result.instance_exec(&block)
212
198
  end
213
199
  result = struct
214
- else
200
+ rescue Error::NotFound::Dynamic
215
201
  result = builtin_insert(dynamic_name, struct, *args, &block)
216
202
  end
217
203
  unless(result)
218
- raise "Failed to locate requested dynamic block for insertion: #{dynamic_name} (valid: #{(@dynamics || {}).keys.sort.join(', ')})"
204
+ raise "Failed to locate requested dynamic block for insertion: #{dynamic_name} (valid: #{struct._self.sparkle.dynamics.keys.sort.join(', ')})"
219
205
  end
220
206
  result
221
207
  end
@@ -229,33 +215,18 @@ class SparkleFormation
229
215
  # @note if symbol is provided for template, double underscores
230
216
  # will be used for directory separator and dashes will match underscores
231
217
  def nest(template, struct, *args, &block)
232
- options = args.detect{|i| i.is_a?(Hash)}
233
- if(options)
234
- args.delete(options)
235
- else
236
- options = {}
237
- end
238
- spath = SparkleFormation.new('stub').sparkle_path
239
- resource_name = [template.to_s.gsub(/(\/|__|-)/, '_'), *args].compact.join('_').to_sym
240
- path = template.is_a?(Symbol) ? template.to_s.gsub('__', '/') : template.to_s
241
- file = Dir.glob(File.join(spath, '**', '**', '*.rb')).detect do |local_path|
242
- strip_path = local_path.sub(spath, '').sub(/^\//, '').tr('-', '_').sub('.rb', '')
243
- strip_path == path
244
- end
245
- unless(file)
246
- raise ArgumentError.new("Failed to locate nested stack file! (#{template.inspect} -> #{path.inspect})")
247
- end
248
- instance = self.instance_eval(IO.read(file), file, 1)
249
- instance.parent = struct._self
250
- instance.name = Bogo::Utility.camel(resource_name)
218
+ to_nest = struct._self.sparkle.get(:template, template)
219
+ resource_name = (args.empty? ? template.to_s.gsub('__', '_') : args.map{|a| Bogo::Utility.snake(a)}.join('_')).to_sym
220
+ nested_template = self.compile(to_nest[:path], :sparkle)
221
+ nested_template.parent = struct._self
251
222
  struct.resources.set!(resource_name) do
252
- type 'AWS::CloudFormation::Stack'
223
+ type DEFAULT_STACK_RESOURCE
253
224
  end
254
- struct.resources.__send__(resource_name).properties.stack instance.compile(:state => options[:parameters])
225
+ struct.resources[resource_name].properties.stack nested_template
255
226
  if(block_given?)
256
- struct.resources.__send__(resource_name).instance_exec(&block)
227
+ struct.resources[resource_name].instance_exec(&block)
257
228
  end
258
- struct.resources.__send__(resource_name)
229
+ struct.resources[resource_name]
259
230
  end
260
231
 
261
232
  # Insert a builtin dynamic into a context
@@ -270,18 +241,19 @@ class SparkleFormation
270
241
  _config ||= {}
271
242
  return unless _name
272
243
  resource_name = "#{_name}_#{_config.delete(:resource_name_suffix) || dynamic_name}".to_sym
273
- new_resource = struct.resources.__send__(resource_name)
244
+ struct.resources.set!(resource_name)
245
+ new_resource = struct.resources[resource_name]
274
246
  new_resource.type lookup_key
275
247
  properties = new_resource.properties
248
+ config_keys = _config.keys.zip(_config.keys.map{|k| snake(k).to_s.tr('_', '')})
276
249
  SfnAws.resource(dynamic_name, :properties).each do |prop_name|
277
- value = [prop_name, snake(prop_name)].map do |key|
278
- _config[key] || _config[key.to_sym]
279
- end.compact.first
250
+ key = (config_keys.detect{|k| k.last == snake(prop_name).to_s.tr('_', '')} || []).first
251
+ value = _config[key] if key
280
252
  if(value)
281
253
  if(value.is_a?(Proc))
282
- properties.__send__(prop_name).instance_exec(&value)
254
+ properties[prop_name].to_sym.instance_exec(&value)
283
255
  else
284
- properties.__send__(prop_name, value)
256
+ properties.set!(prop_name, value)
285
257
  end
286
258
  end
287
259
  end
@@ -304,8 +276,12 @@ class SparkleFormation
304
276
  end
305
277
  end
306
278
 
279
+ include Bogo::Memoization
280
+
307
281
  # @return [Symbol] name of formation
308
- attr_accessor :name
282
+ attr_reader :name
283
+ # @return [Sparkle] parts store
284
+ attr_reader :sparkle
309
285
  # @return [String] base path
310
286
  attr_reader :sparkle_path
311
287
  # @return [String] components path
@@ -320,11 +296,7 @@ class SparkleFormation
320
296
  attr_reader :load_order
321
297
  # @return [Hash] parameters for stack generation
322
298
  attr_reader :parameters
323
- # @return [Hash] state hash for compile time parameters
324
- attr_accessor :compile_state
325
- # @return [Proc] block to call for setting compile time parameters
326
- attr_accessor :compile_time_parameter_setter
327
- # @return [SparkleFormation] parent instance
299
+ # @return [SparkleFormation] parent stack
328
300
  attr_accessor :parent
329
301
 
330
302
  # Create new instance
@@ -340,65 +312,59 @@ class SparkleFormation
340
312
  # @yield base context
341
313
  def initialize(name, options={}, &block)
342
314
  @name = name.to_sym
343
- @sparkle_path = options[:sparkle_path] ||
344
- self.class.custom_paths[:sparkle_path] ||
345
- File.join(Dir.pwd, 'cloudformation')
346
- @components_directory = options[:components_directory] ||
347
- self.class.custom_paths[:components_directory] ||
348
- File.join(sparkle_path, 'components')
349
- @dynamics_directory = options[:dynamics_directory] ||
350
- self.class.custom_paths[:dynamics_directory] ||
351
- File.join(sparkle_path, 'dynamics')
352
- @registry_directory = options[:registry_directory] ||
353
- self.class.custom_paths[:registry_directory] ||
354
- File.join(sparkle_path, 'registry')
355
- self.class.load_dynamics!(@dynamics_directory)
356
- self.class.load_registry!(@registry_directory)
315
+ @component_paths = []
316
+ @sparkle = SparkleCollection.new
317
+ @sparkle.set_root(
318
+ Sparkle.new(
319
+ Smash.new.tap{|h|
320
+ s_path = options.fetch(:sparkle_path,
321
+ self.class.custom_paths[:sparkle_path]
322
+ )
323
+ if(s_path)
324
+ h[:root] = s_path
325
+ end
326
+ }
327
+ )
328
+ )
357
329
  unless(options[:disable_aws_builtins])
358
330
  require 'sparkle_formation/aws'
359
331
  SfnAws.load!
360
332
  end
361
- @parameters = set_generation_parameters!(
362
- options.fetch(:parameters,
363
- options.fetch(:compile_time_parameters, {})
364
- )
365
- )
366
- @components = SparkleStruct.hashish.new
333
+ @parameters = set_generation_parameters!(options.fetch(:parameters, {}))
334
+ @components = Smash.new
367
335
  @load_order = []
368
336
  @overrides = []
337
+ @parent = options[:parent]
369
338
  if(block)
370
339
  load_block(block)
371
340
  end
372
341
  @compiled = nil
373
342
  end
374
343
 
375
- # Get or set the compile time parameter setting block. If a get
376
- # request the ancestor path will be searched to root
377
- #
378
- # @yield block to set compile time parameters
379
- # @yieldparam [SparkleFormation]
380
- # @return [Proc, NilClass]
381
- def compile_time_parameter_setter(&block)
382
- if(block)
383
- @compile_time_parameter_setter = block
344
+ # @return [SparkleFormation] root stack
345
+ def root
346
+ if(parent)
347
+ parent.root
384
348
  else
385
- if(@compile_time_parameter_setter)
386
- @compile_time_parameter_setter
387
- else
388
- parent.nil? ? nil : parent.compile_time_parameter_setter
389
- end
349
+ self
390
350
  end
391
351
  end
392
352
 
393
- # Set the compile time parameters for the stack if the setter proc
394
- # is available
395
- def set_compile_time_parameters!
396
- if(compile_time_parameter_setter)
397
- compile_time_parameter_setter.call(self)
353
+ # @return [Array<SparkleFormation>] path to root
354
+ def root_path
355
+ if(parent)
356
+ [*parent.root_path, self].compact
357
+ else
358
+ [self]
398
359
  end
399
360
  end
400
361
 
401
- ALLOWED_GENERATION_PARAMETERS = ['type', 'default', 'description', 'multiple', 'prompt_when_nested']
362
+ # @return [TrueClass, FalseClass] current stack is root
363
+ def root?
364
+ root == self
365
+ end
366
+
367
+ ALLOWED_GENERATION_PARAMETERS = ['type', 'default']
402
368
  VALID_GENERATION_PARAMETER_TYPES = ['String', 'Number']
403
369
 
404
370
  # Validation parameters used for template generation to ensure they
@@ -436,13 +402,13 @@ class SparkleFormation
436
402
  # @return [self]
437
403
  def load(*args)
438
404
  args.each do |thing|
439
- if(thing.is_a?(Symbol))
440
- path = File.join(components_directory, "#{thing}.rb")
405
+ key = File.basename(thing.to_s).sub('.rb', '')
406
+ if(thing.is_a?(String))
407
+ # TODO: Test this!
408
+ components[key] = ->{ self.class.load_component(thing) }
441
409
  else
442
- path = thing
410
+ components[key] = sparkle.get(:component, thing)[:block]
443
411
  end
444
- key = File.basename(path).sub('.rb', '')
445
- components[key] = self.class.load_component(path)
446
412
  @load_order << key
447
413
  end
448
414
  self
@@ -463,16 +429,11 @@ class SparkleFormation
463
429
  # @option args [Hash] :state local state parameters
464
430
  # @return [SparkleStruct]
465
431
  def compile(args={})
466
- if(args.has_key?(:state))
467
- @compile_state = args[:state]
468
- @compiled = nil
469
- end
470
432
  unless(@compiled)
471
- set_compile_time_parameters!
472
433
  compiled = SparkleStruct.new
473
434
  compiled._set_self(self)
474
- if(compile_state)
475
- compiled.set_state!(compile_state)
435
+ if(args[:state])
436
+ compiled.set_state!(args[:state])
476
437
  end
477
438
  @load_order.each do |key|
478
439
  self.class.build(compiled, &components[key])
@@ -483,9 +444,6 @@ class SparkleFormation
483
444
  end
484
445
  self.class.build(compiled, &override[:block])
485
446
  end
486
- if(compile_state)
487
- compiled.outputs.compile_state.value MultiJson.dump(compile_state)
488
- end
489
447
  @compiled = compiled
490
448
  end
491
449
  @compiled
@@ -494,66 +452,239 @@ class SparkleFormation
494
452
  # Clear compiled stack if cached and perform compilation again
495
453
  #
496
454
  # @return [SparkleStruct]
497
- def recompile(args={})
498
- @compiled = nil
499
- compile(args)
455
+ def recompile
456
+ unmemoize(:compile)
457
+ compile
458
+ end
459
+
460
+ # @return [Array<SparkleFormation>]
461
+ def nested_stacks(*args)
462
+ compile.resources.keys!.map do |key|
463
+ if(compile.resources[key].type == DEFAULT_STACK_RESOURCE)
464
+ result = [compile.resources[key].properties.stack]
465
+ if(args.include?(:with_resource))
466
+ result.push(compile[:resources][key])
467
+ end
468
+ if(args.include?(:with_name))
469
+ result.push(key)
470
+ end
471
+ result.size == 1 ? result.first : result
472
+ end
473
+ end.compact
500
474
  end
501
475
 
502
476
  # @return [TrueClass, FalseClass] includes nested stacks
503
- def nested?
504
- !!compile.dump!.fetch('Resources', {}).detect do |r_name, resource|
505
- resource['Type'] == 'AWS::CloudFormation::Stack'
477
+ def nested?(stack_hash=nil)
478
+ stack_hash = compile.dump! unless stack_hash
479
+ !!stack_hash.fetch('Resources', {}).detect do |r_name, resource|
480
+ resource['Type'] == DEFAULT_STACK_RESOURCE
506
481
  end
507
482
  end
508
483
 
509
484
  # @return [TrueClass, FalseClass] includes _only_ nested stacks
510
- def isolated_nests?
511
- hash = compile.dump!
512
- hash.fetch('Resources', {}).all? do |name, resource|
513
- resource['Type'] == 'AWS::CloudFormation::Stack'
485
+ def isolated_nests?(stack_hash=nil)
486
+ stack_hash = compile.dump! unless stack_hash
487
+ stack_hash.fetch('Resources', {}).all? do |name, resource|
488
+ resource['Type'] == DEFAULT_STACK_RESOURCE
489
+ end
490
+ end
491
+
492
+ # @return [TrueClass, FalseClass] policies defined
493
+ def includes_policies?(stack_hash=nil)
494
+ stack_hash = compile.dump! unless stack_hash
495
+ stack_hash.fetch('Resources', {}).any? do |name, resource|
496
+ resource.has_key?('Policy')
514
497
  end
515
498
  end
516
499
 
517
- # Apply stack nesting logic. Will extract unique parameters from
518
- # nested stacks, update refs to use sibling stack outputs where
519
- # required and extract nested stack templates for remote persistence
500
+ # Generate policy for stack
520
501
  #
521
- # @yieldparam template_name [String] nested stack resource name
522
- # @yieldparam template [Hash] nested stack template
523
- # @yieldreturn [String] remote URL
524
- # @return [Hash] dumped template hash
525
- def apply_nesting(*args)
526
- hash = compile.dump!
527
- stacks = Hash[
528
- hash['Resources'].find_all do |r_name, resource|
529
- [r_name, MultiJson.load(MultiJson.dump(resource))]
502
+ # @return [Hash]
503
+ # @todo this is very AWS specific, so make this easy for swapping
504
+ def generate_policy
505
+ statements = []
506
+ compile.resources.keys!.each do |r_name|
507
+ r_object = compile.resources[r_name]
508
+ if(r_object['Policy'])
509
+ r_object['Policy'].keys!.each do |effect|
510
+ statements.push(
511
+ 'Effect' => effect.to_s.capitalize,
512
+ 'Action' => [r_object['Policy'][effect]].flatten.compact.map{|i| "Update:#{i}"},
513
+ 'Resource' => "LogicalResourceId/#{r_name}",
514
+ 'Principal' => '*'
515
+ )
516
+ end
517
+ r_object.delete!('Policy')
530
518
  end
531
- ]
532
- parameters = hash.fetch('Parameters', {})
533
- output_map = {}
534
- stacks.each do |stack_name, stack_resource|
535
- remap_nested_parameters(hash, parameters, stack_name, stack_resource, output_map)
536
- end
537
- hash['Parameters'] = parameters
538
- hash['Resources'].each do |resource_name, resource|
539
- if(resource['Type'] == 'AWS::CloudFormation::Stack')
540
- stack = resource['Properties'].delete('Stack')
541
- resource['Properties']['TemplateURL'] = yield(resource_name, stack)
519
+ end
520
+ statements.push(
521
+ 'Effect' => 'Allow',
522
+ 'Action' => 'Update:*',
523
+ 'Resource' => '*',
524
+ 'Principal' => '*'
525
+ )
526
+ Smash.new('Statement' => statements)
527
+ end
528
+
529
+ # Apply nesting logic to stack
530
+ #
531
+ # @param nest_type [Symbol] values: :shallow, :deep (default: :deep)
532
+ # @return [Hash] dumped stack
533
+ # @note see specific version for expected block parameters
534
+ def apply_nesting(*args, &block)
535
+ if(args.include?(:shallow))
536
+ apply_shallow_nesting(&block)
537
+ else
538
+ apply_deep_nesting(&block)
539
+ end
540
+ end
541
+
542
+ # Apply deeply nested stacks. This is the new nesting approach and
543
+ # does not bubble parameters up to the root stack. Parameters are
544
+ # isolated to the stack resource itself and output mapping is
545
+ # automatically applied.
546
+ #
547
+ # @yieldparam stack [SparkleFormation] stack instance
548
+ # @yieldparam resource [AttributeStruct] the stack resource
549
+ # @yieldparam s_name [String] stack resource name
550
+ # @yieldreturn [Hash] key/values to be merged into resource properties
551
+ # @return [Hash] dumped stack
552
+ def apply_deep_nesting(*args, &block)
553
+ outputs = collect_outputs
554
+ nested_stacks(:with_resource).each do |stack, resource|
555
+ unless(stack.nested_stacks.empty?)
556
+ stack.apply_deep_nesting(*args)
557
+ end
558
+ stack.compile.parameters.keys!.each do |parameter_name|
559
+ if(output_name = output_matched?(parameter_name, outputs.keys))
560
+ next if outputs[output_name] == stack
561
+ stack_output = stack.make_output_available(output_name, outputs)
562
+ resource.properties.parameters.set!(parameter_name, stack_output)
563
+ end
564
+ end
565
+ end
566
+ if(block_given?)
567
+ extract_templates(&block)
568
+ end
569
+ compile.dump!
570
+ end
571
+
572
+ # Check if parameter name matches an output name
573
+ #
574
+ # @param p_name [String, Symbol] parameter name
575
+ # @param output_names [Array<String>] list of available outputs
576
+ # @return [String, NilClass] matching output name
577
+ # @note will auto downcase name prior to comparison
578
+ def output_matched?(p_name, output_names)
579
+ output_names.detect do |o_name|
580
+ Bogo::Utility.snake(o_name).tr('_', '') == Bogo::Utility.snake(p_name).tr('_', '')
581
+ end
582
+ end
583
+
584
+ # Extract output to make available for stack parameter usage at the
585
+ # current depth
586
+ #
587
+ # @param output_name [String] name of output
588
+ # @param outputs [Hash] listing of outputs
589
+ # @return [Hash] reference to output value (used for setting parameter)
590
+ def make_output_available(output_name, outputs)
591
+ bubble_path = outputs[output_name].root_path - root_path
592
+ drip_path = root_path - outputs[output_name].root_path
593
+ bubble_path.each_slice(2) do |base_sparkle, ref_sparkle|
594
+ next unless ref_sparkle
595
+ base_sparkle.compile.outputs.set!(output_name).set!(:value, base_sparkle.compile.attr!(ref_sparkle.name, "Outputs.#{output_name}"))
596
+ end
597
+ if(bubble_path.empty?)
598
+ raise ArgumentError.new "Failed to detect available bubbling path for output `#{output_name}`. This may be due to a circular dependency!"
599
+ end
600
+ result = compile.attr!(bubble_path.first.name, "Outputs.#{output_name}")
601
+ if(drip_path.size > 1)
602
+ parent = drip_path.first.parent
603
+ drip_path.unshift(parent) if parent
604
+ drip_path.each_slice(2) do |base_sparkle, ref_sparkle|
605
+ next unless ref_sparkle
606
+ base_sparkle.compile.resources[ref_sparkle.name].properties.parameters.set!(output_name, result)
607
+ ref_sparkle.compile.parameters.set!(output_name){ type 'String' } # TODO <<<<------ type check and prop
608
+ result = compile.ref!(output_name)
542
609
  end
543
610
  end
544
- if(args.include?(:collect_outputs))
611
+ result
612
+ end
613
+
614
+ # Extract and process nested stacks
615
+ #
616
+ # @yieldparam stack [SparkleFormation] stack instance
617
+ # @yieldparam resource [AttributeStruct] the stack resource
618
+ # @yieldparam s_name [String] stack resource name
619
+ # @yieldreturn [Hash] key/values to be merged into resource properties
620
+ def extract_templates(&block)
621
+ stack_template_extractor(nested_stacks(:with_resource, :with_name), &block)
622
+ end
623
+
624
+ # Run the stack extraction
625
+ #
626
+ # @param x_stacks [Array<Array<SparkleFormation, SparkleStruct, String>>]
627
+ def stack_template_extractor(x_stacks, &block)
628
+ x_stacks.each do |stack, resource, s_name|
629
+ unless(stack.nested_stacks.empty?)
630
+ stack_template_extractor(stack.nested_stacks(:with_resource, :with_name), &block)
631
+ end
632
+ resource.properties.set!(:stack, stack.compile.dump!)
633
+ block.call(s_name, stack, resource)
634
+ end
635
+ end
636
+
637
+ # Apply shallow nesting. This style of nesting will bubble
638
+ # parameters up to the root stack. This type of nesting is the
639
+ # original and now deprecated, but remains for compat issues so any
640
+ # existing usage won't be automatically busted.
641
+ #
642
+ # @yieldparam resource_name [String] name of stack resource
643
+ # @yieldparam stack [SparkleFormation] nested stack
644
+ # @yieldreturn [String] Remote URL storage for template
645
+ # @return [Hash]
646
+ def apply_shallow_nesting(*args, &block)
647
+ parameters = compile[:parameters] ? compile[:parameters]._dump : {}
648
+ output_map = {}
649
+ nested_stacks(:with_resource, :with_name).each do |stack, stack_resource, stack_name|
650
+ remap_nested_parameters(compile, parameters, stack_name, stack_resource, output_map)
651
+ end
652
+ extract_templates(&block)
653
+ compile.parameters parameters
654
+ if(args.include?(:bubble_outputs))
545
655
  outputs_hash = Hash[
546
656
  output_map do |name, value|
547
657
  [name, {'Value' => {'Fn::GetAtt' => value}}]
548
658
  end
549
659
  ]
550
- if(hash['Outputs'])
551
- hash['Outputs'].merge!(outputs_hash)
660
+ if(compile.outputs)
661
+ compile._merge(SparkleStruct.new(outputs_hash))
662
+ else
663
+ compile.outputs output_hash
664
+ end
665
+ end
666
+ compile.dump!
667
+ end
668
+
669
+ # @return [Smash<output_name:SparkleFormation>]
670
+ def collect_outputs(*args)
671
+ if(args.include?(:force) || root?)
672
+ if(compile.outputs)
673
+ outputs = Smash[
674
+ compile.outputs.keys!.zip(
675
+ [self] * compile.outputs.keys!.size
676
+ )
677
+ ]
552
678
  else
553
- hash['Outputs'] = outputs_hash
679
+ outputs = Smash.new
554
680
  end
681
+ nested_stacks.each do |nested_stack|
682
+ outputs = nested_stack.collect_outputs(:force).merge(outputs)
683
+ end
684
+ outputs
685
+ else
686
+ root.collect_outputs(:force)
555
687
  end
556
- hash
557
688
  end
558
689
 
559
690
  # Extract parameters from nested stacks. Check for previous nested
@@ -571,10 +702,9 @@ class SparkleFormation
571
702
  # @note if parameter has includes `StackUnique` a new parameter will
572
703
  # be added to container stack and it will not use outputs
573
704
  def remap_nested_parameters(template, parameters, stack_name, stack_resource, output_map)
574
- stack_parameters = stack_resource['Properties']['Stack']['Parameters']
575
- if(stack_parameters)
576
- template['Resources'][stack_name]['Properties']['Parameters'] ||= {}
577
- stack_parameters.each do |pname, pval|
705
+ stack_parameters = stack_resource.properties.stack.compile.parameters
706
+ unless(stack_parameters.nil?)
707
+ stack_parameters._dump.each do |pname, pval|
578
708
  if(pval['StackUnique'])
579
709
  check_name = [stack_name, pname].join
580
710
  else
@@ -586,24 +716,22 @@ class SparkleFormation
586
716
  else
587
717
  new_val = {'Ref' => check_name}
588
718
  end
589
- template['Resources'][stack_name]['Properties']['Parameters'][pname] = new_val
719
+ template.resources.set!(stack_name).properties.parameters.set!(pname, new_val)
590
720
  elsif(output_map[check_name])
591
- template['Resources'][stack_name]['Properties']['Parameters'][pname] = {
592
- 'Fn::GetAtt' => output_map[check_name]
593
- }
721
+ template.resources.set!(stack_name).properties.parameters.set!(pname, 'Fn::GetAtt' => output_map[check_name])
594
722
  else
595
723
  if(pval['Type'] == 'CommaDelimitedList')
596
724
  new_val = {'Fn::Join' => [',', {'Ref' => check_name}]}
597
725
  else
598
726
  new_val = {'Ref' => check_name}
599
727
  end
600
- template['Resources'][stack_name]['Properties']['Parameters'][pname] = new_val
728
+ template.resources.set!(stack_name).properties.parameters.set!(pname, new_val)
601
729
  parameters[check_name] = pval
602
730
  end
603
731
  end
604
732
  end
605
- if(stack_resource['Properties']['Stack']['Outputs'])
606
- stack_resource['Properties']['Stack']['Outputs'].keys.each do |oname|
733
+ unless(stack_resource.properties.stack.compile.outputs.nil?)
734
+ stack_resource.properties.stack.compile.outputs.keys!.each do |oname|
607
735
  output_map[oname] = [stack_name, "Outputs.#{oname}"]
608
736
  end
609
737
  end
@@ -616,8 +744,8 @@ class SparkleFormation
616
744
  end
617
745
 
618
746
  # @return [String] dumped hash JSON
619
- def to_json
620
- MultiJson.dump(compile.dump!)
747
+ def to_json(*args)
748
+ MultiJson.dump(compile.dump!, *args)
621
749
  end
622
750
 
623
751
  end