sparkle_formation 2.1.8 → 3.0.0

Sign up to get free protection for your applications and to get access to all the features.
@@ -26,7 +26,7 @@ class SparkleFormation
26
26
  # @yieldparam resource [AttributeStruct] the stack resource
27
27
  # @yieldparam s_name [String] stack resource name
28
28
  # @yieldreturn [Hash] key/values to be merged into resource properties
29
- # @return [Hash] dumped stack
29
+ # @return [SparkleFormation::SparkleStruct] compiled structure
30
30
  def apply_deep_nesting(*args, &block)
31
31
  outputs = collect_outputs
32
32
  nested_stacks(:with_resource).each do |stack, resource|
@@ -44,7 +44,7 @@ class SparkleFormation
44
44
  if(block_given?)
45
45
  extract_templates(&block)
46
46
  end
47
- compile.dump!
47
+ compile
48
48
  end
49
49
 
50
50
  # Apply shallow nesting. This style of nesting will bubble
@@ -55,7 +55,7 @@ class SparkleFormation
55
55
  # @yieldparam resource_name [String] name of stack resource
56
56
  # @yieldparam stack [SparkleFormation] nested stack
57
57
  # @yieldreturn [String] Remote URL storage for template
58
- # @return [Hash]
58
+ # @return [SparkleFormation::SparkleStruct] compiled structure
59
59
  def apply_shallow_nesting(*args, &block)
60
60
  parameters = compile.parameters
61
61
  output_map = {}
@@ -68,7 +68,7 @@ class SparkleFormation
68
68
  compile.outputs._set(o_name).value compile._stack_output(*o_val)
69
69
  end
70
70
  end
71
- compile.dump!
71
+ compile
72
72
  end
73
73
 
74
74
  # Extract output to make available for stack parameter usage at the
@@ -0,0 +1,200 @@
1
require 'sparkle_formation'

class SparkleFormation
  module Provider
    # Google specific implementation
    module Google

      # Always return as nested since nesting is our final form
      def nested?(*_)
        true
      end

      # Extract nested stack templates and store in root level files
      #
      # @param template_hash [Hash] template hash to process
      # @param dump_copy [Smash] translated dump
      # @param parent_names [Array<String>] name of parent resources
      # @return [Smash] dump_copy
      def google_template_extractor(template_hash, dump_copy, parent_names=[])
        template_hash.fetch('resources', []).each do |t_resource|
          if(t_resource['type'] == stack_resource_type)
            full_names = parent_names + [t_resource['name']]
            stack = t_resource['properties'].delete('stack')
            # Drop the properties hash entirely once the stack entry is removed
            # so empty property containers are not emitted in the dump
            if(t_resource['properties'].empty?)
              t_resource.delete('properties')
            end
            # Recurse first so grandchild stacks are extracted before this one
            google_template_extractor(stack, dump_copy, full_names)
            new_type = generate_template_files(full_names.join('-'), stack, dump_copy)
            t_resource['type'] = new_type
          end
        end
        dump_copy
      end

      # Sets stack template files into target copy and extracts parameters
      # into schema files if available
      #
      # @param r_name [String] name used for template file name
      # @param r_stack [Hash] template to store
      # @param dump_copy [Smash] translated dump
      # @return [String] new type for stack (the generated jinja file name)
      def generate_template_files(r_name, r_stack, dump_copy)
        f_name = "#{r_name}.jinja"
        r_parameters = r_stack.delete('parameters')
        dump_copy[:imports].push(
          Smash.new(
            :name => f_name,
            :content => r_stack
          )
        )
        # Parameters, when present, are exposed via a companion schema file
        if(r_parameters)
          dump_copy[:imports].push(
            Smash.new(
              :name => "#{f_name}.schema",
              :content => Smash.new.tap{|schema|
                schema.set(:info, :title, "#{f_name} template")
                schema.set(:info, :description, "#{f_name} template schema")
                schema.set(:properties, r_parameters)
              }
            )
          )
        end
        f_name
      end

      # Customized dump to break out templates into consumable structures for
      # passing to the deployment manager API
      #
      # @return [Hash]
      def google_dump
        result = non_google_dump
        if(root?)
          dump_copy = Smash.new(:imports => [])
          google_template_extractor(result, dump_copy)
          dump_copy.set(:config, :content, result)
          dump_copy.set(:config, :content, :imports,
            dump_copy[:imports].map{|i| i[:name]}
          )
          dump_copy.to_hash
        else
          result
        end
      end

      # Properly remap dumping methods
      def self.included(klass)
        klass.class_eval do
          alias_method :non_google_dump, :dump
          alias_method :dump, :google_dump
        end
      end

      # Properly remap dumping methods
      def self.extended(klass)
        klass.instance_eval do
          alias :non_google_dump :dump
          alias :dump :google_dump
        end
      end

      # @return [String] Type string for Google Deployment Manager stack resource
      # @note Nested templates aren't defined as a specific type thus no "real"
      #   type exists. So we'll create a custom one!
      def stack_resource_type
        'sparkleformation.stack'
      end

      # Generate policy for stack
      #
      # @return [Hash]
      def generate_policy
        {}
      end

      # Apply deeply nested stacks. This is the new nesting approach and
      # does not bubble parameters up to the root stack. Parameters are
      # isolated to the stack resource itself and output mapping is
      # automatically applied.
      #
      # @yieldparam stack [SparkleFormation] stack instance
      # @yieldparam resource [AttributeStruct] the stack resource
      # @yieldparam s_name [String] stack resource name
      # @yieldreturn [Hash] key/values to be merged into resource properties
      # @return [SparkleFormation::SparkleStruct] compiled structure
      def apply_deep_nesting(*args, &block)
        outputs = collect_outputs
        nested_stacks(:with_resource).each do |stack, resource|
          unless(stack.nested_stacks.empty?)
            stack.apply_deep_nesting(*args)
          end
          stack.compile.parameters.keys!.each do |parameter_name|
            if(output_name = output_matched?(parameter_name, outputs.keys))
              next if outputs[output_name] == stack
              stack_output = stack.make_output_available(output_name, outputs, self)
              # NOTE: Only set value if not already explicitly set
              if(resource.properties._set(parameter_name).nil?)
                resource.properties._set(parameter_name, stack_output)
              end
            end
          end
        end
        if(block_given?)
          extract_templates(&block)
        end
        self
      end

      # Forcibly disable shallow nesting as support for it with Google templates doesn't
      # really make much sense.
      def apply_shallow_nesting(*args, &block)
        raise NotImplementedError.new 'Shallow nesting is not supported for this provider!'
      end

      # Extract output to make available for stack parameter usage at the
      # current depth
      #
      # @param output_name [String] name of output
      # @param outputs [Hash] listing of outputs
      # @param source_stack [SparkleFormation] requesting stack
      # @return [Hash] reference to output value (used for setting parameter)
      def make_output_available(output_name, outputs, source_stack)
        bubble_path = outputs[output_name].root_path - root_path
        drip_path = root_path - outputs[output_name].root_path
        bubble_path.each_slice(2) do |base_sparkle, ref_sparkle|
          next unless ref_sparkle
          base_sparkle.compile.outputs._set(output_name)._set(
            :value, base_sparkle.compile._stack_output(
              ref_sparkle.name, output_name
            )
          )
        end
        if(bubble_path.empty?)
          if(drip_path.size == 1)
            parent = drip_path.first.parent
            if(parent && !parent.compile.parameters._set(output_name).nil?)
              return compile.parameter!(output_name)
            end
          end
          raise ArgumentError.new "Failed to detect available bubbling path for output `#{output_name}`. " <<
            'This may be due to a circular dependency! ' <<
            "(Output Path: #{outputs[output_name].root_path.map(&:name).join(' > ')} " <<
            "Requester Path: #{root_path.map(&:name).join(' > ')})"
        end
        result = source_stack.compile._stack_output(bubble_path.first.name, output_name)
        if(drip_path.size > 1)
          parent = drip_path.first.parent
          drip_path.unshift(parent) if parent
          drip_path.each_slice(2) do |base_sparkle, ref_sparkle|
            next unless ref_sparkle
            base_sparkle.compile.resources[ref_sparkle.name].properties.parameters.value._set(output_name, result)
            ref_sparkle.compile.parameters._set(output_name).type 'string' # TODO: <<<<------ type check and prop
            result = compile._parameter(output_name)
          end
        end
        result
      end

    end
  end
end
@@ -26,7 +26,7 @@ class SparkleFormation
26
26
  # @yieldparam resource [AttributeStruct] the stack resource
27
27
  # @yieldparam s_name [String] stack resource name
28
28
  # @yieldreturn [Hash] key/values to be merged into resource properties
29
- # @return [Hash] dumped stack
29
+ # @return [SparkleFormation::SparkleStruct] compiled structure
30
30
  def apply_deep_nesting(*args, &block)
31
31
  outputs = collect_outputs
32
32
  nested_stacks(:with_resource).each do |stack, resource|
@@ -44,7 +44,7 @@ class SparkleFormation
44
44
  if(block_given?)
45
45
  extract_templates(&block)
46
46
  end
47
- compile.dump!
47
+ compile
48
48
  end
49
49
 
50
50
  # Apply shallow nesting. This style of nesting will bubble
@@ -55,7 +55,7 @@ class SparkleFormation
55
55
  # @yieldparam resource_name [String] name of stack resource
56
56
  # @yieldparam stack [SparkleFormation] nested stack
57
57
  # @yieldreturn [String] Remote URL storage for template
58
- # @return [Hash]
58
+ # @return [SparkleFormation::SparkleStruct] compiled structure
59
59
  def apply_shallow_nesting(*args, &block)
60
60
  parameters = compile.parameters
61
61
  output_map = {}
@@ -68,7 +68,7 @@ class SparkleFormation
68
68
  compile.outputs._set(o_name).value compile._stack_output(*o_val)
69
69
  end
70
70
  end
71
- compile.dump!
71
+ compile
72
72
  end
73
73
 
74
74
  # Extract output to make available for stack parameter usage at the
@@ -6,6 +6,7 @@ class SparkleFormation
6
6
 
7
7
  autoload :Aws, 'sparkle_formation/resources/aws'
8
8
  autoload :Azure, 'sparkle_formation/resources/azure'
9
+ autoload :Google, 'sparkle_formation/resources/google'
9
10
  autoload :Heat, 'sparkle_formation/resources/heat'
10
11
  autoload :Rackspace, 'sparkle_formation/resources/rackspace'
11
12
 
@@ -13,6 +14,64 @@ class SparkleFormation
13
14
  RESOURCE_TYPE_TR = '_'
14
15
  # String to split for resource namespacing
15
16
  RESOURCE_TYPE_NAMESPACE_SPLITTER = '::'
17
+ # Property update conditionals
18
+ # Format: Smash.new(RESOURCE_TYPE => {PROPERTY_NAME => [PropertyConditional]})
19
+ PROPERTY_UPDATE_CONDITIONALS = Smash.new
20
+
21
+ # Defines a resource type
22
+ #
23
+ # @param name [String] name of resource type
24
+ # @param properties [Array<Property>] resource properties
25
+ # @param raw [Hash] raw resource information
26
+ Resource = Struct.new(:name, :properties, :raw) do
27
+ # Get property by name
28
+ #
29
+ # @param name [String] name of property
30
+ # @return [Property, NilClass]
31
+ def property(name)
32
+ properties.detect do |prop|
33
+ prop.name == name
34
+ end
35
+ end
36
+ end
37
+
38
+ # Defines conditional result for cause of property update
39
+ #
40
+ # @param update_causes [String] one of: 'replacement', 'interrupt', 'unknown', 'none'
41
+ # @param conditional [Proc, TrueClass] condition logic. passed two values: Hash of resource "final" state and
42
+ # Hash of resource "original" state
43
+ UpdateCausesConditional = Struct.new(:update_causes, :conditional)
44
+
45
+ # Defines a resource property
46
+ #
47
+ # @param name [String] property name
48
+ # @param description [String] property description
49
+ # @param type [String] property data type
50
+ # @param required [TrueClass, FalseClass] property is required
51
+ # @param update_causes [String] one of: 'replacement', 'interrupt', 'unknown', 'none'
52
+ # @param conditionals [Array<UpdateCausesConditional>] conditionals for update causes
53
+ Property = Struct.new(:name, :description, :type, :required, :update_causes, :conditionals) do
54
+ # Determine result of property update
55
+ #
56
+ # @param final_resource [Hash] desired resource structure containing this property
57
+ # @return ['replacement', 'interrupt', 'unknown', 'none']
58
+ def update_causes(final_resource=nil, original_resource=nil)
59
+ if(conditionals && final_resource)
60
+ final_resource = final_resource.to_smash
61
+ original_resource = original_resource.to_smash
62
+ result = conditionals.detect do |p_cond|
63
+ p_cond == true || p_cond.conditional.call(final_resource, original_resource)
64
+ end
65
+ if(result)
66
+ result.update_causes
67
+ else
68
+ 'unknown'
69
+ end
70
+ else
71
+ self[:update_causes]
72
+ end
73
+ end
74
+ end
16
75
 
17
76
  class << self
18
77
 
@@ -80,27 +139,31 @@ class SparkleFormation
80
139
  # @param key [String, Symbol]
81
140
  # @return [String, NilClass]
82
141
  def registry_key(key)
83
- o_key = key
84
- key = key.to_s.tr(self.const_get(:RESOURCE_TYPE_TR), '') # rubocop:disable Style/RedundantSelf
85
- snake_parts = nil
86
- result = @@registry[base_key].keys.detect do |ref|
87
- ref = ref.downcase
88
- snake_parts = ref.split(resource_type_splitter)
89
- until(snake_parts.empty?)
90
- break if snake_parts.join('') == key
91
- snake_parts.shift
92
- end
93
- !snake_parts.empty?
94
- end
95
- if(result)
96
- collisions = @@registry[base_key].keys.find_all do |ref|
97
- split_ref = ref.downcase.split(resource_type_splitter)
98
- ref = split_ref.slice(split_ref.size - snake_parts.size, split_ref.size).join('')
99
- key == ref
142
+ if(registry[key])
143
+ result = key
144
+ else
145
+ o_key = key
146
+ key = key.to_s.tr(self.const_get(:RESOURCE_TYPE_TR), '') # rubocop:disable Style/RedundantSelf
147
+ snake_parts = nil
148
+ result = @@registry[base_key].keys.detect do |ref|
149
+ ref = ref.downcase
150
+ snake_parts = ref.split(resource_type_splitter)
151
+ until(snake_parts.empty?)
152
+ break if snake_parts.join('') == key
153
+ snake_parts.shift
154
+ end
155
+ !snake_parts.empty?
100
156
  end
101
- if(collisions.size > 1)
102
- raise ArgumentError.new 'Ambiguous dynamic name returned multiple matches! ' \
103
- "`#{o_key.inspect}` -> #{collisions.sort.join(', ')}"
157
+ if(result)
158
+ collisions = @@registry[base_key].keys.find_all do |ref|
159
+ split_ref = ref.downcase.split(resource_type_splitter)
160
+ ref = split_ref.slice(split_ref.size - snake_parts.size, split_ref.size).join('')
161
+ key == ref
162
+ end
163
+ if(collisions.size > 1)
164
+ raise ArgumentError.new 'Ambiguous dynamic name returned multiple matches! ' \
165
+ "`#{o_key.inspect}` -> #{collisions.sort.join(', ')}"
166
+ end
104
167
  end
105
168
  end
106
169
  result
@@ -121,7 +184,7 @@ class SparkleFormation
121
184
  # @param key [String, Symbol]
122
185
  # @return [Hashish, NilClass]
123
186
  def lookup(key)
124
- @@registry[base_key][registry_key(key)]
187
+ @@registry[base_key][key] || @@registry[base_key][registry_key(key)]
125
188
  end
126
189
 
127
190
  # @return [Hashish] currently loaded AWS registry
@@ -142,6 +205,28 @@ class SparkleFormation
142
205
  struct
143
206
  end
144
207
 
208
+ # Information about specific resource type
209
+ #
210
+ # @param type [String] resource type
211
+ # @return [Resource]
212
+ def resource_lookup(type)
213
+ result = registry[type]
214
+ if(result)
215
+ properties = result.fetch('full_properties', {}).map do |p_name, p_info|
216
+ Property.new(p_name,
217
+ p_info[:description],
218
+ p_info[:type],
219
+ p_info[:required],
220
+ p_info[:update_causes],
221
+ self.const_get(:PROPERTY_UPDATE_CONDITIONALS).get(type, p_name)
222
+ )
223
+ end
224
+ Resource.new(type, properties, result)
225
+ else
226
+ raise KeyError.new "Failed to locate requested resource type: `#{type}`"
227
+ end
228
+ end
229
+
145
230
  end
146
231
  end
147
232
  end
@@ -8,6 +8,258 @@ class SparkleFormation
8
8
  # AWS specific resources collection
9
9
  class Aws < Resources
10
10
 
11
+ # Conditionals for property updates
12
+ PROPERTY_UPDATE_CONDITIONALS = Smash.new(
13
+ 'AWS::DynamoDB::Table' => {
14
+ 'GlobalSecondaryIndexes' => [
15
+ # Updates not really supported here. Set as unknown to
16
+ # prompt user to investigate
17
+ UpdateCausesConditional.new('unknown', true)
18
+ ]
19
+ },
20
+ 'AWS::EC2::EIPAssociation' => {
21
+ 'AllocationId' => [
22
+ UpdateCausesConditional.new('replacement',
23
+ lambda{|final, original|
24
+ original.get('Properties', 'InstanceId') != final.get('Properties', 'InstanceId') ||
25
+ original.get('Properties', 'NetworkInterfaceId') != final.get('Properties', 'NetworkInterfaceId')
26
+ }
27
+ ),
28
+ UpdateCausesConditional.new('none', true)
29
+ ],
30
+ 'EIP' => [
31
+ UpdateCausesConditional.new('replacement',
32
+ lambda{|final, original|
33
+ original.get('Properties', 'InstanceId') != final.get('Properties', 'InstanceId') ||
34
+ original.get('Properties', 'NetworkInterfaceId') != final.get('Properties', 'NetworkInterfaceId')
35
+ }
36
+ ),
37
+ UpdateCausesConditional.new('none', true)
38
+ ],
39
+ 'InstanceId' => [
40
+ UpdateCausesConditional.new('replacement',
41
+ lambda{|final, original|
42
+ original.get('Properties', 'AllocationId') != final.get('Properties', 'AllocationId') ||
43
+ original.get('Properties', 'EIP') != final.get('Properties', 'EIP')
44
+ }
45
+ ),
46
+ UpdateCausesConditional.new('none', true)
47
+ ],
48
+ 'NetworkInterfaceId' => [
49
+ UpdateCausesConditional.new('replacement',
50
+ lambda{|final, original|
51
+ original.get('Properties', 'AllocationId') != final.get('Properties', 'AllocationId') ||
52
+ original.get('Properties', 'EIP') != final.get('Properties', 'EIP')
53
+ }
54
+ ),
55
+ UpdateCausesConditional.new('none', true)
56
+ ]
57
+ },
58
+ 'AWS::EC2::Instance' => {
59
+ 'AdditionalInfo' => [
60
+ UpdateCausesConditional.new('unknown', true) # EBS AMI dependent
61
+ ],
62
+ 'BlockDeviceMappings' => [
63
+ UpdateCausesConditional.new('replacement',
64
+ lambda{|final, original|
65
+ f_maps = final.fetch('Properties', 'BlockDeviceMappings', [])
66
+ o_maps = original.fetch('Properties', 'BlockDeviceMappings', [])
67
+ f_maps.map! do |m|
68
+ m.delete('DeleteOnTermination')
69
+ m.to_smash(:sorted)
70
+ end
71
+ o_maps.map! do |m|
72
+ m.delete('DeleteOnTermination')
73
+ m.to_smash(:sorted)
74
+ end
75
+ f_maps.size != o_maps.size ||
76
+ !f_maps.all?{|m| o_maps.include?(m)}
77
+ }
78
+ ),
79
+ UpdateCausesConditional.new('none', true)
80
+ ],
81
+ 'EbsOptimized' => [
82
+ UpdateCausesConditional.new('unknown', true) # EBS AMI dependent
83
+ ],
84
+ 'InstanceType' => [
85
+ UpdateCausesConditional.new('unknown', true) # EBS AMI dependent
86
+ ],
87
+ 'KernelId' => [
88
+ UpdateCausesConditional.new('unknown', true) # EBS AMI dependent
89
+ ],
90
+ 'RamdiskId' => [
91
+ UpdateCausesConditional.new('unknown', true) # EBS AMI dependent
92
+ ],
93
+ 'SecurityGroupIds' => [
94
+ UpdateCausesConditional.new('none',
95
+ lambda{|final, _orig|
96
+ final.get('Properties', 'SubnetId') ||
97
+ final.fetch('Properties', 'NetworkInterface', {}).values.include?('SubnetId')
98
+ }
99
+ ),
100
+ UpdateCausesConditional.new('replacement', true)
101
+ ],
102
+ 'UserData' => [
103
+ UpdateCausesConditional.new('unknown', true) # EBS AMI dependent
104
+ ]
105
+ },
106
+ 'AWS::EC2::NetworkInterface' => {
107
+ 'PrivateIpAddresses' => [
108
+ UpdateCausesConditional.new('replacement',
109
+ lambda{|final, original|
110
+ f_primary = final.fetch('Properties', 'PrivateIpAddresses', []).detect do |addr|
111
+ addr['Primary']
112
+ end || Smash.new
113
+ o_primary = original.fetch('Properties', 'PrivateIpAddresses', []).detect do |addr|
114
+ addr['Primary']
115
+ end || Smash.new
116
+ f_primary.to_smash(:sorted) != o_primary.to_smash(:sorted)
117
+ }
118
+ ),
119
+ UpdateCausesConditional.new('none', true)
120
+ ]
121
+ },
122
+ 'AWS::ElastiCache::CacheCluster' => {
123
+ 'NumCacheNodes' => [
124
+ UpdateCausesConditional.new('replacement',
125
+ lambda{|final, original|
126
+ [
127
+ final.get('Properties', 'PreferredAvailabilityZone'),
128
+ final.get('Properties', 'PreferredAvailabilityZones'),
129
+ original.get('Properties', 'PreferredAvailabilityZone'),
130
+ original.get('Properties', 'PreferredAvailabilityZones')
131
+ ].all?{|i| i.nil? || i.empty? }
132
+ }
133
+ ),
134
+ UpdateCausesConditional.new('none', true)
135
+ ],
136
+ 'PreferredAvailabilityZones' => [
137
+ UpdateCausesConditional.new('interrupt',
138
+ lambda{|final, original|
139
+ original.get('Properties', 'PreferredAvailabilityZones') ||
140
+ final.fetch('Properties', 'PreferredAvailabilityZones', []).include?(
141
+ original.get('Properties', 'PreferredAvailabilityZone')
142
+ )
143
+ }
144
+ ),
145
+ UpdateCausesConditional.new('replacement', true)
146
+ ]
147
+ },
148
+ 'AWS::ElasticLoadBalancing::LoadBalancer' => {
149
+ 'AvailabilityZones' => [
150
+ UpdateCausesConditional.new('replacement',
151
+ lambda{|final, original|
152
+ original.fetch('Properties', 'AvailabilityZones', []).empty? ||
153
+ final.fetch('Properties', 'AvailabilityZones', []).empty?
154
+ }
155
+ ),
156
+ UpdateCausesConditional.new('none', true)
157
+ ],
158
+ 'HealthCheck' => [
159
+ UpdateCausesConditional.new('replacement',
160
+ lambda{|final, original|
161
+ original.fetch('Properties', 'HealthCheck', {}).empty? ||
162
+ final.fetch('Properties', 'HealthCheck', {}).empty?
163
+ }
164
+ ),
165
+ UpdateCausesConditional.new('none', true)
166
+ ],
167
+ 'Subnets' => [
168
+ UpdateCausesConditional.new('replacement',
169
+ lambda{|final, original|
170
+ original.fetch('Properties', 'Subnets', []).empty? ||
171
+ final.fetch('Properties', 'Subnets', []).empty?
172
+ }
173
+ ),
174
+ UpdateCausesConditional.new('none', true)
175
+ ]
176
+ },
177
+ 'AWS::RDS::DBCluster' => {
178
+ 'BackupRetentionPeriod' => [
179
+ UpdateCausesConditional.new('interrupt',
180
+ lambda{|final, original|
181
+ fp = final.get('Properties', 'BackupRetentionPeriod').to_i
182
+ op = original.get('Properties', 'BackupRetentionPeriod').to_i
183
+ (fp == 0 && op != 0) ||
184
+ (op == 0 && fp != 0)
185
+ }
186
+ ),
187
+ UpdateCausesConditional.new('none', true)
188
+ ],
189
+ 'PreferredMaintenanceWindow' => [
190
+ # can interrupt if apply immediately is set on api call but
191
+ # no way to know
192
+ UpdateCausesConditional.new('unknown', true)
193
+ ]
194
+ },
195
+ 'AWS::RDS::DBClusterParameterGroup' => {
196
+ 'Parameters' => [
197
+ # dependent on what parameters have been changed. doesn't
198
+ # look like parameter modifications are applied immediately?
199
+ # set as unknown for safety
200
+ UpdateCausesConditional.new('unknown', true)
201
+ ]
202
+ },
203
+ 'AWS::RDS::DBInstance' => {
204
+ 'AutoMinorVersionUpgrade' => [
205
+ # can cause interrupts based on future actions (enables
206
+ # auto patching) so leave as unknown for safety
207
+ UpdateCausesConditional.new('unknown', true)
208
+ ],
209
+ 'BackupRetentionPeriod' => [
210
+ UpdateCausesConditional.new('interrupt',
211
+ lambda{|final, original|
212
+ fp = final.get('Properties', 'BackupRetentionPeriod').to_i
213
+ op = original.get('Properties', 'BackupRetentionPeriod').to_i
214
+ (fp == 0 && op != 0) ||
215
+ (op == 0 && fp != 0)
216
+ }
217
+ ),
218
+ UpdateCausesConditional.new('none', true)
219
+ ],
220
+ 'DBParameterGroupName' => [
221
+ # changes are not applied until reboot, but it could
222
+ # still be considered an interrupt? setting as unknown
223
+ # for safety
224
+ UpdateCausesConditional.new('unknown', true)
225
+ ],
226
+ 'PreferredMaintenanceWindow' => [
227
+ # can interrupt if apply immediately is set on api call but
228
+ # no way to know
229
+ UpdateCausesConditional.new('unknown', true)
230
+ ]
231
+ },
232
+ 'AWS::RDS::DBParameterGroup' => {
233
+ 'Parameters' => [
234
+ # dependent on what parameters have been changed. doesn't
235
+ # look like parameter modifications are applied immediately?
236
+ # set as unknown for safety
237
+ UpdateCausesConditional.new('unknown', true)
238
+ ]
239
+ },
240
+ 'AWS::RDS::EventSubscription' => {
241
+ 'SourceType' => [
242
+ UpdateCausesConditional.new('replacement',
243
+ lambda{|final, original|
244
+ !final.get('Properties', 'SourceType')
245
+ }
246
+ ),
247
+ UpdateCausesConditional.new('none', true)
248
+ ]
249
+ },
250
+ 'AWS::Route53::HostedZone' => {
251
+ 'VPCs' => [
252
+ UpdateCausesConditional.new('replacement',
253
+ lambda{|final, original|
254
+ !final.get('Properties', 'VPCs') ||
255
+ !original.get('Properties', 'VPCs')
256
+ }
257
+ ),
258
+ UpdateCausesConditional.new('none', true)
259
+ ]
260
+ }
261
+ )
262
+
11
263
  class << self
12
264
 
13
265
  include Bogo::Memoization