sparkle_formation 3.0.30 → 3.0.32
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +3 -0
- data/lib/sparkle_formation.rb +23 -23
- data/lib/sparkle_formation/aws.rb +1 -1
- data/lib/sparkle_formation/composition.rb +7 -7
- data/lib/sparkle_formation/error.rb +2 -2
- data/lib/sparkle_formation/function_struct.rb +22 -22
- data/lib/sparkle_formation/provider.rb +7 -7
- data/lib/sparkle_formation/provider/aws.rb +28 -28
- data/lib/sparkle_formation/provider/azure.rb +7 -7
- data/lib/sparkle_formation/provider/google.rb +16 -16
- data/lib/sparkle_formation/provider/heat.rb +6 -6
- data/lib/sparkle_formation/provider/terraform.rb +7 -7
- data/lib/sparkle_formation/resources.rb +18 -18
- data/lib/sparkle_formation/resources/aws.rb +216 -126
- data/lib/sparkle_formation/resources/aws_resources.json +3463 -1601
- data/lib/sparkle_formation/resources/azure.rb +6 -6
- data/lib/sparkle_formation/resources/google.rb +7 -7
- data/lib/sparkle_formation/resources/heat.rb +2 -2
- data/lib/sparkle_formation/resources/rackspace.rb +2 -2
- data/lib/sparkle_formation/resources/terraform.rb +6 -6
- data/lib/sparkle_formation/sparkle.rb +32 -32
- data/lib/sparkle_formation/sparkle_attribute.rb +10 -10
- data/lib/sparkle_formation/sparkle_attribute/aws.rb +30 -30
- data/lib/sparkle_formation/sparkle_attribute/azure.rb +39 -39
- data/lib/sparkle_formation/sparkle_attribute/google.rb +19 -19
- data/lib/sparkle_formation/sparkle_attribute/heat.rb +16 -16
- data/lib/sparkle_formation/sparkle_attribute/rackspace.rb +1 -1
- data/lib/sparkle_formation/sparkle_attribute/terraform.rb +41 -41
- data/lib/sparkle_formation/sparkle_collection.rb +4 -4
- data/lib/sparkle_formation/sparkle_collection/rainbow.rb +3 -3
- data/lib/sparkle_formation/sparkle_formation.rb +31 -31
- data/lib/sparkle_formation/sparkle_struct.rb +5 -5
- data/lib/sparkle_formation/translation.rb +32 -32
- data/lib/sparkle_formation/translation/heat.rb +126 -126
- data/lib/sparkle_formation/translation/rackspace.rb +118 -118
- data/lib/sparkle_formation/utils.rb +5 -5
- data/lib/sparkle_formation/version.rb +1 -1
- data/sparkle_formation.gemspec +1 -1
- metadata +7 -7
data/lib/sparkle_formation/sparkle_collection.rb

@@ -1,11 +1,11 @@
-require 'sparkle_formation'
+require "sparkle_formation"
 
 class SparkleFormation
   # Provides a collection of sparkles
   # @todo add unmemoize behavior on collection modification to prevent
   #   leak on long running processes with long lasting collections
   class SparkleCollection < Sparkle
-    autoload :Rainbow, 'sparkle_formation/sparkle_collection/rainbow'
+    autoload :Rainbow, "sparkle_formation/sparkle_collection/rainbow"
 
     # @return [Symbol] provider
     attr_accessor :provider
@@ -16,7 +16,7 @@ class SparkleFormation
     # @option args [Symbol, String] :provider name of default provider
     # @return [self]
     def initialize(args = {})
-      @provider = Bogo::Utility.snake(args.to_smash.fetch(:provider, 'aws')).to_sym
+      @provider = Bogo::Utility.snake(args.to_smash.fetch(:provider, "aws")).to_sym
       @root = nil
       @sparkles = []
     end
@@ -164,7 +164,7 @@ class SparkleFormation
         target_provider = provider
       end
       result = send(type_name).get(target_provider, name)
-      if result.nil? && type_name == 'templates'
+      if result.nil? && type_name == "templates"
         t_direct = sparkles.map do |pack|
           begin
             pack.get(:template, name, target_provider)
data/lib/sparkle_formation/sparkle_collection/rainbow.rb

@@ -1,5 +1,5 @@
-require 'sparkle_formation'
-require 'forwardable'
+require "sparkle_formation"
+require "forwardable"
 
 class SparkleFormation
   class SparkleCollection
@@ -73,7 +73,7 @@ class SparkleFormation
       def monochrome
         Array.new.tap do |result|
           spectrum.each do |item|
-            unless item.get(:args, :layering).to_s == 'merge'
+            unless item.get(:args, :layering).to_s == "merge"
               result.clear
             end
             result << item
data/lib/sparkle_formation/sparkle_formation.rb

@@ -1,4 +1,4 @@
-require 'sparkle_formation'
+require "sparkle_formation"
 
 # Formation container
 class SparkleFormation
@@ -9,13 +9,13 @@ class SparkleFormation
 
   # @return [Array<String>] directory names to ignore
   IGNORE_DIRECTORIES = [
-    'components',
-    'dynamics',
-    'registry',
+    "components",
+    "dynamics",
+    "registry",
   ]
 
   # @return [String] default stack resource name
-  DEFAULT_STACK_RESOURCE = 'AWS::CloudFormation::Stack'
+  DEFAULT_STACK_RESOURCE = "AWS::CloudFormation::Stack"
   # @return [Array<String>] collection of valid stack resource types
   VALID_STACK_RESOURCES = [DEFAULT_STACK_RESOURCE]
 
@@ -40,9 +40,9 @@ class SparkleFormation
     def sparkle_path=(path = nil)
       if path
         custom_paths[:sparkle_path] = path
-        custom_paths[:components_directory] = File.join(path, 'components')
-        custom_paths[:dynamics_directory] = File.join(path, 'dynamics')
-        custom_paths[:registry_directory] = File.join(path, 'registry')
+        custom_paths[:components_directory] = File.join(path, "components")
+        custom_paths[:dynamics_directory] = File.join(path, "dynamics")
+        custom_paths[:registry_directory] = File.join(path, "registry")
       end
       custom_paths[:sparkle_path]
     end
@@ -141,7 +141,7 @@ class SparkleFormation
     # @return [TrueClass]
     def load_dynamics!(directory)
       @loaded_dynamics ||= []
-      Dir.glob(File.join(directory, '*.rb')).each do |dyn|
+      Dir.glob(File.join(directory, "*.rb")).each do |dyn|
         dyn = File.expand_path(dyn)
         next if @loaded_dynamics.include?(dyn)
         instance_eval(IO.read(dyn), dyn, 1)
@@ -156,7 +156,7 @@ class SparkleFormation
     # @param directory [String]
     # @return [TrueClass]
     def load_registry!(directory)
-      Dir.glob(File.join(directory, '*.rb')).each do |reg|
+      Dir.glob(File.join(directory, "*.rb")).each do |reg|
         reg = File.expand_path(reg)
         require reg
       end
@@ -240,13 +240,13 @@ class SparkleFormation
       result = builtin_insert(dynamic_name, struct, *args, &block)
       unless result
         message = "Failed to locate requested dynamic block for insertion: #{dynamic_name} " \
-          "(valid: #{struct._self.sparkle.dynamics.fetch(struct._self.sparkle.provider, {}).keys.sort.join(', ')})"
+          "(valid: #{struct._self.sparkle.dynamics.fetch(struct._self.sparkle.provider, {}).keys.sort.join(", ")})"
         if struct._self.provider_resources && struct._self.provider_resources.registry.keys.size > 1
           t_name = struct._self.provider_resources.registry.keys.first
           valid_t_name = Bogo::Utility.snake(
             t_name.split(
               struct._self.provider_resources.resource_type_splitter
-            ).join('_')
+            ).join("_")
           )
           message << "\nBuiltin dynamics pattern `#{t_name}` -> `:#{Bogo::Utility.snake(valid_t_name)}`"
         end
@@ -275,12 +275,12 @@ class SparkleFormation
         __t_stringish(item)
       end
       to_nest = struct._self.sparkle.get(:template, template, options[:provider])
-      resource_name = template.to_s.gsub('__', '_')
+      resource_name = template.to_s.gsub("__", "_")
       unless args.empty?
         resource_name = [
           options.delete(:overwrite_name) ? nil : resource_name,
-          args.map { |a| Bogo::Utility.snake(a) }.join('_'),
-        ].flatten.compact.join('_').to_sym
+          args.map { |a| Bogo::Utility.snake(a) }.join("_"),
+        ].flatten.compact.join("_").to_sym
       end
       resource_name = struct._process_key(resource_name.to_sym)
       nested_template = compile(to_nest[:path], :sparkle)
@@ -318,7 +318,7 @@ class SparkleFormation
         resource_name = [
           _name,
           _config.fetch(:resource_name_suffix, dynamic_name),
-        ].compact.join('_').to_sym
+        ].compact.join("_").to_sym
       else
         resource_name = _name._root
       end
@@ -326,9 +326,9 @@ class SparkleFormation
       new_resource = struct.resources.set!(resource_name)
       new_resource.type lookup_key
       properties = new_resource.properties
-      config_keys = _config.keys.zip(_config.keys.map { |k| snake(k).to_s.tr('_', '') })
+      config_keys = _config.keys.zip(_config.keys.map { |k| snake(k).to_s.tr("_", "") })
       struct._self.provider_resources.resource(dynamic_name, :properties).each do |prop_name|
-        key = (config_keys.detect { |k| k.last == snake(prop_name).to_s.tr('_', '') } || []).first
+        key = (config_keys.detect { |k| k.last == snake(prop_name).to_s.tr("_", "") } || []).first
         value = _config[key] if key
         if value
           if value.is_a?(Proc)
@@ -432,7 +432,7 @@ class SparkleFormation
     end
     self.provider = options.fetch(:provider, @parent ? @parent.provider : :aws)
     if provider == :aws || !options[:disable_aws_builtins]
-      require 'sparkle_formation/aws'
+      require "sparkle_formation/aws"
     end
     @parameters = set_generation_parameters!(
       options.fetch(:compile_time_parameters,
@@ -463,12 +463,12 @@ class SparkleFormation
   def seed_self
     memoize(:seed) do
       options = @seed
-      if options[:inherit] && options[:layering].to_s == 'merge'
-        raise ArgumentError.new 'Cannot merge and inherit!'
+      if options[:inherit] && options[:layering].to_s == "merge"
+        raise ArgumentError.new "Cannot merge and inherit!"
       end
       if options[:inherit]
         inherit_from(options[:inherit])
-      elsif options[:layering].to_s == 'merge'
+      elsif options[:layering].to_s == "merge"
         merge_previous!
       end
       true
@@ -521,7 +521,7 @@ class SparkleFormation
       :components => template.composition.composite,
       :overrides => composition.overrides)
     composition.components.each do |item|
-      if item.respond_to?(:key) && item.key == '__base__'
+      if item.respond_to?(:key) && item.key == "__base__"
         item.key = Smash.new(
           :template => name,
           :component => :__base__,
@@ -592,12 +592,12 @@ class SparkleFormation
 
   # Attributes allowed for generation parameter definitions
   ALLOWED_GENERATION_PARAMETERS = [
-    'type', 'default', 'description', 'multiple', 'prompt_when_nested',
-    'allowed_values', 'allowed_pattern', 'max_length', 'min_length',
-    'max_value', 'min_value',
+    "type", "default", "description", "multiple", "prompt_when_nested",
+    "allowed_values", "allowed_pattern", "max_length", "min_length",
+    "max_value", "min_value",
   ]
   # Allowed data types for parameters
-  VALID_GENERATION_PARAMETER_TYPES = ['String', 'Number', 'Complex']
+  VALID_GENERATION_PARAMETER_TYPES = ["String", "Number", "Complex"]
 
   # Get or set the compile time parameter setting block. If a get
   # request the ancestor path will be searched to root
@@ -663,7 +663,7 @@ class SparkleFormation
       if thing.is_a?(String)
         # NOTE: This needs to be deprecated and removed
         # TODO: deprecate
-        key = File.basename(thing.to_s).sub('.rb', '')
+        key = File.basename(thing.to_s).sub(".rb", "")
         composition.new_component(key, &self.class.load_component(thing))
       else
         composition.new_component(thing)
@@ -701,7 +701,7 @@ class SparkleFormation
     set_compile_time_parameters!
     if provider && SparkleStruct.const_defined?(camel(provider))
       struct_class = SparkleStruct.const_get(camel(provider))
-      struct_name = [SparkleStruct.name, camel(provider)].join('::')
+      struct_name = [SparkleStruct.name, camel(provider)].join("::")
       struct_class.define_singleton_method(:name) { struct_name }
       struct_class.define_singleton_method(:to_s) { struct_name }
     else
@@ -755,7 +755,7 @@ class SparkleFormation
   def set_compiled_state(compiled)
     storage_compile_state = Smash.new
     parameters.each do |param_key, param_config|
-      if param_config.fetch(:type, 'string').to_s.downcase.to_sym != :complex
+      if param_config.fetch(:type, "string").to_s.downcase.to_sym != :complex
         storage_compile_state[param_key] = compile_state[param_key]
       end
     end
@@ -873,7 +873,7 @@ class SparkleFormation
   # @note will auto downcase name prior to comparison
   def output_matched?(p_name, output_names)
     output_names.detect do |o_name|
-      Bogo::Utility.snake(o_name).tr('_', '') == Bogo::Utility.snake(p_name).tr('_', '')
+      Bogo::Utility.snake(o_name).tr("_", "") == Bogo::Utility.snake(p_name).tr("_", "")
     end
   end
 
data/lib/sparkle_formation/sparkle_struct.rb

@@ -1,5 +1,5 @@
-require 'sparkle_formation'
-require 'attribute_struct/monkey_camels'
+require "sparkle_formation"
+require "attribute_struct/monkey_camels"
 
 class SparkleFormation
 
@@ -74,7 +74,7 @@ class SparkleFormation
     def _self(*_)
       unless @self
         if _parent.nil?
-          ::Kernel.raise ::ArgumentError.new 'Creator did not provide return reference!'
+          ::Kernel.raise ::ArgumentError.new "Creator did not provide return reference!"
         else
           _parent._self
         end
@@ -114,13 +114,13 @@ class SparkleFormation
    # FunctionStruct
     def method_missing(sym, *args, &block)
       if sym.is_a?(::String) || sym.is_a?(::Symbol)
-        if sym.to_s.start_with?('_') || sym.to_s.end_with?('!')
+        if sym.to_s.start_with?("_") || sym.to_s.end_with?("!")
           ::Kernel.raise ::NoMethodError.new "Undefined method `#{sym}` for #{_klass.name}"
         end
       end
       super(*[sym, *args], &block)
       if sym.is_a?(::String) || sym.is_a?(::Symbol)
-        if (s = sym.to_s).end_with?('=')
+        if (s = sym.to_s).end_with?("=")
           s.slice!(-1, s.length)
           sym = s
         end
data/lib/sparkle_formation/translation.rb

@@ -1,12 +1,12 @@
-require 'sparkle_formation'
-require 'multi_json'
-require 'logger'
+require "sparkle_formation"
+require "multi_json"
+require "logger"
 
 class SparkleFormation
   # Translator
   class Translation
-    autoload :Heat, 'sparkle_formation/translation/heat'
-    autoload :Rackspace, 'sparkle_formation/translation/rackspace'
+    autoload :Heat, "sparkle_formation/translation/heat"
+    autoload :Rackspace, "sparkle_formation/translation/rackspace"
 
     include SparkleFormation::Utils::AnimalStrings
     include SparkleFormation::SparkleAttribute
@@ -41,25 +41,25 @@ class SparkleFormation
     # @return [Hash] parameters for template
     def parameters
       Hash[
-        @original.fetch('Parameters', {}).map do |k, v|
-          [k, v.fetch('Default', '')]
+        @original.fetch("Parameters", {}).map do |k, v|
+          [k, v.fetch("Default", "")]
         end
       ].merge(@parameters)
     end
 
     # @return [Hash] mappings for template
     def mappings
-      @original.fetch('Mappings', {})
+      @original.fetch("Mappings", {})
     end
 
     # @return [Hash] resources for template
     def resources
-      @original.fetch('Resources', {})
+      @original.fetch("Resources", {})
     end
 
     # @return [Hash] outputs for template
     def outputs
-      @original.fetch('Outputs', {})
+      @original.fetch("Outputs", {})
     end
 
     # @return [Hash] resource mapping
@@ -96,18 +96,18 @@ class SparkleFormation
     # @return [Hash, NilClass] new resource Hash or nil
     def resource_translation(resource_name, resource_args)
       new_resource = {}
-      lookup = map[:resources][resource_args['Type']]
+      lookup = map[:resources][resource_args["Type"]]
       if lookup.nil?
-        logger.warn "Failed to locate resource type: #{resource_args['Type']}"
+        logger.warn "Failed to locate resource type: #{resource_args["Type"]}"
         nil
       elsif lookup == :delete
         logger.warn "Deleting resource #{resource_name} due to configuration"
         nil
       else
-        new_resource['Type'] = lookup[:name]
-        if resource_args['Properties']
-          new_resource['Properties'] = format_properties(
-            :original_properties => resource_args['Properties'],
+        new_resource["Type"] = lookup[:name]
+        if resource_args["Properties"]
+          new_resource["Properties"] = format_properties(
+            :original_properties => resource_args["Properties"],
             :property_map => lookup[:properties],
             :new_resource => new_resource,
             :original_resource => resource_args,
@@ -130,7 +130,7 @@ class SparkleFormation
     # @option args [Hash] :original_resource
     # @return [Hash]
     def format_properties(args)
-      args[:new_resource]['Properties'] = {}.tap do |new_properties|
+      args[:new_resource]["Properties"] = {}.tap do |new_properties|
         args[:original_properties].each do |property_name, property_value|
           new_key = args[:property_map][property_name]
           if new_key
@@ -147,7 +147,7 @@ class SparkleFormation
             end
           else
             logger.warn "Failed to locate property conversion for `#{property_name}` on " \
-              "resource type `#{args[:new_resource]['Type']}`. Passing directly."
+              "resource type `#{args[:new_resource]["Type"]}`. Passing directly."
             new_properties[default_key_format(property_name)] = property_value
           end
         end
@@ -159,8 +159,8 @@ class SparkleFormation
     # @param value [Hash] resources hash
     # @return [Hash]
     def translate_resources(value)
-      translated['Resources'] = {}
-      translated['Resources'].tap do |modified_resources|
+      translated["Resources"] = {}
+      translated["Resources"].tap do |modified_resources|
         value.each do |resource_name, resource_args|
           new_resource = resource_translation(resource_name, resource_args)
           if new_resource
@@ -185,7 +185,7 @@ class SparkleFormation
     def dereference(obj)
       result = obj
       if obj.is_a?(Hash)
-        name = obj['Ref'] || obj['get_param']
+        name = obj["Ref"] || obj["get_param"]
         if name
           p_val = parameters[name.to_s]
           if p_val
@@ -203,7 +203,7 @@ class SparkleFormation
     def resource_name(obj)
       case obj
       when Hash
-        obj['Ref'] || obj['get_resource']
+        obj["Ref"] || obj["get_resource"]
       else
         obj.to_s
       end
@@ -258,13 +258,13 @@ class SparkleFormation
     def apply_rename(hash, names = [])
       k, v = hash.first
       if hash.size == 1
-        if k.start_with?('Fn::')
+        if k.start_with?("Fn::")
          {self.class.const_get(:FN_MAPPING).fetch(k, k) => attr_mapping(*v)}
-        elsif k == 'Ref'
+        elsif k == "Ref"
           if resources.key?(v)
-            {'get_resource' => v}
+            {"get_resource" => v}
           else
-            {'get_param' => self.class.const_get(:REF_MAPPING).fetch(v, v)}
+            {"get_param" => self.class.const_get(:REF_MAPPING).fetch(v, v)}
           end
         else
           hash
@@ -283,7 +283,7 @@ class SparkleFormation
       result = [resource_name, value]
       if r = resources[resource_name]
         attr_map = self.class.const_get(:FN_ATT_MAPPING)
-        if attr_map[r['Type']] && replacement = attr_map[r['Type']][value]
+        if attr_map[r["Type"]] && replacement = attr_map[r["Type"]][value]
           result = [resource_name, *[replacement].flatten.compact]
         end
       end
@@ -299,11 +299,11 @@ class SparkleFormation
     # replacements using the REF_MAPPING constant
     def apply_function(hash, funcs = [])
       k, v = hash.first
-      if hash.size == 1 && (k.start_with?('Fn') || k == 'Ref') && (funcs.empty? || funcs.include?(k))
+      if hash.size == 1 && (k.start_with?("Fn") || k == "Ref") && (funcs.empty? || funcs.include?(k))
         case k
-        when 'Fn::Join'
+        when "Fn::Join"
           v.last.join(v.first)
-        when 'Fn::FindInMap'
+        when "Fn::FindInMap"
           map_holder = mappings[v[0]]
           if map_holder
             map_item = map_holder[dereference(v[1])]
@@ -315,8 +315,8 @@ class SparkleFormation
           else
             raise "Failed to find mapping! (#{v[0]})"
           end
-        when 'Ref'
-          {'Ref' => self.class.const_get(:REF_MAPPING).fetch(v, v)}
+        when "Ref"
+          {"Ref" => self.class.const_get(:REF_MAPPING).fetch(v, v)}
         else
           hash
         end
data/lib/sparkle_formation/translation/heat.rb

@@ -17,14 +17,14 @@ class SparkleFormation
           translated[snake(k).to_s] = v
         end
         # params
-        cache.fetch('Parameters', {}).each do |k, v|
-          translated['parameters'][k] = Hash[
+        cache.fetch("Parameters", {}).each do |k, v|
+          translated["parameters"][k] = Hash[
             v.map do |key, value|
-              if key == 'Type'
+              if key == "Type"
                 [snake(key).to_s, value.downcase]
-              elsif key == 'AllowedValues'
+              elsif key == "AllowedValues"
                 # @todo fix this up to properly build constraints
-                ['constraints', [{'allowed_values' => value}]]
+                ["constraints", [{"allowed_values" => value}]]
               else
                 [snake(key).to_s, value]
               end
@@ -32,34 +32,34 @@ class SparkleFormation
           ]
         end
         # resources
-        cache.fetch('Resources', {}).each do |r_name, r_value|
-          translated['resources'][r_name] = Hash[
+        cache.fetch("Resources", {}).each do |r_name, r_value|
+          translated["resources"][r_name] = Hash[
             r_value.map do |k, v|
               [snake(k).to_s, v]
             end
           ]
         end
         # outputs
-        cache.fetch('Outputs', {}).each do |o_name, o_value|
-          translated['outputs'][o_name] = Hash[
+        cache.fetch("Outputs", {}).each do |o_name, o_value|
+          translated["outputs"][o_name] = Hash[
             o_value.map do |k, v|
               [snake(k).to_s, v]
             end
           ]
         end
-        translated.delete('awstemplate_format_version')
-        translated['heat_template_version'] = '2013-05-23'
+        translated.delete("awstemplate_format_version")
+        translated["heat_template_version"] = "2013-05-23"
         # no HOT support for mappings, so remove and clean pseudo
         # params in refs
-        if translated['resources']
-          translated['resources'] = dereference_processor(translated['resources'], ['Fn::FindInMap', 'Ref'])
-          translated['resources'] = rename_processor(translated['resources'])
+        if translated["resources"]
+          translated["resources"] = dereference_processor(translated["resources"], ["Fn::FindInMap", "Ref"])
+          translated["resources"] = rename_processor(translated["resources"])
         end
-        if translated['outputs']
-          translated['outputs'] = dereference_processor(translated['outputs'], ['Fn::FindInMap', 'Ref'])
-          translated['outputs'] = rename_processor(translated['outputs'])
+        if translated["outputs"]
+          translated["outputs"] = dereference_processor(translated["outputs"], ["Fn::FindInMap", "Ref"])
+          translated["outputs"] = rename_processor(translated["outputs"])
         end
-        translated.delete('mappings')
+        translated.delete("mappings")
         complete_launch_config_lb_setups
         true
       end
@@ -76,9 +76,9 @@ class SparkleFormation
       # @return [Object]
       # rubocop:disable Metrics/MethodLength
       def neutron_loadbalancer_finalizer(resource_name, new_resource, old_resource)
-        listeners = new_resource['Properties'].delete('listeners') || []
-        healthcheck = new_resource['Properties'].delete('health_check')
-        subnet = (new_resource['Properties'].delete('subnets') || []).first
+        listeners = new_resource["Properties"].delete("listeners") || []
+        healthcheck = new_resource["Properties"].delete("health_check")
+        subnet = (new_resource["Properties"].delete("subnets") || []).first
 
         # if health check is provided, create resource and apply to
         # all pools generated
@@ -86,83 +86,83 @@ class SparkleFormation
           healthcheck_name = "#{resource_name}HealthCheck"
           check = {
             healthcheck_name => {
-              'Type' => 'OS::Neutron::HealthMonitor',
-              'Properties' => {}.tap { |properties|
-                {'Timeout' => 'timeout', 'Interval' => 'delay', 'HealthyThreshold' => 'max_retries'}.each do |aws, hot|
+              "Type" => "OS::Neutron::HealthMonitor",
+              "Properties" => {}.tap { |properties|
+                {"Timeout" => "timeout", "Interval" => "delay", "HealthyThreshold" => "max_retries"}.each do |aws, hot|
                   if healthcheck[aws]
                     properties[hot] = healthcheck[aws]
                   end
                 end
-                type, port, path = healthcheck['Target'].split(%r{(:|/.*)}).find_all { |x| x != ':' }
-                properties['type'] = type
+                type, port, path = healthcheck["Target"].split(%r{(:|/.*)}).find_all { |x| x != ":" }
+                properties["type"] = type
                 if path
-                  properties['url_path'] = path
+                  properties["url_path"] = path
                 end
               },
             },
           }
-          translated['Resources'].merge!(check)
+          translated["Resources"].merge!(check)
         end
 
         base_listener = listeners.shift
         base_pool_name = "#{resource_name}Pool"
         base_pool = {
           base_pool_name => {
-            'Type' => 'OS::Neutron::Pool',
-            'Properties' => {
-              'lb_method' => 'ROUND_ROBIN',
-              'monitors' => [
-                {'get_resource' => healthcheck_name},
+            "Type" => "OS::Neutron::Pool",
+            "Properties" => {
+              "lb_method" => "ROUND_ROBIN",
+              "monitors" => [
+                {"get_resource" => healthcheck_name},
               ],
-              'protocol' => base_listener['Protocol'],
-              'vip' => {
-                'protocol_port' => base_listener['LoadBalancerPort'],
+              "protocol" => base_listener["Protocol"],
+              "vip" => {
+                "protocol_port" => base_listener["LoadBalancerPort"],
               },
-              'subnet' => subnet,
+              "subnet" => subnet,
             },
           },
         }
         if healthcheck
-          base_pool[base_pool_name]['Properties'].merge(
-            'monitors' => [
-              {'get_resource' => healthcheck_name},
+          base_pool[base_pool_name]["Properties"].merge(
+            "monitors" => [
+              {"get_resource" => healthcheck_name},
             ],
           )
         end
 
-        translated['Resources'].merge!(base_pool)
-        new_resource['Properties']['pool_id'] = {'get_resource' => base_pool_name}
-        new_resource['Properties']['protocol_port'] = base_listener['InstancePort']
+        translated["Resources"].merge!(base_pool)
+        new_resource["Properties"]["pool_id"] = {"get_resource" => base_pool_name}
+        new_resource["Properties"]["protocol_port"] = base_listener["InstancePort"]
 
         listeners.each_with_index do |listener, count|
           pool_name = "#{resource_name}PoolVip#{count}"
           pool = {
             pool_name => {
-              'Type' => 'OS::Neutron::Pool',
-              'Properties' => {
-                'lb_method' => 'ROUND_ROBIN',
-                'protocol' => listener['Protocol'],
-                'subnet' => subnet,
-                'vip' => {
-                  'protocol_port' => listener['LoadBalancerPort'],
+              "Type" => "OS::Neutron::Pool",
+              "Properties" => {
+                "lb_method" => "ROUND_ROBIN",
+                "protocol" => listener["Protocol"],
+                "subnet" => subnet,
+                "vip" => {
+                  "protocol_port" => listener["LoadBalancerPort"],
                 },
               },
             },
          }
          if healthcheck
-            pool[pool_name]['Properties'].merge(
-              'monitors' => [
-                {'get_resource' => healthcheck_name},
+            pool[pool_name]["Properties"].merge(
+              "monitors" => [
+                {"get_resource" => healthcheck_name},
              ],
            )
          end
 
           lb_name = "#{resource_name}Vip#{count}"
           lb = {lb_name => MultiJson.load(MultiJson.dump(new_resource))}
-          lb[lb_name]['Properties']['pool_id'] = {'get_resource' => pool_name}
-          lb[lb_name]['Properties']['protocol_port'] = listener['InstancePort']
-          translated['Resources'].merge!(pool)
-          translated['Resources'].merge!(lb)
+          lb[lb_name]["Properties"]["pool_id"] = {"get_resource" => pool_name}
+          lb[lb_name]["Properties"]["protocol_port"] = listener["InstancePort"]
+          translated["Resources"].merge!(pool)
+          translated["Resources"].merge!(lb)
         end
       end
 
@@ -171,18 +171,18 @@ class SparkleFormation
       # multiple listeners (ports) have been defined resulting in
      # multiple isolated LB resources
       def complete_launch_config_lb_setups
-        translated['resources'].find_all do |resource_name, resource|
-          resource['type'] == 'OS::Heat::AutoScalingGroup'
+        translated["resources"].find_all do |resource_name, resource|
+          resource["type"] == "OS::Heat::AutoScalingGroup"
         end.each do |name, value|
-          if lbs = value['properties'].delete('load_balancers')
+          if lbs = value["properties"].delete("load_balancers")
             lbs.each do |lb_ref|
               lb_name = resource_name(lb_ref)
-              lb_resource = translated['resources'][lb_name]
-              vip_resources = translated['resources'].find_all do |k, v|
-                k.match(/#{lb_name}Vip\d+/) && v['type'] == 'OS::Neutron::LoadBalancer'
+              lb_resource = translated["resources"][lb_name]
+              vip_resources = translated["resources"].find_all do |k, v|
+                k.match(/#{lb_name}Vip\d+/) && v["type"] == "OS::Neutron::LoadBalancer"
               end
-              value['properties']['load_balancers'] = vip_resources.map do |vip_name|
-                {'get_resource' => vip_name}
+              value["properties"]["load_balancers"] = vip_resources.map do |vip_name|
+                {"get_resource" => vip_name}
               end
             end
           end
@@ -200,7 +200,7 @@ class SparkleFormation
       # @return [Array<String, Object>] name and new value
       # @todo implement
       def nova_server_block_device_mapping(value, args = {})
-        ['block_device_mapping', value]
+        ["block_device_mapping", value]
       end
 
       # Custom mapping for server user data
@@ -212,8 +212,8 @@ class SparkleFormation
       # @option args [Hash] :original_resource
       # @return [Array<String, Object>] name and new value
       def nova_server_user_data(value, args = {})
-        args[:new_properties][:user_data_format] = 'RAW'
-        args[:new_properties][:config_drive] = 'true'
+        args[:new_properties][:user_data_format] = "RAW"
+        args[:new_properties][:config_drive] = "true"
         [:user_data, Hash[value.values.first]]
       end
 
@@ -225,25 +225,25 @@ class SparkleFormation
       # @param old_resource [Hash]
       # @return [Object]
       def nova_server_finalizer(resource_name, new_resource, old_resource)
-        if old_resource['Metadata']
-          new_resource['Metadata'] = old_resource['Metadata']
-          proceed = new_resource['Metadata'] &&
-            new_resource['Metadata']['AWS::CloudFormation::Init'] &&
-            config = new_resource['Metadata']['AWS::CloudFormation::Init']['config']
+        if old_resource["Metadata"]
+          new_resource["Metadata"] = old_resource["Metadata"]
+          proceed = new_resource["Metadata"] &&
+            new_resource["Metadata"]["AWS::CloudFormation::Init"] &&
+            config = new_resource["Metadata"]["AWS::CloudFormation::Init"]["config"]
           if proceed
             # NOTE: This is a stupid hack since HOT gives the URL to
             # wget directly and if special characters exist, it fails
-            if files = config['files']
+            if files = config["files"]
               files.each do |key, args|
-                if args['source']
-                  if args['source'].is_a?(String)
-                    args['source'].replace("\"#{args['source']}\"")
+                if args["source"]
+                  if args["source"].is_a?(String)
+                    args["source"].replace("\"#{args["source"]}\"")
                   else
-                    args['source'] = {
-                      'Fn::Join' => [
-                        '', [
+                    args["source"] = {
+                      "Fn::Join" => [
+                        "", [
                           "\"",
-                          args['source'],
+                          args["source"],
                           "\"",
                         ],
                       ],
@@ -265,13 +265,13 @@ class SparkleFormation
       # @param old_resource [Hash]
       # @return [TrueClass]
       def neutron_subnet_finalizer(resource_name, new_resource, old_resource)
-        azs = new_resource['Properties'].delete('availability_zone')
+        azs = new_resource["Properties"].delete("availability_zone")
         if azs
           network_name = "NetworkFor#{resource_name}"
-          translated['Resources'][network_name] = {
-            'type' => 'OS::Neutron::Network',
+          translated["Resources"][network_name] = {
+            "type" => "OS::Neutron::Network",
           }
-          new_resource['Properties']['network'] = {'get_resource' => network_name}
+          new_resource["Properties"]["network"] = {"get_resource" => network_name}
         end
         true
       end
@@ -283,7 +283,7 @@ class SparkleFormation
       # @param old_resource [Hash]
       # @return [TrueClass]
       def neutron_net_finalizer(resource_name, new_resource, old_resource)
-        new_resource['Properties'].clear
+        new_resource["Properties"].clear
         true
       end
 
@@ -312,7 +312,7 @@ class SparkleFormation
       # @return [Array<String, Object>] name and new value
       # @todo implement
       def autoscaling_group_launchconfig(value, args = {})
-        ['resource', value]
+        ["resource", value]
       end
 
       # Default keys to snake cased format (underscore)
@@ -326,71 +326,71 @@ class SparkleFormation
       # Heat translation mapping
       MAP = {
         :resources => {
-          'AWS::EC2::Instance' => {
-            :name => 'OS::Nova::Server',
+          "AWS::EC2::Instance" => {
+            :name => "OS::Nova::Server",
             :finalizer => :nova_server_finalizer,
             :properties => {
-              'AvailabilityZone' => 'availability_zone',
-              'BlockDeviceMappings' => :nova_server_block_device_mapping,
-              'ImageId' => 'image',
-              'InstanceType' => 'flavor',
-              'KeyName' => 'key_name',
-              'NetworkInterfaces' => 'networks',
-              'SecurityGroups' => 'security_groups',
-              'SecurityGroupIds' => 'security_groups',
-              'Tags' => 'metadata',
-              'UserData' => :nova_server_user_data,
+              "AvailabilityZone" => "availability_zone",
+              "BlockDeviceMappings" => :nova_server_block_device_mapping,
+              "ImageId" => "image",
+              "InstanceType" => "flavor",
+              "KeyName" => "key_name",
+              "NetworkInterfaces" => "networks",
+              "SecurityGroups" => "security_groups",
+              "SecurityGroupIds" => "security_groups",
+              "Tags" => "metadata",
+              "UserData" => :nova_server_user_data,
             },
           },
-          'AWS::AutoScaling::AutoScalingGroup' => {
-            :name => 'OS::Heat::AutoScalingGroup',
+          "AWS::AutoScaling::AutoScalingGroup" => {
+            :name => "OS::Heat::AutoScalingGroup",
             :properties => {
-              'Cooldown' => 'cooldown',
-              'DesiredCapacity' => 'desired_capacity',
-              'MaxSize' => 'max_size',
-              'MinSize' => 'min_size',
-              'LaunchConfigurationName' => :autoscaling_group_launchconfig,
+              "Cooldown" => "cooldown",
+              "DesiredCapacity" => "desired_capacity",
+              "MaxSize" => "max_size",
+              "MinSize" => "min_size",
+              "LaunchConfigurationName" => :autoscaling_group_launchconfig,
             },
           },
-          'AWS::AutoScaling::LaunchConfiguration' => :delete,
-          'AWS::ElasticLoadBalancing::LoadBalancer' => {
-            :name => 'OS::Neutron::LoadBalancer',
+          "AWS::AutoScaling::LaunchConfiguration" => :delete,
+          "AWS::ElasticLoadBalancing::LoadBalancer" => {
+            :name => "OS::Neutron::LoadBalancer",
             :finalizer => :neutron_loadbalancer_finalizer,
             :properties => {
-              'Instances' => 'members',
-              'Listeners' => 'listeners',
-              'HealthCheck' => 'health_check',
-              'Subnets' => 'subnets',
+              "Instances" => "members",
+              "Listeners" => "listeners",
+              "HealthCheck" => "health_check",
+              "Subnets" => "subnets",
             },
           },
-          'AWS::EC2::VPC' => {
-            :name => 'OS::Neutron::Net',
+          "AWS::EC2::VPC" => {
+            :name => "OS::Neutron::Net",
             :finalizer => :neutron_net_finalizer,
             :properties => {
-              'CidrBlock' => 'cidr',
+              "CidrBlock" => "cidr",
             },
           },
-          'AWS::EC2::Subnet' => {
-            :name => 'OS::Neutron::Subnet',
+          "AWS::EC2::Subnet" => {
+            :name => "OS::Neutron::Subnet",
             :finalizer => :neutron_subnet_finalizer,
             :properties => {
-              'CidrBlock' => 'cidr',
-              'VpcId' => 'network',
-              'AvailabilityZone' => 'availability_zone',
+              "CidrBlock" => "cidr",
+              "VpcId" => "network",
+              "AvailabilityZone" => "availability_zone",
             },
           },
         },
       }
 
       REF_MAPPING = {
-        'AWS::StackName' => 'OS::stack_name',
-        'AWS::StackId' => 'OS::stack_id',
-        'AWS::Region' => 'OS::stack_id', # @todo i see it set in source, but no function. wat
+        "AWS::StackName" => "OS::stack_name",
+        "AWS::StackId" => "OS::stack_id",
+        "AWS::Region" => "OS::stack_id", # @todo i see it set in source, but no function. wat
       }
 
       FN_MAPPING = {
-        'Fn::GetAtt' => 'get_attr',
-        'Fn::Join' => 'list_join',
+        "Fn::GetAtt" => "get_attr",
+        "Fn::Join" => "list_join",
       }
     end
   end