cloudformation-tool 0.5.0 → 0.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: b895c712fd73503a7aafbb3f761b9f95951f171e
+  data.tar.gz: 343a03cf57bbbc961e13165a55e76e3a9730aad2
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 3b52e88265e708f87295600173e454627384fd4308dce79bc96e895718f6d8c99e27e0213f14584e71518375ce84513f934cfc7bf722e8a990857a70cea89a66
+  data.tar.gz: 987f7d224e4b33a5b8ed2219d7d53ddcec2d6974d88c3c2e43a0ac5b019dc66e7e7888fb0b7c6bfe8e6e24fa89127265fea4bdeafa503cf48103022a3829ab4c
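The added SHA1 and SHA512 values are digests of the two archives packaged inside the gem (metadata.gz and data.tar.gz). A minimal sketch, assuming those archives have already been extracted from the downloaded .gem file into the current directory, of recomputing such digests with Ruby's standard Digest library:

    require 'digest'

    # Assumed file names: the two archives contained in the downloaded .gem
    %w[metadata.gz data.tar.gz].each do |name|
      data = File.binread(name)
      puts "#{name} SHA1:   #{Digest::SHA1.hexdigest(data)}"
      puts "#{name} SHA512: #{Digest::SHA512.hexdigest(data)}"
    end
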
@@ -7,7 +7,7 @@ module CloudFormationTool
 
     def execute
       if file.end_with? '.init'
-        puts CloudInit.new(file).
+        puts CloudInit.new(file).encode(false) # make sure cloud-init files obey AWS user-data restrictions, but are also printable
       else
         puts CloudFormation.parse(file).to_yaml
         # raise CloudFormationTool::Errors::AppError.new("not a valid template file. Only .init and .yaml are supported")

@@ -42,9 +42,9 @@ module CloudFormationTool
       "#cloud-config\n" + @initfile.to_yaml
     end
 
-    def encode
+    def encode(allow_gzip = true)
       yamlout = compile
-      if yamlout.size > 16384 # max AWS EC2 user data size - try compressing it
+      if allow_gzip and yamlout.size > 16384 # max AWS EC2 user data size - try compressing it
         yamlout = Zlib::Deflate.new(nil, 31).deflate(yamlout, Zlib::FINISH) # 31 is the magic word to have deflate create a gzip compatible header
       end
       if yamlout.size > 16384 # still to big, we should upload to S3 and create an include file
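The second hunk adds an allow_gzip flag to encode: user data larger than the 16384-byte EC2 limit is gzip-compressed with Zlib::Deflate (window bits of 31 produce a gzip-compatible header), and the execute path above now calls encode(false) so its printed output stays readable. A minimal standalone sketch of the behavior the flag controls, using hypothetical names (encode_user_data, MAX_USER_DATA) rather than the gem's own classes:

    require 'zlib'

    MAX_USER_DATA = 16_384 # AWS EC2 user-data size limit in bytes

    # Sketch only; not the gem's CloudInit#encode
    def encode_user_data(text, allow_gzip = true)
      if allow_gzip && text.bytesize > MAX_USER_DATA
        # window bits of 31 make Deflate write a gzip-compatible header
        text = Zlib::Deflate.new(nil, 31).deflate(text, Zlib::FINISH)
      end
      text
    end

    big = "#cloud-config\n" + ("x" * 20_000)
    encode_user_data(big)        # over the limit: returned as a binary gzip stream
    encode_user_data(big, false) # printable text, even when over the limit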