cloudformation-tool 0.9.1 → 1.0.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
-SHA1:
-  metadata.gz: 3e948781a997b22548ec5adf8ff16d7eb0bbc3db
-  data.tar.gz: dfef8541f9c310b4a69b064ee7206b3827e6d0be
+SHA256:
+  metadata.gz: ef4e953cd8194f197b68c1e110dae6f3a81f5c4e2730fe63950d5ac1392f1b02
+  data.tar.gz: 9d5ea95468d689707ec79534281ea14ae51521d3491787e523a1ef0c11c295a8
 SHA512:
-  metadata.gz: e2e5f5a9026d6fc920c232b0fad618beb951c5b79d8e1507b715ea690fb7c9597e25e6ec1361c19b706ae78203cd5ee411a6c445e7841c2cbe7a0371f354d44a
-  data.tar.gz: 2ab9fb5203d7fa729a70640c215dc0db4721e61b2a8b9ba7010a1390425893e87c1d30c3bedb22168ae20543968ded5001b42b561f47151e5d07b7978323eefe
+  metadata.gz: d43ffe6a9b74696f7a96c36a4f25bcc51322bd61f6d72000133ba6ef13720bd9412132c693597d40018b9bfd771038e576a1739e3c0b2e1cf895ecbdfaaaeee1
+  data.tar.gz: 1b6226952862d335bfa9970b8c394e9201548284af96363a39c54c00aac1a6ba946207c2076676c983fbe17c07fef566c23047aec842cc4dd71db6a3f4b7c31d
data/README.md CHANGED
@@ -192,8 +192,10 @@ specifying the S3 bucket and object key, either of the following fields may be u
 - The field `URL` may be used to specify an HTTP URL from which the code is to be uploaded to AWS Lambda. The tool
   will download the code file from the specified URL, upload it to S3 and specify the correct S3 location for
   CloudFormation.
-- The field `Path` may be used to specify a local file or directory containing the code to be uploaded.
-  If the path specifies a directory, it will be compressed as a Zip file.
+- The field `Path` may be used to specify a local file or directory containing the code to be uploaded.
+  If the path specifies a directory, it will be compressed and uploaded to S3 as a Zip file. If the path is a
+  single file, it will be converted to a `ZipFile`, allowing implicit use of the CloudFormation `cfn-response` module
+  and the AWS SDK, but the file is also subject to all `ZipFile` restrictions - such as limited to 4KB size.
 
 #### Example:
 
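For illustration, a Lambda resource using the `Path` field might look like the sketch below; the resource name, handler, runtime, role and paths are hypothetical, not taken from the gem's own example:

```yaml
Resources:
  MyFunction:
    Type: AWS::Lambda::Function
    Properties:
      Handler: index.handler
      Runtime: nodejs8.10                                     # hypothetical runtime
      Role: arn:aws:iam::123456789012:role/my-function-role   # hypothetical role
      Code:
        Path: lambda/my-function    # directory, relative to the template: zipped and uploaded to S3
        # Path: lambda/handler.js   # single file: inlined as a ZipFile (4KB limit applies)
```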
@@ -14,7 +14,7 @@ module CloudFormationTool
       else
         tpl = CloudFormation.parse(file)
         params = get_params
-        data = tpl.compile;
+        data = tpl.compile(params);
         data['Parameters'].each do |name,param|
           param['Default'] = params[name] if params.has_key? name
         end
@@ -7,47 +7,92 @@ module CloudFormationTool
     class LambdaCode
       include Storable
 
-      def initialize(url: nil, path: nil)
-        log "Downloading Lambda code from #{url}#{path}"
-        case url
-        when nil
-          @s3_url = if File.directory?(path)
-            URI(upload(make_filename(path.split('/').last), fetch_from_folder(path), mime_type: 'application/zip', gzip: false))
-          else
-            URI(upload(make_filename(path.split('/').last), File.open(path, "rb").read, gzip: false))
-          end
-        else
+      def initialize(code, tpl)
+        @data = code
+        @data['Url'] = @data.delete 'URL' if @data.key? 'URL' # normalize to CF convention if seeing old key
+        if @data.key? 'Url'
+          log "Trying Lambda code from #{@data['Url']}"
+          @data['Url'] = url = tpl.resolveVal(@data['Url'])
+          return unless url.is_a? String
+          log "Downloading Lambda code from #{url}"
+          unless already_in_cache(url)
           res = fetch_from_url(url)
           @s3_url = URI(upload(make_filename(url.split('.').last), res.body, mime_type: res['content-type'], gzip: false))
+            log "uploaded Lambda function to #{@s3_url}"
         end
-        log "uploaded Lambda function to #{@s3_url}"
+        elsif @data.key? 'Path'
+          @data['Path'] = path = tpl.resolveVal(@data['Path'])
+          return unless path.is_a? String
+          log "Reading Lambda code from #{path}"
+          path = if path.start_with? "/" then path else "#{tpl.basedir}/#{path}" end
+          if File.directory?(path)
+            @s3_url = URI(upload(make_filename('zip'), zip_path(path), mime_type: 'application/zip', gzip: false))
+            log "uploaded Lambda function to #{@s3_url}"
+          else # Convert files to ZipFile
+            @data.delete 'Path'
+            @data['ZipFile'] = File.read(path)
+          end
+        end
       end
 
-      def fetch_from_folder(path_str)
+      def zip_path(path)
+        temp_file = Tempfile.new
+        temp_path = temp_file.path + '.zip'
         begin
-          temp_file = Tempfile.new("#{path_str.split('/').last}.zip")
-          Zip::ZipOutputStream.open(temp_file) { |zos| }
-          Zip::ZipFile.open(temp_file.path, Zip::ZipFile::CREATE) do |zipfile|
-            Dir[File.join(path_str, '*')].each do |file|
-              zipfile.add(file.sub("#{path_str}/", ''), file)
+          Zip::ZipFile.open(temp_path, true) do |zipfile|
+            Dir[File.join(path, '**','*')].each do |file|
+              zipfile.add(file.sub("#{path}/", ''), file)
             end
           end
-          zip_data = File.read(temp_file.path)
+          File.read(temp_path)
         ensure
-          temp_file.close
-          temp_file.unlink
+          temp_file.close!
+          File.unlink temp_path
         end
-        zip_data
       end
 
-      def fetch_from_url(uri_str)
-        $__fetch_cache ||= Hash.new do |h, url|
-          h[url] = fetch_from_url_real(url)
+      def already_in_cache(uri_str, limit = 10)
+        raise ArgumentError, 'too many HTTP redirects' if limit == 0
+        url = URI(uri_str)
+        begin
+          Net::HTTP.start(url.host, url.port) do |http|
+            request = Net::HTTP::Get.new(url)
+            http.request(request) do |response|
+              # handle redirects like Github likes to do
+              case response
+              when Net::HTTPSuccess then
+                check_cached(response['ETag'])
+              when Net::HTTPRedirection then
+                location = response['location']
+                log "redirected to #{location}"
+                already_in_cache(location, limit - 1)
+              else
+                raise ArgumentError, "Error getting response: #{response}"
+              end
+            end
+          end
+        rescue EOFError
+        end
+        !@s3_url.nil?
+      end
+
+      def check_cached(etag)
+        etag.gsub!(/"/,'') unless etag.nil?
+        o = cached_object(etag)
+        unless o.nil?
+          log 'reusing cached object'
+          @s3_url = o.public_url
+        end
+      end
+
+      def fetch_from_url(uri_str)
+        $__fetch_cache ||= Hash.new do |h, url|
+          h[url] = fetch_from_url_real(url)
+        end
+        $__fetch_cache[uri_str]
       end
-        $__fetch_cache[uri_str]
-      end
 
-      def fetch_from_url_real(uri_str, limit = 10)
+      def fetch_from_url_real(uri_str, limit = 10)
         raise ArgumentError, 'too many HTTP redirects' if limit == 0
         response = Net::HTTP.get_response(URI(uri_str))
         case response
@@ -63,10 +108,14 @@ module CloudFormationTool
       end
 
       def to_cloudformation
-        {
-          'S3Bucket' => @s3_url.hostname.split('.').first,
-          'S3Key' => @s3_url.path[1..-1]
-        }
+        if @s3_url.nil?
+          @data
+        else
+          {
+            'S3Bucket' => @s3_url.hostname.split('.').first,
+            'S3Key' => @s3_url.path[1..-1]
+          }
+        end
       end
     end
 
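As a rough sketch, the reworked `to_cloudformation` above can now yield one of three shapes for the compiled `Code` property; the fragments below are invented for illustration only (bucket name, object key and inline code are hypothetical):

```yaml
# Url or directory Path resolved and uploaded: point CloudFormation at the S3 copy
Code:
  S3Bucket: my-templates-bucket
  S3Key: cf-compiled/d/41/d41d8cd98f00b204e9800998ecf8427e/lambda.zip
---
# Single-file Path: the code is inlined as a ZipFile (subject to the 4KB ZipFile limit)
Code:
  ZipFile: |
    exports.handler = (event, context, callback) => callback(null, 'ok');
---
# Reference that could not be resolved yet: passed through for a parent template to handle
Code:
  Url: { Ref: LambdaCodeUrl }
```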
@@ -10,6 +10,8 @@ module CloudFormationTool
       CloudFormation.new(path)
     end
 
+    attr_reader :basedir
+
     def initialize(path)
       log "Loading #{path}"
       @path = path
@@ -35,9 +37,8 @@ module CloudFormationTool
       end
     end
 
-    def compile
-      return @data if @compiled
-      @compiled = true
+    def compile(parameters = nil)
+      @params = parameters unless parameters.nil?
       embed_includes
       @data = load_files(@data)
     end
@@ -127,7 +128,7 @@ module CloudFormationTool
     def resolveVal(value)
       case value
       when Hash
-        if value['Ref']
+        if value.key? 'Ref'
           if @params.nil?
             # no parameters, we are probably in a sub template, just return the ref and hope
             # a parent template has what it takes to resolve the ref
@@ -143,41 +144,37 @@ module CloudFormationTool
           raise CloudFormationTool::Errors::AppError, "Value #{value} is not a valid value or reference"
         end
       else
-        value;
+        value
       end
     end
 
-    def load_files(data)
+    def load_files(data, restype = nil)
       case data
       when Array
-        data.collect { |data| load_files(data) }
+        data.collect { |data| load_files(data, restype) }
       when Hash
+        # remember the current resource type
+        restype = data['Type'] if restype.nil? and data.key?('Type')
         data.inject({}) do |dict, (key, val)|
-          dict[key] = if (key == "UserData") and (val["File"])
-            # Support LaunchConfiguration UserData from file
-            CloudInit.new("#{@basedir}/#{val["File"]}").to_base64
-          elsif (key == "UserData") and (val["FileTemplate"])
-            # Support LaunchConfiguration UserData from file with substitutions
-            { "Fn::Base64" => { "Fn::Sub" => CloudInit.new("#{@basedir}/#{val["FileTemplate"]}").compile } }
-          elsif (key == "Code") and (val["URL"])
-            # Support Lambda Code from arbitrary URLs
-            url = resolveVal(val["URL"])
-            log "Resolving lambda code URL: #{url}"
-            if url.is_a? String # resolving works
-              LambdaCode.new(url: url).to_cloudformation
-            else # resolving didn't work - we probably don't have parameters
-              # push it upstream and hope a parent template can resolve it
-              val
-            end
-          elsif (key == "Code") and (val["Path"])
-            path = resolveVal(val["Path"])
-            if path.is_a? String # resolving works
-              LambdaCode.new(path: if path.start_with? "/" then path else "#{@basedir}/#{path}" end).to_cloudformation
-            else # resolving didn't work - we probably don't have parameters
-              val
-            end
-          else
-            load_files(val)
+          dict[key] = case restype
+          when 'AWS::AutoScaling::LaunchConfiguration'
+            if (key == "UserData") and (val["File"])
+              # Support LaunchConfiguration UserData from file
+              CloudInit.new("#{@basedir}/#{val["File"]}").to_base64
+            elsif (key == "UserData") and (val["FileTemplate"])
+              # Support LaunchConfiguration UserData from file with substitutions
+              { "Fn::Base64" => { "Fn::Sub" => CloudInit.new("#{@basedir}/#{val["FileTemplate"]}").compile } }
+            else
+              load_files(val, restype)
+            end
+          when 'AWS::Lambda::Function'
+            if key == 'Code'
+              LambdaCode.new(val, self).to_cloudformation
+            else
+              load_files(val, restype)
+            end
+          else
+            load_files(val, restype)
           end
           dict
         end
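To make the per-resource-type dispatch above concrete: `UserData` compilation from a file now only applies inside an `AWS::AutoScaling::LaunchConfiguration` resource, as in this hypothetical fragment (AMI and file names invented for illustration):

```yaml
Resources:
  WebLaunchConfig:
    Type: AWS::AutoScaling::LaunchConfiguration
    Properties:
      ImageId: ami-12345678              # hypothetical AMI
      InstanceType: t2.micro
      UserData:
        File: cloud-init/web.yaml        # compiled by CloudInit and Base64-encoded
        # FileTemplate: cloud-init/web.yaml  # alternative: compiled with Fn::Sub substitutions
```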
@@ -14,6 +14,10 @@ module CloudFormationTool
       end
     end
 
+    def cached_object(md5)
+      Aws::S3::Bucket.new(s3_bucket_name(region), client: awss3(region)).objects(prefix: "cf-compiled/#{md5[0]}/#{md5[1..2]}/#{md5}/").first
+    end
+
     def upload(path, content, mime_type: 'text/yaml', gzip: true)
       md5 = Digest::MD5.hexdigest content
       prefix = "#{md5[0]}/#{md5[1..2]}/#{md5}"
@@ -23,7 +27,7 @@ module CloudFormationTool
       # do a local copy to the requested path) because this way cloudformation can see
       # that the updated template is exactly the same as the old one and will not force
       # an unneeded update.
-      o = b.objects(prefix: "cf-compiled/#{prefix}/").first
+      o = cached_object(md5)
       if o.nil?
         # no such luck, we need to actually upload the file
         o = b.object("cf-compiled/#{prefix}/#{path}")
@@ -1,3 +1,3 @@
 module CloudFormationTool
-  VERSION = '0.9.1'
+  VERSION = '1.0.1'
 end
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: cloudformation-tool
 version: !ruby/object:Gem::Version
-  version: 0.9.1
+  version: 1.0.1
 platform: ruby
 authors:
 - Oded Arbel
@@ -176,7 +176,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.6.14
+rubygems_version: 2.7.6
 signing_key:
 specification_version: 4
 summary: A pre-compiler tool for CloudFormation YAML templates