lambda_convert 0.0.5 → 0.0.6
This diff reflects the changes between publicly available package versions as released to their public registry, and is provided for informational purposes only.
- checksums.yaml +4 -4
- data/README.md +4 -0
- data/lib/lambda_convert/cli.rb +79 -54
- metadata +1 -1
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 6c605d6308b66aa522b51fe09121ea05a1e962eb
+  data.tar.gz: 375b23a2819be672e58bb8894fc7c0df5fc3c10a
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: a55edfd1babe87ff83d2a9cc763e9a47501b63af31d7c1028f04479f222bffe1a566ae614af7ff896668e8c1cb1838e8586e7e521f138da47851f5263464c197
+  data.tar.gz: 5f557c99c83614749f0b189e56764fc1640359a343f1a649e2943546086e5691e978cce192597e3b723437b2781110f96d702e8e3ccf9c3eab3db5687aaf2049
data/README.md
CHANGED
@@ -27,3 +27,7 @@ To eliminate the big image file uploading issue and the security risk, the idea
 - **CONVERT_S3_KEY_PREFIX** - AWS S3 temporary file uploading prefix, default value is `_convert_tmp/`
 - **CONVERT_LAMBDA_FUNCTION** - Name of the AWS Lambda function to invoke, default value is `image-convert-prod`
 - **CONVERT_DISABLE_FALLBACK** - By default, this command line tool fallbacks to local `convert` command if remote operation fails. Set this value to 1 to disable the fallback behavior.
+
+## The AWS Lambda function
+
+The AWS Lambda function for running ImageMagick can be found here at https://github.com/envoy/envoy-convert
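For reference, a minimal Ruby sketch (not part of the gem) of how the variables documented above resolve to their defaults; it simply mirrors the ENV lookups introduced in the cli.rb diff below.

# Sketch only; the gem wraps these lookups in helper methods (see cli.rb below).
s3_key_prefix     = ENV['CONVERT_S3_KEY_PREFIX'] || '_convert_tmp/'         # temporary S3 upload prefix
lambda_function   = ENV['CONVERT_LAMBDA_FUNCTION'] || 'image-convert-prod'  # Lambda function to invoke
fallback_disabled = ENV['CONVERT_DISABLE_FALLBACK'].to_i != 0               # "1" disables the local fallback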
data/lib/lambda_convert/cli.rb
CHANGED
@@ -1,5 +1,6 @@
 require 'rubygems'
 require 'json'
+require 'securerandom'
 require 'English'
 
 require 'aws-sdk'
@@ -9,95 +10,119 @@ module LambdaConvert
   module CLI
     class <<self
      attr_accessor :logger
-    end
 
-
-
-
-
-
-
-      )
-      s3_bucket = ENV['CONVERT_S3_BUCKET']
-      s3_key_prefix = ENV['CONVERT_S3_KEY_PREFIX'] || '_convert_tmp/'
-      lambda_function = ENV['CONVERT_LAMBDA_FUNCTION'] || 'image-convert-prod'
+      def aws_credentials
+        Aws::Credentials.new(
+          ENV['CONVERT_ACCESS_KEY'] || ENV['AWS_ACCESS_KEY_ID'],
+          ENV['CONVERT_SECRET_ACCESS_KEY'] || ENV['AWS_SECRET_ACCESS_KEY']
+        )
+      end
 
-
-
-
-      )
-      aws_lambda = Aws::Lambda::Client.new(
-        region: lambda_region,
-        credentials: aws_credentials
-      )
+      def lambda_region
+        ENV['CONVERT_LAMBDA_REGION'] || ENV['AWS_REGION']
+      end
 
-
+      def lambda_function
+        ENV['CONVERT_LAMBDA_FUNCTION'] || 'image-convert-prod'
+      end
 
-
-
-
-      # Notice: there is also special output file syntax for convert command,
-      # but we are not supporting them now, as we probably won't use it
-      output_file = ARGV[-1]
+      def s3_region
+        ENV['CONVERT_S3_REGION'] || ENV['AWS_REGION']
+      end
 
-
-
+      def s3_bucket
+        ENV['CONVERT_S3_BUCKET']
+      end
 
+      def s3_key_prefix
+        ENV['CONVERT_S3_KEY_PREFIX'] || '_convert_tmp/'
+      end
+
+      def s3_client
+        @s3_client ||= Aws::S3::Client.new(
+          region: s3_region,
+          credentials: aws_credentials
+        )
+      end
+
+      def lambda_client
+        @aws_lambda ||= Aws::Lambda::Client.new(
+          region: lambda_region,
+          credentials: aws_credentials
+        )
+      end
+    end
+
+    def self.upload_file(input_file, input_key)
       logger.info("Uploading file to s3://#{s3_bucket}/#{input_key}")
       File.open(input_file, 'rb') do |file|
-
+        s3_client.put_object(bucket: s3_bucket, key: input_key, body: file)
       end
+    end
+
+    def self.invoke_lambda(input_key, input_selecting, args, output_key)
       source = '{source}'
       source += "[#{input_selecting}]" unless input_selecting.nil?
       instruction = {
         schema: 'envoy-convert-instruction',
         original: input_key,
         bucket: s3_bucket,
-        write_options: {
-          acl: 'private'
-        },
+        write_options: {},
         key: output_key,
-        args: [source] +
+        args: [source] + args + ['{dest}']
       }
       logger.info("Invoking lambda with instruction #{instruction}")
 
-      resp =
+      resp = lambda_client.invoke(
         function_name: lambda_function,
         invocation_type: 'RequestResponse',
         payload: JSON.dump(instruction)
       )
       logger.info("Get response of invoke #{resp}")
       raise 'Failed to run convert on Lambda' if resp.status_code != 200
+    end
 
+    def self.download_file(output_key, output_file)
       logger.info(
         "Downloading file from s3://#{s3_bucket}/#{output_key} to " \
         "#{output_file}"
       )
-
+      s3_client.get_object(
         response_target: output_file,
         bucket: s3_bucket,
         key: output_key
       )
+    end
+
+    def self.delete_files(keys)
+      logger.info("Delete files #{keys} from #{s3_bucket}")
+      s3_client.delete_objects(
+        bucket: s3_bucket,
+        delete: {
+          objects: keys.map { |key| { key: key } },
+          quiet: true
+        }
+      )
+    end
+
+    def self.lambda_convert
+      input_file, input_selecting = LambdaConvert::Utils.parse_input_path(
+        ARGV[0]
+      )
+      # Notice: there is also special output file syntax for convert command,
+      # but we are not supporting them now, as we probably won't use it
+      output_file = ARGV[-1]
+      input_key = "#{s3_key_prefix}#{SecureRandom.uuid}"
+      output_key = "#{s3_key_prefix}#{SecureRandom.uuid}"
+
+      upload_file(input_file, input_key)
+      invoke_lambda(input_key, input_selecting, ARGV[1..-2], output_key)
+      download_file(output_key, output_file)
+
       logger.info('Done')
     ensure
-      if !
-
-          "Delete files #{input_key} and #{output_key} from #{s3_bucket}"
-        )
-        s3.delete_objects(
-          bucket: s3_bucket,
-          delete: {
-            objects: [
-              {
-                key: input_key
-              },
-              {
-                key: output_key
-              }
-            ],
-            quiet: true
-          }
-        )
+      if !input_key.nil? && !output_key.nil?
+        delete_files([input_key, output_key])
       end
     end
 
@@ -117,9 +142,9 @@ module LambdaConvert
 
     def self.main
       abort('Recursive call') if ENV['CONVERT_RECURSIVE_FLAG'] == '1'
+      abort('Invalid arguments') if ARGV.count < 2
       lambda_convert
     rescue StandardError => e
-
       logger.warn("Failed to convert via lambda, error=#{e}")
       fallback_disabled = (ENV['CONVERT_DISABLE_FALLBACK'].to_i != 0) || false
       if fallback_disabled