clientside_aws 0.0.17
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.gitignore +4 -0
- data/Dockerfile +46 -0
- data/Gemfile +23 -0
- data/Gemfile.lock +99 -0
- data/README.md +105 -0
- data/bin/clientside_aws_build +6 -0
- data/bin/clientside_aws_run +5 -0
- data/bin/clientside_aws_test +4 -0
- data/clientside_aws.gemspec +31 -0
- data/clientside_aws/dynamodb.rb +722 -0
- data/clientside_aws/ec2.rb +103 -0
- data/clientside_aws/elastic_transcoder.rb +179 -0
- data/clientside_aws/firehose.rb +13 -0
- data/clientside_aws/kinesis.rb +13 -0
- data/clientside_aws/mock/core.rb +7 -0
- data/clientside_aws/mock/firehose.rb +14 -0
- data/clientside_aws/mock/kinesis.rb +18 -0
- data/clientside_aws/mock/s3.rb +59 -0
- data/clientside_aws/mock/ses.rb +74 -0
- data/clientside_aws/mock/sns.rb +17 -0
- data/clientside_aws/s3.rb +223 -0
- data/clientside_aws/ses.rb +9 -0
- data/clientside_aws/sns.rb +41 -0
- data/clientside_aws/sqs.rb +233 -0
- data/docker/clientside-aws-run +3 -0
- data/docker/redis-server-run +2 -0
- data/index.rb +57 -0
- data/lib/clientside_aws.rb +27 -0
- data/lib/clientside_aws/configuration.rb +14 -0
- data/lib/clientside_aws/mock.rb +224 -0
- data/lib/clientside_aws/version.rb +3 -0
- data/public/images/jscruff.jpg +0 -0
- data/public/images/spacer.gif +0 -0
- data/public/images/stock_video.mp4 +0 -0
- data/spec/dynamodb_spec.rb +1069 -0
- data/spec/ec2_spec.rb +138 -0
- data/spec/firehose_spec.rb +16 -0
- data/spec/kinesis_spec.rb +22 -0
- data/spec/s3_spec.rb +219 -0
- data/spec/sns_spec.rb +72 -0
- data/spec/spec_helper.rb +71 -0
- data/spec/sqs_spec.rb +87 -0
- data/spec/test_client/test.rb +45 -0
- data/spec/transcoder_spec.rb +138 -0
- metadata +241 -0
data/clientside_aws/ec2.rb
@@ -0,0 +1,103 @@
+require 'ipaddr'
+
+helpers do
+  def hkey_from_params(params)
+    "#{params['IpPermissions.1.IpProtocol']}:" \
+      "#{params['IpPermissions.1.FromPort']}:" \
+      "#{params['IpPermissions.1.ToPort']}"
+  end
+
+  def authorize_security_group_ingress(params)
+    hkey = hkey_from_params(params)
+    existing = AWS_REDIS.hget("ingress:#{params['GroupId']}",
+                              hkey)
+    value = existing ? JSON.parse(existing).to_set : Set.new
+    ip_addr = IPAddr.new(params['IpPermissions.1.IpRanges.1.CidrIp'])
+    mask = params['IpPermissions.1.IpRanges.1.CidrIp'].split('/').last
+    # Interpret the mask, so 10.0.0.1/24 converts to 10.0.0.0/24
+    value << "#{ip_addr}/#{mask}"
+
+    AWS_REDIS.hset("ingress:#{params['GroupId']}",
+                   hkey,
+                   value.to_a.to_json)
+  end
+
+  def revoke_security_group_ingress(params)
+    hkey = hkey_from_params(params)
+    value = AWS_REDIS.hget("ingress:#{params['GroupId']}", hkey)
+
+    return unless value
+
+    new_value = JSON.parse(value).reject do |r|
+      r == params['IpPermissions.1.IpRanges.1.CidrIp']
+    end
+
+    if new_value.length.positive?
+      AWS_REDIS.hset("ingress:#{params['GroupId']}", hkey, new_value.to_json)
+    else
+      AWS_REDIS.hdel("ingress:#{params['GroupId']}", hkey)
+    end
+  end
+
+  def describe_security_groups
+    group_id = params['GroupId.1']
+
+    xml = Builder::XmlMarkup.new
+    xml.instruct!
+    xmlns = 'http://ec2.amazonaws.com/doc/2016-11-15/'
+    xml.DescribeSecurityGroupsResponse(xmlns: xmlns) do
+      xml.tag!(:requestId, SecureRandom.hex(10))
+      xml.securityGroupInfo do
+        xml.item do
+          xml.tag!(:ownerId, SecureRandom.hex(10))
+          xml.tag!(:groupId, group_id)
+          xml.tag!(:groupName, 'group-name')
+          xml.tag!(:groupDescription, 'group-description')
+          xml.tag!(:vpcId, 'vpc-00000000')
+          xml.ipPermissions do
+            AWS_REDIS.hkeys("ingress:#{group_id}").each do |protocol_port_tuple|
+              xml.item do
+                (protocol, from_port, to_port) = protocol_port_tuple.split(':')
+
+                xml.tag!(:ipProtocol, protocol)
+                xml.tag!(:fromPort, from_port.to_i)
+                xml.tag!(:toPort, to_port.to_i)
+                xml.tag!(:groups, nil)
+
+                xml.ipRanges do
+                  ingress_permissions = \
+                    AWS_REDIS.hget("ingress:#{group_id}", protocol_port_tuple)
+                  JSON.parse(ingress_permissions).each do |cidr_ip|
+                    xml.item do
+                      xml.tag!(:cidrIp, cidr_ip)
+                    end
+                  end
+                end
+                xml.tag!(:ipv6Ranges, nil)
+                xml.tag!(:prefixListIds, nil)
+              end
+            end
+          end
+        end
+      end
+    end
+
+    content_type :xml
+    xml.target!
+  end
+end
+
+post %r{/ec2(\.(\w+?)\.amazonaws\.com)?/?(.*)} do
+  case params[:Action]
+  when 'AuthorizeSecurityGroupIngress'
+    authorize_security_group_ingress(params)
+
+    200
+  when 'RevokeSecurityGroupIngress'
+    revoke_security_group_ingress(params)
+
+    200
+  when 'DescribeSecurityGroups'
+    describe_security_groups
+  end
+end
data/clientside_aws/elastic_transcoder.rb
@@ -0,0 +1,179 @@
+helpers do
+  def encode_video(source_key, dest_key, pipeline_id)
+    pipeline = JSON.parse(AWS_REDIS.get("pipeline:#{pipeline_id}"))
+    bucket = pipeline['InputBucket']
+    input_obj_name = source_key
+    input_obj_key = "s3:bucket:#{bucket}:#{input_obj_name}"
+
+    bucket = pipeline['OutputBucket']
+    output_obj_name = dest_key
+    output_obj_key = "s3:bucket:#{bucket}:#{output_obj_name}"
+    input_obj_body = AWS_REDIS.hget input_obj_key, 'body'
+
+    tmp = Tempfile.new(source_key)
+    tmp.write(input_obj_body)
+    tmp.close
+
+    # Setup paths
+    encoded_path = tmp.path + '.enc'
+    faststart_path = tmp.path + '.fast'
+    final_path = nil
+
+    # Android is already encoded; ios needs re-encoding
+    # Everyone gets faststart treatment
+    ffmpeg_video(tmp.path, encoded_path)
+
+    if File.exist?(encoded_path)
+      final_path = if faststart_video(encoded_path, faststart_path)
+                     faststart_path
+                   else
+                     encoded_path
+                   end
+    end
+
+    if final_path && File.exist?(final_path)
+      # Write
+      file = File.open(final_path, 'rb')
+      video = file.read
+      file.close
+
+      AWS_REDIS.hset output_obj_key, 'body', video
+      AWS_REDIS.hset output_obj_key, 'content-type', 'video/mp4'
+
+      begin
+        File.delete(final_path) if File.exist?(final_path)
+      rescue Exception => e
+      end
+      begin
+        File.delete(encoded_path) if File.exist?(encoded_path)
+      rescue Exception => e
+      end
+      begin
+        File.delete(faststart_path) if File.exist?(faststart_path)
+      rescue Exception => e
+      end
+      begin
+        tmp.delete
+      rescue Exception => e
+      end
+    end
+  end
+
+  def ffmpeg_video(path, output)
+    flip = nil
+    transpose = nil
+    info = `avprobe #{path} 2>&1`
+    match = info.match(/rotate\s+:\s(\d+)\s/)
+    if match
+      rotation = match[1]
+      if rotation == '90'
+        transpose = 1
+      elsif rotation == '270'
+        transpose = 2
+      elsif rotation == '180'
+        flip = true
+      end
+    end
+
+    args = []
+
+    args << 'avconv'
+    args << '-y'
+
+    args << '-i'
+    args << path
+
+    args << '-f'
+    args << 'mp4'
+
+    if transpose
+      args << '-vf'
+      args << "transpose=#{transpose}"
+    elsif flip
+      args << '-vf'
+      args << 'vflip,hflip'
+    end
+
+    args << '-b:v'
+    args << '900k'
+
+    args << '-vcodec'
+    args << 'libx264'
+
+    args << '-ac'
+    args << '1'
+
+    args << '-ar'
+    args << '44100'
+
+    args << '-profile:v'
+    args << 'baseline'
+
+    # Just for avconv, because it complains about aac
+    args << '-strict'
+    args << 'experimental'
+
+    args << output + '-tmp'
+
+    encode_command = args.join(' ')
+    _results = `#{encode_command} 2>&1`
+
+    `mv #{output}-tmp #{output} 2>&1`
+  end
+
+  def faststart_video(path, output)
+    unless `which qt-faststart`.empty?
+      `qt-faststart #{path} #{output}`
+      return true
+    end
+
+    false
+  end
+
+  def create_job(args)
+    input_obj_name = args['Input']['Key']
+    output_obj_name = args['Output']['Key']
+    pipeline_id = args['PipelineId']
+
+    encode_video(input_obj_name, output_obj_name, pipeline_id)
+
+    content_type 'application/x-amz-json-1.0'
+    { Job: {
+      Id: 1,
+      Input: args['Input'],
+      Output: args['Output'],
+      PipelineId: args['PipelineId']
+    } }.to_json
+  end
+
+  def create_pipeline(args)
+    if AWS_REDIS.get "pipeline:#{args['Name']}"
+      pipeline_id = AWS_REDIS.get "pipeline:#{args['Name']}"
+    else
+      pipeline_id = SecureRandom.hex(10) + args['OutputBucket']
+      AWS_REDIS.set "pipeline:#{pipeline_id}", args.to_json
+      AWS_REDIS.set "pipeline:#{args['Name']}", pipeline_id
+    end
+
+    content_type 'application/x-amz-json-1.0'
+    { Pipeline: {
+      Id: pipeline_id,
+      Name: args['Name'],
+      Status: 'Completed',
+      InputBucket: args['InputBucket'],
+      OutputBucket: args['OutputBucket'],
+      Role: args['Role'],
+      Notifications: args['Notifications']
+    } }.to_json
+  end
+end
+
+post %r{/elastictranscoder\.(.+?)\.amazonaws\.com/?(.*)?} do
+  args = JSON.parse(env['rack.input'].read)
+
+  if args['Input'] && args['Output']
+    create_job(args)
+  else
+    create_pipeline(args)
+  end
+end
data/clientside_aws/firehose.rb
@@ -0,0 +1,13 @@
+post %r{/firehose(\.(\w+?)\.amazonaws\.com)?/?(.*)} do
+  args = if env['REQUEST_METHOD'] == 'POST'
+           JSON.parse(env['rack.input'].read)
+         else
+           env['rack.request.form_hash']
+         end
+
+  AWS_REDIS.zadd "firehose:#{args['DeliveryStreamName']}",
+                 Time.now.to_i,
+                 Base64.decode64(args['Records'].to_json)
+
+  200
+end
data/clientside_aws/kinesis.rb
@@ -0,0 +1,13 @@
+post %r{/kinesis(\.(\w+?)\.amazonaws\.com)?/?(.*)} do
+  args = if env['REQUEST_METHOD'] == 'POST'
+           JSON.parse(env['rack.input'].read)
+         else
+           env['rack.request.form_hash']
+         end
+
+  AWS_REDIS.zadd "kinesis:#{args['StreamName']}",
+                 Time.now.to_i,
+                 Base64.decode64(args['Data'])
+
+  200
+end
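
Both the Firehose route above and this Kinesis route are intentionally thin: each decodes the incoming record and appends it to a Redis sorted set scored with the current time. Here is a sketch of pushing one record through the Kinesis route; the host, port, and stream name are assumptions, not values from this diff.

# Illustrative only -- not part of the released files.
require 'json'
require 'base64'
require 'net/http'

uri = URI('http://localhost:4567/kinesis/')
body = {
  'StreamName' => 'events',
  'Data' => Base64.strict_encode64('{"user_id":1,"event":"signup"}'),
  'PartitionKey' => 'user-1' # accepted but ignored by the mock
}
Net::HTTP.post(uri, body.to_json, 'Content-Type' => 'application/x-amz-json-1.1')

# The route base64-decodes Data into a sorted set, so on the Redis side:
#   ZRANGE kinesis:events 0 -1  =>  ['{"user_id":1,"event":"signup"}']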
|
@@ -0,0 +1,14 @@
|
|
1
|
+
module Aws
|
2
|
+
module Firehose
|
3
|
+
class Client
|
4
|
+
# Monkeypatch to save the last sent message
|
5
|
+
attr_reader :last_stream_name
|
6
|
+
attr_reader :last_records
|
7
|
+
|
8
|
+
def put_record_batch(delivery_stream_name:, records:)
|
9
|
+
@last_stream_name = delivery_stream_name
|
10
|
+
@last_records = records
|
11
|
+
end
|
12
|
+
end
|
13
|
+
end
|
14
|
+
end
|
data/clientside_aws/mock/kinesis.rb
@@ -0,0 +1,18 @@
+module AWS
+  class Kinesis
+    class Client < Core::JSONClient
+      # Monkeypatch to save the last sent message
+      class V20131202
+        attr_reader :last_stream_name
+        attr_reader :last_data
+        attr_reader :last_partition_key
+
+        def put_record(stream_name:, data:, partition_key:)
+          @last_stream_name = stream_name
+          @last_data = data
+          @last_partition_key = partition_key
+        end
+      end
+    end
+  end
+end
data/clientside_aws/mock/s3.rb
@@ -0,0 +1,59 @@
+module AWS
+  class S3
+    class Bucket
+      begin
+        old_exists = instance_method(:exists?)
+        define_method(:exists?) do
+          begin
+            old_exists.bind(self).call
+          rescue Errors::NoSuchKey
+            false # bucket does not exist
+          end
+        end
+      rescue NameError
+        # aws-sdk-v1 is not being used
+      end
+    end
+
+    class PresignedPost
+      @@host = nil
+      @@port = nil
+      def self.mock_host=(host)
+        @@host = host
+      end
+
+      def self.mock_port=(port)
+        @@port = port
+      end
+
+      def mock_host
+        @@host || config.s3_endpoint.split(':').first
+      end
+
+      def mock_port
+        @@port || config.s3_endpoint.split(':')[1].split('/').first.to_i
+      end
+
+      def url
+        URI::HTTP.build(host: mock_host, path: "/s3/#{bucket.name}", port: mock_port)
+      end
+    end
+
+    # class Client < Core::Client
+    #   module Validators
+    #     # this keeps it from fucking up our hostname
+    #     def path_style_bucket_name?(_bucket_name)
+    #       true
+    #     end
+    #   end
+    # end
+
+    # class S3Object
+    #   def presign_v4(method, _options)
+    #     if method == :read || method == :get
+    #       "http://#{client.endpoint}/#{bucket.name}/#{key}"
+    #     end
+    #   end
+    # end
+  end
+end
data/clientside_aws/mock/ses.rb
@@ -0,0 +1,74 @@
+require 'uuid'
+
+module AWS
+  # Mock SES and enable retrieval of last message sent
+  # We also save messages to message_directory, if set
+  class SimpleEmailService
+    class SESMessage
+      def initialize
+        @id = SecureRandom.hex(10)
+      end
+
+      def successful?
+        true
+      end
+
+      def data
+        { message_id: @id }
+      end
+    end
+
+    @@message_directory = nil
+    @@sent_message = nil
+    @@sent_email = nil
+    def self.mock_clear_sent
+      @@sent_email = nil
+      @@sent_message = nil
+    end
+
+    def self.message_directory=(path)
+      @@message_directory = path
+    end
+
+    def self.mock_sent_email(clear = nil)
+      msg = @@sent_email
+      mock_clear_sent if clear
+      msg
+    end
+
+    def self.mock_sent_message(clear = nil)
+      msg = @@sent_message
+      mock_clear_sent if clear
+      msg
+    end
+
+    def quotas
+      { max_24_hour_send: 200, max_send_rate: 100.0, sent_last_24_hours: 22 }
+    end
+
+    def send_email(msg)
+      ses_message = SESMessage.new
+      to_adr = msg[:to]
+      from_adr = msg[:from]
+      _to_adr = to_adr[/(?<=<).*(?=>)/]
+      _from_adr = from_adr[/(?<=<).*(?=>)/]
+      fname = ses_message.data[:message_id]
+      log_msg("#{fname}.txt", "#{msg[:subject]}\n\n#{msg[:body_text]}") if msg[:body_text]
+      log_msg("#{fname}.html", msg[:body_html]) if msg[:body_html]
+      @@sent_email = msg
+      @@sent_message = ses_message
+      ses_message
+    end
+
+    private
+
+    def log_msg(file_name, content)
+      email_dir = @@message_directory
+      if email_dir
+        email_dir += '/' unless email_dir.end_with? '/'
+        FileUtils.mkdir_p(email_dir) unless File.directory?(email_dir)
+        File.open("#{email_dir}#{file_name}", 'w') { |file| file.write(content) }
+      end
+    end
+  end
+end