desoto-photoapp 0.1.0

+++ lib/desoto-photoapp.rb (new file, 106 lines)

require "desoto-photoapp/version"
require "desoto-photoapp/photo"
require "desoto-photoapp/s3"
require 'yaml'
require 'fileutils'
require 'colorator'

module Photoapp
  class Session
    attr_accessor :photos, :print, :upload

    ROOT = File.expand_path('~/cave.pics') # where photos are stored

    # relative to root
    CONFIG_FILE = 'photoapp.yml'
    UPLOAD = 'upload'
    PRINT = 'print'

    def initialize(options={})
      @photos = []
      @config = config(options)
    end

    # Defaults, merged with passed options and the photoapp.yml config file
    def config(options={})
      @config ||= begin
        config = {
          'source' => Dir.pwd, # where photos are located
          'url_base' => 'www.cave.pics',
          'watermark' => gem_dir('assets', 'watermark.png'),
          'font' => gem_dir('assets', 'SourceSansPro-Semibold.ttf'),
          'font_size' => 30,
          'config' => 'photoapp.yml',
          'upload' => 'upload',
          'print' => 'print'
        }

        config_file = root(options['config'] || config['config'])
        config['source'] = options['source'] || config['source']

        if File.exist?(config_file)
          config.merge!(YAML.load(File.read(config_file)) || {})
        end

        config['upload'] = root(config['upload'])
        config['print'] = root(config['print'])

        config
      end
    end

    def gem_dir(*paths)
      File.expand_path(File.join(File.dirname(__FILE__), '..', *paths))
    end

    def root(path='')
      File.expand_path(File.join(ROOT, path))
    end

    # Move photos through a temp dir, adjust and watermark them,
    # then write, import, and print each one
    def process
      logo = Magick::Image.read(config['watermark']).first
      photos = []
      tmp = root('.tmp')
      FileUtils.mkdir_p(tmp)

      # clear out old prints once the print queue has drained
      FileUtils.rm_rf(config['print']) if empty_print_queue?

      load_photos.each do |f|
        FileUtils.mv f, tmp
        path = File.join(tmp, File.basename(f))
        # run the bundled Automator workflow to adjust the image
        `automator -i #{path} #{gem_dir("lib/adjust-image.workflow")}`
        photos << Photo.new(path, logo, self)
      end

      photos.each do |p|
        p.write
        p.add_to_photos
        p.print
      end

      FileUtils.rm_rf tmp
    end

    def load_photos
      files = ['*.jpg', '*.JPG', '*.JPEG', '*.jpeg'].map! { |f| File.join(config['source'], f) }
      Dir[*files]
    end

    # True when the default printer reported by lpstat has no pending jobs
    def empty_print_queue?
      printer = `lpstat -d`.scan(/:\s*(.+)/).flatten.first
      return false unless printer # no default printer configured
      `lpstat -o -P #{printer.strip}`.strip.empty?
    end

    def upload
      S3.new(@config).push
      FileUtils.rm_rf config['upload']
    end
  end
end
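
Taken together, a Session loads JPEGs from a source directory, watermarks and prints them, and stages web copies under ~/cave.pics/upload. The gem's CLI entry point is not part of this diff, so the following is only a sketch of driving a session directly; 'source' is the option key Session#config actually reads:

  require 'desoto-photoapp'

  # Sketch: process photos from the current directory, then push to S3.
  session = Photoapp::Session.new('source' => Dir.pwd)
  session.process # resize, watermark, print, stage uploads
  session.upload  # Photoapp::S3#push, then clear the upload dir

S3 credentials and the bucket name come from the same merged config, so a ~/cave.pics/photoapp.yml along these lines (values are placeholders) covers both steps:

  url_base: www.cave.pics
  bucket_name: my-bucket
  access_key_id: AKIA...
  secret_access_key: ...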
+++ lib/desoto-photoapp/photo.rb (new file, 99 lines)

require 'RMagick'

module Photoapp
  class Photo
    include Magick
    attr_accessor :file, :logo, :image, :config, :session

    def initialize(file, logo, session)
      @file = file
      @logo = logo
      @session = session
      @config = session.config
    end

    def image
      @image ||= Image.read(file).first.resize_to_fill(2100, 1500, NorthGravity)
    end

    # Photo with the logo composited into the bottom-left corner
    def watermark
      @watermarked ||= image.composite(logo, SouthWestGravity, OverCompositeOp)
    end

    # Watermarked photo with the short URL stamped in the bottom-right:
    # a blurred dark copy sits behind the light copy for legibility
    def with_url
      @printable ||= begin
        light_url = add_url("#fff")
        dark_url = add_url("#000", true).blur_image(6.0, 2.0)
        watermark.dup
          .composite(dark_url, SouthEastGravity, OverCompositeOp)
          .composite(light_url, SouthEastGravity, OverCompositeOp)
      end
    end

    # Render the photo's short URL onto a transparent canvas
    def add_url(color, stroke=false)
      setting = config
      image = Image.new(800, 100) { self.background_color = "rgba(255, 255, 255, 0)" }
      text = Draw.new
      text.annotate(image, 0, 0, 60, 50, "#{setting['url_base']}/#{short}.jpg") do
        text.gravity = SouthEastGravity
        text.pointsize = setting['font_size']
        text.fill = color
        text.font = setting['font']
        text.stroke = color if stroke
      end
      image
    end

    def write
      puts "writing #{upload_dest}"
      puts "writing #{print_dest}"
      FileUtils.mkdir_p(File.dirname(upload_dest))
      FileUtils.mkdir_p(File.dirname(print_dest))
      watermark.write upload_dest
      with_url.write print_dest
      cleanup
    end

    # Handle printing
    def print
      system "lpr #{print_dest}"
    end

    # Run the bundled Automator workflow on the print directory
    def add_to_photos
      `automator -i #{config['print']} #{@session.gem_dir("lib/import-photos.workflow")}`
    end

    # Free ImageMagick memory once the files are written
    def cleanup
      watermark.destroy!
      with_url.destroy!
    end

    def upload_dest
      File.join(config['upload'], short + '.jpg')
    end

    def print_dest
      File.join(config['print'], short + '.jpg')
    end

    # Unique short name: five random characters (skipping the easily
    # confused o, l, 0, and 1) plus a year/day/month stamp
    def short
      @short ||= begin
        now = Time.now
        date = "#{now.strftime('%y')}#{now.strftime('%d')}#{now.month}"
        source = [*?a..?z] - ['o', 'l'] + [*2..9]
        short = ''
        5.times { short << source.sample.to_s }
        short = "#{short}#{date}"
        session.photos << short + '.jpg'
        short
      end
    end
  end
end
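
Photo#short is what links the printed URL to the uploaded file: five characters sampled from an alphabet that drops the easily-confused o, l, 0, and 1, followed by a two-digit year, two-digit day, and unpadded month. A self-contained sketch of the same scheme:

  # Standalone sketch of Photo#short's naming logic.
  now = Time.now
  date = "#{now.strftime('%y')}#{now.strftime('%d')}#{now.month}" # "150412" on Dec 4, 2015
  source = [*?a..?z] - ['o', 'l'] + [*2..9] # 24 letters + 8 digits
  puts 5.times.map { source.sample.to_s }.join + date # e.g. "x7k2p150412"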
+++ lib/desoto-photoapp/s3.rb (new file, 275 lines)

require 'find'
require 'fileutils'
require 'digest/md5'

module Photoapp
  class S3

    def initialize(options)
      begin
        require 'aws-sdk-v1'
      rescue LoadError
        abort "Deploying to S3 requires the aws-sdk-v1 gem. Install with `gem install aws-sdk-v1`."
      end
      @options = options
      @local = File.expand_path(options['upload'] || 'upload')
      @bucket_name = options['bucket_name']
      @access_key = options['access_key_id'] || ENV['AWS_ACCESS_KEY_ID']
      @secret_key = options['secret_access_key'] || ENV['AWS_SECRET_ACCESS_KEY']
      @region = options['region'] || ENV['AWS_DEFAULT_REGION'] || 'us-east-1'
      @distro_id = options['distribution_id'] || ENV['AWS_DISTRIBUTION_ID']
      @remote_path = (options['remote_path'] || '/').sub(/^\//, '') # remove leading slash
      @verbose = options['verbose']
      @incremental = options['incremental']
      @delete = options['delete']
      @headers = options['headers'] || []
      @pull_dir = options['dir']
      connect
    end

    # Upload local files to the bucket
    def push
      @bucket = @s3.buckets[@bucket_name]
      if !@bucket.exists?
        abort "Bucket not found: '#{@bucket_name}'. Check your configuration or create a bucket using: `octopress deploy add-bucket`"
      else
        puts "Syncing #{@local} files to #{@bucket_name} on S3."
        write_files
        delete_files if delete_files?
        status_message
      end
    end

    # Download every object in the bucket into the pull directory
    def pull
      @bucket = @s3.buckets[@bucket_name]
      if !@bucket.exists?
        abort "Bucket not found: '#{@bucket_name}'. Check your configuration or create a bucket using: `octopress deploy add-bucket`"
      else
        puts "Syncing from S3 bucket: '#{@bucket_name}' to #{@pull_dir}."
        @bucket.objects.each do |object|
          path = File.join(@pull_dir, object.key)

          if path =~ /\/$/
            # Path is a directory, not a file
            FileUtils.mkdir_p(path) unless File.directory?(path)
          else
            dir = File.dirname(path)
            FileUtils.mkdir_p(dir) unless File.directory?(dir)
            File.open(path, 'w') { |f| f.write(object.read) }
          end
        end
      end
    end

    # Connect to S3 and CloudFront using the AWS SDK
    def connect
      AWS.config(access_key_id: @access_key, secret_access_key: @secret_key, region: @region)
      @s3 = AWS.s3
      @cloudfront = AWS.cloud_front.client
    end

    # Write site files to the selected bucket
    def write_files
      puts "Writing #{pluralize('file', site_files.size)}:" if @verbose
      files_to_invalidate = []
      site_files.each do |file|
        s3_filename = remote_path(file)
        o = @bucket.objects[s3_filename]
        file_with_options = get_file_with_metadata(file, s3_filename)

        begin
          s3sum = o.etag.tr('"', '') if o.exists?
        rescue AWS::S3::Errors::NoSuchKey
          s3sum = ""
        end

        # In incremental mode, skip files whose MD5 matches the S3 etag
        if @incremental && (s3sum == Digest::MD5.file(file).hexdigest)
          if @verbose
            puts "= #{remote_path(file)}"
          else
            progress('=')
          end
        else
          o.write(file_with_options)
          files_to_invalidate.push(file)
          if @verbose
            puts "+ #{remote_path(file)}"
          else
            progress('+')
          end
        end
      end

      invalidate_cache(files_to_invalidate) unless @distro_id.nil?
    end

    # Ask CloudFront to invalidate the uploaded files
    def invalidate_cache(files)
      puts "Invalidating cache for #{pluralize('file', files.size)}" if @verbose
      @cloudfront.create_invalidation(
        distribution_id: @distro_id,
        invalidation_batch: {
          paths: {
            quantity: files.size,
            items: files.map { |file| "/" + remote_path(file) }
          },
          # String of 8 random chars to uniquely id this invalidation
          caller_reference: (0...8).map { ('a'..'z').to_a[rand(26)] }.join
        }
      ) unless files.empty?
    end

    # Build the S3 write options for a file, applying any configured
    # per-pattern headers (expires, content type, caching, encoding)
    def get_file_with_metadata(file, s3_filename)
      file_with_options = {
        :file => file,
        :acl => :public_read
      }

      @headers.each do |conf|
        if conf.has_key?('filename') && s3_filename.match(conf['filename'])
          if @verbose
            puts "+ #{remote_path(file)} matched pattern #{conf['filename']}"
          end

          if conf.has_key? 'expires'
            expire_date = conf['expires']

            # Expand relative expirations like '+1 year' or '+30 days'
            relative_years = /^\+(\d+) year(s)?$/.match(conf['expires'])
            if relative_years
              expire_date = (Time.now + (60 * 60 * 24 * 365 * relative_years[1].to_i)).httpdate
            end

            relative_days = /^\+(\d+) day(s)?$/.match(conf['expires'])
            if relative_days
              expire_date = (Time.now + (60 * 60 * 24 * relative_days[1].to_i)).httpdate
            end

            file_with_options[:expires] = expire_date
          end

          if conf.has_key? 'content_type'
            file_with_options[:content_type] = conf['content_type']
          end

          if conf.has_key? 'cache_control'
            file_with_options[:cache_control] = conf['cache_control']
          end

          if conf.has_key? 'content_encoding'
            file_with_options[:content_encoding] = conf['content_encoding']
          end
        end
      end

      file_with_options
    end

    # Delete files from the bucket, to ensure a 1:1 match with site files
    def delete_files
      if deletable_files.size > 0
        puts "Deleting #{pluralize('file', deletable_files.size)}:" if @verbose
        deletable_files.each do |file|
          @bucket.objects.delete(file)
          if @verbose
            puts "- #{file}"
          else
            progress('-')
          end
        end
      end
    end

    # Create a new S3 bucket and configure it for website hosting
    def add_bucket
      @bucket = @s3.buckets.create(@bucket_name)
      puts "Created new bucket '#{@bucket_name}' in region '#{@region}'."
      configure_bucket
    end

    def configure_bucket
      error_page = @options['error_page'] || remote_path('404.html')
      index_page = @options['index_page'] || remote_path('index.html')

      @bucket.configure_website do |cfg|
        cfg.index_document_suffix = index_page
        cfg.error_document_key = error_page
      end
      puts "Bucket configured with index_document: #{index_page} and error_document: #{error_page}."
    end

    def delete_files?
      !!@delete
    end

    # Local site files
    def site_files
      @site_files ||= Find.find(@local).to_a.reject do |f|
        File.directory?(f)
      end
    end

    # Destination paths for local site files
    def site_files_dest
      @site_files_dest ||= site_files.map { |f| remote_path(f) }
    end

    # Replace local path with remote path
    def remote_path(file)
      File.join(@remote_path, file.sub(@local, '')).sub(/^\//, '')
    end

    # Files from the bucket which are deletable
    # Only deletes files beneath the remote_path if specified
    def deletable_files
      return [] unless delete_files?
      unless @deletable
        @deletable = @bucket.objects.map(&:key) - site_files_dest
        @deletable.reject! { |f| (f =~ /^#{@remote_path}/).nil? }
      end
      @deletable
    end

    # List written and deleted file counts
    def status_message
      uploaded = site_files.size
      deleted = deletable_files.size

      message = "\nSuccess:".green + " #{uploaded} #{pluralize('file', uploaded)} uploaded"
      message << ", #{deleted} #{pluralize('file', deleted)} deleted."
      puts message
      configure_bucket unless @bucket.website?
    end

    # Print consecutive progress characters without newlines
    def progress(str)
      print str
      $stdout.flush
    end

    def pluralize(str, num)
      num == 1 ? str : str + 's'
    end

    # Return default configuration options for this deployment type
    def self.default_config(options={})
      <<-CONFIG
#{"bucket_name: #{options[:bucket_name]}".ljust(40)} # Name of the S3 bucket where these files will be stored.
#{"access_key_id: #{options[:access_key_id]}".ljust(40)} # Get this from your AWS console at aws.amazon.com.
#{"secret_access_key: #{options[:secret_access_key]}".ljust(40)} # Keep it safe; keep it secret. Keep this file in your .gitignore.
#{"distribution_id: #{options[:distribution_id]}".ljust(40)} # Get this from your CloudFront page at https://console.aws.amazon.com/cloudfront/
#{"remote_path: #{options[:remote_path] || '/'}".ljust(40)} # Relative path on the bucket where files should be copied.
#{"region: #{options[:region] || 'us-east-1'}".ljust(40)} # Region where your bucket is located.
#{"verbose: #{options[:verbose] || 'false'}".ljust(40)} # Print out all file operations.
#{"incremental: #{options[:incremental] || 'false'}".ljust(40)} # Only upload new/changed files.
#{"delete: #{options[:delete] || 'false'}".ljust(40)} # Remove files from destination which do not match source files.
      CONFIG
    end

  end
end
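
Photoapp::S3 is usable on its own as well. A minimal push sketch, assuming credentials in the environment and a placeholder bucket name (Session normally supplies this hash from photoapp.yml):

  require 'desoto-photoapp'

  config = {
    'upload'      => File.expand_path('~/cave.pics/upload'),
    'bucket_name' => 'my-bucket', # placeholder
    'incremental' => true         # skip files whose MD5 matches the S3 etag
  }
  # access_key_id / secret_access_key fall back to
  # AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY when not set here.
  Photoapp::S3.new(config).push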