blobsterix 0.0.9
Sign up to get free protection for your applications and to get access to all the features.
- data/.gitignore +27 -0
- data/CHANGELOG.txt +13 -0
- data/Gemfile +16 -0
- data/LICENSE +22 -0
- data/README.md +122 -0
- data/Rakefile +13 -0
- data/bin/blobsterix +152 -0
- data/bin/test +26 -0
- data/blobsterix.gemspec +39 -0
- data/config/lighttpd.conf +50 -0
- data/lib/blobsterix.rb +213 -0
- data/lib/blobsterix/blob/blob_api.rb +55 -0
- data/lib/blobsterix/blob/blob_url_helper.rb +55 -0
- data/lib/blobsterix/helper/accept_type.rb +62 -0
- data/lib/blobsterix/helper/blob_access.rb +73 -0
- data/lib/blobsterix/helper/config_loader.rb +33 -0
- data/lib/blobsterix/helper/data_response.rb +54 -0
- data/lib/blobsterix/helper/http.rb +47 -0
- data/lib/blobsterix/helper/logable.rb +11 -0
- data/lib/blobsterix/helper/murmur.rb +137 -0
- data/lib/blobsterix/helper/status_info.rb +42 -0
- data/lib/blobsterix/helper/template_renderer.rb +39 -0
- data/lib/blobsterix/mimemagic/magic.rb +138 -0
- data/lib/blobsterix/mimemagic/tables.rb +1770 -0
- data/lib/blobsterix/mimemagic/version.rb +5 -0
- data/lib/blobsterix/router/app_router.rb +134 -0
- data/lib/blobsterix/s3/s3_api.rb +92 -0
- data/lib/blobsterix/s3/s3_url_helper.rb +93 -0
- data/lib/blobsterix/service.rb +34 -0
- data/lib/blobsterix/status/status_api.rb +62 -0
- data/lib/blobsterix/status/status_url_helper.rb +11 -0
- data/lib/blobsterix/storage/blob_meta_data.rb +60 -0
- data/lib/blobsterix/storage/bucket.rb +36 -0
- data/lib/blobsterix/storage/bucket_entry.rb +29 -0
- data/lib/blobsterix/storage/bucket_list.rb +26 -0
- data/lib/blobsterix/storage/cache.rb +90 -0
- data/lib/blobsterix/storage/file_system.rb +132 -0
- data/lib/blobsterix/storage/file_system_meta_data.rb +136 -0
- data/lib/blobsterix/storage/storage.rb +30 -0
- data/lib/blobsterix/transformation/image_transformation.rb +439 -0
- data/lib/blobsterix/transformation/transformation.rb +30 -0
- data/lib/blobsterix/transformation/transformation_chain.rb +78 -0
- data/lib/blobsterix/transformation/transformation_manager.rb +115 -0
- data/lib/blobsterix/version.rb +3 -0
- data/scripts/download.rb +30 -0
- data/scripts/test +6 -0
- data/spec/lib/blob/blob_api_spec.rb +81 -0
- data/spec/lib/helper/blob_access_spec.rb +72 -0
- data/spec/lib/s3/s3_api_spec.rb +183 -0
- data/spec/lib/service_spec.rb +12 -0
- data/spec/lib/status/status_api_spec.rb +42 -0
- data/spec/lib/storage/cache_spec.rb +135 -0
- data/spec/lib/storage/file_system_spec.rb +84 -0
- data/spec/spec_helper.rb +139 -0
- data/templates/app/Gemfile +12 -0
- data/templates/app/Rakefile +7 -0
- data/templates/app/config.rb +61 -0
- data/templates/app/config/environments/development.rb +40 -0
- data/templates/app/config/environments/production.rb +40 -0
- data/templates/app/storages/.keep +0 -0
- data/templates/app/transformators/.keep +0 -0
- data/templates/app/views/.keep +0 -0
- data/templates/storage_template.rb +30 -0
- data/templates/transformation_template.rb +41 -0
- data/templates/views/error_page.erb +18 -0
- data/templates/views/status_page.erb +31 -0
- metadata +325 -0
@@ -0,0 +1,36 @@
|
|
1
|
+
module Blobsterix
  module Storage
    # Represents a single S3-style bucket: a name, a creation timestamp,
    # and a list of entries (BucketEntry objects) held in +contents+.
    class Bucket
      attr_accessor :name, :creation_date, :contents

      # Yields the freshly built bucket when a block is given so callers
      # can populate +contents+ inline.
      def initialize(name, date)
        @name = name
        @creation_date = date
        @contents = []
        yield self if block_given?
      end

      # Renders an S3 "ListBucketResult" XML document for this bucket,
      # delegating each entry's <Contents> element to BucketEntry#insert_xml.
      # NOTE(review): the xmlns embeds today's date; real S3 uses the fixed
      # namespace http://s3.amazonaws.com/doc/2006-03-01/ — confirm intent.
      def to_xml
        today = Date.today
        namespace = "http://doc.s3.amazonaws.com/#{today.year}-#{today.month}-#{today.day}"
        builder = Nokogiri::XML::Builder.new do |doc|
          doc.ListBucketResult(:xmlns => namespace) do
            doc.Name name
            doc.Prefix
            doc.Marker
            doc.MaxKeys 1000
            doc.IsTruncated false
            contents.each { |entry| entry.insert_xml(doc) }
          end
        end
        builder.to_xml
      end

      # Appends this bucket's <Bucket> element to an enclosing builder
      # (used by BucketList when listing all buckets).
      def insert_xml(xml)
        xml.Bucket do
          xml.Name name
          xml.CreationDate creation_date
        end
      end
    end
  end
end
|
@@ -0,0 +1,29 @@
|
|
1
|
+
module Blobsterix
  module Storage
    # One <Contents> entry of an S3 bucket listing: key, timestamps,
    # ETag, size plus Blobsterix-specific MimeType/FullPath extensions.
    class BucketEntry
      attr_accessor :key, :last_modified, :etag, :size, :storage_class, :mimetype, :fullpath

      # Builds an entry with S3-compatible placeholder defaults; yields
      # itself when a block is given so callers can fill real metadata.
      def initialize(key)
        @key = key
        @last_modified = "2009-10-12T17:50:30.000Z"
        # S3 serves ETags wrapped in literal double quotes. The previous
        # source read `""fba9…""`, which Ruby parses (adjacent-literal
        # concatenation) as an UNQUOTED string — the escaping was lost.
        # Restore the quoted form expected by S3 clients.
        @etag = "\"fba9dede5f27731c9771645a39863328\""
        @size = "0"
        @storage_class = "STANDARD"
        @mimetype = "none"
        @fullpath = ""
        yield self if block_given?
      end

      # Appends this entry's <Contents> element to an enclosing
      # Nokogiri builder (see Bucket#to_xml).
      def insert_xml(xml)
        xml.Contents {
          xml.Key key
          xml.LastModified last_modified
          xml.ETag etag
          xml.Size size
          xml.StorageClass storage_class
          xml.MimeType mimetype
          xml.FullPath fullpath
        }
      end
    end
  end
end
|
@@ -0,0 +1,26 @@
|
|
1
|
+
module Blobsterix
  module Storage
    # Collects Bucket objects and renders the S3
    # "ListAllMyBucketsResult" document for a GET-service request.
    class BucketList
      attr_accessor :buckets

      # Starts with an empty bucket collection; yields itself when a
      # block is given so callers can append buckets inline.
      def initialize
        @buckets = []
        yield self if block_given?
      end

      # Serializes every bucket via Bucket#insert_xml inside a single
      # <Buckets> wrapper element.
      # NOTE(review): the xmlns embeds today's date; real S3 uses the fixed
      # namespace http://s3.amazonaws.com/doc/2006-03-01/ — confirm intent.
      def to_xml
        today = Date.today
        namespace = "http://doc.s3.amazonaws.com/#{today.year}-#{today.month}-#{today.day}"
        builder = Nokogiri::XML::Builder.new do |doc|
          doc.ListAllMyBucketsResult(:xmlns => namespace) do
            doc.Buckets do
              buckets.each { |entry| entry.insert_xml(doc) }
            end
          end
        end
        builder.to_xml
      end
    end
  end
end
|
@@ -0,0 +1,90 @@
|
|
1
|
+
module Blobsterix
  module Storage
    # File-backed cache for transformed blobs. Cache files live under
    # @path in a directory tree derived from a Murmur hash of the
    # bucket/id pair; each cached file carries a sidecar ".meta" file
    # (see FileSystemMetaData) holding the originating BlobAccess fields.
    class Cache
      include Blobsterix::Logable

      # Sweeps every cached (non-.meta) file and deletes the ones the
      # application-supplied Blobsterix.cache_checker deems stale, based
      # on the file's last access/modification times.
      def invalidation
        each_meta_file do |meta_file|
          blob_access=meta_to_blob_access(meta_file)
          if Blobsterix.cache_checker.call(blob_access,meta_file.last_accessed,meta_file.last_modified)
            invalidate(blob_access, true)
          end
        end
      end

      # Roots the cache at +path+, creating the directory if needed.
      def initialize(path)
        @path = Pathname.new(path)
        FileUtils.mkdir_p(@path) if !Dir.exist?(@path)
      end

      # Returns the metadata wrapper for the cached file of +blob_access+
      # (the file itself may or may not exist — see #exists?).
      def get(blob_access)
        FileSystemMetaData.new(cache_file_path(blob_access))
      end

      # Writes +data+ to the cache slot of +blob_access+, recording the
      # access fields (bucket/id/trafo/accept_type) in the .meta sidecar
      # so invalidation can later reconstruct the BlobAccess.
      def put(blob_access, data)
        FileSystemMetaData.new(cache_file_path(blob_access),:bucket => blob_access.bucket, :id => blob_access.id, :trafo => blob_access.trafo, :accept_type => "#{blob_access.accept_type}").write() {|f|
          f.write(data)
        }
      end

      # Removes the cached file (and its sidecar) if present.
      def delete(blob_access)
        FileSystemMetaData.new(cache_file_path(blob_access)).delete if exists?(blob_access)
      end

      # True when a cached file exists; also reports the hit/miss to the
      # global Blobsterix event hooks as a side effect.
      def exists?(blob_access)
        valid = File.exist?(cache_file_path(blob_access))
        valid ? Blobsterix.cache_hit(blob_access) : Blobsterix.cache_miss(blob_access)
        valid
      end

      #invalidates all!!! formats of a blob_access
      # With delete_single=true only the exact cached variant is removed;
      # otherwise every non-.meta file in the blob's cache directory whose
      # name matches the access identifier is deleted.
      def invalidate(blob_access, delete_single=false)
        if delete_single
          FileSystemMetaData.new(cache_file_path(blob_access)).delete
        else
          cache_path(blob_access).entries.each {|cache_file|
            unless cache_file.to_s.match(/\.meta$/) || cache_file.directory?
              FileSystemMetaData.new(cache_path(blob_access).join(cache_file)).delete if cache_file.to_s.match(blob_access.identifier)
            end
          } if Dir.exist?(cache_path(blob_access))
        end
      end

      private

      # Yields a FileSystemMetaData for every cached payload file
      # (directories and .meta sidecars are skipped).
      def each_meta_file
        Dir.glob(@path.join("**/*")).each {|file|
          cache_file = Pathname.new file
          if block_given? && !cache_file.to_s.match(/\.meta$/) && !cache_file.directory?
            yield FileSystemMetaData.new(cache_file)
            cache_file
          end
        }
      end

      # Rebuilds a BlobAccess from the fields stored in a .meta payload.
      def meta_to_blob_access(meta_file)
        BlobAccess.new(:bucket => meta_file.payload["bucket"], :id => meta_file.payload["id"], :trafo => meta_file.payload["trafo"], :accept_type => AcceptType.new(meta_file.payload["accept_type"]||""))
      end

      # Full path of the cached file: hashed directory + access identifier.
      def cache_file_path(blob_access)
        cache_path(blob_access).join(blob_access.identifier)
      end

      # Directory for all cached variants of a bucket/id pair.
      def cache_path(blob_access)
        @path.join(hash_filename("#{blob_access.bucket}_#{blob_access.id.gsub("/","_")}"))
      end

      # Spreads files across a 6-level directory tree by slicing the
      # 64-bit Murmur hash of +filename+ into 11-bit hex chunks
      # (mirrors FileSystem#map_filename, minus the trailing filename).
      def hash_filename(filename)
        hash = Murmur.Hash64B(filename)
        bits = hash.to_s(2)
        parts = []
        6.times { |index|
          len = 11
          len = bits.length if len >= bits.length
          value = bits.slice!(0, len).to_i(2).to_s(16).rjust(3,"0")
          parts.push(value)
        }
        parts.join("/")
      end
    end
  end
end
|
@@ -0,0 +1,132 @@
|
|
1
|
+
require 'fileutils'

module Blobsterix
  module Storage
    # Storage backend that keeps blobs on the local file system under a
    # root directory, one subdirectory per bucket. Keys are mapped to a
    # hashed 6-level directory layout (see #map_filename); each blob has
    # a FileSystemMetaData sidecar.
    class FileSystem < Storage
      include Blobsterix::Logable

      # Roots the storage at +path+, creating the directory if needed.
      def initialize(path)
        logger.info "Create FileSystem at #{path}"
        @contents = path
        FileUtils.mkdir_p(@contents) if !Dir.exist?(@contents)
      end

      # True when +bucket+ exists as a directory under the storage root.
      def bucket_exist(bucket="root")
        Dir.entries(contents).include?(bucket) and File.directory?(File.join(contents,bucket))
      end

      # Lists either all buckets (bucket "root") or the contents of one
      # bucket. Returns a BucketList / Bucket on success, or a Nokogiri
      # builder holding an error element for an unknown bucket.
      # NOTE(review): `bucket =~ /root/` matches any name CONTAINING
      # "root" (e.g. "myroot"), not just "root" — confirm intent.
      def list(bucket="root")
        if bucket =~ /root/
          BucketList.new do |l|

            Dir.entries("#{contents}").each{|dir|
              l.buckets << Bucket.new(dir, time_string_of(dir)) if File.directory? File.join("#{contents}",dir) and !(dir =='.' || dir == '..')
            }
          end
        else
          if bucket_exist(bucket)
            b = Bucket.new(bucket, time_string_of(bucket))
            bucket_files(bucket).each do |file|
              b.contents << BucketEntry.new(file) do |entry|
                meta = metaData(bucket, file)
                entry.last_modified = meta.last_modified.strftime("%Y-%m-%dT%H:%M:%S.000Z")
                entry.etag = meta.etag
                entry.size = meta.size
                entry.mimetype = meta.mimetype
                entry.fullpath = contents(bucket, file).gsub("#{contents}/", "")
              end
            end
            b
          else
            Nokogiri::XML::Builder.new do |xml|
              xml.Error "no such bucket"
            end
          end
        end
      end

      # Fetches the metadata for bucket/key; reports the read (or the
      # failed read) to the global Blobsterix hooks. A miss returns an
      # empty BlobMetaData rather than raising.
      def get(bucket, key)
        if (not File.directory?(contents(bucket, key))) and bucket_files(bucket).include?(key)
          Blobsterix.storage_read(BlobAccess.new(:bucket => bucket, :id => key))
          metaData(bucket, key)
        else
          Blobsterix.storage_read_fail(BlobAccess.new(:bucket => bucket, :id => key))
          Blobsterix::Storage::BlobMetaData.new
        end
      end

      # Stores +value+ (an IO-like object responding to #read) under
      # bucket/key and invalidates any cached transformations of it.
      def put(bucket, key, value)
        Blobsterix.storage_write(BlobAccess.new(:bucket => bucket, :id => key))

        meta = metaData(bucket, key).write() {|f| f.write(value.read) }

        Blobsterix.cache.invalidate(Blobsterix::BlobAccess.new(:bucket => bucket, :id => key))
        meta
      end

      # Creates the bucket directory (idempotent) and returns an empty
      # XML document string, matching the S3 PUT-bucket response shape.
      def create(bucket)
        logger.info "Storage: create bucket #{contents(bucket)}"
        FileUtils.mkdir_p(contents(bucket)) if not File.exist?(contents(bucket))

        Nokogiri::XML::Builder.new do |xml|
        end.to_s
      end

      # Deletes the bucket directory, but only when it exists and holds
      # no blob files.
      def delete(bucket)
        logger.info "Storage: delete bucket #{contents(bucket)}"

        FileUtils.rm_rf(contents(bucket)) if bucket_exist(bucket) && bucket_files(bucket).empty?

        #Dir.rmdir(contents(bucket)) if bucket_exist(bucket) && bucket_files(bucket).empty?
      end

      # Deletes a single blob (and sidecar) and invalidates its cache
      # entries; reports the delete to the global hooks either way.
      def delete_key(bucket, key)
        Blobsterix.storage_delete(BlobAccess.new(:bucket => bucket, :id => key))
        Blobsterix.cache.invalidate(Blobsterix::BlobAccess.new(:bucket => bucket, :id => key))

        metaData(bucket, key).delete if bucket_files(bucket).include? key
      end

      private
      # Path helper: no args -> storage root; bucket -> bucket dir;
      # bucket+key -> hashed file path ("/" in keys becomes "\" before
      # hashing so the key maps to a single path segment).
      def contents(bucket=nil, key=nil)
        if bucket
          key ? File.join(@contents, bucket, map_filename(key.gsub("/", "\\"))) : File.join(@contents, bucket)
        else
          @contents
        end
      end

      # Spreads files across a 6-level directory tree by slicing the
      # 64-bit Murmur hash of +filename+ into 11-bit hex chunks, then
      # appends the (escaped) filename itself as the leaf.
      def map_filename(filename)
        hash = Murmur.Hash64B(filename)
        bits = hash.to_s(2)
        parts = []
        6.times { |index|
          len = 11
          len = bits.length if len >= bits.length
          value = bits.slice!(0, len).to_i(2).to_s(16).rjust(3,"0")
          parts.push(value)
        }
        parts.push(filename)
        parts.join("/")
      end

      # Lists the logical keys of a bucket by stripping the 6 hashed
      # path segments and undoing the "/"->"\" key escaping; .meta
      # sidecars and directories are excluded.
      def bucket_files(bucket)
        if (bucket_exist(bucket))
          Dir.glob("#{contents}/#{bucket}/**/*").select{|e| !File.directory?(e) and not e.end_with?(".meta")}.map{ |e|
            e.gsub("#{contents}/#{bucket}/","").gsub(/\w+\/\w+\/\w+\/\w+\/\w+\/\w+\//, "").gsub("\\", "/")
          }
        else
          []
        end
      end

      # Metadata wrapper for the physical file behind bucket/key.
      def metaData(bucket, key)
        Blobsterix::Storage::FileSystemMetaData.new(contents(bucket, key))
      end

      # ISO-8601-style timestamp of a file's ctime, as S3 expects.
      def time_string_of(*file_name)
        File.ctime("#{contents}/#{file_name.flatten.join("/")}").strftime("%Y-%m-%dT%H:%M:%S.000Z")
      end
    end
  end
end
|
@@ -0,0 +1,136 @@
|
|
1
|
+
module Blobsterix
  module Storage
    # Metadata wrapper around one stored file. Caches derived values
    # (etag, mimetype, size, arbitrary payload) in a JSON ".meta"
    # sidecar next to the file so they survive across processes.
    #
    # Fix: all `File.exists?` calls replaced with `File.exist?` —
    # the plural alias was deprecated since Ruby 2.1 and REMOVED in
    # Ruby 3.2, where it raises NoMethodError.
    class FileSystemMetaData < BlobMetaData
      include Blobsterix::Logable

      # Wraps the file at +path_+; +payload+ carries caller-defined
      # fields (e.g. the cache's bucket/id/trafo). Loads any existing
      # sidecar immediately.
      def initialize(path_, payload={})
        @payload=payload
        @path = path_
        @last_modified = ""
        load_meta_file
      end

      def to_s
        @path
      end

      # Identity check against a caller-supplied key.
      # NOTE(review): @key is never assigned in this class, so this
      # compares against nil unless a subclass sets it — confirm.
      def check(key)
        @key === key
      end

      # MD5 of the file contents, memoized until the file's mtime-like
      # stamp (ctime) changes; recomputed after a modification.
      def etag
        if @last_modified === last_modified
          @etag ||= Digest::MD5.hexdigest(data)
        else
          @last_modified = last_modified
          @etag = Digest::MD5.hexdigest(data)
        end
      end

      # Full mime type string (e.g. "image/png"), detected lazily.
      def mimetype
        (@mimetype ||= get_mime.type)
      end

      # Media type component (e.g. "image"), detected lazily.
      def mediatype
        (@mediatype ||= get_mime.mediatype)
      end

      # Reads the whole file from disk on EVERY call (no caching);
      # returns "" when the file is gone.
      def data
        File.exist?(path) ? File.read(path) : ""
      end

      def path
        @path
      end

      # File size in bytes, memoized; 0 when the file is gone.
      def size
        @size ||= File.exist?(path) ? File.size(path) : 0
      end

      def last_modified
        File.ctime(path)#.strftime("%Y-%m-%dT%H:%M:%S.000Z")
      end

      def last_accessed
        File.atime(path)#.strftime("%Y-%m-%dT%H:%M:%S.000Z")
      end

      def accept_type
        @accept_type ||= AcceptType.new(get_mime().to_s)
      end

      # HTTP response headers for serving this file (1-day cache TTL).
      def header()
        {"Etag" => etag, "Content-Type" => mimetype, "Last-Modified" => last_modified.strftime("%Y-%m-%dT%H:%M:%S.000Z"), "Cache-Control" => "max-age=#{60*60*24}", "Expires" => (Time.new+(60*60*24)).strftime("%Y-%m-%dT%H:%M:%S.000Z")}
      end

      # True while the underlying file exists on disk.
      def valid
        File.exist?(path)
      end

      def payload
        @payload
      end

      # Writes the file via the yielded handle (binary mode), deleting
      # any previous file+sidecar first, then persists a fresh sidecar.
      # Returns self so callers can chain.
      def write
        if block_given?
          FileUtils.mkdir_p(File.dirname(path))
          delete
          # ensure the handle is closed even if the block raises
          File.open(path, "wb") { |f| yield f }
        end
        save_meta_file
        self
      end

      # Removes the sidecar and the file itself, if present.
      def delete
        File.delete(meta_path) if File.exist?(meta_path)
        File.delete(path) if valid
      end

      def to_json
        as_json.to_json
      end

      # Sidecar document shape; payload is double-encoded as a JSON
      # string (load_meta_file decodes it symmetrically).
      def as_json
        {'mimetype' => mimetype, 'mediatype' => mediatype, 'etag' => etag, 'size' => size,'payload' => @payload.to_json}
      end

      private
      def meta_path
        @meta_path ||= "#{path}.meta"
      end

      # Detects the mime type by magic bytes, falling back to
      # text/plain when the file is missing or unrecognized.
      # (Block form of File.open closes the handle; the previous code
      # leaked an open File object.)
      def get_mime
        @mimeclass ||= (File.open(path) { |f| MimeMagic.by_magic(f) } if File.exist?(path)) || MimeMagic.new("text/plain")
      end

      # Persists the sidecar; no-op while the data file is absent.
      def save_meta_file
        return if not valid

        File.write(meta_path, to_json)
      end

      # Loads cached fields from the sidecar, creating it on first
      # access; no-op while the data file is absent.
      def load_meta_file
        return if not valid

        if not File.exist?(meta_path)
          save_meta_file
        else
          data = JSON.load File.read(meta_path)
          @mimetype = data["mimetype"]
          @mediatype = data["mediatype"]
          @etag = data["etag"]
          @size = data["size"]
          @payload = JSON.load(data["payload"]) || {}
          @mimeclass = MimeMagic.new(@mimetype)
        end
      end
    end
  end
end
|