asset_cloud 2.7.1 → 2.7.3

This diff shows the changes between publicly released versions of the package as they appear in their public registry; it is provided for informational purposes only.
Files changed (55)
  1. checksums.yaml +4 -4
  2. data/.github/dependabot.yml +6 -0
  3. data/.github/workflows/ci.yml +21 -7
  4. data/.github/workflows/cla.yml +22 -0
  5. data/.gitignore +0 -1
  6. data/.rubocop.yml +0 -1
  7. data/.ruby-version +1 -0
  8. data/Gemfile +5 -3
  9. data/Gemfile.lock +180 -0
  10. data/History.md +9 -0
  11. data/README.rdoc +1 -3
  12. data/Rakefile +18 -16
  13. data/asset_cloud.gemspec +19 -18
  14. data/dev.yml +1 -2
  15. data/lib/asset_cloud/asset.rb +17 -13
  16. data/lib/asset_cloud/asset_extension.rb +27 -15
  17. data/lib/asset_cloud/base.rb +77 -72
  18. data/lib/asset_cloud/bucket.rb +5 -2
  19. data/lib/asset_cloud/buckets/active_record_bucket.rb +16 -14
  20. data/lib/asset_cloud/buckets/blackhole_bucket.rb +2 -0
  21. data/lib/asset_cloud/buckets/bucket_chain.rb +38 -31
  22. data/lib/asset_cloud/buckets/file_system_bucket.rb +14 -15
  23. data/lib/asset_cloud/buckets/gcs_bucket.rb +6 -8
  24. data/lib/asset_cloud/buckets/invalid_bucket.rb +9 -6
  25. data/lib/asset_cloud/buckets/memory_bucket.rb +7 -4
  26. data/lib/asset_cloud/buckets/s3_bucket.rb +11 -8
  27. data/lib/asset_cloud/buckets/versioned_memory_bucket.rb +4 -2
  28. data/lib/asset_cloud/callbacks.rb +24 -16
  29. data/lib/asset_cloud/free_key_locator.rb +6 -6
  30. data/lib/asset_cloud/metadata.rb +11 -7
  31. data/lib/asset_cloud/validations.rb +9 -5
  32. data/lib/asset_cloud.rb +24 -22
  33. data/spec/active_record_bucket_spec.rb +27 -26
  34. data/spec/asset_cloud/metadata_spec.rb +4 -2
  35. data/spec/asset_extension_spec.rb +17 -16
  36. data/spec/asset_spec.rb +27 -21
  37. data/spec/base_spec.rb +93 -92
  38. data/spec/blackhole_bucket_spec.rb +12 -11
  39. data/spec/bucket_chain_spec.rb +61 -56
  40. data/spec/bucket_spec.rb +6 -5
  41. data/spec/callbacks_spec.rb +65 -39
  42. data/spec/file_system_spec.rb +25 -24
  43. data/spec/find_free_key_spec.rb +16 -17
  44. data/spec/gcs_bucket_remote_spec.rb +23 -22
  45. data/spec/gcs_bucket_spec.rb +48 -60
  46. data/spec/memory_bucket_spec.rb +12 -11
  47. data/spec/mock_s3_interface.rb +17 -6
  48. data/spec/remote_s3_bucket_spec.rb +31 -28
  49. data/spec/s3_bucket_spec.rb +19 -17
  50. data/spec/spec_helper.rb +8 -7
  51. data/spec/validations_spec.rb +13 -12
  52. data/spec/versioned_memory_bucket_spec.rb +11 -10
  53. metadata +13 -36
  54. data/.github/probots.yml +0 -2
  55. data/.rubocop_todo.yml +0 -326
data/spec/gcs_bucket_remote_spec.rb CHANGED
@@ -1,26 +1,27 @@
  # frozen_string_literal: true
 
- require 'spec_helper'
- require 'google/cloud/storage'
+ require "spec_helper"
+ require "google/cloud/storage"
 
  class RemoteGCSCloud < AssetCloud::Base
  attr_accessor :gcs_connection
+
  bucket :tmp, AssetCloud::GCSBucket
 
  def gcs_bucket
- gcs_connection.bucket(ENV['GCS_BUCKET'])
+ gcs_connection.bucket(ENV["GCS_BUCKET"])
  end
  end
 
- describe AssetCloud::GCSBucket, if: ENV['GCS_PROJECT_ID'] && ENV['GCS_KEY_FILEPATH'] && ENV['GCS_BUCKET'] do
- directory = File.dirname(__FILE__) + '/files'
+ describe AssetCloud::GCSBucket, if: ENV["GCS_PROJECT_ID"] && ENV["GCS_KEY_FILEPATH"] && ENV["GCS_BUCKET"] do
+ directory = File.dirname(__FILE__) + "/files"
 
  before(:all) do
- @cloud = RemoteGCSCloud.new(directory, 'assets/files')
+ @cloud = RemoteGCSCloud.new(directory, "assets/files")
 
  @cloud.gcs_connection = Google::Cloud::Storage.new(
- project_id: ENV['GCS_PROJECT_ID'],
- credentials: ENV['GCS_KEY_FILEPATH']
+ project_id: ENV["GCS_PROJECT_ID"],
+ credentials: ENV["GCS_KEY_FILEPATH"],
  )
  @bucket = @cloud.buckets[:tmp]
  end
@@ -38,7 +39,7 @@ describe AssetCloud::GCSBucket, if: ENV['GCS_PROJECT_ID'] && ENV['GCS_KEY_FILEPATH'] && ENV['GCS_BUCKET'] do
 
  it "#ls with arguments returns the file" do
  local_path = "#{directory}/products/key.txt"
- key = 'test/ls.txt'
+ key = "test/ls.txt"
 
  @bucket.write(key, local_path)
 
@@ -48,14 +49,14 @@ describe AssetCloud::GCSBucket, if: ENV['GCS_PROJECT_ID'] && ENV['GCS_KEY_FILEPATH'] && ENV['GCS_BUCKET'] do
 
  it "#write writes a file into the bucket" do
  local_path = "#{directory}/products/key.txt"
- key = 'test/key.txt'
+ key = "test/key.txt"
 
  @bucket.write(key, local_path)
  end
 
  it "#write writes a file into the bucket with metadata" do
  local_path = "#{directory}/products/key.txt"
- key = 'test/key.txt'
+ key = "test/key.txt"
  metadata = {
  "X-Robots-Tag" => "none",
  }
@@ -66,8 +67,8 @@ describe AssetCloud::GCSBucket, if: ENV['GCS_PROJECT_ID'] && ENV['GCS_KEY_FILEPATH'] && ENV['GCS_BUCKET'] do
 
  it "#write writes a file into the bucket with acl" do
  local_path = "#{directory}/products/key.txt"
- key = 'test/key.txt'
- acl = 'public'
+ key = "test/key.txt"
+ acl = "public"
 
  file = @bucket.write(key, local_path, acl: acl)
  expect(file.acl).to(be_truthy)
@@ -75,15 +76,15 @@ describe AssetCloud::GCSBucket, if: ENV['GCS_PROJECT_ID'] && ENV['GCS_KEY_FILEPATH'] && ENV['GCS_BUCKET'] do
 
  it "#write writes a file into the bucket with content_disposition" do
  local_path = "#{directory}/products/key.txt"
- key = 'test/key.txt'
- content_disposition = 'attachment'
+ key = "test/key.txt"
+ content_disposition = "attachment"
 
  file = @bucket.write(key, local_path, content_disposition: content_disposition)
  expect(file.content_disposition).to(eq(content_disposition))
  end
 
  it "#delete removes the file from the bucket" do
- key = 'test/key.txt'
+ key = "test/key.txt"
 
  expect do
  @bucket.delete(key)
@@ -91,15 +92,15 @@ describe AssetCloud::GCSBucket, if: ENV['GCS_PROJECT_ID'] && ENV['GCS_KEY_FILEPATH'] && ENV['GCS_BUCKET'] do
  end
 
  it "#delete raises AssetCloud::AssetNotFoundError if the file is not found" do
- key = 'tmp/not_found.txt'
+ key = "tmp/not_found.txt"
  expect do
  @bucket.delete(key)
  end.to(raise_error(AssetCloud::AssetNotFoundError))
  end
 
  it "#read returns the data of the file" do
- value = 'hello world'
- key = 'tmp/new_file.txt'
+ value = "hello world"
+ key = "tmp/new_file.txt"
  @bucket.write(key, StringIO.new(value))
 
  data = @bucket.read(key)
@@ -107,15 +108,15 @@ describe AssetCloud::GCSBucket, if: ENV['GCS_PROJECT_ID'] && ENV['GCS_KEY_FILEPATH'] && ENV['GCS_BUCKET'] do
  end
 
  it "#read raises AssetCloud::AssetNotFoundError if the file is not found" do
- key = 'tmp/not_found.txt'
+ key = "tmp/not_found.txt"
  expect do
  @bucket.read(key)
  end.to(raise_error(AssetCloud::AssetNotFoundError))
  end
 
  it "#stats returns metadata of the asset" do
- value = 'hello world'
- key = 'tmp/new_file.txt'
+ value = "hello world"
+ key = "tmp/new_file.txt"
  @bucket.write(key, StringIO.new(value))
 
  stats = @bucket.stat(key)
data/spec/gcs_bucket_spec.rb CHANGED
@@ -1,6 +1,7 @@
  # frozen_string_literal: true
- require 'spec_helper'
- require 'google/cloud/storage'
+
+ require "spec_helper"
+ require "google/cloud/storage"
 
  class GCSCloud < AssetCloud::Base
  end
@@ -13,15 +14,20 @@ class MockGCSBucket < AssetCloud::GCSBucket
  end
 
  def create_file(data, key, options = {})
+ created_files << [data, key, options]
+ end
+
+ def created_files
+ @created_files ||= []
  end
  end
 
  describe AssetCloud::GCSBucket do
- directory = File.dirname(__FILE__) + '/files'
+ directory = File.dirname(__FILE__) + "/files"
 
  before(:all) do
- @cloud = GCSCloud.new(directory, '/assets/files')
- @bucket = MockGCSBucket.new(@cloud, '')
+ @cloud = GCSCloud.new(directory, "/assets/files")
+ @bucket = MockGCSBucket.new(@cloud, "")
  end
 
  it "#ls with no arguments returns all files in the bucket" do
@@ -31,82 +37,62 @@ describe AssetCloud::GCSBucket do
  end
 
  it "#ls with arguments returns the file" do
- key = 'test/ls.txt'
- expect_any_instance_of(MockGCSBucket).to(receive(:file).with("s#{@cloud.url}/#{key}").and_return(Google::Cloud::Storage::File.new))
+ key = "test/ls.txt"
+ expect_any_instance_of(MockGCSBucket).to(receive(:file).with("s#{@cloud.url}/#{key}")
+ .and_return(Google::Cloud::Storage::File.new))
 
  file = @bucket.ls(key)
  expect(file.class).to(eq(Google::Cloud::Storage::File))
  end
 
- if RUBY_VERSION >= '2.7'
- it "#write writes a file into the bucket" do
- local_path = "#{directory}/products/key.txt"
- key = 'test/key.txt'
- expect_any_instance_of(MockGCSBucket).to(receive(:create_file).with(
- local_path,
- "s#{@cloud.url}/#{key}",
- ))
-
- @bucket.write(key, local_path)
- end
- else
- it "#write writes a file into the bucket" do
- local_path = "#{directory}/products/key.txt"
- key = 'test/key.txt'
- expect_any_instance_of(MockGCSBucket).to(receive(:create_file).with(
- local_path,
- "s#{@cloud.url}/#{key}",
- {}
- ))
-
- @bucket.write(key, local_path)
- end
+ it "#write writes a file into the bucket" do
+ local_path = "#{directory}/products/key.txt"
+ key = "test/key.txt"
+
+ @bucket.write(key, local_path)
+
+ expect(@bucket.created_files).to(include([local_path, "s#{@cloud.url}/#{key}", {}]))
  end
 
  it "#write writes a file into the bucket with metadata" do
  local_path = "#{directory}/products/key.txt"
- key = 'test/key.txt'
+ key = "test/key.txt"
  metadata = {
  "X-Robots-Tag" => "none",
  }
- expect_any_instance_of(MockGCSBucket).to(receive(:create_file).with(
- local_path,
- "s#{@cloud.url}/#{key}",
- metadata: metadata
- ))
 
  @bucket.write(key, local_path, metadata: metadata)
+
+ expect(@bucket.created_files).to(include([local_path, "s#{@cloud.url}/#{key}", { metadata: metadata }]))
  end
 
  it "#write writes a file into the bucket with acl" do
  local_path = "#{directory}/products/key.txt"
- key = 'test/key.txt'
- acl = 'public'
- expect_any_instance_of(MockGCSBucket).to(receive(:create_file).with(
- local_path,
- "s#{@cloud.url}/#{key}",
- acl: acl
- ))
+ key = "test/key.txt"
+ acl = "public"
 
  @bucket.write(key, local_path, acl: acl)
+ expect(@bucket.created_files).to(include([local_path, "s#{@cloud.url}/#{key}", { acl: acl }]))
  end
 
  it "#write writes a file into the bucket with content_disposition" do
  local_path = "#{directory}/products/key.txt"
- key = 'test/key.txt'
- content_disposition = 'attachment'
- expect_any_instance_of(MockGCSBucket).to(receive(:create_file).with(
- local_path,
- "s#{@cloud.url}/#{key}",
- content_disposition: content_disposition
- ))
+ key = "test/key.txt"
+ content_disposition = "attachment"
 
  @bucket.write(key, local_path, content_disposition: content_disposition)
+
+ expect(@bucket.created_files).to(include([
+ local_path,
+ "s#{@cloud.url}/#{key}",
+ { content_disposition: content_disposition },
+ ]))
  end
 
  it "#delete removes the file from the bucket" do
- key = 'test/key.txt'
- expect_any_instance_of(MockGCSBucket).to(receive(:file).with("s#{@cloud.url}/#{key}").and_return(Google::Cloud::Storage::File.new))
+ key = "test/key.txt"
+ expect_any_instance_of(MockGCSBucket).to(receive(:file).with("s#{@cloud.url}/#{key}")
+ .and_return(Google::Cloud::Storage::File.new))
  expect_any_instance_of(Google::Cloud::Storage::File).to(receive(:delete).with(no_args))
 
  expect do
@@ -115,17 +101,19 @@ describe AssetCloud::GCSBucket do
  end
 
  it "#read returns the data of the file" do
- value = 'hello world'
- key = 'tmp/new_file.txt'
- expect_any_instance_of(MockGCSBucket).to(receive(:file).with("s#{@cloud.url}/#{key}").and_return(Google::Cloud::Storage::File.new))
- expect_any_instance_of(Google::Cloud::Storage::File).to(receive(:download).and_return(StringIO.new(value)))
+ value = "hello world"
+ key = "tmp/new_file.txt"
+ expect_any_instance_of(MockGCSBucket).to(receive(:file).with("s#{@cloud.url}/#{key}")
+ .and_return(Google::Cloud::Storage::File.new))
+ expect_any_instance_of(Google::Cloud::Storage::File).to(receive(:download)
+ .and_return(StringIO.new(value)))
 
  data = @bucket.read(key)
  expect(data).to(eq(value))
  end
 
  it "#read raises AssetCloud::AssetNotFoundError if the file is not found" do
- key = 'tmp/not_found.txt'
+ key = "tmp/not_found.txt"
  expect_any_instance_of(MockGCSBucket).to(receive(:file).with("s#{@cloud.url}/#{key}").and_return(nil))
  expect do
  @bucket.read(key)
@@ -133,12 +121,12 @@ describe AssetCloud::GCSBucket do
  end
 
  it "#stat returns information on the asset" do
- value = 'hello world'
- key = 'tmp/new_file.txt'
+ key = "tmp/new_file.txt"
  expected_time = Time.now
  expected_size = 1
 
- expect_any_instance_of(MockGCSBucket).to(receive(:file).with("s#{@cloud.url}/#{key}").and_return(Google::Cloud::Storage::File.new))
+ expect_any_instance_of(MockGCSBucket).to(receive(:file).with("s#{@cloud.url}/#{key}")
+ .and_return(Google::Cloud::Storage::File.new))
  expect_any_instance_of(Google::Cloud::Storage::File).to(receive(:size).and_return(expected_size))
  expect_any_instance_of(Google::Cloud::Storage::File).to(receive(:created_at).and_return(expected_time))
  expect_any_instance_of(Google::Cloud::Storage::File).to(receive(:updated_at).and_return(expected_time))
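
Note: the rewritten specs above drop the `expect_any_instance_of(...receive(:create_file)...)` stubs, along with the `RUBY_VERSION >= '2.7'` branching they required, in favour of a fake that records its calls. A minimal sketch of that recording pattern, standalone and with a hypothetical class name (`RecordingBucket`), not taken from the gem:

# Hypothetical stand-in illustrating the recording-fake pattern MockGCSBucket#create_file now uses.
class RecordingBucket
  # Record every call instead of setting an expectation up front.
  def create_file(data, key, options = {})
    created_files << [data, key, options]
  end

  def created_files
    @created_files ||= []
  end
end

bucket = RecordingBucket.new
bucket.create_file("products/key.txt", "gs://bucket/test/key.txt", { acl: "public" })
# A spec can then assert on the recorded arguments after the fact, e.g.
#   expect(bucket.created_files).to(include(["products/key.txt", "gs://bucket/test/key.txt", { acl: "public" }]))

Asserting on `created_files` afterwards sidesteps the keyword-vs-hash argument-matching differences between Ruby versions that the old `with(...)` expectations had to special-case.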
data/spec/memory_bucket_spec.rb CHANGED
@@ -1,25 +1,26 @@
  # frozen_string_literal: true
- require 'spec_helper'
+
+ require "spec_helper"
 
  class MemoryCloud < AssetCloud::Base
  bucket :memory, AssetCloud::MemoryBucket
  end
 
  describe AssetCloud::MemoryBucket do
- directory = File.dirname(__FILE__) + '/files'
+ directory = File.dirname(__FILE__) + "/files"
 
  before do
- @fs = MemoryCloud.new(directory, 'http://assets/files')
+ @fs = MemoryCloud.new(directory, "http://assets/files")
  end
 
- describe 'modifying items in subfolder' do
+ describe "modifying items in subfolder" do
  it "should return nil when file does not exist" do
- expect(@fs['memory/essay.txt'].exist?).to(eq(false))
+ expect(@fs["memory/essay.txt"].exist?).to(eq(false))
  end
 
  it "should return set content when asked for the same file" do
- @fs['memory/essay.txt'] = 'text'
- expect(@fs['memory/essay.txt'].value).to(eq('text'))
+ @fs["memory/essay.txt"] = "text"
+ expect(@fs["memory/essay.txt"].value).to(eq("text"))
  end
  end
 
@@ -29,15 +30,15 @@ describe AssetCloud::MemoryBucket do
  end
  end
 
- describe '#ls' do
+ describe "#ls" do
  before do
- %w{a b}.each do |letter|
- 2.times { |number| @fs.write("memory/#{letter}#{number}", '.') }
+ ["a", "b"].each do |letter|
+ 2.times { |number| @fs.write("memory/#{letter}#{number}", ".") }
  end
  end
 
  it "should return a list of assets which start with the given prefix" do
- expect(@fs.buckets[:memory].ls('memory/a').size).to(eq(2))
+ expect(@fs.buckets[:memory].ls("memory/a").size).to(eq(2))
  end
 
  it "should return a list of all assets when a prefix is not given" do
data/spec/mock_s3_interface.rb CHANGED
@@ -1,10 +1,17 @@
  # frozen_string_literal: true
- require 'ostruct'
+
+ require "ostruct"
 
  class MockS3Interface
- VALID_ACLS = %w(
- private public-read public-read-write authenticated-read aws-exec-read bucket-owner-read bucket-owner-full-control
- )
+ VALID_ACLS = [
+ "private",
+ "public-read",
+ "public-read-write",
+ "authenticated-read",
+ "aws-exec-read",
+ "bucket-owner-read",
+ "bucket-owner-full-control",
+ ]
 
  attr_reader :bucket_storage
 
@@ -39,6 +46,7 @@ class MockS3Interface
 
  class Bucket
  attr_reader :name, :client
+
  def initialize(client, name)
  @client = client
  @name = name
@@ -62,7 +70,7 @@ class MockS3Interface
  raise "Invalid ACL `#{options[:acl].inspect}`, must be one of: #{VALID_ACLS.inspect}"
  end
 
- options[:body] = options[:body].force_encoding(Encoding::BINARY)
+ options[:body] = options[:body].dup.force_encoding(Encoding::BINARY)
 
  key = options.delete(:key)
 
@@ -81,6 +89,7 @@ class MockS3Interface
 
  class NullS3Object
  attr_reader :key
+
  def initialize(bucket, key)
  @bucket = bucket
  @key = key
@@ -101,6 +110,8 @@ class MockS3Interface
  class S3Object
  attr_reader :key, :options
 
+ GottenObject = Struct.new(:body, keyword_init: true)
+
  def initialize(bucket, key, options = {})
  @bucket = bucket
  @key = key
@@ -113,7 +124,7 @@ class MockS3Interface
  end
 
  def get(*)
- OpenStruct.new(options)
+ GottenObject.new(**options)
  end
 
  def put(options = {})
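
Two small behavioural points in the mock above: `#get` now returns a `keyword_init` `Struct` instead of an `OpenStruct`, and the body string is `dup`ed before `force_encoding`, so a frozen string literal passed as the body is no longer mutated in place. A rough standalone illustration of the `keyword_init` Struct behaviour (not taken from the gem):

require "ostruct" # only needed for the comparison below

# Illustration of the Struct that replaces OpenStruct in #get.
GottenObject = Struct.new(:body, keyword_init: true)

response = GottenObject.new(body: "hello world")
response.body # => "hello world"

# OpenStruct silently accepts any key and returns nil for typos; the
# keyword_init Struct only accepts its declared members and raises otherwise:
#   GottenObject.new(body: "x", acl: "public") # => ArgumentError (unknown keywords: :acl)
OpenStruct.new(body: "hello world").body # => "hello world", but a typo'd key would return nil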
data/spec/remote_s3_bucket_spec.rb CHANGED
@@ -1,85 +1,88 @@
  # frozen_string_literal: true
- require 'spec_helper'
+
+ require "spec_helper"
 
  class RemoteS3Cloud < AssetCloud::Base
  attr_accessor :s3_connection
+
  bucket :tmp, AssetCloud::S3Bucket
 
  def s3_bucket(_key)
- s3_connection.bucket(ENV['S3_BUCKET_NAME'])
+ s3_connection.bucket(ENV["S3_BUCKET_NAME"])
  end
  end
 
- describe 'Remote test for AssetCloud::S3Bucket', if: ENV['AWS_ACCESS_KEY_ID'] && ENV['AWS_SECRET_ACCESS_KEY'] && ENV['S3_BUCKET_NAME'] do
- directory = File.dirname(__FILE__) + '/files'
+ describe "Remote test for AssetCloud::S3Bucket",
+ if: ENV["AWS_ACCESS_KEY_ID"] && ENV["AWS_SECRET_ACCESS_KEY"] && ENV["S3_BUCKET_NAME"] do
+ directory = File.dirname(__FILE__) + "/files"
 
  before(:all) do
  Aws.config = {
- region: ENV.fetch('AWS_REGION', 'us-east-1'),
+ region: ENV.fetch("AWS_REGION", "us-east-1"),
  credentials: Aws::Credentials.new(
- ENV['AWS_ACCESS_KEY_ID'],
- ENV['AWS_SECRET_ACCESS_KEY'],
+ ENV["AWS_ACCESS_KEY_ID"],
+ ENV["AWS_SECRET_ACCESS_KEY"],
  ),
  }
 
- @cloud = RemoteS3Cloud.new(directory, 'testing/assets/files')
+ @cloud = RemoteS3Cloud.new(directory, "testing/assets/files")
  @cloud.s3_connection = Aws::S3::Resource.new
  @bucket = @cloud.buckets[:tmp]
  end
 
  after(:all) do
- listing = @bucket.ls('tmp')
+ listing = @bucket.ls("tmp")
  listing.each(&:delete)
  end
 
  it "#ls should return assets with proper keys" do
- @cloud['tmp/test1.txt'] = 'test1'
- @cloud['tmp/test2.txt'] = 'test2'
+ @cloud["tmp/test1.txt"] = "test1"
+ @cloud["tmp/test2.txt"] = "test2"
 
- ls = @bucket.ls('tmp')
+ ls = @bucket.ls("tmp")
 
  expect(ls).to(all(be_an(AssetCloud::Asset)))
- expect(ls.map(&:key) - ['tmp/test1.txt', 'tmp/test2.txt']).to(be_empty)
+ expect(ls.map(&:key) - ["tmp/test1.txt", "tmp/test2.txt"]).to(be_empty)
  end
 
  it "#ls returns all assets" do
- @cloud['tmp/test1.txt'] = 'test1'
- @cloud['tmp/test2.txt'] = 'test2'
+ @cloud["tmp/test1.txt"] = "test1"
+ @cloud["tmp/test2.txt"] = "test2"
 
  ls = @bucket.ls
 
  expect(ls).to(all(be_an(AssetCloud::Asset)))
- expect(ls.map(&:key) - ['tmp/test1.txt', 'tmp/test2.txt']).to(be_empty)
+ expect(ls.map(&:key) - ["tmp/test1.txt", "tmp/test2.txt"]).to(be_empty)
  end
 
  it "#delete should ignore errors when deleting" do
- @bucket.delete('tmp/a_file_that_should_not_exist.txt')
+ @bucket.delete("tmp/a_file_that_should_not_exist.txt")
  end
 
  it "#delete should always return true" do
- @cloud['tmp/test1.txt'] = 'test1'
+ @cloud["tmp/test1.txt"] = "test1"
 
- expect(@bucket.delete('tmp/test1.txt')).to(eq(true))
+ expect(@bucket.delete("tmp/test1.txt")).to(eq(true))
  end
 
  it "#stat should get metadata from S3" do
  start_time = Time.now
- value = 'hello world'
- @cloud.build('tmp/new_file.test', value).store
- metadata = @bucket.stat('tmp/new_file.test')
+ value = "hello world"
+ @cloud.build("tmp/new_file.test", value).store
+ metadata = @bucket.stat("tmp/new_file.test")
  expect(metadata.size).to(eq(value.size))
  expect(metadata.updated_at).to(be >= start_time)
  end
 
  it "#stat a missing asset" do
- metadata = @bucket.stat('i_do_not_exist_and_never_will.test')
+ metadata = @bucket.stat("i_do_not_exist_and_never_will.test")
  expect(metadata).to(be_an(AssetCloud::Metadata))
  expect(metadata.exist).to(be(false))
  end
 
  it "#read " do
- value = 'hello world'
- key = 'tmp/new_file.txt'
+ value = "hello world"
+ key = "tmp/new_file.txt"
  @bucket.write(key, value)
  data = @bucket.read(key)
  expect(data).to(eq(value))
@@ -90,11 +93,11 @@ describe 'Remote test for AssetCloud::S3Bucket', if: ENV['AWS_ACCESS_KEY_ID'] && ENV['AWS_SECRET_ACCESS_KEY'] && ENV['S3_BUCKET_NAME'] do
  end
 
  it "#reads first bytes when passed options" do
- value = 'hello world'
- key = 'tmp/new_file.txt'
+ value = "hello world"
+ key = "tmp/new_file.txt"
  options = { range: 0...5 }
  @bucket.write(key, value)
  data = @bucket.read(key, options)
- expect(data).to(eq('hello'))
+ expect(data).to(eq("hello"))
  end
  end
data/spec/s3_bucket_spec.rb CHANGED
@@ -1,5 +1,7 @@
- require 'spec_helper'
- require 'mock_s3_interface'
+ # frozen_string_literal: true
+
+ require "spec_helper"
+ require "mock_s3_interface"
 
  class S3Cloud < AssetCloud::Base
  bucket :tmp, AssetCloud::S3Bucket
@@ -11,19 +13,19 @@ class S3Cloud < AssetCloud::Base
  end
 
  describe AssetCloud::S3Bucket do
- directory = File.dirname(__FILE__) + '/files'
+ directory = File.dirname(__FILE__) + "/files"
 
  before(:all) do
- @cloud = S3Cloud.new(directory, 'http://assets/files')
- @cloud.s3_connection = MockS3Interface.new('a', 'b')
- @cloud.s3_bucket_name = 'asset-cloud-test'
+ @cloud = S3Cloud.new(directory, "http://assets/files")
+ @cloud.s3_connection = MockS3Interface.new("a", "b")
+ @cloud.s3_bucket_name = "asset-cloud-test"
 
  @bucket = @cloud.buckets[:tmp]
- FileUtils.mkdir_p(directory + '/tmp')
+ FileUtils.mkdir_p(directory + "/tmp")
  end
 
  after(:each) do
- FileUtils.rm_rf(directory + '/tmp')
+ FileUtils.rm_rf(directory + "/tmp")
  end
 
  it "#ls should return assets with proper keys" do
@@ -32,35 +34,35 @@ describe AssetCloud::S3Bucket do
  end
  expect_any_instance_of(MockS3Interface::Bucket).to(receive(:objects).and_return(collection))
 
- ls = @bucket.ls('tmp')
+ ls = @bucket.ls("tmp")
 
  expect(ls).to(all(be_an(AssetCloud::Asset)))
- expect(ls.map(&:key) - ['tmp/blah.gif', 'tmp/add_to_cart.gif']).to(be_empty)
+ expect(ls.map(&:key) - ["tmp/blah.gif", "tmp/add_to_cart.gif"]).to(be_empty)
  end
 
  it "#delete should not ignore errors when deleting" do
  expect_any_instance_of(MockS3Interface::NullS3Object).to(receive(:delete).and_raise(StandardError))
 
- expect { @bucket.delete('assets/fail.gif') }.to(raise_error(StandardError))
+ expect { @bucket.delete("assets/fail.gif") }.to(raise_error(StandardError))
  end
 
  it "#delete should always return true" do
  expect_any_instance_of(MockS3Interface::NullS3Object).to(receive(:delete).and_return(nil))
 
- expect(@bucket.delete('assets/fail.gif')).to(eq(true))
+ expect(@bucket.delete("assets/fail.gif")).to(eq(true))
  end
 
  it "#stat should get metadata from S3" do
- value = 'hello world'
- @cloud.build('tmp/new_file.test', value).store
- metadata = @bucket.stat('tmp/new_file.test')
+ value = "hello world"
+ @cloud.build("tmp/new_file.test", value).store
+ metadata = @bucket.stat("tmp/new_file.test")
  expect(metadata.size).to(eq(value.size))
  expect(metadata.updated_at).to(eq(Time.parse("Mon Aug 27 17:37:51 UTC 2007")))
  end
 
  it "#read " do
- value = 'hello world'
- key = 'tmp/new_file.txt'
+ value = "hello world"
+ key = "tmp/new_file.txt"
  @bucket.write(key, value)
  data = @bucket.read(key)
  expect(data).to(eq(value))
data/spec/spec_helper.rb CHANGED
@@ -1,9 +1,10 @@
  # frozen_string_literal: true
- require 'rubygems'
- require 'rspec'
- require 'pry-byebug' if RUBY_VERSION >= '2.0.0'
- require 'active_support/all'
+
+ require "rubygems"
+ require "rspec"
+ require "pry-byebug" if RUBY_VERSION >= "2.0.0"
+ require "active_support/all"
  $LOAD_PATH << File.dirname(__FILE__) + "/../lib"
- require 'asset_cloud'
- require 'asset_cloud/buckets/s3_bucket'
- require 'asset_cloud/buckets/gcs_bucket'
+ require "asset_cloud"
+ require "asset_cloud/buckets/s3_bucket"
+ require "asset_cloud/buckets/gcs_bucket"