saviour 0.4.6 → 0.4.7
- checksums.yaml +4 -4
- data/lib/saviour/s3_storage.rb +52 -33
- data/lib/saviour/version.rb +1 -1
- data/saviour.gemspec +1 -1
- data/spec/feature/memory_usage_spec.rb +19 -8
- data/spec/feature/original_assigned_file_is_not_modified_spec.rb +43 -0
- data/spec/models/s3_storage_spec.rb +55 -64
- data/spec/spec_helper.rb +0 -45
- metadata +4 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 5936e61e689f63ba357bd92fc4b57e6d97cb48e8
+  data.tar.gz: cad8d0487163dca1e329e4b0bfd902162d5eb234
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: ef0ef25467254b34eb00d0103567d469f43c8b72ebec94448b88543f9330999a829c0453f44aa8b1aabc47f220bc79a4e0f26577051c61bcd870d63fdc9773a4
+  data.tar.gz: 70ec47375eafbf65a8b26fdc89c728aadaceed58e134a0967751592755da1b54bfbf0356743dc8656e8c5858982dcc1e0bf382b4ee17141e527088848687f4d7
data/lib/saviour/s3_storage.rb
CHANGED
@@ -1,5 +1,5 @@
 begin
-  require '
+  require 'aws-sdk-s3'
 rescue LoadError
 end
@@ -15,9 +15,10 @@ module Saviour
       @create_options = conf.delete(:create_options) { {} }
       conf.fetch(:aws_access_key_id) { raise(ArgumentError, "aws_access_key_id is required") }
       conf.fetch(:aws_secret_access_key) { raise(ArgumentError, "aws_secret_access_key is required") }
+      @region = conf[:region] || raise(ArgumentError, "region is required")
     end

-    def write(
+    def write(file_or_contents, path)
       path = sanitize_leading_slash(path)

       # http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMetadata.html
@@ -25,45 +26,50 @@ module Saviour
         raise(KeyTooLarge, "The key in S3 must be at max 1024 bytes, this key is too big: #{path}")
       end

-        key: path,
-        body: contents,
-        public: true
-      }.merge(@create_options)
-      )
+      client.put_object(@create_options.merge(body: file_or_contents, bucket: @bucket, key: path))
     end

     def write_from_file(file, path)
+      file.rewind
+
       write(file, path)
     end

     def read_to_file(path, dest_file)
+      dest_file.binmode
       dest_file.rewind
-      dest_file.
-    end
+      dest_file.truncate(0)

+      io = get_file_stringio(path)
+      while data = io.read(1024 * 1024)
+        dest_file.write(data)
+      end

+      dest_file.flush
+    end

+    def read(path)
+      get_file_stringio(path).read
     end

     def delete(path)
-      file = directory.files.get(real_path)
-      raise FileNotPresent, "Trying to delete an unexisting path: #{path}" unless file
+      path = sanitize_leading_slash(path)

+      client.delete_object(
+        bucket: @bucket,
+        key: path,
+      )
     end

     def exists?(path)
       path = sanitize_leading_slash(path)

-      !!
+      !!client.head_object(
+        bucket: @bucket,
+        key: path
+      )
+    rescue Aws::S3::Errors::NotFound
+      false
     end

     def public_url(path)
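
The new read_to_file streams the object into the destination file in 1 MB chunks instead of a single large string. A standalone sketch of the same pattern using aws-sdk-s3 directly (bucket, key, region and the local path are placeholders, credentials come from the environment); get_object without a response target returns the body as a StringIO-like object:

  require 'aws-sdk-s3'

  client = Aws::S3::Client.new(region: "eu-west-1")
  io = client.get_object(bucket: "my-bucket", key: "store/dir/file.jpg").body

  File.open("/tmp/file.jpg", "wb") do |dest|
    # copy in 1 MB chunks rather than building one big string
    while (chunk = io.read(1024 * 1024))
      dest.write(chunk)
    end
  end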
@@ -77,13 +83,15 @@ module Saviour
       source_path = sanitize_leading_slash(source_path)
       destination_path = sanitize_leading_slash(destination_path)

-      @
+      client.copy_object(
+        @create_options.merge(
+          copy_source: "/#{@bucket}/#{source_path}",
+          bucket: @bucket,
+          key: destination_path
+        )
       )
+    rescue Aws::S3::Errors::NoSuchKey
+      raise FileNotPresent, "Trying to cp an unexisting path: #{source_path}"
     end

     def mv(source_path, destination_path)
@@ -93,6 +101,17 @@ module Saviour
     private

+    def get_file_stringio(path)
+      path = sanitize_leading_slash(path)
+
+      client.get_object(
+        bucket: @bucket,
+        key: path
+      ).body
+    rescue Aws::S3::Errors::NotFound, Aws::S3::Errors::NoSuchKey
+      raise FileNotPresent, "Trying to read an unexisting path: #{path}"
+    end
+
     def public_url_prefix
       if @public_url_prefix.respond_to?(:call)
         @public_url_prefix.call
@@ -105,12 +124,12 @@ module Saviour
       path.gsub(/\A\/*/, '')
     end

-    def
-      @
+    def client
+      @client ||= Aws::S3::Client.new(
+        access_key_id: @conf[:aws_access_key_id],
+        secret_access_key: @conf[:aws_secret_access_key],
+        region: @region
+      )
     end
   end
 end
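
The private client method above is where the new aws-sdk-s3 dependency comes in, and region is now a required option. A minimal configuration sketch, assuming the usual Saviour::Config.storage= setup; the bucket, region, environment variable names and create_options values are placeholders:

  # Gemfile: add aws-sdk-s3 yourself; the storage requires it lazily and rescues LoadError
  # gem "aws-sdk-s3"

  Saviour::Config.storage = Saviour::S3Storage.new(
    bucket: "my-bucket",                                  # placeholder
    aws_access_key_id: ENV["AWS_ACCESS_KEY_ID"],          # placeholder env vars
    aws_secret_access_key: ENV["AWS_SECRET_ACCESS_KEY"],
    region: "eu-west-1",                                  # new in 0.4.7: ArgumentError if missing
    public_url_prefix: "https://my-bucket.s3.amazonaws.com",
    create_options: { cache_control: "max-age=31536000", acl: "public-read" }
  )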
data/lib/saviour/version.rb
CHANGED
data/saviour.gemspec
CHANGED
@@ -23,7 +23,7 @@ Gem::Specification.new do |spec|
   spec.add_development_dependency "rake"
   spec.add_development_dependency "sqlite3"
   spec.add_development_dependency "appraisal"
-  spec.add_development_dependency "
+  spec.add_development_dependency "aws-sdk-s3"
   spec.add_development_dependency "mime-types"
   spec.add_development_dependency "get_process_mem"
 end
data/spec/feature/memory_usage_spec.rb
CHANGED
@@ -30,6 +30,13 @@ describe "memory usage" do
     end
   end

+  def with_no_gc
+    GC.disable
+    yield
+  ensure
+    GC.enable
+  end
+
   describe "is kept low when using exclusively with_file processors" do
     let(:uploader) {
       Class.new(Saviour::BaseUploader) {
@@ -46,12 +53,14 @@ describe "memory usage" do
       a = base_klass.create!

       with_tempfile do |f|
+        with_no_gc do
+          base_line = GetProcessMem.new.mb

+          a.update_attributes! file: f

+          # Expect memory usage to grow below 10% of the file size
+          expect(GetProcessMem.new.mb - base_line).to be < size_to_test / 10
+        end
       end
     end
   end
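
The reworked memory specs disable the garbage collector around the measurement so the before/after get_process_mem samples stay comparable. A minimal sketch of that measurement pattern outside RSpec, with a plain string allocation standing in for the upload:

  require 'get_process_mem'

  def with_no_gc
    GC.disable
    yield
  ensure
    GC.enable
  end

  with_no_gc do
    base_line = GetProcessMem.new.mb
    data = "x" * (20 * 1024 * 1024)   # stand-in workload: allocate ~20 MB
    puts "allocated #{data.bytesize / 1024 / 1024} MB, process grew by #{(GetProcessMem.new.mb - base_line).round(2)} MB"
  end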
@@ -72,12 +81,14 @@ describe "memory usage" do
       a = base_klass.create!

       with_tempfile do |f|
+        with_no_gc do
+          base_line = GetProcessMem.new.mb

+          a.update_attributes! file: f

+          # Expect memory usage to grow at least the size of the file
+          expect(GetProcessMem.new.mb - base_line).to be > size_to_test
+        end
       end
     end
   end
data/spec/feature/original_assigned_file_is_not_modified_spec.rb
ADDED
@@ -0,0 +1,43 @@
+require 'spec_helper'
+
+describe "Original assigned file" do
+  before { allow(Saviour::Config).to receive(:storage).and_return(Saviour::LocalStorage.new(local_prefix: @tmpdir, public_url_prefix: "http://domain.com")) }
+
+  let(:uploader) {
+    Class.new(Saviour::BaseUploader) do
+      store_dir { "/store/dir/#{model.id}" }
+      process_with_file do |file, name|
+        ::File.delete(file.path)
+
+        f = Tempfile.new("test")
+        f.write("Hello")
+        f.flush
+
+        [f, name]
+      end
+    end
+  }
+
+  let(:klass) {
+    klass = Class.new(Test) {
+      include Saviour::Model
+    }
+    klass.attach_file :file, uploader
+    klass
+  }
+
+  it "is preserved even after deleting the incoming file from a processor" do
+    f = Tempfile.new("test")
+    f.write("original")
+    f.flush
+
+    a = klass.create! file: f
+
+    expect(a.file.read).to eq "Hello"
+
+    expect(::File.file?(f.path)).to be_truthy
+    expect(::File.read(f.path)).to eq "original"
+
+    f.close!
+  end
+end
data/spec/models/s3_storage_spec.rb
CHANGED
@@ -1,9 +1,23 @@
 require 'spec_helper'

 describe Saviour::S3Storage do
+  let(:injected_client) { Aws::S3::Client.new(stub_responses: true) }
+
+  let(:storage_options) {
+    {
+      bucket: "fake-bucket",
+      aws_access_key_id: "stub",
+      aws_secret_access_key: "stub",
+      region: "fake",
+      public_url_prefix: "https://fake-bucket.s3.amazonaws.com"
+    }
+  }
+
+  subject {
+    storage = Saviour::S3Storage.new(storage_options)
+    allow(storage).to receive(:client).and_return(injected_client)
+    storage
+  }

   context do
     it "fails when no keys are provided" do
@@ -18,12 +32,8 @@ describe Saviour::S3Storage do
     it "writting a new file" do
       with_test_file("camaloon.jpg") do |file, _|
-        expect(mocked_s3.exists?(destination_path)).to be_falsey
-
         contents = file.read
-        subject.write(contents, destination_path)
-
-        expect(mocked_s3.read(destination_path)).to eq contents
+        expect(subject.write(contents, destination_path)).to be_truthy
       end
     end
@@ -32,18 +42,6 @@ describe Saviour::S3Storage do
       expect { subject.write("contents", key) }.to raise_error.with_message(/The key in S3 must be at max 1024 bytes, this key is too big/)
     end

-    it "overwrites the existing file" do
-      mocked_s3.write("some dummy contents", destination_path)
-      expect(mocked_s3.exists?(destination_path)).to be_truthy
-
-      with_test_file("camaloon.jpg") do |file, _|
-        contents = file.read
-        subject.write(contents, destination_path)
-        expect(mocked_s3.read(destination_path)).to eq contents
-      end
-    end
-
     it "ignores leading slash" do
       subject.write("trash contents", "/folder/file.out")
       expect(subject.exists?("folder/file.out")).to be_truthy
@@ -52,14 +50,21 @@ describe Saviour::S3Storage do
   end

   describe "fog create options" do
+    let(:storage_options) {
+      {
+        bucket: "fake-bucket",
+        aws_access_key_id: "stub",
+        aws_secret_access_key: "stub",
+        public_url_prefix: "https://fake-bucket.s3.amazonaws.com",
+        create_options: { cache_control: 'max-age=31536000', acl: "public-read" },
+        region: "fake"
+      }
+    }

     it "uses passed options to create new files in S3" do
       with_test_file("camaloon.jpg") do |file, _|
         contents = file.read
-        subject.write(contents, destination_path)
-        file_data = mocked_s3.head(destination_path)
-        expect(file_data.cache_control).to eq "max-age=31536000"
+        expect(subject.write(contents, destination_path)).to be_truthy
       end
     end
   end
@@ -69,15 +74,12 @@ describe Saviour::S3Storage do
     let(:destination_path) { "dest/file.jpeg" }

     it "reads an existing file" do
-      mocked_s3.write(contents, destination_path)
-      expect(subject.read(destination_path)).to eq contents
-    end
+      injected_client.stub_responses(:get_object, body: "hello")
+      expect(subject.read(destination_path)).to eq "hello"
     end

     it "fails if the file do not exists" do
+      injected_client.stub_responses(:get_object, 'NotFound')
       expect { subject.read("nope.rar") }.to raise_error(Saviour::FileNotPresent)
     end
   end
@@ -86,18 +88,7 @@ describe Saviour::S3Storage do
     let(:destination_path) { "dest/file.jpeg" }

     it "deletes an existing file" do
-      contents = file.read
-      mocked_s3.write(contents, destination_path)
-
-      expect(mocked_s3.exists?(destination_path)).to be_truthy
-      subject.delete("/dest/file.jpeg")
-      expect(mocked_s3.exists?(destination_path)).to be_falsey
-    end
-  end
-
-  it "fails if the file do not exists" do
-    expect { subject.delete("nope.rar") }.to raise_error(Saviour::FileNotPresent)
+      expect(subject.delete(destination_path)).to be_truthy
     end
   end
@@ -105,14 +96,11 @@ describe Saviour::S3Storage do
     let(:destination_path) { "dest/file.jpeg" }

     it "with existing file" do
-      contents = file.read
-      mocked_s3.write(contents, destination_path)
-      expect(subject.exists?(destination_path)).to be_truthy
-    end
+      expect(subject.exists?(destination_path)).to be_truthy
     end

     it "with no file" do
+      injected_client.stub_responses(:head_object, 'NotFound')
       expect(subject.exists?("unexisting_file.zip")).to be_falsey
     end
   end
@@ -121,37 +109,40 @@ describe Saviour::S3Storage do
     let(:destination_path) { "dest/file.jpeg" }

     context do
+      let(:storage_options) {
+        {
+          bucket: "fake-bucket",
+          aws_access_key_id: "stub",
+          aws_secret_access_key: "stub",
+          region: "fake"
+        }
+      }

       it "fails if not provided the prefix" do
-        contents = file.read
-        mocked_s3.write(contents, destination_path)
-        expect { subject.public_url(destination_path) }.to raise_error(Saviour::S3Storage::MissingPublicUrlPrefix)
-      end
+        expect { subject.public_url(destination_path) }.to raise_error(Saviour::S3Storage::MissingPublicUrlPrefix)
       end
     end

     context do
+      let(:storage_options) {
+        {
+          bucket: "fake-bucket",
+          aws_access_key_id: "stub",
+          aws_secret_access_key: "stub",
+          public_url_prefix: -> { "https://#{Time.now.hour}.s3.amazonaws.com" },
+          region: "fake"
+        }
+      }

       it "allow to use a lambda for dynamic url prefixes" do
         allow(Time).to receive(:now).and_return(Time.new(2015, 1, 1, 13, 2, 1))

-        contents = file.read
-        mocked_s3.write(contents, destination_path)
-        expect(subject.public_url(destination_path)).to eq "https://13.s3.amazonaws.com/dest/file.jpeg"
-      end
+        expect(subject.public_url(destination_path)).to eq "https://13.s3.amazonaws.com/dest/file.jpeg"
       end
     end

     it do
-      contents = file.read
-      mocked_s3.write(contents, destination_path)
-      expect(subject.public_url(destination_path)).to eq "https://fake-bucket.s3.amazonaws.com/dest/file.jpeg"
-    end
+      expect(subject.public_url(destination_path)).to eq "https://fake-bucket.s3.amazonaws.com/dest/file.jpeg"
     end
   end
 end
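
With the Fog mocks and MockedS3Helper gone, the specs above stub the aws-sdk-s3 client itself and inject it into the storage. A self-contained sketch of the stubbing technique (bucket and keys are placeholders):

  require 'aws-sdk-s3'

  # stub_responses: true makes the client return canned responses instead of calling AWS
  client = Aws::S3::Client.new(stub_responses: true)

  client.stub_responses(:get_object, body: "hello")   # stub data by operation name
  client.stub_responses(:head_object, 'NotFound')     # stub an error by its name

  puts client.get_object(bucket: "fake-bucket", key: "any/key").body.read   # => "hello"

  begin
    client.head_object(bucket: "fake-bucket", key: "missing")
  rescue Aws::S3::Errors::NotFound
    # exists? in the storage rescues this error and returns false
  end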
data/spec/spec_helper.rb
CHANGED
@@ -36,8 +36,6 @@ require 'support/models'
 RSpec.configure do |config|
   config.around do |example|
-    Fog.mock!
-
     Dir.mktmpdir { |dir|
       @tmpdir = dir
       example.run
@@ -47,7 +45,6 @@ RSpec.configure do |config|
   config.before do
     Test.delete_all
   end
-  config.after { Fog::Mock.reset }
 end

 def with_tempfile(ext = ".jpg")
@@ -68,45 +65,3 @@ def with_test_file(name)
     yield(temp, File.basename(temp.path))
   end
 end
-
-class MockedS3Helper
-  attr_reader :directory
-
-  def start!(bucket_name: nil)
-    @directory = connection.directories.create(key: bucket_name)
-  end
-
-  def write(contents, path)
-    directory.files.create(
-      key: path,
-      body: contents,
-      public: true
-    )
-  end
-
-  def read(path)
-    directory.files.get(path).body
-  end
-
-  def delete(path)
-    directory.files.get(path).destroy
-  end
-
-  def head(path)
-    directory.files.head(path)
-  end
-
-  def exists?(path)
-    !!head(path)
-  end
-
-  def public_url(path)
-    directory.files.get(path).public_url
-  end
-
-  private
-
-  def connection
-    @connection ||= Fog::Storage.new(provider: 'AWS', aws_access_key_id: "stub", aws_secret_access_key: "stub")
-  end
-end
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: saviour
 version: !ruby/object:Gem::Version
-  version: 0.4.6
+  version: 0.4.7
 platform: ruby
 authors:
 - Roger Campos
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2018-01-
+date: 2018-01-09 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: activerecord
@@ -109,7 +109,7 @@ dependencies:
     - !ruby/object:Gem::Version
       version: '0'
 - !ruby/object:Gem::Dependency
-  name:
+  name: aws-sdk-s3
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
@@ -191,6 +191,7 @@ files:
 - spec/feature/follow_file_spec.rb
 - spec/feature/halt_processor_spec.rb
 - spec/feature/memory_usage_spec.rb
+- spec/feature/original_assigned_file_is_not_modified_spec.rb
 - spec/feature/persisted_path_spec.rb
 - spec/feature/processors_api_spec.rb
 - spec/feature/reload_model_spec.rb