feedx 0.12.0 → 0.12.6

Files changed (54)
  1. checksums.yaml +4 -4
  2. data/.editorconfig +3 -0
  3. data/.github/workflows/test.yml +60 -0
  4. data/.gitignore +1 -0
  5. data/.rubocop.yml +14 -5
  6. data/Gemfile +0 -2
  7. data/Gemfile.lock +60 -49
  8. data/Makefile +6 -6
  9. data/README.md +1 -1
  10. data/compression.go +18 -0
  11. data/compression_test.go +17 -5
  12. data/consumer.go +12 -3
  13. data/consumer_test.go +50 -19
  14. data/ext/parquet/decoder.go +170 -0
  15. data/ext/parquet/decoder_test.go +88 -0
  16. data/ext/parquet/go.mod +10 -0
  17. data/ext/parquet/go.sum +154 -0
  18. data/ext/parquet/parquet.go +78 -0
  19. data/ext/parquet/parquet_test.go +28 -0
  20. data/ext/parquet/reader.go +89 -0
  21. data/ext/parquet/testdata/alltypes_plain.parquet +0 -0
  22. data/ext/parquet/types.go +51 -0
  23. data/feedx.gemspec +5 -6
  24. data/feedx_ext_test.go +6 -0
  25. data/feedx_test.go +6 -6
  26. data/format.go +45 -15
  27. data/format_test.go +7 -5
  28. data/go.mod +10 -5
  29. data/go.sum +90 -25
  30. data/internal/testdata/testdata.pb.go +176 -77
  31. data/lib/feedx/cache/memory.rb +1 -0
  32. data/lib/feedx/consumer.rb +9 -6
  33. data/lib/feedx/format.rb +1 -1
  34. data/lib/feedx/producer.rb +20 -18
  35. data/lib/feedx/stream.rb +24 -8
  36. data/producer_test.go +4 -4
  37. data/reader_test.go +6 -5
  38. data/spec/feedx/cache/memory_spec.rb +2 -2
  39. data/spec/feedx/cache/value_spec.rb +1 -1
  40. data/spec/feedx/compression/gzip_spec.rb +1 -1
  41. data/spec/feedx/compression/none_spec.rb +1 -1
  42. data/spec/feedx/compression_spec.rb +2 -2
  43. data/spec/feedx/consumer_spec.rb +5 -4
  44. data/spec/feedx/format/abstract_spec.rb +2 -1
  45. data/spec/feedx/format/json_spec.rb +6 -6
  46. data/spec/feedx/format/parquet_spec.rb +1 -1
  47. data/spec/feedx/format/protobuf_spec.rb +1 -1
  48. data/spec/feedx/format_spec.rb +2 -2
  49. data/spec/feedx/producer_spec.rb +10 -9
  50. data/spec/feedx/stream_spec.rb +36 -18
  51. data/writer.go +1 -4
  52. data/writer_test.go +8 -8
  53. metadata +25 -23
  54. data/.travis.yml +0 -24
data/producer_test.go CHANGED
@@ -7,8 +7,8 @@ import (
 
 	"github.com/bsm/bfs"
 	"github.com/bsm/feedx"
-	. "github.com/onsi/ginkgo"
-	. "github.com/onsi/gomega"
+	. "github.com/bsm/ginkgo"
+	. "github.com/bsm/gomega"
 )
 
 var _ = Describe("Producer", func() {
@@ -43,7 +43,7 @@ var _ = Describe("Producer", func() {
 		}
 	})
 
-	It("should produce", func() {
+	It("produces", func() {
 		setup(nil)
 
 		Expect(subject.LastPush()).To(BeTemporally("~", time.Now(), time.Second))
@@ -56,7 +56,7 @@ var _ = Describe("Producer", func() {
 		Expect(info.Size).To(BeNumerically("~", 75, 10))
 	})
 
-	It("should produce with custom last-mod check", func() {
+	It("produces with custom last-mod check", func() {
 		setup(&feedx.ProducerOptions{
 			Interval:     50 * time.Millisecond,
 			LastModCheck: func(_ context.Context) (time.Time, error) { return time.Unix(1515151515, 987654321), nil },
data/reader_test.go CHANGED
@@ -4,12 +4,13 @@ import (
 	"context"
 	"io"
 	"io/ioutil"
+	"time"
 
 	"github.com/bsm/bfs"
 	"github.com/bsm/feedx"
 	"github.com/bsm/feedx/internal/testdata"
-	. "github.com/onsi/ginkgo"
-	. "github.com/onsi/gomega"
+	. "github.com/bsm/ginkgo"
+	. "github.com/bsm/gomega"
 )
 
 var _ = Describe("Reader", func() {
@@ -19,7 +20,7 @@ var _ = Describe("Reader", func() {
 
 	BeforeEach(func() {
 		obj = bfs.NewInMemObject("path/to/file.json")
-		Expect(writeMulti(obj, 3)).To(Succeed())
+		Expect(writeMulti(obj, 3, time.Time{})).To(Succeed())
 
 		var err error
 		subject, err = feedx.NewReader(ctx, obj, nil)
@@ -30,14 +31,14 @@ var _ = Describe("Reader", func() {
 		Expect(subject.Close()).To(Succeed())
 	})
 
-	It("should read", func() {
+	It("reads", func() {
 		data, err := ioutil.ReadAll(subject)
 		Expect(err).NotTo(HaveOccurred())
 		Expect(len(data)).To(BeNumerically("~", 110, 20))
 		Expect(subject.NumRead()).To(Equal(0))
 	})
 
-	It("should decode", func() {
+	It("decodes", func() {
 		var msgs []*testdata.MockMessage
 		for {
 			var msg testdata.MockMessage
data/spec/feedx/cache/memory_spec.rb CHANGED
@@ -1,7 +1,7 @@
 require 'spec_helper'
 
 RSpec.describe Feedx::Cache::Memory do
-  it 'should read/write' do
+  it 'read/writes' do
     expect(subject.fetch('key')).to be_nil
     expect(subject.fetch('key') { 'value' }).to eq('value')
     expect(subject.fetch('key')).to eq('value')
@@ -16,7 +16,7 @@ RSpec.describe Feedx::Cache::Memory do
     expect(subject.fetch('key')).to be_nil
   end
 
-  it 'should write strings' do
+  it 'writes strings' do
     subject.write('key', 5)
     expect(subject.read('key')).to eq('5')
   end
data/spec/feedx/cache/value_spec.rb CHANGED
@@ -5,7 +5,7 @@ RSpec.describe Feedx::Cache::Value do
     described_class.new(Feedx::Cache::Memory.new, 'key')
   end
 
-  it 'should read/write' do
+  it 'read/writes' do
     expect(subject.fetch).to be_nil
     expect(subject.fetch { 'value' }).to eq('value')
     expect(subject.fetch).to eq('value')
data/spec/feedx/compression/gzip_spec.rb CHANGED
@@ -1,7 +1,7 @@
 require 'spec_helper'
 
 RSpec.describe Feedx::Compression::Gzip do
-  it 'should wrap readers/writers' do
+  it 'wraps readers/writers' do
     wio = StringIO.new
     subject.writer(wio) {|w| w.write 'xyz' * 1000 }
     expect(wio.size).to be_within(20).of(40)
data/spec/feedx/compression/none_spec.rb CHANGED
@@ -1,7 +1,7 @@
 require 'spec_helper'
 
 RSpec.describe Feedx::Compression::None do
-  it 'should wrap readers/writers' do
+  it 'wraps readers/writers' do
     wio = StringIO.new
     subject.writer(wio) {|w| w.write 'xyz' * 1000 }
     expect(wio.size).to eq(3000)
data/spec/feedx/compression_spec.rb CHANGED
@@ -1,14 +1,14 @@
 require 'spec_helper'
 
 RSpec.describe Feedx::Compression do
-  it 'should resolve' do
+  it 'resolves' do
     expect(described_class.resolve(:gzip)).to be_instance_of(described_class::Gzip)
     expect(described_class.resolve(:gz)).to be_instance_of(described_class::Gzip)
     expect(described_class.resolve(nil)).to be_instance_of(described_class::None)
     expect { described_class.resolve(:txt) }.to raise_error(/invalid compression txt/)
   end
 
-  it 'should detect' do
+  it 'detects' do
     expect(described_class.detect('path/to/file.jsonz')).to be_instance_of(described_class::Gzip)
     expect(described_class.detect('path/to/file.json.gz')).to be_instance_of(described_class::Gzip)
     expect(described_class.detect('path/to/file.json')).to be_instance_of(described_class::None)
data/spec/feedx/consumer_spec.rb CHANGED
@@ -4,15 +4,16 @@ RSpec.describe Feedx::Consumer do
   let(:bucket) { BFS::Bucket::InMem.new }
   let(:klass) { Feedx::TestCase::Model }
   let(:cache) { Feedx::Cache::Memory.new.value('my-consumer') }
+
   before { allow(BFS).to receive(:resolve).and_return(bucket) }
 
-  it 'should reject invalid inputs' do
+  it 'rejects invalid inputs' do
     expect do
-      described_class.each('mock:///dir/file.txt', klass) {}
+      described_class.each('mock:///dir/file.txt', klass)
     end.to raise_error(/unable to detect format/)
   end
 
-  it 'should consume feeds' do
+  it 'consumes feeds' do
     url = mock_produce!
     csm = described_class.new(url, klass)
     expect(csm).to be_a(Enumerable)
@@ -24,7 +25,7 @@ RSpec.describe Feedx::Consumer do
     expect(cnt).to eq(300)
   end
 
-  it 'should perform conditionally' do
+  it 'performs conditionally' do
     url = mock_produce! last_modified: Time.at(1515151515)
     expect(described_class.new(url, klass, cache: cache).count).to eq(300)
     expect(described_class.new(url, klass, cache: cache).count).to eq(0)
data/spec/feedx/format/abstract_spec.rb CHANGED
@@ -2,10 +2,11 @@ require 'spec_helper'
 
 RSpec.describe Feedx::Format::Abstract do
   subject { Feedx::Format::JSON.new }
+
   let(:wio) { StringIO.new }
   let(:rio) { StringIO.open(wio.string) }
 
-  it 'should decode each' do
+  it 'decodes each' do
     subject.encoder wio do |enc|
       enc.encode(Feedx::TestCase::Model.new('X'))
       enc.encode(Feedx::TestCase::Model.new('Y'))
data/spec/feedx/format/json_spec.rb CHANGED
@@ -4,17 +4,17 @@ RSpec.describe Feedx::Format::JSON do
   let(:wio) { StringIO.new }
   let(:rio) { StringIO.open(wio.string) }
 
-  it 'should encode/decode' do
+  it 'encode/decodes' do
     subject.encoder wio do |enc|
       enc.encode(Feedx::TestCase::Model.new('X'))
       enc.encode(Feedx::TestCase::Model.new('Y'))
       enc.encode(Feedx::TestCase::Message.new(title: 'Z'))
     end
-    expect(wio.string.lines).to eq [
-      %({"title":"X","updated_at":"2018-01-05 11:25:15 UTC"}\n),
-      %({"title":"Y","updated_at":"2018-01-05 11:25:15 UTC"}\n),
-      %({"title":"Z"}\n),
-    ]
+    expect(wio.string).to eq(<<~JSON)
+      {"title":"X","updated_at":"2018-01-05 11:25:15 UTC"}
+      {"title":"Y","updated_at":"2018-01-05 11:25:15 UTC"}
+      {"title":"Z"}
+    JSON
 
     subject.decoder rio do |dec|
       expect(dec.decode(Feedx::TestCase::Model)).to eq(Feedx::TestCase::Model.new('X'))
data/spec/feedx/format/parquet_spec.rb CHANGED
@@ -11,7 +11,7 @@ RSpec.describe Feedx::Format::Parquet do
     ])
   end
 
-  it 'should encode/decode' do
+  it 'encode/decodes' do
     subject.encoder wio, schema: schema, batch_size: 2 do |enc|
       enc.encode(Feedx::TestCase::Model.new('X'))
       enc.encode(Feedx::TestCase::Model.new('Y'))
data/spec/feedx/format/protobuf_spec.rb CHANGED
@@ -4,7 +4,7 @@ RSpec.describe Feedx::Format::Protobuf do
   let(:wio) { StringIO.new }
   let(:rio) { StringIO.open(wio.string) }
 
-  it 'should encode/decode' do
+  it 'encode/decodes' do
     subject.encoder wio do |enc|
       enc.encode(Feedx::TestCase::Model.new('X'))
       enc.encode(Feedx::TestCase::Model.new('Y'))
data/spec/feedx/format_spec.rb CHANGED
@@ -1,13 +1,13 @@
 require 'spec_helper'
 
 RSpec.describe Feedx::Format do
-  it 'should resolve' do
+  it 'resolves' do
     expect(described_class.resolve(:json)).to be_instance_of(described_class::JSON)
     expect(described_class.resolve(:pb)).to be_instance_of(described_class::Protobuf)
     expect { described_class.resolve(:txt) }.to raise_error(/invalid format txt/)
   end
 
-  it 'should detect' do
+  it 'detects' do
     expect(described_class.detect('path/to/file.json')).to be_instance_of(described_class::JSON)
     expect(described_class.detect('path/to/file.jsonz')).to be_instance_of(described_class::JSON)
     expect(described_class.detect('path/to/file.json.gz')).to be_instance_of(described_class::JSON)
data/spec/feedx/producer_spec.rb CHANGED
@@ -6,39 +6,40 @@ RSpec.describe Feedx::Producer do
   end
 
   let(:bucket) { BFS::Bucket::InMem.new }
+
   before { allow(BFS).to receive(:resolve).and_return(bucket) }
 
-  it 'should reject invalid inputs' do
+  it 'rejects invalid inputs' do
     expect do
       described_class.perform 'mock:///dir/file.txt', enum: enumerable
     end.to raise_error(/unable to detect format/)
   end
 
-  it 'should push compressed JSON' do
+  it 'pushes compressed JSON' do
     size = described_class.perform 'mock:///dir/file.jsonz', enum: enumerable
     expect(size).to be_within(20).of(166)
     expect(bucket.info('dir/file.jsonz').size).to eq(size)
   end
 
-  it 'should push plain JSON' do
+  it 'pushes plain JSON' do
     size = described_class.perform 'mock:///dir/file.json', enum: enumerable
     expect(size).to eq(15900)
     expect(bucket.info('dir/file.json').size).to eq(size)
   end
 
-  it 'should push compressed PB' do
+  it 'pushes compressed PB' do
     size = described_class.perform 'mock:///dir/file.pbz', enum: enumerable
     expect(size).to be_within(20).of(41)
     expect(bucket.info('dir/file.pbz').size).to eq(size)
   end
 
-  it 'should push plain PB' do
+  it 'pushes plain PB' do
     size = described_class.perform 'mock:///dir/file.pb', enum: enumerable
     expect(size).to eq(1200)
     expect(bucket.info('dir/file.pb').size).to eq(size)
   end
 
-  it 'should support factories' do
+  it 'supports factories' do
     size = described_class.perform('mock:///dir/file.json') do
       enumerable
     end
@@ -46,12 +47,12 @@ RSpec.describe Feedx::Producer do
     expect(bucket.info('dir/file.json').size).to eq(size)
   end
 
-  it 'should support last-modified' do
+  it 'supports last-modified' do
     described_class.perform 'mock:///dir/file.json', last_modified: Time.at(1515151515), enum: enumerable
     expect(bucket.info('dir/file.json').metadata).to eq('X-Feedx-Last-Modified' => '1515151515000')
   end
 
-  it 'should perform conditionally' do
+  it 'performs conditionally' do
     size = described_class.perform 'mock:///dir/file.json', last_modified: Time.at(1515151515), enum: enumerable
     expect(size).to eq(15900)
 
@@ -65,7 +66,7 @@ RSpec.describe Feedx::Producer do
     expect(size).to eq(15900)
   end
 
-  it 'should accept downstream options' do
+  it 'accepts downstream options' do
     expect do
       described_class.perform 'mock:///dir/file.jsonz', enum: enumerable, x: 1, y: 'v', z: true
     end.not_to raise_error
data/spec/feedx/stream_spec.rb CHANGED
@@ -1,19 +1,22 @@
 require 'spec_helper'
 
 RSpec.describe Feedx::Stream do
+  subject { described_class.new('mock:///dir/file.json') }
+
   let(:bucket) { BFS::Bucket::InMem.new }
+  let(:compressed) { described_class.new('mock:///dir/file.json.gz') }
+
   before { allow(BFS).to receive(:resolve).and_return(bucket) }
 
-  subject { described_class.new('mock:///dir/file.json') }
-  let(:compressed) { described_class.new('mock:///dir/file.json.gz') }
+  after { subject.close; compressed.close }
 
-  it 'should reject invalid inputs' do
+  it 'rejects invalid inputs' do
     expect do
       described_class.new('mock:///dir/file.txt')
     end.to raise_error(/unable to detect format/)
   end
 
-  it 'should accept custom formats' do
+  it 'accepts custom formats' do
     format = Class.new do
       def encoder(io, &block)
         Feedx::Format::JSON::Encoder.open(io, &block)
@@ -24,27 +27,30 @@ RSpec.describe Feedx::Stream do
       end
     end
 
-    stream = described_class.new('mock:///dir/file.txt', format: format.new)
-    stream.create {|s| s.encode Feedx::TestCase::Model.new('X') }
+    result = described_class.open('mock:///dir/file.txt', format: format.new) do |stream|
+      stream.create {|s| s.encode Feedx::TestCase::Model.new('X') }
+      21
+    end
+    expect(result).to eq(21)
 
-    expect(bucket.read('dir/file.txt')).to eq(
-      %({"title":"X","updated_at":"2018-01-05 11:25:15 UTC"}\n),
-    )
+    expect(bucket.read('dir/file.txt')).to eq(<<~JSON)
+      {"title":"X","updated_at":"2018-01-05 11:25:15 UTC"}
+    JSON
   end
 
-  it 'should encode' do
+  it 'encodes' do
     subject.create do |s|
       s.encode(Feedx::TestCase::Model.new('X'))
       s.encode(Feedx::TestCase::Model.new('Y'))
     end
 
-    expect(bucket.read('dir/file.json')).to eq(
-      %({"title":"X","updated_at":"2018-01-05 11:25:15 UTC"}\n) +
-      %({"title":"Y","updated_at":"2018-01-05 11:25:15 UTC"}\n),
-    )
+    expect(bucket.read('dir/file.json')).to eq(<<~JSON)
+      {"title":"X","updated_at":"2018-01-05 11:25:15 UTC"}
+      {"title":"Y","updated_at":"2018-01-05 11:25:15 UTC"}
+    JSON
   end
 
-  it 'should encode compressed' do
+  it 'encodes compressed' do
     compressed.create do |s|
       100.times do
         s.encode(Feedx::TestCase::Model.new('X'))
@@ -54,14 +60,26 @@ RSpec.describe Feedx::Stream do
     expect(bucket.info('dir/file.json.gz').size).to be_within(10).of(108)
   end
 
-  it 'should encode with create options' do
+  it 'encodes with create options' do
     subject.create metadata: { 'x' => '5' } do |s|
       s.encode(Feedx::TestCase::Model.new('X'))
     end
     expect(bucket.info('dir/file.json').metadata).to eq('X' => '5')
   end
 
-  it 'should decode' do
+  it 'aborts encode on errors (if compressed)' do
+    stop = RuntimeError.new('STOP')
+    expect do
+      compressed.create do |s|
+        s.encode(Feedx::TestCase::Model.new('X'))
+        raise stop
+      end
+    end.to raise_error(stop)
+
+    expect(bucket.ls('**').to_a).to be_empty
+  end
+
+  it 'decodes' do
     subject.create do |s|
       s.encode(Feedx::TestCase::Model.new('X'))
       s.encode(Feedx::TestCase::Model.new('Y'))
@@ -75,7 +93,7 @@ RSpec.describe Feedx::Stream do
     end
   end
 
-  it 'should decode compressed' do
+  it 'decodes compressed' do
     compressed.create do |s|
       s.encode(Feedx::TestCase::Model.new('X'))
       s.encode(Feedx::TestCase::Model.new('Y'))
data/writer.go CHANGED
@@ -20,7 +20,7 @@ type WriterOptions struct {
 	Compression Compression
 
 	// Provides an optional last modified timestamp which is stored with the remote metadata.
-	// Default: time.Now().
+	// Default: time.Time{}.
 	LastMod time.Time
 }
 
@@ -31,9 +31,6 @@ func (o *WriterOptions) norm(name string) {
 	if o.Compression == nil {
 		o.Compression = DetectCompression(name)
 	}
-	if o.LastMod.IsZero() {
-		o.LastMod = time.Now()
-	}
 }
 
 // Writer encodes feeds to remote locations.
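Note the behavioural change in this hunk: norm() no longer backfills LastMod with time.Now(), so an unset LastMod stays at the zero value and is serialized as X-Feedx-Last-Modified: 0 (see the writer_test.go expectations below). A minimal sketch of how a caller keeps the old behaviour; NewWriter and WriterOptions appear in the tests in this diff, while the Commit/Discard calls are assumptions about the Writer API not shown here:

package main

import (
	"context"
	"time"

	"github.com/bsm/bfs"
	"github.com/bsm/feedx"
)

func main() {
	obj := bfs.NewInMemObject("path/to/file.json")

	// As of this change, leaving LastMod unset stores "X-Feedx-Last-Modified: 0";
	// set it explicitly to keep the previous implicit time.Now() default.
	w := feedx.NewWriter(context.Background(), obj, &feedx.WriterOptions{
		LastMod: time.Now(),
	})
	defer w.Discard() // assumed cleanup method, not part of this diff

	// ... encode entries here, then:
	_ = w.Commit() // assumed finalizer, not part of this diff
}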
data/writer_test.go CHANGED
@@ -7,8 +7,8 @@ import (
 
 	"github.com/bsm/bfs"
 	"github.com/bsm/feedx"
-	. "github.com/onsi/ginkgo"
-	. "github.com/onsi/gomega"
+	. "github.com/bsm/ginkgo"
+	. "github.com/bsm/gomega"
 )
 
 var _ = Describe("Writer", func() {
@@ -20,7 +20,7 @@ var _ = Describe("Writer", func() {
 		compressed = bfs.NewInMemObject("path/to/file.jsonz")
 	})
 
-	It("should write plain", func() {
+	It("writes plain", func() {
 		w := feedx.NewWriter(context.Background(), plain, &feedx.WriterOptions{
 			LastMod: time.Unix(1515151515, 123456789),
 		})
@@ -35,7 +35,7 @@ var _ = Describe("Writer", func() {
 		Expect(info.Metadata).To(Equal(bfs.Metadata{"X-Feedx-Last-Modified": "1515151515123"}))
 	})
 
-	It("should write compressed", func() {
+	It("writes compressed", func() {
 		w := feedx.NewWriter(context.Background(), compressed, &feedx.WriterOptions{
 			LastMod: time.Unix(1515151515, 123456789),
 		})
@@ -50,14 +50,14 @@ var _ = Describe("Writer", func() {
 		Expect(info.Metadata).To(Equal(bfs.Metadata{"X-Feedx-Last-Modified": "1515151515123"}))
 	})
 
-	It("should encode", func() {
-		Expect(writeMulti(plain, 10)).To(Succeed())
-		Expect(writeMulti(compressed, 10)).To(Succeed())
+	It("encodes", func() {
+		Expect(writeMulti(plain, 10, time.Time{})).To(Succeed())
+		Expect(writeMulti(compressed, 10, mockTime)).To(Succeed())
 
 		info, err := plain.Head(ctx)
 		Expect(err).NotTo(HaveOccurred())
 		Expect(info.Size).To(BeNumerically("~", 370, 10))
-		Expect(info.Metadata).To(Equal(bfs.Metadata{"X-Feedx-Last-Modified": "1515151515123"}))
+		Expect(info.Metadata).To(Equal(bfs.Metadata{"X-Feedx-Last-Modified": "0"}))
 
 		info, err = compressed.Head(ctx)
 		Expect(err).NotTo(HaveOccurred())
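The writeMulti helper referenced above gained a lastMod argument in this release (alongside a mockTime fixture); its definition lives in the shared test setup, which is not part of the hunks shown. A plausible reconstruction, assuming the Writer exposes Encode/Commit/Discard and that mockTime matches the timestamp used elsewhere in this file:

package feedx_test

import (
	"context"
	"time"

	"github.com/bsm/bfs"
	"github.com/bsm/feedx"
	"github.com/bsm/feedx/internal/testdata"
)

// Assumed to match the time.Unix(1515151515, 123456789) used in the specs above.
var mockTime = time.Unix(1515151515, 123456789)

// Plausible shape of the shared helper after this change: write numEntries
// fixture messages to obj, stamping the given last-modified time.
func writeMulti(obj *bfs.Object, numEntries int, lastMod time.Time) error {
	w := feedx.NewWriter(context.Background(), obj, &feedx.WriterOptions{LastMod: lastMod})
	defer w.Discard() // assumed API, mirroring Writer usage in this diff

	for i := 0; i < numEntries; i++ {
		if err := w.Encode(&testdata.MockMessage{}); err != nil { // fixture payload assumed
			return err
		}
	}
	return w.Commit() // assumed to upload the buffered feed and its metadata
}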