feedx 0.12.2 → 0.12.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. checksums.yaml +4 -4
  2. data/.editorconfig +3 -0
  3. data/.github/workflows/lint.yml +18 -0
  4. data/.github/workflows/test.yml +48 -0
  5. data/.gitignore +1 -0
  6. data/.golangci.yml +4 -0
  7. data/.rubocop.yml +14 -5
  8. data/Gemfile +0 -2
  9. data/Gemfile.lock +60 -50
  10. data/Makefile +6 -6
  11. data/README.md +1 -1
  12. data/compression.go +18 -0
  13. data/compression_test.go +17 -5
  14. data/consumer.go +12 -3
  15. data/consumer_test.go +50 -19
  16. data/ext/parquet/decoder.go +59 -0
  17. data/ext/parquet/decoder_test.go +88 -0
  18. data/ext/parquet/encoder.go +27 -0
  19. data/ext/parquet/encoder_test.go +70 -0
  20. data/ext/parquet/go.mod +12 -0
  21. data/ext/parquet/go.sum +193 -0
  22. data/ext/parquet/parquet.go +78 -0
  23. data/ext/parquet/parquet_test.go +28 -0
  24. data/ext/parquet/testdata/alltypes_plain.parquet +0 -0
  25. data/feedx.gemspec +6 -6
  26. data/feedx_ext_test.go +6 -0
  27. data/feedx_test.go +8 -6
  28. data/format.go +45 -15
  29. data/format_test.go +7 -5
  30. data/go.mod +8 -5
  31. data/go.sum +95 -32
  32. data/internal/testdata/testdata.pb.go +176 -77
  33. data/lib/feedx/cache/memory.rb +1 -0
  34. data/lib/feedx/format.rb +1 -1
  35. data/lib/feedx/producer.rb +18 -19
  36. data/lib/feedx/stream.rb +4 -8
  37. data/producer_test.go +4 -4
  38. data/reader_test.go +6 -5
  39. data/spec/feedx/cache/memory_spec.rb +2 -2
  40. data/spec/feedx/cache/value_spec.rb +1 -1
  41. data/spec/feedx/compression/gzip_spec.rb +1 -1
  42. data/spec/feedx/compression/none_spec.rb +1 -1
  43. data/spec/feedx/compression_spec.rb +2 -2
  44. data/spec/feedx/consumer_spec.rb +5 -4
  45. data/spec/feedx/format/abstract_spec.rb +2 -1
  46. data/spec/feedx/format/json_spec.rb +6 -6
  47. data/spec/feedx/format/parquet_spec.rb +1 -1
  48. data/spec/feedx/format/protobuf_spec.rb +1 -1
  49. data/spec/feedx/format_spec.rb +2 -2
  50. data/spec/feedx/producer_spec.rb +10 -9
  51. data/spec/feedx/stream_spec.rb +30 -18
  52. data/writer.go +1 -4
  53. data/writer_test.go +8 -8
  54. metadata +30 -25
  55. data/.travis.yml +0 -24
data/lib/feedx/cache/memory.rb CHANGED
@@ -3,6 +3,7 @@ require 'monitor'
 # Thread-safe in-memory cache. Use for testing only.
 class Feedx::Cache::Memory < Feedx::Cache::Abstract
   def initialize
+    super
     @monitor = Monitor.new
     @entries = {}
   end
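Note on the change above: a Ruby `initialize` override that omits `super` silently skips the parent's setup, which is the slip this one-line fix addresses. A generic illustration (the actual body of Feedx::Cache::Abstract#initialize is not shown in this diff):

    class Parent
      def initialize
        @ready = true # parent-level setup
      end
    end

    class Child < Parent
      def initialize
        super       # without this line, @ready would never be set
        @extra = {}
      end
    end

    Child.new.instance_variable_get(:@ready) # => true (nil if super is omitted)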
data/lib/feedx/format.rb CHANGED
@@ -27,7 +27,7 @@ module Feedx
     ext = File.extname(base)
     raise ArgumentError, 'unable to detect format' if ext.empty?

-    kind = _resolve(ext[1..-1]) || _resolve(ext[1..-2])
+    kind = _resolve(ext[1..]) || _resolve(ext[1..-2])
     return kind if kind

     base = base[0..-ext.size - 1]
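Here `ext[1..]` is the endless-range spelling introduced in Ruby 2.6 and behaves exactly like `ext[1..-1]`; the second lookup, `_resolve(ext[1..-2])`, drops the final character so fused compressed extensions still resolve. A rough walk-through of the loop, with results taken from the format specs further down:

    Feedx::Format.detect('path/to/file.jsonz')
    # ext = ".jsonz" -> _resolve("jsonz") fails -> _resolve("json") matches
    #=> Feedx::Format::JSON

    Feedx::Format.detect('path/to/file.json.gz')
    # ext = ".gz" -> both lookups fail -> base is trimmed to "file.json" and retried
    #=> Feedx::Format::JSON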
data/lib/feedx/producer.rb CHANGED
@@ -32,26 +32,25 @@ module Feedx
     end

     def perform
-      stream = Feedx::Stream.new(@url, **@opts)
-      enum = @enum.is_a?(Proc) ? @enum.call : @enum
-      last_mod = @last_mod.is_a?(Proc) ? @last_mod.call(enum) : @last_mod
-      local_rev = last_mod.is_a?(Integer) ? last_mod : (last_mod.to_f * 1000).floor
-
-      begin
-        metadata = stream.blob.info.metadata
-        remote_rev = (metadata[META_LAST_MODIFIED] || metadata[META_LAST_MODIFIED_DC]).to_i
-        return -1 unless local_rev > remote_rev
-      rescue BFS::FileNotFound
-        nil
-      end if local_rev.positive?
-
-      stream.create metadata: { META_LAST_MODIFIED => local_rev.to_s } do |fmt|
-        iter = enum.respond_to?(:find_each) ? :find_each : :each
-        enum.send(iter) {|rec| fmt.encode(rec, **@opts) }
+      Feedx::Stream.open(@url, **@opts) do |stream|
+        enum = @enum.is_a?(Proc) ? @enum.call : @enum
+        last_mod = @last_mod.is_a?(Proc) ? @last_mod.call(enum) : @last_mod
+        local_rev = last_mod.is_a?(Integer) ? last_mod : (last_mod.to_f * 1000).floor
+
+        begin
+          metadata = stream.blob.info.metadata
+          remote_rev = (metadata[META_LAST_MODIFIED] || metadata[META_LAST_MODIFIED_DC]).to_i
+          return -1 unless local_rev > remote_rev
+        rescue BFS::FileNotFound
+          nil
+        end if local_rev.positive?
+
+        stream.create metadata: { META_LAST_MODIFIED => local_rev.to_s } do |fmt|
+          iter = enum.respond_to?(:find_each) ? :find_each : :each
+          enum.send(iter) {|rec| fmt.encode(rec, **@opts) }
+        end
+        stream.blob.info.size
       end
-      stream.blob.info.size
-    ensure
-      stream&.close
     end
   end
 end
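The producer now delegates stream lifecycle to the block form of `Feedx::Stream` instead of a manual `ensure`. As a sketch, the class method presumably wraps construction and close roughly like this (its definition is not part of this diff), mirroring the removed `ensure stream&.close`:

    class Feedx::Stream
      # Build a stream, yield it, and close it even when the block
      # raises or returns early (e.g. the `return -1` short-circuit).
      def self.open(url, **opts)
        stream = new(url, **opts)
        yield stream
      ensure
        stream&.close
      end
    end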
data/lib/feedx/stream.rb CHANGED
@@ -36,12 +36,10 @@ module Feedx
     # @param [Hash] opts BFS::Blob#open options
     # @yield A block over a formatted stream.
     # @yieldparam [Feedx::Format::Abstract] formatted input stream.
-    def open(**opts)
+    def open(**opts, &block)
       @blob.open(**opts) do |io|
         @compress.reader(io, **@opts) do |cio|
-          @format.decoder(cio, **@opts) do |fmt|
-            yield fmt
-          end
+          @format.decoder(cio, **@opts, &block)
         end
       end
     end
@@ -50,12 +48,10 @@ module Feedx
     # @param [Hash] opts BFS::Blob#create options
     # @yield A block over a formatted stream.
     # @yieldparam [Feedx::Format::Abstract] formatted output stream.
-    def create(**opts)
+    def create(**opts, &block)
       @blob.create(**opts) do |io|
         @compress.writer(io, **@opts) do |cio|
-          @format.encoder(cio, **@opts) do |fmt|
-            yield fmt
-          end
+          @format.encoder(cio, **@opts, &block)
         end
       end
     end
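Both `#open` and `#create` now capture the caller's block explicitly and forward it with `&block`, rather than allocating an inner wrapper block whose only job is to re-yield. The same pattern in isolation:

    def inner
      yield :fmt
    end

    # Old shape: wrap and re-yield through an extra block frame.
    def outer_old
      inner { |fmt| yield fmt }
    end

    # New shape: capture once, forward unchanged.
    def outer_new(&block)
      inner(&block)
    end

    outer_old { |x| p x } #=> :fmt
    outer_new { |x| p x } #=> :fmt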
data/producer_test.go CHANGED
@@ -7,8 +7,8 @@ import (

 	"github.com/bsm/bfs"
 	"github.com/bsm/feedx"
-	. "github.com/onsi/ginkgo"
-	. "github.com/onsi/gomega"
+	. "github.com/bsm/ginkgo"
+	. "github.com/bsm/gomega"
 )

 var _ = Describe("Producer", func() {
@@ -43,7 +43,7 @@ var _ = Describe("Producer", func() {
 		}
 	})

-	It("should produce", func() {
+	It("produces", func() {
 		setup(nil)

 		Expect(subject.LastPush()).To(BeTemporally("~", time.Now(), time.Second))
@@ -56,7 +56,7 @@ var _ = Describe("Producer", func() {
 		Expect(info.Size).To(BeNumerically("~", 75, 10))
 	})

-	It("should produce with custom last-mod check", func() {
+	It("produces with custom last-mod check", func() {
 		setup(&feedx.ProducerOptions{
 			Interval:     50 * time.Millisecond,
 			LastModCheck: func(_ context.Context) (time.Time, error) { return time.Unix(1515151515, 987654321), nil },
data/reader_test.go CHANGED
@@ -4,12 +4,13 @@ import (
 	"context"
 	"io"
 	"io/ioutil"
+	"time"

 	"github.com/bsm/bfs"
 	"github.com/bsm/feedx"
 	"github.com/bsm/feedx/internal/testdata"
-	. "github.com/onsi/ginkgo"
-	. "github.com/onsi/gomega"
+	. "github.com/bsm/ginkgo"
+	. "github.com/bsm/gomega"
 )

 var _ = Describe("Reader", func() {
@@ -19,7 +20,7 @@ var _ = Describe("Reader", func() {

 	BeforeEach(func() {
 		obj = bfs.NewInMemObject("path/to/file.json")
-		Expect(writeMulti(obj, 3)).To(Succeed())
+		Expect(writeMulti(obj, 3, time.Time{})).To(Succeed())

 		var err error
 		subject, err = feedx.NewReader(ctx, obj, nil)
@@ -30,14 +31,14 @@ var _ = Describe("Reader", func() {
 		Expect(subject.Close()).To(Succeed())
 	})

-	It("should read", func() {
+	It("reads", func() {
 		data, err := ioutil.ReadAll(subject)
 		Expect(err).NotTo(HaveOccurred())
 		Expect(len(data)).To(BeNumerically("~", 110, 20))
 		Expect(subject.NumRead()).To(Equal(0))
 	})

-	It("should decode", func() {
+	It("decodes", func() {
 		var msgs []*testdata.MockMessage
 		for {
 			var msg testdata.MockMessage
data/spec/feedx/cache/memory_spec.rb CHANGED
@@ -1,7 +1,7 @@
 require 'spec_helper'

 RSpec.describe Feedx::Cache::Memory do
-  it 'should read/write' do
+  it 'read/writes' do
     expect(subject.fetch('key')).to be_nil
     expect(subject.fetch('key') { 'value' }).to eq('value')
     expect(subject.fetch('key')).to eq('value')
@@ -16,7 +16,7 @@ RSpec.describe Feedx::Cache::Memory do
     expect(subject.fetch('key')).to be_nil
   end

-  it 'should write strings' do
+  it 'writes strings' do
     subject.write('key', 5)
     expect(subject.read('key')).to eq('5')
   end
data/spec/feedx/cache/value_spec.rb CHANGED
@@ -5,7 +5,7 @@ RSpec.describe Feedx::Cache::Value do
     described_class.new(Feedx::Cache::Memory.new, 'key')
   end

-  it 'should read/write' do
+  it 'read/writes' do
     expect(subject.fetch).to be_nil
     expect(subject.fetch { 'value' }).to eq('value')
     expect(subject.fetch).to eq('value')
data/spec/feedx/compression/gzip_spec.rb CHANGED
@@ -1,7 +1,7 @@
 require 'spec_helper'

 RSpec.describe Feedx::Compression::Gzip do
-  it 'should wrap readers/writers' do
+  it 'wraps readers/writers' do
     wio = StringIO.new
     subject.writer(wio) {|w| w.write 'xyz' * 1000 }
     expect(wio.size).to be_within(20).of(40)
data/spec/feedx/compression/none_spec.rb CHANGED
@@ -1,7 +1,7 @@
 require 'spec_helper'

 RSpec.describe Feedx::Compression::None do
-  it 'should wrap readers/writers' do
+  it 'wraps readers/writers' do
     wio = StringIO.new
     subject.writer(wio) {|w| w.write 'xyz' * 1000 }
     expect(wio.size).to eq(3000)
data/spec/feedx/compression_spec.rb CHANGED
@@ -1,14 +1,14 @@
 require 'spec_helper'

 RSpec.describe Feedx::Compression do
-  it 'should resolve' do
+  it 'resolves' do
     expect(described_class.resolve(:gzip)).to be_instance_of(described_class::Gzip)
     expect(described_class.resolve(:gz)).to be_instance_of(described_class::Gzip)
     expect(described_class.resolve(nil)).to be_instance_of(described_class::None)
     expect { described_class.resolve(:txt) }.to raise_error(/invalid compression txt/)
   end

-  it 'should detect' do
+  it 'detects' do
     expect(described_class.detect('path/to/file.jsonz')).to be_instance_of(described_class::Gzip)
     expect(described_class.detect('path/to/file.json.gz')).to be_instance_of(described_class::Gzip)
     expect(described_class.detect('path/to/file.json')).to be_instance_of(described_class::None)
data/spec/feedx/consumer_spec.rb CHANGED
@@ -4,15 +4,16 @@ RSpec.describe Feedx::Consumer do
   let(:bucket) { BFS::Bucket::InMem.new }
   let(:klass) { Feedx::TestCase::Model }
   let(:cache) { Feedx::Cache::Memory.new.value('my-consumer') }
+
   before { allow(BFS).to receive(:resolve).and_return(bucket) }

-  it 'should reject invalid inputs' do
+  it 'rejects invalid inputs' do
     expect do
-      described_class.each('mock:///dir/file.txt', klass) {}
+      described_class.each('mock:///dir/file.txt', klass)
     end.to raise_error(/unable to detect format/)
   end

-  it 'should consume feeds' do
+  it 'consumes feeds' do
     url = mock_produce!
     csm = described_class.new(url, klass)
     expect(csm).to be_a(Enumerable)
@@ -24,7 +25,7 @@ RSpec.describe Feedx::Consumer do
     expect(cnt).to eq(300)
   end

-  it 'should perform conditionally' do
+  it 'performs conditionally' do
     url = mock_produce! last_modified: Time.at(1515151515)
     expect(described_class.new(url, klass, cache: cache).count).to eq(300)
     expect(described_class.new(url, klass, cache: cache).count).to eq(0)
data/spec/feedx/format/abstract_spec.rb CHANGED
@@ -2,10 +2,11 @@ require 'spec_helper'

 RSpec.describe Feedx::Format::Abstract do
   subject { Feedx::Format::JSON.new }
+
   let(:wio) { StringIO.new }
   let(:rio) { StringIO.open(wio.string) }

-  it 'should decode each' do
+  it 'decodes each' do
     subject.encoder wio do |enc|
       enc.encode(Feedx::TestCase::Model.new('X'))
       enc.encode(Feedx::TestCase::Model.new('Y'))
data/spec/feedx/format/json_spec.rb CHANGED
@@ -4,17 +4,17 @@ RSpec.describe Feedx::Format::JSON do
   let(:wio) { StringIO.new }
   let(:rio) { StringIO.open(wio.string) }

-  it 'should encode/decode' do
+  it 'encode/decodes' do
     subject.encoder wio do |enc|
       enc.encode(Feedx::TestCase::Model.new('X'))
       enc.encode(Feedx::TestCase::Model.new('Y'))
       enc.encode(Feedx::TestCase::Message.new(title: 'Z'))
     end
-    expect(wio.string.lines).to eq [
-      %({"title":"X","updated_at":"2018-01-05 11:25:15 UTC"}\n),
-      %({"title":"Y","updated_at":"2018-01-05 11:25:15 UTC"}\n),
-      %({"title":"Z"}\n),
-    ]
+    expect(wio.string).to eq(<<~JSON)
+      {"title":"X","updated_at":"2018-01-05 11:25:15 UTC"}
+      {"title":"Y","updated_at":"2018-01-05 11:25:15 UTC"}
+      {"title":"Z"}
+    JSON

     subject.decoder rio do |dec|
       expect(dec.decode(Feedx::TestCase::Model)).to eq(Feedx::TestCase::Model.new('X'))
data/spec/feedx/format/parquet_spec.rb CHANGED
@@ -11,7 +11,7 @@ RSpec.describe Feedx::Format::Parquet do
     ])
   end

-  it 'should encode/decode' do
+  it 'encode/decodes' do
     subject.encoder wio, schema: schema, batch_size: 2 do |enc|
       enc.encode(Feedx::TestCase::Model.new('X'))
       enc.encode(Feedx::TestCase::Model.new('Y'))
data/spec/feedx/format/protobuf_spec.rb CHANGED
@@ -4,7 +4,7 @@ RSpec.describe Feedx::Format::Protobuf do
   let(:wio) { StringIO.new }
   let(:rio) { StringIO.open(wio.string) }

-  it 'should encode/decode' do
+  it 'encode/decodes' do
     subject.encoder wio do |enc|
       enc.encode(Feedx::TestCase::Model.new('X'))
       enc.encode(Feedx::TestCase::Model.new('Y'))
data/spec/feedx/format_spec.rb CHANGED
@@ -1,13 +1,13 @@
 require 'spec_helper'

 RSpec.describe Feedx::Format do
-  it 'should resolve' do
+  it 'resolves' do
     expect(described_class.resolve(:json)).to be_instance_of(described_class::JSON)
     expect(described_class.resolve(:pb)).to be_instance_of(described_class::Protobuf)
     expect { described_class.resolve(:txt) }.to raise_error(/invalid format txt/)
   end

-  it 'should detect' do
+  it 'detects' do
     expect(described_class.detect('path/to/file.json')).to be_instance_of(described_class::JSON)
     expect(described_class.detect('path/to/file.jsonz')).to be_instance_of(described_class::JSON)
     expect(described_class.detect('path/to/file.json.gz')).to be_instance_of(described_class::JSON)
data/spec/feedx/producer_spec.rb CHANGED
@@ -6,39 +6,40 @@ RSpec.describe Feedx::Producer do
   end

   let(:bucket) { BFS::Bucket::InMem.new }
+
   before { allow(BFS).to receive(:resolve).and_return(bucket) }

-  it 'should reject invalid inputs' do
+  it 'rejects invalid inputs' do
     expect do
       described_class.perform 'mock:///dir/file.txt', enum: enumerable
     end.to raise_error(/unable to detect format/)
   end

-  it 'should push compressed JSON' do
+  it 'pushes compressed JSON' do
     size = described_class.perform 'mock:///dir/file.jsonz', enum: enumerable
     expect(size).to be_within(20).of(166)
     expect(bucket.info('dir/file.jsonz').size).to eq(size)
   end

-  it 'should push plain JSON' do
+  it 'pushes plain JSON' do
     size = described_class.perform 'mock:///dir/file.json', enum: enumerable
     expect(size).to eq(15900)
     expect(bucket.info('dir/file.json').size).to eq(size)
   end

-  it 'should push compressed PB' do
+  it 'pushes compressed PB' do
     size = described_class.perform 'mock:///dir/file.pbz', enum: enumerable
     expect(size).to be_within(20).of(41)
     expect(bucket.info('dir/file.pbz').size).to eq(size)
   end

-  it 'should push plain PB' do
+  it 'pushes plain PB' do
     size = described_class.perform 'mock:///dir/file.pb', enum: enumerable
     expect(size).to eq(1200)
     expect(bucket.info('dir/file.pb').size).to eq(size)
   end

-  it 'should support factories' do
+  it 'supports factories' do
     size = described_class.perform('mock:///dir/file.json') do
       enumerable
     end
@@ -46,12 +47,12 @@ RSpec.describe Feedx::Producer do
     expect(bucket.info('dir/file.json').size).to eq(size)
   end

-  it 'should support last-modified' do
+  it 'supports last-modified' do
     described_class.perform 'mock:///dir/file.json', last_modified: Time.at(1515151515), enum: enumerable
     expect(bucket.info('dir/file.json').metadata).to eq('X-Feedx-Last-Modified' => '1515151515000')
   end

-  it 'should perform conditionally' do
+  it 'performs conditionally' do
     size = described_class.perform 'mock:///dir/file.json', last_modified: Time.at(1515151515), enum: enumerable
     expect(size).to eq(15900)

@@ -65,7 +66,7 @@ RSpec.describe Feedx::Producer do
     expect(size).to eq(15900)
   end

-  it 'should accept downstream options' do
+  it 'accepts downstream options' do
     expect do
       described_class.perform 'mock:///dir/file.jsonz', enum: enumerable, x: 1, y: 'v', z: true
     end.not_to raise_error
data/spec/feedx/stream_spec.rb CHANGED
@@ -1,22 +1,22 @@
 require 'spec_helper'

 RSpec.describe Feedx::Stream do
+  subject { described_class.new('mock:///dir/file.json') }
+
   let(:bucket) { BFS::Bucket::InMem.new }
   let(:compressed) { described_class.new('mock:///dir/file.json.gz') }

-  subject { described_class.new('mock:///dir/file.json') }
-
   before { allow(BFS).to receive(:resolve).and_return(bucket) }
-  after { subject.close }
-  after { compressed.close }

-  it 'should reject invalid inputs' do
+  after { subject.close; compressed.close }
+
+  it 'rejects invalid inputs' do
     expect do
       described_class.new('mock:///dir/file.txt')
     end.to raise_error(/unable to detect format/)
   end

-  it 'should accept custom formats' do
+  it 'accepts custom formats' do
     format = Class.new do
       def encoder(io, &block)
         Feedx::Format::JSON::Encoder.open(io, &block)
@@ -33,24 +33,24 @@ RSpec.describe Feedx::Stream do
     end
     expect(result).to eq(21)

-    expect(bucket.read('dir/file.txt')).to eq(
-      %({"title":"X","updated_at":"2018-01-05 11:25:15 UTC"}\n),
-    )
+    expect(bucket.read('dir/file.txt')).to eq(<<~JSON)
+      {"title":"X","updated_at":"2018-01-05 11:25:15 UTC"}
+    JSON
   end

-  it 'should encode' do
+  it 'encodes' do
     subject.create do |s|
       s.encode(Feedx::TestCase::Model.new('X'))
       s.encode(Feedx::TestCase::Model.new('Y'))
     end

-    expect(bucket.read('dir/file.json')).to eq(
-      %({"title":"X","updated_at":"2018-01-05 11:25:15 UTC"}\n) +
-      %({"title":"Y","updated_at":"2018-01-05 11:25:15 UTC"}\n),
-    )
+    expect(bucket.read('dir/file.json')).to eq(<<~JSON)
+      {"title":"X","updated_at":"2018-01-05 11:25:15 UTC"}
+      {"title":"Y","updated_at":"2018-01-05 11:25:15 UTC"}
+    JSON
   end

-  it 'should encode compressed' do
+  it 'encodes compressed' do
     compressed.create do |s|
       100.times do
         s.encode(Feedx::TestCase::Model.new('X'))
@@ -60,14 +60,26 @@ RSpec.describe Feedx::Stream do
     expect(bucket.info('dir/file.json.gz').size).to be_within(10).of(108)
   end

-  it 'should encode with create options' do
+  it 'encodes with create options' do
     subject.create metadata: { 'x' => '5' } do |s|
       s.encode(Feedx::TestCase::Model.new('X'))
     end
     expect(bucket.info('dir/file.json').metadata).to eq('X' => '5')
   end

-  it 'should decode' do
+  it 'aborts encode on errors (if compressed)' do
+    stop = RuntimeError.new('STOP')
+    expect do
+      compressed.create do |s|
+        s.encode(Feedx::TestCase::Model.new('X'))
+        raise stop
+      end
+    end.to raise_error(stop)
+
+    expect(bucket.ls('**').to_a).to be_empty
+  end
+
+  it 'decodes' do
     subject.create do |s|
       s.encode(Feedx::TestCase::Model.new('X'))
       s.encode(Feedx::TestCase::Model.new('Y'))
@@ -81,7 +93,7 @@ RSpec.describe Feedx::Stream do
     end
   end

-  it 'should decode compressed' do
+  it 'decodes compressed' do
     compressed.create do |s|
       s.encode(Feedx::TestCase::Model.new('X'))
       s.encode(Feedx::TestCase::Model.new('Y'))
data/writer.go CHANGED
@@ -20,7 +20,7 @@ type WriterOptions struct {
 	Compression Compression

 	// Provides an optional last modified timestamp which is stored with the remote metadata.
-	// Default: time.Now().
+	// Default: time.Time{}.
 	LastMod time.Time
 }

@@ -31,9 +31,6 @@ func (o *WriterOptions) norm(name string) {
 	if o.Compression == nil {
 		o.Compression = DetectCompression(name)
 	}
-	if o.LastMod.IsZero() {
-		o.LastMod = time.Now()
-	}
 }

 // Writer encodes feeds to remote locations.
data/writer_test.go CHANGED
@@ -7,8 +7,8 @@ import (

 	"github.com/bsm/bfs"
 	"github.com/bsm/feedx"
-	. "github.com/onsi/ginkgo"
-	. "github.com/onsi/gomega"
+	. "github.com/bsm/ginkgo"
+	. "github.com/bsm/gomega"
 )

 var _ = Describe("Writer", func() {
@@ -20,7 +20,7 @@ var _ = Describe("Writer", func() {
 		compressed = bfs.NewInMemObject("path/to/file.jsonz")
 	})

-	It("should write plain", func() {
+	It("writes plain", func() {
 		w := feedx.NewWriter(context.Background(), plain, &feedx.WriterOptions{
 			LastMod: time.Unix(1515151515, 123456789),
 		})
@@ -35,7 +35,7 @@ var _ = Describe("Writer", func() {
 		Expect(info.Metadata).To(Equal(bfs.Metadata{"X-Feedx-Last-Modified": "1515151515123"}))
 	})

-	It("should write compressed", func() {
+	It("writes compressed", func() {
 		w := feedx.NewWriter(context.Background(), compressed, &feedx.WriterOptions{
 			LastMod: time.Unix(1515151515, 123456789),
 		})
@@ -50,14 +50,14 @@ var _ = Describe("Writer", func() {
 		Expect(info.Metadata).To(Equal(bfs.Metadata{"X-Feedx-Last-Modified": "1515151515123"}))
 	})

-	It("should encode", func() {
-		Expect(writeMulti(plain, 10)).To(Succeed())
-		Expect(writeMulti(compressed, 10)).To(Succeed())
+	It("encodes", func() {
+		Expect(writeMulti(plain, 10, time.Time{})).To(Succeed())
+		Expect(writeMulti(compressed, 10, mockTime)).To(Succeed())

 		info, err := plain.Head(ctx)
 		Expect(err).NotTo(HaveOccurred())
 		Expect(info.Size).To(BeNumerically("~", 370, 10))
-		Expect(info.Metadata).To(Equal(bfs.Metadata{"X-Feedx-Last-Modified": "1515151515123"}))
+		Expect(info.Metadata).To(Equal(bfs.Metadata{"X-Feedx-Last-Modified": "0"}))

 		info, err = compressed.Head(ctx)
 		Expect(err).NotTo(HaveOccurred())