feedx 0.9.2 → 0.12.0
- checksums.yaml +4 -4
- data/.gitignore +1 -0
- data/.rubocop.yml +2 -0
- data/.travis.yml +13 -6
- data/Gemfile.lock +43 -20
- data/Makefile +5 -0
- data/consumer_test.go +5 -5
- data/feedx.gemspec +3 -2
- data/feedx_test.go +13 -13
- data/format.go +16 -16
- data/format_test.go +6 -7
- data/go.mod +5 -11
- data/go.sum +43 -26
- data/internal/testdata/testdata.pb.go +124 -0
- data/internal/testdata/testdata.proto +15 -0
- data/lib/feedx/cache/abstract.rb +2 -2
- data/lib/feedx/compression.rb +11 -4
- data/lib/feedx/compression/abstract.rb +2 -2
- data/lib/feedx/compression/gzip.rb +14 -2
- data/lib/feedx/compression/none.rb +4 -4
- data/lib/feedx/consumer.rb +12 -9
- data/lib/feedx/format.rb +18 -9
- data/lib/feedx/format/abstract.rb +42 -13
- data/lib/feedx/format/json.rb +12 -8
- data/lib/feedx/format/parquet.rb +102 -0
- data/lib/feedx/format/protobuf.rb +16 -8
- data/lib/feedx/producer.rb +12 -9
- data/lib/feedx/stream.rb +22 -25
- data/producer.go +1 -4
- data/producer_test.go +1 -2
- data/reader_test.go +7 -8
- data/spec/feedx/compression/gzip_spec.rb +4 -2
- data/spec/feedx/compression/none_spec.rb +2 -2
- data/spec/feedx/compression_spec.rb +9 -9
- data/spec/feedx/consumer_spec.rb +6 -3
- data/spec/feedx/format/abstract_spec.rb +11 -8
- data/spec/feedx/format/json_spec.rb +12 -11
- data/spec/feedx/format/parquet_spec.rb +30 -0
- data/spec/feedx/format/protobuf_spec.rb +12 -11
- data/spec/feedx/format_spec.rb +8 -8
- data/spec/feedx/producer_spec.rb +6 -0
- data/spec/feedx/stream_spec.rb +20 -1
- data/spec/spec_helper.rb +17 -1
- data/writer.go +19 -18
- data/writer_test.go +3 -5
- metadata +22 -3
@@ -1,16 +1,24 @@
 require 'pbio'
 
 class Feedx::Format::Protobuf < Feedx::Format::Abstract
-
-
-
+  class Decoder < Feedx::Format::Abstract::Decoder
+    def initialize(io, **opts)
+      super PBIO::Delimited.new(io), **opts
+    end
 
-
-
+    def decode(target, **)
+      @io.read(target)
+    end
   end
 
-
-
-
+  class Encoder < Feedx::Format::Abstract::Encoder
+    def initialize(io, **opts)
+      super PBIO::Delimited.new(io), **opts
+    end
+
+    def encode(msg, **opts)
+      msg = msg.to_pb(**opts) if msg.respond_to?(:to_pb)
+      @io.write msg
+    end
   end
 end
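For context, the format class now splits encoding and decoding into dedicated Encoder/Decoder wrappers around PBIO::Delimited. A minimal usage sketch follows; MyProto::Post is a hypothetical generated protobuf message class (any object responding to #to_pb also works):

    require 'stringio'
    require 'feedx'

    format = Feedx::Format::Protobuf.new
    io     = StringIO.new

    # Write length-delimited protobuf messages.
    format.encoder(io) do |enc|
      enc.encode MyProto::Post.new(title: 'hello') # MyProto::Post is hypothetical
      enc.encode MyProto::Post.new(title: 'world')
    end

    # Read them back; decode returns nil at EOF.
    format.decoder(StringIO.new(io.string)) do |dec|
      while (msg = dec.decode(MyProto::Post))
        puts msg.title
      end
    end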
data/lib/feedx/producer.rb
CHANGED
@@ -6,26 +6,29 @@ module Feedx
   # Produces a relation as an encoded feed to a remote location.
   class Producer
     # See constructor.
-    def self.perform(url, opts
-      new(url, opts, &block).perform
+    def self.perform(url, **opts, &block)
+      new(url, **opts, &block).perform
     end
 
     # @param [String] url the destination URL.
     # @param [Hash] opts options
     # @option opts [Enumerable,ActiveRecord::Relation] :enum relation or enumerator to stream.
     # @option opts [Symbol,Class<Feedx::Format::Abstract>] :format custom formatter. Default: from file extension.
-    # @option opts [Hash] :format_options format encode options. Default: {}.
     # @option opts [Symbol,Class<Feedx::Compression::Abstract>] :compress enable compression. Default: from file extension.
     # @option opts [Time,Proc] :last_modified the last modified time, used to determine if a push is necessary.
     # @yield A block factory to generate the relation or enumerator.
     # @yieldreturn [Enumerable,ActiveRecord::Relation] the relation or enumerator to stream.
-    def initialize(url,
-      @enum =
+    def initialize(url, last_modified: nil, format_options: {}, enum: nil, **opts, &block)
+      @enum = enum || block
       raise ArgumentError, "#{self.class.name}.new expects an :enum option or a block factory" unless @enum
 
-      @stream = Feedx::Stream.new(url, opts)
-      @last_mod =
-      @
+      @stream = Feedx::Stream.new(url, **opts)
+      @last_mod = last_modified
+      @opts = opts.merge(format_options)
+
+      return if format_options.empty? || (defined?(Gem::Deprecate) && Gem::Deprecate.skip)
+
+      warn "WARNING: passing format_options is deprecated; pass the options inline instead (called from #{caller(2..2).first})."
     end
 
     def perform
@@ -43,7 +46,7 @@ module Feedx
 
       @stream.create metadata: { META_LAST_MODIFIED => local_rev.to_s } do |fmt|
         iter = enum.respond_to?(:find_each) ? :find_each : :each
-        enum.send(iter) {|rec| fmt.encode(rec, **@
+        enum.send(iter) {|rec| fmt.encode(rec, **@opts) }
       end
       @stream.blob.info.size
     end
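A minimal sketch of the reworked keyword-argument API, assuming a local file:// destination supported by bfs and records that respond to #to_json; the :enum and :last_modified keywords replace the old options hash, and any extra keywords are forwarded to the encoder instead of being wrapped in :format_options:

    require 'json'
    require 'feedx'

    Post = Struct.new(:title) do
      def to_json(*)
        JSON.generate(title: title)
      end
    end

    records = [Post.new('alpha'), Post.new('beta'), Post.new('gamma')]

    # Compression (gzip) and format (JSON) are detected from the extension.
    Feedx::Producer.perform 'file:///tmp/feed.json.gz',
                            enum: records,
                            last_modified: Time.now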
data/lib/feedx/stream.rb
CHANGED
@@ -10,21 +10,23 @@ module Feedx
     # @param [Hash] opts options
     # @option opts [Symbol,Class<Feedx::Format::Abstract>] :format custom formatter. Default: from file extension.
     # @option opts [Symbol,Class<Feedx::Compression::Abstract>] :compress enable compression. Default: from file extension.
-    def initialize(url,
+    def initialize(url, format: nil, compress: nil, **opts)
       @blob = BFS::Blob.new(url)
-      @format = detect_format(
-      @compress = detect_compress(
+      @format = detect_format(format)
+      @compress = detect_compress(compress)
+      @opts = opts
     end
 
     # Opens the remote for reading.
     # @param [Hash] opts BFS::Blob#open options
     # @yield A block over a formatted stream.
     # @yieldparam [Feedx::Format::Abstract] formatted input stream.
-    def open(opts
-      @blob.open(opts) do |io|
-        @compress.reader(io) do |cio|
-
-
+    def open(**opts)
+      @blob.open(**opts) do |io|
+        @compress.reader(io, **@opts) do |cio|
+          @format.decoder(cio, **@opts) do |fmt|
+            yield fmt
+          end
         end
       end
     end
@@ -33,11 +35,12 @@ module Feedx
     # @param [Hash] opts BFS::Blob#create options
     # @yield A block over a formatted stream.
     # @yieldparam [Feedx::Format::Abstract] formatted output stream.
-    def create(opts
-      @blob.create(opts) do |io|
-        @compress.writer(io) do |cio|
-
-
+    def create(**opts)
+      @blob.create(**opts) do |io|
+        @compress.writer(io, **@opts) do |cio|
+          @format.encoder(cio, **@opts) do |fmt|
+            yield fmt
+          end
         end
       end
     end
@@ -48,13 +51,10 @@ module Feedx
       case val
       when nil
        Feedx::Format.detect(@blob.path)
-      when
-        parent = Feedx::Format::Abstract
-        raise ArgumentError, "Class #{val} must extend #{parent}" unless val < parent
-
-        val
-      else
+      when String, Symbol
         Feedx::Format.resolve(val)
+      else
+        Feedx::Format.validate!(val)
       end
     end
 
@@ -62,13 +62,10 @@ module Feedx
       case val
       when nil
         Feedx::Compression.detect(@blob.path)
-      when
-        parent = Feedx::Compression::Abstract
-        raise ArgumentError, "Class #{val} must extend #{parent}" unless val < parent
-
-        val
-      else
+      when String, Symbol
         Feedx::Compression.resolve(val)
+      else
+        Feedx::Compression.validate!(val)
       end
     end
   end
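A minimal sketch of the keyword-based Stream API after this change, again assuming a local file:// URL; format and compression are detected from the extension (or forced via format:/compress:), and any additional keywords passed to Stream.new are forwarded to the encoder/decoder. The #to_json/#from_json contract below is an assumption based on the bundled specs:

    require 'json'
    require 'feedx'

    # Decode targets are instantiated and then populated via #from_json
    # (assumption based on the spec suite).
    class Post
      attr_accessor :title

      def to_json(*)
        JSON.generate(title: title)
      end

      def from_json(data, **)
        self.title = JSON.parse(data)['title']
        self
      end
    end

    stream = Feedx::Stream.new 'file:///tmp/feed.json.gz', format: :json

    stream.create do |fmt|
      post = Post.new
      post.title = 'hello'
      fmt.encode(post)
    end

    stream.open do |fmt|
      while (post = fmt.decode(Post)) # decode returns nil at EOF
        puts post.title
      end
    end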
data/producer.go
CHANGED
@@ -147,10 +147,7 @@ func (p *Producer) push() (*ProducerPush, error) {
 		return &ProducerPush{Producer: p}, nil
 	}
 
-	writer
-	if err != nil {
-		return nil, err
-	}
+	writer := NewWriter(p.ctx, p.remote, &wopt)
 	defer writer.Discard()
 
 	if err := p.pfn(writer); err != nil {
data/producer_test.go
CHANGED
data/reader_test.go
CHANGED
@@ -5,10 +5,9 @@ import (
 	"io"
 	"io/ioutil"
 
-	"github.com/bsm/feedx"
-
 	"github.com/bsm/bfs"
-
+	"github.com/bsm/feedx"
+	"github.com/bsm/feedx/internal/testdata"
 	. "github.com/onsi/ginkgo"
 	. "github.com/onsi/gomega"
 )
@@ -34,23 +33,23 @@ var _ = Describe("Reader", func() {
 	It("should read", func() {
 		data, err := ioutil.ReadAll(subject)
 		Expect(err).NotTo(HaveOccurred())
-		Expect(len(data)).To(BeNumerically("~",
+		Expect(len(data)).To(BeNumerically("~", 110, 20))
 		Expect(subject.NumRead()).To(Equal(0))
 	})
 
 	It("should decode", func() {
-		var msgs []
+		var msgs []*testdata.MockMessage
 		for {
-			var msg
+			var msg testdata.MockMessage
 			err := subject.Decode(&msg)
 			if err == io.EOF {
 				break
 			}
 			Expect(err).NotTo(HaveOccurred())
-			msgs = append(msgs, msg)
+			msgs = append(msgs, &msg)
 		}
 
-		Expect(msgs).To(
+		Expect(msgs).To(ConsistOf(seed(), seed(), seed()))
 		Expect(subject.NumRead()).To(Equal(3))
 	})
 })
data/spec/feedx/compression/gzip_spec.rb
CHANGED
@@ -3,13 +3,15 @@ require 'spec_helper'
 RSpec.describe Feedx::Compression::Gzip do
   it 'should wrap readers/writers' do
     wio = StringIO.new
-
+    subject.writer(wio) {|w| w.write 'xyz' * 1000 }
     expect(wio.size).to be_within(20).of(40)
+    expect(wio.string.encoding).to eq(Encoding::BINARY)
 
     data = ''
     StringIO.open(wio.string) do |rio|
-
+      subject.reader(rio) {|z| data = z.read }
     end
     expect(data.size).to eq(3000)
+    expect(data.encoding).to eq(Encoding.default_external)
   end
 end
data/spec/feedx/compression/none_spec.rb
CHANGED
@@ -3,12 +3,12 @@ require 'spec_helper'
 RSpec.describe Feedx::Compression::None do
   it 'should wrap readers/writers' do
     wio = StringIO.new
-
+    subject.writer(wio) {|w| w.write 'xyz' * 1000 }
     expect(wio.size).to eq(3000)
 
     data = ''
     StringIO.open(wio.string) do |rio|
-
+      subject.reader(rio) {|z| data = z.read }
     end
     expect(data.size).to eq(3000)
   end
data/spec/feedx/compression_spec.rb
CHANGED
@@ -2,18 +2,18 @@ require 'spec_helper'
 
 RSpec.describe Feedx::Compression do
   it 'should resolve' do
-    expect(described_class.resolve(:gzip)).to
-    expect(described_class.resolve(:gz)).to
-    expect(described_class.resolve(nil)).to
+    expect(described_class.resolve(:gzip)).to be_instance_of(described_class::Gzip)
+    expect(described_class.resolve(:gz)).to be_instance_of(described_class::Gzip)
+    expect(described_class.resolve(nil)).to be_instance_of(described_class::None)
     expect { described_class.resolve(:txt) }.to raise_error(/invalid compression txt/)
   end
 
   it 'should detect' do
-    expect(described_class.detect('path/to/file.jsonz')).to
-    expect(described_class.detect('path/to/file.json.gz')).to
-    expect(described_class.detect('path/to/file.json')).to
-    expect(described_class.detect('path/to/file.pbz')).to
-    expect(described_class.detect('path/to/file.pb.gz')).to
-    expect(described_class.detect('path/to/file.pb')).to
+    expect(described_class.detect('path/to/file.jsonz')).to be_instance_of(described_class::Gzip)
+    expect(described_class.detect('path/to/file.json.gz')).to be_instance_of(described_class::Gzip)
+    expect(described_class.detect('path/to/file.json')).to be_instance_of(described_class::None)
+    expect(described_class.detect('path/to/file.pbz')).to be_instance_of(described_class::Gzip)
+    expect(described_class.detect('path/to/file.pb.gz')).to be_instance_of(described_class::Gzip)
+    expect(described_class.detect('path/to/file.pb')).to be_instance_of(described_class::None)
   end
 end
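As the updated expectations show, resolve and detect now return configured instances; a small sketch of the behaviour these specs (and the format specs further down) describe:

    require 'feedx'

    Feedx::Compression.resolve(:gz).class            # => Feedx::Compression::Gzip
    Feedx::Compression.detect('feed.json.gz').class  # => Feedx::Compression::Gzip
    Feedx::Compression.detect('feed.pb').class       # => Feedx::Compression::None

    Feedx::Format.resolve(:json).class               # => Feedx::Format::JSON
    Feedx::Format.detect('feed.pbz').class           # => Feedx::Format::Protobuf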
data/spec/feedx/consumer_spec.rb
CHANGED
@@ -36,10 +36,13 @@ RSpec.describe Feedx::Consumer do
 
   private
 
-  def mock_produce!(
+  def mock_produce!(enum: mock_enum, **opts)
     url = 'mock:///dir/file.json'
-
-    Feedx::Producer.perform url, opts
+    Feedx::Producer.perform url, enum: enum, **opts
     url
   end
+
+  def mock_enum
+    %w[x y z].map {|t| Feedx::TestCase::Model.new(t) } * 100
+  end
 end
data/spec/feedx/format/abstract_spec.rb
CHANGED
@@ -1,17 +1,20 @@
 require 'spec_helper'
 
 RSpec.describe Feedx::Format::Abstract do
-  subject { Feedx::Format::JSON.new
+  subject { Feedx::Format::JSON.new }
   let(:wio) { StringIO.new }
+  let(:rio) { StringIO.open(wio.string) }
 
   it 'should decode each' do
-    subject.
-
-
-
-
-
-
+    subject.encoder wio do |enc|
+      enc.encode(Feedx::TestCase::Model.new('X'))
+      enc.encode(Feedx::TestCase::Model.new('Y'))
+      enc.encode(Feedx::TestCase::Message.new(title: 'Z'))
+    end
+
+    subject.decoder rio do |dec|
+      acc = dec.decode_each(Feedx::TestCase::Model).to_a
+      expect(acc.map(&:title)).to eq(%w[X Y Z])
     end
   end
 end
data/spec/feedx/format/json_spec.rb
CHANGED
@@ -1,26 +1,27 @@
 require 'spec_helper'
 
 RSpec.describe Feedx::Format::JSON do
-  subject { described_class.new(wio) }
   let(:wio) { StringIO.new }
+  let(:rio) { StringIO.open(wio.string) }
 
   it 'should encode/decode' do
-    subject.
-
-
+    subject.encoder wio do |enc|
+      enc.encode(Feedx::TestCase::Model.new('X'))
+      enc.encode(Feedx::TestCase::Model.new('Y'))
+      enc.encode(Feedx::TestCase::Message.new(title: 'Z'))
+    end
     expect(wio.string.lines).to eq [
       %({"title":"X","updated_at":"2018-01-05 11:25:15 UTC"}\n),
       %({"title":"Y","updated_at":"2018-01-05 11:25:15 UTC"}\n),
       %({"title":"Z"}\n),
     ]
 
-
-
-    expect(
-    expect(
-    expect(
-    expect(
-    expect(fmt).to be_eof
+    subject.decoder rio do |dec|
+      expect(dec.decode(Feedx::TestCase::Model)).to eq(Feedx::TestCase::Model.new('X'))
+      expect(dec.decode(Feedx::TestCase::Model.new('O'))).to eq(Feedx::TestCase::Model.new('Y'))
+      expect(dec.decode(Feedx::TestCase::Model)).to eq(Feedx::TestCase::Model.new('Z'))
+      expect(dec.decode(Feedx::TestCase::Model)).to be_nil
+      expect(dec).to be_eof
+    end
   end
 end
data/spec/feedx/format/parquet_spec.rb
ADDED
@@ -0,0 +1,30 @@
+require 'spec_helper'
+
+RSpec.describe Feedx::Format::Parquet do
+  let(:wio) { StringIO.new }
+  let(:rio) { StringIO.open(wio.string) }
+
+  let(:schema) do
+    Arrow::Schema.new([
+      Arrow::Field.new('title', :string),
+      Arrow::Field.new('updated_at', type: :timestamp, unit: :second),
+    ])
+  end
+
+  it 'should encode/decode' do
+    subject.encoder wio, schema: schema, batch_size: 2 do |enc|
+      enc.encode(Feedx::TestCase::Model.new('X'))
+      enc.encode(Feedx::TestCase::Model.new('Y'))
+      enc.encode(Feedx::TestCase::Model.new('Z'))
+    end
+    expect(wio.string.bytesize).to be_within(100).of(1100)
+
+    subject.decoder rio do |dec|
+      expect(dec.decode(Feedx::TestCase::Model)).to eq(Feedx::TestCase::Model.new('X'))
+      expect(dec.decode(Feedx::TestCase::Model)).to eq(Feedx::TestCase::Model.new('Y'))
+      expect(dec.decode(Feedx::TestCase::Model)).to eq(Feedx::TestCase::Model.new('Z'))
+      expect(dec.decode(Feedx::TestCase::Model)).to be_nil
+      expect(dec).to be_eof
+    end
+  end
+end
data/spec/feedx/format/protobuf_spec.rb
CHANGED
@@ -1,22 +1,23 @@
 require 'spec_helper'
 
 RSpec.describe Feedx::Format::Protobuf do
-  subject { described_class.new(wio) }
   let(:wio) { StringIO.new }
+  let(:rio) { StringIO.open(wio.string) }
 
   it 'should encode/decode' do
-    subject.
-
-
+    subject.encoder wio do |enc|
+      enc.encode(Feedx::TestCase::Model.new('X'))
+      enc.encode(Feedx::TestCase::Model.new('Y'))
+      enc.encode(Feedx::TestCase::Message.new(title: 'Z'))
+    end
     expect(wio.string.bytes).to eq([3, 10, 1, 88] + [3, 10, 1, 89] + [3, 10, 1, 90])
 
-
-
-    expect(
-    expect(
-    expect(
-    expect(
-    expect(fmt).to be_eof
+    subject.decoder rio do |dec|
+      expect(dec.decode(Feedx::TestCase::Message)).to eq(Feedx::TestCase::Message.new(title: 'X'))
+      expect(dec.decode(Feedx::TestCase::Message)).to eq(Feedx::TestCase::Message.new(title: 'Y'))
+      expect(dec.decode(Feedx::TestCase::Message)).to eq(Feedx::TestCase::Message.new(title: 'Z'))
+      expect(dec.decode(Feedx::TestCase::Message)).to be_nil
+      expect(dec).to be_eof
+    end
   end
 end
data/spec/feedx/format_spec.rb
CHANGED
@@ -2,18 +2,18 @@ require 'spec_helper'
 
 RSpec.describe Feedx::Format do
   it 'should resolve' do
-    expect(described_class.resolve(:json)).to
-    expect(described_class.resolve(:pb)).to
+    expect(described_class.resolve(:json)).to be_instance_of(described_class::JSON)
+    expect(described_class.resolve(:pb)).to be_instance_of(described_class::Protobuf)
     expect { described_class.resolve(:txt) }.to raise_error(/invalid format txt/)
   end
 
   it 'should detect' do
-    expect(described_class.detect('path/to/file.json')).to
-    expect(described_class.detect('path/to/file.jsonz')).to
-    expect(described_class.detect('path/to/file.json.gz')).to
-    expect(described_class.detect('path/to/file.pb')).to
-    expect(described_class.detect('path/to/file.pbz')).to
-    expect(described_class.detect('path/to/file.pb.z')).to
+    expect(described_class.detect('path/to/file.json')).to be_instance_of(described_class::JSON)
+    expect(described_class.detect('path/to/file.jsonz')).to be_instance_of(described_class::JSON)
+    expect(described_class.detect('path/to/file.json.gz')).to be_instance_of(described_class::JSON)
+    expect(described_class.detect('path/to/file.pb')).to be_instance_of(described_class::Protobuf)
+    expect(described_class.detect('path/to/file.pbz')).to be_instance_of(described_class::Protobuf)
+    expect(described_class.detect('path/to/file.pb.z')).to be_instance_of(described_class::Protobuf)
     expect do
       described_class.detect('path/to/file.txt')
     end.to raise_error(/unable to detect format/)