feedx 0.6.3 → 0.7.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 1f4710e181ac3c987f7b02ebc976cf70bbc4c9826139823bca58620d94aa994a
-  data.tar.gz: 273951be43e9e8ac3473bfccf428cf2860c34dd4c0869b86f726446c477c2e7f
+  metadata.gz: b306ae446c9cae375b6ad538cc41d88a621bbb5cb59d9adf5aa114326567b2ca
+  data.tar.gz: 218b0aabdf6d1dda76e520b226f7d211a27ba14421db4ed8e34a9f59451e1e20
 SHA512:
-  metadata.gz: 862eeb21fda2add182866fdb686adab45091d585a9880565b7e12c2ef3240ecc3982ec09e0370f1e4734d57ac44aa97cd5fcd702215c84d9534e5eab5ba9b648
-  data.tar.gz: fc10bdbfea9922ca29d3d0967ab8a1aa14330b914b1e07972ca61e040620d2ccdcd710e0eb63860001feb3c366097652b825fd4660e847449edfeecdbd3232e0
+  metadata.gz: 246d39971aa5d6fe96267b95fba832f90d24ae8c8a16c4ea949fb3bca3f8777fd036e311f8d5f97b7cfed301b3782c2035e4e4ba1b62b7373e1f1e7d090ab00d
+  data.tar.gz: a151e2c9cde8820505912ceca55afa58f3a44cb25581eda9b66f830e057fe1254093c93ad5be77848fa8e3b0033d14885cb4a412bb4308bf0109b2bb57a47771
Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
 PATH
   remote: .
   specs:
-    feedx (0.6.3)
+    feedx (0.7.0)
       bfs (>= 0.3.4)
 
 GEM
@@ -32,7 +32,7 @@ GEM
       diff-lcs (>= 1.2.0, < 2.0)
       rspec-support (~> 3.8.0)
     rspec-support (3.8.0)
-    rubocop (0.69.0)
+    rubocop (0.70.0)
       jaro_winkler (~> 1.5.1)
       parallel (~> 1.10)
       parser (>= 2.6)
feedx.gemspec CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'feedx'
-  s.version = '0.6.3'
+  s.version = '0.7.0'
   s.authors = ['Black Square Media Ltd']
   s.email = ['info@blacksquaremedia.com']
   s.summary = %(Exchange data between components via feeds)
lib/feedx.rb CHANGED
@@ -3,6 +3,7 @@ module Feedx
 
   autoload :Compression, 'feedx/compression'
   autoload :Format, 'feedx/format'
+  autoload :Stream, 'feedx/stream'
   autoload :Producer, 'feedx/producer'
   autoload :Pusher, 'feedx/pusher'
   autoload :TaskState, 'feedx/task_state'
lib/feedx/compression/abstract.rb CHANGED
@@ -1,5 +1,9 @@
 class Feedx::Compression::Abstract
-  def self.wrap(_io, &_block)
+  def self.reader(_io, &_block)
+    raise 'Not implemented'
+  end
+
+  def self.writer(_io, &_block)
     raise 'Not implemented'
   end
 end
lib/feedx/compression/gzip.rb CHANGED
@@ -1,7 +1,11 @@
 require 'zlib'
 
 class Feedx::Compression::Gzip < Feedx::Compression::Abstract
-  def self.wrap(io, &block)
+  def self.reader(io, &block)
+    Zlib::GzipReader.wrap(io, &block)
+  end
+
+  def self.writer(io, &block)
     Zlib::GzipWriter.wrap(io, &block)
   end
 end
lib/feedx/compression/none.rb CHANGED
@@ -1,5 +1,9 @@
 class Feedx::Compression::None < Feedx::Compression::Abstract
-  def self.wrap(io, &block)
+  def self.reader(io, &block)
+    block.call(io)
+  end
+
+  def self.writer(io, &block)
     block.call(io)
   end
 end
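The single wrap entry point is gone; compression codecs now expose separate reader and writer hooks. A minimal round trip through the new class methods, using StringIO in place of a real blob IO (illustrative only, not part of the gem):

require 'stringio'
require 'feedx'

# Compress into an in-memory buffer via the new writer hook ...
buf = StringIO.new
Feedx::Compression::Gzip.writer(buf) {|w| w.write 'hello feedx' }

# ... then inflate it again through the matching reader hook.
StringIO.open(buf.string) do |io|
  Feedx::Compression::Gzip.reader(io) {|r| puts r.read } # => hello feedx
end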
lib/feedx/format/abstract.rb CHANGED
@@ -3,7 +3,15 @@ class Feedx::Format::Abstract
     @io = io
   end
 
-  def write(_msg)
+  def eof?
+    @io.eof?
+  end
+
+  def decode(_klass)
+    raise 'Not implemented'
+  end
+
+  def encode(_msg)
     raise 'Not implemented'
   end
 end
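Formats are now bidirectional: subclasses implement decode and encode (plus the inherited eof?) instead of a single write. A sketch of a hypothetical line-per-record format built on the new API (the LineFormat class below is illustrative and not shipped with the gem):

require 'stringio'
require 'feedx'

# Hypothetical format: one plain-text record per line.
class LineFormat < Feedx::Format::Abstract
  def decode(_klass)
    line = @io.gets
    line&.chomp
  end

  def encode(msg)
    @io.write "#{msg}\n"
  end
end

io  = StringIO.new
fmt = LineFormat.new(io)
fmt.encode('hello')

io.rewind
fmt.decode(String) # => "hello"
fmt.eof?           # => true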
lib/feedx/format/json.rb CHANGED
@@ -1,7 +1,16 @@
 require 'json'
 
 class Feedx::Format::JSON < Feedx::Format::Abstract
-  def write(msg)
+  def decode(obj)
+    line = @io.gets
+    return unless line
+
+    obj = obj.allocate if obj.is_a?(Class)
+    obj.from_json(line)
+    obj
+  end
+
+  def encode(msg)
     @io.write msg.to_json << "\n"
   end
 end
lib/feedx/format/protobuf.rb CHANGED
@@ -5,7 +5,12 @@ class Feedx::Format::Protobuf < Feedx::Format::Abstract
     super PBIO::Delimited.new(io)
   end
 
-  def write(msg)
-    @io.write msg.to_pb
+  def decode(klass)
+    @io.read(klass)
+  end
+
+  def encode(msg)
+    msg = msg.to_pb if msg.respond_to?(:to_pb)
+    @io.write msg
   end
 end
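Unlike the old write, encode now accepts either a protobuf message directly or any object that responds to to_pb. A brief write-side sketch; Feedx::TestCase::Message and Feedx::TestCase::Model are the spec helpers defined at the bottom of this diff and are used here purely for illustration:

require 'stringio'
require 'feedx'

io  = StringIO.new
fmt = Feedx::Format::Protobuf.new(io)

fmt.encode(Feedx::TestCase::Message.new(title: 'X')) # a raw protobuf message
fmt.encode(Feedx::TestCase::Model.new('Y'))          # duck-typed via #to_pb

io.string.bytes # => [3, 10, 1, 88, 3, 10, 1, 89] (length-delimited frames)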
lib/feedx/producer.rb CHANGED
@@ -22,9 +22,7 @@ module Feedx
       @enum = opts[:enum] || block
       raise ArgumentError, "#{self.class.name}.new expects an :enum option or a block factory" unless @enum
 
-      @blob = BFS::Blob.new(url)
-      @format = detect_format(opts[:format])
-      @compress = detect_compress(opts[:compress])
+      @stream = Feedx::Stream.new(url, opts)
       @last_mod = opts[:last_modified]
     end
 
@@ -34,51 +32,16 @@ module Feedx
       current = (last_mod.to_f * 1000).floor
 
       begin
-        previous = @blob.info.metadata[META_LAST_MODIFIED].to_i
+        previous = @stream.blob.info.metadata[META_LAST_MODIFIED].to_i
         return -1 unless current > previous
       rescue BFS::FileNotFound # rubocop:disable Lint/HandleExceptions
       end if current.positive?
 
-      @blob.create metadata: { META_LAST_MODIFIED => current.to_s } do |io|
-        @compress.wrap(io) {|w| write_all(enum, w) }
+      @stream.create metadata: { META_LAST_MODIFIED => current.to_s } do |fmt|
+        iter = enum.respond_to?(:find_each) ? :find_each : :each
+        enum.send(iter) {|rec| fmt.encode(rec) }
       end
-      @blob.info.size
-    end
-
-    private
-
-    def detect_format(val)
-      case val
-      when nil
-        Feedx::Format.detect(@blob.path)
-      when Class
-        parent = Feedx::Format::Abstract
-        raise ArgumentError, "Class #{val} must extend #{parent}" unless val < parent
-
-        val
-      else
-        Feedx::Format.resolve(val)
-      end
-    end
-
-    def detect_compress(val)
-      case val
-      when nil
-        Feedx::Compression.detect(@blob.path)
-      when Class
-        parent = Feedx::Compression::Abstract
-        raise ArgumentError, "Class #{val} must extend #{parent}" unless val < parent
-
-        val
-      else
-        Feedx::Compression.resolve(val)
-      end
-    end
-
-    def write_all(enum, io)
-      stream = @format.new(io)
-      iterator = enum.respond_to?(:find_each) ? :find_each : :each
-      enum.send(iterator) {|rec| stream.write(rec) }
+      @stream.blob.info.size
     end
   end
 end
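The producer keeps its public behaviour but delegates blob access and format/compression detection to the new Feedx::Stream. A minimal invocation sketch; the perform method name on the write path and the file:// URL are assumptions based on earlier releases, since neither appears in this hunk:

require 'feedx'

records = [{ 'title' => 'X' }, { 'title' => 'Y' }]

# Format (JSON) and gzip compression are detected from the extension,
# now inside Feedx::Stream rather than in the producer itself.
producer = Feedx::Producer.new('file:///tmp/feed.json.gz', enum: records)
producer.perform # => number of bytes written to the blob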
lib/feedx/stream.rb ADDED
@@ -0,0 +1,75 @@
+require 'bfs'
+require 'feedx'
+
+module Feedx
+  # Abstract stream handler around a remote blob.
+  class Stream
+    attr_reader :blob
+
+    # @param [String] url the blob URL.
+    # @param [Hash] opts options
+    # @option opts [Symbol,Class<Feedx::Format::Abstract>] :format custom formatter. Default: from file extension.
+    # @option opts [Symbol,Class<Feedx::Compression::Abstract>] :compress enable compression. Default: from file extension.
+    def initialize(url, opts={})
+      @blob = BFS::Blob.new(url)
+      @format = detect_format(opts[:format])
+      @compress = detect_compress(opts[:compress])
+    end
+
+    # Opens the remote for reading.
+    # @param [Hash] opts BFS::Blob#open options
+    # @yield A block over a formatted stream.
+    # @yieldparam [Feedx::Format::Abstract] formatted input stream.
+    def open(opts={})
+      @blob.open(opts) do |io|
+        @compress.reader(io) do |cio|
+          fmt = @format.new(cio)
+          yield fmt
+        end
+      end
+    end
+
+    # Opens the remote for writing.
+    # @param [Hash] opts BFS::Blob#create options
+    # @yield A block over a formatted stream.
+    # @yieldparam [Feedx::Format::Abstract] formatted output stream.
+    def create(opts={})
+      @blob.create(opts) do |io|
+        @compress.writer(io) do |cio|
+          fmt = @format.new(cio)
+          yield fmt
+        end
+      end
+    end
+
+    private
+
+    def detect_format(val)
+      case val
+      when nil
+        Feedx::Format.detect(@blob.path)
+      when Class
+        parent = Feedx::Format::Abstract
+        raise ArgumentError, "Class #{val} must extend #{parent}" unless val < parent
+
+        val
+      else
+        Feedx::Format.resolve(val)
+      end
+    end
+
+    def detect_compress(val)
+      case val
+      when nil
+        Feedx::Compression.detect(@blob.path)
+      when Class
+        parent = Feedx::Compression::Abstract
+        raise ArgumentError, "Class #{val} must extend #{parent}" unless val < parent
+
+        val
+      else
+        Feedx::Compression.resolve(val)
+      end
+    end
+  end
+end
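Feedx::Stream gives producers and consumers a symmetric create/open API over the same blob. A small end-to-end sketch; the Entry class and the file:// URL are illustrative, any BFS-resolvable URL and any object satisfying the format contract will do:

require 'json'
require 'feedx'

# Minimal record type satisfying the JSON format contract, mirroring the
# Feedx::TestCase::Model helper used in the specs below.
class Entry
  attr_accessor :title

  def initialize(title = nil)
    @title = title
  end

  def to_json(*)
    ::JSON.dump(title: @title)
  end

  def from_json(data, *)
    @title = ::JSON.parse(data)['title']
  end
end

# Format (JSON) and compression (gzip) are detected from the '.json.gz' extension.
stream = Feedx::Stream.new('file:///tmp/feed.json.gz')

stream.create do |fmt|
  fmt.encode(Entry.new('hello'))
  fmt.encode(Entry.new('world'))
end

stream.open do |fmt|
  puts fmt.decode(Entry).title until fmt.eof? # => hello, world
end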
spec/feedx/compression/gzip_spec.rb CHANGED
@@ -1,9 +1,15 @@
 require 'spec_helper'
 
 RSpec.describe Feedx::Compression::Gzip do
-  it 'should wrap' do
-    io = StringIO.new
-    described_class.wrap(io) {|w| w.write 'xyz' * 1000 }
-    expect(io.size).to be_within(20).of(40)
+  it 'should wrap readers/writers' do
+    wio = StringIO.new
+    described_class.writer(wio) {|w| w.write 'xyz' * 1000 }
+    expect(wio.size).to be_within(20).of(40)
+
+    data = ''
+    StringIO.open(wio.string) do |rio|
+      described_class.reader(rio) {|z| data = z.read }
+    end
+    expect(data.size).to eq(3000)
   end
 end
spec/feedx/compression/none_spec.rb CHANGED
@@ -1,9 +1,15 @@
 require 'spec_helper'
 
 RSpec.describe Feedx::Compression::None do
-  it 'should wrap' do
-    io = StringIO.new
-    described_class.wrap(io) {|w| w.write 'xyz' * 1000 }
-    expect(io.size).to eq(3000)
+  it 'should wrap readers/writers' do
+    wio = StringIO.new
+    described_class.writer(wio) {|w| w.write 'xyz' * 1000 }
+    expect(wio.size).to eq(3000)
+
+    data = ''
+    StringIO.open(wio.string) do |rio|
+      described_class.reader(rio) {|z| data = z.read }
+    end
+    expect(data.size).to eq(3000)
   end
 end
spec/feedx/format/json_spec.rb CHANGED
@@ -1,12 +1,26 @@
 require 'spec_helper'
 
 RSpec.describe Feedx::Format::JSON do
-  subject { described_class.new(io) }
-  let(:io) { StringIO.new }
+  subject { described_class.new(wio) }
+  let(:wio) { StringIO.new }
 
-  it 'should write' do
-    subject.write(a: 1, b: 2)
-    subject.write(c: ['x'], d: true)
-    expect(io.string).to eq %({"a":1,"b":2}\n{"c":["x"],"d":true}\n)
+  it 'should encode/decode' do
+    subject.encode(Feedx::TestCase::Model.new('X'))
+    subject.encode(Feedx::TestCase::Model.new('Y'))
+    subject.encode(Feedx::TestCase::Message.new(title: 'Z'))
+    expect(wio.string.lines).to eq [
+      %({"title":"X","updated_at":"2018-01-05 11:25:15 UTC"}\n),
+      %({"title":"Y","updated_at":"2018-01-05 11:25:15 UTC"}\n),
+      %({"title":"Z"}\n),
+    ]
+
+    StringIO.open(wio.string) do |rio|
+      fmt = described_class.new(rio)
+      expect(fmt.decode(Feedx::TestCase::Model)).to eq(Feedx::TestCase::Model.new('X'))
+      expect(fmt.decode(Feedx::TestCase::Model.new('O'))).to eq(Feedx::TestCase::Model.new('Y'))
+      expect(fmt.decode(Feedx::TestCase::Model)).to eq(Feedx::TestCase::Model.new('Z'))
+      expect(fmt.decode(Feedx::TestCase::Model)).to be_nil
+      expect(fmt).to be_eof
+    end
   end
 end
spec/feedx/format/protobuf_spec.rb CHANGED
@@ -1,20 +1,22 @@
 require 'spec_helper'
 
 RSpec.describe Feedx::Format::Protobuf do
-  subject { described_class.new(io) }
-  let(:io) { StringIO.new }
+  subject { described_class.new(wio) }
+  let(:wio) { StringIO.new }
 
-  let(:model) do
-    Class.new Struct.new(:title) do
-      def to_pb
-        Feedx::TestCase::Message.new title: title
-      end
-    end
-  end
+  it 'should encode/decode' do
+    subject.encode(Feedx::TestCase::Model.new('X'))
+    subject.encode(Feedx::TestCase::Model.new('Y'))
+    subject.encode(Feedx::TestCase::Message.new(title: 'Z'))
+    expect(wio.string.bytes).to eq([3, 10, 1, 88] + [3, 10, 1, 89] + [3, 10, 1, 90])
 
-  it 'should write' do
-    subject.write(model.new('X'))
-    subject.write(model.new('Y'))
-    expect(io.string.bytes).to eq([3, 10, 1, 88] + [3, 10, 1, 89])
+    StringIO.open(wio.string) do |rio|
+      fmt = described_class.new(rio)
+      expect(fmt.decode(Feedx::TestCase::Message)).to eq(Feedx::TestCase::Message.new(title: 'X'))
+      expect(fmt.decode(Feedx::TestCase::Message)).to eq(Feedx::TestCase::Message.new(title: 'Y'))
+      expect(fmt.decode(Feedx::TestCase::Message)).to eq(Feedx::TestCase::Message.new(title: 'Z'))
+      expect(fmt.decode(Feedx::TestCase::Message)).to be_nil
+      expect(fmt).to be_eof
+    end
   end
 end
spec/feedx/producer_spec.rb CHANGED
@@ -1,20 +1,8 @@
 require 'spec_helper'
 
 RSpec.describe Feedx::Producer do
-  let :model do
-    Class.new Struct.new(:title) do
-      def to_pb
-        Feedx::TestCase::Message.new title: title
-      end
-
-      def to_json(*)
-        ::JSON.dump(title: title, updated_at: Time.at(1515151515).utc)
-      end
-    end
-  end
-
   let :enumerable do
-    %w[x y z].map {|t| model.new(t) } * 100
+    %w[x y z].map {|t| Feedx::TestCase::Model.new(t) } * 100
   end
 
   let(:bucket) { BFS::Bucket::InMem.new }
spec/feedx/stream_spec.rb ADDED
@@ -0,0 +1,72 @@
+require 'spec_helper'
+
+RSpec.describe Feedx::Stream do
+  let(:bucket) { BFS::Bucket::InMem.new }
+  before { allow(BFS).to receive(:resolve).and_return(bucket) }
+
+  subject { described_class.new('mock:///dir/file.json') }
+  let(:compressed) { described_class.new('mock:///dir/file.json.gz') }
+
+  it 'should reject invalid inputs' do
+    expect do
+      described_class.new('mock:///dir/file.txt')
+    end.to raise_error(/unable to detect format/)
+  end
+
+  it 'should encode' do
+    subject.create do |s|
+      s.encode(Feedx::TestCase::Model.new('X'))
+      s.encode(Feedx::TestCase::Model.new('Y'))
+    end
+
+    expect(bucket.open('dir/file.json').read).to eq(
+      %({"title":"X","updated_at":"2018-01-05 11:25:15 UTC"}\n) +
+      %({"title":"Y","updated_at":"2018-01-05 11:25:15 UTC"}\n),
+    )
+  end
+
+  it 'should encode compressed' do
+    compressed.create do |s|
+      100.times do
+        s.encode(Feedx::TestCase::Model.new('X'))
+      end
+    end
+
+    expect(bucket.info('dir/file.json.gz').size).to be_within(10).of(108)
+  end
+
+  it 'should encode with create options' do
+    subject.create metadata: { 'x' => '5' } do |s|
+      s.encode(Feedx::TestCase::Model.new('X'))
+    end
+    expect(bucket.info('dir/file.json').metadata).to eq('x' => '5')
+  end
+
+  it 'should decode' do
+    subject.create do |s|
+      s.encode(Feedx::TestCase::Model.new('X'))
+      s.encode(Feedx::TestCase::Model.new('Y'))
+    end
+
+    subject.open do |s|
+      expect(s.decode(Feedx::TestCase::Model)).to eq(Feedx::TestCase::Model.new('X'))
+      expect(s.decode(Feedx::TestCase::Model)).to eq(Feedx::TestCase::Model.new('Y'))
+      expect(s.decode(Feedx::TestCase::Model)).to be_nil
+      expect(s).to be_eof
+    end
+  end
+
+  it 'should decode compressed' do
+    compressed.create do |s|
+      s.encode(Feedx::TestCase::Model.new('X'))
+      s.encode(Feedx::TestCase::Model.new('Y'))
+    end
+
+    compressed.open do |s|
+      expect(s.decode(Feedx::TestCase::Model)).to eq(Feedx::TestCase::Model.new('X'))
+      expect(s.decode(Feedx::TestCase::Model)).to eq(Feedx::TestCase::Model.new('Y'))
+      expect(s.decode(Feedx::TestCase::Model)).to be_nil
+      expect(s).to be_eof
+    end
+  end
+end
spec/spec_helper.rb CHANGED
@@ -11,5 +11,31 @@ end
 module Feedx
   module TestCase
     Message = Google::Protobuf::DescriptorPool.generated_pool.lookup('com.blacksquaremedia.feedx.testcase.Message').msgclass
+
+    class Model
+      attr_reader :title
+
+      def initialize(title)
+        @title = title
+      end
+
+      def to_pb
+        Feedx::TestCase::Message.new title: @title
+      end
+
+      def ==(other)
+        title == other.title
+      end
+      alias eql? ==
+
+      def from_json(data, *)
+        hash = ::JSON.parse(data)
+        @title = hash['title'] if hash.is_a?(Hash)
+      end
+
+      def to_json(*)
+        ::JSON.dump(title: @title, updated_at: Time.at(1515151515).utc)
+      end
+    end
   end
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: feedx
 version: !ruby/object:Gem::Version
-  version: 0.6.3
+  version: 0.7.0
 platform: ruby
 authors:
 - Black Square Media Ltd
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2019-05-20 00:00:00.000000000 Z
+date: 2019-05-23 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bfs
@@ -147,6 +147,7 @@ files:
 - lib/feedx/format/protobuf.rb
 - lib/feedx/producer.rb
 - lib/feedx/pusher.rb
+- lib/feedx/stream.rb
 - producer.go
 - producer_test.go
 - reader.go
@@ -158,6 +159,7 @@ files:
 - spec/feedx/format/protobuf_spec.rb
 - spec/feedx/format_spec.rb
 - spec/feedx/producer_spec.rb
+- spec/feedx/stream_spec.rb
 - spec/spec_helper.rb
 - writer.go
 - writer_test.go
@@ -192,4 +194,5 @@ test_files:
 - spec/feedx/format/protobuf_spec.rb
 - spec/feedx/format_spec.rb
 - spec/feedx/producer_spec.rb
+- spec/feedx/stream_spec.rb
 - spec/spec_helper.rb