oplogjam 0.1.0
- checksums.yaml +7 -0
- data/LICENSE +21 -0
- data/README.md +738 -0
- data/lib/oplogjam.rb +69 -0
- data/lib/oplogjam/apply_ops.rb +49 -0
- data/lib/oplogjam/command.rb +41 -0
- data/lib/oplogjam/delete.rb +50 -0
- data/lib/oplogjam/insert.rb +62 -0
- data/lib/oplogjam/noop.rb +39 -0
- data/lib/oplogjam/operation.rb +31 -0
- data/lib/oplogjam/operators.rb +2 -0
- data/lib/oplogjam/operators/assignment.rb +12 -0
- data/lib/oplogjam/operators/field_assignment.rb +11 -0
- data/lib/oplogjam/operators/index_assignment.rb +39 -0
- data/lib/oplogjam/operators/intermediate.rb +47 -0
- data/lib/oplogjam/operators/intermediate_field.rb +13 -0
- data/lib/oplogjam/operators/intermediate_index.rb +43 -0
- data/lib/oplogjam/operators/set.rb +100 -0
- data/lib/oplogjam/operators/unset.rb +40 -0
- data/lib/oplogjam/operators/unset_field.rb +15 -0
- data/lib/oplogjam/operators/unset_index.rb +39 -0
- data/lib/oplogjam/oplog.rb +21 -0
- data/lib/oplogjam/sanitizer.rb +19 -0
- data/lib/oplogjam/schema.rb +44 -0
- data/lib/oplogjam/update.rb +79 -0
- data/spec/oplogjam/apply_ops_spec.rb +174 -0
- data/spec/oplogjam/command_spec.rb +103 -0
- data/spec/oplogjam/delete_spec.rb +163 -0
- data/spec/oplogjam/insert_spec.rb +289 -0
- data/spec/oplogjam/noop_spec.rb +123 -0
- data/spec/oplogjam/operation_spec.rb +110 -0
- data/spec/oplogjam/operators/set_spec.rb +53 -0
- data/spec/oplogjam/sanitizer_spec.rb +35 -0
- data/spec/oplogjam/update_spec.rb +406 -0
- data/spec/spec_helper.rb +19 -0
- metadata +199 -0
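
The diffs below add the gem's spec suite. Read together, they describe a small pipeline: raw BSON oplog entries are wrapped by Oplogjam::Operation.from, and the resulting operations are applied to PostgreSQL JSONB tables through Sequel (see Update#apply further down). As a rough orientation, a hypothetical consumer might look like the sketch below; the connection URL, the :pg_json extension and the namespace-to-table mapping are borrowed from the specs, while the oplog_entries source and the assumption that every operation type responds to apply are illustrative only.

require 'bson'
require 'oplogjam'
require 'sequel'

# Hypothetical wiring, based only on the APIs exercised in the specs below.
postgres = Sequel.connect('postgres:///oplogjam_test') # same URL the specs use
postgres.extension :pg_json
mapping = { 'foo.bar' => postgres.from(:bar) }         # MongoDB namespace => Sequel dataset

oplog_entries = [] # stand-in for a stream of BSON::Document oplog rows

oplog_entries.each do |bson|
  operation = Oplogjam::Operation.from(bson) # dispatches to Noop, Insert, Update, Delete, ...
  operation.apply(mapping)                   # assumption: applies the change to the mapped table
end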
@@ -0,0 +1,123 @@ data/spec/oplogjam/noop_spec.rb
require 'bson'
require 'oplogjam'

module Oplogjam
  RSpec.describe Noop do
    describe '.from' do
      it 'converts a BSON no-op into a Noop' do
        bson = BSON::Document.new(
          ts: BSON::Timestamp.new(1_479_419_535, 1),
          h: -2_135_725_856_567_446_411,
          v: 2,
          op: 'n',
          ns: '',
          o: BSON::Document.new(msg: 'initiating set')
        )

        expect(described_class.from(bson)).to be_a(described_class)
      end

      it 'raises an error if the message is missing' do
        bson = BSON::Document.new(
          ts: BSON::Timestamp.new(1_479_419_535, 1),
          h: -2_135_725_856_567_446_411,
          v: 2,
          op: 'n',
          ns: '',
          o: BSON::Document.new(foo: 'bar')
        )

        expect { described_class.from(bson) }.to raise_error(InvalidNoop)
      end
    end

    describe '#message' do
      it 'returns the message' do
        bson = BSON::Document.new(
          ts: BSON::Timestamp.new(1_479_419_535, 1),
          h: -2_135_725_856_567_446_411,
          v: 2,
          op: 'n',
          ns: '',
          o: BSON::Document.new(msg: 'initiating set')
        )
        noop = described_class.from(bson)

        expect(noop.message).to eq('initiating set')
      end
    end

    describe '#id' do
      it 'returns a unique identifier for the operation' do
        bson = BSON::Document.new(
          ts: BSON::Timestamp.new(1_479_419_535, 1),
          h: -2_135_725_856_567_446_411,
          v: 2,
          op: 'n',
          ns: '',
          o: BSON::Document.new(msg: 'initiating set')
        )
        noop = described_class.from(bson)

        expect(noop.id).to eq(-2_135_725_856_567_446_411)
      end
    end

    describe '#timestamp' do
      it 'returns the time of the operation as a Time' do
        bson = BSON::Document.new(
          ts: BSON::Timestamp.new(1_479_419_535, 1),
          h: -2_135_725_856_567_446_411,
          v: 2,
          op: 'n',
          ns: '',
          o: BSON::Document.new(msg: 'initiating set')
        )
        noop = described_class.from(bson)

        expect(noop.timestamp).to eq(Time.at(1_479_419_535, 1))
      end
    end

    describe '#ts' do
      it 'returns the raw underlying BSON timestamp' do
        bson = BSON::Document.new(
          ts: BSON::Timestamp.new(1_479_419_535, 1),
          h: -2_135_725_856_567_446_411,
          v: 2,
          op: 'n',
          ns: '',
          o: BSON::Document.new(msg: 'initiating set')
        )
        noop = described_class.from(bson)

        expect(noop.ts).to eq(BSON::Timestamp.new(1_479_419_535, 1))
      end
    end

    describe '#==' do
      it 'is equal to another noop with the same ID' do
        bson = BSON::Document.new(
          ts: BSON::Timestamp.new(1_479_419_535, 1),
          h: -2_135_725_856_567_446_411,
          v: 2,
          op: 'n',
          ns: '',
          o: BSON::Document.new(msg: 'initiating set')
        )
        another_bson = BSON::Document.new(
          ts: BSON::Timestamp.new(1_479_419_535, 1),
          h: -2_135_725_856_567_446_411,
          v: 2,
          op: 'n',
          ns: '',
          o: BSON::Document.new(msg: 'initiating set')
        )
        noop = described_class.from(bson)
        another_noop = described_class.from(another_bson)

        expect(noop).to eq(another_noop)
      end
    end
  end
end
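
For orientation, the behaviour pinned down by this spec could be satisfied by a class along the following lines. This is a sketch written against the examples above rather than the gem's actual data/lib/oplogjam/noop.rb, so the internals (the error class definition, the instance variable, the equality rule) are assumptions.

require 'bson'

# Sketch only: the gem ships its own Oplogjam::Noop and InvalidNoop.
module Oplogjam
  InvalidNoop = Class.new(StandardError) unless defined?(InvalidNoop)

  class Noop
    # Wrap a BSON no-op document, insisting on the presence of o.msg.
    def self.from(bson)
      message = bson['o'] && bson['o']['msg']
      raise InvalidNoop, "missing message: #{bson}" unless message

      new(bson)
    end

    def initialize(bson)
      @bson = bson
    end

    def message
      @bson['o']['msg']
    end

    # The oplog hash (h) acts as the operation's unique identifier.
    def id
      @bson['h']
    end

    def ts
      @bson['ts']
    end

    # Expose the BSON timestamp as a Time built from seconds and increment,
    # which is what the #timestamp example expects.
    def timestamp
      Time.at(ts.seconds, ts.increment)
    end

    def ==(other)
      other.is_a?(Noop) && id == other.id
    end
  end
end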
@@ -0,0 +1,110 @@ data/spec/oplogjam/operation_spec.rb
require 'bson'
require 'oplogjam'

module Oplogjam
  RSpec.describe Operation do
    describe '.from' do
      it 'converts BSON no-ops into Noop' do
        bson = BSON::Document.new(
          ts: BSON::Timestamp.new(1_479_419_535, 1),
          h: -2_135_725_856_567_446_411,
          v: 2,
          op: 'n',
          ns: '',
          o: BSON::Document.new(msg: 'initiating set')
        )

        expect(described_class.from(bson)).to be_a(Noop)
      end

      it 'converts BSON inserts into Inserts' do
        bson = BSON::Document.new(
          ts: BSON::Timestamp.new(1_496_414_570, 11),
          t: 14,
          h: -3_028_027_288_268_436_781,
          v: 2,
          op: 'i',
          ns: 'foo.bar',
          o: BSON::Document.new(_id: 1, baz: 'quux')
        )

        expect(described_class.from(bson)).to be_a(Insert)
      end

      it 'converts BSON updates into Updates' do
        bson = BSON::Document.new(
          ts: BSON::Timestamp.new(1_479_561_033, 1),
          t: 2,
          h: 3_511_341_713_062_188_019,
          v: 2,
          op: 'u',
          ns: 'foo.bar',
          o2: BSON::Document.new(_id: BSON::ObjectId('583033a3643431ab5be6ec35')),
          o: BSON::Document.new('$set' => BSON::Document.new('bar' => 'baz'))
        )

        expect(described_class.from(bson)).to be_a(Update)
      end

      it 'converts BSON deletes into Deletes' do
        bson = BSON::Document.new(
          ts: BSON::Timestamp.new(1_479_421_186, 1),
          t: 1,
          h: -5_457_382_347_563_537_847,
          v: 2,
          op: 'd',
          ns: 'foo.bar',
          o: BSON::Document.new(_id: BSON::ObjectId('582e287cfedf6fb051b2efdf'))
        )

        expect(described_class.from(bson)).to be_a(Delete)
      end

      it 'converts BSON commands into Commands' do
        bson = BSON::Document.new(
          ts: BSON::Timestamp.new(1_479_420_028, 1),
          t: 1,
          h: -1_789_557_309_812_000_233,
          v: 2,
          op: 'c',
          ns: 'foo.$cmd',
          o: BSON::Document.new(create: 'bar')
        )

        expect(described_class.from(bson)).to be_a(Command)
      end

      it 'converts BSON applyOps into ApplyOps' do
        bson = BSON::Document.new(
          ts: BSON::Timestamp.new(1_479_420_028, 1),
          t: 1,
          h: -1_789_557_309_812_000_233,
          v: 2,
          op: 'c',
          ns: 'foo.$cmd',
          o: BSON::Document.new(
            applyOps: [
              BSON::Document.new(
                ts: BSON::Timestamp.new(1_496_414_570, 11),
                t: 14,
                h: -3_028_027_288_268_436_781,
                v: 2,
                op: 'i',
                ns: 'foo.bar',
                o: BSON::Document.new(_id: 1, baz: 'quux')
              )
            ]
          )
        )

        expect(described_class.from(bson)).to be_a(Oplogjam::ApplyOps)
      end

      it 'raises an error if given an unknown operation' do
        bson = BSON::Document.new(not: 'an operation')

        expect { described_class.from(bson) }.to raise_error(InvalidOperation)
      end
    end
  end
end
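
The dispatch this spec describes keys off the oplog op code, with applyOps commands treated specially. Below is a sketch of such a dispatcher written only from the examples above; it delegates to the gem's own operation classes but lives in its own module, and the gem's actual data/lib/oplogjam/operation.rb may be organised differently.

require 'oplogjam'

# Illustrative re-implementation of the dispatch the spec above describes.
module OplogDispatchSketch
  InvalidOperation = Class.new(StandardError)

  def self.from(bson)
    case bson['op']
    when 'n' then Oplogjam::Noop.from(bson)
    when 'i' then Oplogjam::Insert.from(bson)
    when 'u' then Oplogjam::Update.from(bson)
    when 'd' then Oplogjam::Delete.from(bson)
    when 'c'
      # Commands carrying an applyOps array wrap nested operations.
      if bson.fetch('o', {}).key?('applyOps')
        Oplogjam::ApplyOps.from(bson)
      else
        Oplogjam::Command.from(bson)
      end
    else
      raise InvalidOperation, "unknown operation: #{bson}"
    end
  end
end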
@@ -0,0 +1,53 @@ data/spec/oplogjam/operators/set_spec.rb
require 'oplogjam'

module Oplogjam
  module Operators
    RSpec.describe Set do
      describe '.update' do
        it 'converts a simple $set into SQL' do
          sql = described_class.from('a' => 1).update(Sequel.pg_jsonb_op(:document))

          expect(sql).to eq(Sequel.pg_jsonb_op(:document).set(%w[a], '1'))
        end

        it 'converts several simple $sets into SQL' do
          sql = described_class.from('a' => 1, 'b' => 2).update(Sequel.pg_jsonb_op(:document))

          expect(sql).to eq(Sequel.pg_jsonb_op(:document).set(%w[a], '1').set(%w[b], '2'))
        end

        it 'converts a single nested $set into SQL' do
          sql = described_class.from('a.b' => 1).update(Sequel.pg_jsonb_op(:document))

          expect(sql).to eq(
            Sequel
              .pg_jsonb_op(:document)
              .set(%w[a], Sequel.function(:coalesce, Sequel.pg_jsonb_op(:document)[%w[a]], Sequel.pg_jsonb({})))
              .set(%w[a b], '1')
          )
        end

        it 'converts a complex $set tree into a SQL expression' do
          sql = described_class.from('a.b.c' => 1, 'a.b.d' => 2, 'a.e' => 3).update(Sequel.pg_jsonb_op(:document))

          document = Sequel.pg_jsonb_op(:document)
          a_root = document.set(
            %w[a],
            Sequel.function(:coalesce, document[%w[a]], Sequel.pg_jsonb({}))
          )

          expect(sql).to eq(
            a_root
              .set(
                %w[a b],
                Sequel.function(:coalesce, a_root[%w[a b]], Sequel.pg_jsonb({}))
              )
              .set(%w[a b c], '1')
              .set(%w[a b d], '2')
              .set(%w[a e], '3')
          )
        end
      end
    end
  end
end
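
Since the value returned by described_class.from(...).update(Sequel.pg_jsonb_op(:document)) is an ordinary Sequel JSONB expression, it can be dropped straight into an UPDATE statement. A small, hypothetical illustration follows; the connection URL and the :bar table with its document jsonb column are borrowed from the update spec below, while targeting the row with where(id: '1') is an assumption about how a caller would use it.

require 'oplogjam'
require 'sequel'

postgres = Sequel.connect('postgres:///oplogjam_test')
postgres.extension :pg_json

# Build the nested-$set expression from the example above and apply it to one row.
set_expr = Oplogjam::Operators::Set.from('a.b' => 1).update(Sequel.pg_jsonb_op(:document))
postgres.from(:bar).where(id: '1').update(document: set_expr)

On PostgreSQL this renders as nested jsonb_set calls guarded by coalesce, which is exactly the shape the expectations above spell out.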
@@ -0,0 +1,35 @@ data/spec/oplogjam/sanitizer_spec.rb
require 'oplogjam'

module Oplogjam
  RSpec.describe Sanitizer do
    describe '.sanitize' do
      it 'strips null bytes from strings' do
        expect(described_class.sanitize("Foo\x00bar\x00")).to eq('Foobar')
      end

      it 'does not strip escaped null bytes from strings' do
        expect(described_class.sanitize('Foo\u0000bar')).to eq('Foo\u0000bar')
      end

      it 'strips null bytes from array elements' do
        expect(described_class.sanitize(["Foo\x00", "\x00Bar"])).to eq(['Foo', 'Bar'])
      end

      it 'strips null bytes from JSONB array elements' do
        expect(described_class.sanitize(Sequel::Postgres::JSONBArray.new(["Foo\x00", "\x00Bar"]))).to eq(['Foo', 'Bar'])
      end

      it 'strips null bytes from hash values' do
        expect(described_class.sanitize('name' => "Foo\x00")).to eq('name' => 'Foo')
      end

      it 'strips null bytes from hash keys' do
        expect(described_class.sanitize("\x00name" => 'Foo')).to eq('name' => 'Foo')
      end

      it 'strips null bytes from JSONB hash elements' do
        expect(described_class.sanitize(Sequel::Postgres::JSONBHash.new('name' => "Foo\x00"))).to eq('name' => 'Foo')
      end
    end
  end
end
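
A recursive sanitizer along the following lines would satisfy the examples above. It is a sketch inferred from the spec, not the gem's actual data/lib/oplogjam/sanitizer.rb; it duck-types on to_hash and to_ary so that Sequel's JSONBHash and JSONBArray wrappers are covered without requiring Sequel here.

# Sketch only: the gem ships its own Oplogjam::Sanitizer.
module Oplogjam
  module Sanitizer
    NULL_BYTE = "\x00".freeze

    # Recursively strip literal null bytes (which PostgreSQL's jsonb rejects)
    # from strings, array elements and hash keys/values. Escaped sequences
    # such as the literal text '\u0000' are left alone.
    def self.sanitize(value)
      if value.is_a?(String)
        value.delete(NULL_BYTE)
      elsif value.respond_to?(:to_hash)
        value.to_hash.each_with_object({}) { |(key, val), acc| acc[sanitize(key)] = sanitize(val) }
      elsif value.respond_to?(:to_ary)
        value.to_ary.map { |element| sanitize(element) }
      else
        value
      end
    end
  end
end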
@@ -0,0 +1,406 @@ data/spec/oplogjam/update_spec.rb
require 'bson'
require 'oplogjam'

module Oplogjam
  RSpec.describe Update do
    let(:postgres) { Sequel.connect('postgres:///oplogjam_test') }
    let(:schema) { Schema.new(postgres) }
    let(:table) { postgres.from(:bar) }

    before(:example, :database) do
      postgres.extension :pg_json
      schema.create_table(:bar)
      schema.add_indexes(:bar)
    end

    after(:example, :database) do
      table.truncate
    end

    describe '.from' do
      it 'converts a BSON update into an Update' do
        bson = BSON::Document.new(
          ts: BSON::Timestamp.new(1_479_561_033, 1),
          t: 2,
          h: 3_511_341_713_062_188_019,
          v: 2,
          op: 'u',
          ns: 'foo.bar',
          o2: BSON::Document.new(_id: BSON::ObjectId('583033a3643431ab5be6ec35')),
          o: BSON::Document.new('$set' => BSON::Document.new('bar' => 'baz'))
        )

        expect(described_class.from(bson)).to be_a(described_class)
      end

      it 'raises an error if the query is missing' do
        bson = BSON::Document.new(
          ts: BSON::Timestamp.new(1_479_561_033, 1),
          t: 2,
          h: 3_511_341_713_062_188_019,
          v: 2,
          op: 'u',
          ns: 'foo.bar',
          o: BSON::Document.new('$set' => BSON::Document.new('bar' => 'baz'))
        )

        expect { described_class.from(bson) }.to raise_error(InvalidUpdate)
      end
    end

    describe '#timestamp' do
      it 'returns the timestamp of the operation as a Time' do
        bson = BSON::Document.new(
          ts: BSON::Timestamp.new(1_479_561_033, 1),
          t: 2,
          h: 3_511_341_713_062_188_019,
          v: 2,
          op: 'u',
          ns: 'foo.bar',
          o2: BSON::Document.new(_id: BSON::ObjectId('583033a3643431ab5be6ec35')),
          o: BSON::Document.new('$set' => BSON::Document.new('bar' => 'baz'))
        )
        update = described_class.from(bson)

        expect(update.timestamp).to eq(Time.at(1_479_561_033, 1))
      end
    end

    describe '#namespace' do
      it 'returns the namespace' do
        bson = BSON::Document.new(
          ts: BSON::Timestamp.new(1_479_561_033, 1),
          t: 2,
          h: 3_511_341_713_062_188_019,
          v: 2,
          op: 'u',
          ns: 'foo.bar',
          o2: BSON::Document.new(_id: BSON::ObjectId('583033a3643431ab5be6ec35')),
          o: BSON::Document.new('$set' => BSON::Document.new('bar' => 'baz'))
        )
        update = described_class.from(bson)

        expect(update.namespace).to eq('foo.bar')
      end
    end

    describe '#query' do
      it 'returns the query' do
        bson = BSON::Document.new(
          ts: BSON::Timestamp.new(1_479_561_033, 1),
          t: 2,
          h: 3_511_341_713_062_188_019,
          v: 2,
          op: 'u',
          ns: 'foo.bar',
          o2: BSON::Document.new(_id: BSON::ObjectId('583033a3643431ab5be6ec35')),
          o: BSON::Document.new('$set' => BSON::Document.new('bar' => 'baz'))
        )
        update = described_class.from(bson)

        expect(update.query).to eq(BSON::Document.new(_id: BSON::ObjectId('583033a3643431ab5be6ec35')))
      end
    end

    describe '#update' do
      it 'returns the update' do
        bson = BSON::Document.new(
          ts: BSON::Timestamp.new(1_479_561_033, 1),
          t: 2,
          h: 3_511_341_713_062_188_019,
          v: 2,
          op: 'u',
          ns: 'foo.bar',
          o2: BSON::Document.new(_id: BSON::ObjectId('583033a3643431ab5be6ec35')),
          o: BSON::Document.new('$set' => BSON::Document.new('bar' => 'baz'))
        )
        update = described_class.from(bson)

        expect(update.update).to eq(BSON::Document.new('$set' => BSON::Document.new('bar' => 'baz')))
      end
    end

    describe '#apply', :database do
      it 'ignores updates to unmapped tables' do
        table.insert(id: '1', document: '{"_id":1,"a":1}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, 'a' => 2)
        update.apply('foo.baz' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'a' => 1))
      end

      it 'strips null bytes from updates' do
        table.insert(id: '1', document: '{"_id":1}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, 'foo' => "bar\x00")
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'foo' => 'bar'))
      end

      it 'updates updated_at' do
        Timecop.freeze(Time.new(2001, 1, 1, 0, 0, 0)) do
          table.insert(id: '1', document: '{"_id":1}', created_at: Time.now.utc, updated_at: Time.now.utc)
          update = build_update(1, 'a' => 1)
          update.apply('foo.bar' => table)

          expect(table.first).to include(updated_at: Time.new(2001, 1, 1, 0, 0, 0))
        end
      end

      it 'applies {"a"=>1, "b"=>2} to {}' do
        table.insert(id: '1', document: '{"_id":1}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, 'a' => 1, 'b' => 2)
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'a' => 1, 'b' => 2))
      end

      it 'applies {"$set"=>{"a"=>1}} to {}' do
        table.insert(id: '1', document: '{"_id":1}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, '$set' => { 'a' => 1 })
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'a' => 1))
      end

      it 'applies {"$set"=>{"a"=>1}} to {"a"=>0}' do
        table.insert(id: '1', document: '{"_id":1,"a":0}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, '$set' => { 'a' => 1 })
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'a' => 1))
      end

      it 'applies {"$set"=>{"a"=>1, "b"=>2}} to {}' do
        table.insert(id: '1', document: '{"_id":1}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, '$set' => { 'a' => 1, 'b' => 2 })
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'a' => 1, 'b' => 2))
      end

      it 'applies {"$set"=>{"a"=>1, "b"=>2}} to {"a"=>0, "b"=>0}' do
        table.insert(id: '1', document: '{"_id":1,"a":0,"b":0}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, '$set' => { 'a' => 1, 'b' => 2 })
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'a' => 1, 'b' => 2))
      end

      it 'applies {"$unset"=>{"a"=>""}} to {"a"=>0, "b"=>0}' do
        table.insert(id: '1', document: '{"_id":1,"a":0,"b":0}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, '$unset' => { 'a' => '' })
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'b' => 0))
      end

      it 'applies {"$unset"=>{"a"=>"", "b"=>""}} to {"a"=>0, "b"=>0}' do
        table.insert(id: '1', document: '{"_id":1,"a":0,"b":0}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, '$unset' => { 'a' => '', 'b' => '' })
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1))
      end

      it 'applies {"$unset"=>{"c"=>""}} to {"a"=>0, "b"=>0}' do
        table.insert(id: '1', document: '{"_id":1,"a":0,"b":0}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, '$unset' => { 'c' => '' })
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'a' => 0, 'b' => 0))
      end

      it 'applies {"$set"=>{"b"=>1}, "$unset"=>{"a"=>""}} to {"a"=>0}' do
        table.insert(id: '1', document: '{"_id":1,"a":0}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, '$set' => { 'b' => 1 }, '$unset' => { 'a' => '' })
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'b' => 1))
      end

      it 'applies {"$set"=>{"a.0"=>1}} to {}' do
        table.insert(id: '1', document: '{"_id":1}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, '$set' => { 'a.0' => 1 })
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'a' => { '0' => 1 }))
      end

      it 'applies {"$set"=>{"a.0"=>1}} to {"a"=>[]}' do
        table.insert(id: '1', document: '{"_id":1,"a":[]}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, '$set' => { 'a.0' => 1 })
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'a' => [1]))
      end

      it 'applies {"$set"=>{"a.0"=>1}} to {"a"=>{}}' do
        table.insert(id: '1', document: '{"_id":1,"a":{}}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, '$set' => { 'a.0' => 1 })
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'a' => { '0' => 1 }))
      end

      it 'applies {"$set"=>{"a.1"=>1}} to {"a"=>[]}' do
        table.insert(id: '1', document: '{"_id":1,"a":[]}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, '$set' => { 'a.1' => 1 })
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'a' => [nil, 1]))
      end

      it 'applies {"$unset"=>{"a.0"=>""}} to {}' do
        table.insert(id: '1', document: '{"_id":1}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, '$unset' => { 'a.0' => '' })
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1))
      end

      it 'applies {"$unset"=>{"a.0"=>""}} to {"a"=>[]}' do
        table.insert(id: '1', document: '{"_id":1,"a":[]}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, '$unset' => { 'a.0' => '' })
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'a' => []))
      end

      it 'applies {"$unset"=>{"a.0"=>""}} to {"a"=>[1]}' do
        table.insert(id: '1', document: '{"_id":1,"a":[1]}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, '$unset' => { 'a.0' => '' })
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'a' => [nil]))
      end

      it 'applies {"$unset"=>{"a.0"=>""}} to {"a"=>[1, 2]}' do
        table.insert(id: '1', document: '{"_id":1,"a":[1,2]}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, '$unset' => { 'a.0' => '' })
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'a' => [nil, 2]))
      end

      it 'applies {"$unset"=>{"a.1"=>""}} to {"a"=>[1, 2]}' do
        table.insert(id: '1', document: '{"_id":1,"a":[1,2]}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, '$unset' => { 'a.1' => '' })
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'a' => [1, nil]))
      end

      it 'applies {"$unset"=>{"a.2"=>""}} to {"a"=>[1, 2]}' do
        table.insert(id: '1', document: '{"_id":1,"a":[1,2]}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, '$unset' => { 'a.2' => '' })
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'a' => [1, 2]))
      end

      it 'applies {"$unset"=>{"a.0"=>""}} to {"a"=>{}}' do
        table.insert(id: '1', document: '{"_id":1,"a":{}}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, '$unset' => { 'a.0' => '' })
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'a' => {}))
      end

      it 'applies {"$unset"=>{"a.1"=>""}} to {"a"=>[]}' do
        table.insert(id: '1', document: '{"_id":1,"a":[]}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, '$unset' => { 'a.1' => '' })
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'a' => []))
      end

      it 'applies {"$set"=>{"a.b"=>1}} to {}' do
        table.insert(id: '1', document: '{"_id":1}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, '$set' => { 'a.b' => 1 })
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'a' => { 'b' => 1 }))
      end

      it 'applies {"$set"=>{"a.b.c"=>1}} to {}' do
        table.insert(id: '1', document: '{"_id":1}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, '$set' => { 'a.b.c' => 1 })
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'a' => { 'b' => { 'c' => 1 } }))
      end

      it 'applies {"$set"=>{"a.b.c"=>1, "a.d"=>2}} to {}' do
        table.insert(id: '1', document: '{"_id":1}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, '$set' => { 'a.b.c' => 1, 'a.d' => 2 })
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'a' => { 'b' => { 'c' => 1 }, 'd' => 2 }))
      end

      it 'applies {"$set"=>{"a.b"=>1}} to {"a"=>{}}' do
        table.insert(id: '1', document: '{"_id":1,"a":{}}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, '$set' => { 'a.b' => 1 })
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'a' => { 'b' => 1 }))
      end

      it 'applies {"$set"=>{"a.b"=>1}} to {"a"=>{"b"=>0}}' do
        table.insert(id: '1', document: '{"_id":1,"a":{"b":0}}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, '$set' => { 'a.b' => 1 })
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'a' => { 'b' => 1 }))
      end

      it 'applies {"$set"=>{"a.1.b"=>1}} to {}' do
        table.insert(id: '1', document: '{"_id":1}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, '$set' => { 'a.1.b' => 1 })
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'a' => { '1' => { 'b' => 1 } }))
      end

      it 'applies {"$set"=>{"a.1.b"=>1}} to {"a"=>[]}' do
        table.insert(id: '1', document: '{"_id":1,"a":[]}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, '$set' => { 'a.1.b' => 1 })
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'a' => [nil, { 'b' => 1 }]))
      end

      it 'applies {"$set"=>{"a.1.b.1"=>1}} to {}' do
        table.insert(id: '1', document: '{"_id":1}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, '$set' => { 'a.1.b.1' => 1 })
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'a' => { '1' => { 'b' => { '1' => 1 } } }))
      end

      it 'applies {"$set"=>{"a.1.b.1"=>1}} to {"a"=>[]}' do
        table.insert(id: '1', document: '{"_id":1,"a":[]}', created_at: Time.now.utc, updated_at: Time.now.utc)
        update = build_update(1, '$set' => { 'a.1.b.1' => 1 })
        update.apply('foo.bar' => table)

        expect(table.first).to include(document: Sequel.pg_jsonb('_id' => 1, 'a' => [nil, { 'b' => { '1' => 1 } }]))
      end
    end

    def build_update(id = '1', attributes = { '$set' => BSON::Document.new('bar' => 'baz') })
      bson = BSON::Document.new(
        ts: BSON::Timestamp.new(1_479_561_033, 1),
        t: 2,
        h: 3_511_341_713_062_188_019,
        v: 2,
        op: 'u',
        ns: 'foo.bar',
        o2: BSON::Document.new(_id: id),
        o: BSON::Document.new(attributes)
      )

      described_class.from(bson)
    end
  end
end
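
The :database examples above rely on Schema#create_table and Schema#add_indexes having built a table with an id, a jsonb document column and two timestamps. The gem's actual data/lib/oplogjam/schema.rb is not shown in this diff, so the sketch below only approximates the shape those examples insert into and read back; the exact column types and the primary key are assumptions.

require 'sequel'

postgres = Sequel.connect('postgres:///oplogjam_test')

# Approximate shape of the table the update specs exercise.
postgres.create_table?(:bar) do
  column :id, :text, primary_key: true        # assumption: specs insert string ids such as '1'
  column :document, :jsonb, null: false       # specs compare this column against Sequel.pg_jsonb(...)
  column :created_at, :timestamptz, null: false
  column :updated_at, :timestamptz, null: false
end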