deimos-ruby 1.6.3 → 1.8.1.pre.beta1
- checksums.yaml +4 -4
- data/.circleci/config.yml +9 -0
- data/.rubocop.yml +22 -16
- data/.ruby-version +1 -1
- data/CHANGELOG.md +42 -0
- data/Gemfile.lock +125 -98
- data/README.md +164 -16
- data/Rakefile +1 -1
- data/deimos-ruby.gemspec +4 -3
- data/docs/ARCHITECTURE.md +144 -0
- data/docs/CONFIGURATION.md +27 -0
- data/lib/deimos.rb +8 -7
- data/lib/deimos/active_record_consume/batch_consumption.rb +159 -0
- data/lib/deimos/active_record_consume/batch_slicer.rb +27 -0
- data/lib/deimos/active_record_consume/message_consumption.rb +58 -0
- data/lib/deimos/active_record_consume/schema_model_converter.rb +52 -0
- data/lib/deimos/active_record_consumer.rb +33 -75
- data/lib/deimos/active_record_producer.rb +23 -0
- data/lib/deimos/batch_consumer.rb +2 -140
- data/lib/deimos/config/configuration.rb +28 -10
- data/lib/deimos/consume/batch_consumption.rb +150 -0
- data/lib/deimos/consume/message_consumption.rb +94 -0
- data/lib/deimos/consumer.rb +79 -70
- data/lib/deimos/kafka_message.rb +1 -1
- data/lib/deimos/kafka_topic_info.rb +22 -3
- data/lib/deimos/message.rb +6 -1
- data/lib/deimos/metrics/provider.rb +0 -2
- data/lib/deimos/poll_info.rb +9 -0
- data/lib/deimos/schema_backends/avro_base.rb +28 -1
- data/lib/deimos/schema_backends/base.rb +15 -2
- data/lib/deimos/tracing/provider.rb +0 -2
- data/lib/deimos/utils/db_poller.rb +149 -0
- data/lib/deimos/utils/db_producer.rb +59 -16
- data/lib/deimos/utils/deadlock_retry.rb +68 -0
- data/lib/deimos/utils/lag_reporter.rb +19 -26
- data/lib/deimos/version.rb +1 -1
- data/lib/generators/deimos/active_record/templates/migration.rb.tt +28 -0
- data/lib/generators/deimos/active_record/templates/model.rb.tt +5 -0
- data/lib/generators/deimos/active_record_generator.rb +79 -0
- data/lib/generators/deimos/db_backend/templates/migration +1 -0
- data/lib/generators/deimos/db_backend/templates/rails3_migration +1 -0
- data/lib/generators/deimos/db_poller/templates/migration +11 -0
- data/lib/generators/deimos/db_poller/templates/rails3_migration +16 -0
- data/lib/generators/deimos/db_poller_generator.rb +48 -0
- data/lib/tasks/deimos.rake +7 -0
- data/spec/active_record_batch_consumer_spec.rb +481 -0
- data/spec/active_record_consume/batch_slicer_spec.rb +42 -0
- data/spec/active_record_consume/schema_model_converter_spec.rb +105 -0
- data/spec/active_record_consumer_spec.rb +3 -11
- data/spec/active_record_producer_spec.rb +66 -88
- data/spec/batch_consumer_spec.rb +24 -7
- data/spec/config/configuration_spec.rb +4 -0
- data/spec/consumer_spec.rb +8 -8
- data/spec/deimos_spec.rb +57 -49
- data/spec/generators/active_record_generator_spec.rb +56 -0
- data/spec/handlers/my_batch_consumer.rb +6 -1
- data/spec/handlers/my_consumer.rb +6 -1
- data/spec/kafka_topic_info_spec.rb +39 -16
- data/spec/message_spec.rb +19 -0
- data/spec/producer_spec.rb +3 -3
- data/spec/rake_spec.rb +1 -1
- data/spec/schemas/com/my-namespace/Generated.avsc +71 -0
- data/spec/schemas/com/my-namespace/MySchemaCompound-key.avsc +18 -0
- data/spec/schemas/com/my-namespace/Wibble.avsc +43 -0
- data/spec/spec_helper.rb +62 -6
- data/spec/utils/db_poller_spec.rb +320 -0
- data/spec/utils/db_producer_spec.rb +84 -10
- data/spec/utils/deadlock_retry_spec.rb +74 -0
- data/spec/utils/lag_reporter_spec.rb +29 -22
- metadata +66 -30
- data/lib/deimos/base_consumer.rb +0 -104
- data/lib/deimos/utils/executor.rb +0 -124
- data/lib/deimos/utils/platform_schema_validation.rb +0 -0
- data/lib/deimos/utils/signal_handler.rb +0 -68
- data/spec/utils/executor_spec.rb +0 -53
- data/spec/utils/signal_handler_spec.rb +0 -16
data/spec/deimos_spec.rb
CHANGED
@@ -68,11 +68,11 @@ describe Deimos do
 
   describe '#start_db_backend!' do
     it 'should start if backend is db and thread_count is > 0' do
-      signal_handler = instance_double(
+      signal_handler = instance_double(Sigurd::SignalHandler)
       allow(signal_handler).to receive(:run!)
-      expect(
+      expect(Sigurd::Executor).to receive(:new).
         with(anything, sleep_seconds: 5, logger: anything).and_call_original
-      expect(
+      expect(Sigurd::SignalHandler).to receive(:new) do |executor|
         expect(executor.runners.size).to eq(2)
         signal_handler
       end
@@ -83,7 +83,7 @@ describe Deimos do
     end
 
     it 'should not start if backend is not db' do
-      expect(
+      expect(Sigurd::SignalHandler).not_to receive(:new)
       described_class.configure do |config|
         config.producers.backend = :kafka
       end
@@ -92,7 +92,7 @@ describe Deimos do
     end
 
     it 'should not start if thread_count is nil' do
-      expect(
+      expect(Sigurd::SignalHandler).not_to receive(:new)
      described_class.configure do |config|
        config.producers.backend = :db
      end
@@ -101,61 +101,69 @@ describe Deimos do
     end
 
     it 'should not start if thread_count is 0' do
-      expect(
+      expect(Sigurd::SignalHandler).not_to receive(:new)
       described_class.configure do |config|
         config.producers.backend = :db
       end
       expect { described_class.start_db_backend!(thread_count: 0) }.
         to raise_error('Thread count is not given or set to zero, exiting')
     end
+  end
 
-
-
-
-
-
-    it 'should not raise an error with properly configured handlers' do
-      path = config_path # for scope issues in the block below
-      # Add explicit consumers
-      phobos_configuration['listeners'] << { 'handler' => 'ConsumerTest::MyConsumer',
-                                             'delivery' => 'message' }
-      phobos_configuration['listeners'] << { 'handler' => 'ConsumerTest::MyConsumer',
-                                             'delivery' => 'batch' }
-
-      expect {
-        described_class.configure { |c| c.phobos_config_file = path }
-      }.not_to raise_error
-    end
-
-    it 'should raise an error if BatchConsumers do not have inline_batch delivery' do
-      path = config_path # for scope issues in the block below
-      phobos_configuration['listeners'] = [{ 'handler' => 'ConsumerTest::MyBatchConsumer',
-                                             'delivery' => 'message' }]
-
-      expect {
-        described_class.configure { |c| c.phobos_config_file = path }
-      }.to raise_error('BatchConsumer ConsumerTest::MyBatchConsumer must have delivery set to `inline_batch`')
-    end
-
-    it 'should raise an error if Consumers do not have message or batch delivery' do
-      path = config_path # for scope issues in the block below
-      phobos_configuration['listeners'] = [{ 'handler' => 'ConsumerTest::MyConsumer',
-                                             'delivery' => 'inline_batch' }]
+  describe 'delivery configuration' do
+    before(:each) do
+      allow(YAML).to receive(:load).and_return(phobos_configuration)
+    end
 
-
-
-
-
+    it 'should not raise an error with properly configured handlers' do
+      expect {
+        described_class.configure do
+          consumer do
+            class_name 'ConsumerTest::MyConsumer'
+            delivery :message
+          end
+          consumer do
+            class_name 'ConsumerTest::MyConsumer'
+            delivery :batch
+          end
+          consumer do
+            class_name 'ConsumerTest::MyBatchConsumer'
+            delivery :inline_batch
+          end
+        end
+      }.not_to raise_error
+    end
 
-
-
-
+    it 'should raise an error if inline_batch listeners do not implement consume_batch' do
+      expect {
+        described_class.configure do
+          consumer do
+            class_name 'ConsumerTest::MyConsumer'
+            delivery :inline_batch
+          end
+        end
+      }.to raise_error('BatchConsumer ConsumerTest::MyConsumer does not implement `consume_batch`')
+    end
 
-
-
-
-
+    it 'should raise an error if Consumers do not have message or batch delivery' do
+      expect {
+        described_class.configure do
+          consumer do
+            class_name 'ConsumerTest::MyBatchConsumer'
+            delivery :message
+          end
+        end
+      }.to raise_error('Non-batch Consumer ConsumerTest::MyBatchConsumer does not implement `consume`')
+    end
 
+    it 'should treat nil as `batch`' do
+      expect {
+        described_class.configure do
+          consumer do
+            class_name 'ConsumerTest::MyConsumer'
+          end
+        end
+      }.not_to raise_error
     end
   end
 end
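Note: the spec above replaces the old `phobos_config_file` listener checks with validations on the new block-style consumer configuration. A minimal sketch of that DSL as exercised by the spec, using `Deimos.configure` in place of the spec's `described_class.configure`; topic/group settings and real consumer classes are intentionally omitted since they are not shown in this hunk:

```ruby
# Sketch only: `consumer`, `class_name`, and `delivery` are taken from the spec above.
Deimos.configure do
  consumer do
    class_name 'ConsumerTest::MyConsumer'
    delivery :message       # per-message consumer; must implement `consume`
  end

  consumer do
    class_name 'ConsumerTest::MyBatchConsumer'
    delivery :inline_batch  # batch consumer; must implement `consume_batch`
  end
end
```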
data/spec/generators/active_record_generator_spec.rb
ADDED
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'generators/deimos/active_record_generator'
+
+RSpec.describe Deimos::Generators::ActiveRecordGenerator do
+
+  after(:each) do
+    FileUtils.rm_rf('db') if File.exist?('db')
+    FileUtils.rm_rf('app') if File.exist?('app')
+  end
+
+  it 'should generate a migration' do
+    expect(Dir['db/migrate/*.rb']).to be_empty
+    expect(Dir['app/models/*.rb']).to be_empty
+    described_class.start(['generated_table', 'com.my-namespace.Generated'])
+    files = Dir['db/migrate/*.rb']
+    expect(files.length).to eq(1)
+    results = <<~MIGRATION
+      class CreateGeneratedTable < ActiveRecord::Migration[6.0]
+        def up
+          if table_exists?(:generated_table)
+            warn "generated_table already exists, exiting"
+            return
+          end
+          create_table :generated_table do |t|
+            t.string :a_string
+            t.integer :a_int
+            t.bigint :a_long
+            t.float :a_float
+            t.float :a_double
+            t.string :an_enum
+            t.json :an_array
+            t.json :a_map
+            t.json :a_record
+          end
+
+          # TODO add indexes as necessary
+        end
+
+        def down
+          return unless table_exists?(:generated_table)
+          drop_table :generated_table
+        end
+
+      end
+    MIGRATION
+    expect(File.read(files[0])).to eq(results)
+    model = <<~MODEL
+      class GeneratedTable < ApplicationRecord
+        enum an_enum: {sym1: 'sym1', sym2: 'sym2'}
+      end
+    MODEL
+    expect(File.read('app/models/generated_table.rb')).to eq(model)
+  end
+
+end
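Note: the expected migration above shows the Avro-to-column mapping the new generator applies (string → string, int → integer, long → bigint, float/double → float, enum → string column plus a Rails enum on the model, array/map/record → json). A sketch of invoking it programmatically, the same way the spec does; a `rails generate deimos:active_record` CLI form is an assumption based on the generator's file path and is not shown in this diff:

```ruby
# Runs the generator exactly as the spec above does: table name, then schema.
require 'generators/deimos/active_record_generator'

Deimos::Generators::ActiveRecordGenerator.start(
  ['generated_table', 'com.my-namespace.Generated']
)
# Writes a migration under db/migrate/ and app/models/generated_table.rb.
```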
data/spec/kafka_topic_info_spec.rb
CHANGED
@@ -37,22 +37,45 @@ each_db_config(Deimos::KafkaTopicInfo) do
   end
 
   specify '#clear_lock' do
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    freeze_time do
+      Deimos::KafkaTopicInfo.create!(topic: 'my-topic', locked_by: 'abc',
+                                     locked_at: 10.seconds.ago, error: true, retries: 1,
+                                     last_processed_at: 20.seconds.ago)
+      Deimos::KafkaTopicInfo.create!(topic: 'my-topic2', locked_by: 'def',
+                                     locked_at: 10.seconds.ago, error: true, retries: 1,
+                                     last_processed_at: 20.seconds.ago)
+      Deimos::KafkaTopicInfo.clear_lock('my-topic', 'abc')
+      expect(Deimos::KafkaTopicInfo.count).to eq(2)
+      record = Deimos::KafkaTopicInfo.first
+      expect(record.locked_by).to eq(nil)
+      expect(record.locked_at).to eq(nil)
+      expect(record.error).to eq(false)
+      expect(record.retries).to eq(0)
+      expect(record.last_processed_at.to_s).to eq(Time.zone.now.to_s)
+      record = Deimos::KafkaTopicInfo.last
+      expect(record.locked_by).not_to eq(nil)
+      expect(record.locked_at).not_to eq(nil)
+      expect(record.error).not_to eq(false)
+      expect(record.retries).not_to eq(0)
+      expect(record.last_processed_at.to_s).to eq(20.seconds.ago.to_s)
+    end
+  end
+
+  specify '#ping_empty_topics' do
+    freeze_time do
+      old_time = 1.hour.ago.to_s
+      t1 = Deimos::KafkaTopicInfo.create!(topic: 'topic1', last_processed_at: old_time)
+      t2 = Deimos::KafkaTopicInfo.create!(topic: 'topic2', last_processed_at: old_time)
+      t3 = Deimos::KafkaTopicInfo.create!(topic: 'topic3', last_processed_at: old_time,
+                                          locked_by: 'me', locked_at: 1.minute.ago)
+
+      expect(Deimos::KafkaTopicInfo.count).to eq(3)
+      Deimos::KafkaTopicInfo.all.each { |t| expect(t.last_processed_at.to_s).to eq(old_time) }
+      Deimos::KafkaTopicInfo.ping_empty_topics(%w(topic1))
+      expect(t1.reload.last_processed_at.to_s).to eq(old_time) # was passed as an exception
+      expect(t2.reload.last_processed_at.to_s).to eq(Time.zone.now.to_s)
+      expect(t3.reload.last_processed_at.to_s).to eq(old_time) # is locked
+    end
   end
 
   specify '#register_error' do
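Note: a short usage sketch of the two class methods the spec above asserts, with argument shapes taken directly from the spec; how the db_producer actually invokes them is not shown in this hunk:

```ruby
# Release the lock a producer holds on a topic and reset its error/retry state;
# per the spec, this also refreshes last_processed_at.
Deimos::KafkaTopicInfo.clear_lock('my-topic', 'abc') # (topic, locked_by)

# Bump last_processed_at on every unlocked topic that had no messages this cycle,
# skipping the topics that did (passed in as the exception list).
Deimos::KafkaTopicInfo.ping_empty_topics(['my-topic'])
```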
data/spec/message_spec.rb
ADDED
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+RSpec.describe(Deimos::Message) do
+  it 'should detect tombstones' do
+    expect(described_class.new(nil, nil, key: 'key1')).
+      to be_tombstone
+    expect(described_class.new({ v: 'val1' }, nil, key: 'key1')).
+      not_to be_tombstone
+    expect(described_class.new({ v: '' }, nil, key: 'key1')).
+      not_to be_tombstone
+    expect(described_class.new({ v: 'val1' }, nil, key: nil)).
+      not_to be_tombstone
+  end
+
+  it 'can support complex keys/values' do
+    expect { described_class.new({ a: 1, b: 2 }, nil, key: { c: 3, d: 4 }) }.
+      not_to raise_exception
+  end
+end
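Note: `be_tombstone` implies a `tombstone?` predicate on `Deimos::Message`. As the spec above asserts, a message with a key but a nil payload is a tombstone; a minimal sketch (the second constructor argument is simply nil in the spec and is not interpreted here):

```ruby
Deimos::Message.new(nil, nil, key: 'key1').tombstone?           # => true
Deimos::Message.new({ v: 'val1' }, nil, key: 'key1').tombstone? # => false
Deimos::Message.new({ v: 'val1' }, nil, key: nil).tombstone?    # => false
```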
data/spec/producer_spec.rb
CHANGED
@@ -148,7 +148,7 @@ module ProducerTest
         Deimos.disable_producers do
           raise 'OH NOES'
         end
-      }
+      }.to raise_error('OH NOES')
       expect(Deimos).not_to be_producers_disabled
     end
 
@@ -246,7 +246,7 @@ module ProducerTest
         MyNonEncodedProducer.publish_list(
           [{ 'test_id' => 'foo', 'some_int' => 123 }]
         )
-      }
+      }.to raise_error('No key given but a key is required! Use `key_config none: true` to avoid using keys.')
     end
 
     it 'should allow nil keys if none: true is configured' do
@@ -254,7 +254,7 @@ module ProducerTest
         MyNoKeyProducer.publish_list(
          [{ 'test_id' => 'foo', 'some_int' => 123 }]
        )
-      }
+      }.not_to raise_error
     end
 
     it 'should use a partition key' do
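Note: the new error message above points at `key_config none: true`. A minimal sketch of a keyless producer like the spec's `MyNoKeyProducer`; the `schema`/`namespace`/`topic` values are placeholders, and the class-macro DSL is assumed from the Deimos README rather than from this diff:

```ruby
class MyNoKeyProducer < Deimos::Producer
  schema 'MySchema'
  namespace 'com.my-namespace'
  topic 'MyTopic'
  key_config none: true # messages are published without a key
end

MyNoKeyProducer.publish_list([{ 'test_id' => 'foo', 'some_int' => 123 }])
```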
data/spec/rake_spec.rb
CHANGED
@@ -9,7 +9,7 @@ if Rake.application.lookup(:environment).nil?
   Rake::Task.define_task(:environment)
 end
 
-describe 'Rakefile' do
+describe 'Rakefile' do
   it 'should start listeners' do
     runner = instance_double(Phobos::CLI::Runner)
     expect(Phobos::CLI::Runner).to receive(:new).and_return(runner)
data/spec/schemas/com/my-namespace/Generated.avsc
ADDED
@@ -0,0 +1,71 @@
+{
+  "namespace": "com.my-namespace",
+  "name": "Generated",
+  "type": "record",
+  "doc": "Test schema",
+  "fields": [
+    {
+      "name": "a_string",
+      "type": "string"
+    },
+    {
+      "name": "a_int",
+      "type": "int"
+    },
+    {
+      "name": "a_long",
+      "type": "long"
+    },
+    {
+      "name": "a_float",
+      "type": "float"
+    },
+    {
+      "name": "a_double",
+      "type": "double"
+    },
+    {
+      "name": "an_enum",
+      "type": {
+        "type": "enum",
+        "name": "AnEnum",
+        "symbols": ["sym1", "sym2"]
+      }
+    },
+    {
+      "name": "an_array",
+      "type": {
+        "type": "array",
+        "items": "int"
+      }
+    },
+    {
+      "name": "a_map",
+      "type": {
+        "type": "map",
+        "values": "string"
+      }
+    },
+    {
+      "name": "timestamp",
+      "type": "string"
+    },
+    {
+      "name": "message_id",
+      "type": "string"
+    },
+    {
+      "name": "a_record",
+      "type": {
+        "type": "record",
+        "name": "ARecord",
+        "fields": [
+          {
+            "name": "a_record_field",
+            "type": "string"
+          }
+        ]
+      }
+    }
+  ]
+}
data/spec/schemas/com/my-namespace/MySchemaCompound-key.avsc
ADDED
@@ -0,0 +1,18 @@
+{
+  "namespace": "com.my-namespace",
+  "name": "MySchemaCompound-key",
+  "type": "record",
+  "doc": "Test schema",
+  "fields": [
+    {
+      "name": "part_one",
+      "type": "string",
+      "doc": "test string one"
+    },
+    {
+      "name": "part_two",
+      "type": "string",
+      "doc": "test string two"
+    }
+  ]
+}
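Note: this compound key schema presumably backs the tests for schema-encoded keys with more than one field. A hedged sketch of a producer pointing at it via the `key_config schema:` option described in the Deimos README; the class name, topic, and value schema below are made up for illustration and do not appear in this diff:

```ruby
class MyCompoundKeyProducer < Deimos::Producer
  schema 'MySchemaCompound'                  # hypothetical value schema
  namespace 'com.my-namespace'
  topic 'MyTopic'
  key_config schema: 'MySchemaCompound-key'  # key encoded with the schema above
end
```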
data/spec/schemas/com/my-namespace/Wibble.avsc
ADDED
@@ -0,0 +1,43 @@
+{
+  "namespace": "com.my-namespace",
+  "name": "Wibble",
+  "type": "record",
+  "fields": [
+    {
+      "name": "id",
+      "type": "long"
+    },
+    {
+      "name": "wibble_id",
+      "type": "long"
+    },
+    {
+      "name": "name",
+      "type": "string"
+    },
+    {
+      "name": "floop",
+      "type": "string"
+    },
+    {
+      "name": "birthday_int",
+      "type": "int"
+    },
+    {
+      "name": "birthday_long",
+      "type": "long"
+    },
+    {
+      "name": "birthday_optional",
+      "type": ["null", "int"]
+    },
+    {
+      "name": "updated_at",
+      "type": "long"
+    },
+    {
+      "name": "created_at",
+      "type": "long"
+    }
+  ]
+}