logster 2.5.1 → 2.6.0

Files changed (62)
  1. checksums.yaml +4 -4
  2. data/.travis.yml +2 -0
  3. data/CHANGELOG.md +9 -0
  4. data/README.md +15 -1
  5. data/Rakefile +1 -0
  6. data/assets/javascript/client-app.js +204 -168
  7. data/assets/javascript/vendor.js +5132 -5833
  8. data/assets/stylesheets/client-app.css +1 -1
  9. data/client-app/.eslintrc.js +17 -5
  10. data/client-app/.travis.yml +4 -3
  11. data/client-app/app/app.js +5 -7
  12. data/client-app/app/components/actions-menu.js +24 -17
  13. data/client-app/app/components/back-trace.js +148 -0
  14. data/client-app/app/components/env-tab.js +16 -12
  15. data/client-app/app/components/message-info.js +84 -7
  16. data/client-app/app/components/message-row.js +13 -15
  17. data/client-app/app/components/panel-resizer.js +63 -45
  18. data/client-app/app/components/patterns-list.js +6 -6
  19. data/client-app/app/components/update-time.js +13 -13
  20. data/client-app/app/controllers/index.js +4 -2
  21. data/client-app/app/index.html +1 -1
  22. data/client-app/app/initializers/app-init.js +1 -1
  23. data/client-app/app/lib/decorators.js +11 -0
  24. data/client-app/app/lib/preload.js +14 -3
  25. data/client-app/app/lib/utilities.js +63 -36
  26. data/client-app/app/models/group.js +6 -1
  27. data/client-app/app/models/message-collection.js +9 -7
  28. data/client-app/app/models/message.js +25 -20
  29. data/client-app/app/router.js +4 -6
  30. data/client-app/app/styles/app.css +18 -4
  31. data/client-app/app/templates/components/actions-menu.hbs +6 -2
  32. data/client-app/app/templates/components/back-trace.hbs +8 -0
  33. data/client-app/app/templates/components/message-info.hbs +7 -2
  34. data/client-app/app/templates/index.hbs +4 -1
  35. data/client-app/config/environment.js +1 -1
  36. data/client-app/config/optional-features.json +4 -1
  37. data/client-app/ember-cli-build.js +2 -3
  38. data/client-app/package-lock.json +9712 -2884
  39. data/client-app/package.json +25 -22
  40. data/client-app/preload-json-manager.rb +62 -0
  41. data/client-app/testem.js +0 -1
  42. data/client-app/tests/index.html +1 -1
  43. data/client-app/tests/integration/components/back-trace-test.js +109 -0
  44. data/client-app/tests/integration/components/message-info-test.js +4 -3
  45. data/client-app/tests/integration/components/patterns-list-test.js +7 -2
  46. data/lib/logster.rb +1 -0
  47. data/lib/logster/base_store.rb +16 -9
  48. data/lib/logster/configuration.rb +12 -2
  49. data/lib/logster/defer_logger.rb +1 -1
  50. data/lib/logster/logger.rb +12 -0
  51. data/lib/logster/message.rb +89 -30
  52. data/lib/logster/middleware/viewer.rb +44 -8
  53. data/lib/logster/redis_store.rb +69 -51
  54. data/lib/logster/suppression_pattern.rb +1 -1
  55. data/lib/logster/version.rb +1 -1
  56. data/logster.gemspec +1 -1
  57. data/test/logster/middleware/test_viewer.rb +100 -0
  58. data/test/logster/test_base_store.rb +16 -0
  59. data/test/logster/test_defer_logger.rb +1 -1
  60. data/test/logster/test_message.rb +142 -54
  61. data/test/logster/test_redis_store.rb +99 -39
  62. metadata +11 -6
data/test/logster/test_base_store.rb

@@ -146,4 +146,20 @@ class TestBaseStore < Minitest::Test
     message = @store.report(Logger::WARN, "test", "H", backtrace: ["Foo", "Bar"])
     assert_equal("Foo\nBar", message.backtrace)
   end
+
+  def test_chained_loggers_dont_have_superfluous_frames_in_backtrace
+    logger = Logster::Logger.new(@store)
+    other_store = Logster::TestStore.new
+    other_logger = Logster::Logger.new(other_store)
+    logger.chain(other_logger)
+    logger.warn("this is warning")
+    [@store, other_store].each do |store|
+      message = store.reported.first
+      assert_equal("this is warning", message.message)
+      # the first line in the backtrace should be the method that
+      # called the warn/info/error etc. method.
+      # in this case the first line should be this test method
+      assert_includes(message.backtrace.lines.first, __method__.to_s)
+    end
+  end
 end
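The new test above pins down a subtle behaviour: when a logger forwards a message to chained loggers, the backtrace should start at the method that called warn/info/error, not at a frame inside the logging machinery. The sketch below is a minimal, self-contained illustration of that idea only; SketchLogger, Entry and reported are hypothetical names, not Logster's API. The backtrace is captured once, at the call site, and the same value is handed to every chained logger.

    # Minimal sketch of "capture once at the call site"; not Logster's code.
    class SketchLogger
      Entry = Struct.new(:severity, :message, :backtrace)

      attr_reader :reported

      def initialize
        @reported = []
        @chained = []
      end

      def chain(other)
        @chained << other
      end

      %i[debug info warn error fatal].each do |severity|
        # The default for `backtrace` is evaluated at call time, so `caller`
        # starts at whichever method invoked warn/info/error on this logger.
        define_method(severity) do |message, backtrace: caller|
          @reported << Entry.new(severity, message, backtrace.join("\n"))
          # Forward the already-captured backtrace so chained loggers don't
          # pick up this forwarding frame.
          @chained.each { |other| other.public_send(severity, message, backtrace: backtrace) }
        end
      end
    end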
data/test/logster/test_defer_logger.rb

@@ -29,7 +29,7 @@ class TestDeferLogger < Minitest::Test
     # Otherwise we'd only get a partial backtrace from
     # the point the new thread was spawned
     backtrace = @store.calls.first[3][:backtrace]
-    assert_instance_of(Array, backtrace)
+    assert_includes(backtrace.lines.first, __method__.to_s)
 
     assert_equal(0, queue.size)
   end
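The tightened assertion above follows from the same principle: a deferred logger has to grab the backtrace on the calling thread, before the work is queued, because a backtrace taken inside the worker thread only reaches back to where that thread was spawned. A rough sketch of the pattern (DeferSketch is a hypothetical name, not the gem's class):

    class DeferSketch
      def initialize(&handler)
        @queue = Queue.new
        @handler = handler
        @worker = Thread.new do
          # Runs queued jobs; `caller` evaluated here would start at this
          # Thread.new block, which is why the backtrace is captured earlier.
          while (job = @queue.pop)
            @handler.call(*job)
          end
        end
      end

      def log(severity, message)
        backtrace = caller.join("\n") # captured on the calling thread
        @queue << [severity, message, backtrace]
      end

      def stop
        @queue << nil
        @worker.join
      end
    end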
data/test/logster/test_message.rb

@@ -11,32 +11,38 @@ class TestMessage < MiniTest::Test
 
     msg2 = Logster::Message.new(0, '', 'test', 20)
     msg2.populate_from_env(a: "2", c: "3")
-
     assert_equal(msg2.grouping_key, msg1.grouping_key)
-
     msg1.merge_similar_message(msg2)
 
     assert_equal(20, msg1.timestamp)
     assert_equal(10, msg1.first_timestamp)
-
-    assert Array === msg1.env
-    assert_equal(msg1.env.size, 2)
-    assert({ a: "1", b: "2" } <= msg1.env[0])
-    assert({ "a" => "2", "c" => "3" } <= msg1.env[1])
+    assert_equal(msg1.env_buffer, [msg2.env])
   end
 
-  def test_merge_adds_timestamp_to_env
-    time1 = Time.new(2010, 1, 1, 1, 1).to_i
-    msg1 = Logster::Message.new(0, '', 'test', time1)
-    msg1.env = { a: "aa", b: "bb" }
-
-    time2 = Time.new(2011, 1, 1, 1, 1).to_i
-    msg2 = Logster::Message.new(0, '', 'test', time2)
-    msg2.env = { e: "ee", f: "ff" }
-
-    msg1.merge_similar_message(msg2)
-    assert_equal(time1, msg1.env[0]["time"])
-    assert_equal(time2, msg1.env[1]["time"])
+  def test_populate_from_env_will_add_time_to_env_unless_it_already_exists
+    t = (Time.new.to_f * 1000).to_i
+    msg = Logster::Message.new(0, '', 'test', t)
+    msg.populate_from_env({})
+    assert_equal(t, msg.env["time"])
+
+    msg = Logster::Message.new(0, '', 'test', t)
+    msg.populate_from_env(time: 5)
+    assert_nil(msg.env["time"])
+    assert_equal(5, msg.env[:time])
+
+    msg = Logster::Message.new(0, '', 'test', t)
+    msg.populate_from_env("time" => 6)
+    assert_equal(6, msg.env["time"])
+    assert_nil(msg.env[:time])
+
+    msg = Logster::Message.new(0, '', 'test', t)
+    msg.populate_from_env([{ "time" => 6 }, { "time" => 8 }])
+    assert_equal([6, 8], msg.env.map { |e| e["time"] })
+    assert_equal([nil, nil], msg.env.map { |e| e[:time] })
+
+    msg = Logster::Message.new(0, '', 'test', t)
+    msg.populate_from_env([{ dsd: 6 }, { dsd: 8 }])
+    assert_equal([t, t], msg.env.map { |e| e["time"] })
   end
 
   def test_merge_messages_both_with_array_envs
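Inferred from the assertions in the new populate_from_env test (this helper is illustrative only, not the gem's implementation): each env sample gets stamped with the message timestamp unless it already carries a "time" or :time key, and the rule applies per sample when the env is an array.

    def stamp_time(env, timestamp)
      samples = env.is_a?(Array) ? env : [env]
      samples.each do |sample|
        next if sample.key?("time") || sample.key?(:time)
        sample["time"] = timestamp
      end
      env
    end

    stamp_time({}, 777)                       # => { "time" => 777 }
    stamp_time({ time: 5 }, 777)              # => { time: 5 } (left untouched)
    stamp_time([{ dsd: 6 }, { dsd: 8 }], 777) # each sample gets "time" => 777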
@@ -47,26 +53,18 @@ class TestMessage < MiniTest::Test
     msg2.env = [{ e: "ee", f: "ff" }, { g: "gg", h: "hh" }]
 
     msg1.merge_similar_message(msg2)
-
-    # new env should be an array, but it should never have
-    # another array of envs within itself (hence flatten(1))
-    assert_equal(msg1.env.size, 4)
-    assert_equal(msg1.env.map(&:keys).flatten(1).map(&:to_s), %w{a b c d e f g h})
-    assert_equal(msg1.env.map(&:values).flatten(1).map(&:to_s), %w{aa bb cc dd ee ff gg hh})
+    assert_equal(msg2.env, msg1.env_buffer)
   end
 
   def test_merge_messages_one_with_array_envs
     msg1 = Logster::Message.new(0, '', 'test', 10)
-    msg1.env = [{ a: "aa", b: "bb" }, { c: "cc", d: "dd" }]
+    msg1.env = { e: "ee", f: "ff" }
 
     msg2 = Logster::Message.new(0, '', 'test', 20)
-    msg2.env = { e: "ee", f: "ff" }
+    msg2.env = [{ a: "aa", b: "bb" }, { c: "cc", d: "dd" }]
 
     msg1.merge_similar_message(msg2)
-
-    assert_equal(msg1.env.size, 3)
-    assert_equal(msg1.env.map(&:keys).flatten(1).map(&:to_s), %w{a b c d e f time})
-    assert_equal(msg1.env.map(&:values).flatten(1).map(&:to_s), %w{aa bb cc dd ee ff 20})
+    assert_equal(msg2.env, msg1.env_buffer)
   end
 
   def test_adds_application_version
@@ -87,8 +85,7 @@ class TestMessage < MiniTest::Test
 
     assert_equal(msg1.grouping_key, msg2.grouping_key)
 
-    save_env = msg1.merge_similar_message(msg2)
-    assert(save_env)
+    msg1.merge_similar_message(msg2)
     assert_equal(msg1.count, 15 + 13)
   end
 
@@ -120,19 +117,17 @@ class TestMessage < MiniTest::Test
     assert hash <= msg.env[0]
   end
 
-  def test_ensure_env_samples_dont_exceed_50
+  def test_merging_envs_add_new_envs_to_buffer
     msg1 = Logster::Message.new(0, '', 'test', 10, count: 50)
-    msg1.env = [{ a: 1 }]
+    msg1.env = 50.times.map { |n| { a: n } }
     msg2 = Logster::Message.new(0, '', 'test', 20, count: 13)
-    msg2.env = { b: 2 }
+    msg2.env = 13.times.map { |n| { b: n } }
 
     assert_equal(msg1.grouping_key, msg2.grouping_key)
 
-    save_env = msg1.merge_similar_message(msg2)
-
-    refute(save_env)
+    msg1.merge_similar_message(msg2)
     assert_equal(63, msg1.count) # update count
-    assert_equal([{ a: 1 }], msg1.env) # but don't merge msg2's env into msg1's
+    assert_equal(msg2.env, msg1.env_buffer)
   end
 
   def test_message_to_h_respects_params
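Taken together, the rewritten merge tests describe the new contract: merging a similar message bumps the count but parks the incoming env samples in an env_buffer instead of splicing them into env right away. A toy model of that contract, inferred from the assertions rather than copied from the gem:

    class MergeSketch
      attr_accessor :env
      attr_reader :count, :env_buffer

      def initialize(env, count: 1)
        @env = env
        @count = count
        @env_buffer = []
      end

      def merge_similar_message(other)
        @count += other.count
        # Buffer the new samples; whoever persists the message can flush the
        # buffer (and enforce size limits) later.
        incoming = other.env.is_a?(Array) ? other.env : [other.env]
        @env_buffer.concat(incoming)
      end
    end

    a = MergeSketch.new(50.times.map { |n| { a: n } }, count: 50)
    b = MergeSketch.new(13.times.map { |n| { b: n } }, count: 13)
    a.merge_similar_message(b)
    a.count      # => 63
    a.env_buffer # => b.env, exactly as the test above expects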
@@ -157,21 +152,114 @@ class TestMessage < MiniTest::Test
     assert_equal(600 + 3, msg.message.size)
   end
 
-  def test_env_is_not_merged_into_similar_message_if_size_will_be_too_large
-    default = Logster.config.maximum_message_size_bytes
-    Logster.config.maximum_message_size_bytes = 1000
-    message = Logster::Message.new(Logger::INFO, "test", "message", count: 13)
-    env = [{ key1: "this is my first key", key2: "this is my second key" }] * 13
-    message.env = env
+  def test_drop_redundant_envs
+    message = Logster::Message.new(Logger::WARN, '', 'message')
+    message.env = [{ a: 4 }] * 10
+    assert_equal(10, message.env.size)
+    message.drop_redundant_envs(5)
+    assert_equal(5, message.env.size)
+
+    env = { f: 5, g: 4 }
+    message.env = env.dup
+    message.drop_redundant_envs(1)
+    assert_equal(env, message.env)
+  end
 
-    message2 = Logster::Message.new(Logger::INFO, "test", "message")
-    message2.env = env.first
-    message.merge_similar_message(message2)
+  def test_apply_env_size_limit_keeps_as_many_keys_as_possible
+    message = Logster::Message.new(Logger::WARN, '', 'message', 1)
+    env = { a: 1, bb: 22, ccc: 333 }
+    message.env = env.dup
+    message.apply_env_size_limit(24)
+    assert_operator(message.env.to_json.bytesize, :<=, 24)
+    assert_equal({ a: 1, bb: 22 }.to_json.bytesize, message.env.to_json.bytesize)
+
+    message.env = [env.dup] * 5
+    message.apply_env_size_limit(24)
+    assert_equal(5, message.env.size)
+    message.env.each do |e|
+      assert_operator(e.to_json.bytesize, :<=, 24)
+      assert_equal({ a: 1, bb: 22 }.to_json.bytesize, e.to_json.bytesize)
+    end
+
+    message.env = env.dup
+    message.apply_env_size_limit(25)
+    assert_operator(message.env.to_json.bytesize, :<=, 25)
+    assert_equal({ a: 1, bb: 22, ccc: 333 }.to_json.bytesize, message.env.to_json.bytesize)
+  end
 
-    # env isn't merged, but count is incremented
-    assert_equal(13, message.env.size)
-    assert_equal(14, message.count)
-  ensure
-    Logster.config.maximum_message_size_bytes = default
+  def test_apply_message_size_limit_removes_gems_dir_from_backtrace_to_keep_total_message_size_below_limit
+    backtrace = <<~TEXT
+      /var/www/discourse/vendor/bundle/ruby/2.6.0/gems/rails_multisite-2.0.7/lib/rails_multisite/connection_management.rb:220:in `with_connection'
+      /var/www/discourse/vendor/bundle/ruby/2.6.0/gems/rails_multisite-2.0.7/lib/rails_multisite/connection_management.rb:60:in `with_connection'
+      /var/www/discourse/lib/scheduler/defer.rb:89:in `do_work'
+      /var/www/discourse/lib/scheduler/defer.rb:79:in `block (2 levels) in start_thread'
+    TEXT
+    without_gems_dir = <<~TEXT
+      rails_multisite-2.0.7/lib/rails_multisite/connection_management.rb:220:in `with_connection'
+      rails_multisite-2.0.7/lib/rails_multisite/connection_management.rb:60:in `with_connection'
+      /var/www/discourse/lib/scheduler/defer.rb:89:in `do_work'
+      /var/www/discourse/lib/scheduler/defer.rb:79:in `block (2 levels) in start_thread'
+    TEXT
+    gems_dir = "/var/www/discourse/vendor/bundle/ruby/2.6.0/gems/"
+    message = Logster::Message.new(Logger::WARN, '', 'message', 1)
+
+    message.backtrace = backtrace.dup
+    assert_operator(message.to_json(exclude_env: true).bytesize, :>=, 500)
+    message.apply_message_size_limit(500, gems_dir: gems_dir)
+    assert_operator(message.to_json(exclude_env: true).bytesize, :<=, 500)
+    assert_equal(without_gems_dir.strip, message.backtrace.strip)
+  end
+
+  def test_apply_message_size_limit_removes_lines_from_backtrace_to_keep_total_size_below_limit
+    backtrace = <<~TEXT
+      rails_multisite-2.0.7/lib/rails_multisite/connection_management.rb:220:in `with_connection'
+      rails_multisite-2.0.7/lib/rails_multisite/connection_management.rb:60:in `with_connection'
+      /var/www/discourse/lib/scheduler/defer.rb:89:in `do_work'
+      /var/www/discourse/lib/scheduler/defer.rb:79:in `block (2 levels) in start_thread'
+    TEXT
+
+    expected = <<~TEXT
+      rails_multisite-2.0.7/lib/rails_multisite/connection_management.rb:220:in `with_connection'
+      rails_multisite-2.0.7/lib/rails_multisite/connection_management.rb:60:in `with_connection'
+      /var/www/discourse
+    TEXT
+    message = Logster::Message.new(Logger::WARN, '', 'message', 1)
+    message.backtrace = backtrace.dup
+    assert_operator(message.to_json(exclude_env: true).bytesize, :>=, 350)
+    message.apply_message_size_limit(350)
+    assert_operator(message.to_json(exclude_env: true).bytesize, :<=, 350)
+    assert_equal(expected.strip, message.backtrace.strip)
+  end
+
+  def test_truncate_backtrace_shouldnt_corrupt_backtrace_when_it_contains_multibytes_characters
+    backtrace = "aहa"
+    message = Logster::Message.new(Logger::WARN, '', 'message', 1)
+    message.backtrace = backtrace.dup
+    message.truncate_backtrace(3)
+    assert_equal("a", message.backtrace)
+
+    message.backtrace = backtrace.dup
+    message.truncate_backtrace(4)
+    assert_equal("aह", message.backtrace)
+
+    message.backtrace = backtrace.dup
+    message.truncate_backtrace(5)
+    assert_equal(backtrace, message.backtrace)
+  end
+
+  def test_apply_message_size_limit_doesnt_remove_backtrace_entirely
+    message = Logster::Message.new(Logger::WARN, '', 'message', 1)
+    message.backtrace = "a" * 1000
+    assert_operator(message.to_json(exclude_env: true).bytesize, :>=, 500)
+    message.apply_message_size_limit(500)
+    assert_operator(message.to_json(exclude_env: true).bytesize, :<=, 500)
+    assert_equal(("a" * 354).size, message.backtrace.size)
+  end
+
+  def test_apply_message_size_limit_doesnt_hang_forever_and_doesnt_remove_backtrace_entirely
+    message = Logster::Message.new(Logger::WARN, '', 'message', 1)
+    message.backtrace = "aa" * 100
+    message.apply_message_size_limit(10)
+    assert_equal(("aa" * 100).size, message.backtrace.size)
   end
 end
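The multibyte test above ("aहa" is 5 bytes because "ह" alone takes 3) is the awkward edge case for any byte-based truncation. One way to get the expected results, shown here as an assumption rather than the gem's actual implementation, is to byteslice to the limit and then drop any trailing bytes that no longer form a valid UTF-8 character:

    def truncate_utf8(str, max_bytes)
      return str if str.bytesize <= max_bytes

      truncated = str.byteslice(0, max_bytes)
      # scrub("") deletes the partially cut character at the end instead of
      # replacing it with U+FFFD.
      truncated.scrub("")
    end

    truncate_utf8("aहa", 3) # => "a"    (the cut lands inside "ह")
    truncate_utf8("aहa", 4) # => "aह"
    truncate_utf8("aहa", 5) # => "aहa"  (already within the limit)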
data/test/logster/test_redis_store.rb

@@ -223,10 +223,11 @@ class TestRedisStore < Minitest::Test
       msg = messages[n]
       assert_equal("test_#{n}", msg.message)
       if n == 0
-        assert_equal(Logster::Message.default_env, msg.env)
+        assert_equal(Logster::Message.default_env.merge("time" => msg.timestamp), msg.env)
       else
         assert({ "test_#{n}" => "envsss" } <= msg.env)
       end
+      assert_equal(msg.timestamp, msg.env["time"])
     end
   end
@@ -241,43 +242,50 @@ class TestRedisStore < Minitest::Test
     old_env = { "old_env" => "old value" }
     message = @store.report(Logger::WARN, "test", "A", env: old_env)
 
-    unsaved_env = { "unsaved_env" => "lost value" }
-    message.env = unsaved_env
+    extra_env = { "saved_env" => "saved value!" }
+    similar = @store.report(Logger::WARN, 'test', 'A', env: extra_env)
+    message.merge_similar_message(similar)
 
-    @store.replace_and_bump(message, save_env: false)
+    @store.replace_and_bump(message)
 
     message = @store.get(message.key)
-    assert(old_env <= message.env)
-    refute(unsaved_env <= message.env)
-
-    saved_env = { "saved_env" => "saved value!" }
-    message.env = saved_env
-
-    @store.replace_and_bump(message)
+    assert(extra_env <= message.env[0])
+    assert(old_env <= message.env[1])
+  end
 
+  def test_ensure_env_doesnt_exceed_50_item
+    Logster.config.allow_grouping = true
+    message = nil
+    52.times do |n|
+      message = @store.report(Logger::WARN, "", "mssage", env: { a: n })
+    end
     message = @store.get(message.key)
-    assert(saved_env == message.env)
+    assert_equal(52, message.count)
+    assert_equal(50, message.env.size)
+    assert_equal((2..51).to_a, message.env.map { |e| e[:a] || e["a"] }.sort)
+  ensure
+    Logster.config.allow_grouping = false
   end
 
   def test_merging_performance
     Logster.config.allow_grouping = true
     backtrace = "fake backtrace"
-    env = { "some_env" => "some env" }
-    another_env = { "another_env" => "more env" }
-    yet_another_env = { "moaar_env" => "more env" }
-
-    @store.report(Logger::WARN, "", "title", backtrace: backtrace, env: env, count: 49)
+    env = [{ "some_env" => "some env" }] * 50
+    new_env = { "some_key" => "1234442" }
 
-    message = @store.report(Logger::WARN, "", "title", backtrace: backtrace, env: another_env)
-    assert_instance_of(Array, message.env)
-    assert_equal(2, message.env.size)
-    assert(env <= message.env[0])
-    assert(another_env <= message.env[1])
+    @store.report(Logger::WARN, "", "title", backtrace: backtrace, env: env, count: 50)
 
-    message = @store.report(Logger::WARN, "", "title", backtrace: backtrace, env: yet_another_env)
-    # we don't need to load env from redis cause we don't
-    # need to merge new env samples if count is 50 or more
+    message = @store.report(Logger::WARN, "", "title", backtrace: backtrace, env: new_env)
+    # env is nil cause we don't need to fetch it from redis
+    # we just send the newly added envs to redis and it'll
+    # take care of prepending them to the existing envs
    assert_nil(message.env)
+
+    message = @store.get(message.key)
+    assert_instance_of(Array, message.env)
+    assert_equal(50, message.env.size)
+    assert_equal(51, message.count)
+    assert(new_env <= message.env[0])
   ensure
     Logster.config.allow_grouping = false
   end
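The comment in test_merging_performance explains why message.env can stay nil on the reporting path: only the newly collected samples are sent to Redis, and Redis prepends and caps them without the existing samples ever being loaded back. A rough sketch of that shape using plain Redis list commands; the key naming and the helper are assumptions for illustration, not the gem's actual storage layout:

    require "json"
    require "redis"

    # Prepend the newest env sample for a grouped message and keep at most 50,
    # entirely inside Redis, so the merged message never has to be read back.
    def push_env_sample(redis, message_key, env)
      env_key = "logster-env-#{message_key}" # hypothetical key naming
      redis.lpush(env_key, JSON.generate(env))
      redis.ltrim(env_key, 0, 49)
    end

    # push_env_sample(Redis.new, "some-message-key", "some_key" => "1234442")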
@@ -548,7 +556,7 @@ class TestRedisStore < Minitest::Test
     # message2 shouldn't vanish even if
     # its env matches an ignore pattern
     # however it should be merged with message1
-    assert_equal("business17", message.env[1]["cluster"])
+    assert_equal("business17", message.env[0]["cluster"])
   ensure
     # reset so it doesn't affect other tests
     @store.ignore = nil
@@ -833,19 +841,46 @@ class TestRedisStore < Minitest::Test
     end
   end
 
-  def test_store_trims_too_big_envs
-    default = Logster.config.maximum_message_size_bytes
-    Logster.config.maximum_message_size_bytes = 1000
-    message = Logster::Message.new(Logger::INFO, "test", "message")
-    env = [{ key1: "this is my first key", key2: "this is my second key" }] * 40
-    message.env = env
-    @store.save(message)
-    trimmed_message = @store.latest.first
-    assert_equal(13, trimmed_message.env.size)
-    size = message.to_json(exclude_env: true).bytesize + message.env_json.bytesize
-    assert_operator(1000, :>, size)
-  ensure
-    Logster.config.maximum_message_size_bytes = default
+  def test_ensure_messages_meet_config_size_limits_when_messages_are_saved
+    config_reset(
+      maximum_message_size_bytes: 300,
+      max_env_bytes: 30,
+      max_env_count_per_message: 5
+    ) do
+      env = [{ aaa: 111, bbb: 222, ccc: 333, ddd: 444 }] * 7
+      message = @store.report(Logger::WARN, '', 'test', backtrace: "aa\n" * 100, env: env.dup, timestamp: 777)
+      message = @store.get(message.key)
+      assert_operator(message.to_json(exclude_env: true).bytesize, :<, 300)
+      assert_equal(5, message.env.size)
+      message.env.each do |e|
+        assert_operator(e.to_json.bytesize, :<=, 30)
+        assert_equal({ "aaa" => 111, "time" => 777 }, e)
+      end
+    end
+  end
+
+  def test_ensure_messages_meet_config_size_limits_when_merged_together
+
+    config_reset(
+      max_env_bytes: 30,
+      max_env_count_per_message: 5,
+      allow_grouping: true
+    ) do
+      env = [{ a: 1, aa: 22, aaa: 333, aaaa: 4444 }] * 3
+      env_2 = [{ b: 1, bb: 22, bbb: 333, bbbb: 4444 }] * 3
+      @store.report(Logger::WARN, '', 'test', backtrace: "aa\n" * 100, env: env.dup, timestamp: 777)
+      message = @store.report(Logger::WARN, '', 'test', backtrace: "aa\n" * 100, env: env_2.dup, timestamp: 777)
+      message = @store.get(message.key)
+      assert_equal(5, message.env.size)
+      message.env.first(3).each do |e|
+        assert_operator(e.to_json.bytesize, :<=, 30)
+        assert_equal({ "b" => 1, "bb" => 22, "time" => 777 }, e)
+      end
+      message.env.last(2).each do |e|
+        assert_operator(e.to_json.bytesize, :<=, 30)
+        assert_equal({ "a" => 1, "aa" => 22, "time" => 777 }, e)
+      end
+    end
   end
 
   def test_custom_grouping_patterns
@@ -997,8 +1032,33 @@ class TestRedisStore < Minitest::Test
     Logster::Group.remove_instance_variable(:@max_size)
   end
 
+  def test_truncated_messages_when_they_are_similar_can_still_be_merged
+    config_reset(allow_grouping: true) do
+      backtrace = "a" * Logster.config.maximum_message_size_bytes
+      title = "sasasas"
+      msg = @store.report(Logger::WARN, '', title, backtrace: backtrace.dup)
+      msg2 = @store.report(Logger::WARN, '', title, backtrace: backtrace.dup)
+      assert_equal(msg.key, msg2.key)
+      assert_operator(msg.to_json(exclude_env: true).bytesize, :<=, Logster.config.maximum_message_size_bytes)
+      assert_operator(msg.backtrace.size, :<, backtrace.size)
+    end
+  end
+
   private
 
+  def config_reset(configs)
+    defaults = {}
+    configs.each do |k, v|
+      defaults[k] = Logster.config.public_send(k)
+      Logster.config.public_send("#{k}=", v)
+    end
+    yield
+  ensure
+    defaults.each do |k, v|
+      Logster.config.public_send("#{k}=", v)
+    end
+  end
+
 
   def reset_redis
     @store.clear_all
   end
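The new tests lean on several size-limit settings, and the setters are exactly the keys passed to config_reset above. In an application they would typically be set once at boot; the values below are purely illustrative, not recommended defaults, and the comments paraphrase what the tests exercise:

    # e.g. config/initializers/logster.rb
    Logster.config.maximum_message_size_bytes = 10_000 # cap on the serialized message
    Logster.config.max_env_bytes = 1_000               # cap per env sample
    Logster.config.max_env_count_per_message = 50      # env samples kept per grouped message
    Logster.config.allow_grouping = true               # merge similar messages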