logster 2.11.0 → 2.11.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -43,6 +43,7 @@
     "ember-maybe-import-regenerator": "^0.1.6",
     "ember-qunit": "^4.6.0",
     "ember-resolver": "^7.0.0",
+    "ember-sinon-qunit": "^6.0.0",
     "ember-source": "^3.15.0",
     "eslint-plugin-ember": "^7.13.0",
     "eslint-plugin-node": "^10.0.0",
@@ -57,6 +58,6 @@
     "edition": "octane"
   },
   "dependencies": {
-    "moment": "~2.22.2"
+    "moment": "~2.29.4"
   }
 }
@@ -1,12 +1,30 @@
 import { module, test } from 'qunit';
 import { setupTest } from 'ember-qunit';
+import { default as MessageCollection } from "client-app/models/message-collection";
+import sinon from "sinon";
+import * as utilities from "client-app/lib/utilities";
 
-module('Unit | Controller | index', function(hooks) {
+module('Unit | Controller | index', function (hooks) {
   setupTest(hooks);
 
-  // Replace this with your real tests.
-  test('it exists', function(assert) {
-    let controller = this.owner.lookup('controller:index');
-    assert.ok(controller);
+  test('uses search param to filter results', function (assert) {
+    const controller = this.owner.lookup('controller:index');
+    const ajaxStub = sinon.stub(utilities, 'ajax')
+    const messages = MessageCollection.create();
+    const row1 = {"message": "error tomtom", "severity": 2, "key": "ce1f53b0cc"};
+    const row2 = {"message": "error steaky", "severity": 3, "key": "b083352825"};
+
+    messages.rows.addObjects([row1, row2]);
+    controller.set("model", messages)
+
+    assert.equal(controller.searchTerm, null, 'initial value is null');
+    assert.deepEqual(controller.model.rows, [row1, row2], 'all rows');
+
+    ajaxStub.callsFake(() => Promise.resolve({search: "tomtom", filter: [5], messages: []}));
+    controller.set("search", "tomtom");
+
+    assert.equal(controller.searchTerm, "tomtom", 'search sets search term');
+    assert.equal(ajaxStub.firstCall.args[0], "/messages.json", "get messages");
+    assert.deepEqual(ajaxStub.firstCall.args[1], {"data": {"filter": "5", "search": "tomtom"}}, "with correct terms");
   });
 });
@@ -4,7 +4,7 @@ require 'logger'
 
 module Logster
   class Logger < ::Logger
-    LOGSTER_ENV = "logster_env".freeze
+    LOGSTER_ENV = "logster_env"
 
     attr_accessor :store, :skip_store
     attr_reader :chained
@@ -12,21 +12,17 @@ module Logster
     def initialize(store)
       super(nil)
       @store = store
-      @override_levels = nil
       @chained = []
       @skip_store = false
+      @logster_override_level_key = "logster_override_level_#{object_id}"
     end
 
     def override_level=(val)
-      tid = Thread.current.object_id
-
-      ol = @override_levels
-      if val.nil? && ol && ol.key?(tid)
-        ol.delete(tid)
-        @override_levels = nil if ol.length == 0
-      elsif val
-        (@override_levels ||= {})[tid] = val
-      end
+      Thread.current[@logster_override_level_key] = val
+    end
+
+    def override_level
+      Thread.current[@logster_override_level_key]
     end
 
     def chain(logger)
@@ -55,8 +51,7 @@ module Logster
55
51
  end
56
52
 
57
53
  def level
58
- ol = @override_levels
59
- (ol && ol[Thread.current.object_id]) || @level
54
+ Thread.current[@logster_override_level_key] || @level
60
55
  end
61
56
 
62
57
  def add_with_opts(severity, message = nil, progname = progname(), opts = nil, &block)
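Note on the override_level rework above: the hash keyed by Thread.current.object_id is replaced with plain thread-local storage under a key that embeds the logger's own object_id, and a public override_level reader is added. A minimal sketch of the resulting behavior (assumptions: the in-memory Logster::TestStore that the gem's own tests use, and Logger::WARN as the override value):

    require "logster"

    store   = Logster::TestStore.new          # assumption: any Logster store would do
    logger  = Logster::Logger.new(store)
    logger2 = Logster::Logger.new(store)

    logger.override_level = ::Logger::WARN    # 2, the same value the updated test uses
    logger.level                              # => 2, but only on this thread
    logger2.level                             # unaffected: the thread-local key embeds object_id
    Thread.new { logger.level }.value         # other threads still see the base level
    logger.override_level = nil               # clears the override for this thread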
@@ -81,8 +81,11 @@
 
     def self.hostname
       @hostname ||= begin
-        command = (Logster.config.use_full_hostname ? `hostname -f` : `hostname`) rescue "<unknown>"
+        command = Logster.config.use_full_hostname ? `hostname -f` : `hostname`
         command.strip!
+        command
+      rescue
+        "<unknown>"
       end
     end
 
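The hostname change does two things: the rescue now covers the whole begin block rather than just the shell-out, and the block returns command explicitly. The explicit return matters because String#strip! returns nil when it removes nothing, so the old block's value was whatever strip! returned, which is nil after the "<unknown>" fallback:

    "myhost\n".strip!    # => "myhost"
    "<unknown>".strip!   # => nil   (hence the added explicit return of command)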
@@ -27,10 +27,10 @@ module Logster
         end
       end
 
-      @redis.multi do
-        @redis.hset(grouping_key, message.grouping_key, message.key)
-        @redis.rpush(list_key, message.key)
-        update_message(message, save_env: true)
+      @redis.multi do |pipeline|
+        pipeline.hset(grouping_key, message.grouping_key, message.key)
+        pipeline.rpush(list_key, message.key)
+        update_message(message, save_env: true, redis: pipeline)
       end
 
       trim
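The multi/pipelined changes in this file (here and in the hunks below) stop issuing commands on @redis inside the block and instead use the object the block yields, threading it into the helper methods via a redis: keyword argument. This matches the block-argument style that newer redis-rb releases expect, where calling the client itself inside multi/pipelined is deprecated. A rough sketch of the pattern, outside Logster:

    require "redis"

    redis = Redis.new

    # Deprecated style: commands sent on the client while a MULTI is open.
    #   redis.multi { redis.set("a", 1) }

    # Current style: commands are queued on the yielded transaction/pipeline object.
    redis.multi do |transaction|
      transaction.set("a", 1)
      transaction.incr("b")
    end

    redis.pipelined do |pipeline|
      pipeline.rpush("list", "x")
      pipeline.ltrim("list", 0, 99)
    end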
@@ -41,30 +41,30 @@ module Logster
 
     def delete(msg)
       groups = find_pattern_groups() { |pat| msg.message =~ pat }
-      @redis.multi do
+      @redis.multi do |pipeline|
         groups.each do |group|
           group.remove_message(msg)
-          save_pattern_group(group) if group.changed?
+          save_pattern_group(group, redis: pipeline) if group.changed?
         end
-        @redis.hdel(hash_key, msg.key)
-        delete_env(msg.key)
-        @redis.hdel(grouping_key, msg.grouping_key)
-        @redis.lrem(list_key, -1, msg.key)
+        pipeline.hdel(hash_key, msg.key)
+        delete_env(msg.key, redis: pipeline)
+        pipeline.hdel(grouping_key, msg.grouping_key)
+        pipeline.lrem(list_key, -1, msg.key)
       end
     end
 
     def bulk_delete(message_keys, grouping_keys)
       groups = find_pattern_groups(load_messages: true)
-      @redis.multi do
+      @redis.multi do |pipeline|
         groups.each do |group|
           group.messages = group.messages.reject { |m| message_keys.include?(m.key) }
-          save_pattern_group(group) if group.changed?
+          save_pattern_group(group, redis: pipeline) if group.changed?
         end
-        @redis.hdel(hash_key, message_keys)
-        @redis.hdel(grouping_key, grouping_keys)
+        pipeline.hdel(hash_key, message_keys)
+        pipeline.hdel(grouping_key, grouping_keys)
         message_keys.each do |k|
-          @redis.lrem(list_key, -1, k)
-          delete_env(k)
+          pipeline.lrem(list_key, -1, k)
+          delete_env(k, redis: pipeline)
         end
       end
     end
@@ -74,11 +74,11 @@ module Logster
       exists = @redis.hexists(hash_key, message.key)
       return false unless exists
 
-      @redis.multi do
-        @redis.hset(hash_key, message.key, message.to_json(exclude_env: true))
-        push_env(message.key, message.env_buffer) if message.has_env_buffer?
-        @redis.lrem(list_key, -1, message.key)
-        @redis.rpush(list_key, message.key)
+      @redis.multi do |pipeline|
+        pipeline.hset(hash_key, message.key, message.to_json(exclude_env: true))
+        push_env(message.key, message.env_buffer, redis: pipeline) if message.has_env_buffer?
+        pipeline.lrem(list_key, -1, message.key)
+        pipeline.rpush(list_key, message.key)
       end
       message.env_buffer = [] if message.has_env_buffer?
       check_rate_limits(message.severity)
@@ -188,11 +188,12 @@ module Logster
         .sort
         .map(&:key)
 
-      @redis.pipelined do
+      @redis.pipelined do |pipeline|
         sorted.each do |message_key|
-          @redis.rpush(list_key, message_key)
+          pipeline.rpush(list_key, message_key)
         end
       end
+
       find_pattern_groups(load_messages: true).each do |group|
         group.messages = group.messages.select { |m| sorted.include?(m.key) }
         save_pattern_group(group) if group.changed?
@@ -317,11 +318,11 @@ module Logster
     end
 
     def insert_pattern(set_name, pattern)
-      @redis.sadd(set_name, pattern)
+      @redis.sadd(set_name, [pattern])
     end
 
     def remove_pattern(set_name, pattern)
-      @redis.srem(set_name, pattern)
+      @redis.srem(set_name, [pattern])
     end
 
     def get_patterns(set_name)
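Wrapping the single member in an array for sadd/srem (here and in update_message below) is presumably also a redis-rb compatibility tweak: newer redis-rb changes the return type of the single-member form and offers sadd?/srem? for a Boolean, while the array form consistently returns the Integer count, so these calls keep working without deprecation warnings. Illustrative only:

    require "redis"

    redis = Redis.new
    redis.sadd("patterns", ["/foo/"])    # => Integer count of members added
    redis.srem("patterns", ["/foo/"])    # => Integer count of members removed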
@@ -383,11 +384,11 @@ module Logster
       jsons
     end
 
-    def save_pattern_group(group)
+    def save_pattern_group(group, redis: @redis)
       if group.messages_keys.size == 0
-        @redis.hdel(pattern_groups_key, group.key)
+        redis.hdel(pattern_groups_key, group.key)
       else
-        @redis.hset(pattern_groups_key, group.key, group.to_json)
+        redis.hset(pattern_groups_key, group.key, group.to_json)
       end
     end
 
@@ -429,13 +430,13 @@ module Logster
       end
     end
 
-    def update_message(message, save_env: false)
-      @redis.hset(hash_key, message.key, message.to_json(exclude_env: true))
-      push_env(message.key, message.env) if save_env
+    def update_message(message, save_env: false, redis: @redis)
+      redis.hset(hash_key, message.key, message.to_json(exclude_env: true))
+      push_env(message.key, message.env, redis: redis) if save_env
       if message.protected
-        @redis.sadd(protected_key, message.key)
+        redis.sadd(protected_key, [message.key])
       else
-        @redis.srem(protected_key, message.key)
+        redis.srem(protected_key, [message.key])
       end
     end
 
@@ -640,15 +641,15 @@ module Logster
       rate_limiter
     end
 
-    def push_env(message_key, env)
+    def push_env(message_key, env, redis: @redis)
       prefixed = env_prefix(message_key)
       env = [env] unless Array === env
-      @redis.lpush(prefixed, env.map(&:to_json).reverse)
-      @redis.ltrim(prefixed, 0, Logster.config.max_env_count_per_message - 1)
+      redis.lpush(prefixed, env.map(&:to_json).reverse)
+      redis.ltrim(prefixed, 0, Logster.config.max_env_count_per_message - 1)
     end
 
-    def delete_env(message_key)
-      @redis.del(env_prefix(message_key))
+    def delete_env(message_key, redis: @redis)
+      redis.del(env_prefix(message_key))
     end
 
     def env_unprefix(key, with_namespace: false)
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 module Logster
-  VERSION = "2.11.0"
+  VERSION = "2.11.3"
 end
@@ -26,6 +26,12 @@ class TestLogger < Minitest::Test
   end
 
   def test_per_thread_override
+    logger2 = Logster::Logger.new(@store)
+    logger2.override_level = 2
+
+    # we should not leak between objects
+    assert_nil @logger.override_level
+
     @logger.override_level = 2
 
     @logger.add(0, "test", "prog", backtrace: "backtrace", env: { a: "x" })
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logster
 version: !ruby/object:Gem::Version
-  version: 2.11.0
+  version: 2.11.3
 platform: ruby
 authors:
 - Sam Saffron
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2022-03-11 00:00:00.000000000 Z
+date: 2022-08-25 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler