vinted-prometheus-client-mmap 1.5.0-x86_64-linux

Files changed (57)
  1. checksums.yaml +7 -0
  2. data/README.md +5 -0
  3. data/ext/fast_mmaped_file_rs/Cargo.toml +40 -0
  4. data/ext/fast_mmaped_file_rs/README.md +52 -0
  5. data/ext/fast_mmaped_file_rs/build.rs +7 -0
  6. data/ext/fast_mmaped_file_rs/extconf.rb +28 -0
  7. data/ext/fast_mmaped_file_rs/src/error.rs +174 -0
  8. data/ext/fast_mmaped_file_rs/src/exemplars.rs +25 -0
  9. data/ext/fast_mmaped_file_rs/src/file_entry.rs +1252 -0
  10. data/ext/fast_mmaped_file_rs/src/file_info.rs +240 -0
  11. data/ext/fast_mmaped_file_rs/src/lib.rs +89 -0
  12. data/ext/fast_mmaped_file_rs/src/macros.rs +14 -0
  13. data/ext/fast_mmaped_file_rs/src/map.rs +519 -0
  14. data/ext/fast_mmaped_file_rs/src/metrics.proto +153 -0
  15. data/ext/fast_mmaped_file_rs/src/mmap/inner.rs +775 -0
  16. data/ext/fast_mmaped_file_rs/src/mmap.rs +977 -0
  17. data/ext/fast_mmaped_file_rs/src/raw_entry.rs +547 -0
  18. data/ext/fast_mmaped_file_rs/src/testhelper.rs +222 -0
  19. data/ext/fast_mmaped_file_rs/src/util.rs +140 -0
  20. data/lib/.DS_Store +0 -0
  21. data/lib/2.7/fast_mmaped_file_rs.so +0 -0
  22. data/lib/3.0/fast_mmaped_file_rs.so +0 -0
  23. data/lib/3.1/fast_mmaped_file_rs.so +0 -0
  24. data/lib/3.2/fast_mmaped_file_rs.so +0 -0
  25. data/lib/3.3/fast_mmaped_file_rs.so +0 -0
  26. data/lib/prometheus/.DS_Store +0 -0
  27. data/lib/prometheus/client/configuration.rb +24 -0
  28. data/lib/prometheus/client/counter.rb +27 -0
  29. data/lib/prometheus/client/formats/protobuf.rb +93 -0
  30. data/lib/prometheus/client/formats/text.rb +85 -0
  31. data/lib/prometheus/client/gauge.rb +40 -0
  32. data/lib/prometheus/client/helper/entry_parser.rb +132 -0
  33. data/lib/prometheus/client/helper/file_locker.rb +50 -0
  34. data/lib/prometheus/client/helper/json_parser.rb +23 -0
  35. data/lib/prometheus/client/helper/metrics_processing.rb +45 -0
  36. data/lib/prometheus/client/helper/metrics_representation.rb +51 -0
  37. data/lib/prometheus/client/helper/mmaped_file.rb +64 -0
  38. data/lib/prometheus/client/helper/plain_file.rb +29 -0
  39. data/lib/prometheus/client/histogram.rb +80 -0
  40. data/lib/prometheus/client/label_set_validator.rb +85 -0
  41. data/lib/prometheus/client/metric.rb +80 -0
  42. data/lib/prometheus/client/mmaped_dict.rb +83 -0
  43. data/lib/prometheus/client/mmaped_value.rb +164 -0
  44. data/lib/prometheus/client/page_size.rb +17 -0
  45. data/lib/prometheus/client/push.rb +203 -0
  46. data/lib/prometheus/client/rack/collector.rb +88 -0
  47. data/lib/prometheus/client/rack/exporter.rb +102 -0
  48. data/lib/prometheus/client/registry.rb +65 -0
  49. data/lib/prometheus/client/simple_value.rb +31 -0
  50. data/lib/prometheus/client/summary.rb +69 -0
  51. data/lib/prometheus/client/support/puma.rb +44 -0
  52. data/lib/prometheus/client/support/unicorn.rb +35 -0
  53. data/lib/prometheus/client/uses_value_type.rb +20 -0
  54. data/lib/prometheus/client/version.rb +5 -0
  55. data/lib/prometheus/client.rb +58 -0
  56. data/lib/prometheus.rb +3 -0
  57. metadata +210 -0
data/lib/prometheus/client/helper/entry_parser.rb
@@ -0,0 +1,132 @@
+require 'prometheus/client/helper/json_parser'
+
+module Prometheus
+  module Client
+    module Helper
+      module EntryParser
+        class ParsingError < RuntimeError;
+        end
+
+        MINIMUM_SIZE = 8
+        START_POSITION = 8
+        VALUE_BYTES = 8
+        ENCODED_LENGTH_BYTES = 4
+
+        def used
+          slice(0..3).unpack('l')[0]
+        end
+
+        def parts
+          @parts ||= File.basename(filepath, '.db')
+                         .split('_')
+                         .map { |e| e.gsub(/-\d+$/, '') } # remove trailing -number
+        end
+
+        def type
+          parts[0].to_sym
+        end
+
+        def pid
+          (parts[2..-1] || []).join('_')
+        end
+
+        def multiprocess_mode
+          parts[1]
+        end
+
+        def empty?
+          size < MINIMUM_SIZE || used.zero?
+        end
+
+        def entries(ignore_errors = false)
+          return Enumerator.new {} if empty?
+
+          Enumerator.new do |yielder|
+            used_ = used # cache used to avoid unnecessary unpack operations
+
+            pos = START_POSITION # used + padding offset
+            while pos < used_ && pos < size && pos > 0
+              data = slice(pos..-1)
+              unless data
+                raise ParsingError, "data slice is nil at pos #{pos}" unless ignore_errors
+                pos += 8
+                next
+              end
+
+              encoded_len, first_encoded_bytes = data.unpack('LL')
+              if encoded_len.nil? || encoded_len.zero? || first_encoded_bytes.nil? || first_encoded_bytes.zero?
+                # do not parse empty data
+                pos += 8
+                next
+              end
+
+              entry_len = ENCODED_LENGTH_BYTES + encoded_len
+              padding_len = 8 - entry_len % 8
+
+              value_offset = entry_len + padding_len # align to 8 bytes
+              pos += value_offset
+
+              if value_offset > 0 && (pos + VALUE_BYTES) <= size # if positions are safe
+                yielder.yield data, encoded_len, value_offset, pos
+              else
+                raise ParsingError, "data slice is nil at pos #{pos}" unless ignore_errors
+              end
+              pos += VALUE_BYTES
+            end
+          end
+        end
+
+        def parsed_entries(ignore_errors = false)
+          result = entries(ignore_errors).map do |data, encoded_len, value_offset, _|
+            begin
+              encoded, value = data.unpack(format('@4A%d@%dd', encoded_len, value_offset))
+              [encoded, value]
+            rescue ArgumentError => e
+              Prometheus::Client.logger.debug("Error processing data: #{bin_to_hex(data[0, 7])} len: #{encoded_len} value_offset: #{value_offset}")
+              raise ParsingError, e unless ignore_errors
+            end
+          end
+
+          result.reject!(&:nil?) if ignore_errors
+          result
+        end
+
+        def to_metrics(metrics = {}, ignore_errors = false)
+          parsed_entries(ignore_errors).each do |key, value|
+            begin
+              metric_name, name, labelnames, labelvalues = JsonParser.load(key)
+              labelnames ||= []
+              labelvalues ||= []
+
+              metric = metrics.fetch(metric_name,
+                                     metric_name: metric_name,
+                                     help: 'Multiprocess metric',
+                                     type: type,
+                                     samples: [])
+              if type == :gauge
+                metric[:multiprocess_mode] = multiprocess_mode
+                metric[:samples] += [[name, labelnames.zip(labelvalues) + [['pid', pid]], value]]
+              else
+                # The duplicates and labels are fixed in the next for.
+                metric[:samples] += [[name, labelnames.zip(labelvalues), value]]
+              end
+              metrics[metric_name] = metric
+
+            rescue JSON::ParserError => e
+              raise ParsingError(e) unless ignore_errors
+            end
+          end
+
+          metrics.reject! { |e| e.nil? } if ignore_errors
+          metrics
+        end
+
+        private
+
+        def bin_to_hex(s)
+          s.each_byte.map { |b| b.to_s(16) }.join
+        end
+      end
+    end
+  end
+end
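Usage sketch (not part of the gem): EntryParser reads the on-disk layout the writer produces — a 4-byte little-endian "used" counter at offset 0, entries starting at byte 8, each entry being a 4-byte length plus a JSON-encoded key, padded to an 8-byte boundary, followed by an 8-byte double value. The snippet below is illustrative only; it packs one such entry in memory, wraps it in a small reader class defined here just for the example, and parses it back. It assumes the gem is installed.

require 'json'
require 'prometheus/client/helper/entry_parser'

# Hypothetical in-memory reader, defined only for this example; EntryParser
# just needs #slice, #size and #filepath (the filename encodes type/mode/pid).
class StringEntrySource
  include Prometheus::Client::Helper::EntryParser
  attr_reader :filepath

  def initialize(filepath, data)
    @filepath = filepath
    @data = data
  end

  def slice(*args)
    @data.slice(*args)
  end

  def size
    @data.bytesize
  end
end

key = ['requests_total', 'requests_total', ['code'], ['200']].to_json
entry = [key.bytesize].pack('L') + key       # 4-byte length + JSON key
entry += "\x00" * (8 - entry.bytesize % 8)   # pad to an 8-byte boundary
entry += [42.0].pack('d')                    # 8-byte double value
buffer = [8 + entry.bytesize].pack('L') + "\x00" * 4 + entry # 'used' counter covers header + entry

source = StringEntrySource.new('counter_all_123-0.db', buffer)
p source.parsed_entries # => [[the JSON-encoded key, 42.0]]
p source.to_metrics     # => { "requests_total" => { type: :counter, samples: [...], ... } }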
data/lib/prometheus/client/helper/file_locker.rb
@@ -0,0 +1,50 @@
+module Prometheus
+  module Client
+    module Helper
+      class FileLocker
+        class << self
+          LOCK_FILE_MUTEX = Mutex.new
+
+          def lock_to_process(filepath)
+            LOCK_FILE_MUTEX.synchronize do
+              @file_locks ||= {}
+              return false if @file_locks[filepath]
+
+              file = File.open(filepath, 'ab')
+              if file.flock(File::LOCK_NB | File::LOCK_EX)
+                @file_locks[filepath] = file
+                return true
+              else
+                return false
+              end
+            end
+          end
+
+          def unlock(filepath)
+            LOCK_FILE_MUTEX.synchronize do
+              @file_locks ||= {}
+              return false unless @file_locks[filepath]
+
+              file = @file_locks[filepath]
+              file.flock(File::LOCK_UN)
+              file.close
+              @file_locks.delete(filepath)
+            end
+          end
+
+          def unlock_all
+            LOCK_FILE_MUTEX.synchronize do
+              @file_locks ||= {}
+              @file_locks.values.each do |file|
+                file.flock(File::LOCK_UN)
+                file.close
+              end
+
+              @file_locks = {}
+            end
+          end
+        end
+      end
+    end
+  end
+end
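Usage sketch (not part of the gem): FileLocker takes a non-blocking exclusive flock on a path and caches the open handle per process, so a second attempt from the same process returns false instead of blocking. The path below is illustrative.

require 'tmpdir'
require 'prometheus/client/helper/file_locker'

locker = Prometheus::Client::Helper::FileLocker
path = File.join(Dir.tmpdir, 'gauge_all_123-0.db') # example path; the file is created if missing

locker.lock_to_process(path) # => true, lock taken and handle cached
locker.lock_to_process(path) # => false, this process already holds it
locker.unlock(path)          # releases the flock and closes the handle
locker.unlock_all            # releases any remaining locks held by this process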
data/lib/prometheus/client/helper/json_parser.rb
@@ -0,0 +1,23 @@
+require 'json'
+
+module Prometheus
+  module Client
+    module Helper
+      module JsonParser
+        class << self
+          if defined?(Oj)
+            def load(s)
+              Oj.load(s)
+            rescue Oj::ParseError, EncodingError => e
+              raise JSON::ParserError.new(e.message)
+            end
+          else
+            def load(s)
+              JSON.parse(s)
+            end
+          end
+        end
+      end
+    end
+  end
+end
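Usage sketch (not part of the gem): JsonParser.load behaves like JSON.parse but, when Oj is loaded, routes through Oj and re-raises its errors as JSON::ParserError, so callers only ever rescue one exception class.

require 'prometheus/client/helper/json_parser'

Prometheus::Client::Helper::JsonParser.load('["metric","metric",["code"],["200"]]')
# => ["metric", "metric", ["code"], ["200"]]

begin
  Prometheus::Client::Helper::JsonParser.load('not json')
rescue JSON::ParserError => e
  puts "rejected: #{e.message}"
end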
data/lib/prometheus/client/helper/metrics_processing.rb
@@ -0,0 +1,45 @@
+module Prometheus
+  module Client
+    module Helper
+      module MetricsProcessing
+        def self.merge_metrics(metrics)
+          metrics.each_value do |metric|
+            metric[:samples] = merge_samples(metric[:samples], metric[:type], metric[:multiprocess_mode]).map do |(name, labels), value|
+              [name, labels.to_h, value]
+            end
+          end
+        end
+
+        def self.merge_samples(raw_samples, metric_type, multiprocess_mode)
+          samples = {}
+          raw_samples.each do |name, labels, value|
+            without_pid = labels.reject { |l| l[0] == 'pid' }
+
+            case metric_type
+            when :gauge
+              case multiprocess_mode
+              when 'min'
+                s = samples.fetch([name, without_pid], value)
+                samples[[name, without_pid]] = [s, value].min
+              when 'max'
+                s = samples.fetch([name, without_pid], value)
+                samples[[name, without_pid]] = [s, value].max
+              when 'livesum'
+                s = samples.fetch([name, without_pid], 0.0)
+                samples[[name, without_pid]] = s + value
+              else # all/liveall
+                samples[[name, labels]] = value
+              end
+            else
+              # Counter, Histogram and Summary.
+              s = samples.fetch([name, without_pid], 0.0)
+              samples[[name, without_pid]] = s + value
+            end
+          end
+
+          samples
+        end
+      end
+    end
+  end
+end
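Usage sketch (not part of the gem): merge_samples takes the raw per-process samples produced by EntryParser#to_metrics ([name, label pairs, value]) and collapses them across pids; gauges honour the multiprocess mode, everything else is summed. The sample values are illustrative.

require 'prometheus/client/helper/metrics_processing'

mp = Prometheus::Client::Helper::MetricsProcessing
samples = [
  ['workers', [['pid', '100']], 3.0],
  ['workers', [['pid', '200']], 5.0]
]

mp.merge_samples(samples, :gauge, 'min')     # => {["workers", []]=>3.0}
mp.merge_samples(samples, :gauge, 'max')     # => {["workers", []]=>5.0}
mp.merge_samples(samples, :gauge, 'livesum') # => {["workers", []]=>8.0}
mp.merge_samples(samples, :counter, nil)     # counters always sum => {["workers", []]=>8.0}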
data/lib/prometheus/client/helper/metrics_representation.rb
@@ -0,0 +1,51 @@
+module Prometheus
+  module Client
+    module Helper
+      module MetricsRepresentation
+        METRIC_LINE = '%s%s %s'.freeze
+        TYPE_LINE = '# TYPE %s %s'.freeze
+        HELP_LINE = '# HELP %s %s'.freeze
+
+        LABEL = '%s="%s"'.freeze
+        SEPARATOR = ','.freeze
+        DELIMITER = "\n".freeze
+
+        REGEX = { doc: /[\n\\]/, label: /[\n\\"]/ }.freeze
+        REPLACE = { "\n" => '\n', '\\' => '\\\\', '"' => '\"' }.freeze
+
+        def self.to_text(metrics)
+          lines = []
+
+          metrics.each do |name, metric|
+            lines << format(HELP_LINE, name, escape(metric[:help]))
+            lines << format(TYPE_LINE, name, metric[:type])
+            metric[:samples].each do |metric_name, labels, value|
+              lines << metric(metric_name, format_labels(labels), value)
+            end
+          end
+
+          # there must be a trailing delimiter
+          (lines << nil).join(DELIMITER)
+        end
+
+        def self.metric(name, labels, value)
+          format(METRIC_LINE, name, labels, value)
+        end
+
+        def self.format_labels(set)
+          return if set.empty?
+
+          strings = set.each_with_object([]) do |(key, value), memo|
+            memo << format(LABEL, key, escape(value, :label))
+          end
+
+          "{#{strings.join(SEPARATOR)}}"
+        end
+
+        def self.escape(string, format = :doc)
+          string.to_s.gsub(REGEX[format], REPLACE)
+        end
+      end
+    end
+  end
+end
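Usage sketch (not part of the gem): to_text renders merged metrics into the Prometheus text exposition format — one HELP and TYPE line per metric followed by its samples. The metric shown is illustrative.

require 'prometheus/client/helper/metrics_representation'

metrics = {
  http_requests_total: {
    help: 'Multiprocess metric',
    type: :counter,
    samples: [['http_requests_total', { 'code' => '200' }, 42.0]]
  }
}

puts Prometheus::Client::Helper::MetricsRepresentation.to_text(metrics)
# # HELP http_requests_total Multiprocess metric
# # TYPE http_requests_total counter
# http_requests_total{code="200"} 42.0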
data/lib/prometheus/client/helper/mmaped_file.rb
@@ -0,0 +1,64 @@
+require 'prometheus/client/helper/entry_parser'
+require 'prometheus/client/helper/file_locker'
+
+# load precompiled extension if available
+begin
+  ruby_version = /(\d+\.\d+)/.match(RUBY_VERSION)
+  require_relative "../../../#{ruby_version}/fast_mmaped_file_rs"
+rescue LoadError
+  require 'fast_mmaped_file_rs'
+end
+
+module Prometheus
+  module Client
+    module Helper
+      class MmapedFile < FastMmapedFileRs
+        include EntryParser
+
+        attr_reader :filepath, :size
+
+        def initialize(filepath)
+          @filepath = filepath
+
+          File.open(filepath, 'a+b') do |file|
+            file.truncate(initial_mmap_file_size) if file.size < MINIMUM_SIZE
+            @size = file.size
+          end
+
+          super(filepath)
+        end
+
+        def close
+          munmap
+          FileLocker.unlock(filepath)
+        end
+
+        private
+
+        def initial_mmap_file_size
+          Prometheus::Client.configuration.initial_mmap_file_size
+        end
+
+        public
+
+        class << self
+          def open(filepath)
+            MmapedFile.new(filepath)
+          end
+
+          def ensure_exclusive_file(file_prefix = 'mmaped_file')
+            (0..Float::INFINITY).lazy
+                                .map { |f_num| "#{file_prefix}_#{Prometheus::Client.pid}-#{f_num}.db" }
+                                .map { |filename| File.join(Prometheus::Client.configuration.multiprocess_files_dir, filename) }
+                                .find { |path| Helper::FileLocker.lock_to_process(path) }
+          end
+
+          def open_exclusive_file(file_prefix = 'mmaped_file')
+            filename = Helper::MmapedFile.ensure_exclusive_file(file_prefix)
+            open(filename)
+          end
+        end
+      end
+    end
+  end
+end
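Usage sketch (not part of the gem): open_exclusive_file walks numbered candidate filenames until FileLocker can claim one, so every process writes to its own .db file through the Rust mmap extension. It assumes Prometheus::Client.configuration.multiprocess_files_dir points at a writable directory.

require 'prometheus/client'
require 'prometheus/client/helper/mmaped_file'

# Claims e.g. <multiprocess_files_dir>/gauge_all_<pid>-0.db for this process.
file = Prometheus::Client::Helper::MmapedFile.open_exclusive_file('gauge_all')
puts file.filepath
file.close # munmaps the file and releases the flock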
data/lib/prometheus/client/helper/plain_file.rb
@@ -0,0 +1,29 @@
+require 'prometheus/client/helper/entry_parser'
+
+module Prometheus
+  module Client
+    module Helper
+      # Parses DB files without using mmap
+      class PlainFile
+        include EntryParser
+        attr_reader :filepath
+
+        def source
+          @data ||= File.read(filepath, mode: 'rb')
+        end
+
+        def initialize(filepath)
+          @filepath = filepath
+        end
+
+        def slice(*args)
+          source.slice(*args)
+        end
+
+        def size
+          source.length
+        end
+      end
+    end
+  end
+end
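Usage sketch (not part of the gem): PlainFile exposes the same EntryParser interface over a plain File.read, so a reader can aggregate every process's .db file without mmapping them, then merge and render the result with the helpers above. The directory path is illustrative.

require 'prometheus/client/helper/plain_file'
require 'prometheus/client/helper/metrics_processing'
require 'prometheus/client/helper/metrics_representation'

helper = Prometheus::Client::Helper

metrics = {}
Dir.glob('/tmp/prometheus_multiproc/*.db').each do |path| # illustrative directory
  helper::PlainFile.new(path).to_metrics(metrics, true)   # true: skip unparsable entries
end

helper::MetricsProcessing.merge_metrics(metrics)
puts helper::MetricsRepresentation.to_text(metrics)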
data/lib/prometheus/client/histogram.rb
@@ -0,0 +1,80 @@
+require 'prometheus/client/metric'
+require 'prometheus/client/uses_value_type'
+
+module Prometheus
+  module Client
+    # A histogram samples observations (usually things like request durations
+    # or response sizes) and counts them in configurable buckets. It also
+    # provides a sum of all observed values.
+    class Histogram < Metric
+      # Value represents the state of a Histogram at a given point.
+      class Value < Hash
+        include UsesValueType
+        attr_accessor :sum, :total, :total_inf
+
+        def initialize(type, name, labels, buckets)
+          @sum = value_object(type, name, "#{name}_sum", labels)
+          @total = value_object(type, name, "#{name}_count", labels)
+          @total_inf = value_object(type, name, "#{name}_bucket", labels.merge(le: "+Inf"))
+
+          buckets.each do |bucket|
+            self[bucket] = value_object(type, name, "#{name}_bucket", labels.merge(le: bucket.to_s))
+          end
+        end
+
+        def observe(value, exemplar_name = '', exemplar_value = '')
+          @sum.increment(value, exemplar_name, exemplar_value)
+          @total.increment(1, exemplar_name, exemplar_value)
+          @total_inf.increment(1, exemplar_name, exemplar_value)
+
+          each_key do |bucket|
+            self[bucket].increment(1, exemplar_name, exemplar_value) if value <= bucket
+          end
+        end
+
+        def get()
+          hash = {}
+          each_key do |bucket|
+            hash[bucket] = self[bucket].get()
+          end
+          hash
+        end
+      end
+
+      # DEFAULT_BUCKETS are the default Histogram buckets. The default buckets
+      # are tailored to broadly measure the response time (in seconds) of a
+      # network service. (From DefBuckets client_golang)
+      DEFAULT_BUCKETS = [0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1,
+                         2.5, 5, 10].freeze
+
+      # Offer a way to manually specify buckets
+      def initialize(name, docstring, base_labels = {},
+                     buckets = DEFAULT_BUCKETS)
+        raise ArgumentError, 'Unsorted buckets, typo?' unless sorted? buckets
+
+        @buckets = buckets
+        super(name, docstring, base_labels)
+      end
+
+      def type
+        :histogram
+      end
+
+      def observe(labels, value, exemplar_name = '', exemplar_value = '')
+        label_set = label_set_for(labels)
+        synchronize { @values[label_set].observe(value, exemplar_name, exemplar_value) }
+      end
+
+      private
+
+      def default(labels)
+        # TODO: default function needs to know key of hash info (label names and values)
+        Value.new(type, @name, labels, @buckets)
+      end
+
+      def sorted?(bucket)
+        bucket.each_cons(2).all? { |i, j| i <= j }
+      end
+    end
+  end
+end
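Usage sketch (not part of the gem): each observation increments the _sum, _count and every bucket at or above the observed value. The metric name and labels are illustrative, and the snippet assumes Prometheus::Client is configured so value_object can create its backing value store (for example a writable multiprocess files directory when mmapped values are in use).

require 'prometheus/client'
require 'prometheus/client/histogram'

histogram = Prometheus::Client::Histogram.new(
  :request_duration_seconds, 'Request duration in seconds'
)

histogram.observe({ path: '/api' }, 0.35)
histogram.get(path: '/api')
# buckets at or above 0.35 (0.5, 1, 2.5, 5, 10) now read 1.0, smaller buckets 0.0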
data/lib/prometheus/client/label_set_validator.rb
@@ -0,0 +1,85 @@
+# encoding: UTF-8
+
+module Prometheus
+  module Client
+    # LabelSetValidator ensures that all used label sets comply with the
+    # Prometheus specification.
+    class LabelSetValidator
+      RESERVED_LABELS = [].freeze
+
+      class LabelSetError < StandardError; end
+      class InvalidLabelSetError < LabelSetError; end
+      class InvalidLabelError < LabelSetError; end
+      class ReservedLabelError < LabelSetError; end
+
+      def initialize(reserved_labels = [])
+        @reserved_labels = (reserved_labels + RESERVED_LABELS).freeze
+        @validated = {}
+      end
+
+      def valid?(labels)
+        unless labels.is_a?(Hash)
+          raise InvalidLabelSetError, "#{labels} is not a valid label set"
+        end
+
+        labels.all? do |key, value|
+          validate_symbol(key)
+          validate_name(key)
+          validate_reserved_key(key)
+          validate_value(key, value)
+        end
+      end
+
+      def validate(labels)
+        return labels if @validated.key?(labels.hash)
+
+        valid?(labels)
+
+        unless @validated.empty? || match?(labels, @validated.first.last)
+          raise InvalidLabelSetError, "labels must have the same signature: (#{label_diff(labels, @validated.first.last)})"
+        end
+
+        @validated[labels.hash] = labels
+      end
+
+      private
+
+      def label_diff(a, b)
+        "expected keys: #{b.keys.sort}, got: #{a.keys.sort}"
+      end
+
+      def match?(a, b)
+        a.keys.sort == b.keys.sort
+      end
+
+      def validate_symbol(key)
+        return true if key.is_a?(Symbol)
+
+        raise InvalidLabelError, "label #{key} is not a symbol"
+      end
+
+      def validate_name(key)
+        return true unless key.to_s.start_with?('__')
+
+        raise ReservedLabelError, "label #{key} must not start with __"
+      end
+
+      def validate_reserved_key(key)
+        return true unless @reserved_labels.include?(key)
+
+        raise ReservedLabelError, "#{key} is reserved"
+      end
+
+      def validate_value(key, value)
+        return true if value.is_a?(String) ||
+                       value.is_a?(Numeric) ||
+                       value.is_a?(Symbol) ||
+                       value.is_a?(FalseClass) ||
+                       value.is_a?(TrueClass) ||
+                       value.nil?
+
+        raise InvalidLabelError, "#{key} does not contain a valid value (type #{value.class})"
+      end
+    end
+  end
+end
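Usage sketch (not part of the gem): the validator checks key and value types, then locks onto the first label signature it sees and rejects later sets whose keys differ.

require 'prometheus/client/label_set_validator'

validator = Prometheus::Client::LabelSetValidator.new

validator.validate(code: '200', path: '/')  # => {:code=>"200", :path=>"/"}
validator.validate(code: '500', path: '/x') # same keys, accepted

begin
  validator.validate(code: '200')           # missing :path
rescue Prometheus::Client::LabelSetValidator::InvalidLabelSetError => e
  puts e.message # => labels must have the same signature: (expected keys: ...)
end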
data/lib/prometheus/client/metric.rb
@@ -0,0 +1,80 @@
+require 'thread'
+require 'prometheus/client/label_set_validator'
+require 'prometheus/client/uses_value_type'
+
+module Prometheus
+  module Client
+    class Metric
+      include UsesValueType
+      attr_reader :name, :docstring, :base_labels
+
+      def initialize(name, docstring, base_labels = {})
+        @mutex = Mutex.new
+        @validator = case type
+                     when :summary
+                       LabelSetValidator.new(['quantile'])
+                     when :histogram
+                       LabelSetValidator.new(['le'])
+                     else
+                       LabelSetValidator.new
+                     end
+        @values = Hash.new { |hash, key| hash[key] = default(key) }
+
+        validate_name(name)
+        validate_docstring(docstring)
+        @validator.valid?(base_labels)
+
+        @name = name
+        @docstring = docstring
+        @base_labels = base_labels
+      end
+
+      # Returns the value for the given label set
+      def get(labels = {})
+        label_set = label_set_for(labels)
+        @validator.valid?(label_set)
+
+        @values[label_set].get
+      end
+
+      # Returns all label sets with their values
+      def values
+        synchronize do
+          @values.each_with_object({}) do |(labels, value), memo|
+            memo[labels] = value
+          end
+        end
+      end
+
+      private
+
+      def touch_default_value
+        @values[label_set_for({})]
+      end
+
+      def default(labels)
+        value_object(type, @name, @name, labels)
+      end
+
+      def validate_name(name)
+        return true if name.is_a?(Symbol)
+
+        raise ArgumentError, 'given name must be a symbol'
+      end
+
+      def validate_docstring(docstring)
+        return true if docstring.respond_to?(:empty?) && !docstring.empty?
+
+        raise ArgumentError, 'docstring must be given'
+      end
+
+      def label_set_for(labels)
+        @validator.validate(@base_labels.merge(labels))
+      end
+
+      def synchronize(&block)
+        @mutex.synchronize(&block)
+      end
+    end
+  end
+end