vinted-prometheus-client-mmap 1.2.2 → 1.3.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 38b385249d3ee5faca8bf41dcc9bc35ec625b4c6c11c2b90b612cb9fcf863bb1
- data.tar.gz: 41f10da86eb0b8d5742d23c7670af5cfa2f85dbb9b57040e6b1183b5d7ffef0e
+ metadata.gz: c9c2172a64911eb3a109b866611b4167e9dafacedf7bdb4ee45da71340d0314e
+ data.tar.gz: '018047827caaea20f96e7ca8b62a717c1c17b0aab14ad1a0bf7ea2735f8ee18a'
  SHA512:
- metadata.gz: 200a343904c8a993eb1495cd4128f307fd09a8e76f57b0d2af20301dda52ec5a9a0cca41300a90777b0e551bf2c9fd08eb18aacaa7a421c2b22543580e6ea2a9
- data.tar.gz: 9f0e0b316c993cf1c5f531c6f9bfae540c287e32c044d43e5738a2753ab37e1ea230b9b69371daf4082b4046c957fc0e25c584012e04be5eae17885944697096
+ metadata.gz: 1f09730605d1c7904ec72b3ac0afc0fd61a79da99e78f93da27551ec2340dbde87b47c72d3b59e934df72e5af92139f488364d1ea161bf2ab1052f118aed1e57
+ data.tar.gz: 6bf4a9f9aeac52d16f15f3d8b3cc44306b009487891b22d431cc2a96aa46a0bba70b9364845e9f382d35729172ce4171ce980c5907c3c4aac47501d3b9710be5
@@ -135,7 +135,7 @@ impl MmapError {
  }
  }
 
- pub fn out_of_bounds<T: Display>(index: T, len: T) -> Self {
+ pub fn out_of_bounds<T: Display>(index: T, len: T) -> Self {
  MmapError::OutOfBounds {
  index: index.to_string(),
  len: len.to_string(),
@@ -1,4 +1,4 @@
- #[derive(Clone, Debug)]
+ #[derive(Clone, Debug, Serialize, Deserialize)]
  pub struct Exemplar {
  // Labels (set of label names/values). Only 1 for now.
  // Value -> f64.
@@ -8,18 +8,18 @@ pub struct Exemplar {
  // The combined length of the label names and values of an Exemplar's LabelSet MUST NOT exceed 128 UTF-8 character code points.
  // 4 bytes max per code point.
  // So, we need to allocate 128*4 = 512 bytes for the label names and values.
- LabelName: &str,
- LabelValue: &str,
- Value: f64,
- Timestamp: u64,
+ pub label_name: String,
+
+ pub label_value: String,
+ pub value: f64,
+ pub timestamp: u128,
  }
 
- pub struct Label {
- Name: &str,
- Value: &str,
- }
+ use serde::{Deserialize, Serialize};
+
+ use crate::size_of;
 
- pub const EXEMPLAR_ENTRY_MAX_SIZE_BYTES:u64 = 512 + size_of::<f64>() + size_of::<u64>();
+ pub const EXEMPLAR_ENTRY_MAX_SIZE_BYTES:usize = 512 + size_of::<f64>() + size_of::<u64>();
 
  // Key -> use the old one.
  // Value -> allocate EXEMPLAR_ENTRY_MAX_SIZE_BYTES. If it exceeds this, we need to return an error. Use JSON.
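The 512-byte budget in the comments above follows from the OpenMetrics limit quoted there: at most 128 UTF-8 code points across an exemplar's label names and values, at up to 4 bytes per code point. A minimal, self-contained sketch of that budget check (not the gem's code; encode_exemplar is a hypothetical helper, and only the serde/serde_json crates the extension already uses are assumed):

    // Hypothetical size-budget check; mirrors the struct and constant above.
    use serde::{Deserialize, Serialize};
    use std::mem::size_of;

    pub const EXEMPLAR_ENTRY_MAX_SIZE_BYTES: usize = 512 + size_of::<f64>() + size_of::<u64>();

    #[derive(Clone, Debug, Serialize, Deserialize)]
    pub struct Exemplar {
        pub label_name: String,
        pub label_value: String,
        pub value: f64,
        pub timestamp: u128,
    }

    // Serialize to JSON and refuse anything that would overflow the fixed slot.
    fn encode_exemplar(ex: &Exemplar) -> Result<Vec<u8>, String> {
        let json = serde_json::to_vec(ex).map_err(|e| e.to_string())?;
        if json.len() > EXEMPLAR_ENTRY_MAX_SIZE_BYTES {
            return Err(format!(
                "exemplar JSON is {} bytes, over the {}-byte slot",
                json.len(),
                EXEMPLAR_ENTRY_MAX_SIZE_BYTES
            ));
        }
        Ok(json)
    }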
@@ -1,5 +1,5 @@
  use core::panic;
- use magnus::Symbol;
+ use magnus::{eval, Symbol, Value};
  use serde::Deserialize;
  use serde_json::value::RawValue;
  use smallvec::SmallVec;
@@ -7,6 +7,7 @@ use std::fmt::Write;
  use std::str;
 
  use crate::error::{MmapError, RubyError};
+ use crate::exemplars::Exemplar;
  use crate::file_info::FileInfo;
  use crate::raw_entry::RawEntry;
  use crate::Result;
@@ -104,34 +105,72 @@ impl<'a> BorrowedData<'a> {
  pub struct EntryMetadata {
  pub multiprocess_mode: Symbol,
  pub type_: Symbol,
- pub value: f64,
+ pub value: Option<f64>,
+ pub ex: Option<Exemplar>,
  }
 
  impl EntryMetadata {
  /// Construct a new `FileEntry`, copying the JSON string from the `RawEntry`
  /// into an internal buffer.
  pub fn new(mmap_entry: &RawEntry, file: &FileInfo) -> Result<Self> {
+ if file.type_.to_string() == "exemplar" {
+ let ex = mmap_entry.exemplar();
+
+
+
+ return Ok(EntryMetadata {
+ multiprocess_mode: file.multiprocess_mode,
+ type_: file.type_,
+ value: None,
+ ex: Some(ex),
+ })
+ }
+
  let value = mmap_entry.value();
 
  Ok(EntryMetadata {
  multiprocess_mode: file.multiprocess_mode,
  type_: file.type_,
- value,
+ value: Some(value),
+ ex: None,
  })
  }
 
  /// Combine values with another `EntryMetadata`.
  pub fn merge(&mut self, other: &Self) {
- if self.type_ == SYM_GAUGE {
- match self.multiprocess_mode {
- s if s == SYM_MIN => self.value = self.value.min(other.value),
- s if s == SYM_MAX => self.value = self.value.max(other.value),
- s if s == SYM_LIVESUM => self.value += other.value,
- _ => self.value = other.value,
+ if other.ex.is_some() {
+ let otherex = other.ex.clone().unwrap();
+
+ if self.ex.is_some() {
+ let selfex = self.ex.clone().unwrap();
+
+ if selfex.timestamp < otherex.timestamp {
+ self.ex = other.ex.clone();
+ }
+ } else {
+ self.ex = other.ex.clone();
+ }
+ }
+ if other.value.is_some() {
+ if self.value.is_none() {
+ self.value = other.value;
+ } else {
+ let other_value = other.value.unwrap();
+ let self_value = self.value.unwrap();
+
+ if self.type_ == SYM_GAUGE {
+ match self.multiprocess_mode {
+ s if s == SYM_MIN => self.value = Some(self_value.min(other_value)),
+ s if s == SYM_MAX => self.value = Some(self_value.max(other_value)),
+ s if s == SYM_LIVESUM => self.value = Some(self_value + other_value),
+ _ => self.value = Some(other_value),
+ }
+ } else {
+ self.value = Some(self_value + other_value);
+ }
  }
- } else {
- self.value += other.value;
  }
+
  }
 
  /// Validate if pid is significant for metric.
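The reworked merge above keeps the exemplar with the newer timestamp and only applies the numeric rules when both entries carry a value. Restated on plain f64s for illustration (the gem compares magnus Symbols, not strings):

    // Value-merge rules from the diff above, with the multiprocess mode as a plain &str.
    fn merge_values(is_gauge: bool, mode: &str, current: f64, incoming: f64) -> f64 {
        if !is_gauge {
            // Counters, histograms and summaries are summed across processes.
            return current + incoming;
        }
        match mode {
            "min" => current.min(incoming),
            "max" => current.max(incoming),
            "livesum" => current + incoming,
            _ => incoming, // "all" and any other mode: the incoming entry wins.
        }
    }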
@@ -149,9 +188,26 @@ use std::collections::hash_map::DefaultHasher;
  use std::collections::HashMap;
  use std::hash::Hash;
  use std::hash::Hasher;
- use std::time::{SystemTime, UNIX_EPOCH};
 
  use std::io::Write as OtherWrite;
+
+ fn exemplar_to_proto(e: &Exemplar) -> io::prometheus::client::Exemplar {
+ let seconds = e.timestamp / (1000 * 1000 * 1000);
+ let nanos = e.timestamp % (1000 * 1000 * 1000);
+
+ io::prometheus::client::Exemplar {
+ label: vec![io::prometheus::client::LabelPair {
+ name: Some(e.label_name.clone()),
+ value: Some(e.label_value.clone()),
+ }],
+ value: Some(e.value),
+ timestamp: Some(prost_types::Timestamp {
+ seconds: seconds as i64,
+ nanos: nanos as i32,
+ }),
+ }
+ }
+
  impl FileEntry {
  pub fn trim_quotes(s: &str) -> String {
  let mut chars = s.chars();
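exemplar_to_proto converts the stored nanosecond timestamp into a protobuf Timestamp by splitting it into whole seconds and the remaining nanoseconds. A standalone check of that split (illustrative only, not part of the gem):

    // Split a u128 nanosecond timestamp the way exemplar_to_proto does.
    fn split_nanos(ts: u128) -> (i64, i32) {
        const NANOS_PER_SEC: u128 = 1_000_000_000;
        ((ts / NANOS_PER_SEC) as i64, (ts % NANOS_PER_SEC) as i32)
    }

    fn main() {
        let (secs, nanos) = split_nanos(1_700_000_000_123_456_789);
        assert_eq!((secs, nanos), (1_700_000_000, 123_456_789));
    }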
@@ -229,26 +285,17 @@ impl FileEntry {
  // Get the final u64 hash value
  let hash_value = hasher.finish();
 
- let start = SystemTime::now();
- let since_the_epoch = start
- .duration_since(UNIX_EPOCH)
- .expect("Time went backwards");
-
  m.counter = Some(io::prometheus::client::Counter {
- value: Some(gr.0.meta.value),
+ value: gr.0.meta.value,
  created_timestamp: None,
- exemplar: Some(io::prometheus::client::Exemplar{
- label: vec![
- io::prometheus::client::LabelPair {
- name: Some("traceID".to_string()),
- value: Some("123456789".to_string()),
- }
- ],
- value: Some(gr.0.meta.value),
- timestamp: Some(prost_types::Timestamp { seconds:since_the_epoch.as_secs() as i64 , nanos: since_the_epoch.as_nanos() as i32 }),
- }),
+ exemplar: None,
  });
 
+ if gr.0.meta.ex.is_some() {
+ m.counter.as_mut().unwrap().exemplar =
+ Some(exemplar_to_proto(gr.0.meta.ex.as_ref().unwrap()));
+ }
+
  mtrcs.insert(hash_value, m);
  metric_types.insert(hash_value, "counter");
  metric_names.insert(hash_value, gr.1.metric_name);
@@ -266,7 +313,7 @@ impl FileEntry {
  let hash_value = hasher.finish();
 
  m.gauge = Some(io::prometheus::client::Gauge {
- value: Some(gr.0.meta.value),
+ value: gr.0.meta.value,
  });
  mtrcs.insert(hash_value, m);
  metric_types.insert(hash_value, "gauge");
@@ -312,7 +359,7 @@ impl FileEntry {
 
  let mut curf: f64 =
  bucket.cumulative_count_float.unwrap_or_default();
- curf += gr.0.meta.value;
+ curf += gr.0.meta.value.unwrap();
 
  bucket.cumulative_count_float = Some(curf);
  }
@@ -335,7 +382,7 @@ impl FileEntry {
 
  let buckets = vec![io::prometheus::client::Bucket {
  cumulative_count: None,
- cumulative_count_float: Some(gr.0.meta.value),
+ cumulative_count_float: gr.0.meta.value,
  upper_bound: Some(
  le.expect(
  &format!("got no LE for {}", gr.1.metric_name)
@@ -413,10 +460,10 @@ impl FileEntry {
  if gr.1.metric_name.ends_with("_count") {
  let samplecount = smry.sample_count.unwrap_or_default();
  smry.sample_count =
- Some((gr.0.meta.value as u64) + samplecount);
+ Some((gr.0.meta.value.unwrap() as u64) + samplecount);
  } else if gr.1.metric_name.ends_with("_sum") {
  let samplesum: f64 = smry.sample_sum.unwrap_or_default();
- smry.sample_sum = Some(gr.0.meta.value + samplesum);
+ smry.sample_sum = Some(gr.0.meta.value.unwrap() + samplesum);
  } else {
  let mut found_quantile = false;
  for qntl in &mut smry.quantile {
@@ -425,7 +472,7 @@ impl FileEntry {
  }
 
  let mut curq: f64 = qntl.quantile.unwrap_or_default();
- curq += gr.0.meta.value;
+ curq += gr.0.meta.value.unwrap();
 
  qntl.quantile = Some(curq);
  found_quantile = true;
@@ -434,7 +481,7 @@ impl FileEntry {
  if !found_quantile {
  smry.quantile.push(io::prometheus::client::Quantile {
  quantile: quantile,
- value: Some(gr.0.meta.value),
+ value: gr.0.meta.value,
  });
  }
  }
@@ -455,7 +502,7 @@ impl FileEntry {
  gr.1.metric_name.strip_suffix("_count").unwrap();
  m.summary = Some(io::prometheus::client::Summary {
  quantile: vec![],
- sample_count: Some(gr.0.meta.value as u64),
+ sample_count: Some(gr.0.meta.value.unwrap() as u64),
  sample_sum: None,
  created_timestamp: None,
  });
@@ -464,14 +511,14 @@ impl FileEntry {
  gr.1.metric_name.strip_suffix("_sum").unwrap();
  m.summary = Some(io::prometheus::client::Summary {
  quantile: vec![],
- sample_sum: Some(gr.0.meta.value),
+ sample_sum: Some(gr.0.meta.value.unwrap()),
  sample_count: None,
  created_timestamp: None,
  });
  } else {
  let quantiles = vec![io::prometheus::client::Quantile {
  quantile: quantile,
- value: Some(gr.0.meta.value),
+ value: gr.0.meta.value,
  }];
  m.summary = Some(io::prometheus::client::Summary {
  quantile: quantiles,
@@ -487,6 +534,9 @@ impl FileEntry {
  }
  }
  }
+ "exemplar" => {
+ // Exemplars are handled later on.
+ }
  mtype => {
  panic!("unhandled metric type {}", mtype)
  }
@@ -532,6 +582,8 @@ impl FileEntry {
  unsafe { Ok(str::from_utf8_unchecked(buffer.get_ref()).to_string()) }
  }
 
+
+
  /// Convert the sorted entries into a String in Prometheus metrics format.
  pub fn entries_to_string(entries: Vec<FileEntry>) -> Result<String> {
  // We guesstimate that lines are ~100 bytes long, preallocate the string to
@@ -568,7 +620,7 @@ impl FileEntry {
 
  entry.append_entry(metrics_data, &mut out)?;
 
- writeln!(&mut out, " {}", entry.meta.value)
+ writeln!(&mut out, " {}", entry.meta.value.unwrap())
  .map_err(|e| MmapError::Other(format!("Failed to append to output: {e}")))?;
 
  processed_count += 1;
@@ -1182,7 +1234,7 @@ mod test {
  entry_a.meta.merge(&entry_b.meta);
 
  assert_eq!(
- case.expected_value, entry_a.meta.value,
+ case.expected_value, entry_a.meta.value.unwrap(),
  "test case: {name} - value"
  );
  }
@@ -14,6 +14,7 @@ pub mod map;
  pub mod mmap;
  pub mod raw_entry;
  pub mod util;
+ pub mod exemplars;
 
  pub mod io {
  pub mod prometheus {
@@ -82,6 +83,7 @@ fn init(ruby: &Ruby) -> magnus::error::Result<()> {
  klass.define_method("used=", method!(MmapedFile::save_used, 1))?;
  klass.define_method("fetch_entry", method!(MmapedFile::fetch_entry, 3))?;
  klass.define_method("upsert_entry", method!(MmapedFile::upsert_entry, 3))?;
+ klass.define_method("upsert_exemplar", method!(MmapedFile::upsert_exemplar, 5))?;
 
  Ok(())
  }
@@ -1,6 +1,7 @@
  use hashbrown::hash_map::RawEntryMut;
  use hashbrown::HashMap;
- use magnus::{exception::*, Error, RArray};
+ use magnus::class::file;
+ use magnus::{eval, exception::*, Error, RArray, Value};
  use std::hash::{BuildHasher, Hash, Hasher};
  use std::mem::size_of;
 
@@ -142,22 +143,39 @@ impl EntryMap {
  let mut pos = HEADER_SIZE;
 
  while pos + size_of::<u32>() < used {
- let raw_entry = RawEntry::from_slice(&source[pos..used])?;
-
- if pos + raw_entry.total_len() > used {
- return Err(MmapError::PromParsing(format!(
- "source file {} corrupted, used {used} < stored data length {}",
- file_info.path.display(),
- pos + raw_entry.total_len()
- )));
- }
+ let raw_entry: RawEntry;
+
+ if file_info.type_.to_string() == "exemplar" {
+ raw_entry = RawEntry::from_slice_exemplar(&source[pos..used])?;
+
+ if pos + raw_entry.total_len_exemplar() > used {
+ return Err(MmapError::PromParsing(format!(
+ "source file {} corrupted, used {used} < stored data length {}",
+ file_info.path.display(),
+ pos + raw_entry.total_len()
+ )));
+ }
 
+ pos += raw_entry.total_len_exemplar();
+
+ } else {
+ raw_entry = RawEntry::from_slice(&source[pos..used])?;
+
+ if pos + raw_entry.total_len() > used {
+ return Err(MmapError::PromParsing(format!(
+ "source file {} corrupted, used {used} < stored data length {}",
+ file_info.path.display(),
+ pos + raw_entry.total_len()
+ )));
+ }
+
+ pos += raw_entry.total_len();
+ }
+
  let meta = EntryMetadata::new(&raw_entry, &file_info)?;
  let data = BorrowedData::new(&raw_entry, &file_info, meta.is_pid_significant())?;
 
  self.merge_or_store(data, meta)?;
-
- pos += raw_entry.total_len();
  }
 
  Ok(())
@@ -198,7 +216,8 @@ mod test {
  meta: EntryMetadata {
  multiprocess_mode: Symbol::new("max"),
  type_: Symbol::new("gauge"),
- value: 1.0,
+ value: Some(1.0),
+ ex: None,
  },
  },
  FileEntry {
@@ -209,7 +228,8 @@ mod test {
  meta: EntryMetadata {
  multiprocess_mode: Symbol::new("max"),
  type_: Symbol::new("gauge"),
- value: 1.0,
+ value: Some(1.0),
+ ex: None,
  },
  },
  FileEntry {
@@ -220,7 +240,8 @@ mod test {
  meta: EntryMetadata {
  multiprocess_mode: Symbol::new("max"),
  type_: Symbol::new("gauge"),
- value: 1.0,
+ value: Some(1.0),
+ ex: None,
  },
  },
  FileEntry {
@@ -231,7 +252,8 @@ mod test {
  meta: EntryMetadata {
  multiprocess_mode: Symbol::new("max"),
  type_: Symbol::new("gauge"),
- value: 1.0,
+ value: Some(1.0),
+ ex: None,
  },
  },
  FileEntry {
@@ -242,7 +264,8 @@ mod test {
  meta: EntryMetadata {
  multiprocess_mode: Symbol::new("all"),
  type_: Symbol::new("gauge"),
- value: 1.0,
+ value: Some(1.0),
+ ex: None,
  },
  },
  FileEntry {
@@ -253,7 +276,8 @@ mod test {
  meta: EntryMetadata {
  multiprocess_mode: Symbol::new("all"),
  type_: Symbol::new("gauge"),
- value: 1.0,
+ value: Some(1.0),
+ ex: None,
  },
  },
  ];
@@ -294,7 +318,8 @@ mod test {
  meta: EntryMetadata {
  multiprocess_mode: Symbol::new("all"),
  type_: Symbol::new("gauge"),
- value: 1.0,
+ value: Some(1.0),
+ ex: None,
  },
  };
 
@@ -306,7 +331,8 @@ mod test {
  meta: EntryMetadata {
  multiprocess_mode: Symbol::new("all"),
  type_: Symbol::new("gauge"),
- value: 5.0,
+ value: Some(5.0),
+ ex: None,
  },
  };
 
@@ -318,7 +344,8 @@ mod test {
  meta: EntryMetadata {
  multiprocess_mode: Symbol::new("all"),
  type_: Symbol::new("gauge"),
- value: 100.0,
+ value: Some(100.0),
+ ex: None,
  },
  };
 
@@ -330,7 +357,8 @@ mod test {
  meta: EntryMetadata {
  multiprocess_mode: Symbol::new("all"),
  type_: Symbol::new("gauge"),
- value: 1.0,
+ value: Some(100.0),
+ ex: None,
  },
  };
 
@@ -345,7 +373,7 @@ mod test {
 
  assert_eq!(
  5.0,
- map.0.get(&starting_entry.data).unwrap().value,
+ map.0.get(&starting_entry.data).unwrap().value.unwrap(),
  "value updated"
  );
  assert_eq!(1, map.0.len(), "no entry added");
@@ -359,7 +387,7 @@ mod test {
 
  assert_eq!(
  5.0,
- map.0.get(&starting_entry.data).unwrap().value,
+ map.0.get(&starting_entry.data).unwrap().value.unwrap(),
  "value unchanged"
  );
 
@@ -371,7 +399,7 @@ mod test {
 
  assert_eq!(
  5.0,
- map.0.get(&starting_entry.data).unwrap().value,
+ map.0.get(&starting_entry.data).unwrap().value.unwrap(),
  "value unchanged"
  );
  assert_eq!(3, map.0.len(), "entry added");
@@ -9,7 +9,9 @@ use std::path::PathBuf;
 
  use crate::error::{MmapError, RubyError};
  use crate::raw_entry::RawEntry;
- use crate::util::CheckedOps;
+ use crate::exemplars::{Exemplar, EXEMPLAR_ENTRY_MAX_SIZE_BYTES};
+
+ use crate::util::{read_exemplar, CheckedOps};
  use crate::util::{self, errno, read_f64, read_u32};
  use crate::Result;
  use crate::HEADER_SIZE;
@@ -139,6 +141,59 @@ impl InnerMmap {
  Ok(position)
  }
 
+ pub unsafe fn initialize_entry_exemplar(&mut self, key: &[u8], ex: Exemplar) -> Result<usize> {
+ // CAST: no-op on 32-bit, widening on 64-bit.
+ let current_used = self.load_used()? as usize;
+ let entry_length = RawEntry::calc_total_len_exemplar(key.len())?;
+
+ let new_used = current_used.add_chk(entry_length)?;
+
+ // Increasing capacity requires expanding the file and re-mmapping it, we can't
+ // perform this from `InnerMmap`.
+ if self.capacity() < new_used {
+ return Err(MmapError::Other(format!(
+ "mmap capacity {} less than {}",
+ self.capacity(),
+ new_used
+ )));
+ }
+
+ let bytes = self.map.as_mut();
+ let value_offset = RawEntry::save_exemplar(&mut bytes[current_used..new_used], key, ex)?;
+
+ // Won't overflow as value_offset is less than new_used.
+ let position = current_used + value_offset;
+ let new_used32 = util::cast_chk::<_, u32>(new_used, "used")?;
+
+ self.save_used(new_used32)?;
+ Ok(position)
+ }
+
+ pub fn save_exemplar(&mut self, offset: usize, exemplar: Exemplar) -> Result<()> {
+ if self.len.add_chk(size_of::<Exemplar>())? <= offset {
+ return Err(MmapError::out_of_bounds(
+ offset + size_of::<f64>(),
+ self.len,
+ ));
+ }
+
+ if offset < HEADER_SIZE {
+ return Err(MmapError::Other(format!(
+ "writing to offset {offset} would overwrite file header"
+ )));
+ }
+
+ let val = serde_json::to_string(&exemplar).unwrap();
+
+ let value_bytes = val.as_bytes();
+ let value_range = self.item_range(offset, value_bytes.len())?;
+
+ let bytes = self.map.as_mut();
+ bytes[value_range].copy_from_slice(&value_bytes);
+
+ Ok(())
+ }
+
  /// Save a metrics value to an existing entry in the mmap.
  pub fn save_value(&mut self, offset: usize, value: f64) -> Result<()> {
  if self.len.add_chk(size_of::<f64>())? <= offset {
@@ -174,6 +229,17 @@ impl InnerMmap {
  read_f64(self.map.as_ref(), offset)
  }
 
+ pub fn load_exemplar(&mut self, offset: usize) -> Result<Exemplar> {
+ if self.len.add_chk(EXEMPLAR_ENTRY_MAX_SIZE_BYTES)? <= offset {
+ return Err(MmapError::out_of_bounds(
+ offset + EXEMPLAR_ENTRY_MAX_SIZE_BYTES,
+ self.len,
+ ));
+ }
+
+ read_exemplar(self.map.as_mut(), offset)
+ }
+
  /// The length of data written to the file.
  /// With a new file this is only set when Ruby calls `slice` on
  /// `FastMmapedFileRs`, so even if data has been written to the
@@ -15,6 +15,7 @@ use std::sync::RwLock;
 
  use crate::err;
  use crate::error::MmapError;
+ use crate::exemplars::Exemplar;
  use crate::file_entry::FileEntry;
  use crate::map::EntryMap;
  use crate::raw_entry::RawEntry;
@@ -76,6 +77,8 @@ const STR_SHARED: c_ulong = 1 << (14);
  #[magnus::wrap(class = "FastMmapedFileRs", free_immediately, size)]
  pub struct MmapedFile(RwLock<Option<InnerMmap>>);
 
+ use std::time::{SystemTime, UNIX_EPOCH};
+
  impl MmapedFile {
  /// call-seq:
  /// new(file)
@@ -320,6 +323,61 @@ impl MmapedFile {
  rs_self.load_value(value_offset)
  }
 
+ pub fn upsert_exemplar(
+ rb_self: Obj<Self>,
+ positions: RHash,
+ key: RString,
+ value: f64,
+ exemplar_name: RString,
+ exemplar_value: RString,
+ ) -> magnus::error::Result<f64> {
+ let rs_self = &*rb_self;
+ let position: Option<Fixnum> = positions.lookup(key)?;
+
+ let start = SystemTime::now();
+ let since_the_epoch = start
+ .duration_since(UNIX_EPOCH)
+ .expect("Time went backwards");
+
+ let ex: Exemplar = Exemplar {
+ label_name: unsafe { exemplar_name.as_str().unwrap().into() },
+ label_value: unsafe { exemplar_value.as_str().unwrap().into() },
+ value: value,
+ timestamp: since_the_epoch.as_nanos(),
+ };
+
+ if let Some(pos) = position {
+ let pos = pos.to_usize()?;
+ return rs_self
+ .inner_mut(|inner| {
+ inner.save_exemplar(pos, ex)?;
+
+ // TODO just return `value` here instead of loading it?
+ // This is how the C implementation did it, but I don't
+ // see what the extra load gains us.
+ let ex = inner.load_exemplar(pos);
+
+ Ok(ex.unwrap().value)
+ })
+ .map_err(|e| e.into());
+ }
+
+
+ rs_self.check_expand_exemplar(rb_self, key.len())?;
+
+ let value_offset: usize = rs_self.inner_mut(|inner| {
+ // SAFETY: We must not call any Ruby code for the lifetime of this borrow.
+ unsafe { inner.initialize_entry_exemplar(key.as_slice(), ex) }
+ })?;
+
+ // CAST: no-op on 64-bit, widening on 32-bit.
+ positions.aset(key, Integer::from_u64(value_offset as u64))?;
+
+ let ex = rs_self.load_exemplar(value_offset);
+
+ Ok(ex.unwrap().value)
+ }
+
  /// Update the value of an existing entry, if present. Otherwise create a new entry
  /// for the key.
  pub fn upsert_entry(
@@ -469,6 +527,23 @@ impl MmapedFile {
  Ok(())
  }
 
+ /// Check that the mmap is large enough to contain the value to be added,
+ /// and expand it to fit if necessary.
+ fn check_expand_exemplar(&self, rb_self: Obj<Self>, key_len: usize) -> magnus::error::Result<()> {
+ // CAST: no-op on 32-bit, widening on 64-bit.
+ let used = self.inner(|inner| inner.load_used())? as usize;
+ let entry_len = RawEntry::calc_total_len_exemplar(key_len)?;
+
+ // We need the mmapped region to contain at least one byte beyond the
+ // written data to create a NUL- terminated C string. Validate that
+ // new length does not exactly match or exceed the length of the mmap.
+ while self.capacity() <= used.add_chk(entry_len)? {
+ self.expand_to_fit(rb_self, self.capacity().mul_chk(2)?)?;
+ }
+
+ Ok(())
+ }
+
  /// Expand the underlying file until it is long enough to fit `target_cap`.
  /// This will remove the existing mmap, expand the file, then update any
  /// strings held by the `WeakMap` to point to the newly mmapped address.
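check_expand_exemplar follows the same growth rule as the existing check_expand: keep doubling the mapped capacity until the new entry fits with at least one spare byte for the trailing NUL. The rule in isolation, on plain integers (illustrative only):

    // Keep doubling `capacity` until `used + entry_len` fits strictly below it,
    // leaving room for the NUL terminator the mmap needs.
    fn grow_capacity(mut capacity: usize, used: usize, entry_len: usize) -> usize {
        while capacity <= used + entry_len {
            capacity *= 2;
        }
        capacity
    }

    // e.g. a 4096-byte map holding 3072 bytes grows to 8192 for a 2048-byte entry:
    // grow_capacity(4096, 3072, 2048) == 8192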
@@ -555,6 +630,11 @@ impl MmapedFile {
  .map_err(|e| e.into())
  }
 
+ fn load_exemplar<'a, 'b>(&'a self, position: usize) -> magnus::error::Result<Exemplar> {
+ self.inner_mut(|inner| inner.load_exemplar(position))
+ .map_err(|e| e.into())
+ }
+
  fn as_mut_ptr(&self) -> *mut c_char {
  // UNWRAP: This is actually infallible, but we need to
  // wrap it in a `Result` for use with `inner()`.
@@ -655,13 +735,14 @@ impl MmapedFile {
 
  #[cfg(test)]
  mod test {
+ use super::*;
+ use core::panic;
  use magnus::error::Error;
  use magnus::eval;
  use magnus::Range;
  use nix::unistd::{sysconf, SysconfVar};
  use std::mem::size_of;
 
- use super::*;
  use crate::raw_entry::RawEntry;
  use crate::testhelper::TestFile;
 
@@ -1,6 +1,7 @@
  use std::mem::size_of;
 
  use crate::error::MmapError;
+ use crate::exemplars::{Exemplar, EXEMPLAR_ENTRY_MAX_SIZE_BYTES};
  use crate::util;
  use crate::util::CheckedOps;
  use crate::Result;
@@ -13,6 +14,42 @@ pub struct RawEntry<'a> {
  }
 
  impl<'a> RawEntry<'a> {
+ pub fn save_exemplar(bytes: &'a mut [u8], key: &[u8], value: Exemplar) -> Result<usize> {
+ let total_len = Self::calc_total_len_exemplar(key.len())?;
+
+ if total_len > bytes.len() {
+ return Err(MmapError::Other(format!(
+ "entry length {total_len} larger than slice length {}",
+ bytes.len()
+ )));
+ }
+
+ let val = serde_json::to_string(&value).unwrap();
+
+ // CAST: `calc_len` runs `check_encoded_len`, we know the key len
+ // is less than i32::MAX. No risk of overflows or failed casts.
+ let key_len: u32 = key.len() as u32;
+
+ // Write the key length to the mmap.
+ bytes[..size_of::<u32>()].copy_from_slice(&key_len.to_ne_bytes());
+
+ // Advance slice past the size.
+ let bytes = &mut bytes[size_of::<u32>()..];
+
+ bytes[..key.len()].copy_from_slice(key);
+
+ // Advance to end of key.
+ let bytes = &mut bytes[key.len()..];
+
+ let pad_len = Self::padding_len(key.len());
+ bytes[..pad_len].fill(b' ');
+ let bytes = &mut bytes[pad_len..];
+
+ bytes[..val.len()].copy_from_slice(val.as_bytes());
+
+ Self::calc_value_offset(key.len())
+ }
+
  /// Save an entry to the mmap, returning the value offset in the newly created entry.
  pub fn save(bytes: &'a mut [u8], key: &[u8], value: f64) -> Result<usize> {
  let total_len = Self::calc_total_len(key.len())?;
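save_exemplar writes the same entry layout as the plain save above, except that the value slot holds zero-padded JSON of up to EXEMPLAR_ENTRY_MAX_SIZE_BYTES instead of a single f64. A sketch of the assumed layout (the exact padding_len rule is defined elsewhere in this file and not shown in the diff):

    // Assumed on-disk layout shared by save() and save_exemplar():
    //   [ key_len: u32 ][ key bytes ][ space padding ][ value slot ]
    // The value slot is size_of::<f64>() for plain metrics and
    // EXEMPLAR_ENTRY_MAX_SIZE_BYTES of zero-padded JSON for exemplar files.
    const EXEMPLAR_ENTRY_MAX_SIZE_BYTES: usize =
        512 + std::mem::size_of::<f64>() + std::mem::size_of::<u64>();

    fn total_len(key_len: usize, pad_len: usize, is_exemplar: bool) -> usize {
        let value_slot = if is_exemplar {
            EXEMPLAR_ENTRY_MAX_SIZE_BYTES
        } else {
            std::mem::size_of::<f64>()
        };
        std::mem::size_of::<u32>() + key_len + pad_len + value_slot
    }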
@@ -66,6 +103,23 @@ impl<'a> RawEntry<'a> {
  Ok(Self { bytes, encoded_len })
  }
 
+ pub fn from_slice_exemplar(bytes: &'a [u8]) -> Result<Self> {
+ // CAST: no-op on 32-bit, widening on 64-bit.
+ let encoded_len = util::read_u32(bytes, 0)? as usize;
+
+ let total_len = Self::calc_total_len_exemplar(encoded_len)?;
+
+ // Confirm the value is in bounds of the slice provided.
+ if total_len > bytes.len() {
+ return Err(MmapError::out_of_bounds(total_len, bytes.len()));
+ }
+
+ // Advance slice past length int and cut at end of entry.
+ let bytes = &bytes[size_of::<u32>()..total_len];
+
+ Ok(Self { bytes, encoded_len })
+ }
+
  /// Read the `f64` value of an entry from memory.
  #[inline]
  pub fn value(&self) -> f64 {
@@ -77,6 +131,15 @@ impl<'a> RawEntry<'a> {
  util::read_f64(self.bytes, offset).unwrap()
  }
 
+ pub fn exemplar(&self) -> Exemplar {
+ // We've stripped off the leading u32, don't include that here.
+ let offset = self.encoded_len + Self::padding_len(self.encoded_len);
+
+ // UNWRAP: We confirm in the constructor that the value offset
+ // is in-range for the slice.
+ util::read_exemplar(self.bytes, offset).unwrap()
+ }
+
  /// The length of the entry key without padding.
  #[inline]
  pub fn encoded_len(&self) -> usize {
@@ -97,6 +160,12 @@ impl<'a> RawEntry<'a> {
  Self::calc_total_len(self.encoded_len).unwrap()
  }
 
+ #[inline]
+ pub fn total_len_exemplar(&self) -> usize {
+ // UNWRAP: We confirmed in the constructor that this doesn't overflow.
+ Self::calc_total_len_exemplar(self.encoded_len).unwrap()
+ }
+
  /// Calculate the total length of an `MmapEntry`, including the string length,
  /// string, padding, and value. Validates encoding_len is within expected bounds.
  #[inline]
@@ -104,6 +173,11 @@ impl<'a> RawEntry<'a> {
  Self::calc_value_offset(encoded_len)?.add_chk(size_of::<f64>())
  }
 
+ #[inline]
+ pub fn calc_total_len_exemplar(encoded_len: usize) -> Result<usize> {
+ Self::calc_value_offset(encoded_len)?.add_chk(EXEMPLAR_ENTRY_MAX_SIZE_BYTES)
+ }
+
  /// Calculate the value offset of an `MmapEntry`, including the string length,
  /// string, padding. Validates encoding_len is within expected bounds.
  #[inline]
@@ -1,10 +1,12 @@
  use nix::errno::Errno;
  use nix::libc::c_long;
+ use std::borrow::Cow;
  use std::fmt::Display;
  use std::io;
  use std::mem::size_of;
 
  use crate::error::MmapError;
+ use crate::exemplars::{Exemplar, EXEMPLAR_ENTRY_MAX_SIZE_BYTES};
  use crate::Result;
 
  /// Wrapper around `checked_add()` that converts failures
@@ -88,6 +90,24 @@ pub fn read_f64(buf: &[u8], offset: usize) -> Result<f64> {
  ))
  }
 
+ pub fn read_exemplar(buf: &[u8], offset: usize) -> Result<Exemplar> {
+ if let Some(slice) = buf.get(offset..offset + EXEMPLAR_ENTRY_MAX_SIZE_BYTES) {
+ // UNWRAP: We can safely unwrap the conversion from slice to array as we
+ // can be sure the target array has same length as the source slice.
+ let out: &[u8; EXEMPLAR_ENTRY_MAX_SIZE_BYTES] = slice.try_into().expect("failed to convert slice to array");
+
+ let res: Vec<u8> = out.iter().cloned().filter(|&x| x != 0).collect();
+
+ let v: Exemplar = serde_json::from_slice(&res).expect("failed to convert string to Exemplar");
+
+ return Ok(v)
+ }
+ Err(MmapError::out_of_bounds(
+ offset + EXEMPLAR_ENTRY_MAX_SIZE_BYTES,
+ buf.len(),
+ ))
+ }
+
  #[cfg(test)]
  mod test {
  use super::*;
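read_exemplar reads the whole fixed-size slot and strips the zero bytes left over from padding before handing the rest to serde_json; serde_json never emits raw NUL bytes, so this is safe for the JSON produced by save_exemplar. A hedged sketch of the same idea as a reusable helper (a variant that cuts at the first NUL instead of filtering them all; the name is made up and it assumes the slot was zero-initialized before the write):

    // Decode JSON that was written into a larger, zero-initialized slot.
    fn decode_padded_json<T: serde::de::DeserializeOwned>(slot: &[u8]) -> serde_json::Result<T> {
        // Everything from the first NUL onward is padding left by the writer.
        let end = slot.iter().position(|&b| b == 0).unwrap_or(slot.len());
        serde_json::from_slice(&slot[..end])
    }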
@@ -10,11 +10,11 @@ module Prometheus
  :counter
  end
 
- def increment(labels = {}, by = 1)
+ def increment(labels = {}, by = 1, exemplar_name = '', exemplar_value = '')
  raise ArgumentError, 'increment must be a non-negative number' if by < 0
 
  label_set = label_set_for(labels)
- synchronize { @values[label_set].increment(by) }
+ synchronize { @values[label_set].increment(by, exemplar_name, exemplar_value) }
  end
 
  private
@@ -46,6 +46,10 @@ module Prometheus
  @m.upsert_entry(@positions, key, value)
  end
 
+ def write_exemplar(key, value, exemplar_id, exemplar_val)
+ @m.upsert_exemplar(@positions, key, value, exemplar_id, exemplar_val)
+ end
+
  def path
  @m.filepath if @m
  end
@@ -31,11 +31,7 @@ module Prometheus
  initialize_file if pid_changed?
 
  @value += amount
- # TODO(GiedriusS): write exemplars too.
- if @file_prefix != 'gauge'
- puts "#{@name} exemplar name = #{exemplar_name}, exemplar_value = #{exemplar_value}"
- end
- write_value(@key, @value)
+ write_value(@key, @value, exemplar_name, exemplar_value)
  @value
  end
  end
@@ -120,12 +116,18 @@ module Prometheus
  unless @file.nil?
  @file.close
  end
+ unless @exemplar_file.nil?
+ @exemplar_file.close
+ end
  mmaped_file = Helper::MmapedFile.open_exclusive_file(@file_prefix)
+ exemplar_file = Helper::MmapedFile.open_exclusive_file('exemplar')
 
  @@files[@file_prefix] = MmapedDict.new(mmaped_file)
+ @@files['exemplar'] = MmapedDict.new(exemplar_file)
  end
 
  @file = @@files[@file_prefix]
+ @exemplar_file = @@files['exemplar']
  @key = rebuild_key
 
  @value = read_value(@key)
@@ -139,8 +141,12 @@ module Prometheus
  [@metric_name, @name, keys, values].to_json
  end
 
- def write_value(key, val)
+ def write_value(key, val, exemplar_name = '', exemplar_value = '')
  @file.write_value(key, val)
+ # Exemplars are only defined on counters or histograms.
+ if @file_prefix == 'counter' or @file_prefix == 'histogram' and exemplar_name != '' and exemplar_value != ''
+ @exemplar_file.write_exemplar(key, val, exemplar_name, exemplar_value)
+ end
  rescue StandardError => e
  Prometheus::Client.logger.warn("writing value to #{@file.path} failed with #{e}")
  Prometheus::Client.logger.debug(e.backtrace.join("\n"))
@@ -1,5 +1,5 @@
  module Prometheus
  module Client
- VERSION = '1.2.2'.freeze
+ VERSION = '1.3.0'.freeze
  end
  end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: vinted-prometheus-client-mmap
  version: !ruby/object:Gem::Version
- version: 1.2.2
+ version: 1.3.0
  platform: ruby
  authors:
  - Tobias Schmidt