@effect/opentelemetry 0.46.3 → 0.46.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. package/Otlp/package.json +6 -0
  2. package/OtlpLogger/package.json +6 -0
  3. package/OtlpMetrics/package.json +6 -0
  4. package/OtlpResource/package.json +6 -0
  5. package/dist/cjs/Otlp.js +38 -0
  6. package/dist/cjs/Otlp.js.map +1 -0
  7. package/dist/cjs/OtlpLogger.js +153 -0
  8. package/dist/cjs/OtlpLogger.js.map +1 -0
  9. package/dist/cjs/OtlpMetrics.js +354 -0
  10. package/dist/cjs/OtlpMetrics.js.map +1 -0
  11. package/dist/cjs/OtlpResource.js +93 -0
  12. package/dist/cjs/OtlpResource.js.map +1 -0
  13. package/dist/cjs/OtlpTracer.js +45 -130
  14. package/dist/cjs/OtlpTracer.js.map +1 -1
  15. package/dist/cjs/index.js +9 -1
  16. package/dist/cjs/internal/otlpExporter.js +81 -0
  17. package/dist/cjs/internal/otlpExporter.js.map +1 -0
  18. package/dist/dts/Otlp.d.ts +29 -0
  19. package/dist/dts/Otlp.d.ts.map +1 -0
  20. package/dist/dts/OtlpLogger.d.ts +39 -0
  21. package/dist/dts/OtlpLogger.d.ts.map +1 -0
  22. package/dist/dts/OtlpMetrics.d.ts +38 -0
  23. package/dist/dts/OtlpMetrics.d.ts.map +1 -0
  24. package/dist/dts/OtlpResource.d.ts +89 -0
  25. package/dist/dts/OtlpResource.d.ts.map +1 -0
  26. package/dist/dts/OtlpTracer.d.ts +6 -3
  27. package/dist/dts/OtlpTracer.d.ts.map +1 -1
  28. package/dist/dts/index.d.ts +17 -0
  29. package/dist/dts/index.d.ts.map +1 -1
  30. package/dist/dts/internal/otlpExporter.d.ts +2 -0
  31. package/dist/dts/internal/otlpExporter.d.ts.map +1 -0
  32. package/dist/esm/Otlp.js +29 -0
  33. package/dist/esm/Otlp.js.map +1 -0
  34. package/dist/esm/OtlpLogger.js +144 -0
  35. package/dist/esm/OtlpLogger.js.map +1 -0
  36. package/dist/esm/OtlpMetrics.js +345 -0
  37. package/dist/esm/OtlpMetrics.js.map +1 -0
  38. package/dist/esm/OtlpResource.js +81 -0
  39. package/dist/esm/OtlpResource.js.map +1 -0
  40. package/dist/esm/OtlpTracer.js +42 -126
  41. package/dist/esm/OtlpTracer.js.map +1 -1
  42. package/dist/esm/index.js +17 -0
  43. package/dist/esm/index.js.map +1 -1
  44. package/dist/esm/internal/otlpExporter.js +73 -0
  45. package/dist/esm/internal/otlpExporter.js.map +1 -0
  46. package/package.json +35 -3
  47. package/src/Otlp.ts +56 -0
  48. package/src/OtlpLogger.ts +243 -0
  49. package/src/OtlpMetrics.ts +568 -0
  50. package/src/OtlpResource.ts +168 -0
  51. package/src/OtlpTracer.ts +54 -185
  52. package/src/index.ts +21 -0
  53. package/src/internal/otlpExporter.ts +114 -0
@@ -0,0 +1,568 @@
1
+ /**
2
+ * @since 1.0.0
3
+ */
4
+ import type * as Headers from "@effect/platform/Headers"
5
+ import type * as HttpClient from "@effect/platform/HttpClient"
6
+ import * as Arr from "effect/Array"
7
+ import * as Duration from "effect/Duration"
8
+ import * as Effect from "effect/Effect"
9
+ import * as Layer from "effect/Layer"
10
+ import * as Metric from "effect/Metric"
11
+ import type * as MetricKey from "effect/MetricKey"
12
+ import * as MetricState from "effect/MetricState"
13
+ import * as Option from "effect/Option"
14
+ import type * as Scope from "effect/Scope"
15
+ import * as Exporter from "./internal/otlpExporter.js"
16
+ import type { Fixed64, KeyValue } from "./OtlpResource.js"
17
+ import * as OtlpResource from "./OtlpResource.js"
18
+
19
+ /**
20
+ * @since 1.0.0
21
+ * @category Constructors
22
+ */
23
+ export const make: (options: {
24
+ readonly url: string
25
+ readonly resource: {
26
+ readonly serviceName: string
27
+ readonly serviceVersion?: string | undefined
28
+ readonly attributes?: Record<string, unknown>
29
+ }
30
+ readonly headers?: Headers.Input | undefined
31
+ readonly exportInterval?: Duration.DurationInput | undefined
32
+ }) => Effect.Effect<
33
+ void,
34
+ never,
35
+ HttpClient.HttpClient | Scope.Scope
36
+ > = Effect.fnUntraced(function*(options) {
37
+ const clock = yield* Effect.clock
38
+ const startTime = String(clock.unsafeCurrentTimeNanos())
39
+
40
+ const resource = OtlpResource.make(options.resource)
41
+ const metricsScope: IInstrumentationScope = {
42
+ name: options.resource.serviceName
43
+ }
44
+
45
+ const snapshot = (): IExportMetricsServiceRequest => {
46
+ const snapshot = Metric.unsafeSnapshot()
47
+ const nowNanos = clock.unsafeCurrentTimeNanos()
48
+ const nowTime = String(nowNanos)
49
+ const metricData: Array<IMetric> = []
50
+ const metricDataByName = new Map<string, IMetric>()
51
+ const addMetricData = (data: IMetric) => {
52
+ metricData.push(data)
53
+ metricDataByName.set(data.name, data)
54
+ }
55
+
56
+ for (let i = 0, len = snapshot.length; i < len; i++) {
57
+ const { metricKey, metricState } = snapshot[i]
58
+ let unit = "1"
59
+ const attributes = Arr.reduce(metricKey.tags, [], (acc: Array<KeyValue>, label) => {
60
+ if (label.key === "unit" || label.key === "time_unit") {
61
+ unit = label.value
62
+ }
63
+ acc.push({ key: label.key, value: { stringValue: label.value } })
64
+ return acc
65
+ })
66
+
67
+ if (MetricState.isCounterState(metricState)) {
68
+ const dataPoint: INumberDataPoint = {
69
+ attributes,
70
+ startTimeUnixNano: startTime,
71
+ timeUnixNano: nowTime
72
+ }
73
+ if (typeof metricState.count === "bigint") {
74
+ dataPoint.asInt = Number(metricState.count)
75
+ } else {
76
+ dataPoint.asDouble = metricState.count
77
+ }
78
+ if (metricDataByName.has(metricKey.name)) {
79
+ metricDataByName.get(metricKey.name)!.sum!.dataPoints.push(dataPoint)
80
+ } else {
81
+ const key = metricKey as MetricKey.MetricKey.Counter<any>
82
+ addMetricData({
83
+ name: metricKey.name,
84
+ description: getOrEmpty(key.description),
85
+ unit,
86
+ sum: {
87
+ aggregationTemporality: EAggregationTemporality.AGGREGATION_TEMPORALITY_CUMULATIVE,
88
+ isMonotonic: key.keyType.incremental,
89
+ dataPoints: [dataPoint]
90
+ }
91
+ })
92
+ }
93
+ } else if (MetricState.isGaugeState(metricState)) {
94
+ const dataPoint: INumberDataPoint = {
95
+ attributes,
96
+ startTimeUnixNano: startTime,
97
+ timeUnixNano: nowTime
98
+ }
99
+ if (typeof metricState.value === "bigint") {
100
+ dataPoint.asInt = Number(metricState.value)
101
+ } else {
102
+ dataPoint.asDouble = metricState.value
103
+ }
104
+ if (metricDataByName.has(metricKey.name)) {
105
+ metricDataByName.get(metricKey.name)!.gauge!.dataPoints.push(dataPoint)
106
+ } else {
107
+ addMetricData({
108
+ name: metricKey.name,
109
+ description: getOrEmpty(metricKey.description),
110
+ unit,
111
+ gauge: {
112
+ dataPoints: [dataPoint]
113
+ }
114
+ })
115
+ }
116
+ } else if (MetricState.isHistogramState(metricState)) {
117
+ const size = metricState.buckets.length
118
+ const buckets = {
119
+ boundaries: Arr.allocate(size - 1) as Array<number>,
120
+ counts: Arr.allocate(size) as Array<number>
121
+ }
122
+ let i = 0
123
+ let prev = 0
124
+ for (const [boundary, value] of metricState.buckets) {
125
+ if (i < size - 1) {
126
+ buckets.boundaries[i] = boundary
127
+ }
128
+ buckets.counts[i] = value - prev
129
+ prev = value
130
+ i++
131
+ }
132
+ const dataPoint: IHistogramDataPoint = {
133
+ attributes,
134
+ startTimeUnixNano: startTime,
135
+ timeUnixNano: nowTime,
136
+ count: metricState.count,
137
+ min: metricState.min,
138
+ max: metricState.max,
139
+ sum: metricState.sum,
140
+ bucketCounts: buckets.counts,
141
+ explicitBounds: buckets.boundaries
142
+ }
143
+
144
+ if (metricDataByName.has(metricKey.name)) {
145
+ metricDataByName.get(metricKey.name)!.histogram!.dataPoints.push(dataPoint)
146
+ } else {
147
+ addMetricData({
148
+ name: metricKey.name,
149
+ description: getOrEmpty(metricKey.description),
150
+ unit,
151
+ histogram: {
152
+ aggregationTemporality: EAggregationTemporality.AGGREGATION_TEMPORALITY_CUMULATIVE,
153
+ dataPoints: [dataPoint]
154
+ }
155
+ })
156
+ }
157
+ } else if (MetricState.isFrequencyState(metricState)) {
158
+ const dataPoints: Array<INumberDataPoint> = []
159
+ for (const [freqKey, value] of metricState.occurrences) {
160
+ dataPoints.push({
161
+ attributes: [...attributes, { key: "key", value: { stringValue: freqKey } }],
162
+ startTimeUnixNano: startTime,
163
+ timeUnixNano: nowTime,
164
+ asInt: value
165
+ })
166
+ }
167
+ if (metricDataByName.has(metricKey.name)) {
168
+ // eslint-disable-next-line no-restricted-syntax
169
+ metricDataByName.get(metricKey.name)!.sum!.dataPoints.push(...dataPoints)
170
+ } else {
171
+ addMetricData({
172
+ name: metricKey.name,
173
+ description: getOrEmpty(metricKey.description),
174
+ unit,
175
+ sum: {
176
+ aggregationTemporality: EAggregationTemporality.AGGREGATION_TEMPORALITY_CUMULATIVE,
177
+ isMonotonic: true,
178
+ dataPoints
179
+ }
180
+ })
181
+ }
182
+ } else if (MetricState.isSummaryState(metricState)) {
183
+ const dataPoints: Array<INumberDataPoint> = [{
184
+ attributes: [...attributes, { key: "quantile", value: { stringValue: "min" } }],
185
+ startTimeUnixNano: startTime,
186
+ timeUnixNano: nowTime,
187
+ asDouble: metricState.min
188
+ }]
189
+ for (const [quantile, value] of metricState.quantiles) {
190
+ dataPoints.push({
191
+ attributes: [...attributes, { key: "quantile", value: { stringValue: quantile.toString() } }],
192
+ startTimeUnixNano: startTime,
193
+ timeUnixNano: nowTime,
194
+ asDouble: value._tag === "Some" ? value.value : 0
195
+ })
196
+ }
197
+ dataPoints.push({
198
+ attributes: [...attributes, { key: "quantile", value: { stringValue: "max" } }],
199
+ startTimeUnixNano: startTime,
200
+ timeUnixNano: nowTime,
201
+ asDouble: metricState.max
202
+ })
203
+ const countDataPoint: INumberDataPoint = {
204
+ attributes,
205
+ startTimeUnixNano: startTime,
206
+ timeUnixNano: nowTime,
207
+ asInt: metricState.count
208
+ }
209
+ const sumDataPoint: INumberDataPoint = {
210
+ attributes,
211
+ startTimeUnixNano: startTime,
212
+ timeUnixNano: nowTime,
213
+ asDouble: metricState.sum
214
+ }
215
+
216
+ if (metricDataByName.has(`${metricKey.name}_quantiles`)) {
217
+ // eslint-disable-next-line no-restricted-syntax
218
+ metricDataByName.get(`${metricKey.name}_quantiles`)!.sum!.dataPoints.push(...dataPoints)
219
+ metricDataByName.get(`${metricKey.name}_count`)!.sum!.dataPoints.push(countDataPoint)
220
+ metricDataByName.get(`${metricKey.name}_sum`)!.sum!.dataPoints.push(sumDataPoint)
221
+ } else {
222
+ addMetricData({
223
+ name: `${metricKey.name}_quantiles`,
224
+ description: getOrEmpty(metricKey.description),
225
+ unit,
226
+ sum: {
227
+ aggregationTemporality: EAggregationTemporality.AGGREGATION_TEMPORALITY_CUMULATIVE,
228
+ isMonotonic: false,
229
+ dataPoints
230
+ }
231
+ })
232
+ addMetricData({
233
+ name: `${metricKey.name}_count`,
234
+ description: getOrEmpty(metricKey.description),
235
+ unit: "1",
236
+ sum: {
237
+ aggregationTemporality: EAggregationTemporality.AGGREGATION_TEMPORALITY_CUMULATIVE,
238
+ isMonotonic: true,
239
+ dataPoints: [countDataPoint]
240
+ }
241
+ })
242
+ addMetricData({
243
+ name: `${metricKey.name}_sum`,
244
+ description: getOrEmpty(metricKey.description),
245
+ unit: "1",
246
+ sum: {
247
+ aggregationTemporality: EAggregationTemporality.AGGREGATION_TEMPORALITY_CUMULATIVE,
248
+ isMonotonic: true,
249
+ dataPoints: [sumDataPoint]
250
+ }
251
+ })
252
+ }
253
+ }
254
+ }
255
+
256
+ return {
257
+ resourceMetrics: [{
258
+ resource,
259
+ scopeMetrics: [{
260
+ scope: metricsScope,
261
+ metrics: metricData
262
+ }]
263
+ }]
264
+ }
265
+ }
266
+
267
+ yield* Exporter.make({
268
+ label: "OtlpMetrics",
269
+ url: options.url,
270
+ headers: options.headers,
271
+ maxBatchSize: "disabled",
272
+ exportInterval: options.exportInterval ?? Duration.seconds(10),
273
+ body: snapshot
274
+ })
275
+ })
276
+
277
+ /**
278
+ * @since 1.0.0
279
+ * @category Layers
280
+ */
281
+ export const layer = (options: {
282
+ readonly url: string
283
+ readonly resource: {
284
+ readonly serviceName: string
285
+ readonly serviceVersion?: string | undefined
286
+ readonly attributes?: Record<string, unknown>
287
+ }
288
+ readonly headers?: Headers.Input | undefined
289
+ readonly exportInterval?: Duration.DurationInput | undefined
290
+ }): Layer.Layer<never, never, HttpClient.HttpClient> => Layer.scopedDiscard(make(options))
291
+
292
+ // internal
293
+
294
+ const getOrEmpty = Option.getOrElse(() => "")
295
+
296
+ /** Properties of an InstrumentationScope. */
297
+ interface IInstrumentationScope {
298
+ /** InstrumentationScope name */
299
+ name: string
300
+ /** InstrumentationScope version */
301
+ version?: string
302
+ /** InstrumentationScope attributes */
303
+ attributes?: Array<KeyValue>
304
+ /** InstrumentationScope droppedAttributesCount */
305
+ droppedAttributesCount?: number
306
+ }
307
+
308
+ /** Properties of an ExportMetricsServiceRequest. */
309
+ interface IExportMetricsServiceRequest {
310
+ /** ExportMetricsServiceRequest resourceMetrics */
311
+ resourceMetrics: Array<IResourceMetrics>
312
+ }
313
+ /** Properties of a ResourceMetrics. */
314
+ interface IResourceMetrics {
315
+ /** ResourceMetrics resource */
316
+ resource?: OtlpResource.Resource
317
+ /** ResourceMetrics scopeMetrics */
318
+ scopeMetrics: Array<IScopeMetrics>
319
+ /** ResourceMetrics schemaUrl */
320
+ schemaUrl?: string
321
+ }
322
+ /** Properties of an IScopeMetrics. */
323
+ interface IScopeMetrics {
324
+ /** ScopeMetrics scope */
325
+ scope?: IInstrumentationScope
326
+ /** ScopeMetrics metrics */
327
+ metrics: Array<IMetric>
328
+ /** ScopeMetrics schemaUrl */
329
+ schemaUrl?: string
330
+ }
331
+ /** Properties of a Metric. */
332
+ interface IMetric {
333
+ /** Metric name */
334
+ name: string
335
+ /** Metric description */
336
+ description?: string
337
+ /** Metric unit */
338
+ unit?: string
339
+ /** Metric gauge */
340
+ gauge?: IGauge
341
+ /** Metric sum */
342
+ sum?: ISum
343
+ /** Metric histogram */
344
+ histogram?: IHistogram
345
+ /** Metric exponentialHistogram */
346
+ exponentialHistogram?: IExponentialHistogram
347
+ /** Metric summary */
348
+ summary?: ISummary
349
+ }
350
+ /** Properties of a Gauge. */
351
+ interface IGauge {
352
+ /** Gauge dataPoints */
353
+ dataPoints: Array<INumberDataPoint>
354
+ }
355
+ /** Properties of a Sum. */
356
+ interface ISum {
357
+ /** Sum dataPoints */
358
+ dataPoints: Array<INumberDataPoint>
359
+ /** Sum aggregationTemporality */
360
+ aggregationTemporality: EAggregationTemporality
361
+ /** Sum isMonotonic */
362
+ isMonotonic: boolean
363
+ }
364
+ /** Properties of a Histogram. */
365
+ interface IHistogram {
366
+ /** Histogram dataPoints */
367
+ dataPoints: Array<IHistogramDataPoint>
368
+ /** Histogram aggregationTemporality */
369
+ aggregationTemporality?: EAggregationTemporality
370
+ }
371
+ /** Properties of an ExponentialHistogram. */
372
+ interface IExponentialHistogram {
373
+ /** ExponentialHistogram dataPoints */
374
+ dataPoints: Array<IExponentialHistogramDataPoint>
375
+ /** ExponentialHistogram aggregationTemporality */
376
+ aggregationTemporality?: EAggregationTemporality
377
+ }
378
+ /** Properties of a Summary. */
379
+ interface ISummary {
380
+ /** Summary dataPoints */
381
+ dataPoints: Array<ISummaryDataPoint>
382
+ }
383
+ /** Properties of a NumberDataPoint. */
384
+ interface INumberDataPoint {
385
+ /** NumberDataPoint attributes */
386
+ attributes: Array<KeyValue>
387
+ /** NumberDataPoint startTimeUnixNano */
388
+ startTimeUnixNano?: Fixed64
389
+ /** NumberDataPoint timeUnixNano */
390
+ timeUnixNano?: Fixed64
391
+ /** NumberDataPoint asDouble */
392
+ asDouble?: number | null
393
+ /** NumberDataPoint asInt */
394
+ asInt?: number
395
+ /** NumberDataPoint exemplars */
396
+ exemplars?: Array<IExemplar>
397
+ /** NumberDataPoint flags */
398
+ flags?: number
399
+ }
400
+ /** Properties of a HistogramDataPoint. */
401
+ interface IHistogramDataPoint {
402
+ /** HistogramDataPoint attributes */
403
+ attributes?: Array<KeyValue>
404
+ /** HistogramDataPoint startTimeUnixNano */
405
+ startTimeUnixNano?: Fixed64
406
+ /** HistogramDataPoint timeUnixNano */
407
+ timeUnixNano?: Fixed64
408
+ /** HistogramDataPoint count */
409
+ count?: number
410
+ /** HistogramDataPoint sum */
411
+ sum?: number
412
+ /** HistogramDataPoint bucketCounts */
413
+ bucketCounts?: Array<number>
414
+ /** HistogramDataPoint explicitBounds */
415
+ explicitBounds?: Array<number>
416
+ /** HistogramDataPoint exemplars */
417
+ exemplars?: Array<IExemplar>
418
+ /** HistogramDataPoint flags */
419
+ flags?: number
420
+ /** HistogramDataPoint min */
421
+ min?: number
422
+ /** HistogramDataPoint max */
423
+ max?: number
424
+ }
425
+ /** Properties of an ExponentialHistogramDataPoint. */
426
+ interface IExponentialHistogramDataPoint {
427
+ /** ExponentialHistogramDataPoint attributes */
428
+ attributes?: Array<KeyValue>
429
+ /** ExponentialHistogramDataPoint startTimeUnixNano */
430
+ startTimeUnixNano?: Fixed64
431
+ /** ExponentialHistogramDataPoint timeUnixNano */
432
+ timeUnixNano?: Fixed64
433
+ /** ExponentialHistogramDataPoint count */
434
+ count?: number
435
+ /** ExponentialHistogramDataPoint sum */
436
+ sum?: number
437
+ /** ExponentialHistogramDataPoint scale */
438
+ scale?: number
439
+ /** ExponentialHistogramDataPoint zeroCount */
440
+ zeroCount?: number
441
+ /** ExponentialHistogramDataPoint positive */
442
+ positive?: IBuckets
443
+ /** ExponentialHistogramDataPoint negative */
444
+ negative?: IBuckets
445
+ /** ExponentialHistogramDataPoint flags */
446
+ flags?: number
447
+ /** ExponentialHistogramDataPoint exemplars */
448
+ exemplars?: Array<IExemplar>
449
+ /** ExponentialHistogramDataPoint min */
450
+ min?: number
451
+ /** ExponentialHistogramDataPoint max */
452
+ max?: number
453
+ }
454
+ /** Properties of a SummaryDataPoint. */
455
+ interface ISummaryDataPoint {
456
+ /** SummaryDataPoint attributes */
457
+ attributes?: Array<KeyValue>
458
+ /** SummaryDataPoint startTimeUnixNano */
459
+ startTimeUnixNano?: number
460
+ /** SummaryDataPoint timeUnixNano */
461
+ timeUnixNano?: string
462
+ /** SummaryDataPoint count */
463
+ count?: number
464
+ /** SummaryDataPoint sum */
465
+ sum?: number
466
+ /** SummaryDataPoint quantileValues */
467
+ quantileValues?: Array<IValueAtQuantile>
468
+ /** SummaryDataPoint flags */
469
+ flags?: number
470
+ }
471
+ /** Properties of a ValueAtQuantile. */
472
+ interface IValueAtQuantile {
473
+ /** ValueAtQuantile quantile */
474
+ quantile?: number
475
+ /** ValueAtQuantile value */
476
+ value?: number
477
+ }
478
+ /** Properties of a Buckets. */
479
+ interface IBuckets {
480
+ /** Buckets offset */
481
+ offset?: number
482
+ /** Buckets bucketCounts */
483
+ bucketCounts?: Array<number>
484
+ }
485
+ /** Properties of an Exemplar. */
486
+ interface IExemplar {
487
+ /** Exemplar filteredAttributes */
488
+ filteredAttributes?: Array<KeyValue>
489
+ /** Exemplar timeUnixNano */
490
+ timeUnixNano?: string
491
+ /** Exemplar asDouble */
492
+ asDouble?: number
493
+ /** Exemplar asInt */
494
+ asInt?: number
495
+ /** Exemplar spanId */
496
+ spanId?: string | Uint8Array
497
+ /** Exemplar traceId */
498
+ traceId?: string | Uint8Array
499
+ }
500
+ /**
501
+ * AggregationTemporality defines how a metric aggregator reports aggregated
502
+ * values. It describes how those values relate to the time interval over
503
+ * which they are aggregated.
504
+ */
505
+ const enum EAggregationTemporality {
506
+ AGGREGATION_TEMPORALITY_UNSPECIFIED = 0,
507
+ /** DELTA is an AggregationTemporality for a metric aggregator which reports
508
+ changes since last report time. Successive metrics contain aggregation of
509
+ values from continuous and non-overlapping intervals.
510
+
511
+ The values for a DELTA metric are based only on the time interval
512
+ associated with one measurement cycle. There is no dependency on
513
+ previous measurements like is the case for CUMULATIVE metrics.
514
+
515
+ For example, consider a system measuring the number of requests that
516
+ it receives and reports the sum of these requests every second as a
517
+ DELTA metric:
518
+
519
+ 1. The system starts receiving at time=t_0.
520
+ 2. A request is received, the system measures 1 request.
521
+ 3. A request is received, the system measures 1 request.
522
+ 4. A request is received, the system measures 1 request.
523
+ 5. The 1 second collection cycle ends. A metric is exported for the
524
+ number of requests received over the interval of time t_0 to
525
+ t_0+1 with a value of 3.
526
+ 6. A request is received, the system measures 1 request.
527
+ 7. A request is received, the system measures 1 request.
528
+ 8. The 1 second collection cycle ends. A metric is exported for the
529
+ number of requests received over the interval of time t_0+1 to
530
+ t_0+2 with a value of 2. */
531
+ AGGREGATION_TEMPORALITY_DELTA = 1,
532
+ /** CUMULATIVE is an AggregationTemporality for a metric aggregator which
533
+ reports changes since a fixed start time. This means that current values
534
+ of a CUMULATIVE metric depend on all previous measurements since the
535
+ start time. Because of this, the sender is required to retain this state
536
+ in some form. If this state is lost or invalidated, the CUMULATIVE metric
537
+ values MUST be reset and a new fixed start time following the last
538
+ reported measurement time sent MUST be used.
539
+
540
+ For example, consider a system measuring the number of requests that
541
+ it receives and reports the sum of these requests every second as a
542
+ CUMULATIVE metric:
543
+
544
+ 1. The system starts receiving at time=t_0.
545
+ 2. A request is received, the system measures 1 request.
546
+ 3. A request is received, the system measures 1 request.
547
+ 4. A request is received, the system measures 1 request.
548
+ 5. The 1 second collection cycle ends. A metric is exported for the
549
+ number of requests received over the interval of time t_0 to
550
+ t_0+1 with a value of 3.
551
+ 6. A request is received, the system measures 1 request.
552
+ 7. A request is received, the system measures 1 request.
553
+ 8. The 1 second collection cycle ends. A metric is exported for the
554
+ number of requests received over the interval of time t_0 to
555
+ t_0+2 with a value of 5.
556
+ 9. The system experiences a fault and loses state.
557
+ 10. The system recovers and resumes receiving at time=t_1.
558
+ 11. A request is received, the system measures 1 request.
559
+ 12. The 1 second collection cycle ends. A metric is exported for the
560
+ number of requests received over the interval of time t_1 to
561
+ t_0+1 with a value of 1.
562
+
563
+ Note: Even though, when reporting changes since last report time, using
564
+ CUMULATIVE is valid, it is not recommended. This may cause problems for
565
+ systems that do not use start_time to determine when the aggregation
566
+ value was reset (e.g. Prometheus). */
567
+ AGGREGATION_TEMPORALITY_CUMULATIVE = 2
568
+ }