observability-toolkit 1.8.2 → 1.8.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (259) hide show
  1. package/README.md +60 -0
  2. package/dist/backends/index.d.ts +43 -0
  3. package/dist/backends/index.d.ts.map +1 -1
  4. package/dist/backends/index.js +41 -0
  5. package/dist/backends/index.js.map +1 -1
  6. package/dist/backends/index.test.d.ts +5 -0
  7. package/dist/backends/index.test.d.ts.map +1 -0
  8. package/dist/backends/index.test.js +156 -0
  9. package/dist/backends/index.test.js.map +1 -0
  10. package/dist/backends/local-jsonl-boolean-search.test.js +15 -12
  11. package/dist/backends/local-jsonl-boolean-search.test.js.map +1 -1
  12. package/dist/backends/local-jsonl-cache.test.d.ts +2 -0
  13. package/dist/backends/local-jsonl-cache.test.d.ts.map +1 -0
  14. package/dist/backends/local-jsonl-cache.test.js +295 -0
  15. package/dist/backends/local-jsonl-cache.test.js.map +1 -0
  16. package/dist/backends/local-jsonl-circuit-breaker.test.d.ts +2 -0
  17. package/dist/backends/local-jsonl-circuit-breaker.test.d.ts.map +1 -0
  18. package/dist/backends/local-jsonl-circuit-breaker.test.js +180 -0
  19. package/dist/backends/local-jsonl-circuit-breaker.test.js.map +1 -0
  20. package/dist/backends/local-jsonl-export.test.d.ts +2 -0
  21. package/dist/backends/local-jsonl-export.test.d.ts.map +1 -0
  22. package/dist/backends/local-jsonl-export.test.js +704 -0
  23. package/dist/backends/local-jsonl-export.test.js.map +1 -0
  24. package/dist/backends/local-jsonl-index.test.d.ts +2 -0
  25. package/dist/backends/local-jsonl-index.test.d.ts.map +1 -0
  26. package/dist/backends/local-jsonl-index.test.js +554 -0
  27. package/dist/backends/local-jsonl-index.test.js.map +1 -0
  28. package/dist/backends/local-jsonl-logs.test.d.ts +2 -0
  29. package/dist/backends/local-jsonl-logs.test.d.ts.map +1 -0
  30. package/dist/backends/local-jsonl-logs.test.js +612 -0
  31. package/dist/backends/local-jsonl-logs.test.js.map +1 -0
  32. package/dist/backends/local-jsonl-metrics.test.d.ts +2 -0
  33. package/dist/backends/local-jsonl-metrics.test.d.ts.map +1 -0
  34. package/dist/backends/local-jsonl-metrics.test.js +876 -0
  35. package/dist/backends/local-jsonl-metrics.test.js.map +1 -0
  36. package/dist/backends/local-jsonl-traces.test.d.ts +2 -0
  37. package/dist/backends/local-jsonl-traces.test.d.ts.map +1 -0
  38. package/dist/backends/local-jsonl-traces.test.js +1729 -0
  39. package/dist/backends/local-jsonl-traces.test.js.map +1 -0
  40. package/dist/backends/local-jsonl.d.ts +9 -0
  41. package/dist/backends/local-jsonl.d.ts.map +1 -1
  42. package/dist/backends/local-jsonl.js +348 -227
  43. package/dist/backends/local-jsonl.js.map +1 -1
  44. package/dist/backends/local-jsonl.test.js +290 -21
  45. package/dist/backends/local-jsonl.test.js.map +1 -1
  46. package/dist/backends/signoz-api-circuit-breaker.test.d.ts +6 -0
  47. package/dist/backends/signoz-api-circuit-breaker.test.d.ts.map +1 -0
  48. package/dist/backends/signoz-api-circuit-breaker.test.js +548 -0
  49. package/dist/backends/signoz-api-circuit-breaker.test.js.map +1 -0
  50. package/dist/backends/signoz-api-rate-limiter.test.d.ts +6 -0
  51. package/dist/backends/signoz-api-rate-limiter.test.d.ts.map +1 -0
  52. package/dist/backends/signoz-api-rate-limiter.test.js +389 -0
  53. package/dist/backends/signoz-api-rate-limiter.test.js.map +1 -0
  54. package/dist/backends/signoz-api-ssrf.test.d.ts +6 -0
  55. package/dist/backends/signoz-api-ssrf.test.d.ts.map +1 -0
  56. package/dist/backends/signoz-api-ssrf.test.js +216 -0
  57. package/dist/backends/signoz-api-ssrf.test.js.map +1 -0
  58. package/dist/backends/signoz-api-test-helpers.d.ts +80 -0
  59. package/dist/backends/signoz-api-test-helpers.d.ts.map +1 -0
  60. package/dist/backends/signoz-api-test-helpers.js +79 -0
  61. package/dist/backends/signoz-api-test-helpers.js.map +1 -0
  62. package/dist/backends/signoz-api.d.ts +16 -0
  63. package/dist/backends/signoz-api.d.ts.map +1 -1
  64. package/dist/backends/signoz-api.js +71 -9
  65. package/dist/backends/signoz-api.js.map +1 -1
  66. package/dist/backends/signoz-api.test.d.ts +9 -0
  67. package/dist/backends/signoz-api.test.d.ts.map +1 -1
  68. package/dist/backends/signoz-api.test.js +14 -1027
  69. package/dist/backends/signoz-api.test.js.map +1 -1
  70. package/dist/lib/cache.d.ts +47 -1
  71. package/dist/lib/cache.d.ts.map +1 -1
  72. package/dist/lib/cache.js +40 -3
  73. package/dist/lib/cache.js.map +1 -1
  74. package/dist/lib/circuit-breaker.d.ts +83 -0
  75. package/dist/lib/circuit-breaker.d.ts.map +1 -0
  76. package/dist/lib/circuit-breaker.js +125 -0
  77. package/dist/lib/circuit-breaker.js.map +1 -0
  78. package/dist/lib/circuit-breaker.test.d.ts +2 -0
  79. package/dist/lib/circuit-breaker.test.d.ts.map +1 -0
  80. package/dist/lib/circuit-breaker.test.js +263 -0
  81. package/dist/lib/circuit-breaker.test.js.map +1 -0
  82. package/dist/lib/constants-symlink.test.d.ts +12 -0
  83. package/dist/lib/constants-symlink.test.d.ts.map +1 -0
  84. package/dist/lib/constants-symlink.test.js +357 -0
  85. package/dist/lib/constants-symlink.test.js.map +1 -0
  86. package/dist/lib/constants.d.ts +43 -0
  87. package/dist/lib/constants.d.ts.map +1 -1
  88. package/dist/lib/constants.js +154 -24
  89. package/dist/lib/constants.js.map +1 -1
  90. package/dist/lib/constants.test.js +156 -7
  91. package/dist/lib/constants.test.js.map +1 -1
  92. package/dist/lib/edge-cases.test.d.ts +11 -0
  93. package/dist/lib/edge-cases.test.d.ts.map +1 -0
  94. package/dist/lib/edge-cases.test.js +634 -0
  95. package/dist/lib/edge-cases.test.js.map +1 -0
  96. package/dist/lib/error-sanitizer.d.ts.map +1 -1
  97. package/dist/lib/error-sanitizer.js +62 -26
  98. package/dist/lib/error-sanitizer.js.map +1 -1
  99. package/dist/lib/error-sanitizer.test.js +186 -0
  100. package/dist/lib/error-sanitizer.test.js.map +1 -1
  101. package/dist/lib/error-types.d.ts +54 -0
  102. package/dist/lib/error-types.d.ts.map +1 -0
  103. package/dist/lib/error-types.js +154 -0
  104. package/dist/lib/error-types.js.map +1 -0
  105. package/dist/lib/error-types.test.d.ts +2 -0
  106. package/dist/lib/error-types.test.d.ts.map +1 -0
  107. package/dist/lib/error-types.test.js +196 -0
  108. package/dist/lib/error-types.test.js.map +1 -0
  109. package/dist/lib/file-utils.test.js +3 -3
  110. package/dist/lib/file-utils.test.js.map +1 -1
  111. package/dist/lib/indexer.test.js +157 -24
  112. package/dist/lib/indexer.test.js.map +1 -1
  113. package/dist/lib/input-validator.d.ts +17 -0
  114. package/dist/lib/input-validator.d.ts.map +1 -1
  115. package/dist/lib/input-validator.fuzz.test.d.ts +12 -0
  116. package/dist/lib/input-validator.fuzz.test.d.ts.map +1 -0
  117. package/dist/lib/input-validator.fuzz.test.js +290 -0
  118. package/dist/lib/input-validator.fuzz.test.js.map +1 -0
  119. package/dist/lib/input-validator.js +62 -3
  120. package/dist/lib/input-validator.js.map +1 -1
  121. package/dist/lib/input-validator.test.js +129 -1
  122. package/dist/lib/input-validator.test.js.map +1 -1
  123. package/dist/lib/logger.d.ts +46 -0
  124. package/dist/lib/logger.d.ts.map +1 -0
  125. package/dist/lib/logger.js +81 -0
  126. package/dist/lib/logger.js.map +1 -0
  127. package/dist/lib/logger.test.d.ts +2 -0
  128. package/dist/lib/logger.test.d.ts.map +1 -0
  129. package/dist/lib/logger.test.js +122 -0
  130. package/dist/lib/logger.test.js.map +1 -0
  131. package/dist/lib/query-sanitizer.d.ts +51 -3
  132. package/dist/lib/query-sanitizer.d.ts.map +1 -1
  133. package/dist/lib/query-sanitizer.js +105 -31
  134. package/dist/lib/query-sanitizer.js.map +1 -1
  135. package/dist/lib/query-sanitizer.test.js +102 -1
  136. package/dist/lib/query-sanitizer.test.js.map +1 -1
  137. package/dist/lib/server-utils.d.ts +88 -0
  138. package/dist/lib/server-utils.d.ts.map +1 -0
  139. package/dist/lib/server-utils.js +173 -0
  140. package/dist/lib/server-utils.js.map +1 -0
  141. package/dist/lib/shared-schemas.d.ts +81 -0
  142. package/dist/lib/shared-schemas.d.ts.map +1 -0
  143. package/dist/lib/shared-schemas.js +80 -0
  144. package/dist/lib/shared-schemas.js.map +1 -0
  145. package/dist/lib/shared-schemas.test.d.ts +5 -0
  146. package/dist/lib/shared-schemas.test.d.ts.map +1 -0
  147. package/dist/lib/shared-schemas.test.js +106 -0
  148. package/dist/lib/shared-schemas.test.js.map +1 -0
  149. package/dist/lib/toon-encoder.d.ts +26 -0
  150. package/dist/lib/toon-encoder.d.ts.map +1 -0
  151. package/dist/lib/toon-encoder.js +61 -0
  152. package/dist/lib/toon-encoder.js.map +1 -0
  153. package/dist/lib/toon-encoder.test.d.ts +5 -0
  154. package/dist/lib/toon-encoder.test.d.ts.map +1 -0
  155. package/dist/lib/toon-encoder.test.js +85 -0
  156. package/dist/lib/toon-encoder.test.js.map +1 -0
  157. package/dist/server.d.ts +1 -49
  158. package/dist/server.d.ts.map +1 -1
  159. package/dist/server.js +154 -162
  160. package/dist/server.js.map +1 -1
  161. package/dist/server.test.js +198 -7
  162. package/dist/server.test.js.map +1 -1
  163. package/dist/test-helpers/env-utils.d.ts +87 -0
  164. package/dist/test-helpers/env-utils.d.ts.map +1 -0
  165. package/dist/test-helpers/env-utils.js +132 -0
  166. package/dist/test-helpers/env-utils.js.map +1 -0
  167. package/dist/test-helpers/file-utils.d.ts +67 -0
  168. package/dist/test-helpers/file-utils.d.ts.map +1 -1
  169. package/dist/test-helpers/file-utils.js +165 -2
  170. package/dist/test-helpers/file-utils.js.map +1 -1
  171. package/dist/test-helpers/fuzz-generators.d.ts +58 -0
  172. package/dist/test-helpers/fuzz-generators.d.ts.map +1 -0
  173. package/dist/test-helpers/fuzz-generators.js +216 -0
  174. package/dist/test-helpers/fuzz-generators.js.map +1 -0
  175. package/dist/test-helpers/index.d.ts +11 -0
  176. package/dist/test-helpers/index.d.ts.map +1 -0
  177. package/dist/test-helpers/index.js +30 -0
  178. package/dist/test-helpers/index.js.map +1 -0
  179. package/dist/test-helpers/memfs-utils.d.ts +181 -0
  180. package/dist/test-helpers/memfs-utils.d.ts.map +1 -0
  181. package/dist/test-helpers/memfs-utils.js +292 -0
  182. package/dist/test-helpers/memfs-utils.js.map +1 -0
  183. package/dist/test-helpers/memfs-utils.test.d.ts +5 -0
  184. package/dist/test-helpers/memfs-utils.test.d.ts.map +1 -0
  185. package/dist/test-helpers/memfs-utils.test.js +338 -0
  186. package/dist/test-helpers/memfs-utils.test.js.map +1 -0
  187. package/dist/test-helpers/mock-backends.d.ts +113 -2
  188. package/dist/test-helpers/mock-backends.d.ts.map +1 -1
  189. package/dist/test-helpers/mock-backends.js +199 -3
  190. package/dist/test-helpers/mock-backends.js.map +1 -1
  191. package/dist/test-helpers/mock-backends.test.d.ts +5 -0
  192. package/dist/test-helpers/mock-backends.test.d.ts.map +1 -0
  193. package/dist/test-helpers/mock-backends.test.js +368 -0
  194. package/dist/test-helpers/mock-backends.test.js.map +1 -0
  195. package/dist/test-helpers/race-condition-helpers.d.ts +85 -0
  196. package/dist/test-helpers/race-condition-helpers.d.ts.map +1 -0
  197. package/dist/test-helpers/race-condition-helpers.js +279 -0
  198. package/dist/test-helpers/race-condition-helpers.js.map +1 -0
  199. package/dist/test-helpers/schema-validators.d.ts +32 -0
  200. package/dist/test-helpers/schema-validators.d.ts.map +1 -0
  201. package/dist/test-helpers/schema-validators.js +125 -0
  202. package/dist/test-helpers/schema-validators.js.map +1 -0
  203. package/dist/test-helpers/test-data-builders.d.ts +260 -0
  204. package/dist/test-helpers/test-data-builders.d.ts.map +1 -0
  205. package/dist/test-helpers/test-data-builders.js +337 -0
  206. package/dist/test-helpers/test-data-builders.js.map +1 -0
  207. package/dist/test-helpers/test-data-builders.test.d.ts +2 -0
  208. package/dist/test-helpers/test-data-builders.test.d.ts.map +1 -0
  209. package/dist/test-helpers/test-data-builders.test.js +306 -0
  210. package/dist/test-helpers/test-data-builders.test.js.map +1 -0
  211. package/dist/test-helpers/tool-validators.d.ts +28 -0
  212. package/dist/test-helpers/tool-validators.d.ts.map +1 -0
  213. package/dist/test-helpers/tool-validators.js +71 -0
  214. package/dist/test-helpers/tool-validators.js.map +1 -0
  215. package/dist/tools/context-stats.d.ts +1 -0
  216. package/dist/tools/context-stats.d.ts.map +1 -1
  217. package/dist/tools/context-stats.js +9 -5
  218. package/dist/tools/context-stats.js.map +1 -1
  219. package/dist/tools/context-stats.test.js +24 -10
  220. package/dist/tools/context-stats.test.js.map +1 -1
  221. package/dist/tools/get-trace-url.js +2 -2
  222. package/dist/tools/get-trace-url.js.map +1 -1
  223. package/dist/tools/health-check.js +2 -2
  224. package/dist/tools/health-check.js.map +1 -1
  225. package/dist/tools/query-evaluations.d.ts +21 -18
  226. package/dist/tools/query-evaluations.d.ts.map +1 -1
  227. package/dist/tools/query-evaluations.js +33 -19
  228. package/dist/tools/query-evaluations.js.map +1 -1
  229. package/dist/tools/query-evaluations.test.js +60 -63
  230. package/dist/tools/query-evaluations.test.js.map +1 -1
  231. package/dist/tools/query-llm-events.d.ts +19 -15
  232. package/dist/tools/query-llm-events.d.ts.map +1 -1
  233. package/dist/tools/query-llm-events.js +31 -15
  234. package/dist/tools/query-llm-events.js.map +1 -1
  235. package/dist/tools/query-llm-events.test.js +277 -12
  236. package/dist/tools/query-llm-events.test.js.map +1 -1
  237. package/dist/tools/query-logs.d.ts +22 -22
  238. package/dist/tools/query-logs.d.ts.map +1 -1
  239. package/dist/tools/query-logs.js +9 -9
  240. package/dist/tools/query-logs.js.map +1 -1
  241. package/dist/tools/query-logs.test.js +19 -72
  242. package/dist/tools/query-logs.test.js.map +1 -1
  243. package/dist/tools/query-metrics.d.ts +14 -14
  244. package/dist/tools/query-metrics.d.ts.map +1 -1
  245. package/dist/tools/query-metrics.js +9 -9
  246. package/dist/tools/query-metrics.js.map +1 -1
  247. package/dist/tools/query-metrics.test.js +12 -25
  248. package/dist/tools/query-metrics.test.js.map +1 -1
  249. package/dist/tools/query-traces.d.ts +28 -28
  250. package/dist/tools/query-traces.d.ts.map +1 -1
  251. package/dist/tools/query-traces.js +18 -18
  252. package/dist/tools/query-traces.js.map +1 -1
  253. package/dist/tools/query-traces.test.js +58 -54
  254. package/dist/tools/query-traces.test.js.map +1 -1
  255. package/dist/tools/setup-claudeignore.js +7 -7
  256. package/dist/tools/setup-claudeignore.js.map +1 -1
  257. package/dist/tools/setup-claudeignore.test.js +4 -25
  258. package/dist/tools/setup-claudeignore.test.js.map +1 -1
  259. package/package.json +4 -2
@@ -0,0 +1,634 @@
1
+ /**
2
+ * Edge case tests for security and robustness (T2)
3
+ *
4
+ * Covers:
5
+ * - Malformed JSONL with null bytes
6
+ * - Extremely large attribute values
7
+ * - Time range edge cases spanning year boundaries
8
+ * - Integer overflow in timestamp calculations
9
+ */
10
+ import { describe, it, before, after, beforeEach } from 'node:test';
11
+ import assert from 'node:assert';
12
+ import * as fs from 'fs';
13
+ import * as path from 'path';
14
+ import { readJsonlSync, readJsonlSyncWithStats, streamJsonl, filterByDateRange, } from './file-utils.js';
15
+ import { LocalJsonlBackend } from '../backends/local-jsonl.js';
16
+ import { createTempDir, removeTempDir, getSharedTempDir, clearTempDir, removeSharedTempDir, writeJsonlFileAsync, getTestDate } from '../test-helpers/file-utils.js';
17
+ describe('edge-cases: malformed JSONL with null bytes', () => {
18
+ let testDir;
19
+ before(() => {
20
+ testDir = getSharedTempDir('EdgeCasesNullBytes');
21
+ });
22
+ beforeEach(() => {
23
+ clearTempDir(testDir);
24
+ });
25
+ after(() => {
26
+ removeSharedTempDir('EdgeCasesNullBytes');
27
+ });
28
+ describe('readJsonlSync', () => {
29
+ it('should skip lines containing null bytes in JSON string values', () => {
30
+ const filePath = path.join(testDir, 'test.jsonl');
31
+ // Line with null byte in value - null byte in JSON string may cause parse issues
32
+ // depending on how Node handles it. The actual behavior is implementation-specific.
33
+ fs.writeFileSync(filePath, '{"id":1}\n{"id":2,"val":"test\0null"}\n{"id":3}');
34
+ const result = readJsonlSync(filePath);
35
+ // At minimum, we should get lines 1 and 3
36
+ assert.ok(result.length >= 2);
37
+ assert.deepStrictEqual(result[0], { id: 1 });
38
+ });
39
+ it('should skip lines with null byte breaking JSON structure', () => {
40
+ const filePath = path.join(testDir, 'test.jsonl');
41
+ // Null byte as separate character breaking JSON
42
+ fs.writeFileSync(filePath, '{"id":1}\n\0\n{"id":2}');
43
+ const result = readJsonlSync(filePath);
44
+ assert.strictEqual(result.length, 2);
45
+ });
46
+ it('should handle line that is just null bytes', () => {
47
+ const filePath = path.join(testDir, 'test.jsonl');
48
+ fs.writeFileSync(filePath, '{"id":1}\n\0\0\0\n{"id":2}');
49
+ const result = readJsonlSync(filePath);
50
+ assert.strictEqual(result.length, 2);
51
+ });
52
+ it('should handle null byte at start of line', () => {
53
+ const filePath = path.join(testDir, 'test.jsonl');
54
+ fs.writeFileSync(filePath, '{"id":1}\n\0{"id":2}\n{"id":3}');
55
+ const result = readJsonlSync(filePath);
56
+ // First line valid, second line starts with null so JSON.parse may fail or succeed
57
+ // Third line valid
58
+ assert.ok(result.length >= 2);
59
+ assert.deepStrictEqual(result[0], { id: 1 });
60
+ });
61
+ it('should handle null byte at end of line', () => {
62
+ const filePath = path.join(testDir, 'test.jsonl');
63
+ fs.writeFileSync(filePath, '{"id":1}\n{"id":2}\0\n{"id":3}');
64
+ const result = readJsonlSync(filePath);
65
+ // Second line ends with null, JSON.parse may fail
66
+ assert.ok(result.length >= 2);
67
+ });
68
+ it('should handle file that is entirely null bytes', () => {
69
+ const filePath = path.join(testDir, 'test.jsonl');
70
+ fs.writeFileSync(filePath, Buffer.from([0, 0, 0, 0, 0]));
71
+ const result = readJsonlSync(filePath);
72
+ assert.deepStrictEqual(result, []);
73
+ });
74
+ it('should handle mixed null bytes and valid JSON', () => {
75
+ const filePath = path.join(testDir, 'test.jsonl');
76
+ const content = Buffer.concat([
77
+ Buffer.from('{"id":1}\n'),
78
+ Buffer.from([0, 0, 0]),
79
+ Buffer.from('\n{"id":2}\n'),
80
+ Buffer.from([0]),
81
+ Buffer.from('{"broken\n'),
82
+ Buffer.from('{"id":3}'),
83
+ ]);
84
+ fs.writeFileSync(filePath, content);
85
+ const result = readJsonlSync(filePath);
86
+ // Should get at least the valid JSON objects
87
+ assert.ok(result.length >= 2);
88
+ });
89
+ });
90
+ describe('readJsonlSyncWithStats', () => {
91
+ it('should track skipped lines with null bytes', () => {
92
+ const filePath = path.join(testDir, 'test.jsonl');
93
+ fs.writeFileSync(filePath, '{"id":1}\n\0invalid\n{"id":2}');
94
+ const { data, stats } = readJsonlSyncWithStats(filePath);
95
+ assert.strictEqual(data.length, 2);
96
+ assert.strictEqual(stats.skippedLines, 1);
97
+ assert.strictEqual(stats.totalLines, 3);
98
+ });
99
+ it('should count null-byte-only lines as skipped', () => {
100
+ const filePath = path.join(testDir, 'test.jsonl');
101
+ fs.writeFileSync(filePath, '{"id":1}\n\0\0\0\n{"id":2}');
102
+ const { data, stats } = readJsonlSyncWithStats(filePath);
103
+ assert.strictEqual(data.length, 2);
104
+ // Null-byte line is either skipped or counted as empty
105
+ assert.ok(stats.skippedLines >= 0 || stats.emptyLines >= 0);
106
+ });
107
+ });
108
+ describe('streamJsonl', () => {
109
+ it('should skip lines with null bytes when streaming', async () => {
110
+ const filePath = path.join(testDir, 'test.jsonl');
111
+ fs.writeFileSync(filePath, '{"id":1}\n\0broken\n{"id":2}');
112
+ const results = [];
113
+ for await (const item of streamJsonl(filePath)) {
114
+ results.push(item);
115
+ }
116
+ assert.strictEqual(results.length, 2);
117
+ assert.deepStrictEqual(results[0], { id: 1 });
118
+ assert.deepStrictEqual(results[1], { id: 2 });
119
+ });
120
+ it('should handle streaming file with embedded null bytes in values', async () => {
121
+ const filePath = path.join(testDir, 'test.jsonl');
122
+ fs.writeFileSync(filePath, '{"id":1,"data":"before\0after"}\n{"id":2}');
123
+ const results = [];
124
+ for await (const item of streamJsonl(filePath)) {
125
+ results.push(item);
126
+ }
127
+ // Null bytes in JSON strings may or may not parse depending on readline handling
128
+ // The important thing is it doesn't crash and we get at least valid JSON
129
+ assert.ok(results.length >= 1);
130
+ // Verify we at least got the second (clean) object
131
+ const hasId2 = results.some((r) => r.id === 2);
132
+ assert.ok(hasId2 || results.length >= 1);
133
+ });
134
+ });
135
+ describe('LocalJsonlBackend with null bytes', () => {
136
+ it('should skip malformed trace spans with null bytes', async () => {
137
+ const today = getTestDate();
138
+ const backend = new LocalJsonlBackend(testDir);
139
+ // Write file with mix of valid and null-byte-corrupted spans
140
+ const content = [
141
+ JSON.stringify({ traceId: 't1', spanId: 's1', name: 'valid1', startTime: [1700000000, 0] }),
142
+ '\0corrupted',
143
+ JSON.stringify({ traceId: 't2', spanId: 's2', name: 'valid2', startTime: [1700000000, 0] }),
144
+ ].join('\n');
145
+ fs.writeFileSync(path.join(testDir, `traces-${today}.jsonl`), content);
146
+ const results = await backend.queryTraces({});
147
+ assert.strictEqual(results.length, 2);
148
+ });
149
+ it('should skip malformed log records with null bytes', async () => {
150
+ const today = getTestDate();
151
+ const backend = new LocalJsonlBackend(testDir);
152
+ const content = [
153
+ JSON.stringify({ timestamp: '2026-01-01T00:00:00Z', body: 'valid log 1' }),
154
+ '\0\0\0',
155
+ JSON.stringify({ timestamp: '2026-01-01T00:00:01Z', body: 'valid log 2' }),
156
+ ].join('\n');
157
+ fs.writeFileSync(path.join(testDir, `logs-${today}.jsonl`), content);
158
+ const results = await backend.queryLogs({});
159
+ assert.strictEqual(results.length, 2);
160
+ });
161
+ });
162
+ });
163
+ describe('edge-cases: extremely large attribute values', () => {
164
+ let testDir;
165
+ before(() => {
166
+ testDir = getSharedTempDir('EdgeCasesLargeAttrs');
167
+ });
168
+ beforeEach(() => {
169
+ clearTempDir(testDir);
170
+ });
171
+ after(() => {
172
+ removeSharedTempDir('EdgeCasesLargeAttrs');
173
+ });
174
+ describe('attribute size limits', () => {
175
+ it('should handle span with very large attribute value', async () => {
176
+ const today = getTestDate();
177
+ const backend = new LocalJsonlBackend(testDir);
178
+ // Create a span with a 100KB attribute value
179
+ const largeValue = 'x'.repeat(100 * 1024);
180
+ const span = {
181
+ traceId: 't1',
182
+ spanId: 's1',
183
+ name: 'large-attr-span',
184
+ startTime: [1700000000, 0],
185
+ attributes: { 'large.attr': largeValue },
186
+ };
187
+ await writeJsonlFileAsync(path.join(testDir, `traces-${today}.jsonl`), [span]);
188
+ const results = await backend.queryTraces({});
189
+ assert.strictEqual(results.length, 1);
190
+ assert.strictEqual(results[0].attributes?.['large.attr'], largeValue);
191
+ });
192
+ it('should handle span with many attributes', async () => {
193
+ const today = getTestDate();
194
+ const backend = new LocalJsonlBackend(testDir);
195
+ // Create a span with 1000 attributes
196
+ const manyAttrs = {};
197
+ for (let i = 0; i < 1000; i++) {
198
+ manyAttrs[`attr.${i}`] = `value-${i}`;
199
+ }
200
+ const span = {
201
+ traceId: 't1',
202
+ spanId: 's1',
203
+ name: 'many-attrs-span',
204
+ startTime: [1700000000, 0],
205
+ attributes: manyAttrs,
206
+ };
207
+ await writeJsonlFileAsync(path.join(testDir, `traces-${today}.jsonl`), [span]);
208
+ const results = await backend.queryTraces({});
209
+ assert.strictEqual(results.length, 1);
210
+ // Attributes should include all 1000 custom attributes
211
+ // (resource attributes like service.name only added if resource is provided)
212
+ assert.ok(Object.keys(results[0].attributes || {}).length >= 1000);
213
+ });
214
+ it('should handle deeply nested attribute objects', async () => {
215
+ const today = getTestDate();
216
+ const backend = new LocalJsonlBackend(testDir);
217
+ // Create deeply nested object (50 levels)
218
+ let nested = { value: 'deep' };
219
+ for (let i = 0; i < 50; i++) {
220
+ nested = { level: i, child: nested };
221
+ }
222
+ const span = {
223
+ traceId: 't1',
224
+ spanId: 's1',
225
+ name: 'nested-span',
226
+ startTime: [1700000000, 0],
227
+ attributes: { nested },
228
+ };
229
+ await writeJsonlFileAsync(path.join(testDir, `traces-${today}.jsonl`), [span]);
230
+ const results = await backend.queryTraces({});
231
+ assert.strictEqual(results.length, 1);
232
+ assert.ok(results[0].attributes?.nested);
233
+ });
234
+ it('should handle log with very large body', async () => {
235
+ const today = getTestDate();
236
+ const backend = new LocalJsonlBackend(testDir);
237
+ const largeBody = 'Log message: ' + 'x'.repeat(50 * 1024);
238
+ const log = {
239
+ timestamp: '2026-01-01T00:00:00Z',
240
+ body: largeBody,
241
+ };
242
+ await writeJsonlFileAsync(path.join(testDir, `logs-${today}.jsonl`), [log]);
243
+ const results = await backend.queryLogs({});
244
+ assert.strictEqual(results.length, 1);
245
+ assert.strictEqual(results[0].body, largeBody);
246
+ });
247
+ it('should handle metric with large number of exemplars', async () => {
248
+ const today = getTestDate();
249
+ const backend = new LocalJsonlBackend(testDir);
250
+ // Create metric with 100 exemplars
251
+ const exemplars = Array.from({ length: 100 }, (_, i) => ({
252
+ traceId: `trace-${i}`,
253
+ spanId: `span-${i}`,
254
+ value: i * 1.5,
255
+ timestamp: `2026-01-01T00:00:${String(i % 60).padStart(2, '0')}Z`,
256
+ }));
257
+ const metric = {
258
+ timestamp: '2026-01-01T00:00:00Z',
259
+ name: 'test.metric',
260
+ value: 100,
261
+ type: 'histogram',
262
+ exemplars,
263
+ };
264
+ await writeJsonlFileAsync(path.join(testDir, `metrics-${today}.jsonl`), [metric]);
265
+ const results = await backend.queryMetrics({});
266
+ assert.ok(results.length >= 1);
267
+ });
268
+ it('should handle attribute key with special characters', async () => {
269
+ const today = getTestDate();
270
+ const backend = new LocalJsonlBackend(testDir);
271
+ const span = {
272
+ traceId: 't1',
273
+ spanId: 's1',
274
+ name: 'special-key-span',
275
+ startTime: [1700000000, 0],
276
+ attributes: {
277
+ 'key.with.dots': 'value1',
278
+ 'key-with-dashes': 'value2',
279
+ 'key_with_underscores': 'value3',
280
+ 'key:with:colons': 'value4',
281
+ 'key/with/slashes': 'value5',
282
+ },
283
+ };
284
+ await writeJsonlFileAsync(path.join(testDir, `traces-${today}.jsonl`), [span]);
285
+ const results = await backend.queryTraces({});
286
+ assert.strictEqual(results.length, 1);
287
+ assert.strictEqual(results[0].attributes?.['key.with.dots'], 'value1');
288
+ assert.strictEqual(results[0].attributes?.['key-with-dashes'], 'value2');
289
+ });
290
+ it('should handle unicode attribute values', async () => {
291
+ const today = getTestDate();
292
+ const backend = new LocalJsonlBackend(testDir);
293
+ const span = {
294
+ traceId: 't1',
295
+ spanId: 's1',
296
+ name: 'unicode-span',
297
+ startTime: [1700000000, 0],
298
+ attributes: {
299
+ 'unicode.emoji': '🔥🎉🚀',
300
+ 'unicode.chinese': '中文测试',
301
+ 'unicode.arabic': 'اختبار',
302
+ 'unicode.mixed': 'Test 测试 🎉',
303
+ },
304
+ };
305
+ await writeJsonlFileAsync(path.join(testDir, `traces-${today}.jsonl`), [span]);
306
+ const results = await backend.queryTraces({});
307
+ assert.strictEqual(results.length, 1);
308
+ assert.strictEqual(results[0].attributes?.['unicode.emoji'], '🔥🎉🚀');
309
+ assert.strictEqual(results[0].attributes?.['unicode.chinese'], '中文测试');
310
+ });
311
+ });
312
+ });
313
+ describe('edge-cases: time range spanning year boundaries', () => {
314
+ describe('filterByDateRange with year boundaries', () => {
315
+ const yearBoundaryItems = [
316
+ { id: 1, timestamp: '2025-12-30' },
317
+ { id: 2, timestamp: '2025-12-31' },
318
+ { id: 3, timestamp: '2026-01-01' },
319
+ { id: 4, timestamp: '2026-01-02' },
320
+ ];
321
+ it('should filter items spanning year boundary', () => {
322
+ const result = filterByDateRange(yearBoundaryItems, '2025-12-31', '2026-01-01');
323
+ assert.strictEqual(result.length, 2);
324
+ assert.deepStrictEqual(result[0], { id: 2, timestamp: '2025-12-31' });
325
+ assert.deepStrictEqual(result[1], { id: 3, timestamp: '2026-01-01' });
326
+ });
327
+ it('should filter items starting before year boundary', () => {
328
+ const result = filterByDateRange(yearBoundaryItems, '2025-12-29', '2025-12-31');
329
+ assert.strictEqual(result.length, 2);
330
+ assert.deepStrictEqual(result[0], { id: 1, timestamp: '2025-12-30' });
331
+ assert.deepStrictEqual(result[1], { id: 2, timestamp: '2025-12-31' });
332
+ });
333
+ it('should filter items ending after year boundary', () => {
334
+ const result = filterByDateRange(yearBoundaryItems, '2026-01-01', '2026-01-03');
335
+ assert.strictEqual(result.length, 2);
336
+ assert.deepStrictEqual(result[0], { id: 3, timestamp: '2026-01-01' });
337
+ assert.deepStrictEqual(result[1], { id: 4, timestamp: '2026-01-02' });
338
+ });
339
+ it('should handle multi-year span', () => {
340
+ const multiYearItems = [
341
+ { id: 1, timestamp: '2024-06-15' },
342
+ { id: 2, timestamp: '2025-06-15' },
343
+ { id: 3, timestamp: '2026-06-15' },
344
+ { id: 4, timestamp: '2027-06-15' },
345
+ ];
346
+ const result = filterByDateRange(multiYearItems, '2025-01-01', '2026-12-31');
347
+ assert.strictEqual(result.length, 2);
348
+ assert.strictEqual(result[0].id, 2);
349
+ assert.strictEqual(result[1].id, 3);
350
+ });
351
+ it('should handle leap year boundary (Feb 28/29)', () => {
352
+ const leapYearItems = [
353
+ { id: 1, timestamp: '2024-02-28' },
354
+ { id: 2, timestamp: '2024-02-29' }, // Leap year
355
+ { id: 3, timestamp: '2024-03-01' },
356
+ ];
357
+ const result = filterByDateRange(leapYearItems, '2024-02-28', '2024-03-01');
358
+ assert.strictEqual(result.length, 3);
359
+ });
360
+ it('should handle non-leap year boundary', () => {
361
+ const nonLeapYearItems = [
362
+ { id: 1, timestamp: '2025-02-28' },
363
+ { id: 2, timestamp: '2025-03-01' }, // No Feb 29 in 2025
364
+ ];
365
+ const result = filterByDateRange(nonLeapYearItems, '2025-02-28', '2025-03-01');
366
+ assert.strictEqual(result.length, 2);
367
+ });
368
+ it('should handle decade boundary', () => {
369
+ const decadeItems = [
370
+ { id: 1, timestamp: '2029-12-31' },
371
+ { id: 2, timestamp: '2030-01-01' },
372
+ ];
373
+ const result = filterByDateRange(decadeItems, '2029-12-30', '2030-01-02');
374
+ assert.strictEqual(result.length, 2);
375
+ });
376
+ it('should handle century boundary (if applicable)', () => {
377
+ const centuryItems = [
378
+ { id: 1, timestamp: '2099-12-31' },
379
+ { id: 2, timestamp: '2100-01-01' },
380
+ ];
381
+ const result = filterByDateRange(centuryItems, '2099-12-31', '2100-01-01');
382
+ assert.strictEqual(result.length, 2);
383
+ });
384
+ });
385
describe('LocalJsonlBackend with year boundary date ranges', () => {
    let testDir;
    let backend;
    before(() => {
        testDir = getSharedTempDir('EdgeCasesYearBoundary');
    });
    beforeEach(() => {
        // Fresh backend over an emptied directory for every test.
        clearTempDir(testDir);
        backend = new LocalJsonlBackend(testDir);
    });
    after(() => {
        removeSharedTempDir('EdgeCasesYearBoundary');
    });
    it('should query traces spanning year boundary files', async () => {
        // One daily trace file on each side of the 2025 -> 2026 rollover.
        const lastDaySpans = [
            { traceId: 't1', spanId: 's1', name: 'span-2025', startTime: [1735603200, 0] }, // 2025-12-31
        ];
        const firstDaySpans = [
            { traceId: 't2', spanId: 's2', name: 'span-2026', startTime: [1735689600, 0] }, // 2026-01-01
        ];
        await writeJsonlFileAsync(path.join(testDir, 'traces-2025-12-31.jsonl'), lastDaySpans);
        await writeJsonlFileAsync(path.join(testDir, 'traces-2026-01-01.jsonl'), firstDaySpans);
        // A range covering both dates should surface spans from both files.
        const results = await backend.queryTraces({
            startDate: '2025-12-31',
            endDate: '2026-01-01',
        });
        assert.strictEqual(results.length, 2);
    });
    it('should query logs spanning year boundary', async () => {
        const lastLogs = [
            { timestamp: '2025-12-31T23:59:59Z', body: 'Last log of 2025' },
        ];
        const firstLogs = [
            { timestamp: '2026-01-01T00:00:00Z', body: 'First log of 2026' },
        ];
        await writeJsonlFileAsync(path.join(testDir, 'logs-2025-12-31.jsonl'), lastLogs);
        await writeJsonlFileAsync(path.join(testDir, 'logs-2026-01-01.jsonl'), firstLogs);
        const results = await backend.queryLogs({
            startDate: '2025-12-31',
            endDate: '2026-01-01',
        });
        assert.strictEqual(results.length, 2);
    });
});
430
+ });
431
+ describe('edge-cases: integer overflow in timestamp calculations', () => {
432
+ describe('timestamp boundary values', () => {
433
it('should handle timestamp at Unix epoch', () => {
    // Epoch itself plus one second later; both within [1970-01-01, 1970-01-02].
    const epochItems = [
        { id: 1, timestamp: '1970-01-01T00:00:00Z' },
        { id: 2, timestamp: '1970-01-01T00:00:01Z' },
    ];
    const filtered = filterByDateRange(epochItems, '1970-01-01', '1970-01-02');
    assert.strictEqual(filtered.length, 2);
});
441
it('should handle very large year (2100)', () => {
    // Date-only timestamp keeps the comparison purely lexicographic and
    // avoids mixing date-only bounds with a time component.
    const farFuture = [
        { id: 1, timestamp: '2100-06-15' },
    ];
    const filtered = filterByDateRange(farFuture, '2100-01-01', '2100-12-31');
    assert.strictEqual(filtered.length, 1);
});
448
it('should handle nanosecond precision timestamps', async () => {
    const testDir = createTempDir('edge-case-nanosecond-');
    try {
        const backend = new LocalJsonlBackend(testDir);
        const today = getTestDate();
        // One span at the nanosecond ceiling (999999999), one at zero nanos.
        const spans = [
            {
                traceId: 't1',
                spanId: 's1',
                name: 'max-nanos',
                startTime: [1700000000, 999999999],
                endTime: [1700000001, 0],
            },
            {
                traceId: 't2',
                spanId: 's2',
                name: 'zero-nanos',
                startTime: [1700000000, 0],
                endTime: [1700000000, 500000000],
            },
        ];
        await writeJsonlFileAsync(path.join(testDir, `traces-${today}.jsonl`), spans);
        const results = await backend.queryTraces({});
        assert.strictEqual(results.length, 2);
        // Duration math must complete without overflowing for either span.
        assert.ok(results[0].durationMs !== undefined);
        assert.ok(results[1].durationMs !== undefined);
    }
    finally {
        removeTempDir(testDir);
    }
});
481
it('should handle duration with max seconds value', async () => {
    const testDir = createTempDir('edge-case-max-duration-');
    try {
        const backend = new LocalJsonlBackend(testDir);
        const today = getTestDate();
        // A single span whose [seconds, nanos] duration encodes a full day.
        const daySpan = {
            traceId: 't1',
            spanId: 's1',
            name: 'long-duration',
            startTime: [1700000000, 0],
            duration: [86400, 0], // 24 hours
        };
        await writeJsonlFileAsync(path.join(testDir, `traces-${today}.jsonl`), [daySpan]);
        const results = await backend.queryTraces({});
        assert.strictEqual(results.length, 1);
        assert.strictEqual(results[0].durationMs, 86400000); // 24 hours in ms
    }
    finally {
        removeTempDir(testDir);
    }
});
505
it('should handle negative duration gracefully', async () => {
    const testDir = createTempDir('edge-case-negative-duration-');
    try {
        const backend = new LocalJsonlBackend(testDir);
        const today = getTestDate();
        // endTime precedes startTime: invalid data, but reads must not throw.
        const badSpan = {
            traceId: 't1',
            spanId: 's1',
            name: 'negative-duration',
            startTime: [1700000000, 0],
            endTime: [1699999999, 0], // end before start
        };
        await writeJsonlFileAsync(path.join(testDir, `traces-${today}.jsonl`), [badSpan]);
        const results = await backend.queryTraces({});
        // The span is returned as-is; validating it is the caller's job.
        assert.strictEqual(results.length, 1);
    }
    finally {
        removeTempDir(testDir);
    }
});
529
it('should handle duration filtering with extreme values', async () => {
    const testDir = createTempDir('edge-case-extreme-duration-');
    try {
        const backend = new LocalJsonlBackend(testDir);
        const today = getTestDate();
        // Two spans at opposite extremes: 1 microsecond vs. 1 hour.
        const spans = [
            {
                traceId: 't1',
                spanId: 's1',
                name: 'microsecond',
                startTime: [1700000000, 0],
                duration: [0, 1000], // 1 microsecond
            },
            {
                traceId: 't2',
                spanId: 's2',
                name: 'hour',
                startTime: [1700000000, 0],
                duration: [3600, 0], // 1 hour
            },
        ];
        await writeJsonlFileAsync(path.join(testDir, `traces-${today}.jsonl`), spans);
        // Window of [1ms, 1h]: the microsecond span falls below the floor.
        const results = await backend.queryTraces({
            minDurationMs: 1,
            maxDurationMs: 3600000,
        });
        assert.strictEqual(results.length, 1);
        assert.strictEqual(results[0].name, 'hour');
    }
    finally {
        removeTempDir(testDir);
    }
});
564
it('should handle Number.MAX_SAFE_INTEGER in calculations', () => {
    // Bug fix: the previous version computed `maxSafe` and `timestampMs`
    // but never used them, so the test asserted nothing about
    // MAX_SAFE_INTEGER despite its name. Exercise both claims explicitly.
    //
    // Millisecond timestamps for realistic dates sit far below
    // Number.MAX_SAFE_INTEGER, so Date math on them is exact.
    const timestampMs = 1700000000000;
    assert.ok(Number.isSafeInteger(timestampMs));
    const date = new Date(timestampMs);
    assert.strictEqual(date.getTime(), timestampMs);
    // Values approaching the safe-integer ceiling still behave as exact
    // integers: arithmetic within the safe range round-trips precisely.
    const nearMax = Number.MAX_SAFE_INTEGER - 1000;
    assert.ok(Number.isSafeInteger(nearMax));
    assert.strictEqual(nearMax + 1000, Number.MAX_SAFE_INTEGER);
});
572
it('should handle zero duration correctly', async () => {
    const testDir = createTempDir('edge-case-zero-duration-');
    try {
        const backend = new LocalJsonlBackend(testDir);
        const today = getTestDate();
        const spans = [
            {
                traceId: 't1',
                spanId: 's1',
                name: 'zero-duration',
                startTime: [1700000000, 0],
                duration: [0, 0],
            },
            {
                traceId: 't2',
                spanId: 's2',
                name: 'nonzero-duration',
                startTime: [1700000000, 0],
                duration: [1, 0],
            },
        ];
        await writeJsonlFileAsync(path.join(testDir, `traces-${today}.jsonl`), spans);
        // minDurationMs is inclusive, so the zero-length span matches as well.
        const results = await backend.queryTraces({ minDurationMs: 0 });
        assert.strictEqual(results.length, 2);
    }
    finally {
        removeTempDir(testDir);
    }
});
602
it('should handle timestamp comparison near millisecond boundaries', () => {
    // ISO-8601 strings order correctly under plain string comparison,
    // down to the millisecond digits.
    const stamps = [
        { id: 1, timestamp: '2026-01-15T10:00:00.000Z' },
        { id: 2, timestamp: '2026-01-15T10:00:00.001Z' },
        { id: 3, timestamp: '2026-01-15T10:00:00.999Z' },
    ];
    const inWindow = filterByDateRange(stamps, '2026-01-15T10:00:00.000Z', '2026-01-15T10:00:00.500Z');
    assert.strictEqual(inWindow.length, 2);
    assert.strictEqual(inWindow[0].id, 1);
    assert.strictEqual(inWindow[1].id, 2);
});
614
+ });
615
describe('hours calculation edge cases', () => {
    it('should not overflow when calculating milliseconds for large hour values', () => {
        // Mirrors the lookback pattern used in SigNoz: hours * 60 * 60 * 1000.
        const samples = [1, 24, 168, 720, 8760]; // 1h, 1d, 1w, 1m, 1y
        samples.forEach((hours) => {
            const ms = hours * 60 * 60 * 1000;
            assert.ok(Number.isSafeInteger(ms), `${hours} hours should produce safe integer: ${ms}`);
            assert.ok(ms > 0, `${hours} hours should produce positive value: ${ms}`);
        });
    });
    it('should handle max reasonable hours without overflow', () => {
        // A full decade of hours still converts to ms inside the safe range.
        const decadeHours = 10 * 365 * 24;
        const decadeMs = decadeHours * 60 * 60 * 1000;
        assert.ok(Number.isSafeInteger(decadeMs));
        assert.ok(decadeMs < Number.MAX_SAFE_INTEGER);
    });
});
633
+ });
634
+ //# sourceMappingURL=edge-cases.test.js.map