observability-toolkit 2.0.0 → 2.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +166 -398
- package/dist/__tests__/find-constant-dedup.test.d.ts +11 -0
- package/dist/__tests__/find-constant-dedup.test.d.ts.map +1 -0
- package/dist/__tests__/find-constant-dedup.test.js +132 -0
- package/dist/__tests__/find-constant-dedup.test.js.map +1 -0
- package/dist/backends/backend-schemas.d.ts +309 -0
- package/dist/backends/backend-schemas.d.ts.map +1 -0
- package/dist/backends/backend-schemas.js +215 -0
- package/dist/backends/backend-schemas.js.map +1 -0
- package/dist/backends/cloud.d.ts +46 -0
- package/dist/backends/cloud.d.ts.map +1 -0
- package/dist/backends/cloud.js +520 -0
- package/dist/backends/cloud.js.map +1 -0
- package/dist/backends/cloud.test.d.ts +2 -0
- package/dist/backends/cloud.test.d.ts.map +1 -0
- package/dist/backends/cloud.test.js +436 -0
- package/dist/backends/cloud.test.js.map +1 -0
- package/dist/backends/index.d.ts +659 -386
- package/dist/backends/index.d.ts.map +1 -1
- package/dist/backends/index.js +318 -41
- package/dist/backends/index.js.map +1 -1
- package/dist/backends/index.test.js +578 -57
- package/dist/backends/index.test.js.map +1 -1
- package/dist/backends/local-jsonl-boolean-search.test.js +8 -7
- package/dist/backends/local-jsonl-boolean-search.test.js.map +1 -1
- package/dist/backends/local-jsonl-cache.test.js +33 -31
- package/dist/backends/local-jsonl-cache.test.js.map +1 -1
- package/dist/backends/local-jsonl-circuit-breaker.test.js +9 -7
- package/dist/backends/local-jsonl-circuit-breaker.test.js.map +1 -1
- package/dist/backends/local-jsonl-export.test.js +73 -58
- package/dist/backends/local-jsonl-export.test.js.map +1 -1
- package/dist/backends/local-jsonl-index.test.js +52 -50
- package/dist/backends/local-jsonl-index.test.js.map +1 -1
- package/dist/backends/local-jsonl-logs.test.js +47 -31
- package/dist/backends/local-jsonl-logs.test.js.map +1 -1
- package/dist/backends/local-jsonl-metrics.test.js +85 -82
- package/dist/backends/local-jsonl-metrics.test.js.map +1 -1
- package/dist/backends/local-jsonl-otlp-unwrap.test.d.ts +2 -0
- package/dist/backends/local-jsonl-otlp-unwrap.test.d.ts.map +1 -0
- package/dist/backends/local-jsonl-otlp-unwrap.test.js +602 -0
- package/dist/backends/local-jsonl-otlp-unwrap.test.js.map +1 -0
- package/dist/backends/local-jsonl-traces.test.js +161 -147
- package/dist/backends/local-jsonl-traces.test.js.map +1 -1
- package/dist/backends/local-jsonl.d.ts +37 -8
- package/dist/backends/local-jsonl.d.ts.map +1 -1
- package/dist/backends/local-jsonl.js +1088 -241
- package/dist/backends/local-jsonl.js.map +1 -1
- package/dist/backends/shared.d.ts +9 -0
- package/dist/backends/shared.d.ts.map +1 -0
- package/dist/backends/shared.js +9 -0
- package/dist/backends/shared.js.map +1 -0
- package/dist/generated/opentelemetry/proto/collector/logs/v1/logs_service_pb.d.ts +40 -0
- package/dist/generated/opentelemetry/proto/collector/logs/v1/logs_service_pb.d.ts.map +1 -0
- package/dist/generated/opentelemetry/proto/collector/logs/v1/logs_service_pb.js +27 -0
- package/dist/generated/opentelemetry/proto/collector/logs/v1/logs_service_pb.js.map +1 -0
- package/dist/generated/opentelemetry/proto/collector/metrics/v1/metrics_service_pb.d.ts +106 -0
- package/dist/generated/opentelemetry/proto/collector/metrics/v1/metrics_service_pb.d.ts.map +1 -0
- package/dist/generated/opentelemetry/proto/collector/metrics/v1/metrics_service_pb.js +43 -0
- package/dist/generated/opentelemetry/proto/collector/metrics/v1/metrics_service_pb.js.map +1 -0
- package/dist/generated/opentelemetry/proto/collector/profiles/v1development/profiles_service_pb.d.ts +111 -0
- package/dist/generated/opentelemetry/proto/collector/profiles/v1development/profiles_service_pb.d.ts.map +1 -0
- package/dist/generated/opentelemetry/proto/collector/profiles/v1development/profiles_service_pb.js +42 -0
- package/dist/generated/opentelemetry/proto/collector/profiles/v1development/profiles_service_pb.js.map +1 -0
- package/dist/generated/opentelemetry/proto/collector/trace/v1/trace_service_pb.d.ts +106 -0
- package/dist/generated/opentelemetry/proto/collector/trace/v1/trace_service_pb.d.ts.map +1 -0
- package/dist/generated/opentelemetry/proto/collector/trace/v1/trace_service_pb.js +43 -0
- package/dist/generated/opentelemetry/proto/collector/trace/v1/trace_service_pb.js.map +1 -0
- package/dist/generated/opentelemetry/proto/common/v1/common_pb.d.ts +243 -0
- package/dist/generated/opentelemetry/proto/common/v1/common_pb.d.ts.map +1 -0
- package/dist/generated/opentelemetry/proto/common/v1/common_pb.js +49 -0
- package/dist/generated/opentelemetry/proto/common/v1/common_pb.js.map +1 -0
- package/dist/generated/opentelemetry/proto/logs/v1/logs_pb.d.ts +90 -0
- package/dist/generated/opentelemetry/proto/logs/v1/logs_pb.d.ts.map +1 -0
- package/dist/generated/opentelemetry/proto/logs/v1/logs_pb.js +66 -0
- package/dist/generated/opentelemetry/proto/logs/v1/logs_pb.js.map +1 -0
- package/dist/generated/opentelemetry/proto/metrics/v1/metrics_pb.d.ts +1134 -0
- package/dist/generated/opentelemetry/proto/metrics/v1/metrics_pb.d.ts.map +1 -0
- package/dist/generated/opentelemetry/proto/metrics/v1/metrics_pb.js +223 -0
- package/dist/generated/opentelemetry/proto/metrics/v1/metrics_pb.js.map +1 -0
- package/dist/generated/opentelemetry/proto/profiles/v1development/profiles_pb.d.ts +678 -0
- package/dist/generated/opentelemetry/proto/profiles/v1development/profiles_pb.d.ts.map +1 -0
- package/dist/generated/opentelemetry/proto/profiles/v1development/profiles_pb.js +107 -0
- package/dist/generated/opentelemetry/proto/profiles/v1development/profiles_pb.js.map +1 -0
- package/dist/generated/opentelemetry/proto/resource/v1/resource_pb.d.ts +46 -0
- package/dist/generated/opentelemetry/proto/resource/v1/resource_pb.d.ts.map +1 -0
- package/dist/generated/opentelemetry/proto/resource/v1/resource_pb.js +25 -0
- package/dist/generated/opentelemetry/proto/resource/v1/resource_pb.js.map +1 -0
- package/dist/generated/opentelemetry/proto/trace/v1/trace_pb.d.ts +569 -0
- package/dist/generated/opentelemetry/proto/trace/v1/trace_pb.d.ts.map +1 -0
- package/dist/generated/opentelemetry/proto/trace/v1/trace_pb.js +195 -0
- package/dist/generated/opentelemetry/proto/trace/v1/trace_pb.js.map +1 -0
- package/dist/lib/agent-judge/agent-as-judge.d.ts +157 -0
- package/dist/lib/agent-judge/agent-as-judge.d.ts.map +1 -0
- package/dist/lib/agent-judge/agent-as-judge.js +137 -0
- package/dist/lib/agent-judge/agent-as-judge.js.map +1 -0
- package/dist/lib/agent-judge/agent-as-judge.test.d.ts.map +1 -0
- package/dist/lib/agent-judge/agent-as-judge.test.js +839 -0
- package/dist/lib/agent-judge/agent-as-judge.test.js.map +1 -0
- package/dist/lib/agent-judge/agent-eval-metrics.d.ts +293 -0
- package/dist/lib/agent-judge/agent-eval-metrics.d.ts.map +1 -0
- package/dist/lib/agent-judge/agent-eval-metrics.js +715 -0
- package/dist/lib/agent-judge/agent-eval-metrics.js.map +1 -0
- package/dist/lib/agent-judge/agent-eval-metrics.test.d.ts +5 -0
- package/dist/lib/agent-judge/agent-eval-metrics.test.d.ts.map +1 -0
- package/dist/lib/agent-judge/agent-eval-metrics.test.js +676 -0
- package/dist/lib/agent-judge/agent-eval-metrics.test.js.map +1 -0
- package/dist/lib/agent-judge/agent-judge-classes.d.ts +95 -0
- package/dist/lib/agent-judge/agent-judge-classes.d.ts.map +1 -0
- package/dist/lib/agent-judge/agent-judge-classes.js +222 -0
- package/dist/lib/agent-judge/agent-judge-classes.js.map +1 -0
- package/dist/lib/agent-judge/agent-judge-classes.test.d.ts +6 -0
- package/dist/lib/agent-judge/agent-judge-classes.test.d.ts.map +1 -0
- package/dist/lib/agent-judge/agent-judge-classes.test.js +271 -0
- package/dist/lib/agent-judge/agent-judge-classes.test.js.map +1 -0
- package/dist/lib/agent-judge/agent-judge-consensus.d.ts +58 -0
- package/dist/lib/agent-judge/agent-judge-consensus.d.ts.map +1 -0
- package/dist/lib/agent-judge/agent-judge-consensus.js +149 -0
- package/dist/lib/agent-judge/agent-judge-consensus.js.map +1 -0
- package/dist/lib/agent-judge/agent-judge-consensus.test.d.ts +2 -0
- package/dist/lib/agent-judge/agent-judge-consensus.test.d.ts.map +1 -0
- package/dist/lib/agent-judge/agent-judge-consensus.test.js +170 -0
- package/dist/lib/agent-judge/agent-judge-consensus.test.js.map +1 -0
- package/dist/lib/agent-judge/agent-judge-verification.d.ts +89 -0
- package/dist/lib/agent-judge/agent-judge-verification.d.ts.map +1 -0
- package/dist/lib/agent-judge/agent-judge-verification.js +235 -0
- package/dist/lib/agent-judge/agent-judge-verification.js.map +1 -0
- package/dist/lib/agent-judge/agent-judge-verification.test.d.ts +5 -0
- package/dist/lib/agent-judge/agent-judge-verification.test.d.ts.map +1 -0
- package/dist/lib/agent-judge/agent-judge-verification.test.js +399 -0
- package/dist/lib/agent-judge/agent-judge-verification.test.js.map +1 -0
- package/dist/lib/audit/agent-auditor-scoring.d.ts +167 -0
- package/dist/lib/audit/agent-auditor-scoring.d.ts.map +1 -0
- package/dist/lib/audit/agent-auditor-scoring.js +338 -0
- package/dist/lib/audit/agent-auditor-scoring.js.map +1 -0
- package/dist/lib/audit/agent-auditor-scoring.test.d.ts +2 -0
- package/dist/lib/audit/agent-auditor-scoring.test.d.ts.map +1 -0
- package/dist/lib/audit/agent-auditor-scoring.test.js +576 -0
- package/dist/lib/audit/agent-auditor-scoring.test.js.map +1 -0
- package/dist/lib/audit/audit-record.d.ts +139 -0
- package/dist/lib/audit/audit-record.d.ts.map +1 -0
- package/dist/lib/audit/audit-record.js +288 -0
- package/dist/lib/audit/audit-record.js.map +1 -0
- package/dist/lib/audit/audit-record.test.d.ts +5 -0
- package/dist/lib/audit/audit-record.test.d.ts.map +1 -0
- package/dist/lib/audit/audit-record.test.js +258 -0
- package/dist/lib/audit/audit-record.test.js.map +1 -0
- package/dist/lib/audit/audit-scoring-constants.d.ts +57 -0
- package/dist/lib/audit/audit-scoring-constants.d.ts.map +1 -0
- package/dist/lib/audit/audit-scoring-constants.js +59 -0
- package/dist/lib/audit/audit-scoring-constants.js.map +1 -0
- package/dist/lib/audit/compliance-report.d.ts +125 -0
- package/dist/lib/audit/compliance-report.d.ts.map +1 -0
- package/dist/lib/audit/compliance-report.js +205 -0
- package/dist/lib/audit/compliance-report.js.map +1 -0
- package/dist/lib/audit/compliance-report.test.d.ts +5 -0
- package/dist/lib/audit/compliance-report.test.d.ts.map +1 -0
- package/dist/lib/audit/compliance-report.test.js +290 -0
- package/dist/lib/audit/compliance-report.test.js.map +1 -0
- package/dist/lib/audit/retention-guard.d.ts +41 -0
- package/dist/lib/audit/retention-guard.d.ts.map +1 -0
- package/dist/lib/audit/retention-guard.js +103 -0
- package/dist/lib/audit/retention-guard.js.map +1 -0
- package/dist/lib/audit/retention-guard.test.d.ts +5 -0
- package/dist/lib/audit/retention-guard.test.d.ts.map +1 -0
- package/dist/lib/audit/retention-guard.test.js +109 -0
- package/dist/lib/audit/retention-guard.test.js.map +1 -0
- package/dist/lib/audit/skill-auditor-scoring.d.ts +69 -0
- package/dist/lib/audit/skill-auditor-scoring.d.ts.map +1 -0
- package/dist/lib/audit/skill-auditor-scoring.js +149 -0
- package/dist/lib/audit/skill-auditor-scoring.js.map +1 -0
- package/dist/lib/audit/skill-auditor-scoring.test.d.ts +2 -0
- package/dist/lib/audit/skill-auditor-scoring.test.d.ts.map +1 -0
- package/dist/lib/audit/skill-auditor-scoring.test.js +369 -0
- package/dist/lib/audit/skill-auditor-scoring.test.js.map +1 -0
- package/dist/lib/audit/verification-events.d.ts +119 -0
- package/dist/lib/audit/verification-events.d.ts.map +1 -0
- package/dist/lib/audit/verification-events.js +175 -0
- package/dist/lib/audit/verification-events.js.map +1 -0
- package/dist/lib/audit/verification-events.test.d.ts.map +1 -0
- package/dist/lib/audit/verification-events.test.js +197 -0
- package/dist/lib/audit/verification-events.test.js.map +1 -0
- package/dist/lib/core/constants-models.d.ts +90 -0
- package/dist/lib/core/constants-models.d.ts.map +1 -0
- package/dist/lib/core/constants-models.js +208 -0
- package/dist/lib/core/constants-models.js.map +1 -0
- package/dist/lib/core/constants-otel.d.ts +68 -0
- package/dist/lib/core/constants-otel.d.ts.map +1 -0
- package/dist/lib/core/constants-otel.js +128 -0
- package/dist/lib/core/constants-otel.js.map +1 -0
- package/dist/lib/core/constants-symlink.test.d.ts.map +1 -0
- package/dist/lib/core/constants-symlink.test.js +358 -0
- package/dist/lib/core/constants-symlink.test.js.map +1 -0
- package/dist/lib/core/constants-telemetry.d.ts +21 -0
- package/dist/lib/core/constants-telemetry.d.ts.map +1 -0
- package/dist/lib/core/constants-telemetry.js +162 -0
- package/dist/lib/core/constants-telemetry.js.map +1 -0
- package/dist/lib/core/constants.d.ts +152 -0
- package/dist/lib/core/constants.d.ts.map +1 -0
- package/dist/lib/core/constants.js +223 -0
- package/dist/lib/core/constants.js.map +1 -0
- package/dist/lib/core/constants.test.d.ts.map +1 -0
- package/dist/lib/core/constants.test.js +833 -0
- package/dist/lib/core/constants.test.js.map +1 -0
- package/dist/lib/core/doc-sync.test.d.ts +9 -0
- package/dist/lib/core/doc-sync.test.d.ts.map +1 -0
- package/dist/lib/core/doc-sync.test.js +159 -0
- package/dist/lib/core/doc-sync.test.js.map +1 -0
- package/dist/lib/core/edge-cases.test.d.ts.map +1 -0
- package/dist/lib/core/edge-cases.test.js +637 -0
- package/dist/lib/core/edge-cases.test.js.map +1 -0
- package/dist/lib/core/file-utils.d.ts +360 -0
- package/dist/lib/core/file-utils.d.ts.map +1 -0
- package/dist/lib/core/file-utils.js +890 -0
- package/dist/lib/core/file-utils.js.map +1 -0
- package/dist/lib/core/file-utils.test-constants.d.ts +38 -0
- package/dist/lib/core/file-utils.test-constants.d.ts.map +1 -0
- package/dist/lib/core/file-utils.test-constants.js +40 -0
- package/dist/lib/core/file-utils.test-constants.js.map +1 -0
- package/dist/lib/core/file-utils.test.d.ts.map +1 -0
- package/dist/lib/core/file-utils.test.js +1329 -0
- package/dist/lib/core/file-utils.test.js.map +1 -0
- package/dist/lib/core/input-validator.d.ts +125 -0
- package/dist/lib/core/input-validator.d.ts.map +1 -0
- package/dist/lib/core/input-validator.fuzz.test.d.ts.map +1 -0
- package/dist/lib/core/input-validator.fuzz.test.js +302 -0
- package/dist/lib/core/input-validator.fuzz.test.js.map +1 -0
- package/dist/lib/core/input-validator.js +348 -0
- package/dist/lib/core/input-validator.js.map +1 -0
- package/dist/lib/core/input-validator.test.d.ts.map +1 -0
- package/dist/lib/core/input-validator.test.js +465 -0
- package/dist/lib/core/input-validator.test.js.map +1 -0
- package/dist/lib/core/logger.d.ts +32 -0
- package/dist/lib/core/logger.d.ts.map +1 -0
- package/dist/lib/core/logger.js +104 -0
- package/dist/lib/core/logger.js.map +1 -0
- package/dist/lib/core/logger.test.d.ts.map +1 -0
- package/dist/lib/core/logger.test.js.map +1 -0
- package/dist/lib/core/schema-types.d.ts +37 -0
- package/dist/lib/core/schema-types.d.ts.map +1 -0
- package/dist/lib/core/schema-types.js +29 -0
- package/dist/lib/core/schema-types.js.map +1 -0
- package/dist/lib/core/server-utils.d.ts +98 -0
- package/dist/lib/core/server-utils.d.ts.map +1 -0
- package/dist/lib/core/server-utils.js +193 -0
- package/dist/lib/core/server-utils.js.map +1 -0
- package/dist/lib/core/shared-schemas.d.ts +301 -0
- package/dist/lib/core/shared-schemas.d.ts.map +1 -0
- package/dist/lib/core/shared-schemas.js +222 -0
- package/dist/lib/core/shared-schemas.js.map +1 -0
- package/dist/lib/core/shared-schemas.test.d.ts.map +1 -0
- package/dist/lib/core/shared-schemas.test.js +136 -0
- package/dist/lib/core/shared-schemas.test.js.map +1 -0
- package/dist/lib/core/units.d.ts +67 -0
- package/dist/lib/core/units.d.ts.map +1 -0
- package/dist/lib/core/units.js +88 -0
- package/dist/lib/core/units.js.map +1 -0
- package/dist/lib/cost/cost-estimation.d.ts +264 -0
- package/dist/lib/cost/cost-estimation.d.ts.map +1 -0
- package/dist/lib/cost/cost-estimation.js +541 -0
- package/dist/lib/cost/cost-estimation.js.map +1 -0
- package/dist/lib/cost/cost-estimation.test.d.ts +5 -0
- package/dist/lib/cost/cost-estimation.test.d.ts.map +1 -0
- package/dist/lib/cost/cost-estimation.test.js +701 -0
- package/dist/lib/cost/cost-estimation.test.js.map +1 -0
- package/dist/lib/cost/pricing-cache.d.ts +59 -0
- package/dist/lib/cost/pricing-cache.d.ts.map +1 -0
- package/dist/lib/cost/pricing-cache.js +120 -0
- package/dist/lib/cost/pricing-cache.js.map +1 -0
- package/dist/lib/cost/pricing-cache.test.d.ts +5 -0
- package/dist/lib/cost/pricing-cache.test.d.ts.map +1 -0
- package/dist/lib/cost/pricing-cache.test.js +176 -0
- package/dist/lib/cost/pricing-cache.test.js.map +1 -0
- package/dist/lib/dashboard-file-utils.d.ts +35 -0
- package/dist/lib/dashboard-file-utils.d.ts.map +1 -0
- package/dist/lib/dashboard-file-utils.js +94 -0
- package/dist/lib/dashboard-file-utils.js.map +1 -0
- package/dist/lib/errors/error-sanitizer.d.ts +62 -0
- package/dist/lib/errors/error-sanitizer.d.ts.map +1 -0
- package/dist/lib/errors/error-sanitizer.js +235 -0
- package/dist/lib/errors/error-sanitizer.js.map +1 -0
- package/dist/lib/errors/error-sanitizer.test.d.ts.map +1 -0
- package/dist/lib/errors/error-sanitizer.test.js +534 -0
- package/dist/lib/errors/error-sanitizer.test.js.map +1 -0
- package/dist/lib/errors/error-types.d.ts +59 -0
- package/dist/lib/errors/error-types.d.ts.map +1 -0
- package/dist/lib/errors/error-types.js +187 -0
- package/dist/lib/errors/error-types.js.map +1 -0
- package/dist/lib/errors/error-types.test.d.ts.map +1 -0
- package/dist/lib/errors/error-types.test.js +246 -0
- package/dist/lib/errors/error-types.test.js.map +1 -0
- package/dist/lib/errors/query-sanitizer.d.ts.map +1 -0
- package/dist/lib/errors/query-sanitizer.js +269 -0
- package/dist/lib/errors/query-sanitizer.js.map +1 -0
- package/dist/lib/errors/query-sanitizer.test.d.ts.map +1 -0
- package/dist/lib/errors/query-sanitizer.test.js +403 -0
- package/dist/lib/errors/query-sanitizer.test.js.map +1 -0
- package/dist/lib/exports/confident-export.d.ts +105 -0
- package/dist/lib/exports/confident-export.d.ts.map +1 -0
- package/dist/lib/exports/confident-export.js +385 -0
- package/dist/lib/exports/confident-export.js.map +1 -0
- package/dist/lib/exports/confident-export.test.d.ts.map +1 -0
- package/dist/lib/exports/confident-export.test.js +848 -0
- package/dist/lib/exports/confident-export.test.js.map +1 -0
- package/dist/lib/exports/datadog-export.d.ts +200 -0
- package/dist/lib/exports/datadog-export.d.ts.map +1 -0
- package/dist/lib/exports/datadog-export.js +488 -0
- package/dist/lib/exports/datadog-export.js.map +1 -0
- package/dist/lib/exports/datadog-export.test.d.ts +2 -0
- package/dist/lib/exports/datadog-export.test.d.ts.map +1 -0
- package/dist/lib/exports/datadog-export.test.js +890 -0
- package/dist/lib/exports/datadog-export.test.js.map +1 -0
- package/dist/lib/exports/export-config-schemas.d.ts +67 -0
- package/dist/lib/exports/export-config-schemas.d.ts.map +1 -0
- package/dist/lib/exports/export-config-schemas.js +120 -0
- package/dist/lib/exports/export-config-schemas.js.map +1 -0
- package/dist/lib/exports/export-config-schemas.test.d.ts +8 -0
- package/dist/lib/exports/export-config-schemas.test.d.ts.map +1 -0
- package/dist/lib/exports/export-config-schemas.test.js +503 -0
- package/dist/lib/exports/export-config-schemas.test.js.map +1 -0
- package/dist/lib/exports/export-utils.d.ts +127 -0
- package/dist/lib/exports/export-utils.d.ts.map +1 -0
- package/dist/lib/exports/export-utils.js +303 -0
- package/dist/lib/exports/export-utils.js.map +1 -0
- package/dist/lib/exports/export-utils.test.d.ts.map +1 -0
- package/dist/lib/exports/export-utils.test.js +344 -0
- package/dist/lib/exports/export-utils.test.js.map +1 -0
- package/dist/lib/exports/langfuse-export.d.ts +129 -0
- package/dist/lib/exports/langfuse-export.d.ts.map +1 -0
- package/dist/lib/exports/langfuse-export.js +370 -0
- package/dist/lib/exports/langfuse-export.js.map +1 -0
- package/dist/lib/exports/langfuse-export.test.d.ts.map +1 -0
- package/dist/lib/exports/langfuse-export.test.js +1020 -0
- package/dist/lib/exports/langfuse-export.test.js.map +1 -0
- package/dist/lib/exports/otlp-export.d.ts +179 -0
- package/dist/lib/exports/otlp-export.d.ts.map +1 -0
- package/dist/lib/exports/otlp-export.js +397 -0
- package/dist/lib/exports/otlp-export.js.map +1 -0
- package/dist/lib/exports/otlp-format-converter.d.ts +70 -0
- package/dist/lib/exports/otlp-format-converter.d.ts.map +1 -0
- package/dist/lib/exports/otlp-format-converter.js +401 -0
- package/dist/lib/exports/otlp-format-converter.js.map +1 -0
- package/dist/lib/exports/otlp-proto-encode.d.ts +53 -0
- package/dist/lib/exports/otlp-proto-encode.d.ts.map +1 -0
- package/dist/lib/exports/otlp-proto-encode.js +165 -0
- package/dist/lib/exports/otlp-proto-encode.js.map +1 -0
- package/dist/lib/exports/otlp-proto-encode.test.d.ts +7 -0
- package/dist/lib/exports/otlp-proto-encode.test.d.ts.map +1 -0
- package/dist/lib/exports/otlp-proto-encode.test.js +997 -0
- package/dist/lib/exports/otlp-proto-encode.test.js.map +1 -0
- package/dist/lib/exports/phoenix-export.d.ts +119 -0
- package/dist/lib/exports/phoenix-export.d.ts.map +1 -0
- package/dist/lib/exports/phoenix-export.js +448 -0
- package/dist/lib/exports/phoenix-export.js.map +1 -0
- package/dist/lib/exports/phoenix-export.test.d.ts.map +1 -0
- package/dist/lib/exports/phoenix-export.test.js +816 -0
- package/dist/lib/exports/phoenix-export.test.js.map +1 -0
- package/dist/lib/index.d.ts +16 -0
- package/dist/lib/index.d.ts.map +1 -0
- package/dist/lib/index.js +31 -0
- package/dist/lib/index.js.map +1 -0
- package/dist/lib/judge/evaluation-hooks-schemas.d.ts +186 -0
- package/dist/lib/judge/evaluation-hooks-schemas.d.ts.map +1 -0
- package/dist/lib/judge/evaluation-hooks-schemas.js +125 -0
- package/dist/lib/judge/evaluation-hooks-schemas.js.map +1 -0
- package/dist/lib/judge/evaluation-hooks.d.ts +88 -0
- package/dist/lib/judge/evaluation-hooks.d.ts.map +1 -0
- package/dist/lib/judge/evaluation-hooks.js +658 -0
- package/dist/lib/judge/evaluation-hooks.js.map +1 -0
- package/dist/lib/judge/evaluation-hooks.test.d.ts.map +1 -0
- package/dist/lib/judge/evaluation-hooks.test.js +934 -0
- package/dist/lib/judge/evaluation-hooks.test.js.map +1 -0
- package/dist/lib/judge/llm-as-judge.d.ts +138 -0
- package/dist/lib/judge/llm-as-judge.d.ts.map +1 -0
- package/dist/lib/judge/llm-as-judge.js +103 -0
- package/dist/lib/judge/llm-as-judge.js.map +1 -0
- package/dist/lib/judge/llm-as-judge.test.d.ts.map +1 -0
- package/dist/lib/judge/llm-as-judge.test.js +2179 -0
- package/dist/lib/judge/llm-as-judge.test.js.map +1 -0
- package/dist/lib/judge/llm-judge-bias.d.ts +44 -0
- package/dist/lib/judge/llm-judge-bias.d.ts.map +1 -0
- package/dist/lib/judge/llm-judge-bias.js +130 -0
- package/dist/lib/judge/llm-judge-bias.js.map +1 -0
- package/dist/lib/judge/llm-judge-bias.test.d.ts +2 -0
- package/dist/lib/judge/llm-judge-bias.test.d.ts.map +1 -0
- package/dist/lib/judge/llm-judge-bias.test.js +380 -0
- package/dist/lib/judge/llm-judge-bias.test.js.map +1 -0
- package/dist/lib/judge/llm-judge-code.d.ts +99 -0
- package/dist/lib/judge/llm-judge-code.d.ts.map +1 -0
- package/dist/lib/judge/llm-judge-code.js +261 -0
- package/dist/lib/judge/llm-judge-code.js.map +1 -0
- package/dist/lib/judge/llm-judge-code.test.d.ts +2 -0
- package/dist/lib/judge/llm-judge-code.test.d.ts.map +1 -0
- package/dist/lib/judge/llm-judge-code.test.js +981 -0
- package/dist/lib/judge/llm-judge-code.test.js.map +1 -0
- package/dist/lib/judge/llm-judge-config.d.ts +241 -0
- package/dist/lib/judge/llm-judge-config.d.ts.map +1 -0
- package/dist/lib/judge/llm-judge-config.js +390 -0
- package/dist/lib/judge/llm-judge-config.js.map +1 -0
- package/dist/lib/judge/llm-judge-config.test.d.ts +5 -0
- package/dist/lib/judge/llm-judge-config.test.d.ts.map +1 -0
- package/dist/lib/judge/llm-judge-config.test.js +392 -0
- package/dist/lib/judge/llm-judge-config.test.js.map +1 -0
- package/dist/lib/judge/llm-judge-constants.d.ts +111 -0
- package/dist/lib/judge/llm-judge-constants.d.ts.map +1 -0
- package/dist/lib/judge/llm-judge-constants.js +150 -0
- package/dist/lib/judge/llm-judge-constants.js.map +1 -0
- package/dist/lib/judge/llm-judge-dag.d.ts +57 -0
- package/dist/lib/judge/llm-judge-dag.d.ts.map +1 -0
- package/dist/lib/judge/llm-judge-dag.js +217 -0
- package/dist/lib/judge/llm-judge-dag.js.map +1 -0
- package/dist/lib/judge/llm-judge-dag.test.d.ts +8 -0
- package/dist/lib/judge/llm-judge-dag.test.d.ts.map +1 -0
- package/dist/lib/judge/llm-judge-dag.test.js +973 -0
- package/dist/lib/judge/llm-judge-dag.test.js.map +1 -0
- package/dist/lib/judge/llm-judge-domain.d.ts +42 -0
- package/dist/lib/judge/llm-judge-domain.d.ts.map +1 -0
- package/dist/lib/judge/llm-judge-domain.js +167 -0
- package/dist/lib/judge/llm-judge-domain.js.map +1 -0
- package/dist/lib/judge/llm-judge-domain.test.d.ts +6 -0
- package/dist/lib/judge/llm-judge-domain.test.d.ts.map +1 -0
- package/dist/lib/judge/llm-judge-domain.test.js +337 -0
- package/dist/lib/judge/llm-judge-domain.test.js.map +1 -0
- package/dist/lib/judge/llm-judge-geval.d.ts +42 -0
- package/dist/lib/judge/llm-judge-geval.d.ts.map +1 -0
- package/dist/lib/judge/llm-judge-geval.js +213 -0
- package/dist/lib/judge/llm-judge-geval.js.map +1 -0
- package/dist/lib/judge/llm-judge-geval.test.d.ts +2 -0
- package/dist/lib/judge/llm-judge-geval.test.d.ts.map +1 -0
- package/dist/lib/judge/llm-judge-geval.test.js +556 -0
- package/dist/lib/judge/llm-judge-geval.test.js.map +1 -0
- package/dist/lib/judge/llm-judge-otel.test.d.ts +9 -0
- package/dist/lib/judge/llm-judge-otel.test.d.ts.map +1 -0
- package/dist/lib/judge/llm-judge-otel.test.js +91 -0
- package/dist/lib/judge/llm-judge-otel.test.js.map +1 -0
- package/dist/lib/judge/llm-judge-qag.d.ts +38 -0
- package/dist/lib/judge/llm-judge-qag.d.ts.map +1 -0
- package/dist/lib/judge/llm-judge-qag.js +205 -0
- package/dist/lib/judge/llm-judge-qag.js.map +1 -0
- package/dist/lib/judge/llm-judge-qag.test.d.ts +2 -0
- package/dist/lib/judge/llm-judge-qag.test.d.ts.map +1 -0
- package/dist/lib/judge/llm-judge-qag.test.js +386 -0
- package/dist/lib/judge/llm-judge-qag.test.js.map +1 -0
- package/dist/lib/judge/llm-judge-resilience.d.ts +74 -0
- package/dist/lib/judge/llm-judge-resilience.d.ts.map +1 -0
- package/dist/lib/judge/llm-judge-resilience.js +146 -0
- package/dist/lib/judge/llm-judge-resilience.js.map +1 -0
- package/dist/lib/judge/llm-judge-resilience.test.d.ts +2 -0
- package/dist/lib/judge/llm-judge-resilience.test.d.ts.map +1 -0
- package/dist/lib/judge/llm-judge-resilience.test.js +353 -0
- package/dist/lib/judge/llm-judge-resilience.test.js.map +1 -0
- package/dist/lib/judge/llm-judge-security.d.ts +106 -0
- package/dist/lib/judge/llm-judge-security.d.ts.map +1 -0
- package/dist/lib/judge/llm-judge-security.js +314 -0
- package/dist/lib/judge/llm-judge-security.js.map +1 -0
- package/dist/lib/judge/llm-judge-security.test.d.ts +2 -0
- package/dist/lib/judge/llm-judge-security.test.d.ts.map +1 -0
- package/dist/lib/judge/llm-judge-security.test.js +1011 -0
- package/dist/lib/judge/llm-judge-security.test.js.map +1 -0
- package/dist/lib/observability/context-accumulator.d.ts +32 -0
- package/dist/lib/observability/context-accumulator.d.ts.map +1 -0
- package/dist/lib/observability/context-accumulator.js +87 -0
- package/dist/lib/observability/context-accumulator.js.map +1 -0
- package/dist/lib/observability/evaluation-events.d.ts +35 -0
- package/dist/lib/observability/evaluation-events.d.ts.map +1 -0
- package/dist/lib/observability/evaluation-events.js +90 -0
- package/dist/lib/observability/evaluation-events.js.map +1 -0
- package/dist/lib/observability/file-span-exporter.d.ts +17 -0
- package/dist/lib/observability/file-span-exporter.d.ts.map +1 -0
- package/dist/lib/observability/file-span-exporter.js +49 -0
- package/dist/lib/observability/file-span-exporter.js.map +1 -0
- package/dist/lib/observability/histogram-bucket-constants.d.ts +25 -0
- package/dist/lib/observability/histogram-bucket-constants.d.ts.map +1 -0
- package/dist/lib/observability/histogram-bucket-constants.js +60 -0
- package/dist/lib/observability/histogram-bucket-constants.js.map +1 -0
- package/dist/lib/observability/histogram.d.ts +112 -0
- package/dist/lib/observability/histogram.d.ts.map +1 -0
- package/dist/lib/observability/histogram.js +170 -0
- package/dist/lib/observability/histogram.js.map +1 -0
- package/dist/lib/observability/histogram.test.d.ts.map +1 -0
- package/dist/lib/observability/histogram.test.js +385 -0
- package/dist/lib/observability/histogram.test.js.map +1 -0
- package/dist/lib/observability/indexer.d.ts +114 -0
- package/dist/lib/observability/indexer.d.ts.map +1 -0
- package/dist/lib/observability/indexer.js +402 -0
- package/dist/lib/observability/indexer.js.map +1 -0
- package/dist/lib/observability/indexer.test.d.ts.map +1 -0
- package/dist/lib/observability/indexer.test.js +713 -0
- package/dist/lib/observability/indexer.test.js.map +1 -0
- package/dist/lib/observability/instrumentation-eval.test.d.ts +5 -0
- package/dist/lib/observability/instrumentation-eval.test.d.ts.map +1 -0
- package/dist/lib/observability/instrumentation-eval.test.js +63 -0
- package/dist/lib/observability/instrumentation-eval.test.js.map +1 -0
- package/dist/lib/observability/instrumentation-init-errors.test.d.ts +13 -0
- package/dist/lib/observability/instrumentation-init-errors.test.d.ts.map +1 -0
- package/dist/lib/observability/instrumentation-init-errors.test.js +194 -0
- package/dist/lib/observability/instrumentation-init-errors.test.js.map +1 -0
- package/dist/lib/observability/instrumentation-retry-timeout.test.d.ts +15 -0
- package/dist/lib/observability/instrumentation-retry-timeout.test.d.ts.map +1 -0
- package/dist/lib/observability/instrumentation-retry-timeout.test.js +188 -0
- package/dist/lib/observability/instrumentation-retry-timeout.test.js.map +1 -0
- package/dist/lib/observability/instrumentation-set-otel.test.d.ts +5 -0
- package/dist/lib/observability/instrumentation-set-otel.test.d.ts.map +1 -0
- package/dist/lib/observability/instrumentation-set-otel.test.js +59 -0
- package/dist/lib/observability/instrumentation-set-otel.test.js.map +1 -0
- package/dist/lib/observability/instrumentation.d.ts +158 -0
- package/dist/lib/observability/instrumentation.d.ts.map +1 -0
- package/dist/lib/observability/instrumentation.integration.test.d.ts.map +1 -0
- package/dist/lib/observability/instrumentation.integration.test.js +590 -0
- package/dist/lib/observability/instrumentation.integration.test.js.map +1 -0
- package/dist/lib/observability/instrumentation.js +512 -0
- package/dist/lib/observability/instrumentation.js.map +1 -0
- package/dist/lib/observability/instrumentation.test.d.ts.map +1 -0
- package/dist/lib/observability/instrumentation.test.js +822 -0
- package/dist/lib/observability/instrumentation.test.js.map +1 -0
- package/dist/lib/observability/mcp-semconv-constants.d.ts +98 -0
- package/dist/lib/observability/mcp-semconv-constants.d.ts.map +1 -0
- package/dist/lib/observability/mcp-semconv-constants.js +102 -0
- package/dist/lib/observability/mcp-semconv-constants.js.map +1 -0
- package/dist/lib/observability/mcp-semconv.d.ts +37 -0
- package/dist/lib/observability/mcp-semconv.d.ts.map +1 -0
- package/dist/lib/observability/mcp-semconv.js +87 -0
- package/dist/lib/observability/mcp-semconv.js.map +1 -0
- package/dist/lib/observability/mcp-semconv.test.d.ts +2 -0
- package/dist/lib/observability/mcp-semconv.test.d.ts.map +1 -0
- package/dist/lib/observability/mcp-semconv.test.js +168 -0
- package/dist/lib/observability/mcp-semconv.test.js.map +1 -0
- package/dist/lib/observability/metrics.d.ts +100 -0
- package/dist/lib/observability/metrics.d.ts.map +1 -0
- package/dist/lib/observability/metrics.js +429 -0
- package/dist/lib/observability/metrics.js.map +1 -0
- package/dist/lib/observability/metrics.test.d.ts.map +1 -0
- package/dist/lib/observability/metrics.test.js +191 -0
- package/dist/lib/observability/metrics.test.js.map +1 -0
- package/dist/lib/observability/observability-test-constants.d.ts +34 -0
- package/dist/lib/observability/observability-test-constants.d.ts.map +1 -0
- package/dist/lib/observability/observability-test-constants.js +55 -0
- package/dist/lib/observability/observability-test-constants.js.map +1 -0
- package/dist/lib/observability/opentelemetry-resources.test.d.ts +2 -0
- package/dist/lib/observability/opentelemetry-resources.test.d.ts.map +1 -0
- package/dist/lib/observability/opentelemetry-resources.test.js +19 -0
- package/dist/lib/observability/opentelemetry-resources.test.js.map +1 -0
- package/dist/lib/observability/parse-stats.d.ts.map +1 -0
- package/dist/lib/observability/parse-stats.js +207 -0
- package/dist/lib/observability/parse-stats.js.map +1 -0
- package/dist/lib/observability/parse-stats.test.d.ts.map +1 -0
- package/dist/lib/observability/parse-stats.test.js +287 -0
- package/dist/lib/observability/parse-stats.test.js.map +1 -0
- package/dist/lib/observability/render-trace-tree.d.ts +31 -0
- package/dist/lib/observability/render-trace-tree.d.ts.map +1 -0
- package/dist/lib/observability/render-trace-tree.js +95 -0
- package/dist/lib/observability/render-trace-tree.js.map +1 -0
- package/dist/lib/observability/render-trace-tree.test.d.ts +5 -0
- package/dist/lib/observability/render-trace-tree.test.d.ts.map +1 -0
- package/dist/lib/observability/render-trace-tree.test.js +97 -0
- package/dist/lib/observability/render-trace-tree.test.js.map +1 -0
- package/dist/lib/observability/span-attributes.d.ts +27 -0
- package/dist/lib/observability/span-attributes.d.ts.map +1 -0
- package/dist/lib/observability/span-attributes.js +85 -0
- package/dist/lib/observability/span-attributes.js.map +1 -0
- package/dist/lib/observability/trace-anomaly-detector.d.ts +23 -0
- package/dist/lib/observability/trace-anomaly-detector.d.ts.map +1 -0
- package/dist/lib/observability/trace-anomaly-detector.js +211 -0
- package/dist/lib/observability/trace-anomaly-detector.js.map +1 -0
- package/dist/lib/observability/trace-anomaly-detector.test.d.ts +5 -0
- package/dist/lib/observability/trace-anomaly-detector.test.d.ts.map +1 -0
- package/dist/lib/observability/trace-anomaly-detector.test.js +224 -0
- package/dist/lib/observability/trace-anomaly-detector.test.js.map +1 -0
- package/dist/lib/observability/trace-anomaly-schemas.d.ts +189 -0
- package/dist/lib/observability/trace-anomaly-schemas.d.ts.map +1 -0
- package/dist/lib/observability/trace-anomaly-schemas.js +167 -0
- package/dist/lib/observability/trace-anomaly-schemas.js.map +1 -0
- package/dist/lib/privacy/content-redaction.d.ts +141 -0
- package/dist/lib/privacy/content-redaction.d.ts.map +1 -0
- package/dist/lib/privacy/content-redaction.js +210 -0
- package/dist/lib/privacy/content-redaction.js.map +1 -0
- package/dist/lib/privacy/content-redaction.test.d.ts +2 -0
- package/dist/lib/privacy/content-redaction.test.d.ts.map +1 -0
- package/dist/lib/privacy/content-redaction.test.js +302 -0
- package/dist/lib/privacy/content-redaction.test.js.map +1 -0
- package/dist/lib/quality/bucket-utils.d.ts +17 -0
- package/dist/lib/quality/bucket-utils.d.ts.map +1 -0
- package/dist/lib/quality/bucket-utils.js +31 -0
- package/dist/lib/quality/bucket-utils.js.map +1 -0
- package/dist/lib/quality/bucket-utils.test.d.ts +2 -0
- package/dist/lib/quality/bucket-utils.test.d.ts.map +1 -0
- package/dist/lib/quality/bucket-utils.test.js +42 -0
- package/dist/lib/quality/bucket-utils.test.js.map +1 -0
- package/dist/lib/quality/qfe-backtest-detail.test.d.ts +5 -0
- package/dist/lib/quality/qfe-backtest-detail.test.d.ts.map +1 -0
- package/dist/lib/quality/qfe-backtest-detail.test.js +179 -0
- package/dist/lib/quality/qfe-backtest-detail.test.js.map +1 -0
- package/dist/lib/quality/qfe-calibration-paths.test.d.ts +5 -0
- package/dist/lib/quality/qfe-calibration-paths.test.d.ts.map +1 -0
- package/dist/lib/quality/qfe-calibration-paths.test.js +203 -0
- package/dist/lib/quality/qfe-calibration-paths.test.js.map +1 -0
- package/dist/lib/quality/qfe-correlation-helpers.test.d.ts +6 -0
- package/dist/lib/quality/qfe-correlation-helpers.test.d.ts.map +1 -0
- package/dist/lib/quality/qfe-correlation-helpers.test.js +143 -0
- package/dist/lib/quality/qfe-correlation-helpers.test.js.map +1 -0
- package/dist/lib/quality/qfe-cqi-paths.test.d.ts +6 -0
- package/dist/lib/quality/qfe-cqi-paths.test.d.ts.map +1 -0
- package/dist/lib/quality/qfe-cqi-paths.test.js +231 -0
- package/dist/lib/quality/qfe-cqi-paths.test.js.map +1 -0
- package/dist/lib/quality/qfe-critic-internals.test.d.ts +6 -0
- package/dist/lib/quality/qfe-critic-internals.test.d.ts.map +1 -0
- package/dist/lib/quality/qfe-critic-internals.test.js +191 -0
- package/dist/lib/quality/qfe-critic-internals.test.js.map +1 -0
- package/dist/lib/quality/qfe-derived-paths.test.d.ts +2 -0
- package/dist/lib/quality/qfe-derived-paths.test.d.ts.map +1 -0
- package/dist/lib/quality/qfe-derived-paths.test.js +372 -0
- package/dist/lib/quality/qfe-derived-paths.test.js.map +1 -0
- package/dist/lib/quality/qfe-dynamics-paths.test.d.ts +8 -0
- package/dist/lib/quality/qfe-dynamics-paths.test.d.ts.map +1 -0
- package/dist/lib/quality/qfe-dynamics-paths.test.js +223 -0
- package/dist/lib/quality/qfe-dynamics-paths.test.js.map +1 -0
- package/dist/lib/quality/qfe-granger-internals.test.d.ts +6 -0
- package/dist/lib/quality/qfe-granger-internals.test.d.ts.map +1 -0
- package/dist/lib/quality/qfe-granger-internals.test.js +158 -0
- package/dist/lib/quality/qfe-granger-internals.test.js.map +1 -0
- package/dist/lib/quality/qfe-label-normalize.test.d.ts +7 -0
- package/dist/lib/quality/qfe-label-normalize.test.d.ts.map +1 -0
- package/dist/lib/quality/qfe-label-normalize.test.js +332 -0
- package/dist/lib/quality/qfe-label-normalize.test.js.map +1 -0
- package/dist/lib/quality/qfe-ordinal-edge.test.d.ts +6 -0
- package/dist/lib/quality/qfe-ordinal-edge.test.d.ts.map +1 -0
- package/dist/lib/quality/qfe-ordinal-edge.test.js +98 -0
- package/dist/lib/quality/qfe-ordinal-edge.test.js.map +1 -0
- package/dist/lib/quality/qfe-roles-detail.test.d.ts +5 -0
- package/dist/lib/quality/qfe-roles-detail.test.d.ts.map +1 -0
- package/dist/lib/quality/qfe-roles-detail.test.js +115 -0
- package/dist/lib/quality/qfe-roles-detail.test.js.map +1 -0
- package/dist/lib/quality/qfe-rolling-detail.test.d.ts +7 -0
- package/dist/lib/quality/qfe-rolling-detail.test.d.ts.map +1 -0
- package/dist/lib/quality/qfe-rolling-detail.test.js +249 -0
- package/dist/lib/quality/qfe-rolling-detail.test.js.map +1 -0
- package/dist/lib/quality/qfe-stats-internals.test.d.ts +7 -0
- package/dist/lib/quality/qfe-stats-internals.test.d.ts.map +1 -0
- package/dist/lib/quality/qfe-stats-internals.test.js +143 -0
- package/dist/lib/quality/qfe-stats-internals.test.js.map +1 -0
- package/dist/lib/quality/qfe-streaming.test.d.ts +5 -0
- package/dist/lib/quality/qfe-streaming.test.d.ts.map +1 -0
- package/dist/lib/quality/qfe-streaming.test.js +239 -0
- package/dist/lib/quality/qfe-streaming.test.js.map +1 -0
- package/dist/lib/quality/qfe-sweep-detail.test.d.ts +6 -0
- package/dist/lib/quality/qfe-sweep-detail.test.d.ts.map +1 -0
- package/dist/lib/quality/qfe-sweep-detail.test.js +291 -0
- package/dist/lib/quality/qfe-sweep-detail.test.js.map +1 -0
- package/dist/lib/quality/quality-alerts.d.ts +23 -0
- package/dist/lib/quality/quality-alerts.d.ts.map +1 -0
- package/dist/lib/quality/quality-alerts.js +89 -0
- package/dist/lib/quality/quality-alerts.js.map +1 -0
- package/dist/lib/quality/quality-alerts.test.d.ts +2 -0
- package/dist/lib/quality/quality-alerts.test.d.ts.map +1 -0
- package/dist/lib/quality/quality-alerts.test.js +86 -0
- package/dist/lib/quality/quality-alerts.test.js.map +1 -0
- package/dist/lib/quality/quality-constants.d.ts +294 -0
- package/dist/lib/quality/quality-constants.d.ts.map +1 -0
- package/dist/lib/quality/quality-constants.js +335 -0
- package/dist/lib/quality/quality-constants.js.map +1 -0
- package/dist/lib/quality/quality-feature-engineering.d.ts +1071 -0
- package/dist/lib/quality/quality-feature-engineering.d.ts.map +1 -0
- package/dist/lib/quality/quality-feature-engineering.js +2076 -0
- package/dist/lib/quality/quality-feature-engineering.js.map +1 -0
- package/dist/lib/quality/quality-feature-engineering.test.d.ts +5 -0
- package/dist/lib/quality/quality-feature-engineering.test.d.ts.map +1 -0
- package/dist/lib/quality/quality-feature-engineering.test.js +2908 -0
- package/dist/lib/quality/quality-feature-engineering.test.js.map +1 -0
- package/dist/lib/quality/quality-metrics.d.ts +943 -0
- package/dist/lib/quality/quality-metrics.d.ts.map +1 -0
- package/dist/lib/quality/quality-metrics.js +1151 -0
- package/dist/lib/quality/quality-metrics.js.map +1 -0
- package/dist/lib/quality/quality-metrics.test.d.ts +5 -0
- package/dist/lib/quality/quality-metrics.test.d.ts.map +1 -0
- package/dist/lib/quality/quality-metrics.test.js +2766 -0
- package/dist/lib/quality/quality-metrics.test.js.map +1 -0
- package/dist/lib/quality/quality-multi-agent.d.ts +106 -0
- package/dist/lib/quality/quality-multi-agent.d.ts.map +1 -0
- package/dist/lib/quality/quality-multi-agent.js +124 -0
- package/dist/lib/quality/quality-multi-agent.js.map +1 -0
- package/dist/lib/quality/quality-multi-agent.test.d.ts +6 -0
- package/dist/lib/quality/quality-multi-agent.test.d.ts.map +1 -0
- package/dist/lib/quality/quality-multi-agent.test.js +163 -0
- package/dist/lib/quality/quality-multi-agent.test.js.map +1 -0
- package/dist/lib/quality/quality-sla.d.ts +35 -0
- package/dist/lib/quality/quality-sla.d.ts.map +1 -0
- package/dist/lib/quality/quality-sla.js +62 -0
- package/dist/lib/quality/quality-sla.js.map +1 -0
- package/dist/lib/quality/quality-sla.test.d.ts +5 -0
- package/dist/lib/quality/quality-sla.test.d.ts.map +1 -0
- package/dist/lib/quality/quality-sla.test.js +144 -0
- package/dist/lib/quality/quality-sla.test.js.map +1 -0
- package/dist/lib/quality/quality-test-constants.d.ts +23 -0
- package/dist/lib/quality/quality-test-constants.d.ts.map +1 -0
- package/dist/lib/quality/quality-test-constants.js +25 -0
- package/dist/lib/quality/quality-test-constants.js.map +1 -0
- package/dist/lib/quality/quality-trends.d.ts +101 -0
- package/dist/lib/quality/quality-trends.d.ts.map +1 -0
- package/dist/lib/quality/quality-trends.js +299 -0
- package/dist/lib/quality/quality-trends.js.map +1 -0
- package/dist/lib/quality/quality-trends.test.d.ts +6 -0
- package/dist/lib/quality/quality-trends.test.d.ts.map +1 -0
- package/dist/lib/quality/quality-trends.test.js +377 -0
- package/dist/lib/quality/quality-trends.test.js.map +1 -0
- package/dist/lib/quality/quality-views.d.ts +966 -0
- package/dist/lib/quality/quality-views.d.ts.map +1 -0
- package/dist/lib/quality/quality-views.js +367 -0
- package/dist/lib/quality/quality-views.js.map +1 -0
- package/dist/lib/quality/quality-views.test.d.ts +6 -0
- package/dist/lib/quality/quality-views.test.d.ts.map +1 -0
- package/dist/lib/quality/quality-views.test.js +262 -0
- package/dist/lib/quality/quality-views.test.js.map +1 -0
- package/dist/lib/quality/quality-visualization.d.ts +112 -0
- package/dist/lib/quality/quality-visualization.d.ts.map +1 -0
- package/dist/lib/quality/quality-visualization.js +136 -0
- package/dist/lib/quality/quality-visualization.js.map +1 -0
- package/dist/lib/quality/quality-visualization.test.d.ts +5 -0
- package/dist/lib/quality/quality-visualization.test.d.ts.map +1 -0
- package/dist/lib/quality/quality-visualization.test.js +189 -0
- package/dist/lib/quality/quality-visualization.test.js.map +1 -0
- package/dist/lib/resilience/cache.d.ts +56 -0
- package/dist/lib/resilience/cache.d.ts.map +1 -0
- package/dist/lib/resilience/cache.js +96 -0
- package/dist/lib/resilience/cache.js.map +1 -0
- package/dist/lib/resilience/cache.test.d.ts.map +1 -0
- package/dist/lib/resilience/cache.test.js +106 -0
- package/dist/lib/resilience/cache.test.js.map +1 -0
- package/dist/lib/resilience/circuit-breaker.d.ts +147 -0
- package/dist/lib/resilience/circuit-breaker.d.ts.map +1 -0
- package/dist/lib/resilience/circuit-breaker.js +251 -0
- package/dist/lib/resilience/circuit-breaker.js.map +1 -0
- package/dist/lib/resilience/circuit-breaker.test.d.ts.map +1 -0
- package/dist/lib/resilience/circuit-breaker.test.js +266 -0
- package/dist/lib/resilience/circuit-breaker.test.js.map +1 -0
- package/dist/lib/resilience/toon-encoder.d.ts +31 -0
- package/dist/lib/resilience/toon-encoder.d.ts.map +1 -0
- package/dist/lib/resilience/toon-encoder.js +66 -0
- package/dist/lib/resilience/toon-encoder.js.map +1 -0
- package/dist/lib/resilience/toon-encoder.test.d.ts.map +1 -0
- package/dist/lib/resilience/toon-encoder.test.js +86 -0
- package/dist/lib/resilience/toon-encoder.test.js.map +1 -0
- package/dist/lib/testing/mock-llm-builder.d.ts +139 -0
- package/dist/lib/testing/mock-llm-builder.d.ts.map +1 -0
- package/dist/lib/testing/mock-llm-builder.js +254 -0
- package/dist/lib/testing/mock-llm-builder.js.map +1 -0
- package/dist/lib/testing/mock-llm-builder.test.d.ts +5 -0
- package/dist/lib/testing/mock-llm-builder.test.d.ts.map +1 -0
- package/dist/lib/testing/mock-llm-builder.test.js +304 -0
- package/dist/lib/testing/mock-llm-builder.test.js.map +1 -0
- package/dist/lib/validation/api-schemas.d.ts +705 -0
- package/dist/lib/validation/api-schemas.d.ts.map +1 -0
- package/dist/lib/validation/api-schemas.js +351 -0
- package/dist/lib/validation/api-schemas.js.map +1 -0
- package/dist/lib/validation/api-schemas.test.d.ts +5 -0
- package/dist/lib/validation/api-schemas.test.d.ts.map +1 -0
- package/dist/lib/validation/api-schemas.test.js +427 -0
- package/dist/lib/validation/api-schemas.test.js.map +1 -0
- package/dist/lib/validation/dashboard-schemas.d.ts +203 -0
- package/dist/lib/validation/dashboard-schemas.d.ts.map +1 -0
- package/dist/lib/validation/dashboard-schemas.js +186 -0
- package/dist/lib/validation/dashboard-schemas.js.map +1 -0
- package/dist/lib/validation/dashboard-schemas.test.d.ts +5 -0
- package/dist/lib/validation/dashboard-schemas.test.d.ts.map +1 -0
- package/dist/lib/validation/dashboard-schemas.test.js +353 -0
- package/dist/lib/validation/dashboard-schemas.test.js.map +1 -0
- package/dist/server.d.ts +2 -1
- package/dist/server.d.ts.map +1 -1
- package/dist/server.js +158 -144
- package/dist/server.js.map +1 -1
- package/dist/server.test.js +102 -95
- package/dist/server.test.js.map +1 -1
- package/dist/test-helpers/assertions.d.ts +6 -0
- package/dist/test-helpers/assertions.d.ts.map +1 -0
- package/dist/test-helpers/assertions.js +11 -0
- package/dist/test-helpers/assertions.js.map +1 -0
- package/dist/test-helpers/env-utils.d.ts +0 -64
- package/dist/test-helpers/env-utils.d.ts.map +1 -1
- package/dist/test-helpers/env-utils.js +0 -100
- package/dist/test-helpers/env-utils.js.map +1 -1
- package/dist/test-helpers/fuzz-generators.d.ts.map +1 -1
- package/dist/test-helpers/fuzz-generators.js +62 -22
- package/dist/test-helpers/fuzz-generators.js.map +1 -1
- package/dist/test-helpers/index.d.ts +3 -2
- package/dist/test-helpers/index.d.ts.map +1 -1
- package/dist/test-helpers/index.js +4 -2
- package/dist/test-helpers/index.js.map +1 -1
- package/dist/test-helpers/memfs-utils.test.js +81 -76
- package/dist/test-helpers/memfs-utils.test.js.map +1 -1
- package/dist/test-helpers/mock-backends.d.ts +19 -17
- package/dist/test-helpers/mock-backends.d.ts.map +1 -1
- package/dist/test-helpers/mock-backends.js +16 -4
- package/dist/test-helpers/mock-backends.js.map +1 -1
- package/dist/test-helpers/mock-backends.test.js +43 -112
- package/dist/test-helpers/mock-backends.test.js.map +1 -1
- package/dist/test-helpers/race-condition-helpers.d.ts.map +1 -1
- package/dist/test-helpers/race-condition-helpers.js +3 -2
- package/dist/test-helpers/race-condition-helpers.js.map +1 -1
- package/dist/test-helpers/schema-validators.d.ts +2 -2
- package/dist/test-helpers/schema-validators.d.ts.map +1 -1
- package/dist/test-helpers/schema-validators.js +35 -31
- package/dist/test-helpers/schema-validators.js.map +1 -1
- package/dist/test-helpers/test-constants.d.ts +74 -0
- package/dist/test-helpers/test-constants.d.ts.map +1 -0
- package/dist/test-helpers/test-constants.js +78 -0
- package/dist/test-helpers/test-constants.js.map +1 -0
- package/dist/test-helpers/test-data-builders.d.ts +25 -7
- package/dist/test-helpers/test-data-builders.d.ts.map +1 -1
- package/dist/test-helpers/test-data-builders.js +32 -9
- package/dist/test-helpers/test-data-builders.js.map +1 -1
- package/dist/test-helpers/test-data-builders.test.js +116 -107
- package/dist/test-helpers/test-data-builders.test.js.map +1 -1
- package/dist/test-helpers/tool-validators.d.ts +1 -1
- package/dist/test-helpers/tool-validators.d.ts.map +1 -1
- package/dist/test-helpers/tool-validators.js +10 -10
- package/dist/test-helpers/tool-validators.js.map +1 -1
- package/dist/tools/audit-trail.d.ts +170 -0
- package/dist/tools/audit-trail.d.ts.map +1 -0
- package/dist/tools/audit-trail.js +109 -0
- package/dist/tools/audit-trail.js.map +1 -0
- package/dist/tools/audit-trail.test.d.ts +5 -0
- package/dist/tools/audit-trail.test.d.ts.map +1 -0
- package/dist/tools/audit-trail.test.js +122 -0
- package/dist/tools/audit-trail.test.js.map +1 -0
- package/dist/tools/context-stats.d.ts +6 -20
- package/dist/tools/context-stats.d.ts.map +1 -1
- package/dist/tools/context-stats.js +106 -88
- package/dist/tools/context-stats.js.map +1 -1
- package/dist/tools/context-stats.test.js +109 -60
- package/dist/tools/context-stats.test.js.map +1 -1
- package/dist/tools/detect-trace-anomalies.d.ts +123 -0
- package/dist/tools/detect-trace-anomalies.d.ts.map +1 -0
- package/dist/tools/detect-trace-anomalies.js +66 -0
- package/dist/tools/detect-trace-anomalies.js.map +1 -0
- package/dist/tools/estimate-cost.d.ts +77 -0
- package/dist/tools/estimate-cost.d.ts.map +1 -0
- package/dist/tools/estimate-cost.js +104 -0
- package/dist/tools/estimate-cost.js.map +1 -0
- package/dist/tools/estimate-cost.test.d.ts +5 -0
- package/dist/tools/estimate-cost.test.d.ts.map +1 -0
- package/dist/tools/estimate-cost.test.js +343 -0
- package/dist/tools/estimate-cost.test.js.map +1 -0
- package/dist/tools/export-base.d.ts +77 -0
- package/dist/tools/export-base.d.ts.map +1 -0
- package/dist/tools/export-base.js +150 -0
- package/dist/tools/export-base.js.map +1 -0
- package/dist/tools/export-base.test.d.ts +18 -0
- package/dist/tools/export-base.test.d.ts.map +1 -0
- package/dist/tools/export-base.test.js +220 -0
- package/dist/tools/export-base.test.js.map +1 -0
- package/dist/tools/export-confident.d.ts +94 -90
- package/dist/tools/export-confident.d.ts.map +1 -1
- package/dist/tools/export-confident.js +17 -115
- package/dist/tools/export-confident.js.map +1 -1
- package/dist/tools/export-confident.test.js +79 -75
- package/dist/tools/export-confident.test.js.map +1 -1
- package/dist/tools/export-datadog.d.ts +77 -116
- package/dist/tools/export-datadog.d.ts.map +1 -1
- package/dist/tools/export-datadog.js +38 -40
- package/dist/tools/export-datadog.js.map +1 -1
- package/dist/tools/export-datadog.test.js +122 -165
- package/dist/tools/export-datadog.test.js.map +1 -1
- package/dist/tools/export-jaeger.d.ts +100 -0
- package/dist/tools/export-jaeger.d.ts.map +1 -0
- package/dist/tools/export-jaeger.js +154 -0
- package/dist/tools/export-jaeger.js.map +1 -0
- package/dist/tools/export-jaeger.test.d.ts +2 -0
- package/dist/tools/export-jaeger.test.d.ts.map +1 -0
- package/dist/tools/export-jaeger.test.js +113 -0
- package/dist/tools/export-jaeger.test.js.map +1 -0
- package/dist/tools/export-langfuse.d.ts +78 -80
- package/dist/tools/export-langfuse.d.ts.map +1 -1
- package/dist/tools/export-langfuse.js +15 -113
- package/dist/tools/export-langfuse.js.map +1 -1
- package/dist/tools/export-langfuse.test.js +70 -81
- package/dist/tools/export-langfuse.test.js.map +1 -1
- package/dist/tools/export-phoenix.d.ts +115 -90
- package/dist/tools/export-phoenix.d.ts.map +1 -1
- package/dist/tools/export-phoenix.js +29 -117
- package/dist/tools/export-phoenix.js.map +1 -1
- package/dist/tools/export-phoenix.test.js +95 -94
- package/dist/tools/export-phoenix.test.js.map +1 -1
- package/dist/tools/get-trace-url.d.ts +2 -10
- package/dist/tools/get-trace-url.d.ts.map +1 -1
- package/dist/tools/get-trace-url.js +5 -8
- package/dist/tools/get-trace-url.js.map +1 -1
- package/dist/tools/get-trace-url.test.js +81 -399
- package/dist/tools/get-trace-url.test.js.map +1 -1
- package/dist/tools/hallucination-detection.d.ts +203 -0
- package/dist/tools/hallucination-detection.d.ts.map +1 -0
- package/dist/tools/hallucination-detection.js +189 -0
- package/dist/tools/hallucination-detection.js.map +1 -0
- package/dist/tools/hallucination-detection.test.d.ts +5 -0
- package/dist/tools/hallucination-detection.test.d.ts.map +1 -0
- package/dist/tools/hallucination-detection.test.js +529 -0
- package/dist/tools/hallucination-detection.test.js.map +1 -0
- package/dist/tools/health-check.d.ts +9 -16
- package/dist/tools/health-check.d.ts.map +1 -1
- package/dist/tools/health-check.js +88 -101
- package/dist/tools/health-check.js.map +1 -1
- package/dist/tools/health-check.test.js +72 -165
- package/dist/tools/health-check.test.js.map +1 -1
- package/dist/tools/index.d.ts +13 -0
- package/dist/tools/index.d.ts.map +1 -1
- package/dist/tools/index.js +13 -0
- package/dist/tools/index.js.map +1 -1
- package/dist/tools/ingest-constants.d.ts +8 -0
- package/dist/tools/ingest-constants.d.ts.map +1 -0
- package/dist/tools/ingest-constants.js +8 -0
- package/dist/tools/ingest-constants.js.map +1 -0
- package/dist/tools/ingest-spans.d.ts +45 -0
- package/dist/tools/ingest-spans.d.ts.map +1 -0
- package/dist/tools/ingest-spans.js +129 -0
- package/dist/tools/ingest-spans.js.map +1 -0
- package/dist/tools/ingest-spans.test.d.ts +5 -0
- package/dist/tools/ingest-spans.test.d.ts.map +1 -0
- package/dist/tools/ingest-spans.test.js +250 -0
- package/dist/tools/ingest-spans.test.js.map +1 -0
- package/dist/tools/ingest-traces.d.ts +76 -0
- package/dist/tools/ingest-traces.d.ts.map +1 -0
- package/dist/tools/ingest-traces.js +164 -0
- package/dist/tools/ingest-traces.js.map +1 -0
- package/dist/tools/ingest-traces.test.d.ts +5 -0
- package/dist/tools/ingest-traces.test.d.ts.map +1 -0
- package/dist/tools/ingest-traces.test.js +483 -0
- package/dist/tools/ingest-traces.test.js.map +1 -0
- package/dist/tools/inject-evaluations.d.ts +136 -1197
- package/dist/tools/inject-evaluations.d.ts.map +1 -1
- package/dist/tools/inject-evaluations.js +65 -53
- package/dist/tools/inject-evaluations.js.map +1 -1
- package/dist/tools/inject-evaluations.test.js +83 -71
- package/dist/tools/inject-evaluations.test.js.map +1 -1
- package/dist/tools/manage-datasets.d.ts +850 -0
- package/dist/tools/manage-datasets.d.ts.map +1 -0
- package/dist/tools/manage-datasets.js +139 -0
- package/dist/tools/manage-datasets.js.map +1 -0
- package/dist/tools/manage-datasets.test.d.ts +5 -0
- package/dist/tools/manage-datasets.test.d.ts.map +1 -0
- package/dist/tools/manage-datasets.test.js +430 -0
- package/dist/tools/manage-datasets.test.js.map +1 -0
- package/dist/tools/multi-agent-coordination.d.ts +178 -0
- package/dist/tools/multi-agent-coordination.d.ts.map +1 -0
- package/dist/tools/multi-agent-coordination.js +270 -0
- package/dist/tools/multi-agent-coordination.js.map +1 -0
- package/dist/tools/multi-agent-coordination.test.d.ts +5 -0
- package/dist/tools/multi-agent-coordination.test.d.ts.map +1 -0
- package/dist/tools/multi-agent-coordination.test.js +530 -0
- package/dist/tools/multi-agent-coordination.test.js.map +1 -0
- package/dist/tools/query-evaluations.d.ts +147 -105
- package/dist/tools/query-evaluations.d.ts.map +1 -1
- package/dist/tools/query-evaluations.js +205 -178
- package/dist/tools/query-evaluations.js.map +1 -1
- package/dist/tools/query-evaluations.test.js +386 -391
- package/dist/tools/query-evaluations.test.js.map +1 -1
- package/dist/tools/query-llm-events.d.ts +100 -75
- package/dist/tools/query-llm-events.d.ts.map +1 -1
- package/dist/tools/query-llm-events.js +106 -80
- package/dist/tools/query-llm-events.js.map +1 -1
- package/dist/tools/query-llm-events.test.js +183 -346
- package/dist/tools/query-llm-events.test.js.map +1 -1
- package/dist/tools/query-logs.d.ts +45 -58
- package/dist/tools/query-logs.d.ts.map +1 -1
- package/dist/tools/query-logs.js +54 -101
- package/dist/tools/query-logs.js.map +1 -1
- package/dist/tools/query-logs.test.js +118 -314
- package/dist/tools/query-logs.test.js.map +1 -1
- package/dist/tools/query-metric-histograms.d.ts +112 -0
- package/dist/tools/query-metric-histograms.d.ts.map +1 -0
- package/dist/tools/query-metric-histograms.js +69 -0
- package/dist/tools/query-metric-histograms.js.map +1 -0
- package/dist/tools/query-metric-histograms.test.d.ts +5 -0
- package/dist/tools/query-metric-histograms.test.d.ts.map +1 -0
- package/dist/tools/query-metric-histograms.test.js +209 -0
- package/dist/tools/query-metric-histograms.test.js.map +1 -0
- package/dist/tools/query-metrics.d.ts +159 -60
- package/dist/tools/query-metrics.d.ts.map +1 -1
- package/dist/tools/query-metrics.js +133 -111
- package/dist/tools/query-metrics.js.map +1 -1
- package/dist/tools/query-metrics.test.js +314 -389
- package/dist/tools/query-metrics.test.js.map +1 -1
- package/dist/tools/query-regressions.d.ts +76 -0
- package/dist/tools/query-regressions.d.ts.map +1 -0
- package/dist/tools/query-regressions.js +122 -0
- package/dist/tools/query-regressions.js.map +1 -0
- package/dist/tools/query-regressions.test.d.ts +8 -0
- package/dist/tools/query-regressions.test.d.ts.map +1 -0
- package/dist/tools/query-regressions.test.js +129 -0
- package/dist/tools/query-regressions.test.js.map +1 -0
- package/dist/tools/query-traces.d.ts +103 -71
- package/dist/tools/query-traces.d.ts.map +1 -1
- package/dist/tools/query-traces.js +75 -106
- package/dist/tools/query-traces.js.map +1 -1
- package/dist/tools/query-traces.test.js +140 -846
- package/dist/tools/query-traces.test.js.map +1 -1
- package/dist/tools/query-verifications.d.ts +55 -43
- package/dist/tools/query-verifications.d.ts.map +1 -1
- package/dist/tools/query-verifications.js +47 -46
- package/dist/tools/query-verifications.js.map +1 -1
- package/dist/tools/query-verifications.test.js +42 -35
- package/dist/tools/query-verifications.test.js.map +1 -1
- package/dist/tools/routing-telemetry.d.ts +168 -0
- package/dist/tools/routing-telemetry.d.ts.map +1 -0
- package/dist/tools/routing-telemetry.js +267 -0
- package/dist/tools/routing-telemetry.js.map +1 -0
- package/dist/tools/routing-telemetry.test.d.ts +5 -0
- package/dist/tools/routing-telemetry.test.d.ts.map +1 -0
- package/dist/tools/routing-telemetry.test.js +747 -0
- package/dist/tools/routing-telemetry.test.js.map +1 -0
- package/dist/tools/setup-claudeignore.d.ts +4 -32
- package/dist/tools/setup-claudeignore.d.ts.map +1 -1
- package/dist/tools/setup-claudeignore.js +18 -22
- package/dist/tools/setup-claudeignore.js.map +1 -1
- package/dist/tools/setup-claudeignore.test.js +50 -49
- package/dist/tools/setup-claudeignore.test.js.map +1 -1
- package/dist/tools/token-budget.d.ts +170 -0
- package/dist/tools/token-budget.d.ts.map +1 -0
- package/dist/tools/token-budget.js +219 -0
- package/dist/tools/token-budget.js.map +1 -0
- package/dist/tools/token-budget.test.d.ts +5 -0
- package/dist/tools/token-budget.test.d.ts.map +1 -0
- package/dist/tools/token-budget.test.js +293 -0
- package/dist/tools/token-budget.test.js.map +1 -0
- package/package.json +72 -10
- package/dist/backends/local-jsonl.test.d.ts +0 -2
- package/dist/backends/local-jsonl.test.d.ts.map +0 -1
- package/dist/backends/local-jsonl.test.js +0 -4651
- package/dist/backends/local-jsonl.test.js.map +0 -1
- package/dist/backends/signoz-api-circuit-breaker.test.d.ts +0 -6
- package/dist/backends/signoz-api-circuit-breaker.test.d.ts.map +0 -1
- package/dist/backends/signoz-api-circuit-breaker.test.js +0 -548
- package/dist/backends/signoz-api-circuit-breaker.test.js.map +0 -1
- package/dist/backends/signoz-api-rate-limiter.test.d.ts +0 -6
- package/dist/backends/signoz-api-rate-limiter.test.d.ts.map +0 -1
- package/dist/backends/signoz-api-rate-limiter.test.js +0 -390
- package/dist/backends/signoz-api-rate-limiter.test.js.map +0 -1
- package/dist/backends/signoz-api-ssrf.test.d.ts +0 -6
- package/dist/backends/signoz-api-ssrf.test.d.ts.map +0 -1
- package/dist/backends/signoz-api-ssrf.test.js +0 -216
- package/dist/backends/signoz-api-ssrf.test.js.map +0 -1
- package/dist/backends/signoz-api-test-helpers.d.ts +0 -80
- package/dist/backends/signoz-api-test-helpers.d.ts.map +0 -1
- package/dist/backends/signoz-api-test-helpers.js +0 -79
- package/dist/backends/signoz-api-test-helpers.js.map +0 -1
- package/dist/backends/signoz-api.d.ts +0 -109
- package/dist/backends/signoz-api.d.ts.map +0 -1
- package/dist/backends/signoz-api.integration.test.d.ts +0 -8
- package/dist/backends/signoz-api.integration.test.d.ts.map +0 -1
- package/dist/backends/signoz-api.integration.test.js +0 -137
- package/dist/backends/signoz-api.integration.test.js.map +0 -1
- package/dist/backends/signoz-api.js +0 -1132
- package/dist/backends/signoz-api.js.map +0 -1
- package/dist/backends/signoz-api.test.d.ts +0 -11
- package/dist/backends/signoz-api.test.d.ts.map +0 -1
- package/dist/backends/signoz-api.test.js +0 -832
- package/dist/backends/signoz-api.test.js.map +0 -1
- package/dist/lib/agent-as-judge.d.ts +0 -388
- package/dist/lib/agent-as-judge.d.ts.map +0 -1
- package/dist/lib/agent-as-judge.js +0 -740
- package/dist/lib/agent-as-judge.js.map +0 -1
- package/dist/lib/agent-as-judge.test.d.ts.map +0 -1
- package/dist/lib/agent-as-judge.test.js +0 -816
- package/dist/lib/agent-as-judge.test.js.map +0 -1
- package/dist/lib/cache.d.ts +0 -90
- package/dist/lib/cache.d.ts.map +0 -1
- package/dist/lib/cache.js +0 -133
- package/dist/lib/cache.js.map +0 -1
- package/dist/lib/cache.test.d.ts.map +0 -1
- package/dist/lib/cache.test.js +0 -105
- package/dist/lib/cache.test.js.map +0 -1
- package/dist/lib/circuit-breaker.d.ts +0 -101
- package/dist/lib/circuit-breaker.d.ts.map +0 -1
- package/dist/lib/circuit-breaker.js +0 -158
- package/dist/lib/circuit-breaker.js.map +0 -1
- package/dist/lib/circuit-breaker.test.d.ts.map +0 -1
- package/dist/lib/circuit-breaker.test.js +0 -263
- package/dist/lib/circuit-breaker.test.js.map +0 -1
- package/dist/lib/confident-export.d.ts +0 -101
- package/dist/lib/confident-export.d.ts.map +0 -1
- package/dist/lib/confident-export.js +0 -393
- package/dist/lib/confident-export.js.map +0 -1
- package/dist/lib/confident-export.test.d.ts.map +0 -1
- package/dist/lib/confident-export.test.js +0 -835
- package/dist/lib/confident-export.test.js.map +0 -1
- package/dist/lib/constants-symlink.test.d.ts.map +0 -1
- package/dist/lib/constants-symlink.test.js +0 -357
- package/dist/lib/constants-symlink.test.js.map +0 -1
- package/dist/lib/constants.d.ts +0 -183
- package/dist/lib/constants.d.ts.map +0 -1
- package/dist/lib/constants.js +0 -453
- package/dist/lib/constants.js.map +0 -1
- package/dist/lib/constants.test.d.ts.map +0 -1
- package/dist/lib/constants.test.js +0 -717
- package/dist/lib/constants.test.js.map +0 -1
- package/dist/lib/datadog-export.d.ts +0 -156
- package/dist/lib/datadog-export.d.ts.map +0 -1
- package/dist/lib/datadog-export.js +0 -464
- package/dist/lib/datadog-export.js.map +0 -1
- package/dist/lib/datadog-export.test.d.ts +0 -14
- package/dist/lib/datadog-export.test.d.ts.map +0 -1
- package/dist/lib/datadog-export.test.js +0 -890
- package/dist/lib/datadog-export.test.js.map +0 -1
- package/dist/lib/edge-cases.test.d.ts.map +0 -1
- package/dist/lib/edge-cases.test.js +0 -634
- package/dist/lib/edge-cases.test.js.map +0 -1
- package/dist/lib/error-sanitizer.d.ts +0 -57
- package/dist/lib/error-sanitizer.d.ts.map +0 -1
- package/dist/lib/error-sanitizer.js +0 -233
- package/dist/lib/error-sanitizer.js.map +0 -1
- package/dist/lib/error-sanitizer.test.d.ts.map +0 -1
- package/dist/lib/error-sanitizer.test.js +0 -528
- package/dist/lib/error-sanitizer.test.js.map +0 -1
- package/dist/lib/error-types.d.ts +0 -54
- package/dist/lib/error-types.d.ts.map +0 -1
- package/dist/lib/error-types.js +0 -154
- package/dist/lib/error-types.js.map +0 -1
- package/dist/lib/error-types.test.d.ts.map +0 -1
- package/dist/lib/error-types.test.js +0 -196
- package/dist/lib/error-types.test.js.map +0 -1
- package/dist/lib/evaluation-hooks.d.ts +0 -49
- package/dist/lib/evaluation-hooks.d.ts.map +0 -1
- package/dist/lib/evaluation-hooks.js +0 -488
- package/dist/lib/evaluation-hooks.js.map +0 -1
- package/dist/lib/evaluation-hooks.test.d.ts.map +0 -1
- package/dist/lib/evaluation-hooks.test.js +0 -624
- package/dist/lib/evaluation-hooks.test.js.map +0 -1
- package/dist/lib/export-utils.d.ts +0 -99
- package/dist/lib/export-utils.d.ts.map +0 -1
- package/dist/lib/export-utils.js +0 -238
- package/dist/lib/export-utils.js.map +0 -1
- package/dist/lib/export-utils.test.d.ts.map +0 -1
- package/dist/lib/export-utils.test.js +0 -193
- package/dist/lib/export-utils.test.js.map +0 -1
- package/dist/lib/file-utils.d.ts +0 -320
- package/dist/lib/file-utils.d.ts.map +0 -1
- package/dist/lib/file-utils.js +0 -816
- package/dist/lib/file-utils.js.map +0 -1
- package/dist/lib/file-utils.test.d.ts.map +0 -1
- package/dist/lib/file-utils.test.js +0 -1333
- package/dist/lib/file-utils.test.js.map +0 -1
- package/dist/lib/histogram.d.ts +0 -119
- package/dist/lib/histogram.d.ts.map +0 -1
- package/dist/lib/histogram.js +0 -202
- package/dist/lib/histogram.js.map +0 -1
- package/dist/lib/histogram.test.d.ts.map +0 -1
- package/dist/lib/histogram.test.js +0 -381
- package/dist/lib/histogram.test.js.map +0 -1
- package/dist/lib/indexer.d.ts +0 -96
- package/dist/lib/indexer.d.ts.map +0 -1
- package/dist/lib/indexer.js +0 -353
- package/dist/lib/indexer.js.map +0 -1
- package/dist/lib/indexer.test.d.ts.map +0 -1
- package/dist/lib/indexer.test.js +0 -696
- package/dist/lib/indexer.test.js.map +0 -1
- package/dist/lib/input-validator.d.ts +0 -115
- package/dist/lib/input-validator.d.ts.map +0 -1
- package/dist/lib/input-validator.fuzz.test.d.ts.map +0 -1
- package/dist/lib/input-validator.fuzz.test.js +0 -290
- package/dist/lib/input-validator.fuzz.test.js.map +0 -1
- package/dist/lib/input-validator.js +0 -304
- package/dist/lib/input-validator.js.map +0 -1
- package/dist/lib/input-validator.test.d.ts.map +0 -1
- package/dist/lib/input-validator.test.js +0 -415
- package/dist/lib/input-validator.test.js.map +0 -1
- package/dist/lib/instrumentation.d.ts +0 -153
- package/dist/lib/instrumentation.d.ts.map +0 -1
- package/dist/lib/instrumentation.integration.test.d.ts.map +0 -1
- package/dist/lib/instrumentation.integration.test.js +0 -589
- package/dist/lib/instrumentation.integration.test.js.map +0 -1
- package/dist/lib/instrumentation.js +0 -520
- package/dist/lib/instrumentation.js.map +0 -1
- package/dist/lib/instrumentation.test.d.ts.map +0 -1
- package/dist/lib/instrumentation.test.js +0 -821
- package/dist/lib/instrumentation.test.js.map +0 -1
- package/dist/lib/langfuse-export.d.ts +0 -125
- package/dist/lib/langfuse-export.d.ts.map +0 -1
- package/dist/lib/langfuse-export.js +0 -367
- package/dist/lib/langfuse-export.js.map +0 -1
- package/dist/lib/langfuse-export.test.d.ts.map +0 -1
- package/dist/lib/langfuse-export.test.js +0 -1007
- package/dist/lib/langfuse-export.test.js.map +0 -1
- package/dist/lib/llm-as-judge.d.ts +0 -657
- package/dist/lib/llm-as-judge.d.ts.map +0 -1
- package/dist/lib/llm-as-judge.js +0 -1397
- package/dist/lib/llm-as-judge.js.map +0 -1
- package/dist/lib/llm-as-judge.test.d.ts.map +0 -1
- package/dist/lib/llm-as-judge.test.js +0 -2409
- package/dist/lib/llm-as-judge.test.js.map +0 -1
- package/dist/lib/logger.d.ts +0 -46
- package/dist/lib/logger.d.ts.map +0 -1
- package/dist/lib/logger.js +0 -81
- package/dist/lib/logger.js.map +0 -1
- package/dist/lib/logger.test.d.ts.map +0 -1
- package/dist/lib/logger.test.js.map +0 -1
- package/dist/lib/metrics.d.ts +0 -62
- package/dist/lib/metrics.d.ts.map +0 -1
- package/dist/lib/metrics.js +0 -166
- package/dist/lib/metrics.js.map +0 -1
- package/dist/lib/metrics.test.d.ts.map +0 -1
- package/dist/lib/metrics.test.js +0 -189
- package/dist/lib/metrics.test.js.map +0 -1
- package/dist/lib/otlp-export.d.ts +0 -178
- package/dist/lib/otlp-export.d.ts.map +0 -1
- package/dist/lib/otlp-export.js +0 -382
- package/dist/lib/otlp-export.js.map +0 -1
- package/dist/lib/parse-stats.d.ts.map +0 -1
- package/dist/lib/parse-stats.js +0 -206
- package/dist/lib/parse-stats.js.map +0 -1
- package/dist/lib/parse-stats.test.d.ts.map +0 -1
- package/dist/lib/parse-stats.test.js +0 -283
- package/dist/lib/parse-stats.test.js.map +0 -1
- package/dist/lib/phoenix-export.d.ts +0 -109
- package/dist/lib/phoenix-export.d.ts.map +0 -1
- package/dist/lib/phoenix-export.js +0 -429
- package/dist/lib/phoenix-export.js.map +0 -1
- package/dist/lib/phoenix-export.test.d.ts.map +0 -1
- package/dist/lib/phoenix-export.test.js +0 -725
- package/dist/lib/phoenix-export.test.js.map +0 -1
- package/dist/lib/query-sanitizer.d.ts.map +0 -1
- package/dist/lib/query-sanitizer.js +0 -261
- package/dist/lib/query-sanitizer.js.map +0 -1
- package/dist/lib/query-sanitizer.test.d.ts.map +0 -1
- package/dist/lib/query-sanitizer.test.js +0 -400
- package/dist/lib/query-sanitizer.test.js.map +0 -1
- package/dist/lib/server-utils.d.ts +0 -93
- package/dist/lib/server-utils.d.ts.map +0 -1
- package/dist/lib/server-utils.js +0 -181
- package/dist/lib/server-utils.js.map +0 -1
- package/dist/lib/shared-schemas.d.ts +0 -87
- package/dist/lib/shared-schemas.d.ts.map +0 -1
- package/dist/lib/shared-schemas.js +0 -87
- package/dist/lib/shared-schemas.js.map +0 -1
- package/dist/lib/shared-schemas.test.d.ts.map +0 -1
- package/dist/lib/shared-schemas.test.js +0 -106
- package/dist/lib/shared-schemas.test.js.map +0 -1
- package/dist/lib/toon-encoder.d.ts +0 -26
- package/dist/lib/toon-encoder.d.ts.map +0 -1
- package/dist/lib/toon-encoder.js +0 -61
- package/dist/lib/toon-encoder.js.map +0 -1
- package/dist/lib/toon-encoder.test.d.ts.map +0 -1
- package/dist/lib/toon-encoder.test.js +0 -85
- package/dist/lib/toon-encoder.test.js.map +0 -1
- package/dist/lib/verification-events.d.ts +0 -100
- package/dist/lib/verification-events.d.ts.map +0 -1
- package/dist/lib/verification-events.js +0 -162
- package/dist/lib/verification-events.js.map +0 -1
- package/dist/lib/verification-events.test.d.ts.map +0 -1
- package/dist/lib/verification-events.test.js +0 -193
- package/dist/lib/verification-events.test.js.map +0 -1
- package/dist/tools/signoz.integration.test.d.ts +0 -8
- package/dist/tools/signoz.integration.test.d.ts.map +0 -1
- package/dist/tools/signoz.integration.test.js +0 -141
- package/dist/tools/signoz.integration.test.js.map +0 -1
- package/dist/types/evaluation-hooks.d.ts +0 -176
- package/dist/types/evaluation-hooks.d.ts.map +0 -1
- package/dist/types/evaluation-hooks.js +0 -49
- package/dist/types/evaluation-hooks.js.map +0 -1
- /package/dist/lib/{agent-as-judge.test.d.ts → agent-judge/agent-as-judge.test.d.ts} +0 -0
- /package/dist/lib/{verification-events.test.d.ts → audit/verification-events.test.d.ts} +0 -0
- /package/dist/lib/{constants-symlink.test.d.ts → core/constants-symlink.test.d.ts} +0 -0
- /package/dist/lib/{constants.test.d.ts → core/constants.test.d.ts} +0 -0
- /package/dist/lib/{edge-cases.test.d.ts → core/edge-cases.test.d.ts} +0 -0
- /package/dist/lib/{file-utils.test.d.ts → core/file-utils.test.d.ts} +0 -0
- /package/dist/lib/{input-validator.fuzz.test.d.ts → core/input-validator.fuzz.test.d.ts} +0 -0
- /package/dist/lib/{input-validator.test.d.ts → core/input-validator.test.d.ts} +0 -0
- /package/dist/lib/{logger.test.d.ts → core/logger.test.d.ts} +0 -0
- /package/dist/lib/{logger.test.js → core/logger.test.js} +0 -0
- /package/dist/lib/{shared-schemas.test.d.ts → core/shared-schemas.test.d.ts} +0 -0
- /package/dist/lib/{error-sanitizer.test.d.ts → errors/error-sanitizer.test.d.ts} +0 -0
- /package/dist/lib/{error-types.test.d.ts → errors/error-types.test.d.ts} +0 -0
- /package/dist/lib/{query-sanitizer.d.ts → errors/query-sanitizer.d.ts} +0 -0
- /package/dist/lib/{query-sanitizer.test.d.ts → errors/query-sanitizer.test.d.ts} +0 -0
- /package/dist/lib/{confident-export.test.d.ts → exports/confident-export.test.d.ts} +0 -0
- /package/dist/lib/{export-utils.test.d.ts → exports/export-utils.test.d.ts} +0 -0
- /package/dist/lib/{langfuse-export.test.d.ts → exports/langfuse-export.test.d.ts} +0 -0
- /package/dist/lib/{phoenix-export.test.d.ts → exports/phoenix-export.test.d.ts} +0 -0
- /package/dist/lib/{evaluation-hooks.test.d.ts → judge/evaluation-hooks.test.d.ts} +0 -0
- /package/dist/lib/{llm-as-judge.test.d.ts → judge/llm-as-judge.test.d.ts} +0 -0
- /package/dist/lib/{histogram.test.d.ts → observability/histogram.test.d.ts} +0 -0
- /package/dist/lib/{indexer.test.d.ts → observability/indexer.test.d.ts} +0 -0
- /package/dist/lib/{instrumentation.integration.test.d.ts → observability/instrumentation.integration.test.d.ts} +0 -0
- /package/dist/lib/{instrumentation.test.d.ts → observability/instrumentation.test.d.ts} +0 -0
- /package/dist/lib/{metrics.test.d.ts → observability/metrics.test.d.ts} +0 -0
- /package/dist/lib/{parse-stats.d.ts → observability/parse-stats.d.ts} +0 -0
- /package/dist/lib/{parse-stats.test.d.ts → observability/parse-stats.test.d.ts} +0 -0
- /package/dist/lib/{cache.test.d.ts → resilience/cache.test.d.ts} +0 -0
- /package/dist/lib/{circuit-breaker.test.d.ts → resilience/circuit-breaker.test.d.ts} +0 -0
- /package/dist/lib/{toon-encoder.test.d.ts → resilience/toon-encoder.test.d.ts} +0 -0
|
@@ -1,4651 +0,0 @@
|
|
|
1
|
-
import { describe, it, before, after, beforeEach } from 'node:test';
|
|
2
|
-
import * as assert from 'node:assert';
|
|
3
|
-
import * as fs from 'fs';
|
|
4
|
-
import * as path from 'path';
|
|
5
|
-
import { LocalJsonlBackend, MultiDirectoryBackend } from './local-jsonl.js';
|
|
6
|
-
import { buildAndWriteIndex, getIndexPath } from '../lib/indexer.js';
|
|
7
|
-
import { createTempDir, removeTempDir, getSharedTempDir, clearTempDir, removeSharedTempDir, writeJsonlFile, getTestDate } from '../test-helpers/file-utils.js';
|
|
8
|
-
describe('LocalJsonlBackend', () => {
|
|
9
|
-
let tempDir;
|
|
10
|
-
let backend;
|
|
11
|
-
before(() => {
|
|
12
|
-
tempDir = getSharedTempDir('LocalJsonlBackend');
|
|
13
|
-
});
|
|
14
|
-
beforeEach(() => {
|
|
15
|
-
clearTempDir(tempDir);
|
|
16
|
-
backend = new LocalJsonlBackend(tempDir);
|
|
17
|
-
});
|
|
18
|
-
after(() => {
|
|
19
|
-
removeSharedTempDir('LocalJsonlBackend');
|
|
20
|
-
});
|
|
21
|
-
describe('queryTraces', () => {
|
|
22
|
-
it('should read and normalize trace spans from JSONL files', async () => {
|
|
23
|
-
const today = getTestDate();
|
|
24
|
-
const mockSpans = [
|
|
25
|
-
{
|
|
26
|
-
traceId: 'trace1',
|
|
27
|
-
spanId: 'span1',
|
|
28
|
-
name: 'test-operation',
|
|
29
|
-
startTime: [1700000000, 0],
|
|
30
|
-
endTime: [1700000001, 500000000],
|
|
31
|
-
resource: { serviceName: 'test-service', serviceVersion: '1.0.0' },
|
|
32
|
-
attributes: { 'custom.attr': 'value1' },
|
|
33
|
-
},
|
|
34
|
-
];
|
|
35
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
36
|
-
const results = await backend.queryTraces({});
|
|
37
|
-
assert.strictEqual(results.length, 1);
|
|
38
|
-
assert.strictEqual(results[0].traceId, 'trace1');
|
|
39
|
-
assert.strictEqual(results[0].spanId, 'span1');
|
|
40
|
-
assert.strictEqual(results[0].name, 'test-operation');
|
|
41
|
-
assert.strictEqual(results[0].attributes?.['service.name'], 'test-service');
|
|
42
|
-
assert.strictEqual(results[0].attributes?.['service.version'], '1.0.0');
|
|
43
|
-
assert.strictEqual(results[0].attributes?.['custom.attr'], 'value1');
|
|
44
|
-
});
|
|
45
|
-
it('should filter spans by traceId', async () => {
|
|
46
|
-
const today = getTestDate();
|
|
47
|
-
const mockSpans = [
|
|
48
|
-
{ traceId: 'trace1', spanId: 'span1', name: 'op1', startTime: [1700000000, 0] },
|
|
49
|
-
{ traceId: 'trace2', spanId: 'span2', name: 'op2', startTime: [1700000000, 0] },
|
|
50
|
-
{ traceId: 'trace1', spanId: 'span3', name: 'op3', startTime: [1700000000, 0] },
|
|
51
|
-
];
|
|
52
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
53
|
-
const results = await backend.queryTraces({ traceId: 'trace1' });
|
|
54
|
-
assert.strictEqual(results.length, 2);
|
|
55
|
-
assert.ok(results.every(s => s.traceId === 'trace1'));
|
|
56
|
-
});
|
|
57
|
-
it('should filter spans by spanName substring', async () => {
|
|
58
|
-
const today = getTestDate();
|
|
59
|
-
const mockSpans = [
|
|
60
|
-
{ traceId: 'trace1', spanId: 'span1', name: 'user-create', startTime: [1700000000, 0] },
|
|
61
|
-
{ traceId: 'trace1', spanId: 'span2', name: 'user-update', startTime: [1700000000, 0] },
|
|
62
|
-
{ traceId: 'trace1', spanId: 'span3', name: 'db-query', startTime: [1700000000, 0] },
|
|
63
|
-
];
|
|
64
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
65
|
-
const results = await backend.queryTraces({ spanName: 'user' });
|
|
66
|
-
assert.strictEqual(results.length, 2);
|
|
67
|
-
assert.ok(results.every(s => s.name.includes('user')));
|
|
68
|
-
});
|
|
69
|
-
it('should filter spans by duration range', async () => {
|
|
70
|
-
const today = getTestDate();
|
|
71
|
-
const mockSpans = [
|
|
72
|
-
{
|
|
73
|
-
traceId: 'trace1',
|
|
74
|
-
spanId: 'span1',
|
|
75
|
-
name: 'fast-op',
|
|
76
|
-
startTime: [1700000000, 0],
|
|
77
|
-
endTime: [1700000000, 500000000], // 0.5s
|
|
78
|
-
},
|
|
79
|
-
{
|
|
80
|
-
traceId: 'trace1',
|
|
81
|
-
spanId: 'span2',
|
|
82
|
-
name: 'medium-op',
|
|
83
|
-
startTime: [1700000000, 0],
|
|
84
|
-
endTime: [1700000002, 0], // 2s
|
|
85
|
-
},
|
|
86
|
-
{
|
|
87
|
-
traceId: 'trace1',
|
|
88
|
-
spanId: 'span3',
|
|
89
|
-
name: 'slow-op',
|
|
90
|
-
startTime: [1700000000, 0],
|
|
91
|
-
endTime: [1700000010, 0], // 10s
|
|
92
|
-
},
|
|
93
|
-
];
|
|
94
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
95
|
-
const results = await backend.queryTraces({ minDurationMs: 1000, maxDurationMs: 5000 });
|
|
96
|
-
assert.strictEqual(results.length, 1);
|
|
97
|
-
assert.strictEqual(results[0].name, 'medium-op');
|
|
98
|
-
});
|
|
99
|
-
it('should filter spans by serviceName', async () => {
|
|
100
|
-
const today = getTestDate();
|
|
101
|
-
const mockSpans = [
|
|
102
|
-
{
|
|
103
|
-
traceId: 'trace1',
|
|
104
|
-
spanId: 'span1',
|
|
105
|
-
name: 'op1',
|
|
106
|
-
startTime: [1700000000, 0],
|
|
107
|
-
resource: { serviceName: 'service-a' },
|
|
108
|
-
},
|
|
109
|
-
{
|
|
110
|
-
traceId: 'trace1',
|
|
111
|
-
spanId: 'span2',
|
|
112
|
-
name: 'op2',
|
|
113
|
-
startTime: [1700000000, 0],
|
|
114
|
-
resource: { serviceName: 'service-b' },
|
|
115
|
-
},
|
|
116
|
-
];
|
|
117
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
118
|
-
const results = await backend.queryTraces({ serviceName: 'service-a' });
|
|
119
|
-
assert.strictEqual(results.length, 1);
|
|
120
|
-
assert.strictEqual(results[0].attributes?.['service.name'], 'service-a');
|
|
121
|
-
});
|
|
122
|
-
it('should apply limit and offset to results', async () => {
|
|
123
|
-
const today = getTestDate();
|
|
124
|
-
const mockSpans = Array.from({ length: 150 }, (_, i) => ({
|
|
125
|
-
traceId: `trace${i}`,
|
|
126
|
-
spanId: `span${i}`,
|
|
127
|
-
name: `op${i}`,
|
|
128
|
-
startTime: [1700000000, 0],
|
|
129
|
-
}));
|
|
130
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
131
|
-
const results = await backend.queryTraces({ limit: 50, offset: 25 });
|
|
132
|
-
assert.strictEqual(results.length, 50);
|
|
133
|
-
assert.strictEqual(results[0].traceId, 'trace25');
|
|
134
|
-
});
|
|
135
|
-
it('should skip invalid spans (missing required fields)', async () => {
|
|
136
|
-
const today = getTestDate();
|
|
137
|
-
const mockSpans = [
|
|
138
|
-
{ traceId: 'trace1', spanId: 'span1', name: 'op1', startTime: [1700000000, 0] },
|
|
139
|
-
{ traceId: 'trace2', spanId: 'span2', startTime: [1700000000, 0] }, // missing name
|
|
140
|
-
{ spanId: 'span3', name: 'op3', startTime: [1700000000, 0] }, // missing traceId
|
|
141
|
-
{ traceId: 'trace4', name: 'op4', startTime: [1700000000, 0] }, // missing spanId
|
|
142
|
-
];
|
|
143
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
144
|
-
const results = await backend.queryTraces({});
|
|
145
|
-
assert.strictEqual(results.length, 1);
|
|
146
|
-
assert.strictEqual(results[0].traceId, 'trace1');
|
|
147
|
-
});
|
|
148
|
-
it('should convert duration from [seconds, nanoseconds] array', async () => {
|
|
149
|
-
const today = getTestDate();
|
|
150
|
-
const mockSpans = [
|
|
151
|
-
{
|
|
152
|
-
traceId: 'trace1',
|
|
153
|
-
spanId: 'span1',
|
|
154
|
-
name: 'op1',
|
|
155
|
-
startTime: [1700000000, 0],
|
|
156
|
-
duration: [2, 500000000], // 2.5 seconds
|
|
157
|
-
},
|
|
158
|
-
];
|
|
159
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
160
|
-
const results = await backend.queryTraces({});
|
|
161
|
-
assert.strictEqual(results.length, 1);
|
|
162
|
-
assert.strictEqual(results[0].durationMs, 2500);
|
|
163
|
-
});
|
|
164
|
-
it('should convert span kind number to string', async () => {
|
|
165
|
-
const today = getTestDate();
|
|
166
|
-
const mockSpans = [
|
|
167
|
-
{ traceId: 'trace1', spanId: 'span1', name: 'op1', kind: 0, startTime: [1700000000, 0] },
|
|
168
|
-
{ traceId: 'trace2', spanId: 'span2', name: 'op2', kind: 1, startTime: [1700000000, 0] },
|
|
169
|
-
{ traceId: 'trace3', spanId: 'span3', name: 'op3', kind: 2, startTime: [1700000000, 0] },
|
|
170
|
-
];
|
|
171
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
172
|
-
const results = await backend.queryTraces({});
|
|
173
|
-
assert.strictEqual(results[0].kind, 'INTERNAL');
|
|
174
|
-
assert.strictEqual(results[1].kind, 'SERVER');
|
|
175
|
-
assert.strictEqual(results[2].kind, 'CLIENT');
|
|
176
|
-
});
|
|
177
|
-
it('should convert status code number to string', async () => {
|
|
178
|
-
const today = getTestDate();
|
|
179
|
-
const mockSpans = [
|
|
180
|
-
{ traceId: 'trace1', spanId: 'span1', name: 'op1', startTime: [1700000000, 0], status: { code: 0 } },
|
|
181
|
-
{ traceId: 'trace2', spanId: 'span2', name: 'op2', startTime: [1700000000, 0], status: { code: 1 } },
|
|
182
|
-
{ traceId: 'trace3', spanId: 'span3', name: 'op3', startTime: [1700000000, 0], status: { code: 2, message: 'Test error' } },
|
|
183
|
-
];
|
|
184
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
185
|
-
const results = await backend.queryTraces({});
|
|
186
|
-
assert.strictEqual(results[0].statusCode, 'UNSET');
|
|
187
|
-
assert.strictEqual(results[0].status?.code, 0);
|
|
188
|
-
assert.strictEqual(results[1].statusCode, 'OK');
|
|
189
|
-
assert.strictEqual(results[1].status?.code, 1);
|
|
190
|
-
assert.strictEqual(results[2].statusCode, 'ERROR');
|
|
191
|
-
assert.strictEqual(results[2].status?.code, 2);
|
|
192
|
-
assert.strictEqual(results[2].status?.message, 'Test error');
|
|
193
|
-
});
|
|
194
|
-
it('should handle spans without status', async () => {
|
|
195
|
-
const today = getTestDate();
|
|
196
|
-
const mockSpans = [
|
|
197
|
-
{ traceId: 'trace1', spanId: 'span1', name: 'op1', startTime: [1700000000, 0] },
|
|
198
|
-
];
|
|
199
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
200
|
-
const results = await backend.queryTraces({});
|
|
201
|
-
assert.strictEqual(results[0].statusCode, undefined);
|
|
202
|
-
assert.strictEqual(results[0].status, undefined);
|
|
203
|
-
});
|
|
204
|
-
it('should extract instrumentationScope from spans', async () => {
|
|
205
|
-
const today = getTestDate();
|
|
206
|
-
const mockSpans = [
|
|
207
|
-
{
|
|
208
|
-
traceId: 'trace1',
|
|
209
|
-
spanId: 'span1',
|
|
210
|
-
name: 'http-request',
|
|
211
|
-
startTime: [1700000000, 0],
|
|
212
|
-
instrumentationScope: {
|
|
213
|
-
name: '@opentelemetry/instrumentation-http',
|
|
214
|
-
version: '0.48.0',
|
|
215
|
-
schemaUrl: 'https://opentelemetry.io/schemas/1.21.0',
|
|
216
|
-
},
|
|
217
|
-
},
|
|
218
|
-
{
|
|
219
|
-
traceId: 'trace2',
|
|
220
|
-
spanId: 'span2',
|
|
221
|
-
name: 'custom-span',
|
|
222
|
-
startTime: [1700000000, 0],
|
|
223
|
-
instrumentationScope: {
|
|
224
|
-
name: 'custom-hooks',
|
|
225
|
-
},
|
|
226
|
-
},
|
|
227
|
-
{
|
|
228
|
-
traceId: 'trace3',
|
|
229
|
-
spanId: 'span3',
|
|
230
|
-
name: 'no-scope',
|
|
231
|
-
startTime: [1700000000, 0],
|
|
232
|
-
},
|
|
233
|
-
];
|
|
234
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
235
|
-
const results = await backend.queryTraces({});
|
|
236
|
-
assert.strictEqual(results.length, 3);
|
|
237
|
-
// First span: full scope
|
|
238
|
-
assert.strictEqual(results[0].instrumentationScope?.name, '@opentelemetry/instrumentation-http');
|
|
239
|
-
assert.strictEqual(results[0].instrumentationScope?.version, '0.48.0');
|
|
240
|
-
assert.strictEqual(results[0].instrumentationScope?.schemaUrl, 'https://opentelemetry.io/schemas/1.21.0');
|
|
241
|
-
// Second span: name only
|
|
242
|
-
assert.strictEqual(results[1].instrumentationScope?.name, 'custom-hooks');
|
|
243
|
-
assert.strictEqual(results[1].instrumentationScope?.version, undefined);
|
|
244
|
-
// Third span: no scope
|
|
245
|
-
assert.strictEqual(results[2].instrumentationScope, undefined);
|
|
246
|
-
});
|
|
247
|
-
it('should extract span links from spans', async () => {
|
|
248
|
-
const today = getTestDate();
|
|
249
|
-
const mockSpans = [
|
|
250
|
-
{
|
|
251
|
-
traceId: 'trace1',
|
|
252
|
-
spanId: 'span1',
|
|
253
|
-
name: 'batch-processor',
|
|
254
|
-
startTime: [1700000000, 0],
|
|
255
|
-
links: [
|
|
256
|
-
{
|
|
257
|
-
context: { traceId: 'trace-upstream-1', spanId: 'span-upstream-1' },
|
|
258
|
-
attributes: { 'link.type': 'producer' },
|
|
259
|
-
},
|
|
260
|
-
{
|
|
261
|
-
context: { traceId: 'trace-upstream-2', spanId: 'span-upstream-2' },
|
|
262
|
-
},
|
|
263
|
-
],
|
|
264
|
-
},
|
|
265
|
-
{
|
|
266
|
-
traceId: 'trace2',
|
|
267
|
-
spanId: 'span2',
|
|
268
|
-
name: 'single-link',
|
|
269
|
-
startTime: [1700000000, 0],
|
|
270
|
-
links: [
|
|
271
|
-
{
|
|
272
|
-
context: { traceId: 'trace-parent', spanId: 'span-parent' },
|
|
273
|
-
attributes: { 'link.reason': 'causal' },
|
|
274
|
-
},
|
|
275
|
-
],
|
|
276
|
-
},
|
|
277
|
-
{
|
|
278
|
-
traceId: 'trace3',
|
|
279
|
-
spanId: 'span3',
|
|
280
|
-
name: 'no-links',
|
|
281
|
-
startTime: [1700000000, 0],
|
|
282
|
-
},
|
|
283
|
-
];
|
|
284
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
285
|
-
const results = await backend.queryTraces({});
|
|
286
|
-
assert.strictEqual(results.length, 3);
|
|
287
|
-
// First span: multiple links
|
|
288
|
-
assert.strictEqual(results[0].links?.length, 2);
|
|
289
|
-
assert.strictEqual(results[0].links?.[0].traceId, 'trace-upstream-1');
|
|
290
|
-
assert.strictEqual(results[0].links?.[0].spanId, 'span-upstream-1');
|
|
291
|
-
assert.strictEqual(results[0].links?.[0].attributes?.['link.type'], 'producer');
|
|
292
|
-
assert.strictEqual(results[0].links?.[1].traceId, 'trace-upstream-2');
|
|
293
|
-
assert.strictEqual(results[0].links?.[1].spanId, 'span-upstream-2');
|
|
294
|
-
assert.strictEqual(results[0].links?.[1].attributes, undefined);
|
|
295
|
-
// Second span: single link with attributes
|
|
296
|
-
assert.strictEqual(results[1].links?.length, 1);
|
|
297
|
-
assert.strictEqual(results[1].links?.[0].traceId, 'trace-parent');
|
|
298
|
-
assert.strictEqual(results[1].links?.[0].attributes?.['link.reason'], 'causal');
|
|
299
|
-
// Third span: no links
|
|
300
|
-
assert.strictEqual(results[2].links, undefined);
|
|
301
|
-
});
|
|
302
|
-
it('should filter out invalid span links with missing context', async () => {
|
|
303
|
-
const today = getTestDate();
|
|
304
|
-
const mockSpans = [
|
|
305
|
-
{
|
|
306
|
-
traceId: 'trace1',
|
|
307
|
-
spanId: 'span1',
|
|
308
|
-
name: 'mixed-links',
|
|
309
|
-
startTime: [1700000000, 0],
|
|
310
|
-
links: [
|
|
311
|
-
{
|
|
312
|
-
context: { traceId: 'valid-trace', spanId: 'valid-span' },
|
|
313
|
-
},
|
|
314
|
-
{
|
|
315
|
-
context: { traceId: 'missing-span-id' },
|
|
316
|
-
},
|
|
317
|
-
{
|
|
318
|
-
context: { spanId: 'missing-trace-id' },
|
|
319
|
-
},
|
|
320
|
-
{
|
|
321
|
-
// No context at all
|
|
322
|
-
attributes: { 'orphan': true },
|
|
323
|
-
},
|
|
324
|
-
],
|
|
325
|
-
},
|
|
326
|
-
];
|
|
327
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
328
|
-
const results = await backend.queryTraces({});
|
|
329
|
-
assert.strictEqual(results.length, 1);
|
|
330
|
-
// Only the valid link should be included
|
|
331
|
-
assert.strictEqual(results[0].links?.length, 1);
|
|
332
|
-
assert.strictEqual(results[0].links?.[0].traceId, 'valid-trace');
|
|
333
|
-
assert.strictEqual(results[0].links?.[0].spanId, 'valid-span');
|
|
334
|
-
});
|
|
335
|
-
it('should set links to undefined when all links are invalid', async () => {
|
|
336
|
-
const today = getTestDate();
|
|
337
|
-
const mockSpans = [
|
|
338
|
-
{
|
|
339
|
-
traceId: 'trace1',
|
|
340
|
-
spanId: 'span1',
|
|
341
|
-
name: 'all-invalid-links',
|
|
342
|
-
startTime: [1700000000, 0],
|
|
343
|
-
links: [
|
|
344
|
-
{ context: { traceId: 'missing-span' } },
|
|
345
|
-
{ context: { spanId: 'missing-trace' } },
|
|
346
|
-
],
|
|
347
|
-
},
|
|
348
|
-
];
|
|
349
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
350
|
-
const results = await backend.queryTraces({});
|
|
351
|
-
assert.strictEqual(results.length, 1);
|
|
352
|
-
assert.strictEqual(results[0].links, undefined);
|
|
353
|
-
});
|
|
354
|
-
it('should return empty array when no files found', async () => {
|
|
355
|
-
// No files created - tempDir is empty
|
|
356
|
-
const results = await backend.queryTraces({});
|
|
357
|
-
assert.strictEqual(results.length, 0);
|
|
358
|
-
});
|
|
359
|
-
it('should filter spans by attributeFilter with string value', async () => {
|
|
360
|
-
const today = getTestDate();
|
|
361
|
-
const mockSpans = [
|
|
362
|
-
{
|
|
363
|
-
traceId: 'trace1',
|
|
364
|
-
spanId: 'span1',
|
|
365
|
-
name: 'hook:session-start',
|
|
366
|
-
startTime: [1700000000, 0],
|
|
367
|
-
attributes: { 'hook.name': 'session-start', 'hook.type': 'session' },
|
|
368
|
-
},
|
|
369
|
-
{
|
|
370
|
-
traceId: 'trace2',
|
|
371
|
-
spanId: 'span2',
|
|
372
|
-
name: 'hook:mcp-pre-tool',
|
|
373
|
-
startTime: [1700000000, 0],
|
|
374
|
-
attributes: { 'hook.name': 'mcp-pre-tool', 'mcp.server': 'signoz' },
|
|
375
|
-
},
|
|
376
|
-
{
|
|
377
|
-
traceId: 'trace3',
|
|
378
|
-
spanId: 'span3',
|
|
379
|
-
name: 'hook:post-tool',
|
|
380
|
-
startTime: [1700000000, 0],
|
|
381
|
-
attributes: { 'hook.name': 'post-tool', 'mcp.server': 'webresearch' },
|
|
382
|
-
},
|
|
383
|
-
];
|
|
384
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
385
|
-
const results = await backend.queryTraces({
|
|
386
|
-
attributeFilter: { 'hook.name': 'session-start' },
|
|
387
|
-
});
|
|
388
|
-
assert.strictEqual(results.length, 1);
|
|
389
|
-
assert.strictEqual(results[0].traceId, 'trace1');
|
|
390
|
-
});
|
|
391
|
-
it('should filter spans by attributeFilter with multiple attributes', async () => {
|
|
392
|
-
const today = getTestDate();
|
|
393
|
-
const mockSpans = [
|
|
394
|
-
{
|
|
395
|
-
traceId: 'trace1',
|
|
396
|
-
spanId: 'span1',
|
|
397
|
-
name: 'mcp-call',
|
|
398
|
-
startTime: [1700000000, 0],
|
|
399
|
-
attributes: { 'mcp.server': 'signoz', 'mcp.success': true },
|
|
400
|
-
},
|
|
401
|
-
{
|
|
402
|
-
traceId: 'trace2',
|
|
403
|
-
spanId: 'span2',
|
|
404
|
-
name: 'mcp-call',
|
|
405
|
-
startTime: [1700000000, 0],
|
|
406
|
-
attributes: { 'mcp.server': 'signoz', 'mcp.success': false },
|
|
407
|
-
},
|
|
408
|
-
{
|
|
409
|
-
traceId: 'trace3',
|
|
410
|
-
spanId: 'span3',
|
|
411
|
-
name: 'mcp-call',
|
|
412
|
-
startTime: [1700000000, 0],
|
|
413
|
-
attributes: { 'mcp.server': 'webresearch', 'mcp.success': true },
|
|
414
|
-
},
|
|
415
|
-
];
|
|
416
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
417
|
-
const results = await backend.queryTraces({
|
|
418
|
-
attributeFilter: { 'mcp.server': 'signoz', 'mcp.success': true },
|
|
419
|
-
});
|
|
420
|
-
assert.strictEqual(results.length, 1);
|
|
421
|
-
assert.strictEqual(results[0].traceId, 'trace1');
|
|
422
|
-
});
|
|
423
|
-
it('should filter spans by attributeFilter with number value', async () => {
|
|
424
|
-
const today = getTestDate();
|
|
425
|
-
const mockSpans = [
|
|
426
|
-
{
|
|
427
|
-
traceId: 'trace1',
|
|
428
|
-
spanId: 'span1',
|
|
429
|
-
name: 'http-request',
|
|
430
|
-
startTime: [1700000000, 0],
|
|
431
|
-
attributes: { 'http.status_code': 200 },
|
|
432
|
-
},
|
|
433
|
-
{
|
|
434
|
-
traceId: 'trace2',
|
|
435
|
-
spanId: 'span2',
|
|
436
|
-
name: 'http-request',
|
|
437
|
-
startTime: [1700000000, 0],
|
|
438
|
-
attributes: { 'http.status_code': 500 },
|
|
439
|
-
},
|
|
440
|
-
];
|
|
441
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
442
|
-
const results = await backend.queryTraces({
|
|
443
|
-
attributeFilter: { 'http.status_code': 200 },
|
|
444
|
-
});
|
|
445
|
-
assert.strictEqual(results.length, 1);
|
|
446
|
-
assert.strictEqual(results[0].traceId, 'trace1');
|
|
447
|
-
});
|
|
448
|
-
it('should filter spans by attributeFilter with boolean value', async () => {
|
|
449
|
-
const today = getTestDate();
|
|
450
|
-
const mockSpans = [
|
|
451
|
-
{
|
|
452
|
-
traceId: 'trace1',
|
|
453
|
-
spanId: 'span1',
|
|
454
|
-
name: 'agent-call',
|
|
455
|
-
startTime: [1700000000, 0],
|
|
456
|
-
attributes: { 'agent.is_background': true },
|
|
457
|
-
},
|
|
458
|
-
{
|
|
459
|
-
traceId: 'trace2',
|
|
460
|
-
spanId: 'span2',
|
|
461
|
-
name: 'agent-call',
|
|
462
|
-
startTime: [1700000000, 0],
|
|
463
|
-
attributes: { 'agent.is_background': false },
|
|
464
|
-
},
|
|
465
|
-
];
|
|
466
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
467
|
-
const results = await backend.queryTraces({
|
|
468
|
-
attributeFilter: { 'agent.is_background': false },
|
|
469
|
-
});
|
|
470
|
-
assert.strictEqual(results.length, 1);
|
|
471
|
-
assert.strictEqual(results[0].traceId, 'trace2');
|
|
472
|
-
});
|
|
473
|
-
it('should return empty array when attributeFilter matches nothing', async () => {
|
|
474
|
-
const today = getTestDate();
|
|
475
|
-
const mockSpans = [
|
|
476
|
-
{
|
|
477
|
-
traceId: 'trace1',
|
|
478
|
-
spanId: 'span1',
|
|
479
|
-
name: 'op1',
|
|
480
|
-
startTime: [1700000000, 0],
|
|
481
|
-
attributes: { 'hook.name': 'session-start' },
|
|
482
|
-
},
|
|
483
|
-
];
|
|
484
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
485
|
-
const results = await backend.queryTraces({
|
|
486
|
-
attributeFilter: { 'hook.name': 'nonexistent' },
|
|
487
|
-
});
|
|
488
|
-
assert.strictEqual(results.length, 0);
|
|
489
|
-
});
|
|
490
|
-
it('should combine attributeFilter with other filters', async () => {
|
|
491
|
-
const today = getTestDate();
|
|
492
|
-
const mockSpans = [
|
|
493
|
-
{
|
|
494
|
-
traceId: 'trace1',
|
|
495
|
-
spanId: 'span1',
|
|
496
|
-
name: 'hook:mcp-pre-tool',
|
|
497
|
-
startTime: [1700000000, 0],
|
|
498
|
-
endTime: [1700000000, 500000000], // 500ms
|
|
499
|
-
attributes: { 'mcp.server': 'signoz' },
|
|
500
|
-
},
|
|
501
|
-
{
|
|
502
|
-
traceId: 'trace2',
|
|
503
|
-
spanId: 'span2',
|
|
504
|
-
name: 'hook:mcp-pre-tool',
|
|
505
|
-
startTime: [1700000000, 0],
|
|
506
|
-
endTime: [1700000002, 0], // 2000ms
|
|
507
|
-
attributes: { 'mcp.server': 'signoz' },
|
|
508
|
-
},
|
|
509
|
-
{
|
|
510
|
-
traceId: 'trace3',
|
|
511
|
-
spanId: 'span3',
|
|
512
|
-
name: 'hook:mcp-pre-tool',
|
|
513
|
-
startTime: [1700000000, 0],
|
|
514
|
-
endTime: [1700000000, 500000000], // 500ms
|
|
515
|
-
attributes: { 'mcp.server': 'webresearch' },
|
|
516
|
-
},
|
|
517
|
-
];
|
|
518
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
519
|
-
const results = await backend.queryTraces({
|
|
520
|
-
spanName: 'mcp',
|
|
521
|
-
minDurationMs: 1000,
|
|
522
|
-
attributeFilter: { 'mcp.server': 'signoz' },
|
|
523
|
-
});
|
|
524
|
-
assert.strictEqual(results.length, 1);
|
|
525
|
-
assert.strictEqual(results[0].traceId, 'trace2');
|
|
526
|
-
});
|
|
527
|
-
it('should exclude spans matching excludeSpanName', async () => {
|
|
528
|
-
const today = getTestDate();
|
|
529
|
-
const mockSpans = [
|
|
530
|
-
{ traceId: 'trace1', spanId: 'span1', name: 'http-request', startTime: [1700000000, 0] },
|
|
531
|
-
{ traceId: 'trace2', spanId: 'span2', name: 'db-query', startTime: [1700000000, 0] },
|
|
532
|
-
{ traceId: 'trace3', spanId: 'span3', name: 'http-response', startTime: [1700000000, 0] },
|
|
533
|
-
];
|
|
534
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
535
|
-
const results = await backend.queryTraces({ excludeSpanName: 'http' });
|
|
536
|
-
assert.strictEqual(results.length, 1);
|
|
537
|
-
assert.strictEqual(results[0].name, 'db-query');
|
|
538
|
-
});
|
|
539
|
-
it('should filter spans by spanNameRegex', async () => {
|
|
540
|
-
const today = getTestDate();
|
|
541
|
-
const mockSpans = [
|
|
542
|
-
{ traceId: 'trace1', spanId: 'span1', name: 'hook:session-start', startTime: [1700000000, 0] },
|
|
543
|
-
{ traceId: 'trace2', spanId: 'span2', name: 'hook:session-end', startTime: [1700000000, 0] },
|
|
544
|
-
{ traceId: 'trace3', spanId: 'span3', name: 'mcp-call', startTime: [1700000000, 0] },
|
|
545
|
-
{ traceId: 'trace4', spanId: 'span4', name: 'hook:pre-tool', startTime: [1700000000, 0] },
|
|
546
|
-
];
|
|
547
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
548
|
-
// Match spans starting with 'hook:session'
|
|
549
|
-
const results = await backend.queryTraces({ spanNameRegex: '^hook:session' });
|
|
550
|
-
assert.strictEqual(results.length, 2);
|
|
551
|
-
assert.ok(results.some(s => s.name === 'hook:session-start'));
|
|
552
|
-
assert.ok(results.some(s => s.name === 'hook:session-end'));
|
|
553
|
-
});
|
|
554
|
-
it('should filter spans by spanNameRegex with complex pattern', async () => {
|
|
555
|
-
const today = getTestDate();
|
|
556
|
-
const mockSpans = [
|
|
557
|
-
{ traceId: 'trace1', spanId: 'span1', name: 'api-v1-users-get', startTime: [1700000000, 0] },
|
|
558
|
-
{ traceId: 'trace2', spanId: 'span2', name: 'api-v2-users-get', startTime: [1700000000, 0] },
|
|
559
|
-
{ traceId: 'trace3', spanId: 'span3', name: 'api-v1-orders-post', startTime: [1700000000, 0] },
|
|
560
|
-
{ traceId: 'trace4', spanId: 'span4', name: 'internal-process', startTime: [1700000000, 0] },
|
|
561
|
-
];
|
|
562
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
563
|
-
// Match spans with api-v[12]-.*-get pattern
|
|
564
|
-
const results = await backend.queryTraces({ spanNameRegex: 'api-v[12]-.*-get' });
|
|
565
|
-
assert.strictEqual(results.length, 2);
|
|
566
|
-
assert.ok(results.some(s => s.name === 'api-v1-users-get'));
|
|
567
|
-
assert.ok(results.some(s => s.name === 'api-v2-users-get'));
|
|
568
|
-
});
|
|
569
|
-
it('should handle invalid spanNameRegex gracefully', async () => {
|
|
570
|
-
const today = getTestDate();
|
|
571
|
-
const mockSpans = [
|
|
572
|
-
{ traceId: 'trace1', spanId: 'span1', name: 'test-span', startTime: [1700000000, 0] },
|
|
573
|
-
];
|
|
574
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
575
|
-
// Invalid regex pattern - should be skipped (all spans returned)
|
|
576
|
-
const results = await backend.queryTraces({ spanNameRegex: '[invalid(' });
|
|
577
|
-
// Invalid regex is skipped, so all spans should be returned
|
|
578
|
-
assert.strictEqual(results.length, 1);
|
|
579
|
-
assert.strictEqual(results[0].name, 'test-span');
|
|
580
|
-
});
|
|
581
|
-
it('should combine spanNameRegex with spanName filter', async () => {
|
|
582
|
-
const today = getTestDate();
|
|
583
|
-
const mockSpans = [
|
|
584
|
-
{ traceId: 'trace1', spanId: 'span1', name: 'hook:mcp-pre-tool', startTime: [1700000000, 0] },
|
|
585
|
-
{ traceId: 'trace2', spanId: 'span2', name: 'hook:mcp-post-tool', startTime: [1700000000, 0] },
|
|
586
|
-
{ traceId: 'trace3', spanId: 'span3', name: 'hook:session-start', startTime: [1700000000, 0] },
|
|
587
|
-
{ traceId: 'trace4', spanId: 'span4', name: 'mcp-call', startTime: [1700000000, 0] },
|
|
588
|
-
];
|
|
589
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
590
|
-
// spanName filters first (substring), then regex narrows down
|
|
591
|
-
const results = await backend.queryTraces({
|
|
592
|
-
spanName: 'hook',
|
|
593
|
-
spanNameRegex: 'mcp',
|
|
594
|
-
});
|
|
595
|
-
assert.strictEqual(results.length, 2);
|
|
596
|
-
assert.ok(results.some(s => s.name === 'hook:mcp-pre-tool'));
|
|
597
|
-
assert.ok(results.some(s => s.name === 'hook:mcp-post-tool'));
|
|
598
|
-
});
|
|
599
|
-
it('should combine spanNameRegex with excludeSpanName', async () => {
|
|
600
|
-
const today = getTestDate();
|
|
601
|
-
const mockSpans = [
|
|
602
|
-
{ traceId: 'trace1', spanId: 'span1', name: 'hook:mcp-pre-tool', startTime: [1700000000, 0] },
|
|
603
|
-
{ traceId: 'trace2', spanId: 'span2', name: 'hook:mcp-post-tool', startTime: [1700000000, 0] },
|
|
604
|
-
{ traceId: 'trace3', spanId: 'span3', name: 'hook:session-start', startTime: [1700000000, 0] },
|
|
605
|
-
];
|
|
606
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
607
|
-
// Regex matches all hook:mcp-*, exclude post-tool
|
|
608
|
-
const results = await backend.queryTraces({
|
|
609
|
-
spanNameRegex: '^hook:mcp-',
|
|
610
|
-
excludeSpanName: 'post-tool',
|
|
611
|
-
});
|
|
612
|
-
assert.strictEqual(results.length, 1);
|
|
613
|
-
assert.strictEqual(results[0].name, 'hook:mcp-pre-tool');
|
|
614
|
-
});
|
|
615
|
-
it('should filter spans by attributeExists - all must exist', async () => {
|
|
616
|
-
const today = getTestDate();
|
|
617
|
-
const mockSpans = [
|
|
618
|
-
{
|
|
619
|
-
traceId: 'trace1',
|
|
620
|
-
spanId: 'span1',
|
|
621
|
-
name: 'op1',
|
|
622
|
-
startTime: [1700000000, 0],
|
|
623
|
-
attributes: { 'http.method': 'GET', 'http.status_code': 200 },
|
|
624
|
-
},
|
|
625
|
-
{
|
|
626
|
-
traceId: 'trace2',
|
|
627
|
-
spanId: 'span2',
|
|
628
|
-
name: 'op2',
|
|
629
|
-
startTime: [1700000000, 0],
|
|
630
|
-
attributes: { 'http.method': 'POST' }, // missing http.status_code
|
|
631
|
-
},
|
|
632
|
-
{
|
|
633
|
-
traceId: 'trace3',
|
|
634
|
-
spanId: 'span3',
|
|
635
|
-
name: 'op3',
|
|
636
|
-
startTime: [1700000000, 0],
|
|
637
|
-
attributes: { 'db.system': 'postgres' }, // missing both
|
|
638
|
-
},
|
|
639
|
-
];
|
|
640
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
641
|
-
const results = await backend.queryTraces({
|
|
642
|
-
attributeExists: ['http.method', 'http.status_code'],
|
|
643
|
-
});
|
|
644
|
-
assert.strictEqual(results.length, 1);
|
|
645
|
-
assert.strictEqual(results[0].traceId, 'trace1');
|
|
646
|
-
});
|
|
647
|
-
it('should filter spans by attributeNotExists - exclude if any exist', async () => {
|
|
648
|
-
const today = getTestDate();
|
|
649
|
-
const mockSpans = [
|
|
650
|
-
{
|
|
651
|
-
traceId: 'trace1',
|
|
652
|
-
spanId: 'span1',
|
|
653
|
-
name: 'op1',
|
|
654
|
-
startTime: [1700000000, 0],
|
|
655
|
-
attributes: { 'http.method': 'GET', 'error.message': 'timeout' },
|
|
656
|
-
},
|
|
657
|
-
{
|
|
658
|
-
traceId: 'trace2',
|
|
659
|
-
spanId: 'span2',
|
|
660
|
-
name: 'op2',
|
|
661
|
-
startTime: [1700000000, 0],
|
|
662
|
-
attributes: { 'http.method': 'POST' }, // no error attributes
|
|
663
|
-
},
|
|
664
|
-
{
|
|
665
|
-
traceId: 'trace3',
|
|
666
|
-
spanId: 'span3',
|
|
667
|
-
name: 'op3',
|
|
668
|
-
startTime: [1700000000, 0],
|
|
669
|
-
attributes: { 'http.method': 'GET', 'error.type': 'network' },
|
|
670
|
-
},
|
|
671
|
-
];
|
|
672
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
673
|
-
const results = await backend.queryTraces({
|
|
674
|
-
attributeNotExists: ['error.message', 'error.type'],
|
|
675
|
-
});
|
|
676
|
-
assert.strictEqual(results.length, 1);
|
|
677
|
-
assert.strictEqual(results[0].traceId, 'trace2');
|
|
678
|
-
});
|
|
679
|
-
it('should combine spanName with excludeSpanName', async () => {
|
|
680
|
-
const today = getTestDate();
|
|
681
|
-
const mockSpans = [
|
|
682
|
-
{ traceId: 'trace1', spanId: 'span1', name: 'http-request-external', startTime: [1700000000, 0] },
|
|
683
|
-
{ traceId: 'trace2', spanId: 'span2', name: 'http-request-internal', startTime: [1700000000, 0] },
|
|
684
|
-
{ traceId: 'trace3', spanId: 'span3', name: 'db-query', startTime: [1700000000, 0] },
|
|
685
|
-
];
|
|
686
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
687
|
-
const results = await backend.queryTraces({
|
|
688
|
-
spanName: 'http',
|
|
689
|
-
excludeSpanName: 'internal',
|
|
690
|
-
});
|
|
691
|
-
assert.strictEqual(results.length, 1);
|
|
692
|
-
assert.strictEqual(results[0].name, 'http-request-external');
|
|
693
|
-
});
|
|
694
|
-
it('should combine attributeExists with attributeFilter', async () => {
|
|
695
|
-
const today = getTestDate();
|
|
696
|
-
const mockSpans = [
|
|
697
|
-
{
|
|
698
|
-
traceId: 'trace1',
|
|
699
|
-
spanId: 'span1',
|
|
700
|
-
name: 'op1',
|
|
701
|
-
startTime: [1700000000, 0],
|
|
702
|
-
attributes: { 'http.method': 'GET', 'http.status_code': 200 },
|
|
703
|
-
},
|
|
704
|
-
{
|
|
705
|
-
traceId: 'trace2',
|
|
706
|
-
spanId: 'span2',
|
|
707
|
-
name: 'op2',
|
|
708
|
-
startTime: [1700000000, 0],
|
|
709
|
-
attributes: { 'http.method': 'POST', 'http.status_code': 500 },
|
|
710
|
-
},
|
|
711
|
-
{
|
|
712
|
-
traceId: 'trace3',
|
|
713
|
-
spanId: 'span3',
|
|
714
|
-
name: 'op3',
|
|
715
|
-
startTime: [1700000000, 0],
|
|
716
|
-
attributes: { 'http.method': 'GET' }, // missing status_code
|
|
717
|
-
},
|
|
718
|
-
];
|
|
719
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
720
|
-
const results = await backend.queryTraces({
|
|
721
|
-
attributeFilter: { 'http.method': 'GET' },
|
|
722
|
-
attributeExists: ['http.status_code'],
|
|
723
|
-
});
|
|
724
|
-
assert.strictEqual(results.length, 1);
|
|
725
|
-
assert.strictEqual(results[0].traceId, 'trace1');
|
|
726
|
-
});
|
|
727
|
-
it('should filter spans by numericFilter with gt operator', async () => {
|
|
728
|
-
const today = getTestDate();
|
|
729
|
-
const mockSpans = [
|
|
730
|
-
{
|
|
731
|
-
traceId: 'trace1',
|
|
732
|
-
spanId: 'span1',
|
|
733
|
-
name: 'http-request',
|
|
734
|
-
startTime: [1700000000, 0],
|
|
735
|
-
attributes: { 'http.status_code': 200 },
|
|
736
|
-
},
|
|
737
|
-
{
|
|
738
|
-
traceId: 'trace2',
|
|
739
|
-
spanId: 'span2',
|
|
740
|
-
name: 'http-request',
|
|
741
|
-
startTime: [1700000000, 0],
|
|
742
|
-
attributes: { 'http.status_code': 500 },
|
|
743
|
-
},
|
|
744
|
-
{
|
|
745
|
-
traceId: 'trace3',
|
|
746
|
-
spanId: 'span3',
|
|
747
|
-
name: 'http-request',
|
|
748
|
-
startTime: [1700000000, 0],
|
|
749
|
-
attributes: { 'http.status_code': 300 },
|
|
750
|
-
},
|
|
751
|
-
];
|
|
752
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
753
|
-
const results = await backend.queryTraces({
|
|
754
|
-
numericFilter: [{ attribute: 'http.status_code', operator: 'gt', value: 299 }],
|
|
755
|
-
});
|
|
756
|
-
assert.strictEqual(results.length, 2);
|
|
757
|
-
assert.ok(results.some(s => s.traceId === 'trace2'));
|
|
758
|
-
assert.ok(results.some(s => s.traceId === 'trace3'));
|
|
759
|
-
});
|
|
760
|
-
it('should filter spans by numericFilter with gte operator', async () => {
|
|
761
|
-
const today = getTestDate();
|
|
762
|
-
const mockSpans = [
|
|
763
|
-
{
|
|
764
|
-
traceId: 'trace1',
|
|
765
|
-
spanId: 'span1',
|
|
766
|
-
name: 'http-request',
|
|
767
|
-
startTime: [1700000000, 0],
|
|
768
|
-
attributes: { 'http.status_code': 200 },
|
|
769
|
-
},
|
|
770
|
-
{
|
|
771
|
-
traceId: 'trace2',
|
|
772
|
-
spanId: 'span2',
|
|
773
|
-
name: 'http-request',
|
|
774
|
-
startTime: [1700000000, 0],
|
|
775
|
-
attributes: { 'http.status_code': 300 },
|
|
776
|
-
},
|
|
777
|
-
];
|
|
778
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
779
|
-
const results = await backend.queryTraces({
|
|
780
|
-
numericFilter: [{ attribute: 'http.status_code', operator: 'gte', value: 300 }],
|
|
781
|
-
});
|
|
782
|
-
assert.strictEqual(results.length, 1);
|
|
783
|
-
assert.strictEqual(results[0].traceId, 'trace2');
|
|
784
|
-
});
|
|
785
|
-
it('should filter spans by numericFilter with lt operator', async () => {
|
|
786
|
-
const today = getTestDate();
|
|
787
|
-
const mockSpans = [
|
|
788
|
-
{
|
|
789
|
-
traceId: 'trace1',
|
|
790
|
-
spanId: 'span1',
|
|
791
|
-
name: 'http-request',
|
|
792
|
-
startTime: [1700000000, 0],
|
|
793
|
-
attributes: { 'http.status_code': 200 },
|
|
794
|
-
},
|
|
795
|
-
{
|
|
796
|
-
traceId: 'trace2',
|
|
797
|
-
spanId: 'span2',
|
|
798
|
-
name: 'http-request',
|
|
799
|
-
startTime: [1700000000, 0],
|
|
800
|
-
attributes: { 'http.status_code': 500 },
|
|
801
|
-
},
|
|
802
|
-
];
|
|
803
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
804
|
-
const results = await backend.queryTraces({
|
|
805
|
-
numericFilter: [{ attribute: 'http.status_code', operator: 'lt', value: 300 }],
|
|
806
|
-
});
|
|
807
|
-
assert.strictEqual(results.length, 1);
|
|
808
|
-
assert.strictEqual(results[0].traceId, 'trace1');
|
|
809
|
-
});
|
|
810
|
-
it('should filter spans by numericFilter with lte operator', async () => {
|
|
811
|
-
const today = getTestDate();
|
|
812
|
-
const mockSpans = [
|
|
813
|
-
{
|
|
814
|
-
traceId: 'trace1',
|
|
815
|
-
spanId: 'span1',
|
|
816
|
-
name: 'http-request',
|
|
817
|
-
startTime: [1700000000, 0],
|
|
818
|
-
attributes: { 'http.status_code': 200 },
|
|
819
|
-
},
|
|
820
|
-
{
|
|
821
|
-
traceId: 'trace2',
|
|
822
|
-
spanId: 'span2',
|
|
823
|
-
name: 'http-request',
|
|
824
|
-
startTime: [1700000000, 0],
|
|
825
|
-
attributes: { 'http.status_code': 300 },
|
|
826
|
-
},
|
|
827
|
-
];
|
|
828
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
829
|
-
const results = await backend.queryTraces({
|
|
830
|
-
numericFilter: [{ attribute: 'http.status_code', operator: 'lte', value: 200 }],
|
|
831
|
-
});
|
|
832
|
-
assert.strictEqual(results.length, 1);
|
|
833
|
-
assert.strictEqual(results[0].traceId, 'trace1');
|
|
834
|
-
});
|
|
835
|
-
it('should filter spans by numericFilter with eq operator', async () => {
|
|
836
|
-
const today = getTestDate();
|
|
837
|
-
const mockSpans = [
|
|
838
|
-
{
|
|
839
|
-
traceId: 'trace1',
|
|
840
|
-
spanId: 'span1',
|
|
841
|
-
name: 'http-request',
|
|
842
|
-
startTime: [1700000000, 0],
|
|
843
|
-
attributes: { 'http.status_code': 200 },
|
|
844
|
-
},
|
|
845
|
-
{
|
|
846
|
-
traceId: 'trace2',
|
|
847
|
-
spanId: 'span2',
|
|
848
|
-
name: 'http-request',
|
|
849
|
-
startTime: [1700000000, 0],
|
|
850
|
-
attributes: { 'http.status_code': 500 },
|
|
851
|
-
},
|
|
852
|
-
];
|
|
853
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
854
|
-
const results = await backend.queryTraces({
|
|
855
|
-
numericFilter: [{ attribute: 'http.status_code', operator: 'eq', value: 200 }],
|
|
856
|
-
});
|
|
857
|
-
assert.strictEqual(results.length, 1);
|
|
858
|
-
assert.strictEqual(results[0].traceId, 'trace1');
|
|
859
|
-
});
|
|
860
|
-
it('should filter spans by multiple numericFilter conditions (AND logic)', async () => {
|
|
861
|
-
const today = getTestDate();
|
|
862
|
-
const mockSpans = [
|
|
863
|
-
{
|
|
864
|
-
traceId: 'trace1',
|
|
865
|
-
spanId: 'span1',
|
|
866
|
-
name: 'http-request',
|
|
867
|
-
startTime: [1700000000, 0],
|
|
868
|
-
attributes: { 'http.status_code': 200, 'http.response_size': 1000 },
|
|
869
|
-
},
|
|
870
|
-
{
|
|
871
|
-
traceId: 'trace2',
|
|
872
|
-
spanId: 'span2',
|
|
873
|
-
name: 'http-request',
|
|
874
|
-
startTime: [1700000000, 0],
|
|
875
|
-
attributes: { 'http.status_code': 200, 'http.response_size': 5000 },
|
|
876
|
-
},
|
|
877
|
-
{
|
|
878
|
-
traceId: 'trace3',
|
|
879
|
-
spanId: 'span3',
|
|
880
|
-
name: 'http-request',
|
|
881
|
-
startTime: [1700000000, 0],
|
|
882
|
-
attributes: { 'http.status_code': 500, 'http.response_size': 100 },
|
|
883
|
-
},
|
|
884
|
-
];
|
|
885
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
886
|
-
const results = await backend.queryTraces({
|
|
887
|
-
numericFilter: [
|
|
888
|
-
{ attribute: 'http.status_code', operator: 'lt', value: 300 },
|
|
889
|
-
{ attribute: 'http.response_size', operator: 'gt', value: 2000 },
|
|
890
|
-
],
|
|
891
|
-
});
|
|
892
|
-
assert.strictEqual(results.length, 1);
|
|
893
|
-
assert.strictEqual(results[0].traceId, 'trace2');
|
|
894
|
-
});
|
|
895
|
-
it('should skip spans when numericFilter attribute is missing', async () => {
|
|
896
|
-
const today = getTestDate();
|
|
897
|
-
const mockSpans = [
|
|
898
|
-
{
|
|
899
|
-
traceId: 'trace1',
|
|
900
|
-
spanId: 'span1',
|
|
901
|
-
name: 'http-request',
|
|
902
|
-
startTime: [1700000000, 0],
|
|
903
|
-
attributes: { 'http.status_code': 200 },
|
|
904
|
-
},
|
|
905
|
-
{
|
|
906
|
-
traceId: 'trace2',
|
|
907
|
-
spanId: 'span2',
|
|
908
|
-
name: 'http-request',
|
|
909
|
-
startTime: [1700000000, 0],
|
|
910
|
-
attributes: { 'other.attr': 'value' }, // missing http.status_code
|
|
911
|
-
},
|
|
912
|
-
];
|
|
913
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
914
|
-
const results = await backend.queryTraces({
|
|
915
|
-
numericFilter: [{ attribute: 'http.status_code', operator: 'gte', value: 100 }],
|
|
916
|
-
});
|
|
917
|
-
assert.strictEqual(results.length, 1);
|
|
918
|
-
assert.strictEqual(results[0].traceId, 'trace1');
|
|
919
|
-
});
|
|
920
|
-
it('should skip spans when numericFilter attribute is not a number', async () => {
|
|
921
|
-
const today = getTestDate();
|
|
922
|
-
const mockSpans = [
|
|
923
|
-
{
|
|
924
|
-
traceId: 'trace1',
|
|
925
|
-
spanId: 'span1',
|
|
926
|
-
name: 'http-request',
|
|
927
|
-
startTime: [1700000000, 0],
|
|
928
|
-
attributes: { 'http.status_code': 200 },
|
|
929
|
-
},
|
|
930
|
-
{
|
|
931
|
-
traceId: 'trace2',
|
|
932
|
-
spanId: 'span2',
|
|
933
|
-
name: 'http-request',
|
|
934
|
-
startTime: [1700000000, 0],
|
|
935
|
-
attributes: { 'http.status_code': '200' }, // string, not number
|
|
936
|
-
},
|
|
937
|
-
];
|
|
938
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
939
|
-
const results = await backend.queryTraces({
|
|
940
|
-
numericFilter: [{ attribute: 'http.status_code', operator: 'eq', value: 200 }],
|
|
941
|
-
});
|
|
942
|
-
assert.strictEqual(results.length, 1);
|
|
943
|
-
assert.strictEqual(results[0].traceId, 'trace1');
|
|
944
|
-
});
|
|
945
|
-
it('should combine numericFilter with other filters', async () => {
|
|
946
|
-
const today = getTestDate();
|
|
947
|
-
const mockSpans = [
|
|
948
|
-
{
|
|
949
|
-
traceId: 'trace1',
|
|
950
|
-
spanId: 'span1',
|
|
951
|
-
name: 'http-request',
|
|
952
|
-
startTime: [1700000000, 0],
|
|
953
|
-
attributes: { 'http.status_code': 500, 'http.method': 'GET' },
|
|
954
|
-
},
|
|
955
|
-
{
|
|
956
|
-
traceId: 'trace2',
|
|
957
|
-
spanId: 'span2',
|
|
958
|
-
name: 'http-request',
|
|
959
|
-
startTime: [1700000000, 0],
|
|
960
|
-
attributes: { 'http.status_code': 500, 'http.method': 'POST' },
|
|
961
|
-
},
|
|
962
|
-
{
|
|
963
|
-
traceId: 'trace3',
|
|
964
|
-
spanId: 'span3',
|
|
965
|
-
name: 'http-request',
|
|
966
|
-
startTime: [1700000000, 0],
|
|
967
|
-
attributes: { 'http.status_code': 200, 'http.method': 'GET' },
|
|
968
|
-
},
|
|
969
|
-
];
|
|
970
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
971
|
-
const results = await backend.queryTraces({
|
|
972
|
-
attributeFilter: { 'http.method': 'GET' },
|
|
973
|
-
numericFilter: [{ attribute: 'http.status_code', operator: 'gte', value: 400 }],
|
|
974
|
-
});
|
|
975
|
-
assert.strictEqual(results.length, 1);
|
|
976
|
-
assert.strictEqual(results[0].traceId, 'trace1');
|
|
977
|
-
});
|
|
978
|
-
it('should complete queries with timing (timing is logged for slow queries)', async () => {
|
|
979
|
-
// This test verifies that query timing is active and doesn't break normal queries
|
|
980
|
-
// Timing warnings are logged for queries > 500ms
|
|
981
|
-
const today = getTestDate();
|
|
982
|
-
const mockSpans = [
|
|
983
|
-
{ traceId: 'trace1', spanId: 'span1', name: 'op1', startTime: [1700000000, 0] },
|
|
984
|
-
{ traceId: 'trace2', spanId: 'span2', name: 'op2', startTime: [1700000001, 0] },
|
|
985
|
-
];
|
|
986
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
987
|
-
// Query should complete successfully with timing active
|
|
988
|
-
const results = await backend.queryTraces({});
|
|
989
|
-
assert.strictEqual(results.length, 2);
|
|
990
|
-
});
|
|
991
|
-
});
|
|
992
|
-
describe('queryLogs', () => {
|
|
993
|
-
it('should read and normalize log records from JSONL files', async () => {
|
|
994
|
-
const today = getTestDate();
|
|
995
|
-
const mockLogs = [
|
|
996
|
-
{
|
|
997
|
-
timestamp: '2026-01-28T10:00:00.000Z',
|
|
998
|
-
severityText: 'ERROR',
|
|
999
|
-
body: 'Connection failed',
|
|
1000
|
-
traceId: 'trace1',
|
|
1001
|
-
spanId: 'span1',
|
|
1002
|
-
resource: { serviceName: 'api-service' },
|
|
1003
|
-
attributes: { 'error.type': 'timeout' },
|
|
1004
|
-
},
|
|
1005
|
-
];
|
|
1006
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1007
|
-
const results = await backend.queryLogs({});
|
|
1008
|
-
assert.strictEqual(results.length, 1);
|
|
1009
|
-
assert.strictEqual(results[0].severity, 'ERROR');
|
|
1010
|
-
assert.strictEqual(results[0].body, 'Connection failed');
|
|
1011
|
-
assert.strictEqual(results[0].attributes?.['service.name'], 'api-service');
|
|
1012
|
-
assert.strictEqual(results[0].attributes?.['error.type'], 'timeout');
|
|
1013
|
-
});
|
|
1014
|
-
it('should handle timestamp as ISO string', async () => {
|
|
1015
|
-
const today = getTestDate();
|
|
1016
|
-
const mockLogs = [
|
|
1017
|
-
{
|
|
1018
|
-
timestamp: '2026-01-28T10:00:00.123Z',
|
|
1019
|
-
body: 'Test log',
|
|
1020
|
-
},
|
|
1021
|
-
];
|
|
1022
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1023
|
-
const results = await backend.queryLogs({});
|
|
1024
|
-
assert.strictEqual(results[0].timestamp, '2026-01-28T10:00:00.123Z');
|
|
1025
|
-
});
|
|
1026
|
-
it('should convert timestamp from [seconds, nanoseconds] array', async () => {
|
|
1027
|
-
const today = getTestDate();
|
|
1028
|
-
const mockLogs = [
|
|
1029
|
-
{
|
|
1030
|
-
timestamp: [1700000000, 123456789], // seconds + nanoseconds
|
|
1031
|
-
body: 'Test log',
|
|
1032
|
-
},
|
|
1033
|
-
];
|
|
1034
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1035
|
-
const results = await backend.queryLogs({});
|
|
1036
|
-
// Verify it's a valid ISO string
|
|
1037
|
-
assert.match(results[0].timestamp, /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/);
|
|
1038
|
-
});
|
|
1039
|
-
it('should filter logs by severity (case-insensitive)', async () => {
|
|
1040
|
-
const today = getTestDate();
|
|
1041
|
-
const mockLogs = [
|
|
1042
|
-
{ timestamp: '2026-01-28T10:00:00Z', severityText: 'ERROR', body: 'Error 1' },
|
|
1043
|
-
{ timestamp: '2026-01-28T10:01:00Z', severity: 'WARN', body: 'Warning 1' },
|
|
1044
|
-
{ timestamp: '2026-01-28T10:02:00Z', severity: 'error', body: 'Error 2' },
|
|
1045
|
-
{ timestamp: '2026-01-28T10:03:00Z', severity: 'INFO', body: 'Info 1' },
|
|
1046
|
-
];
|
|
1047
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1048
|
-
const results = await backend.queryLogs({ severity: 'ERROR' });
|
|
1049
|
-
assert.strictEqual(results.length, 2);
|
|
1050
|
-
assert.ok(results.every(l => l.severity.toUpperCase() === 'ERROR'));
|
|
1051
|
-
});
|
|
1052
|
-
it('should filter logs by traceId', async () => {
|
|
1053
|
-
const today = getTestDate();
|
|
1054
|
-
const mockLogs = [
|
|
1055
|
-
{ timestamp: '2026-01-28T10:00:00Z', body: 'Log 1', traceId: 'trace1' },
|
|
1056
|
-
{ timestamp: '2026-01-28T10:01:00Z', body: 'Log 2', traceId: 'trace2' },
|
|
1057
|
-
{ timestamp: '2026-01-28T10:02:00Z', body: 'Log 3', traceId: 'trace1' },
|
|
1058
|
-
];
|
|
1059
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1060
|
-
const results = await backend.queryLogs({ traceId: 'trace1' });
|
|
1061
|
-
assert.strictEqual(results.length, 2);
|
|
1062
|
-
assert.ok(results.every(l => l.traceId === 'trace1'));
|
|
1063
|
-
});
|
|
1064
|
-
it('should filter logs by search text (case-insensitive substring)', async () => {
|
|
1065
|
-
const today = getTestDate();
|
|
1066
|
-
const mockLogs = [
|
|
1067
|
-
{ timestamp: '2026-01-28T10:00:00Z', body: 'Connection timeout' },
|
|
1068
|
-
{ timestamp: '2026-01-28T10:01:00Z', body: 'Database query failed' },
|
|
1069
|
-
{ timestamp: '2026-01-28T10:02:00Z', body: 'Connection reset by peer' },
|
|
1070
|
-
];
|
|
1071
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1072
|
-
const results = await backend.queryLogs({ search: 'CONNECTION' });
|
|
1073
|
-
assert.strictEqual(results.length, 2);
|
|
1074
|
-
assert.ok(results.every(l => l.body.toLowerCase().includes('connection')));
|
|
1075
|
-
});
|
|
1076
|
-
it('should use severityText if available, fallback to severity', async () => {
|
|
1077
|
-
const today = getTestDate();
|
|
1078
|
-
const mockLogs = [
|
|
1079
|
-
{ timestamp: '2026-01-28T10:00:00Z', body: 'Log 1', severityText: 'CUSTOM' },
|
|
1080
|
-
{ timestamp: '2026-01-28T10:01:00Z', body: 'Log 2', severity: 'WARN' },
|
|
1081
|
-
{ timestamp: '2026-01-28T10:02:00Z', body: 'Log 3' }, // no severity
|
|
1082
|
-
];
|
|
1083
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1084
|
-
const results = await backend.queryLogs({});
|
|
1085
|
-
assert.strictEqual(results[0].severity, 'CUSTOM');
|
|
1086
|
-
assert.strictEqual(results[1].severity, 'WARN');
|
|
1087
|
-
assert.strictEqual(results[2].severity, 'INFO'); // default
|
|
1088
|
-
});
|
|
1089
|
-
it('should extract instrumentationScope from logs', async () => {
|
|
1090
|
-
const today = getTestDate();
|
|
1091
|
-
const mockLogs = [
|
|
1092
|
-
{
|
|
1093
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
1094
|
-
body: 'HTTP request received',
|
|
1095
|
-
instrumentationScope: {
|
|
1096
|
-
name: '@opentelemetry/instrumentation-http',
|
|
1097
|
-
version: '0.48.0',
|
|
1098
|
-
schemaUrl: 'https://opentelemetry.io/schemas/1.21.0',
|
|
1099
|
-
},
|
|
1100
|
-
},
|
|
1101
|
-
{
|
|
1102
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
1103
|
-
body: 'Custom log from hooks',
|
|
1104
|
-
instrumentationScope: {
|
|
1105
|
-
name: 'custom-hooks',
|
|
1106
|
-
},
|
|
1107
|
-
},
|
|
1108
|
-
{
|
|
1109
|
-
timestamp: '2026-01-28T10:02:00Z',
|
|
1110
|
-
body: 'Log without scope',
|
|
1111
|
-
},
|
|
1112
|
-
];
|
|
1113
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1114
|
-
const results = await backend.queryLogs({});
|
|
1115
|
-
assert.strictEqual(results.length, 3);
|
|
1116
|
-
// First log: full scope
|
|
1117
|
-
assert.strictEqual(results[0].instrumentationScope?.name, '@opentelemetry/instrumentation-http');
|
|
1118
|
-
assert.strictEqual(results[0].instrumentationScope?.version, '0.48.0');
|
|
1119
|
-
assert.strictEqual(results[0].instrumentationScope?.schemaUrl, 'https://opentelemetry.io/schemas/1.21.0');
|
|
1120
|
-
// Second log: name only
|
|
1121
|
-
assert.strictEqual(results[1].instrumentationScope?.name, 'custom-hooks');
|
|
1122
|
-
assert.strictEqual(results[1].instrumentationScope?.version, undefined);
|
|
1123
|
-
// Third log: no scope
|
|
1124
|
-
assert.strictEqual(results[2].instrumentationScope, undefined);
|
|
1125
|
-
});
|
|
1126
|
-
it('should apply limit and offset to log results', async () => {
|
|
1127
|
-
const today = getTestDate();
|
|
1128
|
-
const mockLogs = Array.from({ length: 200 }, (_, i) => ({
|
|
1129
|
-
timestamp: new Date(Date.now() + i * 1000).toISOString(),
|
|
1130
|
-
body: `Log ${i}`,
|
|
1131
|
-
}));
|
|
1132
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1133
|
-
const results = await backend.queryLogs({ limit: 50, offset: 75 });
|
|
1134
|
-
assert.strictEqual(results.length, 50);
|
|
1135
|
-
assert.strictEqual(results[0].body, 'Log 75');
|
|
1136
|
-
});
|
|
1137
|
-
it('should handle empty body field', async () => {
|
|
1138
|
-
const today = getTestDate();
|
|
1139
|
-
const mockLogs = [
|
|
1140
|
-
{ timestamp: '2026-01-28T10:00:00Z', body: 'Normal log' },
|
|
1141
|
-
{ timestamp: '2026-01-28T10:01:00Z' }, // missing body
|
|
1142
|
-
];
|
|
1143
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1144
|
-
const results = await backend.queryLogs({});
|
|
1145
|
-
assert.strictEqual(results.length, 2);
|
|
1146
|
-
assert.strictEqual(results[1].body, '');
|
|
1147
|
-
});
|
|
1148
|
-
it('should set severityNumber based on severity text', async () => {
|
|
1149
|
-
const today = getTestDate();
|
|
1150
|
-
const mockLogs = [
|
|
1151
|
-
{ timestamp: '2026-01-28T10:00:00Z', severityText: 'TRACE', body: 'Trace log' },
|
|
1152
|
-
{ timestamp: '2026-01-28T10:01:00Z', severityText: 'DEBUG', body: 'Debug log' },
|
|
1153
|
-
{ timestamp: '2026-01-28T10:02:00Z', severityText: 'INFO', body: 'Info log' },
|
|
1154
|
-
{ timestamp: '2026-01-28T10:03:00Z', severityText: 'WARN', body: 'Warn log' },
|
|
1155
|
-
{ timestamp: '2026-01-28T10:04:00Z', severityText: 'ERROR', body: 'Error log' },
|
|
1156
|
-
{ timestamp: '2026-01-28T10:05:00Z', severityText: 'FATAL', body: 'Fatal log' },
|
|
1157
|
-
];
|
|
1158
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1159
|
-
const results = await backend.queryLogs({});
|
|
1160
|
-
assert.strictEqual(results.length, 6);
|
|
1161
|
-
assert.strictEqual(results[0].severityNumber, 1); // TRACE
|
|
1162
|
-
assert.strictEqual(results[1].severityNumber, 5); // DEBUG
|
|
1163
|
-
assert.strictEqual(results[2].severityNumber, 9); // INFO
|
|
1164
|
-
assert.strictEqual(results[3].severityNumber, 13); // WARN
|
|
1165
|
-
assert.strictEqual(results[4].severityNumber, 17); // ERROR
|
|
1166
|
-
assert.strictEqual(results[5].severityNumber, 21); // FATAL
|
|
1167
|
-
});
|
|
1168
|
-
it('should handle lowercase severity when setting severityNumber', async () => {
|
|
1169
|
-
const today = getTestDate();
|
|
1170
|
-
const mockLogs = [
|
|
1171
|
-
{ timestamp: '2026-01-28T10:00:00Z', severity: 'error', body: 'Lowercase error' },
|
|
1172
|
-
{ timestamp: '2026-01-28T10:01:00Z', severity: 'warn', body: 'Lowercase warn' },
|
|
1173
|
-
{ timestamp: '2026-01-28T10:02:00Z', severity: 'info', body: 'Lowercase info' },
|
|
1174
|
-
];
|
|
1175
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1176
|
-
const results = await backend.queryLogs({});
|
|
1177
|
-
assert.strictEqual(results.length, 3);
|
|
1178
|
-
assert.strictEqual(results[0].severityNumber, 17); // error -> ERROR -> 17
|
|
1179
|
-
assert.strictEqual(results[1].severityNumber, 13); // warn -> WARN -> 13
|
|
1180
|
-
assert.strictEqual(results[2].severityNumber, 9); // info -> INFO -> 9
|
|
1181
|
-
});
|
|
1182
|
-
it('should set severityNumber to undefined for unknown severity levels', async () => {
|
|
1183
|
-
const today = getTestDate();
|
|
1184
|
-
const mockLogs = [
|
|
1185
|
-
{ timestamp: '2026-01-28T10:00:00Z', severityText: 'CUSTOM', body: 'Custom severity' },
|
|
1186
|
-
{ timestamp: '2026-01-28T10:01:00Z', severityText: 'VERBOSE', body: 'Verbose severity' },
|
|
1187
|
-
];
|
|
1188
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1189
|
-
const results = await backend.queryLogs({});
|
|
1190
|
-
assert.strictEqual(results.length, 2);
|
|
1191
|
-
assert.strictEqual(results[0].severityNumber, undefined);
|
|
1192
|
-
assert.strictEqual(results[1].severityNumber, undefined);
|
|
1193
|
-
});
|
|
1194
|
-
it('should set severityNumber to 9 (INFO) when severity defaults', async () => {
|
|
1195
|
-
const today = getTestDate();
|
|
1196
|
-
const mockLogs = [
|
|
1197
|
-
{ timestamp: '2026-01-28T10:00:00Z', body: 'No severity specified' },
|
|
1198
|
-
];
|
|
1199
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1200
|
-
const results = await backend.queryLogs({});
|
|
1201
|
-
assert.strictEqual(results.length, 1);
|
|
1202
|
-
assert.strictEqual(results[0].severity, 'INFO');
|
|
1203
|
-
assert.strictEqual(results[0].severityNumber, 9);
|
|
1204
|
-
});
|
|
1205
|
-
it('should exclude logs matching excludeSearch', async () => {
|
|
1206
|
-
const today = getTestDate();
|
|
1207
|
-
const mockLogs = [
|
|
1208
|
-
{ timestamp: '2026-01-28T10:00:00Z', body: 'Connection failed' },
|
|
1209
|
-
{ timestamp: '2026-01-28T10:01:00Z', body: 'Request successful' },
|
|
1210
|
-
{ timestamp: '2026-01-28T10:02:00Z', body: 'Connection timeout' },
|
|
1211
|
-
];
|
|
1212
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1213
|
-
const results = await backend.queryLogs({ excludeSearch: 'connection' });
|
|
1214
|
-
assert.strictEqual(results.length, 1);
|
|
1215
|
-
assert.strictEqual(results[0].body, 'Request successful');
|
|
1216
|
-
});
|
|
1217
|
-
it('should combine search with excludeSearch', async () => {
|
|
1218
|
-
const today = getTestDate();
|
|
1219
|
-
const mockLogs = [
|
|
1220
|
-
{ timestamp: '2026-01-28T10:00:00Z', body: 'User login successful' },
|
|
1221
|
-
{ timestamp: '2026-01-28T10:01:00Z', body: 'User login failed' },
|
|
1222
|
-
{ timestamp: '2026-01-28T10:02:00Z', body: 'System startup' },
|
|
1223
|
-
{ timestamp: '2026-01-28T10:03:00Z', body: 'User logout' },
|
|
1224
|
-
];
|
|
1225
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1226
|
-
const results = await backend.queryLogs({
|
|
1227
|
-
search: 'user',
|
|
1228
|
-
excludeSearch: 'failed',
|
|
1229
|
-
});
|
|
1230
|
-
assert.strictEqual(results.length, 2);
|
|
1231
|
-
assert.ok(results.some(l => l.body === 'User login successful'));
|
|
1232
|
-
assert.ok(results.some(l => l.body === 'User logout'));
|
|
1233
|
-
});
|
|
1234
|
-
it('should filter logs by attributeExists - all must exist', async () => {
|
|
1235
|
-
const today = getTestDate();
|
|
1236
|
-
const mockLogs = [
|
|
1237
|
-
{
|
|
1238
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
1239
|
-
body: 'Log 1',
|
|
1240
|
-
attributes: { 'request.id': 'abc', 'user.id': '123' },
|
|
1241
|
-
},
|
|
1242
|
-
{
|
|
1243
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
1244
|
-
body: 'Log 2',
|
|
1245
|
-
attributes: { 'request.id': 'def' }, // missing user.id
|
|
1246
|
-
},
|
|
1247
|
-
{
|
|
1248
|
-
timestamp: '2026-01-28T10:02:00Z',
|
|
1249
|
-
body: 'Log 3',
|
|
1250
|
-
attributes: { 'other.attr': 'value' }, // missing both
|
|
1251
|
-
},
|
|
1252
|
-
];
|
|
1253
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1254
|
-
const results = await backend.queryLogs({
|
|
1255
|
-
attributeExists: ['request.id', 'user.id'],
|
|
1256
|
-
});
|
|
1257
|
-
assert.strictEqual(results.length, 1);
|
|
1258
|
-
assert.strictEqual(results[0].body, 'Log 1');
|
|
1259
|
-
});
|
|
1260
|
-
it('should filter logs by attributeNotExists - exclude if any exist', async () => {
|
|
1261
|
-
const today = getTestDate();
|
|
1262
|
-
const mockLogs = [
|
|
1263
|
-
{
|
|
1264
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
1265
|
-
body: 'Log with error',
|
|
1266
|
-
attributes: { 'request.id': 'abc', 'error.message': 'timeout' },
|
|
1267
|
-
},
|
|
1268
|
-
{
|
|
1269
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
1270
|
-
body: 'Clean log',
|
|
1271
|
-
attributes: { 'request.id': 'def' },
|
|
1272
|
-
},
|
|
1273
|
-
{
|
|
1274
|
-
timestamp: '2026-01-28T10:02:00Z',
|
|
1275
|
-
body: 'Log with exception',
|
|
1276
|
-
attributes: { 'request.id': 'ghi', 'exception.type': 'NullPointer' },
|
|
1277
|
-
},
|
|
1278
|
-
];
|
|
1279
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1280
|
-
const results = await backend.queryLogs({
|
|
1281
|
-
attributeNotExists: ['error.message', 'exception.type'],
|
|
1282
|
-
});
|
|
1283
|
-
assert.strictEqual(results.length, 1);
|
|
1284
|
-
assert.strictEqual(results[0].body, 'Clean log');
|
|
1285
|
-
});
|
|
1286
|
-
it('should combine search with attribute filters', async () => {
|
|
1287
|
-
const today = getTestDate();
|
|
1288
|
-
const mockLogs = [
|
|
1289
|
-
{
|
|
1290
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
1291
|
-
body: 'API request completed',
|
|
1292
|
-
attributes: { 'request.id': 'abc', 'http.status_code': 200 },
|
|
1293
|
-
},
|
|
1294
|
-
{
|
|
1295
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
1296
|
-
body: 'API request failed',
|
|
1297
|
-
attributes: { 'request.id': 'def' }, // missing status_code
|
|
1298
|
-
},
|
|
1299
|
-
{
|
|
1300
|
-
timestamp: '2026-01-28T10:02:00Z',
|
|
1301
|
-
body: 'Database query completed',
|
|
1302
|
-
attributes: { 'request.id': 'ghi', 'http.status_code': 200 },
|
|
1303
|
-
},
|
|
1304
|
-
];
|
|
1305
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1306
|
-
const results = await backend.queryLogs({
|
|
1307
|
-
search: 'API',
|
|
1308
|
-
attributeExists: ['http.status_code'],
|
|
1309
|
-
});
|
|
1310
|
-
assert.strictEqual(results.length, 1);
|
|
1311
|
-
assert.strictEqual(results[0].body, 'API request completed');
|
|
1312
|
-
});
|
|
1313
|
-
it('should filter logs by numericFilter with gt operator', async () => {
|
|
1314
|
-
const today = getTestDate();
|
|
1315
|
-
const mockLogs = [
|
|
1316
|
-
{
|
|
1317
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
1318
|
-
body: 'Request completed',
|
|
1319
|
-
attributes: { 'http.status_code': 200 },
|
|
1320
|
-
},
|
|
1321
|
-
{
|
|
1322
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
1323
|
-
body: 'Request failed',
|
|
1324
|
-
attributes: { 'http.status_code': 500 },
|
|
1325
|
-
},
|
|
1326
|
-
{
|
|
1327
|
-
timestamp: '2026-01-28T10:02:00Z',
|
|
1328
|
-
body: 'Request redirected',
|
|
1329
|
-
attributes: { 'http.status_code': 302 },
|
|
1330
|
-
},
|
|
1331
|
-
];
|
|
1332
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1333
|
-
const results = await backend.queryLogs({
|
|
1334
|
-
numericFilter: [{ attribute: 'http.status_code', operator: 'gt', value: 299 }],
|
|
1335
|
-
});
|
|
1336
|
-
assert.strictEqual(results.length, 2);
|
|
1337
|
-
assert.ok(results.some(l => l.body === 'Request failed'));
|
|
1338
|
-
assert.ok(results.some(l => l.body === 'Request redirected'));
|
|
1339
|
-
});
|
|
1340
|
-
it('should filter logs by numericFilter with lt operator', async () => {
|
|
1341
|
-
const today = getTestDate();
|
|
1342
|
-
const mockLogs = [
|
|
1343
|
-
{
|
|
1344
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
1345
|
-
body: 'Small response',
|
|
1346
|
-
attributes: { 'response.size': 100 },
|
|
1347
|
-
},
|
|
1348
|
-
{
|
|
1349
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
1350
|
-
body: 'Large response',
|
|
1351
|
-
attributes: { 'response.size': 5000 },
|
|
1352
|
-
},
|
|
1353
|
-
];
|
|
1354
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1355
|
-
const results = await backend.queryLogs({
|
|
1356
|
-
numericFilter: [{ attribute: 'response.size', operator: 'lt', value: 1000 }],
|
|
1357
|
-
});
|
|
1358
|
-
assert.strictEqual(results.length, 1);
|
|
1359
|
-
assert.strictEqual(results[0].body, 'Small response');
|
|
1360
|
-
});
|
|
1361
|
-
it('should filter logs by multiple numericFilter conditions', async () => {
|
|
1362
|
-
const today = getTestDate();
|
|
1363
|
-
const mockLogs = [
|
|
1364
|
-
{
|
|
1365
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
1366
|
-
body: 'Fast small response',
|
|
1367
|
-
attributes: { 'response.size': 100, 'response.time_ms': 50 },
|
|
1368
|
-
},
|
|
1369
|
-
{
|
|
1370
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
1371
|
-
body: 'Slow large response',
|
|
1372
|
-
attributes: { 'response.size': 5000, 'response.time_ms': 2000 },
|
|
1373
|
-
},
|
|
1374
|
-
{
|
|
1375
|
-
timestamp: '2026-01-28T10:02:00Z',
|
|
1376
|
-
body: 'Fast large response',
|
|
1377
|
-
attributes: { 'response.size': 5000, 'response.time_ms': 100 },
|
|
1378
|
-
},
|
|
1379
|
-
];
|
|
1380
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1381
|
-
const results = await backend.queryLogs({
|
|
1382
|
-
numericFilter: [
|
|
1383
|
-
{ attribute: 'response.size', operator: 'gte', value: 1000 },
|
|
1384
|
-
{ attribute: 'response.time_ms', operator: 'lte', value: 500 },
|
|
1385
|
-
],
|
|
1386
|
-
});
|
|
1387
|
-
assert.strictEqual(results.length, 1);
|
|
1388
|
-
assert.strictEqual(results[0].body, 'Fast large response');
|
|
1389
|
-
});
|
|
1390
|
-
it('should skip logs when numericFilter attribute is missing or not a number', async () => {
|
|
1391
|
-
const today = getTestDate();
|
|
1392
|
-
const mockLogs = [
|
|
1393
|
-
{
|
|
1394
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
1395
|
-
body: 'Log with numeric',
|
|
1396
|
-
attributes: { 'count': 100 },
|
|
1397
|
-
},
|
|
1398
|
-
{
|
|
1399
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
1400
|
-
body: 'Log with string',
|
|
1401
|
-
attributes: { 'count': 'one hundred' },
|
|
1402
|
-
},
|
|
1403
|
-
{
|
|
1404
|
-
timestamp: '2026-01-28T10:02:00Z',
|
|
1405
|
-
body: 'Log without count',
|
|
1406
|
-
attributes: { 'other': 'value' },
|
|
1407
|
-
},
|
|
1408
|
-
];
|
|
1409
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1410
|
-
const results = await backend.queryLogs({
|
|
1411
|
-
numericFilter: [{ attribute: 'count', operator: 'gte', value: 50 }],
|
|
1412
|
-
});
|
|
1413
|
-
assert.strictEqual(results.length, 1);
|
|
1414
|
-
assert.strictEqual(results[0].body, 'Log with numeric');
|
|
1415
|
-
});
|
|
1416
|
-
it('should combine numericFilter with search and severity', async () => {
|
|
1417
|
-
const today = getTestDate();
|
|
1418
|
-
const mockLogs = [
|
|
1419
|
-
{
|
|
1420
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
1421
|
-
body: 'API Error: rate limit',
|
|
1422
|
-
severity: 'ERROR',
|
|
1423
|
-
attributes: { 'http.status_code': 429, 'retry_count': 3 },
|
|
1424
|
-
},
|
|
1425
|
-
{
|
|
1426
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
1427
|
-
body: 'API Error: server error',
|
|
1428
|
-
severity: 'ERROR',
|
|
1429
|
-
attributes: { 'http.status_code': 500, 'retry_count': 1 },
|
|
1430
|
-
},
|
|
1431
|
-
{
|
|
1432
|
-
timestamp: '2026-01-28T10:02:00Z',
|
|
1433
|
-
body: 'API Error: timeout',
|
|
1434
|
-
severity: 'WARN',
|
|
1435
|
-
attributes: { 'http.status_code': 408, 'retry_count': 5 },
|
|
1436
|
-
},
|
|
1437
|
-
];
|
|
1438
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1439
|
-
const results = await backend.queryLogs({
|
|
1440
|
-
search: 'API Error',
|
|
1441
|
-
severity: 'ERROR',
|
|
1442
|
-
numericFilter: [{ attribute: 'retry_count', operator: 'gt', value: 2 }],
|
|
1443
|
-
});
|
|
1444
|
-
assert.strictEqual(results.length, 1);
|
|
1445
|
-
assert.strictEqual(results[0].body, 'API Error: rate limit');
|
|
1446
|
-
});
|
|
1447
|
-
it('should extract fields from JSON log body using dot notation', async () => {
|
|
1448
|
-
const today = getTestDate();
|
|
1449
|
-
const mockLogs = [
|
|
1450
|
-
{
|
|
1451
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
1452
|
-
body: '{"user":{"id":"user123","name":"Alice"},"request":{"method":"POST","path":"/api/v1/users"}}',
|
|
1453
|
-
},
|
|
1454
|
-
{
|
|
1455
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
1456
|
-
body: '{"user":{"id":"user456"},"request":{"method":"GET"}}',
|
|
1457
|
-
},
|
|
1458
|
-
];
|
|
1459
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1460
|
-
const results = await backend.queryLogs({
|
|
1461
|
-
extractFields: ['user.id', 'request.method'],
|
|
1462
|
-
});
|
|
1463
|
-
assert.strictEqual(results.length, 2);
|
|
1464
|
-
assert.deepStrictEqual(results[0].extractedFields, {
|
|
1465
|
-
'user.id': 'user123',
|
|
1466
|
-
'request.method': 'POST',
|
|
1467
|
-
});
|
|
1468
|
-
assert.deepStrictEqual(results[1].extractedFields, {
|
|
1469
|
-
'user.id': 'user456',
|
|
1470
|
-
'request.method': 'GET',
|
|
1471
|
-
});
|
|
1472
|
-
});
|
|
1473
|
-
it('should handle missing fields gracefully during extraction', async () => {
|
|
1474
|
-
const today = getTestDate();
|
|
1475
|
-
const mockLogs = [
|
|
1476
|
-
{
|
|
1477
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
1478
|
-
body: '{"user":{"id":"user123"},"status":"ok"}',
|
|
1479
|
-
},
|
|
1480
|
-
];
|
|
1481
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1482
|
-
const results = await backend.queryLogs({
|
|
1483
|
-
extractFields: ['user.id', 'user.name', 'nonexistent.path'],
|
|
1484
|
-
});
|
|
1485
|
-
assert.strictEqual(results.length, 1);
|
|
1486
|
-
// Only user.id exists, user.name and nonexistent.path are undefined
|
|
1487
|
-
assert.deepStrictEqual(results[0].extractedFields, {
|
|
1488
|
-
'user.id': 'user123',
|
|
1489
|
-
});
|
|
1490
|
-
});
|
|
1491
|
-
it('should not extract fields from non-JSON log bodies', async () => {
|
|
1492
|
-
const today = getTestDate();
|
|
1493
|
-
const mockLogs = [
|
|
1494
|
-
{
|
|
1495
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
1496
|
-
body: 'Plain text log message',
|
|
1497
|
-
},
|
|
1498
|
-
{
|
|
1499
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
1500
|
-
body: '[2026-01-28] INFO: Starting service',
|
|
1501
|
-
},
|
|
1502
|
-
];
|
|
1503
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1504
|
-
const results = await backend.queryLogs({
|
|
1505
|
-
extractFields: ['user.id', 'message'],
|
|
1506
|
-
});
|
|
1507
|
-
assert.strictEqual(results.length, 2);
|
|
1508
|
-
assert.strictEqual(results[0].extractedFields, undefined);
|
|
1509
|
-
assert.strictEqual(results[1].extractedFields, undefined);
|
|
1510
|
-
});
|
|
1511
|
-
it('should not extract fields when extractFields is empty', async () => {
|
|
1512
|
-
const today = getTestDate();
|
|
1513
|
-
const mockLogs = [
|
|
1514
|
-
{
|
|
1515
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
1516
|
-
body: '{"user":{"id":"user123"}}',
|
|
1517
|
-
},
|
|
1518
|
-
];
|
|
1519
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1520
|
-
const results = await backend.queryLogs({
|
|
1521
|
-
extractFields: [],
|
|
1522
|
-
});
|
|
1523
|
-
assert.strictEqual(results.length, 1);
|
|
1524
|
-
assert.strictEqual(results[0].extractedFields, undefined);
|
|
1525
|
-
});
|
|
1526
|
-
it('should extract deeply nested fields', async () => {
|
|
1527
|
-
const today = getTestDate();
|
|
1528
|
-
const mockLogs = [
|
|
1529
|
-
{
|
|
1530
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
1531
|
-
body: '{"response":{"data":{"items":[1,2,3],"meta":{"count":3}}}}',
|
|
1532
|
-
},
|
|
1533
|
-
];
|
|
1534
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1535
|
-
const results = await backend.queryLogs({
|
|
1536
|
-
extractFields: ['response.data.meta.count', 'response.data.items'],
|
|
1537
|
-
});
|
|
1538
|
-
assert.strictEqual(results.length, 1);
|
|
1539
|
-
assert.deepStrictEqual(results[0].extractedFields, {
|
|
1540
|
-
'response.data.meta.count': 3,
|
|
1541
|
-
'response.data.items': [1, 2, 3],
|
|
1542
|
-
});
|
|
1543
|
-
});
|
|
1544
|
-
it('should handle invalid JSON gracefully during extraction', async () => {
|
|
1545
|
-
const today = getTestDate();
|
|
1546
|
-
const mockLogs = [
|
|
1547
|
-
{
|
|
1548
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
1549
|
-
body: '{invalid json',
|
|
1550
|
-
},
|
|
1551
|
-
];
|
|
1552
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1553
|
-
const results = await backend.queryLogs({
|
|
1554
|
-
extractFields: ['user.id'],
|
|
1555
|
-
});
|
|
1556
|
-
assert.strictEqual(results.length, 1);
|
|
1557
|
-
assert.strictEqual(results[0].extractedFields, undefined);
|
|
1558
|
-
});
|
|
1559
|
-
it('should return undefined extractedFields when no fields match', async () => {
|
|
1560
|
-
const today = getTestDate();
|
|
1561
|
-
const mockLogs = [
|
|
1562
|
-
{
|
|
1563
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
1564
|
-
body: '{"foo":"bar"}',
|
|
1565
|
-
},
|
|
1566
|
-
];
|
|
1567
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
1568
|
-
const results = await backend.queryLogs({
|
|
1569
|
-
extractFields: ['nonexistent.field', 'another.missing'],
|
|
1570
|
-
});
|
|
1571
|
-
assert.strictEqual(results.length, 1);
|
|
1572
|
-
assert.strictEqual(results[0].extractedFields, undefined);
|
|
1573
|
-
});
|
|
1574
|
-
});
|
|
1575
|
-
describe('queryMetrics', () => {
|
|
1576
|
-
it('should read and normalize metric data points from JSONL files', async () => {
|
|
1577
|
-
const today = getTestDate();
|
|
1578
|
-
const mockMetrics = [
|
|
1579
|
-
{
|
|
1580
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
1581
|
-
name: 'http.requests.total',
|
|
1582
|
-
value: 100,
|
|
1583
|
-
type: 'counter',
|
|
1584
|
-
unit: 'requests',
|
|
1585
|
-
resource: { serviceName: 'api-gateway' },
|
|
1586
|
-
attributes: { 'http.method': 'GET', 'http.status_code': 200 },
|
|
1587
|
-
},
|
|
1588
|
-
];
|
|
1589
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
1590
|
-
const results = await backend.queryMetrics({});
|
|
1591
|
-
assert.strictEqual(results.length, 1);
|
|
1592
|
-
assert.strictEqual(results[0].name, 'http.requests.total');
|
|
1593
|
-
assert.strictEqual(results[0].value, 100);
|
|
1594
|
-
assert.strictEqual(results[0].unit, 'requests');
|
|
1595
|
-
assert.strictEqual(results[0].attributes?.['service.name'], 'api-gateway');
|
|
1596
|
-
assert.strictEqual(results[0].attributes?.['http.method'], 'GET');
|
|
1597
|
-
});
|
|
1598
|
-
it('should filter metrics by name substring', async () => {
|
|
1599
|
-
const today = getTestDate();
|
|
1600
|
-
const mockMetrics = [
|
|
1601
|
-
{ timestamp: '2026-01-28T10:00:00Z', name: 'http.requests.total', value: 100, type: 'counter' },
|
|
1602
|
-
{ timestamp: '2026-01-28T10:01:00Z', name: 'http.request.duration', value: 150, type: 'histogram' },
|
|
1603
|
-
{ timestamp: '2026-01-28T10:02:00Z', name: 'memory.usage', value: 512, type: 'gauge' },
|
|
1604
|
-
];
|
|
1605
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
1606
|
-
const results = await backend.queryMetrics({ metricName: 'http' });
|
|
1607
|
-
assert.strictEqual(results.length, 2);
|
|
1608
|
-
assert.ok(results.every(m => m.name.includes('http')));
|
|
1609
|
-
});
|
|
1610
|
-
it('should apply limit and offset to metric results', async () => {
|
|
1611
|
-
const today = getTestDate();
|
|
1612
|
-
const mockMetrics = Array.from({ length: 150 }, (_, i) => ({
|
|
1613
|
-
timestamp: new Date(Date.now() + i * 1000).toISOString(),
|
|
1614
|
-
name: `metric.${i}`,
|
|
1615
|
-
value: i * 10,
|
|
1616
|
-
type: 'gauge',
|
|
1617
|
-
}));
|
|
1618
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
1619
|
-
const results = await backend.queryMetrics({ limit: 50, offset: 30 });
|
|
1620
|
-
assert.strictEqual(results.length, 50);
|
|
1621
|
-
assert.strictEqual(results[0].name, 'metric.30');
|
|
1622
|
-
});
|
|
1623
|
-
it('should aggregate metrics with sum function', async () => {
|
|
1624
|
-
const today = getTestDate();
|
|
1625
|
-
const mockMetrics = [
|
|
1626
|
-
{ timestamp: '2026-01-28T10:00:00Z', name: 'requests', value: 100, type: 'counter' },
|
|
1627
|
-
{ timestamp: '2026-01-28T10:01:00Z', name: 'requests', value: 150, type: 'counter' },
|
|
1628
|
-
{ timestamp: '2026-01-28T10:02:00Z', name: 'requests', value: 200, type: 'counter' },
|
|
1629
|
-
];
|
|
1630
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
1631
|
-
const results = await backend.queryMetrics({ aggregation: 'sum' });
|
|
1632
|
-
assert.strictEqual(results.length, 1);
|
|
1633
|
-
assert.strictEqual(results[0].value, 450);
|
|
1634
|
-
});
|
|
1635
|
-
it('should aggregate metrics with avg function', async () => {
|
|
1636
|
-
const today = getTestDate();
|
|
1637
|
-
const mockMetrics = [
|
|
1638
|
-
{ timestamp: '2026-01-28T10:00:00Z', name: 'latency', value: 100, type: 'histogram' },
|
|
1639
|
-
{ timestamp: '2026-01-28T10:01:00Z', name: 'latency', value: 200, type: 'histogram' },
|
|
1640
|
-
{ timestamp: '2026-01-28T10:02:00Z', name: 'latency', value: 300, type: 'histogram' },
|
|
1641
|
-
];
|
|
1642
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
1643
|
-
const results = await backend.queryMetrics({ aggregation: 'avg' });
|
|
1644
|
-
assert.strictEqual(results.length, 1);
|
|
1645
|
-
assert.strictEqual(results[0].value, 200);
|
|
1646
|
-
});
|
|
1647
|
-
it('should aggregate metrics with min function', async () => {
|
|
1648
|
-
const today = getTestDate();
|
|
1649
|
-
const mockMetrics = [
|
|
1650
|
-
{ timestamp: '2026-01-28T10:00:00Z', name: 'response_time', value: 150, type: 'gauge' },
|
|
1651
|
-
{ timestamp: '2026-01-28T10:01:00Z', name: 'response_time', value: 50, type: 'gauge' },
|
|
1652
|
-
{ timestamp: '2026-01-28T10:02:00Z', name: 'response_time', value: 200, type: 'gauge' },
|
|
1653
|
-
];
|
|
1654
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
1655
|
-
const results = await backend.queryMetrics({ aggregation: 'min' });
|
|
1656
|
-
assert.strictEqual(results.length, 1);
|
|
1657
|
-
assert.strictEqual(results[0].value, 50);
|
|
1658
|
-
});
|
|
1659
|
-
it('should aggregate metrics with max function', async () => {
|
|
1660
|
-
const today = getTestDate();
|
|
1661
|
-
const mockMetrics = [
|
|
1662
|
-
{ timestamp: '2026-01-28T10:00:00Z', name: 'memory', value: 512, type: 'gauge' },
|
|
1663
|
-
{ timestamp: '2026-01-28T10:01:00Z', name: 'memory', value: 256, type: 'gauge' },
|
|
1664
|
-
{ timestamp: '2026-01-28T10:02:00Z', name: 'memory', value: 1024, type: 'gauge' },
|
|
1665
|
-
];
|
|
1666
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
1667
|
-
const results = await backend.queryMetrics({ aggregation: 'max' });
|
|
1668
|
-
assert.strictEqual(results.length, 1);
|
|
1669
|
-
assert.strictEqual(results[0].value, 1024);
|
|
1670
|
-
});
|
|
1671
|
-
it('should aggregate metrics with count function', async () => {
|
|
1672
|
-
const today = getTestDate();
|
|
1673
|
-
const mockMetrics = [
|
|
1674
|
-
{ timestamp: '2026-01-28T10:00:00Z', name: 'events', value: 10, type: 'counter' },
|
|
1675
|
-
{ timestamp: '2026-01-28T10:01:00Z', name: 'events', value: 20, type: 'counter' },
|
|
1676
|
-
{ timestamp: '2026-01-28T10:02:00Z', name: 'events', value: 30, type: 'counter' },
|
|
1677
|
-
];
|
|
1678
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
1679
|
-
const results = await backend.queryMetrics({ aggregation: 'count' });
|
|
1680
|
-
assert.strictEqual(results.length, 1);
|
|
1681
|
-
assert.strictEqual(results[0].value, 3);
|
|
1682
|
-
});
|
|
1683
|
-
it('should aggregate metrics with p50 (median) function', async () => {
|
|
1684
|
-
const today = getTestDate();
|
|
1685
|
-
const mockMetrics = [
|
|
1686
|
-
{ timestamp: '2026-01-28T10:00:00Z', name: 'latency', value: 10, type: 'histogram' },
|
|
1687
|
-
{ timestamp: '2026-01-28T10:01:00Z', name: 'latency', value: 20, type: 'histogram' },
|
|
1688
|
-
{ timestamp: '2026-01-28T10:02:00Z', name: 'latency', value: 30, type: 'histogram' },
|
|
1689
|
-
{ timestamp: '2026-01-28T10:03:00Z', name: 'latency', value: 40, type: 'histogram' },
|
|
1690
|
-
{ timestamp: '2026-01-28T10:04:00Z', name: 'latency', value: 50, type: 'histogram' },
|
|
1691
|
-
];
|
|
1692
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
1693
|
-
const results = await backend.queryMetrics({ aggregation: 'p50' });
|
|
1694
|
-
assert.strictEqual(results.length, 1);
|
|
1695
|
-
assert.strictEqual(results[0].value, 30); // median of [10, 20, 30, 40, 50]
|
|
1696
|
-
});
|
|
1697
|
-
it('should aggregate metrics with p95 function', async () => {
|
|
1698
|
-
const today = getTestDate();
|
|
1699
|
-
// Create 100 data points for a more realistic p95 calculation
|
|
1700
|
-
const mockMetrics = Array.from({ length: 100 }, (_, i) => ({
|
|
1701
|
-
timestamp: `2026-01-28T10:${String(Math.floor(i / 60)).padStart(2, '0')}:${String(i % 60).padStart(2, '0')}Z`,
|
|
1702
|
-
name: 'response_time',
|
|
1703
|
-
value: i + 1, // values 1-100
|
|
1704
|
-
type: 'histogram',
|
|
1705
|
-
}));
|
|
1706
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
1707
|
-
const results = await backend.queryMetrics({ aggregation: 'p95' });
|
|
1708
|
-
assert.strictEqual(results.length, 1);
|
|
1709
|
-
assert.strictEqual(results[0].value, 95); // 95th percentile of 1-100
|
|
1710
|
-
});
|
|
1711
|
-
it('should aggregate metrics with p99 function', async () => {
|
|
1712
|
-
const today = getTestDate();
|
|
1713
|
-
// Create 100 data points for a more realistic p99 calculation
|
|
1714
|
-
const mockMetrics = Array.from({ length: 100 }, (_, i) => ({
|
|
1715
|
-
timestamp: `2026-01-28T10:${String(Math.floor(i / 60)).padStart(2, '0')}:${String(i % 60).padStart(2, '0')}Z`,
|
|
1716
|
-
name: 'response_time',
|
|
1717
|
-
value: i + 1, // values 1-100
|
|
1718
|
-
type: 'histogram',
|
|
1719
|
-
}));
|
|
1720
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
1721
|
-
const results = await backend.queryMetrics({ aggregation: 'p99' });
|
|
1722
|
-
assert.strictEqual(results.length, 1);
|
|
1723
|
-
assert.strictEqual(results[0].value, 99); // 99th percentile of 1-100
|
|
1724
|
-
});
|
|
1725
|
-
it('should handle p50 with even number of values', async () => {
|
|
1726
|
-
const today = getTestDate();
|
|
1727
|
-
const mockMetrics = [
|
|
1728
|
-
{ timestamp: '2026-01-28T10:00:00Z', name: 'latency', value: 10, type: 'histogram' },
|
|
1729
|
-
{ timestamp: '2026-01-28T10:01:00Z', name: 'latency', value: 20, type: 'histogram' },
|
|
1730
|
-
{ timestamp: '2026-01-28T10:02:00Z', name: 'latency', value: 30, type: 'histogram' },
|
|
1731
|
-
{ timestamp: '2026-01-28T10:03:00Z', name: 'latency', value: 40, type: 'histogram' },
|
|
1732
|
-
];
|
|
1733
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
1734
|
-
const results = await backend.queryMetrics({ aggregation: 'p50' });
|
|
1735
|
-
assert.strictEqual(results.length, 1);
|
|
1736
|
-
assert.strictEqual(results[0].value, 20); // ceil(0.5 * 4) - 1 = 1, sorted[1] = 20
|
|
1737
|
-
});
|
|
1738
|
-
it('should handle percentile with single value', async () => {
|
|
1739
|
-
const today = getTestDate();
|
|
1740
|
-
const mockMetrics = [
|
|
1741
|
-
{ timestamp: '2026-01-28T10:00:00Z', name: 'latency', value: 42, type: 'histogram' },
|
|
1742
|
-
];
|
|
1743
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
1744
|
-
const results = await backend.queryMetrics({ aggregation: 'p95' });
|
|
1745
|
-
assert.strictEqual(results.length, 1);
|
|
1746
|
-
assert.strictEqual(results[0].value, 42); // single value is the only percentile
|
|
1747
|
-
});
|
|
1748
|
-
it('should calculate percentiles with groupBy', async () => {
|
|
1749
|
-
const today = getTestDate();
|
|
1750
|
-
const mockMetrics = [
|
|
1751
|
-
{ timestamp: '2026-01-28T10:00:00Z', name: 'latency', value: 10, type: 'histogram', attributes: { endpoint: '/api/users' } },
|
|
1752
|
-
{ timestamp: '2026-01-28T10:01:00Z', name: 'latency', value: 20, type: 'histogram', attributes: { endpoint: '/api/users' } },
|
|
1753
|
-
{ timestamp: '2026-01-28T10:02:00Z', name: 'latency', value: 30, type: 'histogram', attributes: { endpoint: '/api/users' } },
|
|
1754
|
-
{ timestamp: '2026-01-28T10:03:00Z', name: 'latency', value: 100, type: 'histogram', attributes: { endpoint: '/api/orders' } },
|
|
1755
|
-
{ timestamp: '2026-01-28T10:04:00Z', name: 'latency', value: 200, type: 'histogram', attributes: { endpoint: '/api/orders' } },
|
|
1756
|
-
{ timestamp: '2026-01-28T10:05:00Z', name: 'latency', value: 300, type: 'histogram', attributes: { endpoint: '/api/orders' } },
|
|
1757
|
-
];
|
|
1758
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
1759
|
-
const results = await backend.queryMetrics({ aggregation: 'p50', groupBy: ['endpoint'] });
|
|
1760
|
-
assert.strictEqual(results.length, 2);
|
|
1761
|
-
const usersMetric = results.find(m => m.attributes?.endpoint === '/api/users');
|
|
1762
|
-
const ordersMetric = results.find(m => m.attributes?.endpoint === '/api/orders');
|
|
1763
|
-
assert.strictEqual(usersMetric?.value, 20); // median of [10, 20, 30]
|
|
1764
|
-
assert.strictEqual(ordersMetric?.value, 200); // median of [100, 200, 300]
|
|
1765
|
-
});
|
|
1766
|
-
it('should aggregate metrics grouped by attributes', async () => {
|
|
1767
|
-
const today = getTestDate();
|
|
1768
|
-
const mockMetrics = [
|
|
1769
|
-
{
|
|
1770
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
1771
|
-
name: 'http.requests',
|
|
1772
|
-
value: 100,
|
|
1773
|
-
type: 'counter',
|
|
1774
|
-
attributes: { method: 'GET' },
|
|
1775
|
-
},
|
|
1776
|
-
{
|
|
1777
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
1778
|
-
name: 'http.requests',
|
|
1779
|
-
value: 50,
|
|
1780
|
-
type: 'counter',
|
|
1781
|
-
attributes: { method: 'POST' },
|
|
1782
|
-
},
|
|
1783
|
-
{
|
|
1784
|
-
timestamp: '2026-01-28T10:02:00Z',
|
|
1785
|
-
name: 'http.requests',
|
|
1786
|
-
value: 200,
|
|
1787
|
-
type: 'counter',
|
|
1788
|
-
attributes: { method: 'GET' },
|
|
1789
|
-
},
|
|
1790
|
-
];
|
|
1791
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
1792
|
-
const results = await backend.queryMetrics({ aggregation: 'sum', groupBy: ['method'] });
|
|
1793
|
-
assert.strictEqual(results.length, 2);
|
|
1794
|
-
const getMetric = results.find(m => m.attributes?.method === 'GET');
|
|
1795
|
-
const postMetric = results.find(m => m.attributes?.method === 'POST');
|
|
1796
|
-
assert.strictEqual(getMetric?.value, 300);
|
|
1797
|
-
assert.strictEqual(postMetric?.value, 50);
|
|
1798
|
-
});
|
|
1799
|
-
it('should return empty array when no metrics found', async () => {
|
|
1800
|
-
// No files created
|
|
1801
|
-
const results = await backend.queryMetrics({});
|
|
1802
|
-
assert.strictEqual(results.length, 0);
|
|
1803
|
-
});
|
|
1804
|
-
it('should aggregate metrics by time bucket with 1m buckets', async () => {
|
|
1805
|
-
const today = getTestDate();
|
|
1806
|
-
const mockMetrics = [
|
|
1807
|
-
// First minute bucket: 10:00:00 - 10:00:59
|
|
1808
|
-
{ timestamp: `${today}T10:00:00Z`, name: 'requests', value: 10, type: 'counter' },
|
|
1809
|
-
{ timestamp: `${today}T10:00:30Z`, name: 'requests', value: 20, type: 'counter' },
|
|
1810
|
-
{ timestamp: `${today}T10:00:45Z`, name: 'requests', value: 30, type: 'counter' },
|
|
1811
|
-
// Second minute bucket: 10:01:00 - 10:01:59
|
|
1812
|
-
{ timestamp: `${today}T10:01:00Z`, name: 'requests', value: 100, type: 'counter' },
|
|
1813
|
-
{ timestamp: `${today}T10:01:30Z`, name: 'requests', value: 200, type: 'counter' },
|
|
1814
|
-
// Third minute bucket: 10:02:00 - 10:02:59
|
|
1815
|
-
{ timestamp: `${today}T10:02:15Z`, name: 'requests', value: 50, type: 'counter' },
|
|
1816
|
-
];
|
|
1817
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
1818
|
-
const results = await backend.queryMetrics({ aggregation: 'sum', timeBucket: '1m' });
|
|
1819
|
-
assert.strictEqual(results.length, 3);
|
|
1820
|
-
// Results should be sorted by timestamp
|
|
1821
|
-
assert.strictEqual(results[0].value, 60); // 10 + 20 + 30
|
|
1822
|
-
assert.strictEqual(results[1].value, 300); // 100 + 200
|
|
1823
|
-
assert.strictEqual(results[2].value, 50); // 50
|
|
1824
|
-
// Timestamps should be floored to bucket boundaries
|
|
1825
|
-
assert.strictEqual(results[0].timestamp, `${today}T10:00:00.000Z`);
|
|
1826
|
-
assert.strictEqual(results[1].timestamp, `${today}T10:01:00.000Z`);
|
|
1827
|
-
assert.strictEqual(results[2].timestamp, `${today}T10:02:00.000Z`);
|
|
1828
|
-
});
|
|
1829
|
-
it('should aggregate metrics by time bucket with 5m buckets', async () => {
|
|
1830
|
-
const today = getTestDate();
|
|
1831
|
-
const mockMetrics = [
|
|
1832
|
-
// First 5-minute bucket: 10:00:00 - 10:04:59
|
|
1833
|
-
{ timestamp: `${today}T10:00:00Z`, name: 'requests', value: 10, type: 'counter' },
|
|
1834
|
-
{ timestamp: `${today}T10:02:00Z`, name: 'requests', value: 20, type: 'counter' },
|
|
1835
|
-
{ timestamp: `${today}T10:04:00Z`, name: 'requests', value: 30, type: 'counter' },
|
|
1836
|
-
// Second 5-minute bucket: 10:05:00 - 10:09:59
|
|
1837
|
-
{ timestamp: `${today}T10:05:00Z`, name: 'requests', value: 100, type: 'counter' },
|
|
1838
|
-
{ timestamp: `${today}T10:08:00Z`, name: 'requests', value: 200, type: 'counter' },
|
|
1839
|
-
];
|
|
1840
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
1841
|
-
const results = await backend.queryMetrics({ aggregation: 'sum', timeBucket: '5m' });
|
|
1842
|
-
assert.strictEqual(results.length, 2);
|
|
1843
|
-
assert.strictEqual(results[0].value, 60); // 10 + 20 + 30
|
|
1844
|
-
assert.strictEqual(results[1].value, 300); // 100 + 200
|
|
1845
|
-
});
|
|
1846
|
-
it('should aggregate metrics by time bucket with 1h buckets', async () => {
|
|
1847
|
-
const today = getTestDate();
|
|
1848
|
-
const mockMetrics = [
|
|
1849
|
-
// First hour bucket: 10:00:00 - 10:59:59
|
|
1850
|
-
{ timestamp: `${today}T10:00:00Z`, name: 'requests', value: 10, type: 'counter' },
|
|
1851
|
-
{ timestamp: `${today}T10:30:00Z`, name: 'requests', value: 20, type: 'counter' },
|
|
1852
|
-
// Second hour bucket: 11:00:00 - 11:59:59
|
|
1853
|
-
{ timestamp: `${today}T11:00:00Z`, name: 'requests', value: 100, type: 'counter' },
|
|
1854
|
-
{ timestamp: `${today}T11:45:00Z`, name: 'requests', value: 200, type: 'counter' },
|
|
1855
|
-
];
|
|
1856
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
1857
|
-
const results = await backend.queryMetrics({ aggregation: 'sum', timeBucket: '1h' });
|
|
1858
|
-
assert.strictEqual(results.length, 2);
|
|
1859
|
-
assert.strictEqual(results[0].value, 30); // 10 + 20
|
|
1860
|
-
assert.strictEqual(results[1].value, 300); // 100 + 200
|
|
1861
|
-
});
|
|
1862
|
-
it('should aggregate metrics by time bucket with 1d buckets', async () => {
|
|
1863
|
-
// Create metrics files for two days
|
|
1864
|
-
writeJsonlFile(path.join(tempDir, 'metrics-2026-01-28.jsonl'), [
|
|
1865
|
-
{ timestamp: '2026-01-28T10:00:00Z', name: 'requests', value: 100, type: 'counter' },
|
|
1866
|
-
{ timestamp: '2026-01-28T20:00:00Z', name: 'requests', value: 200, type: 'counter' },
|
|
1867
|
-
]);
|
|
1868
|
-
writeJsonlFile(path.join(tempDir, 'metrics-2026-01-29.jsonl'), [
|
|
1869
|
-
{ timestamp: '2026-01-29T08:00:00Z', name: 'requests', value: 300, type: 'counter' },
|
|
1870
|
-
{ timestamp: '2026-01-29T16:00:00Z', name: 'requests', value: 400, type: 'counter' },
|
|
1871
|
-
]);
|
|
1872
|
-
const results = await backend.queryMetrics({
|
|
1873
|
-
aggregation: 'sum',
|
|
1874
|
-
timeBucket: '1d',
|
|
1875
|
-
startDate: '2026-01-28',
|
|
1876
|
-
endDate: '2026-01-29',
|
|
1877
|
-
});
|
|
1878
|
-
assert.strictEqual(results.length, 2);
|
|
1879
|
-
assert.strictEqual(results[0].value, 300); // 100 + 200
|
|
1880
|
-
assert.strictEqual(results[1].value, 700); // 300 + 400
|
|
1881
|
-
});
|
|
1882
|
-
it('should combine time bucket with groupBy', async () => {
|
|
1883
|
-
const today = getTestDate();
|
|
1884
|
-
const mockMetrics = [
|
|
1885
|
-
// First minute, method=GET
|
|
1886
|
-
{ timestamp: `${today}T10:00:00Z`, name: 'requests', value: 10, type: 'counter', attributes: { method: 'GET' } },
|
|
1887
|
-
{ timestamp: `${today}T10:00:30Z`, name: 'requests', value: 20, type: 'counter', attributes: { method: 'GET' } },
|
|
1888
|
-
// First minute, method=POST
|
|
1889
|
-
{ timestamp: `${today}T10:00:15Z`, name: 'requests', value: 5, type: 'counter', attributes: { method: 'POST' } },
|
|
1890
|
-
// Second minute, method=GET
|
|
1891
|
-
{ timestamp: `${today}T10:01:00Z`, name: 'requests', value: 100, type: 'counter', attributes: { method: 'GET' } },
|
|
1892
|
-
// Second minute, method=POST
|
|
1893
|
-
{ timestamp: `${today}T10:01:30Z`, name: 'requests', value: 50, type: 'counter', attributes: { method: 'POST' } },
|
|
1894
|
-
];
|
|
1895
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
1896
|
-
const results = await backend.queryMetrics({
|
|
1897
|
-
aggregation: 'sum',
|
|
1898
|
-
timeBucket: '1m',
|
|
1899
|
-
groupBy: ['method'],
|
|
1900
|
-
});
|
|
1901
|
-
assert.strictEqual(results.length, 4);
|
|
1902
|
-
// First bucket GET
|
|
1903
|
-
const bucket1GET = results.find(m => m.timestamp === `${today}T10:00:00.000Z` && m.attributes?.method === 'GET');
|
|
1904
|
-
assert.strictEqual(bucket1GET?.value, 30); // 10 + 20
|
|
1905
|
-
// First bucket POST
|
|
1906
|
-
const bucket1POST = results.find(m => m.timestamp === `${today}T10:00:00.000Z` && m.attributes?.method === 'POST');
|
|
1907
|
-
assert.strictEqual(bucket1POST?.value, 5);
|
|
1908
|
-
// Second bucket GET
|
|
1909
|
-
const bucket2GET = results.find(m => m.timestamp === `${today}T10:01:00.000Z` && m.attributes?.method === 'GET');
|
|
1910
|
-
assert.strictEqual(bucket2GET?.value, 100);
|
|
1911
|
-
// Second bucket POST
|
|
1912
|
-
const bucket2POST = results.find(m => m.timestamp === `${today}T10:01:00.000Z` && m.attributes?.method === 'POST');
|
|
1913
|
-
assert.strictEqual(bucket2POST?.value, 50);
|
|
1914
|
-
});
|
|
1915
|
-
it('should use avg aggregation with time buckets', async () => {
|
|
1916
|
-
const today = getTestDate();
|
|
1917
|
-
const mockMetrics = [
|
|
1918
|
-
// First minute bucket
|
|
1919
|
-
{ timestamp: `${today}T10:00:00Z`, name: 'latency', value: 100, type: 'histogram' },
|
|
1920
|
-
{ timestamp: `${today}T10:00:30Z`, name: 'latency', value: 200, type: 'histogram' },
|
|
1921
|
-
{ timestamp: `${today}T10:00:45Z`, name: 'latency', value: 300, type: 'histogram' },
|
|
1922
|
-
// Second minute bucket
|
|
1923
|
-
{ timestamp: `${today}T10:01:00Z`, name: 'latency', value: 500, type: 'histogram' },
|
|
1924
|
-
{ timestamp: `${today}T10:01:30Z`, name: 'latency', value: 700, type: 'histogram' },
|
|
1925
|
-
];
|
|
1926
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
1927
|
-
const results = await backend.queryMetrics({ aggregation: 'avg', timeBucket: '1m' });
|
|
1928
|
-
assert.strictEqual(results.length, 2);
|
|
1929
|
-
assert.strictEqual(results[0].value, 200); // (100 + 200 + 300) / 3
|
|
1930
|
-
assert.strictEqual(results[1].value, 600); // (500 + 700) / 2
|
|
1931
|
-
});
|
|
1932
|
-
it('should ignore invalid time bucket format', async () => {
|
|
1933
|
-
const today = getTestDate();
|
|
1934
|
-
const mockMetrics = [
|
|
1935
|
-
{ timestamp: `${today}T10:00:00Z`, name: 'requests', value: 10, type: 'counter' },
|
|
1936
|
-
{ timestamp: `${today}T10:00:30Z`, name: 'requests', value: 20, type: 'counter' },
|
|
1937
|
-
{ timestamp: `${today}T10:01:00Z`, name: 'requests', value: 30, type: 'counter' },
|
|
1938
|
-
];
|
|
1939
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
1940
|
-
// Invalid format - should be ignored and aggregate all together
|
|
1941
|
-
const results = await backend.queryMetrics({ aggregation: 'sum', timeBucket: 'invalid' });
|
|
1942
|
-
assert.strictEqual(results.length, 1);
|
|
1943
|
-
assert.strictEqual(results[0].value, 60); // All grouped together
|
|
1944
|
-
});
|
|
1945
|
-
it('should calculate rate of change per second', async () => {
|
|
1946
|
-
const today = getTestDate();
|
|
1947
|
-
const mockMetrics = [
|
|
1948
|
-
{ timestamp: `${today}T10:00:00Z`, name: 'requests', value: 100, type: 'counter' },
|
|
1949
|
-
{ timestamp: `${today}T10:00:30Z`, name: 'requests', value: 200, type: 'counter' },
|
|
1950
|
-
{ timestamp: `${today}T10:01:00Z`, name: 'requests', value: 400, type: 'counter' },
|
|
1951
|
-
];
|
|
1952
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
1953
|
-
const results = await backend.queryMetrics({ aggregation: 'rate' });
|
|
1954
|
-
assert.strictEqual(results.length, 1);
|
|
1955
|
-
// Rate = (400 - 100) / 60 seconds = 5 per second
|
|
1956
|
-
assert.strictEqual(results[0].value, 5);
|
|
1957
|
-
});
|
|
1958
|
-
it('should calculate rate with timeBucket', async () => {
|
|
1959
|
-
const today = getTestDate();
|
|
1960
|
-
const mockMetrics = [
|
|
1961
|
-
// First minute bucket: 10:00:00 - 10:00:59
|
|
1962
|
-
{ timestamp: `${today}T10:00:00Z`, name: 'requests', value: 0, type: 'counter' },
|
|
1963
|
-
{ timestamp: `${today}T10:00:30Z`, name: 'requests', value: 60, type: 'counter' },
|
|
1964
|
-
// Second minute bucket: 10:01:00 - 10:01:59
|
|
1965
|
-
{ timestamp: `${today}T10:01:00Z`, name: 'requests', value: 100, type: 'counter' },
|
|
1966
|
-
{ timestamp: `${today}T10:01:30Z`, name: 'requests', value: 250, type: 'counter' },
|
|
1967
|
-
];
|
|
1968
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
1969
|
-
const results = await backend.queryMetrics({ aggregation: 'rate', timeBucket: '1m' });
|
|
1970
|
-
assert.strictEqual(results.length, 2);
|
|
1971
|
-
// First bucket: (60 - 0) / 30 seconds = 2 per second
|
|
1972
|
-
assert.strictEqual(results[0].value, 2);
|
|
1973
|
-
// Second bucket: (250 - 100) / 30 seconds = 5 per second
|
|
1974
|
-
assert.strictEqual(results[1].value, 5);
|
|
1975
|
-
});
|
|
1976
|
-
it('should return rate of 0 for single data point', async () => {
|
|
1977
|
-
const today = getTestDate();
|
|
1978
|
-
const mockMetrics = [
|
|
1979
|
-
{ timestamp: `${today}T10:00:00Z`, name: 'requests', value: 100, type: 'counter' },
|
|
1980
|
-
];
|
|
1981
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
1982
|
-
const results = await backend.queryMetrics({ aggregation: 'rate' });
|
|
1983
|
-
assert.strictEqual(results.length, 1);
|
|
1984
|
-
assert.strictEqual(results[0].value, 0);
|
|
1985
|
-
});
|
|
1986
|
-
it('should return rate of 0 when all timestamps are the same', async () => {
|
|
1987
|
-
const today = getTestDate();
|
|
1988
|
-
const mockMetrics = [
|
|
1989
|
-
{ timestamp: `${today}T10:00:00Z`, name: 'requests', value: 100, type: 'counter' },
|
|
1990
|
-
{ timestamp: `${today}T10:00:00Z`, name: 'requests', value: 200, type: 'counter' },
|
|
1991
|
-
{ timestamp: `${today}T10:00:00Z`, name: 'requests', value: 300, type: 'counter' },
|
|
1992
|
-
];
|
|
1993
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
1994
|
-
const results = await backend.queryMetrics({ aggregation: 'rate' });
|
|
1995
|
-
assert.strictEqual(results.length, 1);
|
|
1996
|
-
// Avoid division by zero - return 0 when duration is 0
|
|
1997
|
-
assert.strictEqual(results[0].value, 0);
|
|
1998
|
-
});
|
|
1999
|
-
it('should read and normalize histogram metrics with bucket distribution', async () => {
|
|
2000
|
-
const today = getTestDate();
|
|
2001
|
-
const mockMetrics = [
|
|
2002
|
-
{
|
|
2003
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2004
|
-
name: 'http.request.duration',
|
|
2005
|
-
value: 150, // typically the sum/count average or similar aggregate
|
|
2006
|
-
type: 'histogram',
|
|
2007
|
-
unit: 'ms',
|
|
2008
|
-
resource: { serviceName: 'api-gateway' },
|
|
2009
|
-
attributes: { 'http.method': 'GET' },
|
|
2010
|
-
histogram: {
|
|
2011
|
-
buckets: [
|
|
2012
|
-
{ le: 50, count: 10 },
|
|
2013
|
-
{ le: 100, count: 25 },
|
|
2014
|
-
{ le: 250, count: 45 },
|
|
2015
|
-
{ le: 500, count: 48 },
|
|
2016
|
-
{ le: Infinity, count: 50 },
|
|
2017
|
-
],
|
|
2018
|
-
sum: 7500,
|
|
2019
|
-
count: 50,
|
|
2020
|
-
},
|
|
2021
|
-
},
|
|
2022
|
-
];
|
|
2023
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
2024
|
-
const results = await backend.queryMetrics({});
|
|
2025
|
-
assert.strictEqual(results.length, 1);
|
|
2026
|
-
assert.strictEqual(results[0].name, 'http.request.duration');
|
|
2027
|
-
assert.strictEqual(results[0].value, 150);
|
|
2028
|
-
assert.strictEqual(results[0].unit, 'ms');
|
|
2029
|
-
assert.strictEqual(results[0].attributes?.['service.name'], 'api-gateway');
|
|
2030
|
-
assert.strictEqual(results[0].attributes?.['http.method'], 'GET');
|
|
2031
|
-
// Verify histogram data is present
|
|
2032
|
-
assert.ok(results[0].histogram, 'Histogram data should be present');
|
|
2033
|
-
assert.strictEqual(results[0].histogram?.sum, 7500);
|
|
2034
|
-
assert.strictEqual(results[0].histogram?.count, 50);
|
|
2035
|
-
assert.strictEqual(results[0].histogram?.buckets.length, 5);
|
|
2036
|
-
// Verify bucket boundaries and cumulative counts
|
|
2037
|
-
assert.strictEqual(results[0].histogram?.buckets[0].le, 50);
|
|
2038
|
-
assert.strictEqual(results[0].histogram?.buckets[0].count, 10);
|
|
2039
|
-
assert.strictEqual(results[0].histogram?.buckets[1].le, 100);
|
|
2040
|
-
assert.strictEqual(results[0].histogram?.buckets[1].count, 25);
|
|
2041
|
-
assert.strictEqual(results[0].histogram?.buckets[2].le, 250);
|
|
2042
|
-
assert.strictEqual(results[0].histogram?.buckets[2].count, 45);
|
|
2043
|
-
});
|
|
2044
|
-
it('should handle histogram metrics without histogram data (non-histogram type)', async () => {
|
|
2045
|
-
const today = getTestDate();
|
|
2046
|
-
const mockMetrics = [
|
|
2047
|
-
{
|
|
2048
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2049
|
-
name: 'http.requests.total',
|
|
2050
|
-
value: 100,
|
|
2051
|
-
type: 'counter',
|
|
2052
|
-
unit: 'requests',
|
|
2053
|
-
},
|
|
2054
|
-
];
|
|
2055
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
2056
|
-
const results = await backend.queryMetrics({});
|
|
2057
|
-
assert.strictEqual(results.length, 1);
|
|
2058
|
-
assert.strictEqual(results[0].name, 'http.requests.total');
|
|
2059
|
-
assert.strictEqual(results[0].value, 100);
|
|
2060
|
-
assert.strictEqual(results[0].histogram, undefined);
|
|
2061
|
-
});
|
|
2062
|
-
it('should handle mixed metric types including histograms', async () => {
|
|
2063
|
-
const today = getTestDate();
|
|
2064
|
-
const mockMetrics = [
|
|
2065
|
-
{
|
|
2066
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2067
|
-
name: 'http.requests.total',
|
|
2068
|
-
value: 100,
|
|
2069
|
-
type: 'counter',
|
|
2070
|
-
},
|
|
2071
|
-
{
|
|
2072
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2073
|
-
name: 'http.request.duration',
|
|
2074
|
-
value: 150,
|
|
2075
|
-
type: 'histogram',
|
|
2076
|
-
histogram: {
|
|
2077
|
-
buckets: [
|
|
2078
|
-
{ le: 100, count: 20 },
|
|
2079
|
-
{ le: 500, count: 80 },
|
|
2080
|
-
{ le: Infinity, count: 100 },
|
|
2081
|
-
],
|
|
2082
|
-
sum: 15000,
|
|
2083
|
-
count: 100,
|
|
2084
|
-
},
|
|
2085
|
-
},
|
|
2086
|
-
{
|
|
2087
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2088
|
-
name: 'memory.usage',
|
|
2089
|
-
value: 512,
|
|
2090
|
-
type: 'gauge',
|
|
2091
|
-
},
|
|
2092
|
-
];
|
|
2093
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
2094
|
-
const results = await backend.queryMetrics({});
|
|
2095
|
-
assert.strictEqual(results.length, 3);
|
|
2096
|
-
const counter = results.find(m => m.name === 'http.requests.total');
|
|
2097
|
-
const histogram = results.find(m => m.name === 'http.request.duration');
|
|
2098
|
-
const gauge = results.find(m => m.name === 'memory.usage');
|
|
2099
|
-
assert.ok(counter, 'Counter metric should be present');
|
|
2100
|
-
assert.strictEqual(counter?.histogram, undefined);
|
|
2101
|
-
assert.ok(histogram, 'Histogram metric should be present');
|
|
2102
|
-
assert.ok(histogram?.histogram, 'Histogram should have histogram data');
|
|
2103
|
-
assert.strictEqual(histogram?.histogram?.count, 100);
|
|
2104
|
-
assert.ok(gauge, 'Gauge metric should be present');
|
|
2105
|
-
assert.strictEqual(gauge?.histogram, undefined);
|
|
2106
|
-
});
|
|
2107
|
-
it('should preserve histogram data when filtering by metric name', async () => {
|
|
2108
|
-
const today = getTestDate();
|
|
2109
|
-
const mockMetrics = [
|
|
2110
|
-
{
|
|
2111
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2112
|
-
name: 'api.latency',
|
|
2113
|
-
value: 200,
|
|
2114
|
-
type: 'histogram',
|
|
2115
|
-
histogram: {
|
|
2116
|
-
buckets: [
|
|
2117
|
-
{ le: 100, count: 5 },
|
|
2118
|
-
{ le: 500, count: 15 },
|
|
2119
|
-
{ le: 1000, count: 20 },
|
|
2120
|
-
],
|
|
2121
|
-
sum: 4000,
|
|
2122
|
-
count: 20,
|
|
2123
|
-
},
|
|
2124
|
-
},
|
|
2125
|
-
{
|
|
2126
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
2127
|
-
name: 'db.query.duration',
|
|
2128
|
-
value: 50,
|
|
2129
|
-
type: 'histogram',
|
|
2130
|
-
histogram: {
|
|
2131
|
-
buckets: [
|
|
2132
|
-
{ le: 10, count: 30 },
|
|
2133
|
-
{ le: 50, count: 80 },
|
|
2134
|
-
{ le: 100, count: 100 },
|
|
2135
|
-
],
|
|
2136
|
-
sum: 3500,
|
|
2137
|
-
count: 100,
|
|
2138
|
-
},
|
|
2139
|
-
},
|
|
2140
|
-
];
|
|
2141
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
2142
|
-
const results = await backend.queryMetrics({ metricName: 'api.latency' });
|
|
2143
|
-
assert.strictEqual(results.length, 1);
|
|
2144
|
-
assert.strictEqual(results[0].name, 'api.latency');
|
|
2145
|
-
assert.ok(results[0].histogram);
|
|
2146
|
-
assert.strictEqual(results[0].histogram?.sum, 4000);
|
|
2147
|
-
assert.strictEqual(results[0].histogram?.count, 20);
|
|
2148
|
-
assert.strictEqual(results[0].histogram?.buckets.length, 3);
|
|
2149
|
-
});
|
|
2150
|
-
it('should ignore histogram field when metric type is not histogram', async () => {
|
|
2151
|
-
const today = getTestDate();
|
|
2152
|
-
// Edge case: a metric that has histogram field but type is not 'histogram'
|
|
2153
|
-
const mockMetrics = [
|
|
2154
|
-
{
|
|
2155
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2156
|
-
name: 'malformed.metric',
|
|
2157
|
-
value: 100,
|
|
2158
|
-
type: 'gauge', // Not histogram type
|
|
2159
|
-
histogram: {
|
|
2160
|
-
// This should be ignored since type != 'histogram'
|
|
2161
|
-
buckets: [{ le: 100, count: 10 }],
|
|
2162
|
-
sum: 500,
|
|
2163
|
-
count: 10,
|
|
2164
|
-
},
|
|
2165
|
-
},
|
|
2166
|
-
];
|
|
2167
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
2168
|
-
const results = await backend.queryMetrics({});
|
|
2169
|
-
assert.strictEqual(results.length, 1);
|
|
2170
|
-
assert.strictEqual(results[0].name, 'malformed.metric');
|
|
2171
|
-
assert.strictEqual(results[0].value, 100);
|
|
2172
|
-
// Histogram data should NOT be included since type is 'gauge', not 'histogram'
|
|
2173
|
-
assert.strictEqual(results[0].histogram, undefined);
|
|
2174
|
-
});
|
|
2175
|
-
it('should normalize aggregationTemporality from numeric OTel values', async () => {
|
|
2176
|
-
const today = getTestDate();
|
|
2177
|
-
const mockMetrics = [
|
|
2178
|
-
{
|
|
2179
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2180
|
-
name: 'http.requests.delta',
|
|
2181
|
-
value: 100,
|
|
2182
|
-
type: 'counter',
|
|
2183
|
-
aggregationTemporality: 1, // DELTA
|
|
2184
|
-
},
|
|
2185
|
-
{
|
|
2186
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
2187
|
-
name: 'http.requests.cumulative',
|
|
2188
|
-
value: 500,
|
|
2189
|
-
type: 'counter',
|
|
2190
|
-
aggregationTemporality: 2, // CUMULATIVE
|
|
2191
|
-
},
|
|
2192
|
-
{
|
|
2193
|
-
timestamp: '2026-01-28T10:02:00Z',
|
|
2194
|
-
name: 'http.requests.unspecified',
|
|
2195
|
-
value: 50,
|
|
2196
|
-
type: 'counter',
|
|
2197
|
-
aggregationTemporality: 0, // UNSPECIFIED
|
|
2198
|
-
},
|
|
2199
|
-
];
|
|
2200
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
2201
|
-
const results = await backend.queryMetrics({});
|
|
2202
|
-
assert.strictEqual(results.length, 3);
|
|
2203
|
-
const deltaMetric = results.find(m => m.name === 'http.requests.delta');
|
|
2204
|
-
const cumulativeMetric = results.find(m => m.name === 'http.requests.cumulative');
|
|
2205
|
-
const unspecifiedMetric = results.find(m => m.name === 'http.requests.unspecified');
|
|
2206
|
-
assert.strictEqual(deltaMetric?.aggregationTemporality, 'DELTA');
|
|
2207
|
-
assert.strictEqual(cumulativeMetric?.aggregationTemporality, 'CUMULATIVE');
|
|
2208
|
-
assert.strictEqual(unspecifiedMetric?.aggregationTemporality, 'UNSPECIFIED');
|
|
2209
|
-
});
|
|
2210
|
-
it('should normalize aggregationTemporality from string values', async () => {
|
|
2211
|
-
const today = getTestDate();
|
|
2212
|
-
const mockMetrics = [
|
|
2213
|
-
{
|
|
2214
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2215
|
-
name: 'requests.delta',
|
|
2216
|
-
value: 100,
|
|
2217
|
-
type: 'counter',
|
|
2218
|
-
aggregationTemporality: 'delta', // lowercase
|
|
2219
|
-
},
|
|
2220
|
-
{
|
|
2221
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
2222
|
-
name: 'requests.cumulative',
|
|
2223
|
-
value: 500,
|
|
2224
|
-
type: 'counter',
|
|
2225
|
-
aggregationTemporality: 'CUMULATIVE', // uppercase
|
|
2226
|
-
},
|
|
2227
|
-
{
|
|
2228
|
-
timestamp: '2026-01-28T10:02:00Z',
|
|
2229
|
-
name: 'requests.unspecified',
|
|
2230
|
-
value: 50,
|
|
2231
|
-
type: 'counter',
|
|
2232
|
-
aggregationTemporality: 'Unspecified', // mixed case
|
|
2233
|
-
},
|
|
2234
|
-
];
|
|
2235
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
2236
|
-
const results = await backend.queryMetrics({});
|
|
2237
|
-
assert.strictEqual(results.length, 3);
|
|
2238
|
-
const deltaMetric = results.find(m => m.name === 'requests.delta');
|
|
2239
|
-
const cumulativeMetric = results.find(m => m.name === 'requests.cumulative');
|
|
2240
|
-
const unspecifiedMetric = results.find(m => m.name === 'requests.unspecified');
|
|
2241
|
-
assert.strictEqual(deltaMetric?.aggregationTemporality, 'DELTA');
|
|
2242
|
-
assert.strictEqual(cumulativeMetric?.aggregationTemporality, 'CUMULATIVE');
|
|
2243
|
-
assert.strictEqual(unspecifiedMetric?.aggregationTemporality, 'UNSPECIFIED');
|
|
2244
|
-
});
|
|
2245
|
-
it('should return undefined aggregationTemporality when not provided', async () => {
|
|
2246
|
-
const today = getTestDate();
|
|
2247
|
-
const mockMetrics = [
|
|
2248
|
-
{
|
|
2249
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2250
|
-
name: 'gauge.metric',
|
|
2251
|
-
value: 42,
|
|
2252
|
-
type: 'gauge',
|
|
2253
|
-
// No aggregationTemporality field
|
|
2254
|
-
},
|
|
2255
|
-
];
|
|
2256
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
2257
|
-
const results = await backend.queryMetrics({});
|
|
2258
|
-
assert.strictEqual(results.length, 1);
|
|
2259
|
-
assert.strictEqual(results[0].aggregationTemporality, undefined);
|
|
2260
|
-
});
|
|
2261
|
-
it('should handle invalid aggregationTemporality string values', async () => {
|
|
2262
|
-
const today = getTestDate();
|
|
2263
|
-
const mockMetrics = [
|
|
2264
|
-
{
|
|
2265
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2266
|
-
name: 'metric.invalid',
|
|
2267
|
-
value: 100,
|
|
2268
|
-
type: 'counter',
|
|
2269
|
-
aggregationTemporality: 'invalid_value',
|
|
2270
|
-
},
|
|
2271
|
-
];
|
|
2272
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
2273
|
-
const results = await backend.queryMetrics({});
|
|
2274
|
-
assert.strictEqual(results.length, 1);
|
|
2275
|
-
// Invalid values should normalize to UNSPECIFIED
|
|
2276
|
-
assert.strictEqual(results[0].aggregationTemporality, 'UNSPECIFIED');
|
|
2277
|
-
});
|
|
2278
|
-
it('should handle unknown numeric aggregationTemporality values', async () => {
|
|
2279
|
-
const today = getTestDate();
|
|
2280
|
-
const mockMetrics = [
|
|
2281
|
-
{
|
|
2282
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2283
|
-
name: 'metric.unknown',
|
|
2284
|
-
value: 100,
|
|
2285
|
-
type: 'counter',
|
|
2286
|
-
aggregationTemporality: 99, // Unknown numeric value
|
|
2287
|
-
},
|
|
2288
|
-
];
|
|
2289
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
2290
|
-
const results = await backend.queryMetrics({});
|
|
2291
|
-
assert.strictEqual(results.length, 1);
|
|
2292
|
-
// Unknown numeric values should normalize to UNSPECIFIED
|
|
2293
|
-
assert.strictEqual(results[0].aggregationTemporality, 'UNSPECIFIED');
|
|
2294
|
-
});
|
|
2295
|
-
it('should read and normalize metrics with exemplars', async () => {
|
|
2296
|
-
const today = getTestDate();
|
|
2297
|
-
const mockMetrics = [
|
|
2298
|
-
{
|
|
2299
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2300
|
-
name: 'http.request.duration',
|
|
2301
|
-
value: 150,
|
|
2302
|
-
type: 'histogram',
|
|
2303
|
-
unit: 'ms',
|
|
2304
|
-
histogram: {
|
|
2305
|
-
buckets: [
|
|
2306
|
-
{ le: 100, count: 10 },
|
|
2307
|
-
{ le: 500, count: 45 },
|
|
2308
|
-
{ le: Infinity, count: 50 },
|
|
2309
|
-
],
|
|
2310
|
-
sum: 7500,
|
|
2311
|
-
count: 50,
|
|
2312
|
-
},
|
|
2313
|
-
exemplars: [
|
|
2314
|
-
{
|
|
2315
|
-
timestamp: '2026-01-28T10:00:00.123Z',
|
|
2316
|
-
value: 450,
|
|
2317
|
-
traceId: 'abc123def456',
|
|
2318
|
-
spanId: 'span789',
|
|
2319
|
-
attributes: { 'http.status_code': 500 },
|
|
2320
|
-
},
|
|
2321
|
-
{
|
|
2322
|
-
timestamp: '2026-01-28T10:00:00.456Z',
|
|
2323
|
-
value: 95,
|
|
2324
|
-
traceId: 'xyz789abc123',
|
|
2325
|
-
spanId: 'span456',
|
|
2326
|
-
},
|
|
2327
|
-
],
|
|
2328
|
-
},
|
|
2329
|
-
];
|
|
2330
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
2331
|
-
const results = await backend.queryMetrics({});
|
|
2332
|
-
assert.strictEqual(results.length, 1);
|
|
2333
|
-
assert.strictEqual(results[0].name, 'http.request.duration');
|
|
2334
|
-
assert.ok(results[0].exemplars, 'Exemplars should be present');
|
|
2335
|
-
assert.strictEqual(results[0].exemplars?.length, 2);
|
|
2336
|
-
// Verify first exemplar (high latency with error)
|
|
2337
|
-
const highLatencyExemplar = results[0].exemplars?.[0];
|
|
2338
|
-
assert.strictEqual(highLatencyExemplar?.value, 450);
|
|
2339
|
-
assert.strictEqual(highLatencyExemplar?.traceId, 'abc123def456');
|
|
2340
|
-
assert.strictEqual(highLatencyExemplar?.spanId, 'span789');
|
|
2341
|
-
assert.strictEqual(highLatencyExemplar?.attributes?.['http.status_code'], 500);
|
|
2342
|
-
// Verify second exemplar
|
|
2343
|
-
const normalExemplar = results[0].exemplars?.[1];
|
|
2344
|
-
assert.strictEqual(normalExemplar?.value, 95);
|
|
2345
|
-
assert.strictEqual(normalExemplar?.traceId, 'xyz789abc123');
|
|
2346
|
-
});
|
|
2347
|
-
it('should normalize exemplar timestamps from [seconds, nanoseconds] format', async () => {
|
|
2348
|
-
const today = getTestDate();
|
|
2349
|
-
const mockMetrics = [
|
|
2350
|
-
{
|
|
2351
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2352
|
-
name: 'api.latency',
|
|
2353
|
-
value: 200,
|
|
2354
|
-
type: 'histogram',
|
|
2355
|
-
exemplars: [
|
|
2356
|
-
{
|
|
2357
|
-
timestamp: [1738062000, 123000000], // [seconds, nanoseconds]
|
|
2358
|
-
value: 350,
|
|
2359
|
-
traceId: 'trace123',
|
|
2360
|
-
spanId: 'span456',
|
|
2361
|
-
},
|
|
2362
|
-
],
|
|
2363
|
-
},
|
|
2364
|
-
];
|
|
2365
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
2366
|
-
const results = await backend.queryMetrics({});
|
|
2367
|
-
assert.strictEqual(results.length, 1);
|
|
2368
|
-
assert.ok(results[0].exemplars);
|
|
2369
|
-
assert.strictEqual(results[0].exemplars?.length, 1);
|
|
2370
|
-
// Timestamp should be converted to ISO string
|
|
2371
|
-
assert.ok(results[0].exemplars?.[0].timestamp.includes('T'));
|
|
2372
|
-
assert.strictEqual(results[0].exemplars?.[0].value, 350);
|
|
2373
|
-
});
|
|
2374
|
-
it('should handle exemplars without optional fields', async () => {
|
|
2375
|
-
const today = getTestDate();
|
|
2376
|
-
const mockMetrics = [
|
|
2377
|
-
{
|
|
2378
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2379
|
-
name: 'counter.metric',
|
|
2380
|
-
value: 100,
|
|
2381
|
-
type: 'counter',
|
|
2382
|
-
exemplars: [
|
|
2383
|
-
{
|
|
2384
|
-
value: 1, // Only required field
|
|
2385
|
-
},
|
|
2386
|
-
],
|
|
2387
|
-
},
|
|
2388
|
-
];
|
|
2389
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
2390
|
-
const results = await backend.queryMetrics({});
|
|
2391
|
-
assert.strictEqual(results.length, 1);
|
|
2392
|
-
assert.ok(results[0].exemplars);
|
|
2393
|
-
assert.strictEqual(results[0].exemplars?.length, 1);
|
|
2394
|
-
assert.strictEqual(results[0].exemplars?.[0].value, 1);
|
|
2395
|
-
// Timestamp should default to metric timestamp
|
|
2396
|
-
assert.strictEqual(results[0].exemplars?.[0].timestamp, '2026-01-28T10:00:00Z');
|
|
2397
|
-
assert.strictEqual(results[0].exemplars?.[0].traceId, undefined);
|
|
2398
|
-
assert.strictEqual(results[0].exemplars?.[0].spanId, undefined);
|
|
2399
|
-
});
|
|
2400
|
-
it('should handle metrics without exemplars', async () => {
|
|
2401
|
-
const today = getTestDate();
|
|
2402
|
-
const mockMetrics = [
|
|
2403
|
-
{
|
|
2404
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2405
|
-
name: 'simple.counter',
|
|
2406
|
-
value: 42,
|
|
2407
|
-
type: 'counter',
|
|
2408
|
-
// No exemplars field
|
|
2409
|
-
},
|
|
2410
|
-
];
|
|
2411
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
2412
|
-
const results = await backend.queryMetrics({});
|
|
2413
|
-
assert.strictEqual(results.length, 1);
|
|
2414
|
-
assert.strictEqual(results[0].exemplars, undefined);
|
|
2415
|
-
});
|
|
2416
|
-
it('should handle empty exemplars array', async () => {
|
|
2417
|
-
const today = getTestDate();
|
|
2418
|
-
const mockMetrics = [
|
|
2419
|
-
{
|
|
2420
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2421
|
-
name: 'empty.exemplars',
|
|
2422
|
-
value: 100,
|
|
2423
|
-
type: 'counter',
|
|
2424
|
-
exemplars: [], // Empty array
|
|
2425
|
-
},
|
|
2426
|
-
];
|
|
2427
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
2428
|
-
const results = await backend.queryMetrics({});
|
|
2429
|
-
assert.strictEqual(results.length, 1);
|
|
2430
|
-
// Empty exemplars array should result in undefined
|
|
2431
|
-
assert.strictEqual(results[0].exemplars, undefined);
|
|
2432
|
-
});
|
|
2433
|
-
});
|
|
2434
|
-
describe('queryLLMEvents', () => {
|
|
2435
|
-
it('should read and normalize LLM events from JSONL files', async () => {
|
|
2436
|
-
const today = getTestDate();
|
|
2437
|
-
const mockEvents = [
|
|
2438
|
-
{
|
|
2439
|
-
timestamp: '2026-01-28T10:00:00.000Z',
|
|
2440
|
-
name: 'llm.completion',
|
|
2441
|
-
attributes: {
|
|
2442
|
-
'gen_ai.request.model': 'claude-3-opus',
|
|
2443
|
-
'gen_ai.system': 'anthropic',
|
|
2444
|
-
'gen_ai.usage.input_tokens': 100,
|
|
2445
|
-
'gen_ai.usage.output_tokens': 50,
|
|
2446
|
-
'duration_ms': 1500,
|
|
2447
|
-
'success': true,
|
|
2448
|
-
},
|
|
2449
|
-
},
|
|
2450
|
-
];
|
|
2451
|
-
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
2452
|
-
const results = await backend.queryLLMEvents({});
|
|
2453
|
-
assert.strictEqual(results.length, 1);
|
|
2454
|
-
assert.strictEqual(results[0].name, 'llm.completion');
|
|
2455
|
-
assert.strictEqual(results[0].attributes['gen_ai.request.model'], 'claude-3-opus');
|
|
2456
|
-
assert.strictEqual(results[0].attributes['gen_ai.system'], 'anthropic');
|
|
2457
|
-
assert.strictEqual(results[0].attributes['gen_ai.usage.input_tokens'], 100);
|
|
2458
|
-
});
|
|
2459
|
-
it('should filter events by eventName substring', async () => {
|
|
2460
|
-
const today = getTestDate();
|
|
2461
|
-
const mockEvents = [
|
|
2462
|
-
{ timestamp: '2026-01-28T10:00:00Z', name: 'llm.completion', attributes: {} },
|
|
2463
|
-
{ timestamp: '2026-01-28T10:01:00Z', name: 'llm.embedding', attributes: {} },
|
|
2464
|
-
{ timestamp: '2026-01-28T10:02:00Z', name: 'tool.execution', attributes: {} },
|
|
2465
|
-
];
|
|
2466
|
-
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
2467
|
-
const results = await backend.queryLLMEvents({ eventName: 'llm' });
|
|
2468
|
-
assert.strictEqual(results.length, 2);
|
|
2469
|
-
assert.ok(results.every(e => e.name.includes('llm')));
|
|
2470
|
-
});
|
|
2471
|
-
it('should filter events by model', async () => {
|
|
2472
|
-
const today = getTestDate();
|
|
2473
|
-
const mockEvents = [
|
|
2474
|
-
{
|
|
2475
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2476
|
-
name: 'llm.completion',
|
|
2477
|
-
attributes: { 'gen_ai.request.model': 'claude-3-opus' },
|
|
2478
|
-
},
|
|
2479
|
-
{
|
|
2480
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
2481
|
-
name: 'llm.completion',
|
|
2482
|
-
attributes: { 'gen_ai.request.model': 'gpt-4' },
|
|
2483
|
-
},
|
|
2484
|
-
{
|
|
2485
|
-
timestamp: '2026-01-28T10:02:00Z',
|
|
2486
|
-
name: 'llm.completion',
|
|
2487
|
-
attributes: { model: 'claude-3-opus' }, // alternate attribute name
|
|
2488
|
-
},
|
|
2489
|
-
];
|
|
2490
|
-
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
2491
|
-
const results = await backend.queryLLMEvents({ model: 'claude-3-opus' });
|
|
2492
|
-
assert.strictEqual(results.length, 2);
|
|
2493
|
-
});
|
|
2494
|
-
it('should filter events by provider', async () => {
|
|
2495
|
-
const today = getTestDate();
|
|
2496
|
-
const mockEvents = [
|
|
2497
|
-
{
|
|
2498
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2499
|
-
name: 'llm.completion',
|
|
2500
|
-
attributes: { 'gen_ai.system': 'anthropic' },
|
|
2501
|
-
},
|
|
2502
|
-
{
|
|
2503
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
2504
|
-
name: 'llm.completion',
|
|
2505
|
-
attributes: { 'gen_ai.system': 'openai' },
|
|
2506
|
-
},
|
|
2507
|
-
{
|
|
2508
|
-
timestamp: '2026-01-28T10:02:00Z',
|
|
2509
|
-
name: 'llm.completion',
|
|
2510
|
-
attributes: { provider: 'anthropic' }, // alternate attribute name
|
|
2511
|
-
},
|
|
2512
|
-
];
|
|
2513
|
-
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
2514
|
-
const results = await backend.queryLLMEvents({ provider: 'anthropic' });
|
|
2515
|
-
assert.strictEqual(results.length, 2);
|
|
2516
|
-
});
|
|
2517
|
-
it('should filter events by search text in attributes', async () => {
|
|
2518
|
-
const today = getTestDate();
|
|
2519
|
-
const mockEvents = [
|
|
2520
|
-
{
|
|
2521
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2522
|
-
name: 'llm.completion',
|
|
2523
|
-
attributes: { prompt: 'Write a function to calculate fibonacci' },
|
|
2524
|
-
},
|
|
2525
|
-
{
|
|
2526
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
2527
|
-
name: 'llm.completion',
|
|
2528
|
-
attributes: { prompt: 'Explain quantum computing' },
|
|
2529
|
-
},
|
|
2530
|
-
];
|
|
2531
|
-
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
2532
|
-
const results = await backend.queryLLMEvents({ search: 'fibonacci' });
|
|
2533
|
-
assert.strictEqual(results.length, 1);
|
|
2534
|
-
assert.strictEqual(results[0].attributes.prompt, 'Write a function to calculate fibonacci');
|
|
2535
|
-
});
|
|
2536
|
-
it('should filter events by search text in event name', async () => {
|
|
2537
|
-
const today = getTestDate();
|
|
2538
|
-
const mockEvents = [
|
|
2539
|
-
{ timestamp: '2026-01-28T10:00:00Z', name: 'llm.completion.streaming', attributes: {} },
|
|
2540
|
-
{ timestamp: '2026-01-28T10:01:00Z', name: 'llm.completion', attributes: {} },
|
|
2541
|
-
];
|
|
2542
|
-
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
2543
|
-
const results = await backend.queryLLMEvents({ search: 'streaming' });
|
|
2544
|
-
assert.strictEqual(results.length, 1);
|
|
2545
|
-
assert.strictEqual(results[0].name, 'llm.completion.streaming');
|
|
2546
|
-
});
|
|
2547
|
-
it('should apply limit and offset to LLM event results', async () => {
|
|
2548
|
-
const today = getTestDate();
|
|
2549
|
-
const mockEvents = Array.from({ length: 100 }, (_, i) => ({
|
|
2550
|
-
timestamp: new Date(Date.now() + i * 1000).toISOString(),
|
|
2551
|
-
name: `event-${i}`,
|
|
2552
|
-
attributes: { index: i },
|
|
2553
|
-
}));
|
|
2554
|
-
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
2555
|
-
const results = await backend.queryLLMEvents({ limit: 20, offset: 50 });
|
|
2556
|
-
assert.strictEqual(results.length, 20);
|
|
2557
|
-
assert.strictEqual(results[0].name, 'event-50');
|
|
2558
|
-
});
|
|
2559
|
-
it('should filter events by date range', async () => {
|
|
2560
|
-
// Create files for multiple dates
|
|
2561
|
-
writeJsonlFile(path.join(tempDir, 'llm-events-2026-01-26.jsonl'), [
|
|
2562
|
-
{ timestamp: '2026-01-26T10:00:00Z', name: 'event-26', attributes: {} },
|
|
2563
|
-
]);
|
|
2564
|
-
writeJsonlFile(path.join(tempDir, 'llm-events-2026-01-27.jsonl'), [
|
|
2565
|
-
{ timestamp: '2026-01-27T10:00:00Z', name: 'event-27', attributes: {} },
|
|
2566
|
-
]);
|
|
2567
|
-
writeJsonlFile(path.join(tempDir, 'llm-events-2026-01-28.jsonl'), [
|
|
2568
|
-
{ timestamp: '2026-01-28T10:00:00Z', name: 'event-28', attributes: {} },
|
|
2569
|
-
]);
|
|
2570
|
-
const results = await backend.queryLLMEvents({
|
|
2571
|
-
startDate: '2026-01-27',
|
|
2572
|
-
endDate: '2026-01-27',
|
|
2573
|
-
});
|
|
2574
|
-
assert.strictEqual(results.length, 1);
|
|
2575
|
-
assert.strictEqual(results[0].name, 'event-27');
|
|
2576
|
-
});
|
|
2577
|
-
it('should skip events with missing required fields', async () => {
|
|
2578
|
-
const today = getTestDate();
|
|
2579
|
-
const mockEvents = [
|
|
2580
|
-
{ timestamp: '2026-01-28T10:00:00Z', name: 'valid-event', attributes: {} },
|
|
2581
|
-
{ timestamp: '2026-01-28T10:01:00Z', attributes: {} }, // missing name
|
|
2582
|
-
{ name: 'no-timestamp', attributes: {} }, // missing timestamp
|
|
2583
|
-
];
|
|
2584
|
-
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
2585
|
-
const results = await backend.queryLLMEvents({});
|
|
2586
|
-
assert.strictEqual(results.length, 1);
|
|
2587
|
-
assert.strictEqual(results[0].name, 'valid-event');
|
|
2588
|
-
});
|
|
2589
|
-
it('should return empty array when no LLM event files found', async () => {
|
|
2590
|
-
const results = await backend.queryLLMEvents({});
|
|
2591
|
-
assert.strictEqual(results.length, 0);
|
|
2592
|
-
});
|
|
2593
|
-
it('should combine multiple filters', async () => {
|
|
2594
|
-
const today = getTestDate();
|
|
2595
|
-
const mockEvents = [
|
|
2596
|
-
{
|
|
2597
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2598
|
-
name: 'llm.completion',
|
|
2599
|
-
attributes: { 'gen_ai.request.model': 'claude-3-opus', 'gen_ai.system': 'anthropic' },
|
|
2600
|
-
},
|
|
2601
|
-
{
|
|
2602
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
2603
|
-
name: 'llm.completion',
|
|
2604
|
-
attributes: { 'gen_ai.request.model': 'gpt-4', 'gen_ai.system': 'openai' },
|
|
2605
|
-
},
|
|
2606
|
-
{
|
|
2607
|
-
timestamp: '2026-01-28T10:02:00Z',
|
|
2608
|
-
name: 'llm.embedding',
|
|
2609
|
-
attributes: { 'gen_ai.request.model': 'claude-3-opus', 'gen_ai.system': 'anthropic' },
|
|
2610
|
-
},
|
|
2611
|
-
];
|
|
2612
|
-
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
2613
|
-
const results = await backend.queryLLMEvents({
|
|
2614
|
-
eventName: 'completion',
|
|
2615
|
-
model: 'claude-3-opus',
|
|
2616
|
-
provider: 'anthropic',
|
|
2617
|
-
});
|
|
2618
|
-
assert.strictEqual(results.length, 1);
|
|
2619
|
-
assert.strictEqual(results[0].name, 'llm.completion');
|
|
2620
|
-
});
|
|
2621
|
-
it('should use OTel GenAI provider fallback: gen_ai.provider.name -> gen_ai.system -> provider', async () => {
|
|
2622
|
-
const today = getTestDate();
|
|
2623
|
-
const mockEvents = [
|
|
2624
|
-
{
|
|
2625
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2626
|
-
name: 'llm.completion',
|
|
2627
|
-
attributes: { 'gen_ai.provider.name': 'anthropic-new' }, // should match
|
|
2628
|
-
},
|
|
2629
|
-
{
|
|
2630
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
2631
|
-
name: 'llm.completion',
|
|
2632
|
-
attributes: { 'gen_ai.system': 'anthropic-new', 'provider': 'legacy' }, // should match via gen_ai.system
|
|
2633
|
-
},
|
|
2634
|
-
{
|
|
2635
|
-
timestamp: '2026-01-28T10:02:00Z',
|
|
2636
|
-
name: 'llm.completion',
|
|
2637
|
-
attributes: { 'provider': 'anthropic-new' }, // should match via provider fallback
|
|
2638
|
-
},
|
|
2639
|
-
{
|
|
2640
|
-
timestamp: '2026-01-28T10:03:00Z',
|
|
2641
|
-
name: 'llm.completion',
|
|
2642
|
-
attributes: { 'gen_ai.provider.name': 'other-provider' }, // should NOT match
|
|
2643
|
-
},
|
|
2644
|
-
];
|
|
2645
|
-
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
2646
|
-
const results = await backend.queryLLMEvents({ provider: 'anthropic-new' });
|
|
2647
|
-
assert.strictEqual(results.length, 3);
|
|
2648
|
-
});
|
|
2649
|
-
it('should filter OpenAI events by provider', async () => {
|
|
2650
|
-
const today = getTestDate();
|
|
2651
|
-
const mockEvents = [
|
|
2652
|
-
{
|
|
2653
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2654
|
-
name: 'llm.completion',
|
|
2655
|
-
attributes: {
|
|
2656
|
-
'gen_ai.provider.name': 'openai',
|
|
2657
|
-
'gen_ai.request.model': 'gpt-4o',
|
|
2658
|
-
'gen_ai.usage.input_tokens': 500,
|
|
2659
|
-
},
|
|
2660
|
-
},
|
|
2661
|
-
{
|
|
2662
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
2663
|
-
name: 'llm.completion',
|
|
2664
|
-
attributes: {
|
|
2665
|
-
'gen_ai.provider.name': 'anthropic',
|
|
2666
|
-
'gen_ai.request.model': 'claude-3-opus',
|
|
2667
|
-
},
|
|
2668
|
-
},
|
|
2669
|
-
];
|
|
2670
|
-
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
2671
|
-
const results = await backend.queryLLMEvents({ provider: 'openai' });
|
|
2672
|
-
assert.strictEqual(results.length, 1);
|
|
2673
|
-
assert.strictEqual(results[0].attributes?.['gen_ai.request.model'], 'gpt-4o');
|
|
2674
|
-
});
|
|
2675
|
-
it('should filter Google Gemini events by provider', async () => {
|
|
2676
|
-
const today = getTestDate();
|
|
2677
|
-
const mockEvents = [
|
|
2678
|
-
{
|
|
2679
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2680
|
-
name: 'llm.completion',
|
|
2681
|
-
attributes: {
|
|
2682
|
-
'gen_ai.provider.name': 'gcp.gemini',
|
|
2683
|
-
'gen_ai.request.model': 'gemini-1.5-pro',
|
|
2684
|
-
},
|
|
2685
|
-
},
|
|
2686
|
-
{
|
|
2687
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
2688
|
-
name: 'llm.completion',
|
|
2689
|
-
attributes: {
|
|
2690
|
-
'gen_ai.provider.name': 'gcp.vertex_ai',
|
|
2691
|
-
'gen_ai.request.model': 'gemini-pro',
|
|
2692
|
-
},
|
|
2693
|
-
},
|
|
2694
|
-
{
|
|
2695
|
-
timestamp: '2026-01-28T10:02:00Z',
|
|
2696
|
-
name: 'llm.completion',
|
|
2697
|
-
attributes: {
|
|
2698
|
-
'gen_ai.provider.name': 'openai',
|
|
2699
|
-
'gen_ai.request.model': 'gpt-4',
|
|
2700
|
-
},
|
|
2701
|
-
},
|
|
2702
|
-
];
|
|
2703
|
-
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
2704
|
-
const geminiResults = await backend.queryLLMEvents({ provider: 'gcp.gemini' });
|
|
2705
|
-
assert.strictEqual(geminiResults.length, 1);
|
|
2706
|
-
assert.strictEqual(geminiResults[0].attributes?.['gen_ai.request.model'], 'gemini-1.5-pro');
|
|
2707
|
-
const vertexResults = await backend.queryLLMEvents({ provider: 'gcp.vertex_ai' });
|
|
2708
|
-
assert.strictEqual(vertexResults.length, 1);
|
|
2709
|
-
});
|
|
2710
|
-
it('should filter Mistral AI events by provider', async () => {
|
|
2711
|
-
const today = getTestDate();
|
|
2712
|
-
const mockEvents = [
|
|
2713
|
-
{
|
|
2714
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2715
|
-
name: 'llm.completion',
|
|
2716
|
-
attributes: {
|
|
2717
|
-
'gen_ai.provider.name': 'mistral_ai',
|
|
2718
|
-
'gen_ai.request.model': 'mistral-large',
|
|
2719
|
-
},
|
|
2720
|
-
},
|
|
2721
|
-
{
|
|
2722
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
2723
|
-
name: 'llm.completion',
|
|
2724
|
-
attributes: {
|
|
2725
|
-
'gen_ai.provider.name': 'anthropic',
|
|
2726
|
-
'gen_ai.request.model': 'claude-3',
|
|
2727
|
-
},
|
|
2728
|
-
},
|
|
2729
|
-
];
|
|
2730
|
-
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
2731
|
-
const results = await backend.queryLLMEvents({ provider: 'mistral_ai' });
|
|
2732
|
-
assert.strictEqual(results.length, 1);
|
|
2733
|
-
assert.strictEqual(results[0].attributes?.['gen_ai.request.model'], 'mistral-large');
|
|
2734
|
-
});
|
|
2735
|
-
it('should filter AWS Bedrock events by provider', async () => {
|
|
2736
|
-
const today = getTestDate();
|
|
2737
|
-
const mockEvents = [
|
|
2738
|
-
{
|
|
2739
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2740
|
-
name: 'llm.completion',
|
|
2741
|
-
attributes: {
|
|
2742
|
-
'gen_ai.provider.name': 'aws.bedrock',
|
|
2743
|
-
'gen_ai.request.model': 'anthropic.claude-3-sonnet-20240229-v1:0',
|
|
2744
|
-
},
|
|
2745
|
-
},
|
|
2746
|
-
{
|
|
2747
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
2748
|
-
name: 'llm.completion',
|
|
2749
|
-
attributes: {
|
|
2750
|
-
'gen_ai.provider.name': 'anthropic',
|
|
2751
|
-
'gen_ai.request.model': 'claude-3-sonnet',
|
|
2752
|
-
},
|
|
2753
|
-
},
|
|
2754
|
-
];
|
|
2755
|
-
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
2756
|
-
const results = await backend.queryLLMEvents({ provider: 'aws.bedrock' });
|
|
2757
|
-
assert.strictEqual(results.length, 1);
|
|
2758
|
-
assert.ok(results[0].attributes?.['gen_ai.request.model']?.toString().includes('anthropic.claude'));
|
|
2759
|
-
});
|
|
2760
|
-
it('should filter Cohere events by provider', async () => {
|
|
2761
|
-
const today = getTestDate();
|
|
2762
|
-
const mockEvents = [
|
|
2763
|
-
{
|
|
2764
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2765
|
-
name: 'llm.completion',
|
|
2766
|
-
attributes: {
|
|
2767
|
-
'gen_ai.provider.name': 'cohere',
|
|
2768
|
-
'gen_ai.request.model': 'command-r-plus',
|
|
2769
|
-
},
|
|
2770
|
-
},
|
|
2771
|
-
{
|
|
2772
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
2773
|
-
name: 'llm.completion',
|
|
2774
|
-
attributes: {
|
|
2775
|
-
'gen_ai.provider.name': 'openai',
|
|
2776
|
-
'gen_ai.request.model': 'gpt-4',
|
|
2777
|
-
},
|
|
2778
|
-
},
|
|
2779
|
-
];
|
|
2780
|
-
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
2781
|
-
const results = await backend.queryLLMEvents({ provider: 'cohere' });
|
|
2782
|
-
assert.strictEqual(results.length, 1);
|
|
2783
|
-
assert.strictEqual(results[0].attributes?.['gen_ai.request.model'], 'command-r-plus');
|
|
2784
|
-
});
|
|
2785
|
-
it('should filter Groq events by provider', async () => {
|
|
2786
|
-
const today = getTestDate();
|
|
2787
|
-
const mockEvents = [
|
|
2788
|
-
{
|
|
2789
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2790
|
-
name: 'llm.completion',
|
|
2791
|
-
attributes: {
|
|
2792
|
-
'gen_ai.provider.name': 'groq',
|
|
2793
|
-
'gen_ai.request.model': 'llama-3.3-70b',
|
|
2794
|
-
},
|
|
2795
|
-
},
|
|
2796
|
-
{
|
|
2797
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
2798
|
-
name: 'llm.completion',
|
|
2799
|
-
attributes: {
|
|
2800
|
-
'gen_ai.provider.name': 'together_ai',
|
|
2801
|
-
'gen_ai.request.model': 'llama-3-70b',
|
|
2802
|
-
},
|
|
2803
|
-
},
|
|
2804
|
-
];
|
|
2805
|
-
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
2806
|
-
const groqResults = await backend.queryLLMEvents({ provider: 'groq' });
|
|
2807
|
-
assert.strictEqual(groqResults.length, 1);
|
|
2808
|
-
assert.strictEqual(groqResults[0].attributes?.['gen_ai.request.model'], 'llama-3.3-70b');
|
|
2809
|
-
const togetherResults = await backend.queryLLMEvents({ provider: 'together_ai' });
|
|
2810
|
-
assert.strictEqual(togetherResults.length, 1);
|
|
2811
|
-
});
|
|
2812
|
-
it('should filter Ollama local model events by provider', async () => {
|
|
2813
|
-
const today = getTestDate();
|
|
2814
|
-
const mockEvents = [
|
|
2815
|
-
{
|
|
2816
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2817
|
-
name: 'llm.completion',
|
|
2818
|
-
attributes: {
|
|
2819
|
-
'gen_ai.provider.name': 'ollama',
|
|
2820
|
-
'gen_ai.request.model': 'llama3:8b',
|
|
2821
|
-
},
|
|
2822
|
-
},
|
|
2823
|
-
{
|
|
2824
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
2825
|
-
name: 'llm.completion',
|
|
2826
|
-
attributes: {
|
|
2827
|
-
'gen_ai.provider.name': 'openai',
|
|
2828
|
-
'gen_ai.request.model': 'gpt-4',
|
|
2829
|
-
},
|
|
2830
|
-
},
|
|
2831
|
-
];
|
|
2832
|
-
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
2833
|
-
const results = await backend.queryLLMEvents({ provider: 'ollama' });
|
|
2834
|
-
assert.strictEqual(results.length, 1);
|
|
2835
|
-
assert.strictEqual(results[0].attributes?.['gen_ai.request.model'], 'llama3:8b');
|
|
2836
|
-
});
|
|
2837
|
-
it('should filter custom/internal provider events', async () => {
|
|
2838
|
-
const today = getTestDate();
|
|
2839
|
-
const mockEvents = [
|
|
2840
|
-
{
|
|
2841
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2842
|
-
name: 'llm.completion',
|
|
2843
|
-
attributes: {
|
|
2844
|
-
'gen_ai.provider.name': 'custom-internal-llm',
|
|
2845
|
-
'gen_ai.request.model': 'internal-model-v2',
|
|
2846
|
-
},
|
|
2847
|
-
},
|
|
2848
|
-
{
|
|
2849
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
2850
|
-
name: 'llm.completion',
|
|
2851
|
-
attributes: {
|
|
2852
|
-
'gen_ai.provider.name': 'anthropic',
|
|
2853
|
-
'gen_ai.request.model': 'claude-3',
|
|
2854
|
-
},
|
|
2855
|
-
},
|
|
2856
|
-
];
|
|
2857
|
-
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
2858
|
-
const results = await backend.queryLLMEvents({ provider: 'custom-internal-llm' });
|
|
2859
|
-
assert.strictEqual(results.length, 1);
|
|
2860
|
-
assert.strictEqual(results[0].attributes?.['gen_ai.request.model'], 'internal-model-v2');
|
|
2861
|
-
});
|
|
2862
|
-
it('should combine provider and model filters', async () => {
|
|
2863
|
-
const today = getTestDate();
|
|
2864
|
-
const mockEvents = [
|
|
2865
|
-
{
|
|
2866
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2867
|
-
name: 'llm.completion',
|
|
2868
|
-
attributes: {
|
|
2869
|
-
'gen_ai.provider.name': 'openai',
|
|
2870
|
-
'gen_ai.request.model': 'gpt-4o',
|
|
2871
|
-
},
|
|
2872
|
-
},
|
|
2873
|
-
{
|
|
2874
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
2875
|
-
name: 'llm.completion',
|
|
2876
|
-
attributes: {
|
|
2877
|
-
'gen_ai.provider.name': 'openai',
|
|
2878
|
-
'gen_ai.request.model': 'gpt-4-turbo',
|
|
2879
|
-
},
|
|
2880
|
-
},
|
|
2881
|
-
{
|
|
2882
|
-
timestamp: '2026-01-28T10:02:00Z',
|
|
2883
|
-
name: 'llm.completion',
|
|
2884
|
-
attributes: {
|
|
2885
|
-
'gen_ai.provider.name': 'anthropic',
|
|
2886
|
-
'gen_ai.request.model': 'gpt-4o', // Same model name, different provider
|
|
2887
|
-
},
|
|
2888
|
-
},
|
|
2889
|
-
];
|
|
2890
|
-
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
2891
|
-
const results = await backend.queryLLMEvents({ provider: 'openai', model: 'gpt-4o' });
|
|
2892
|
-
assert.strictEqual(results.length, 1);
|
|
2893
|
-
assert.strictEqual(results[0].attributes?.['gen_ai.provider.name'], 'openai');
|
|
2894
|
-
assert.strictEqual(results[0].attributes?.['gen_ai.request.model'], 'gpt-4o');
|
|
2895
|
-
});
|
|
2896
|
-
it('should return empty array when provider has no events', async () => {
|
|
2897
|
-
const today = getTestDate();
|
|
2898
|
-
const mockEvents = [
|
|
2899
|
-
{
|
|
2900
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2901
|
-
name: 'llm.completion',
|
|
2902
|
-
attributes: {
|
|
2903
|
-
'gen_ai.provider.name': 'anthropic',
|
|
2904
|
-
'gen_ai.request.model': 'claude-3',
|
|
2905
|
-
},
|
|
2906
|
-
},
|
|
2907
|
-
];
|
|
2908
|
-
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
2909
|
-
const results = await backend.queryLLMEvents({ provider: 'nonexistent-provider' });
|
|
2910
|
-
assert.strictEqual(results.length, 0);
|
|
2911
|
-
});
|
|
2912
|
-
it('should filter events by operationName', async () => {
|
|
2913
|
-
const today = getTestDate();
|
|
2914
|
-
const mockEvents = [
|
|
2915
|
-
{
|
|
2916
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2917
|
-
name: 'llm.chat',
|
|
2918
|
-
attributes: { 'gen_ai.operation.name': 'chat' },
|
|
2919
|
-
},
|
|
2920
|
-
{
|
|
2921
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
2922
|
-
name: 'llm.embedding',
|
|
2923
|
-
attributes: { 'gen_ai.operation.name': 'embeddings' },
|
|
2924
|
-
},
|
|
2925
|
-
{
|
|
2926
|
-
timestamp: '2026-01-28T10:02:00Z',
|
|
2927
|
-
name: 'agent.invoke',
|
|
2928
|
-
attributes: { 'gen_ai.operation.name': 'invoke_agent' },
|
|
2929
|
-
},
|
|
2930
|
-
];
|
|
2931
|
-
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
2932
|
-
const results = await backend.queryLLMEvents({ operationName: 'chat' });
|
|
2933
|
-
assert.strictEqual(results.length, 1);
|
|
2934
|
-
assert.strictEqual(results[0].name, 'llm.chat');
|
|
2935
|
-
});
|
|
2936
|
-
it('should filter events by conversationId', async () => {
|
|
2937
|
-
const today = getTestDate();
|
|
2938
|
-
const mockEvents = [
|
|
2939
|
-
{
|
|
2940
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2941
|
-
name: 'llm.chat',
|
|
2942
|
-
attributes: { 'gen_ai.conversation.id': 'conv-abc123' },
|
|
2943
|
-
},
|
|
2944
|
-
{
|
|
2945
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
2946
|
-
name: 'llm.chat',
|
|
2947
|
-
attributes: { 'gen_ai.conversation.id': 'conv-xyz789' },
|
|
2948
|
-
},
|
|
2949
|
-
{
|
|
2950
|
-
timestamp: '2026-01-28T10:02:00Z',
|
|
2951
|
-
name: 'llm.chat',
|
|
2952
|
-
attributes: { 'gen_ai.conversation.id': 'conv-abc123' },
|
|
2953
|
-
},
|
|
2954
|
-
];
|
|
2955
|
-
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
2956
|
-
const results = await backend.queryLLMEvents({ conversationId: 'conv-abc123' });
|
|
2957
|
-
assert.strictEqual(results.length, 2);
|
|
2958
|
-
});
|
|
2959
|
-
it('should combine OTel GenAI filters with other filters', async () => {
|
|
2960
|
-
const today = getTestDate();
|
|
2961
|
-
const mockEvents = [
|
|
2962
|
-
{
|
|
2963
|
-
timestamp: '2026-01-28T10:00:00Z',
|
|
2964
|
-
name: 'llm.chat',
|
|
2965
|
-
attributes: {
|
|
2966
|
-
'gen_ai.operation.name': 'chat',
|
|
2967
|
-
'gen_ai.conversation.id': 'conv-abc123',
|
|
2968
|
-
'gen_ai.request.model': 'claude-3-opus',
|
|
2969
|
-
},
|
|
2970
|
-
},
|
|
2971
|
-
{
|
|
2972
|
-
timestamp: '2026-01-28T10:01:00Z',
|
|
2973
|
-
name: 'llm.chat',
|
|
2974
|
-
attributes: {
|
|
2975
|
-
'gen_ai.operation.name': 'chat',
|
|
2976
|
-
'gen_ai.conversation.id': 'conv-abc123',
|
|
2977
|
-
'gen_ai.request.model': 'gpt-4',
|
|
2978
|
-
},
|
|
2979
|
-
},
|
|
2980
|
-
];
|
|
2981
|
-
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
2982
|
-
const results = await backend.queryLLMEvents({
|
|
2983
|
-
operationName: 'chat',
|
|
2984
|
-
conversationId: 'conv-abc123',
|
|
2985
|
-
model: 'claude-3-opus',
|
|
2986
|
-
});
|
|
2987
|
-
assert.strictEqual(results.length, 1);
|
|
2988
|
-
assert.strictEqual(results[0].attributes['gen_ai.request.model'], 'claude-3-opus');
|
|
2989
|
-
});
|
|
2990
|
-
});
|
|
2991
|
-
describe('queryTraces OTel GenAI agent/tool filters', () => {
|
|
2992
|
-
it('should filter traces by agentId', async () => {
|
|
2993
|
-
const today = getTestDate();
|
|
2994
|
-
const mockSpans = [
|
|
2995
|
-
{
|
|
2996
|
-
traceId: 'trace1',
|
|
2997
|
-
spanId: 'span1',
|
|
2998
|
-
name: 'agent.invoke',
|
|
2999
|
-
startTime: [1700000000, 0],
|
|
3000
|
-
attributes: { 'gen_ai.agent.id': 'agent-001' },
|
|
3001
|
-
},
|
|
3002
|
-
{
|
|
3003
|
-
traceId: 'trace1',
|
|
3004
|
-
spanId: 'span2',
|
|
3005
|
-
name: 'agent.invoke',
|
|
3006
|
-
startTime: [1700000001, 0],
|
|
3007
|
-
attributes: { 'gen_ai.agent.id': 'agent-002' },
|
|
3008
|
-
},
|
|
3009
|
-
];
|
|
3010
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
3011
|
-
const results = await backend.queryTraces({ agentId: 'agent-001' });
|
|
3012
|
-
assert.strictEqual(results.length, 1);
|
|
3013
|
-
assert.strictEqual(results[0].attributes?.['gen_ai.agent.id'], 'agent-001');
|
|
3014
|
-
});
|
|
3015
|
-
it('should filter traces by agentName', async () => {
|
|
3016
|
-
const today = getTestDate();
|
|
3017
|
-
const mockSpans = [
|
|
3018
|
-
{
|
|
3019
|
-
traceId: 'trace1',
|
|
3020
|
-
spanId: 'span1',
|
|
3021
|
-
name: 'agent.invoke',
|
|
3022
|
-
startTime: [1700000000, 0],
|
|
3023
|
-
attributes: { 'gen_ai.agent.name': 'Explore' },
|
|
3024
|
-
},
|
|
3025
|
-
{
|
|
3026
|
-
traceId: 'trace1',
|
|
3027
|
-
spanId: 'span2',
|
|
3028
|
-
name: 'agent.invoke',
|
|
3029
|
-
startTime: [1700000001, 0],
|
|
3030
|
-
attributes: { 'gen_ai.agent.name': 'Plan' },
|
|
3031
|
-
},
|
|
3032
|
-
];
|
|
3033
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
3034
|
-
const results = await backend.queryTraces({ agentName: 'Explore' });
|
|
3035
|
-
assert.strictEqual(results.length, 1);
|
|
3036
|
-
assert.strictEqual(results[0].attributes?.['gen_ai.agent.name'], 'Explore');
|
|
3037
|
-
});
|
|
3038
|
-
it('should filter traces by toolName', async () => {
|
|
3039
|
-
const today = getTestDate();
|
|
3040
|
-
const mockSpans = [
|
|
3041
|
-
{
|
|
3042
|
-
traceId: 'trace1',
|
|
3043
|
-
spanId: 'span1',
|
|
3044
|
-
name: 'tool.execute',
|
|
3045
|
-
startTime: [1700000000, 0],
|
|
3046
|
-
attributes: { 'gen_ai.tool.name': 'Read' },
|
|
3047
|
-
},
|
|
3048
|
-
{
|
|
3049
|
-
traceId: 'trace1',
|
|
3050
|
-
spanId: 'span2',
|
|
3051
|
-
name: 'tool.execute',
|
|
3052
|
-
startTime: [1700000001, 0],
|
|
3053
|
-
attributes: { 'gen_ai.tool.name': 'Write' },
|
|
3054
|
-
},
|
|
3055
|
-
];
|
|
3056
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
3057
|
-
const results = await backend.queryTraces({ toolName: 'Read' });
|
|
3058
|
-
assert.strictEqual(results.length, 1);
|
|
3059
|
-
assert.strictEqual(results[0].attributes?.['gen_ai.tool.name'], 'Read');
|
|
3060
|
-
});
|
|
3061
|
-
it('should filter traces by toolCallId', async () => {
|
|
3062
|
-
const today = getTestDate();
|
|
3063
|
-
const mockSpans = [
|
|
3064
|
-
{
|
|
3065
|
-
traceId: 'trace1',
|
|
3066
|
-
spanId: 'span1',
|
|
3067
|
-
name: 'tool.execute',
|
|
3068
|
-
startTime: [1700000000, 0],
|
|
3069
|
-
attributes: { 'gen_ai.tool.call.id': 'toolu_abc123' },
|
|
3070
|
-
},
|
|
3071
|
-
{
|
|
3072
|
-
traceId: 'trace1',
|
|
3073
|
-
spanId: 'span2',
|
|
3074
|
-
name: 'tool.execute',
|
|
3075
|
-
startTime: [1700000001, 0],
|
|
3076
|
-
attributes: { 'gen_ai.tool.call.id': 'toolu_xyz789' },
|
|
3077
|
-
},
|
|
3078
|
-
];
|
|
3079
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
3080
|
-
const results = await backend.queryTraces({ toolCallId: 'toolu_abc123' });
|
|
3081
|
-
assert.strictEqual(results.length, 1);
|
|
3082
|
-
assert.strictEqual(results[0].attributes?.['gen_ai.tool.call.id'], 'toolu_abc123');
|
|
3083
|
-
});
|
|
3084
|
-
it('should filter traces by toolType', async () => {
|
|
3085
|
-
const today = getTestDate();
|
|
3086
|
-
const mockSpans = [
|
|
3087
|
-
{
|
|
3088
|
-
traceId: 'trace1',
|
|
3089
|
-
spanId: 'span1',
|
|
3090
|
-
name: 'tool.execute',
|
|
3091
|
-
startTime: [1700000000, 0],
|
|
3092
|
-
attributes: { 'gen_ai.tool.type': 'function' },
|
|
3093
|
-
},
|
|
3094
|
-
{
|
|
3095
|
-
traceId: 'trace1',
|
|
3096
|
-
spanId: 'span2',
|
|
3097
|
-
name: 'tool.execute',
|
|
3098
|
-
startTime: [1700000001, 0],
|
|
3099
|
-
attributes: { 'gen_ai.tool.type': 'mcp' },
|
|
3100
|
-
},
|
|
3101
|
-
];
|
|
3102
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
3103
|
-
const results = await backend.queryTraces({ toolType: 'function' });
|
|
3104
|
-
assert.strictEqual(results.length, 1);
|
|
3105
|
-
assert.strictEqual(results[0].attributes?.['gen_ai.tool.type'], 'function');
|
|
3106
|
-
});
|
|
3107
|
-
it('should filter traces by operationName', async () => {
|
|
3108
|
-
const today = getTestDate();
|
|
3109
|
-
const mockSpans = [
|
|
3110
|
-
{
|
|
3111
|
-
traceId: 'trace1',
|
|
3112
|
-
spanId: 'span1',
|
|
3113
|
-
name: 'llm.call',
|
|
3114
|
-
startTime: [1700000000, 0],
|
|
3115
|
-
attributes: { 'gen_ai.operation.name': 'chat' },
|
|
3116
|
-
},
|
|
3117
|
-
{
|
|
3118
|
-
traceId: 'trace1',
|
|
3119
|
-
spanId: 'span2',
|
|
3120
|
-
name: 'tool.execute',
|
|
3121
|
-
startTime: [1700000001, 0],
|
|
3122
|
-
attributes: { 'gen_ai.operation.name': 'execute_tool' },
|
|
3123
|
-
},
|
|
3124
|
-
];
|
|
3125
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
3126
|
-
const results = await backend.queryTraces({ operationName: 'chat' });
|
|
3127
|
-
assert.strictEqual(results.length, 1);
|
|
3128
|
-
assert.strictEqual(results[0].attributes?.['gen_ai.operation.name'], 'chat');
|
|
3129
|
-
});
|
|
3130
|
-
it('should combine agent/tool filters with other trace filters', async () => {
|
|
3131
|
-
const today = getTestDate();
|
|
3132
|
-
const mockSpans = [
|
|
3133
|
-
{
|
|
3134
|
-
traceId: 'trace1',
|
|
3135
|
-
spanId: 'span1',
|
|
3136
|
-
name: 'agent.explore',
|
|
3137
|
-
startTime: [1700000000, 0],
|
|
3138
|
-
duration: [0, 100000000], // 100ms
|
|
3139
|
-
attributes: {
|
|
3140
|
-
'gen_ai.agent.name': 'Explore',
|
|
3141
|
-
'gen_ai.tool.name': 'Grep',
|
|
3142
|
-
},
|
|
3143
|
-
},
|
|
3144
|
-
{
|
|
3145
|
-
traceId: 'trace1',
|
|
3146
|
-
spanId: 'span2',
|
|
3147
|
-
name: 'agent.explore',
|
|
3148
|
-
startTime: [1700000001, 0],
|
|
3149
|
-
duration: [0, 200000000], // 200ms
|
|
3150
|
-
attributes: {
|
|
3151
|
-
'gen_ai.agent.name': 'Explore',
|
|
3152
|
-
'gen_ai.tool.name': 'Read',
|
|
3153
|
-
},
|
|
3154
|
-
},
|
|
3155
|
-
];
|
|
3156
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
3157
|
-
const results = await backend.queryTraces({
|
|
3158
|
-
agentName: 'Explore',
|
|
3159
|
-
toolName: 'Grep',
|
|
3160
|
-
spanName: 'agent',
|
|
3161
|
-
});
|
|
3162
|
-
assert.strictEqual(results.length, 1);
|
|
3163
|
-
assert.strictEqual(results[0].attributes?.['gen_ai.tool.name'], 'Grep');
|
|
3164
|
-
});
|
|
3165
|
-
});
|
|
3166
|
-
describe('healthCheck', () => {
  // Writes an empty JSONL telemetry file of the given kind for a date.
  const touchFile = (kind, date) => writeJsonlFile(path.join(tempDir, `${kind}-${date}.jsonl`), []);

  it('should return error when telemetry directory does not exist', async () => {
    const missingBackend = new LocalJsonlBackend('/nonexistent/telemetry');
    const health = await missingBackend.healthCheck();
    assert.strictEqual(health.status, 'error');
    assert.match(health.message || '', /not found/);
  });

  it('should return ok when directory exists with no files', async () => {
    // tempDir exists but contains no telemetry files at this point.
    const health = await backend.healthCheck();
    assert.strictEqual(health.status, 'ok');
    assert.match(health.message || '', /No telemetry files/);
  });

  it('should return ok with found files message', async () => {
    const today = getTestDate();
    // Both traces and logs files are present.
    touchFile('traces', today);
    touchFile('logs', today);
    const health = await backend.healthCheck();
    assert.strictEqual(health.status, 'ok');
    assert.match(health.message || '', /traces.*logs/);
  });

  it('should include llm-events in health check message', async () => {
    const today = getTestDate();
    // All three known file kinds are present.
    for (const kind of ['traces', 'logs', 'llm-events']) {
      touchFile(kind, today);
    }
    const health = await backend.healthCheck();
    assert.strictEqual(health.status, 'ok');
    assert.match(health.message || '', /llm-events/);
  });
});
|
|
3199
|
-
describe('date range filtering', () => {
  it('should filter files by startDate and endDate', async () => {
    // One trace file per day across four consecutive dates.
    for (const day of [26, 27, 28, 29]) {
      writeJsonlFile(path.join(tempDir, `traces-2026-01-${day}.jsonl`), [
        { traceId: `trace-${day}`, spanId: 'span1', name: 'op1', startTime: [1700000000, 0] },
      ]);
    }
    // The inclusive range should pick up exactly the middle two days.
    const results = await backend.queryTraces({ startDate: '2026-01-27', endDate: '2026-01-28' });
    assert.strictEqual(results.length, 2);
    const found = results.map((span) => span.traceId);
    assert.ok(found.includes('trace-27'));
    assert.ok(found.includes('trace-28'));
    assert.ok(!found.includes('trace-26'));
    assert.ok(!found.includes('trace-29'));
  });

  it('should use today as default when no date range specified', async () => {
    const today = getTestDate();
    const yesterday = new Date(Date.now() - 86400000).toISOString().split('T')[0];
    // One trace file for each of today and yesterday.
    writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), [
      { traceId: 'today-trace', spanId: 'span1', name: 'op1', startTime: [1700000000, 0] },
    ]);
    writeJsonlFile(path.join(tempDir, `traces-${yesterday}.jsonl`), [
      { traceId: 'yesterday-trace', spanId: 'span1', name: 'op1', startTime: [1700000000, 0] },
    ]);
    // With no explicit range, only today's file should be read.
    const results = await backend.queryTraces({});
    assert.strictEqual(results.length, 1);
    assert.strictEqual(results[0].traceId, 'today-trace');
  });
});
|
|
3238
|
-
describe('error handling', () => {
  it('should handle JSONL parsing errors gracefully', async () => {
    const today = getTestDate();
    const filePath = path.join(tempDir, `traces-${today}.jsonl`);
    // The first line is malformed JSON; the second is a valid span record.
    fs.writeFileSync(filePath, 'not valid json\n{"traceId":"t1","spanId":"s1","name":"op"}\n', 'utf-8');
    const results = await backend.queryTraces({});
    // The bad line is skipped while the valid one is still returned.
    assert.strictEqual(results.length, 1);
    assert.strictEqual(results[0].traceId, 't1');
  });

  it('should skip spans with invalid time calculations', async () => {
    const today = getTestDate();
    // startTime === endTime produces a zero-length span.
    const spans = [
      {
        traceId: 'trace1',
        spanId: 'span1',
        name: 'op1',
        startTime: [1700000000, 0],
        endTime: [1700000000, 0],
      },
    ];
    writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), spans);
    const results = await backend.queryTraces({});
    // The span is kept with a duration of exactly 0ms.
    assert.strictEqual(results.length, 1);
    assert.strictEqual(results[0].durationMs, 0);
  });
});
|
|
3266
|
-
});
|
|
3267
|
-
describe('insertSortedBounded helper', () => {
  // insertSortedBounded has no direct export here; its behavior is observed
  // through MultiDirectoryBackend's query methods.
  it('should maintain sorted order by timestamp in traces', async () => {
    const dir = createTempDir();
    try {
      const today = getTestDate();
      const telemetryDir = path.join(dir, 'telemetry');
      fs.mkdirSync(telemetryDir, { recursive: true });
      // Five spans written in deliberately shuffled timestamp order.
      const offsets = [5, 1, 9, 3, 7];
      writeJsonlFile(path.join(telemetryDir, `traces-${today}.jsonl`), offsets.map((off, i) => ({
        traceId: `sorted-test-${i + 1}`,
        spanId: `s${i + 1}`,
        name: 'SortedTestOp',
        startTime: [1700000000 + off, 0],
      })));
      const backend = new MultiDirectoryBackend(dir);
      const results = await backend.queryTraces({ spanName: 'SortedTestOp' });
      // Expect descending timestamp order (most recent first):
      // offsets 9, 7, 5, 3, 1 -> ids 3, 5, 1, 4, 2.
      assert.strictEqual(results.length, 5);
      assert.deepStrictEqual(
        results.map((span) => span.traceId),
        ['sorted-test-3', 'sorted-test-5', 'sorted-test-1', 'sorted-test-4', 'sorted-test-2']
      );
    } finally {
      removeTempDir(dir);
    }
  });

  it('should limit results to maxSize efficiently', async () => {
    const dir = createTempDir();
    try {
      const today = getTestDate();
      const telemetryDir = path.join(dir, 'telemetry');
      fs.mkdirSync(telemetryDir, { recursive: true });
      // Twenty spans with strictly increasing timestamps.
      const spans = [];
      for (let i = 0; i < 20; i++) {
        spans.push({
          traceId: `bounded-test-${i}`,
          spanId: `s${i}`,
          name: 'BoundedTestOp',
          startTime: [1700000000 + i, 0],
        });
      }
      writeJsonlFile(path.join(telemetryDir, `traces-${today}.jsonl`), spans);
      const backend = new MultiDirectoryBackend(dir);
      const results = await backend.queryTraces({ spanName: 'BoundedTestOp', limit: 5 });
      // Only five results come back.
      assert.strictEqual(results.length, 5);
      // LocalJsonlBackend keeps the first 5 spans found (indices 0-4),
      // then sorts them by timestamp descending.
      assert.strictEqual(results[0].traceId, 'bounded-test-4'); // highest of 0-4
      assert.strictEqual(results[4].traceId, 'bounded-test-0'); // lowest of 0-4
    } finally {
      removeTempDir(dir);
    }
  });

  it('should maintain sorted order in logs', async () => {
    const dir = createTempDir();
    try {
      const today = getTestDate();
      const telemetryDir = path.join(dir, 'telemetry');
      fs.mkdirSync(telemetryDir, { recursive: true });
      // Log entries written out of chronological order.
      writeJsonlFile(path.join(telemetryDir, `logs-${today}.jsonl`), [
        { timestamp: `${today}T10:00:00Z`, body: 'SortedLogTest_A' },
        { timestamp: `${today}T12:00:00Z`, body: 'SortedLogTest_B' },
        { timestamp: `${today}T08:00:00Z`, body: 'SortedLogTest_C' },
        { timestamp: `${today}T14:00:00Z`, body: 'SortedLogTest_D' },
      ]);
      const backend = new MultiDirectoryBackend(dir);
      const results = await backend.queryLogs({ search: 'SortedLogTest' });
      // Descending timestamp order: D (14:00), B (12:00), A (10:00), C (08:00).
      assert.strictEqual(results.length, 4);
      assert.deepStrictEqual(
        results.map((log) => log.body),
        ['SortedLogTest_D', 'SortedLogTest_B', 'SortedLogTest_A', 'SortedLogTest_C']
      );
    } finally {
      removeTempDir(dir);
    }
  });

  it('should maintain sorted order in LLM events', async () => {
    const dir = createTempDir();
    try {
      const today = getTestDate();
      const telemetryDir = path.join(dir, 'telemetry');
      fs.mkdirSync(telemetryDir, { recursive: true });
      writeJsonlFile(path.join(telemetryDir, `llm-events-${today}.jsonl`), [
        { timestamp: `${today}T09:00:00Z`, name: 'SortedLLMTest.A', attributes: {} },
        { timestamp: `${today}T15:00:00Z`, name: 'SortedLLMTest.B', attributes: {} },
        { timestamp: `${today}T11:00:00Z`, name: 'SortedLLMTest.C', attributes: {} },
      ]);
      const backend = new MultiDirectoryBackend(dir);
      const results = await backend.queryLLMEvents({ eventName: 'SortedLLMTest' });
      // Descending timestamp order: B (15:00), C (11:00), A (09:00).
      assert.strictEqual(results.length, 3);
      assert.deepStrictEqual(
        results.map((event) => event.name),
        ['SortedLLMTest.B', 'SortedLLMTest.C', 'SortedLLMTest.A']
      );
    } finally {
      removeTempDir(dir);
    }
  });
});
|
|
3373
|
-
describe('streaming JSONL optimization', () => {
  it('should handle large files with streaming', async () => {
    const dir = createTempDir();
    try {
      const today = getTestDate();
      // 500 span records in a single file.
      const spans = Array.from({ length: 500 }, (_, i) => ({
        traceId: `stream-test-${i}`,
        spanId: `span-${i}`,
        name: 'StreamTestOp',
        startTime: [1700000000 + i, 0],
      }));
      writeJsonlFile(path.join(dir, `traces-${today}.jsonl`), spans);
      const backend = new LocalJsonlBackend(dir);
      const results = await backend.queryTraces({ spanName: 'StreamTestOp', limit: 50 });
      // The limit caps the result set without materializing all records.
      assert.strictEqual(results.length, 50);
    } finally {
      removeTempDir(dir);
    }
  });

  it('should terminate early when limit is reached', async () => {
    const dir = createTempDir();
    try {
      const today = getTestDate();
      // 1000 log records with timestamps counting back from "now".
      const logs = Array.from({ length: 1000 }, (_, i) => ({
        timestamp: new Date(Date.now() - i * 1000).toISOString(),
        body: `StreamingLog_${i}`,
        severity: 'INFO',
      }));
      writeJsonlFile(path.join(dir, `logs-${today}.jsonl`), logs);
      const backend = new LocalJsonlBackend(dir);
      const begin = Date.now();
      const results = await backend.queryLogs({ search: 'StreamingLog', limit: 10 });
      const elapsed = Date.now() - begin;
      // The query stops as soon as the limit is satisfied.
      assert.strictEqual(results.length, 10);
      assert.ok(elapsed < 1000, `Query took too long: ${elapsed}ms`);
    } finally {
      removeTempDir(dir);
    }
  });
});
|
|
3420
|
-
describe('QueryCache', () => {
  // Shared fixture directory and the backend under test. The directory is
  // cleared (not recreated) before each test; each test gets a fresh
  // backend so cached query results never leak between cases.
  let tempDir;
  let backend;
  before(() => {
    tempDir = getSharedTempDir('QueryCache');
  });
  beforeEach(() => {
    clearTempDir(tempDir);
    backend = new LocalJsonlBackend(tempDir);
  });
  after(() => {
    removeSharedTempDir('QueryCache');
  });
  describe('caching behavior', () => {
    it('should return cached results on second query with same options', async () => {
      const today = getTestDate();
      const mockSpans = [
        { traceId: 'cache-test-1', spanId: 'span1', name: 'test-op', startTime: [1700000000, 0] },
      ];
      writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
      // First query - should read from file
      const result1 = await backend.queryTraces({ spanName: 'test-op' });
      assert.strictEqual(result1.length, 1);
      // Modify the file - but cache should return old result
      const newSpans = [
        { traceId: 'cache-test-1', spanId: 'span1', name: 'test-op', startTime: [1700000000, 0] },
        { traceId: 'cache-test-2', spanId: 'span2', name: 'test-op', startTime: [1700000001, 0] },
      ];
      writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), newSpans);
      // Second query with same options - should return cached result
      const result2 = await backend.queryTraces({ spanName: 'test-op' });
      assert.strictEqual(result2.length, 1, 'Should return cached result, not new file contents');
    });
    it('should return fresh results when query options differ', async () => {
      const today = getTestDate();
      const mockSpans = [
        { traceId: 'cache-test-1', spanId: 'span1', name: 'alpha-op', startTime: [1700000000, 0] },
        { traceId: 'cache-test-2', spanId: 'span2', name: 'beta-op', startTime: [1700000000, 0] },
      ];
      writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
      // Different options produce different cache keys, so the second
      // query is not served from the first query's cache entry.
      const result1 = await backend.queryTraces({ spanName: 'alpha' });
      assert.strictEqual(result1.length, 1);
      const result2 = await backend.queryTraces({ spanName: 'beta' });
      assert.strictEqual(result2.length, 1);
      assert.strictEqual(result2[0].name, 'beta-op');
    });
    it('should clear cache when clearCache is called', async () => {
      const today = getTestDate();
      const mockSpans = [
        { traceId: 'cache-test-1', spanId: 'span1', name: 'clear-test', startTime: [1700000000, 0] },
      ];
      writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
      // First query
      const result1 = await backend.queryTraces({ spanName: 'clear-test' });
      assert.strictEqual(result1.length, 1);
      // Modify the file
      const newSpans = [
        { traceId: 'cache-test-1', spanId: 'span1', name: 'clear-test', startTime: [1700000000, 0] },
        { traceId: 'cache-test-2', spanId: 'span2', name: 'clear-test', startTime: [1700000001, 0] },
      ];
      writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), newSpans);
      // Clear cache
      backend.clearCache();
      // Query again - should read fresh data
      const result2 = await backend.queryTraces({ spanName: 'clear-test' });
      assert.strictEqual(result2.length, 2, 'Should return fresh results after cache clear');
    });
    it('should cache logs query results', async () => {
      const today = getTestDate();
      const mockLogs = [
        { timestamp: `${today}T10:00:00Z`, body: 'Test log message', severity: 'INFO' },
      ];
      writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
      const result1 = await backend.queryLogs({ search: 'Test' });
      assert.strictEqual(result1.length, 1);
      // Add another log to file
      const newLogs = [
        { timestamp: `${today}T10:00:00Z`, body: 'Test log message', severity: 'INFO' },
        { timestamp: `${today}T11:00:00Z`, body: 'Another Test log', severity: 'INFO' },
      ];
      writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), newLogs);
      // Should return cached result
      const result2 = await backend.queryLogs({ search: 'Test' });
      assert.strictEqual(result2.length, 1, 'Should return cached logs result');
    });
    it('should cache metrics query results', async () => {
      const today = getTestDate();
      const mockMetrics = [
        { timestamp: `${today}T10:00:00Z`, name: 'test.metric', value: 100, type: 'gauge' },
      ];
      writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
      const result1 = await backend.queryMetrics({ metricName: 'test.metric' });
      assert.strictEqual(result1.length, 1);
      // Add another metric to file
      const newMetrics = [
        { timestamp: `${today}T10:00:00Z`, name: 'test.metric', value: 100, type: 'gauge' },
        { timestamp: `${today}T11:00:00Z`, name: 'test.metric', value: 200, type: 'gauge' },
      ];
      writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), newMetrics);
      // Should return cached result
      const result2 = await backend.queryMetrics({ metricName: 'test.metric' });
      assert.strictEqual(result2.length, 1, 'Should return cached metrics result');
    });
    it('should cache LLM events query results', async () => {
      const today = getTestDate();
      const mockEvents = [
        { timestamp: `${today}T10:00:00Z`, name: 'llm.call', attributes: { model: 'claude' } },
      ];
      writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
      const result1 = await backend.queryLLMEvents({ eventName: 'llm.call' });
      assert.strictEqual(result1.length, 1);
      // Add another event to file
      const newEvents = [
        { timestamp: `${today}T10:00:00Z`, name: 'llm.call', attributes: { model: 'claude' } },
        { timestamp: `${today}T11:00:00Z`, name: 'llm.call', attributes: { model: 'gpt' } },
      ];
      writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), newEvents);
      // Should return cached result
      const result2 = await backend.queryLLMEvents({ eventName: 'llm.call' });
      assert.strictEqual(result2.length, 1, 'Should return cached LLM events result');
    });
  });
  describe('getCacheStats', () => {
    it('should return correct cache stats structure', () => {
      const stats = backend.getCacheStats();
      assert.ok(stats.traces, 'Should have traces stats');
      assert.ok(stats.logs, 'Should have logs stats');
      assert.ok(stats.metrics, 'Should have metrics stats');
      assert.ok(stats.llmEvents, 'Should have llmEvents stats');
      // Check structure of each cache stat
      for (const key of ['traces', 'logs', 'metrics', 'llmEvents']) {
        const stat = stats[key];
        assert.strictEqual(typeof stat.hits, 'number', `${key} should have hits`);
        assert.strictEqual(typeof stat.misses, 'number', `${key} should have misses`);
        assert.strictEqual(typeof stat.evictions, 'number', `${key} should have evictions`);
        assert.strictEqual(typeof stat.size, 'number', `${key} should have size`);
        assert.strictEqual(typeof stat.hitRate, 'number', `${key} should have hitRate`);
      }
    });
    it('should track cache hits and misses', async () => {
      const today = getTestDate();
      const mockSpans = [
        { traceId: 'stats-test-1', spanId: 'span1', name: 'test-op', startTime: [1700000000, 0] },
      ];
      writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
      // Initial stats should be zero
      let stats = backend.getCacheStats();
      assert.strictEqual(stats.traces.hits, 0);
      assert.strictEqual(stats.traces.misses, 0);
      // First query - cache miss
      await backend.queryTraces({ spanName: 'test-op' });
      stats = backend.getCacheStats();
      assert.strictEqual(stats.traces.misses, 1);
      assert.strictEqual(stats.traces.hits, 0);
      // Second query with same options - cache hit
      await backend.queryTraces({ spanName: 'test-op' });
      stats = backend.getCacheStats();
      assert.strictEqual(stats.traces.hits, 1);
      assert.strictEqual(stats.traces.misses, 1);
      assert.ok(stats.traces.hitRate > 0, 'Hit rate should be > 0');
    });
  });
  describe('indexed queries', () => {
    it('should use index for trace queries when available', async () => {
      const today = getTestDate();
      const mockSpans = [
        { traceId: 'trace1', spanId: 'span1', name: 'user-create', startTime: [1700000000, 0], resource: { serviceName: 'service-a' } },
        { traceId: 'trace2', spanId: 'span2', name: 'db-query', startTime: [1700000001, 0], resource: { serviceName: 'service-b' } },
        { traceId: 'trace1', spanId: 'span3', name: 'user-update', startTime: [1700000002, 0], resource: { serviceName: 'service-a' } },
      ];
      const filePath = path.join(tempDir, `traces-${today}.jsonl`);
      writeJsonlFile(filePath, mockSpans);
      // Build index for the file
      await buildAndWriteIndex(filePath, 'traces');
      // Query should use the index
      const results = await backend.queryTraces({ traceId: 'trace1' });
      assert.strictEqual(results.length, 2);
      assert.ok(results.every(s => s.traceId === 'trace1'));
    });
    it('should fall back to full scan when no index exists', async () => {
      const today = getTestDate();
      const mockSpans = [
        { traceId: 'trace1', spanId: 'span1', name: 'op1', startTime: [1700000000, 0] },
        { traceId: 'trace2', spanId: 'span2', name: 'op2', startTime: [1700000001, 0] },
      ];
      writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
      // No index built - should still work via full scan
      const results = await backend.queryTraces({ traceId: 'trace1' });
      assert.strictEqual(results.length, 1);
      assert.strictEqual(results[0].traceId, 'trace1');
    });
    it('should use index for log queries when available', async () => {
      const today = getTestDate();
      // NOTE(review): records use `severityText` while the assertion reads
      // `severity` - presumably the backend normalizes the field; confirm.
      const mockLogs = [
        { timestamp: `${today}T10:00:00Z`, severityText: 'ERROR', body: 'Error occurred', traceId: 'trace1' },
        { timestamp: `${today}T10:01:00Z`, severityText: 'INFO', body: 'Info message', traceId: 'trace2' },
        { timestamp: `${today}T10:02:00Z`, severityText: 'ERROR', body: 'Another error', traceId: 'trace3' },
      ];
      const filePath = path.join(tempDir, `logs-${today}.jsonl`);
      writeJsonlFile(filePath, mockLogs);
      // Build index for the file
      await buildAndWriteIndex(filePath, 'logs');
      // Query should use the index
      const results = await backend.queryLogs({ severity: 'ERROR' });
      assert.strictEqual(results.length, 2);
      assert.ok(results.every(l => l.severity === 'ERROR'));
    });
    it('should use index for metric queries when available', async () => {
      const today = getTestDate();
      const mockMetrics = [
        { timestamp: `${today}T10:00:00Z`, name: 'http.request.duration', value: 100 },
        { timestamp: `${today}T10:01:00Z`, name: 'db.query.count', value: 50 },
        { timestamp: `${today}T10:02:00Z`, name: 'http.request.size', value: 1024 },
      ];
      const filePath = path.join(tempDir, `metrics-${today}.jsonl`);
      writeJsonlFile(filePath, mockMetrics);
      // Build index for the file
      await buildAndWriteIndex(filePath, 'metrics');
      // Query should use the index; metricName matches as a substring.
      const results = await backend.queryMetrics({ metricName: 'http' });
      assert.strictEqual(results.length, 2);
      assert.ok(results.every(m => m.name.includes('http')));
    });
    it('should apply non-indexable filters after index lookup', async () => {
      const today = getTestDate();
      const mockSpans = [
        {
          traceId: 'trace1',
          spanId: 'span1',
          name: 'user-create',
          startTime: [1700000000, 0],
          endTime: [1700000000, 500000000], // 500ms duration
        },
        {
          traceId: 'trace1',
          spanId: 'span2',
          name: 'user-update',
          startTime: [1700000000, 0],
          endTime: [1700000002, 0], // 2s duration
        },
      ];
      const filePath = path.join(tempDir, `traces-${today}.jsonl`);
      writeJsonlFile(filePath, mockSpans);
      // Build index
      await buildAndWriteIndex(filePath, 'traces');
      // Query with both indexable (traceId) and non-indexable (minDurationMs) filters
      const results = await backend.queryTraces({ traceId: 'trace1', minDurationMs: 1000 });
      assert.strictEqual(results.length, 1);
      assert.strictEqual(results[0].name, 'user-update');
    });
    it('should work correctly when index is stale', async () => {
      const today = getTestDate();
      const filePath = path.join(tempDir, `traces-${today}.jsonl`);
      // Create initial file and index
      const initialSpans = [
        { traceId: 'trace1', spanId: 'span1', name: 'op1', startTime: [1700000000, 0] },
      ];
      writeJsonlFile(filePath, initialSpans);
      await buildAndWriteIndex(filePath, 'traces');
      // Modify the file to make index stale
      // (the short sleep ensures the file mtime advances past the index's)
      await new Promise(resolve => setTimeout(resolve, 10));
      const updatedSpans = [
        { traceId: 'trace1', spanId: 'span1', name: 'op1', startTime: [1700000000, 0] },
        { traceId: 'trace2', spanId: 'span2', name: 'op2', startTime: [1700000001, 0] },
      ];
      writeJsonlFile(filePath, updatedSpans);
      // Query should fall back to full scan and find both spans
      const results = await backend.queryTraces({});
      assert.strictEqual(results.length, 2);
    });
    it('should respect useIndexes=false constructor option', async () => {
      const today = getTestDate();
      const mockSpans = [
        { traceId: 'trace1', spanId: 'span1', name: 'op1', startTime: [1700000000, 0] },
        { traceId: 'trace2', spanId: 'span2', name: 'op2', startTime: [1700000001, 0] },
      ];
      const filePath = path.join(tempDir, `traces-${today}.jsonl`);
      writeJsonlFile(filePath, mockSpans);
      await buildAndWriteIndex(filePath, 'traces');
      // Create backend with indexes disabled
      const noIndexBackend = new LocalJsonlBackend(tempDir, false);
      // Should still work via full scan
      const results = await noIndexBackend.queryTraces({ traceId: 'trace1' });
      assert.strictEqual(results.length, 1);
      assert.strictEqual(results[0].traceId, 'trace1');
    });
  });
});
|
|
3708
|
-
describe('MultiDirectoryBackend', () => {
|
|
3709
|
-
let projectDir;
|
|
3710
|
-
before(() => {
|
|
3711
|
-
projectDir = getSharedTempDir('MultiDirectoryBackend');
|
|
3712
|
-
});
|
|
3713
|
-
beforeEach(() => {
|
|
3714
|
-
// Clear project directory for each test
|
|
3715
|
-
clearTempDir(projectDir);
|
|
3716
|
-
});
|
|
3717
|
-
after(() => {
|
|
3718
|
-
removeSharedTempDir('MultiDirectoryBackend');
|
|
3719
|
-
});
|
|
3720
|
-
describe('constructor and getDirectories', () => {
|
|
3721
|
-
it('should return directories when local telemetry dirs exist', () => {
|
|
3722
|
-
// Create local telemetry directory
|
|
3723
|
-
const localTelemetry = path.join(projectDir, 'telemetry');
|
|
3724
|
-
fs.mkdirSync(localTelemetry, { recursive: true });
|
|
3725
|
-
const backend = new MultiDirectoryBackend(projectDir);
|
|
3726
|
-
const dirs = backend.getDirectories();
|
|
3727
|
-
assert.ok(Array.isArray(dirs));
|
|
3728
|
-
// Should include the local telemetry directory (resolved path due to symlink protection)
|
|
3729
|
-
const resolvedPath = fs.realpathSync(localTelemetry);
|
|
3730
|
-
const localDir = dirs.find(d => d.source === 'local' && d.path === resolvedPath);
|
|
3731
|
-
assert.ok(localDir);
|
|
3732
|
-
});
|
|
3733
|
-
it('should have name property set to multi-directory', () => {
|
|
3734
|
-
const backend = new MultiDirectoryBackend(projectDir);
|
|
3735
|
-
assert.strictEqual(backend.name, 'multi-directory');
|
|
3736
|
-
});
|
|
3737
|
-
it('should detect .telemetry local directory', () => {
|
|
3738
|
-
const localTelemetry = path.join(projectDir, '.telemetry');
|
|
3739
|
-
fs.mkdirSync(localTelemetry, { recursive: true });
|
|
3740
|
-
const backend = new MultiDirectoryBackend(projectDir);
|
|
3741
|
-
const dirs = backend.getDirectories();
|
|
3742
|
-
// Compare against resolved path (due to symlink protection)
|
|
3743
|
-
const resolvedPath = fs.realpathSync(localTelemetry);
|
|
3744
|
-
const localDir = dirs.find(d => d.path === resolvedPath);
|
|
3745
|
-
assert.ok(localDir);
|
|
3746
|
-
assert.strictEqual(localDir?.source, 'local');
|
|
3747
|
-
});
|
|
3748
|
-
it('should detect .claude/telemetry local directory', () => {
|
|
3749
|
-
const localTelemetry = path.join(projectDir, '.claude', 'telemetry');
|
|
3750
|
-
fs.mkdirSync(localTelemetry, { recursive: true });
|
|
3751
|
-
const backend = new MultiDirectoryBackend(projectDir);
|
|
3752
|
-
const dirs = backend.getDirectories();
|
|
3753
|
-
// Compare against resolved path (due to symlink protection)
|
|
3754
|
-
const resolvedPath = fs.realpathSync(localTelemetry);
|
|
3755
|
-
const localDir = dirs.find(d => d.path === resolvedPath);
|
|
3756
|
-
assert.ok(localDir);
|
|
3757
|
-
assert.strictEqual(localDir?.source, 'local');
|
|
3758
|
-
});
|
|
3759
|
-
});
|
|
3760
|
-
describe('queryTraces', () => {
|
|
3761
|
-
it('should query traces from local telemetry directory', async () => {
|
|
3762
|
-
const today = getTestDate();
|
|
3763
|
-
const localTelemetry = path.join(projectDir, 'telemetry');
|
|
3764
|
-
fs.mkdirSync(localTelemetry, { recursive: true });
|
|
3765
|
-
// Create traces in local directory with unique IDs
|
|
3766
|
-
writeJsonlFile(path.join(localTelemetry, `traces-${today}.jsonl`), [
|
|
3767
|
-
{ traceId: 'multidir-unique-test-abc123-1', spanId: 'span1', name: 'MultiDirUniqueOp_XYZ', startTime: [1700000000, 0] },
|
|
3768
|
-
{ traceId: 'multidir-unique-test-abc123-2', spanId: 'span2', name: 'MultiDirUniqueOp_XYZ', startTime: [1700000002, 0] },
|
|
3769
|
-
]);
|
|
3770
|
-
const backend = new MultiDirectoryBackend(projectDir);
|
|
3771
|
-
// Use spanName filter to find our specific test traces
|
|
3772
|
-
const results = await backend.queryTraces({ spanName: 'MultiDirUniqueOp_XYZ' });
|
|
3773
|
-
assert.strictEqual(results.length, 2);
|
|
3774
|
-
assert.ok(results.some(t => t.traceId === 'multidir-unique-test-abc123-1'), 'Should find test trace 1');
|
|
3775
|
-
assert.ok(results.some(t => t.traceId === 'multidir-unique-test-abc123-2'), 'Should find test trace 2');
|
|
3776
|
-
});
|
|
3777
|
-
it('should merge and sort traces by timestamp', async () => {
|
|
3778
|
-
const today = getTestDate();
|
|
3779
|
-
// Create two local telemetry directories
|
|
3780
|
-
const localTelemetry1 = path.join(projectDir, 'telemetry');
|
|
3781
|
-
const localTelemetry2 = path.join(projectDir, '.telemetry');
|
|
3782
|
-
fs.mkdirSync(localTelemetry1, { recursive: true });
|
|
3783
|
-
fs.mkdirSync(localTelemetry2, { recursive: true });
|
|
3784
|
-
// Create traces with different timestamps and unique operation name
|
|
3785
|
-
writeJsonlFile(path.join(localTelemetry1, `traces-${today}.jsonl`), [
|
|
3786
|
-
{ traceId: 'multidir-sort-old-xyz789', spanId: 'span1', name: 'MultiDirSortOp_ABC', startTime: [1700000000, 0] },
|
|
3787
|
-
]);
|
|
3788
|
-
writeJsonlFile(path.join(localTelemetry2, `traces-${today}.jsonl`), [
|
|
3789
|
-
{ traceId: 'multidir-sort-new-xyz789', spanId: 'span2', name: 'MultiDirSortOp_ABC', startTime: [1700000010, 0] },
|
|
3790
|
-
]);
|
|
3791
|
-
const backend = new MultiDirectoryBackend(projectDir);
|
|
3792
|
-
const results = await backend.queryTraces({ spanName: 'MultiDirSortOp_ABC' });
|
|
3793
|
-
assert.strictEqual(results.length, 2);
|
|
3794
|
-
// Newer trace should be first (sorted by startTimeUnixNano descending)
|
|
3795
|
-
assert.strictEqual(results[0].traceId, 'multidir-sort-new-xyz789');
|
|
3796
|
-
assert.strictEqual(results[1].traceId, 'multidir-sort-old-xyz789');
|
|
3797
|
-
});
|
|
3798
|
-
it('should respect limit parameter', async () => {
|
|
3799
|
-
const today = getTestDate();
|
|
3800
|
-
const localTelemetry = path.join(projectDir, 'telemetry');
|
|
3801
|
-
fs.mkdirSync(localTelemetry, { recursive: true });
|
|
3802
|
-
// Create many traces with unique prefix
|
|
3803
|
-
writeJsonlFile(path.join(localTelemetry, `traces-${today}.jsonl`), Array.from({ length: 150 }, (_, i) => ({
|
|
3804
|
-
traceId: `multidir-limit-trace-${i}`,
|
|
3805
|
-
spanId: `span${i}`,
|
|
3806
|
-
name: `MultiDirLimitOp_${i}`,
|
|
3807
|
-
startTime: [1700000000 + i, 0],
|
|
3808
|
-
})));
|
|
3809
|
-
const backend = new MultiDirectoryBackend(projectDir);
|
|
3810
|
-
const results = await backend.queryTraces({ spanName: 'MultiDirLimitOp', limit: 50 });
|
|
3811
|
-
assert.ok(results.length <= 50);
|
|
3812
|
-
});
|
|
3813
|
-
});
|
|
3814
|
-
describe('queryLogs', () => {
|
|
3815
|
-
it('should query logs from local telemetry directory', async () => {
|
|
3816
|
-
const today = getTestDate();
|
|
3817
|
-
const localTelemetry = path.join(projectDir, 'telemetry');
|
|
3818
|
-
fs.mkdirSync(localTelemetry, { recursive: true });
|
|
3819
|
-
writeJsonlFile(path.join(localTelemetry, `logs-${today}.jsonl`), [
|
|
3820
|
-
{ timestamp: `${today}T10:00:00Z`, body: 'MultiDirUniqueTestLog_ABC123_1', severity: 'INFO' },
|
|
3821
|
-
{ timestamp: `${today}T11:00:00Z`, body: 'MultiDirUniqueTestLog_ABC123_2', severity: 'ERROR' },
|
|
3822
|
-
]);
|
|
3823
|
-
const backend = new MultiDirectoryBackend(projectDir);
|
|
3824
|
-
// Use search filter to find our specific test logs
|
|
3825
|
-
const results = await backend.queryLogs({ search: 'MultiDirUniqueTestLog_ABC123' });
|
|
3826
|
-
assert.strictEqual(results.length, 2);
|
|
3827
|
-
assert.ok(results.some(l => l.body === 'MultiDirUniqueTestLog_ABC123_1'), 'Should find test log 1');
|
|
3828
|
-
assert.ok(results.some(l => l.body === 'MultiDirUniqueTestLog_ABC123_2'), 'Should find test log 2');
|
|
3829
|
-
});
|
|
3830
|
-
it('should sort logs by timestamp descending', async () => {
|
|
3831
|
-
const today = getTestDate();
|
|
3832
|
-
const localTelemetry = path.join(projectDir, 'telemetry');
|
|
3833
|
-
fs.mkdirSync(localTelemetry, { recursive: true });
|
|
3834
|
-
writeJsonlFile(path.join(localTelemetry, `logs-${today}.jsonl`), [
|
|
3835
|
-
{ timestamp: `${today}T08:00:00Z`, body: 'MultiDirSortTest_Early', severity: 'INFO' },
|
|
3836
|
-
{ timestamp: `${today}T12:00:00Z`, body: 'MultiDirSortTest_Late', severity: 'INFO' },
|
|
3837
|
-
]);
|
|
3838
|
-
const backend = new MultiDirectoryBackend(projectDir);
|
|
3839
|
-
const results = await backend.queryLogs({ search: 'MultiDirSortTest' });
|
|
3840
|
-
assert.strictEqual(results.length, 2);
|
|
3841
|
-
// Later log should come first (sorted descending)
|
|
3842
|
-
assert.strictEqual(results[0].body, 'MultiDirSortTest_Late');
|
|
3843
|
-
assert.strictEqual(results[1].body, 'MultiDirSortTest_Early');
|
|
3844
|
-
});
|
|
3845
|
-
});
|
|
3846
|
-
describe('queryMetrics', () => {
|
|
3847
|
-
it('should query metrics from local telemetry directory', async () => {
|
|
3848
|
-
const today = getTestDate();
|
|
3849
|
-
const localTelemetry = path.join(projectDir, 'telemetry');
|
|
3850
|
-
fs.mkdirSync(localTelemetry, { recursive: true });
|
|
3851
|
-
writeJsonlFile(path.join(localTelemetry, `metrics-${today}.jsonl`), [
|
|
3852
|
-
{ timestamp: `${today}T10:00:00Z`, name: 'multidir.unique.xyz789.metric1', value: 100, type: 'gauge' },
|
|
3853
|
-
{ timestamp: `${today}T11:00:00Z`, name: 'multidir.unique.xyz789.metric2', value: 200, type: 'gauge' },
|
|
3854
|
-
]);
|
|
3855
|
-
const backend = new MultiDirectoryBackend(projectDir);
|
|
3856
|
-
// Use metricName filter to find our specific test metrics
|
|
3857
|
-
const results = await backend.queryMetrics({ metricName: 'multidir.unique.xyz789' });
|
|
3858
|
-
assert.strictEqual(results.length, 2);
|
|
3859
|
-
assert.ok(results.some(m => m.name === 'multidir.unique.xyz789.metric1'), 'Should find test metric 1');
|
|
3860
|
-
assert.ok(results.some(m => m.name === 'multidir.unique.xyz789.metric2'), 'Should find test metric 2');
|
|
3861
|
-
});
|
|
3862
|
-
it('should respect limit parameter', async () => {
|
|
3863
|
-
const today = getTestDate();
|
|
3864
|
-
const localTelemetry = path.join(projectDir, 'telemetry');
|
|
3865
|
-
fs.mkdirSync(localTelemetry, { recursive: true });
|
|
3866
|
-
writeJsonlFile(path.join(localTelemetry, `metrics-${today}.jsonl`), Array.from({ length: 150 }, (_, i) => ({
|
|
3867
|
-
timestamp: `${today}T10:${String(Math.floor(i / 60) % 60).padStart(2, '0')}:${String(i % 60).padStart(2, '0')}Z`,
|
|
3868
|
-
name: `multidir.limit.abc123.metric-${i}`,
|
|
3869
|
-
value: i * 10,
|
|
3870
|
-
type: 'gauge',
|
|
3871
|
-
})));
|
|
3872
|
-
const backend = new MultiDirectoryBackend(projectDir);
|
|
3873
|
-
const results = await backend.queryMetrics({ metricName: 'multidir.limit.abc123', limit: 50 });
|
|
3874
|
-
assert.ok(results.length <= 50);
|
|
3875
|
-
});
|
|
3876
|
-
});
|
|
3877
|
-
describe('queryLLMEvents', () => {
|
|
3878
|
-
it('should query LLM events from local telemetry directory', async () => {
|
|
3879
|
-
const today = getTestDate();
|
|
3880
|
-
const localTelemetry = path.join(projectDir, 'telemetry');
|
|
3881
|
-
fs.mkdirSync(localTelemetry, { recursive: true });
|
|
3882
|
-
writeJsonlFile(path.join(localTelemetry, `llm-events-${today}.jsonl`), [
|
|
3883
|
-
{ timestamp: `${today}T10:00:00Z`, name: 'multidir.unique.xyz456.llm.event1', attributes: { model: 'claude' } },
|
|
3884
|
-
{ timestamp: `${today}T11:00:00Z`, name: 'multidir.unique.xyz456.llm.event2', attributes: { model: 'gpt' } },
|
|
3885
|
-
]);
|
|
3886
|
-
const backend = new MultiDirectoryBackend(projectDir);
|
|
3887
|
-
// Use eventName filter to find our specific test events
|
|
3888
|
-
const results = await backend.queryLLMEvents({ eventName: 'multidir.unique.xyz456' });
|
|
3889
|
-
assert.strictEqual(results.length, 2);
|
|
3890
|
-
assert.ok(results.some(e => e.name === 'multidir.unique.xyz456.llm.event1'), 'Should find test event 1');
|
|
3891
|
-
assert.ok(results.some(e => e.name === 'multidir.unique.xyz456.llm.event2'), 'Should find test event 2');
|
|
3892
|
-
});
|
|
3893
|
-
it('should sort LLM events by timestamp descending', async () => {
|
|
3894
|
-
const today = getTestDate();
|
|
3895
|
-
const localTelemetry = path.join(projectDir, 'telemetry');
|
|
3896
|
-
fs.mkdirSync(localTelemetry, { recursive: true });
|
|
3897
|
-
writeJsonlFile(path.join(localTelemetry, `llm-events-${today}.jsonl`), [
|
|
3898
|
-
{ timestamp: `${today}T08:00:00Z`, name: 'multidir.sort.abc789.early', attributes: {} },
|
|
3899
|
-
{ timestamp: `${today}T12:00:00Z`, name: 'multidir.sort.abc789.late', attributes: {} },
|
|
3900
|
-
]);
|
|
3901
|
-
const backend = new MultiDirectoryBackend(projectDir);
|
|
3902
|
-
const results = await backend.queryLLMEvents({ eventName: 'multidir.sort.abc789' });
|
|
3903
|
-
assert.strictEqual(results.length, 2);
|
|
3904
|
-
// Later event should be first (sorted descending)
|
|
3905
|
-
assert.strictEqual(results[0].name, 'multidir.sort.abc789.late');
|
|
3906
|
-
assert.strictEqual(results[1].name, 'multidir.sort.abc789.early');
|
|
3907
|
-
});
|
|
3908
|
-
});
|
|
3909
|
-
describe('healthCheck', () => {
|
|
3910
|
-
it('should return error when no directories found', async () => {
|
|
3911
|
-
// Create a backend with cwd that has no telemetry directories
|
|
3912
|
-
// AND ensure global telemetry doesn't exist for this test
|
|
3913
|
-
const emptyProject = createTempDir();
|
|
3914
|
-
try {
|
|
3915
|
-
const backend = new MultiDirectoryBackend(emptyProject);
|
|
3916
|
-
// Only test error condition if we actually have no directories
|
|
3917
|
-
if (backend.getDirectories().length === 0) {
|
|
3918
|
-
const health = await backend.healthCheck();
|
|
3919
|
-
assert.strictEqual(health.status, 'error');
|
|
3920
|
-
assert.ok(health.message?.includes('No telemetry directories'));
|
|
3921
|
-
}
|
|
3922
|
-
}
|
|
3923
|
-
finally {
|
|
3924
|
-
removeTempDir(emptyProject);
|
|
3925
|
-
}
|
|
3926
|
-
});
|
|
3927
|
-
it('should return ok when local telemetry directory exists', async () => {
|
|
3928
|
-
const today = getTestDate();
|
|
3929
|
-
const localTelemetry = path.join(projectDir, 'telemetry');
|
|
3930
|
-
fs.mkdirSync(localTelemetry, { recursive: true });
|
|
3931
|
-
// Create some telemetry files
|
|
3932
|
-
writeJsonlFile(path.join(localTelemetry, `traces-${today}.jsonl`), []);
|
|
3933
|
-
const backend = new MultiDirectoryBackend(projectDir);
|
|
3934
|
-
const health = await backend.healthCheck();
|
|
3935
|
-
assert.strictEqual(health.status, 'ok');
|
|
3936
|
-
assert.ok(health.directories);
|
|
3937
|
-
assert.ok(health.directories.length > 0);
|
|
3938
|
-
});
|
|
3939
|
-
it('should include directory statuses in health response', async () => {
|
|
3940
|
-
const localTelemetry = path.join(projectDir, 'telemetry');
|
|
3941
|
-
fs.mkdirSync(localTelemetry, { recursive: true });
|
|
3942
|
-
const backend = new MultiDirectoryBackend(projectDir);
|
|
3943
|
-
const health = await backend.healthCheck();
|
|
3944
|
-
if (health.directories) {
|
|
3945
|
-
for (const dir of health.directories) {
|
|
3946
|
-
assert.ok(dir.path);
|
|
3947
|
-
assert.ok(dir.source);
|
|
3948
|
-
assert.ok(dir.status);
|
|
3949
|
-
}
|
|
3950
|
-
}
|
|
3951
|
-
});
|
|
3952
|
-
it('should report correct directory count in message', async () => {
|
|
3953
|
-
const localTelemetry = path.join(projectDir, 'telemetry');
|
|
3954
|
-
fs.mkdirSync(localTelemetry, { recursive: true });
|
|
3955
|
-
const backend = new MultiDirectoryBackend(projectDir);
|
|
3956
|
-
const health = await backend.healthCheck();
|
|
3957
|
-
assert.ok(health.message?.includes('telemetry director'));
|
|
3958
|
-
});
|
|
3959
|
-
});
|
|
3960
|
-
describe('getCacheStats', () => {
|
|
3961
|
-
it('should return aggregated cache stats from all backends', () => {
|
|
3962
|
-
const localTelemetry = path.join(projectDir, 'telemetry');
|
|
3963
|
-
fs.mkdirSync(localTelemetry, { recursive: true });
|
|
3964
|
-
const backend = new MultiDirectoryBackend(projectDir);
|
|
3965
|
-
const stats = backend.getCacheStats();
|
|
3966
|
-
assert.ok(stats.traces, 'Should have traces stats');
|
|
3967
|
-
assert.ok(stats.logs, 'Should have logs stats');
|
|
3968
|
-
assert.ok(stats.metrics, 'Should have metrics stats');
|
|
3969
|
-
assert.ok(stats.llmEvents, 'Should have llmEvents stats');
|
|
3970
|
-
});
|
|
3971
|
-
it('should have correct structure for aggregated stats', () => {
|
|
3972
|
-
const localTelemetry = path.join(projectDir, 'telemetry');
|
|
3973
|
-
fs.mkdirSync(localTelemetry, { recursive: true });
|
|
3974
|
-
const backend = new MultiDirectoryBackend(projectDir);
|
|
3975
|
-
const stats = backend.getCacheStats();
|
|
3976
|
-
for (const key of ['traces', 'logs', 'metrics', 'llmEvents']) {
|
|
3977
|
-
const stat = stats[key];
|
|
3978
|
-
assert.strictEqual(typeof stat.hits, 'number', `${key} should have hits`);
|
|
3979
|
-
assert.strictEqual(typeof stat.misses, 'number', `${key} should have misses`);
|
|
3980
|
-
assert.strictEqual(typeof stat.evictions, 'number', `${key} should have evictions`);
|
|
3981
|
-
assert.strictEqual(typeof stat.size, 'number', `${key} should have size`);
|
|
3982
|
-
assert.strictEqual(typeof stat.hitRate, 'number', `${key} should have hitRate`);
|
|
3983
|
-
assert.ok(stat.hitRate >= 0 && stat.hitRate <= 1, `${key} hitRate should be between 0 and 1`);
|
|
3984
|
-
}
|
|
3985
|
-
});
|
|
3986
|
-
});
|
|
3987
|
-
});
|
|
3988
|
-
/**
|
|
3989
|
-
* OTLP Export Tests
|
|
3990
|
-
*/
|
|
3991
|
-
describe('OTLP Export', () => {
|
|
3992
|
-
let tempDir;
|
|
3993
|
-
let backend;
|
|
3994
|
-
before(() => {
|
|
3995
|
-
tempDir = getSharedTempDir('OTLPExport');
|
|
3996
|
-
});
|
|
3997
|
-
beforeEach(() => {
|
|
3998
|
-
clearTempDir(tempDir);
|
|
3999
|
-
backend = new LocalJsonlBackend(tempDir);
|
|
4000
|
-
});
|
|
4001
|
-
after(() => {
|
|
4002
|
-
removeSharedTempDir('OTLPExport');
|
|
4003
|
-
});
|
|
4004
|
-
describe('exportTracesOTLP', () => {
|
|
4005
|
-
it('should export traces in OTLP JSON format', async () => {
|
|
4006
|
-
const today = getTestDate();
|
|
4007
|
-
const mockSpans = [
|
|
4008
|
-
{
|
|
4009
|
-
traceId: 'trace1',
|
|
4010
|
-
spanId: 'span1',
|
|
4011
|
-
parentSpanId: undefined,
|
|
4012
|
-
name: 'root-span',
|
|
4013
|
-
kind: 1, // SERVER
|
|
4014
|
-
startTime: [1700000000, 0],
|
|
4015
|
-
endTime: [1700000001, 500000000],
|
|
4016
|
-
status: { code: 1, message: 'OK' },
|
|
4017
|
-
resource: { serviceName: 'test-service', serviceVersion: '1.0.0' },
|
|
4018
|
-
attributes: { 'http.method': 'GET', 'http.status_code': 200 },
|
|
4019
|
-
},
|
|
4020
|
-
{
|
|
4021
|
-
traceId: 'trace1',
|
|
4022
|
-
spanId: 'span2',
|
|
4023
|
-
parentSpanId: 'span1',
|
|
4024
|
-
name: 'db-query',
|
|
4025
|
-
kind: 2, // CLIENT
|
|
4026
|
-
startTime: [1700000000, 100000000],
|
|
4027
|
-
endTime: [1700000000, 500000000],
|
|
4028
|
-
status: { code: 1 },
|
|
4029
|
-
resource: { serviceName: 'test-service' },
|
|
4030
|
-
attributes: { 'db.system': 'postgresql' },
|
|
4031
|
-
},
|
|
4032
|
-
];
|
|
4033
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
4034
|
-
const otlp = await backend.exportTracesOTLP({});
|
|
4035
|
-
assert.ok(otlp.resourceSpans);
|
|
4036
|
-
assert.strictEqual(otlp.resourceSpans.length, 1);
|
|
4037
|
-
const resourceSpan = otlp.resourceSpans[0];
|
|
4038
|
-
assert.ok(resourceSpan.resource);
|
|
4039
|
-
assert.ok(resourceSpan.resource.attributes.some((a) => a.key === 'service.name' && a.value.stringValue === 'test-service'));
|
|
4040
|
-
assert.ok(resourceSpan.scopeSpans);
|
|
4041
|
-
assert.strictEqual(resourceSpan.scopeSpans.length, 1);
|
|
4042
|
-
const scopeSpan = resourceSpan.scopeSpans[0];
|
|
4043
|
-
assert.strictEqual(scopeSpan.spans.length, 2);
|
|
4044
|
-
const rootSpan = scopeSpan.spans.find((s) => s.name === 'root-span');
|
|
4045
|
-
assert.ok(rootSpan);
|
|
4046
|
-
assert.strictEqual(rootSpan.traceId, 'trace1');
|
|
4047
|
-
assert.strictEqual(rootSpan.spanId, 'span1');
|
|
4048
|
-
assert.strictEqual(rootSpan.kind, 1); // SERVER
|
|
4049
|
-
assert.strictEqual(rootSpan.status?.code, 1);
|
|
4050
|
-
});
|
|
4051
|
-
it('should group spans by service name in OTLP export', async () => {
|
|
4052
|
-
const today = getTestDate();
|
|
4053
|
-
const mockSpans = [
|
|
4054
|
-
{
|
|
4055
|
-
traceId: 'trace1',
|
|
4056
|
-
spanId: 'span1',
|
|
4057
|
-
name: 'service-a-op',
|
|
4058
|
-
startTime: [1700000000, 0],
|
|
4059
|
-
resource: { serviceName: 'service-a' },
|
|
4060
|
-
},
|
|
4061
|
-
{
|
|
4062
|
-
traceId: 'trace1',
|
|
4063
|
-
spanId: 'span2',
|
|
4064
|
-
name: 'service-b-op',
|
|
4065
|
-
startTime: [1700000000, 0],
|
|
4066
|
-
resource: { serviceName: 'service-b' },
|
|
4067
|
-
},
|
|
4068
|
-
];
|
|
4069
|
-
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
4070
|
-
const otlp = await backend.exportTracesOTLP({});
|
|
4071
|
-
assert.strictEqual(otlp.resourceSpans.length, 2);
|
|
4072
|
-
const serviceNames = otlp.resourceSpans.map((rs) => rs.resource.attributes.find((a) => a.key === 'service.name')?.value.stringValue);
|
|
4073
|
-
assert.ok(serviceNames.includes('service-a'));
|
|
4074
|
-
assert.ok(serviceNames.includes('service-b'));
|
|
4075
|
-
});
|
|
4076
|
-
});
|
|
4077
|
-
describe('exportLogsOTLP', () => {
|
|
4078
|
-
it('should export logs in OTLP JSON format', async () => {
|
|
4079
|
-
const today = getTestDate();
|
|
4080
|
-
const mockLogs = [
|
|
4081
|
-
{
|
|
4082
|
-
timestamp: '2024-01-15T10:00:00.000Z',
|
|
4083
|
-
severityText: 'ERROR',
|
|
4084
|
-
body: 'Database connection failed',
|
|
4085
|
-
traceId: 'trace1',
|
|
4086
|
-
spanId: 'span1',
|
|
4087
|
-
resource: { serviceName: 'test-service' },
|
|
4088
|
-
attributes: { 'error.type': 'ConnectionError' },
|
|
4089
|
-
},
|
|
4090
|
-
{
|
|
4091
|
-
timestamp: '2024-01-15T10:00:01.000Z',
|
|
4092
|
-
severityText: 'INFO',
|
|
4093
|
-
body: 'Request processed',
|
|
4094
|
-
resource: { serviceName: 'test-service' },
|
|
4095
|
-
},
|
|
4096
|
-
];
|
|
4097
|
-
writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
|
|
4098
|
-
const otlp = await backend.exportLogsOTLP({});
|
|
4099
|
-
assert.ok(otlp.resourceLogs);
|
|
4100
|
-
assert.strictEqual(otlp.resourceLogs.length, 1);
|
|
4101
|
-
const resourceLog = otlp.resourceLogs[0];
|
|
4102
|
-
assert.ok(resourceLog.resource);
|
|
4103
|
-
assert.ok(resourceLog.resource.attributes.some((a) => a.key === 'service.name' && a.value.stringValue === 'test-service'));
|
|
4104
|
-
assert.ok(resourceLog.scopeLogs);
|
|
4105
|
-
const scopeLog = resourceLog.scopeLogs[0];
|
|
4106
|
-
assert.strictEqual(scopeLog.logRecords.length, 2);
|
|
4107
|
-
const errorLog = scopeLog.logRecords.find((l) => l.severityText === 'ERROR');
|
|
4108
|
-
assert.ok(errorLog);
|
|
4109
|
-
assert.ok(errorLog.body?.stringValue?.includes('Database connection failed'));
|
|
4110
|
-
assert.strictEqual(errorLog.traceId, 'trace1');
|
|
4111
|
-
assert.strictEqual(errorLog.spanId, 'span1');
|
|
4112
|
-
assert.strictEqual(errorLog.severityNumber, 17); // ERROR severity number
|
|
4113
|
-
});
|
|
4114
|
-
});
|
|
4115
|
-
describe('exportMetricsOTLP', () => {
|
|
4116
|
-
it('should export gauge metrics in OTLP JSON format', async () => {
|
|
4117
|
-
const today = getTestDate();
|
|
4118
|
-
const mockMetrics = [
|
|
4119
|
-
{
|
|
4120
|
-
timestamp: '2024-01-15T10:00:00.000Z',
|
|
4121
|
-
name: 'cpu.utilization',
|
|
4122
|
-
value: 75.5,
|
|
4123
|
-
type: 'gauge',
|
|
4124
|
-
unit: 'percent',
|
|
4125
|
-
resource: { serviceName: 'test-service' },
|
|
4126
|
-
attributes: { 'host.name': 'server1' },
|
|
4127
|
-
},
|
|
4128
|
-
];
|
|
4129
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
4130
|
-
const otlp = await backend.exportMetricsOTLP({});
|
|
4131
|
-
assert.ok(otlp.resourceMetrics);
|
|
4132
|
-
assert.strictEqual(otlp.resourceMetrics.length, 1);
|
|
4133
|
-
const resourceMetric = otlp.resourceMetrics[0];
|
|
4134
|
-
assert.ok(resourceMetric.scopeMetrics);
|
|
4135
|
-
const metric = resourceMetric.scopeMetrics[0].metrics[0];
|
|
4136
|
-
assert.strictEqual(metric.name, 'cpu.utilization');
|
|
4137
|
-
assert.strictEqual(metric.unit, 'percent');
|
|
4138
|
-
assert.ok(metric.gauge);
|
|
4139
|
-
assert.strictEqual(metric.gauge.dataPoints.length, 1);
|
|
4140
|
-
assert.strictEqual(metric.gauge.dataPoints[0].asDouble, 75.5);
|
|
4141
|
-
});
|
|
4142
|
-
it('should export counter metrics with aggregation temporality', async () => {
|
|
4143
|
-
const today = getTestDate();
|
|
4144
|
-
const mockMetrics = [
|
|
4145
|
-
{
|
|
4146
|
-
timestamp: '2024-01-15T10:00:00.000Z',
|
|
4147
|
-
name: 'http.requests',
|
|
4148
|
-
value: 100,
|
|
4149
|
-
type: 'counter',
|
|
4150
|
-
unit: 'requests',
|
|
4151
|
-
aggregationTemporality: 2, // CUMULATIVE
|
|
4152
|
-
resource: { serviceName: 'test-service' },
|
|
4153
|
-
},
|
|
4154
|
-
];
|
|
4155
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
4156
|
-
const otlp = await backend.exportMetricsOTLP({});
|
|
4157
|
-
const metric = otlp.resourceMetrics[0].scopeMetrics[0].metrics[0];
|
|
4158
|
-
assert.ok(metric.sum);
|
|
4159
|
-
assert.strictEqual(metric.sum.aggregationTemporality, 2); // CUMULATIVE
|
|
4160
|
-
assert.strictEqual(metric.sum.isMonotonic, true);
|
|
4161
|
-
});
|
|
4162
|
-
it('should export histogram metrics in OTLP JSON format', async () => {
|
|
4163
|
-
const today = getTestDate();
|
|
4164
|
-
const mockMetrics = [
|
|
4165
|
-
{
|
|
4166
|
-
timestamp: '2024-01-15T10:00:00.000Z',
|
|
4167
|
-
name: 'http.request.duration',
|
|
4168
|
-
value: 250,
|
|
4169
|
-
type: 'histogram',
|
|
4170
|
-
unit: 'ms',
|
|
4171
|
-
histogram: {
|
|
4172
|
-
buckets: [
|
|
4173
|
-
{ le: 10, count: 5 },
|
|
4174
|
-
{ le: 50, count: 15 },
|
|
4175
|
-
{ le: 100, count: 25 },
|
|
4176
|
-
{ le: 500, count: 45 },
|
|
4177
|
-
{ le: Infinity, count: 50 },
|
|
4178
|
-
],
|
|
4179
|
-
sum: 12500,
|
|
4180
|
-
count: 50,
|
|
4181
|
-
},
|
|
4182
|
-
aggregationTemporality: 2,
|
|
4183
|
-
resource: { serviceName: 'test-service' },
|
|
4184
|
-
},
|
|
4185
|
-
];
|
|
4186
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
4187
|
-
const otlp = await backend.exportMetricsOTLP({});
|
|
4188
|
-
const metric = otlp.resourceMetrics[0].scopeMetrics[0].metrics[0];
|
|
4189
|
-
assert.strictEqual(metric.name, 'http.request.duration');
|
|
4190
|
-
assert.ok(metric.histogram);
|
|
4191
|
-
assert.strictEqual(metric.histogram.aggregationTemporality, 2);
|
|
4192
|
-
const dataPoint = metric.histogram.dataPoints[0];
|
|
4193
|
-
assert.strictEqual(dataPoint.count, '50');
|
|
4194
|
-
assert.strictEqual(dataPoint.sum, 12500);
|
|
4195
|
-
assert.strictEqual(dataPoint.bucketCounts.length, 5);
|
|
4196
|
-
});
|
|
4197
|
-
it('should include exemplars in OTLP metric export', async () => {
|
|
4198
|
-
const today = getTestDate();
|
|
4199
|
-
const mockMetrics = [
|
|
4200
|
-
{
|
|
4201
|
-
timestamp: '2024-01-15T10:00:00.000Z',
|
|
4202
|
-
name: 'http.latency',
|
|
4203
|
-
value: 150,
|
|
4204
|
-
type: 'gauge',
|
|
4205
|
-
unit: 'ms',
|
|
4206
|
-
exemplars: [
|
|
4207
|
-
{
|
|
4208
|
-
timestamp: '2024-01-15T10:00:00.000Z',
|
|
4209
|
-
value: 150,
|
|
4210
|
-
traceId: 'trace123',
|
|
4211
|
-
spanId: 'span456',
|
|
4212
|
-
},
|
|
4213
|
-
],
|
|
4214
|
-
resource: { serviceName: 'test-service' },
|
|
4215
|
-
},
|
|
4216
|
-
];
|
|
4217
|
-
writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
|
|
4218
|
-
const otlp = await backend.exportMetricsOTLP({});
|
|
4219
|
-
const dataPoint = otlp.resourceMetrics[0].scopeMetrics[0].metrics[0].gauge?.dataPoints[0];
|
|
4220
|
-
assert.ok(dataPoint?.exemplars);
|
|
4221
|
-
assert.strictEqual(dataPoint.exemplars.length, 1);
|
|
4222
|
-
assert.strictEqual(dataPoint.exemplars[0].traceId, 'trace123');
|
|
4223
|
-
assert.strictEqual(dataPoint.exemplars[0].spanId, 'span456');
|
|
4224
|
-
assert.strictEqual(dataPoint.exemplars[0].asDouble, 150);
|
|
4225
|
-
});
|
|
4226
|
-
});
|
|
4227
|
-
describe('queryEvaluations', () => {
|
|
4228
|
-
it('should read and normalize evaluations from JSONL files', async () => {
|
|
4229
|
-
const today = getTestDate();
|
|
4230
|
-
const mockEvaluations = [
|
|
4231
|
-
{
|
|
4232
|
-
timestamp: '2026-01-29T10:00:00Z',
|
|
4233
|
-
attributes: {
|
|
4234
|
-
'gen_ai.evaluation.name': 'Relevance',
|
|
4235
|
-
'gen_ai.evaluation.score.value': 0.92,
|
|
4236
|
-
'gen_ai.evaluation.score.label': 'relevant',
|
|
4237
|
-
'gen_ai.evaluation.explanation': 'Response addresses the query',
|
|
4238
|
-
},
|
|
4239
|
-
},
|
|
4240
|
-
];
|
|
4241
|
-
writeJsonlFile(path.join(tempDir, `evaluations-${today}.jsonl`), mockEvaluations);
|
|
4242
|
-
const results = await backend.queryEvaluations({});
|
|
4243
|
-
assert.strictEqual(results.length, 1);
|
|
4244
|
-
assert.strictEqual(results[0].evaluationName, 'Relevance');
|
|
4245
|
-
assert.strictEqual(results[0].scoreValue, 0.92);
|
|
4246
|
-
assert.strictEqual(results[0].scoreLabel, 'relevant');
|
|
4247
|
-
assert.strictEqual(results[0].explanation, 'Response addresses the query');
|
|
4248
|
-
});
|
|
4249
|
-
it('should filter evaluations by evaluationName substring', async () => {
|
|
4250
|
-
const today = getTestDate();
|
|
4251
|
-
const mockEvaluations = [
|
|
4252
|
-
{ timestamp: '2026-01-29T10:00:00Z', attributes: { 'gen_ai.evaluation.name': 'Relevance' } },
|
|
4253
|
-
{ timestamp: '2026-01-29T10:01:00Z', attributes: { 'gen_ai.evaluation.name': 'Faithfulness' } },
|
|
4254
|
-
{ timestamp: '2026-01-29T10:02:00Z', attributes: { 'gen_ai.evaluation.name': 'RelevanceScore' } },
|
|
4255
|
-
];
|
|
4256
|
-
writeJsonlFile(path.join(tempDir, `evaluations-${today}.jsonl`), mockEvaluations);
|
|
4257
|
-
const results = await backend.queryEvaluations({ evaluationName: 'Relevance' });
|
|
4258
|
-
assert.strictEqual(results.length, 2);
|
|
4259
|
-
assert.ok(results.every(e => e.evaluationName.includes('Relevance')));
|
|
4260
|
-
});
|
|
4261
|
-
it('should filter evaluations by scoreMin threshold', async () => {
|
|
4262
|
-
const today = getTestDate();
|
|
4263
|
-
const mockEvaluations = [
|
|
4264
|
-
{ timestamp: '2026-01-29T10:00:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.score.value': 0.3 } },
|
|
4265
|
-
{ timestamp: '2026-01-29T10:01:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.score.value': 0.7 } },
|
|
4266
|
-
{ timestamp: '2026-01-29T10:02:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.score.value': 0.9 } },
|
|
4267
|
-
];
|
|
4268
|
-
writeJsonlFile(path.join(tempDir, `evaluations-${today}.jsonl`), mockEvaluations);
|
|
4269
|
-
const results = await backend.queryEvaluations({ scoreMin: 0.5 });
|
|
4270
|
-
assert.strictEqual(results.length, 2);
|
|
4271
|
-
assert.ok(results.every(e => e.scoreValue >= 0.5));
|
|
4272
|
-
});
|
|
4273
|
-
it('should filter evaluations by scoreMax threshold', async () => {
|
|
4274
|
-
const today = getTestDate();
|
|
4275
|
-
const mockEvaluations = [
|
|
4276
|
-
{ timestamp: '2026-01-29T10:00:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.score.value': 0.3 } },
|
|
4277
|
-
{ timestamp: '2026-01-29T10:01:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.score.value': 0.7 } },
|
|
4278
|
-
{ timestamp: '2026-01-29T10:02:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.score.value': 0.9 } },
|
|
4279
|
-
];
|
|
4280
|
-
writeJsonlFile(path.join(tempDir, `evaluations-${today}.jsonl`), mockEvaluations);
|
|
4281
|
-
const results = await backend.queryEvaluations({ scoreMax: 0.5 });
|
|
4282
|
-
assert.strictEqual(results.length, 1);
|
|
4283
|
-
assert.strictEqual(results[0].scoreValue, 0.3);
|
|
4284
|
-
});
|
|
4285
|
-
// queryEvaluations should honour scoreMin/scoreMax as an inclusive-exclusive
// numeric window over gen_ai.evaluation.score.value.
it('should filter evaluations by score range', async () => {
  const date = getTestDate();
  const rows = [
    { timestamp: '2026-01-29T10:00:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.score.value': 0.3 } },
    { timestamp: '2026-01-29T10:01:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.score.value': 0.5 } },
    { timestamp: '2026-01-29T10:02:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.score.value': 0.7 } },
    { timestamp: '2026-01-29T10:03:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.score.value': 0.9 } },
  ];
  writeJsonlFile(path.join(tempDir, `evaluations-${date}.jsonl`), rows);

  const matched = await backend.queryEvaluations({ scoreMin: 0.4, scoreMax: 0.8 });

  // Only the 0.5 and 0.7 entries fall inside [0.4, 0.8].
  assert.strictEqual(matched.length, 2);
  assert.ok(matched.some((ev) => ev.scoreValue === 0.5));
  assert.ok(matched.some((ev) => ev.scoreValue === 0.7));
});

// P1-1: label-only evaluations carry no numeric score, so a score-range
// filter must not drop them.
it('should pass evaluations without scoreValue through score range filters (P1-1)', async () => {
  const date = getTestDate();
  const rows = [
    { timestamp: '2026-01-29T10:00:00Z', attributes: { 'gen_ai.evaluation.name': 'ToolCorrectness', 'gen_ai.evaluation.score.label': 'pass' } },
    { timestamp: '2026-01-29T10:01:00Z', attributes: { 'gen_ai.evaluation.name': 'Relevance', 'gen_ai.evaluation.score.value': 0.8 } },
    { timestamp: '2026-01-29T10:02:00Z', attributes: { 'gen_ai.evaluation.name': 'Relevance', 'gen_ai.evaluation.score.value': 0.3 } },
  ];
  writeJsonlFile(path.join(tempDir, `evaluations-${date}.jsonl`), rows);

  const matched = await backend.queryEvaluations({ scoreMin: 0.5 });

  // Should include ToolCorrectness (no scoreValue) AND Relevance with 0.8
  assert.strictEqual(matched.length, 2);
  assert.ok(matched.some((ev) => ev.evaluationName === 'ToolCorrectness'));
  assert.ok(matched.some((ev) => ev.scoreValue === 0.8));
});
|
|
4313
|
-
// scoreLabel is an exact-match string filter on gen_ai.evaluation.score.label.
it('should filter evaluations by scoreLabel exact match', async () => {
  const date = getTestDate();
  const rows = [
    { timestamp: '2026-01-29T10:00:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.score.label': 'pass' } },
    { timestamp: '2026-01-29T10:01:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.score.label': 'fail' } },
    { timestamp: '2026-01-29T10:02:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.score.label': 'pass' } },
  ];
  writeJsonlFile(path.join(tempDir, `evaluations-${date}.jsonl`), rows);

  const matched = await backend.queryEvaluations({ scoreLabel: 'pass' });

  assert.strictEqual(matched.length, 2);
  assert.ok(matched.every((ev) => ev.scoreLabel === 'pass'));
});
|
|
4325
|
-
// Exact-match filtering on the response correlation id
// (gen_ai.response.id attribute).
it('should filter evaluations by responseId', async () => {
  const date = getTestDate();
  const rows = [
    { timestamp: '2026-01-29T10:00:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.response.id': 'resp-123' } },
    { timestamp: '2026-01-29T10:01:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.response.id': 'resp-456' } },
    { timestamp: '2026-01-29T10:02:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.response.id': 'resp-123' } },
  ];
  writeJsonlFile(path.join(tempDir, `evaluations-${date}.jsonl`), rows);

  const matched = await backend.queryEvaluations({ responseId: 'resp-123' });

  assert.strictEqual(matched.length, 2);
  assert.ok(matched.every((ev) => ev.responseId === 'resp-123'));
});

// Exact-match filtering on the record-level traceId field (not an attribute).
it('should filter evaluations by traceId', async () => {
  const date = getTestDate();
  const rows = [
    { timestamp: '2026-01-29T10:00:00Z', traceId: 'trace-abc', attributes: { 'gen_ai.evaluation.name': 'Test' } },
    { timestamp: '2026-01-29T10:01:00Z', traceId: 'trace-xyz', attributes: { 'gen_ai.evaluation.name': 'Test' } },
    { timestamp: '2026-01-29T10:02:00Z', traceId: 'trace-abc', attributes: { 'gen_ai.evaluation.name': 'Test' } },
  ];
  writeJsonlFile(path.join(tempDir, `evaluations-${date}.jsonl`), rows);

  const matched = await backend.queryEvaluations({ traceId: 'trace-abc' });

  assert.strictEqual(matched.length, 2);
  assert.ok(matched.every((ev) => ev.traceId === 'trace-abc'));
});

// Exact-match filtering on the session.id attribute.
it('should filter evaluations by sessionId', async () => {
  const date = getTestDate();
  const rows = [
    { timestamp: '2026-01-29T10:00:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'session.id': 'sess-111' } },
    { timestamp: '2026-01-29T10:01:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'session.id': 'sess-222' } },
    { timestamp: '2026-01-29T10:02:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'session.id': 'sess-111' } },
  ];
  writeJsonlFile(path.join(tempDir, `evaluations-${date}.jsonl`), rows);

  const matched = await backend.queryEvaluations({ sessionId: 'sess-111' });

  assert.strictEqual(matched.length, 2);
  assert.ok(matched.every((ev) => ev.sessionId === 'sess-111'));
});
|
|
4361
|
-
// Records lacking gen_ai.evaluation.name are malformed and must be dropped
// during normalization rather than surfaced to callers.
it('should skip evaluations without required evaluationName', async () => {
  const date = getTestDate();
  const rows = [
    { timestamp: '2026-01-29T10:00:00Z', attributes: { 'gen_ai.evaluation.name': 'Valid' } },
    { timestamp: '2026-01-29T10:01:00Z', attributes: { 'gen_ai.evaluation.score.value': 0.5 } }, // Missing name
    { timestamp: '2026-01-29T10:02:00Z', attributes: {} }, // Empty
  ];
  writeJsonlFile(path.join(tempDir, `evaluations-${date}.jsonl`), rows);

  const matched = await backend.queryEvaluations({});

  assert.strictEqual(matched.length, 1);
  assert.strictEqual(matched[0].evaluationName, 'Valid');
});

// P0-2: non-finite score values (here a string "NaN" from corrupted data)
// must be sanitized to undefined, without discarding the whole record.
it('should reject NaN and Infinity in scoreValue (P0-2)', async () => {
  const date = getTestDate();
  // Write raw JSONL with invalid numbers (simulating corrupted data)
  const rawLines = [
    JSON.stringify({ timestamp: '2026-01-29T10:00:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.score.value': 0.5 } }),
    '{"timestamp":"2026-01-29T10:01:00Z","attributes":{"gen_ai.evaluation.name":"NaNTest","gen_ai.evaluation.score.value":"NaN"}}',
    JSON.stringify({ timestamp: '2026-01-29T10:02:00Z', attributes: { 'gen_ai.evaluation.name': 'Valid', 'gen_ai.evaluation.score.value': 0.8 } }),
  ];
  fs.writeFileSync(path.join(tempDir, `evaluations-${date}.jsonl`), rawLines.join('\n'), 'utf-8');

  const matched = await backend.queryEvaluations({});

  // Should have 3 evaluations (NaNTest has scoreValue as string "NaN" so it becomes undefined, but evaluation itself is valid)
  assert.strictEqual(matched.length, 3);
  // The NaNTest should have undefined scoreValue (string "NaN" is not a number type)
  const corrupted = matched.find((ev) => ev.evaluationName === 'NaNTest');
  assert.strictEqual(corrupted?.scoreValue, undefined);
});

// P0-2: empty or whitespace-only labels carry no information and are
// normalized to undefined.
it('should reject empty strings in scoreLabel (P0-2)', async () => {
  const date = getTestDate();
  const rows = [
    { timestamp: '2026-01-29T10:00:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.score.label': 'pass' } },
    { timestamp: '2026-01-29T10:01:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.score.label': '' } },
    { timestamp: '2026-01-29T10:02:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.score.label': ' ' } },
  ];
  writeJsonlFile(path.join(tempDir, `evaluations-${date}.jsonl`), rows);

  const matched = await backend.queryEvaluations({});

  assert.strictEqual(matched.length, 3);
  // Only first should have scoreLabel
  assert.strictEqual(matched[0].scoreLabel, 'pass');
  assert.strictEqual(matched[1].scoreLabel, undefined);
  assert.strictEqual(matched[2].scoreLabel, undefined);
});
|
|
4404
|
-
// Identical query parameters should hit the backend's evaluation cache;
// getCacheStats() exposes the hit counter we inspect.
it('should cache query results', async () => {
  const date = getTestDate();
  const rows = [
    { timestamp: '2026-01-29T10:00:00Z', attributes: { 'gen_ai.evaluation.name': 'Test' } },
  ];
  writeJsonlFile(path.join(tempDir, `evaluations-${date}.jsonl`), rows);

  // First query
  const firstRun = await backend.queryEvaluations({ evaluationName: 'Test' });
  const statsBefore = backend.getCacheStats();
  // Second query (same params)
  const secondRun = await backend.queryEvaluations({ evaluationName: 'Test' });
  const statsAfter = backend.getCacheStats();

  assert.deepStrictEqual(firstRun, secondRun);
  assert.strictEqual(statsAfter.evaluations.hits, statsBefore.evaluations.hits + 1);
});

// limit/offset paginate the (timestamp-ordered) result set.
it('should apply limit and offset', async () => {
  const date = getTestDate();
  const rows = Array.from({ length: 10 }, (_, idx) => ({
    timestamp: `2026-01-29T10:${String(idx).padStart(2, '0')}:00Z`,
    attributes: { 'gen_ai.evaluation.name': `Eval${idx}` },
  }));
  writeJsonlFile(path.join(tempDir, `evaluations-${date}.jsonl`), rows);

  const page = await backend.queryEvaluations({ limit: 3, offset: 2 });

  assert.strictEqual(page.length, 3);
  assert.strictEqual(page[0].evaluationName, 'Eval2');
  assert.strictEqual(page[1].evaluationName, 'Eval3');
  assert.strictEqual(page[2].evaluationName, 'Eval4');
});

// With no JSONL files on disk the query resolves to an empty array,
// never a rejection.
it('should return empty array when no files found', async () => {
  const matched = await backend.queryEvaluations({});
  assert.deepStrictEqual(matched, []);
});
|
|
4436
|
-
// When a sidecar index file exists, queries by evaluationName should be
// served through it and still return correct results.
it('should use index when available', async () => {
  const date = getTestDate();
  const rows = [
    { timestamp: '2026-01-29T10:00:00Z', attributes: { 'gen_ai.evaluation.name': 'Relevance', 'gen_ai.evaluation.score.label': 'pass' } },
    { timestamp: '2026-01-29T10:01:00Z', attributes: { 'gen_ai.evaluation.name': 'Faithfulness', 'gen_ai.evaluation.score.label': 'fail' } },
    { timestamp: '2026-01-29T10:02:00Z', attributes: { 'gen_ai.evaluation.name': 'Relevance', 'gen_ai.evaluation.score.label': 'pass' } },
  ];
  const dataFile = path.join(tempDir, `evaluations-${date}.jsonl`);
  writeJsonlFile(dataFile, rows);

  // Build index
  await buildAndWriteIndex(dataFile, 'evaluations');
  // Verify index exists
  assert.ok(fs.existsSync(getIndexPath(dataFile)));

  // Query should use index
  const matched = await backend.queryEvaluations({ evaluationName: 'Relevance' });
  assert.strictEqual(matched.length, 2);
  assert.ok(matched.every((ev) => ev.evaluationName === 'Relevance'));
});

// startDate/endDate restrict which per-day JSONL files are scanned.
it('should filter by date range', async () => {
  const date = getTestDate();
  const priorDay = new Date(Date.now() - 24 * 60 * 60 * 1000).toISOString().split('T')[0];

  // Write files for both days
  writeJsonlFile(path.join(tempDir, `evaluations-${date}.jsonl`), [
    { timestamp: `${date}T10:00:00Z`, attributes: { 'gen_ai.evaluation.name': 'Today' } },
  ]);
  writeJsonlFile(path.join(tempDir, `evaluations-${priorDay}.jsonl`), [
    { timestamp: `${priorDay}T10:00:00Z`, attributes: { 'gen_ai.evaluation.name': 'Yesterday' } },
  ]);

  // Query only today
  const matched = await backend.queryEvaluations({ startDate: date, endDate: date });
  assert.strictEqual(matched.length, 1);
  assert.strictEqual(matched[0].evaluationName, 'Today');
});
|
|
4470
|
-
// Phase 3: evaluator field tests
it('should read and normalize evaluator and evaluatorType fields', async () => {
  const date = getTestDate();
  const rows = [
    {
      timestamp: '2026-01-29T10:00:00Z',
      attributes: {
        'gen_ai.evaluation.name': 'Relevance',
        'gen_ai.evaluation.score.value': 0.92,
        'gen_ai.evaluation.evaluator': 'gpt-4-as-judge',
        'gen_ai.evaluation.evaluator.type': 'llm',
      },
    },
  ];
  writeJsonlFile(path.join(tempDir, `evaluations-${date}.jsonl`), rows);

  const matched = await backend.queryEvaluations({});

  assert.strictEqual(matched.length, 1);
  assert.strictEqual(matched[0].evaluator, 'gpt-4-as-judge');
  assert.strictEqual(matched[0].evaluatorType, 'llm');
});

// evaluator is an exact-match string filter.
it('should filter evaluations by evaluator exact match', async () => {
  const date = getTestDate();
  const rows = [
    { timestamp: '2026-01-29T10:00:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.evaluator': 'gpt-4-as-judge' } },
    { timestamp: '2026-01-29T10:01:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.evaluator': 'human-reviewer' } },
    { timestamp: '2026-01-29T10:02:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.evaluator': 'gpt-4-as-judge' } },
  ];
  writeJsonlFile(path.join(tempDir, `evaluations-${date}.jsonl`), rows);

  const matched = await backend.queryEvaluations({ evaluator: 'gpt-4-as-judge' });

  assert.strictEqual(matched.length, 2);
  assert.ok(matched.every((ev) => ev.evaluator === 'gpt-4-as-judge'));
});

// evaluatorType is an exact-match filter over the normalized enum.
it('should filter evaluations by evaluatorType exact match', async () => {
  const date = getTestDate();
  const rows = [
    { timestamp: '2026-01-29T10:00:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.evaluator.type': 'llm' } },
    { timestamp: '2026-01-29T10:01:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.evaluator.type': 'human' } },
    { timestamp: '2026-01-29T10:02:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.evaluator.type': 'llm' } },
  ];
  writeJsonlFile(path.join(tempDir, `evaluations-${date}.jsonl`), rows);

  const matched = await backend.queryEvaluations({ evaluatorType: 'llm' });

  assert.strictEqual(matched.length, 2);
  assert.ok(matched.every((ev) => ev.evaluatorType === 'llm'));
});

// All four enum members (llm/human/rule/classifier) pass normalization.
it('should handle all valid evaluatorType values', async () => {
  const date = getTestDate();
  const rows = [
    { timestamp: '2026-01-29T10:00:00Z', attributes: { 'gen_ai.evaluation.name': 'Test1', 'gen_ai.evaluation.evaluator.type': 'llm' } },
    { timestamp: '2026-01-29T10:01:00Z', attributes: { 'gen_ai.evaluation.name': 'Test2', 'gen_ai.evaluation.evaluator.type': 'human' } },
    { timestamp: '2026-01-29T10:02:00Z', attributes: { 'gen_ai.evaluation.name': 'Test3', 'gen_ai.evaluation.evaluator.type': 'rule' } },
    { timestamp: '2026-01-29T10:03:00Z', attributes: { 'gen_ai.evaluation.name': 'Test4', 'gen_ai.evaluation.evaluator.type': 'classifier' } },
  ];
  writeJsonlFile(path.join(tempDir, `evaluations-${date}.jsonl`), rows);

  const matched = await backend.queryEvaluations({});

  assert.strictEqual(matched.length, 4);
  // Results may not be in insertion order - check that all types are present
  const seenTypes = matched.map((ev) => ev.evaluatorType).sort();
  assert.deepStrictEqual(seenTypes, ['classifier', 'human', 'llm', 'rule']);
});
|
|
4529
|
-
// Unknown evaluatorType strings become undefined; mixed-case values are
// lowercased before validation, so 'LLM' survives as 'llm'.
it('should reject invalid evaluatorType values', async () => {
  const date = getTestDate();
  const rows = [
    { timestamp: '2026-01-29T10:00:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.evaluator.type': 'invalid' } },
    { timestamp: '2026-01-29T10:01:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.evaluator.type': 'LLM' } }, // Case matters
  ];
  writeJsonlFile(path.join(tempDir, `evaluations-${date}.jsonl`), rows);

  const matched = await backend.queryEvaluations({});

  assert.strictEqual(matched.length, 2);
  // Invalid types should be undefined
  assert.strictEqual(matched[0].evaluatorType, undefined);
  assert.strictEqual(matched[1].evaluatorType, 'llm'); // Normalized to lowercase
});

// evaluator filtering should also be served via the sidecar index.
it('should use index when filtering by evaluator', async () => {
  const date = getTestDate();
  const dataFile = path.join(tempDir, `evaluations-${date}.jsonl`);
  const rows = [
    { timestamp: '2026-01-29T10:00:00Z', attributes: { 'gen_ai.evaluation.name': 'Relevance', 'gen_ai.evaluation.evaluator': 'gpt-4-as-judge' } },
    { timestamp: '2026-01-29T10:01:00Z', attributes: { 'gen_ai.evaluation.name': 'Faithfulness', 'gen_ai.evaluation.evaluator': 'human-reviewer' } },
    { timestamp: '2026-01-29T10:02:00Z', attributes: { 'gen_ai.evaluation.name': 'Relevance', 'gen_ai.evaluation.evaluator': 'gpt-4-as-judge' } },
  ];
  writeJsonlFile(dataFile, rows);

  // Build index
  await buildAndWriteIndex(dataFile, 'evaluations');
  // Verify index exists
  assert.ok(fs.existsSync(getIndexPath(dataFile)));

  // Query should use index
  const matched = await backend.queryEvaluations({ evaluator: 'gpt-4-as-judge' });
  assert.strictEqual(matched.length, 2);
  assert.ok(matched.every((ev) => ev.evaluator === 'gpt-4-as-judge'));
});

// Empty/whitespace-only evaluator strings normalize to undefined.
it('should handle empty evaluator string', async () => {
  const date = getTestDate();
  const rows = [
    { timestamp: '2026-01-29T10:00:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.evaluator': '' } },
    { timestamp: '2026-01-29T10:01:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.evaluator': ' ' } },
    { timestamp: '2026-01-29T10:02:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.evaluator': 'valid' } },
  ];
  writeJsonlFile(path.join(tempDir, `evaluations-${date}.jsonl`), rows);

  const matched = await backend.queryEvaluations({});

  assert.strictEqual(matched.length, 3);
  // Empty and whitespace-only should be undefined
  assert.strictEqual(matched[0].evaluator, undefined);
  assert.strictEqual(matched[1].evaluator, undefined);
  assert.strictEqual(matched[2].evaluator, 'valid');
});

// Both filters supplied together must AND, not OR.
it('should filter by both evaluator and evaluatorType', async () => {
  const date = getTestDate();
  const rows = [
    { timestamp: '2026-01-29T10:00:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.evaluator': 'gpt-4', 'gen_ai.evaluation.evaluator.type': 'llm' } },
    { timestamp: '2026-01-29T10:01:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.evaluator': 'gpt-4', 'gen_ai.evaluation.evaluator.type': 'classifier' } },
    { timestamp: '2026-01-29T10:02:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.evaluator': 'claude', 'gen_ai.evaluation.evaluator.type': 'llm' } },
  ];
  writeJsonlFile(path.join(tempDir, `evaluations-${date}.jsonl`), rows);

  const matched = await backend.queryEvaluations({ evaluator: 'gpt-4', evaluatorType: 'llm' });

  assert.strictEqual(matched.length, 1);
  assert.strictEqual(matched[0].evaluator, 'gpt-4');
  assert.strictEqual(matched[0].evaluatorType, 'llm');
});
|
|
4589
|
-
// Phase 1: scoreUnit field tests
it('should read and normalize scoreUnit field', async () => {
  const date = getTestDate();
  const rows = [
    {
      timestamp: '2026-01-29T10:00:00Z',
      attributes: {
        'gen_ai.evaluation.name': 'Relevance',
        'gen_ai.evaluation.score.value': 85,
        'gen_ai.evaluation.score.unit': 'percentage',
      },
    },
  ];
  writeJsonlFile(path.join(tempDir, `evaluations-${date}.jsonl`), rows);

  const matched = await backend.queryEvaluations({});

  assert.strictEqual(matched.length, 1);
  assert.strictEqual(matched[0].scoreValue, 85);
  assert.strictEqual(matched[0].scoreUnit, 'percentage');
});

// scoreUnit is an open string; several representative units round-trip.
it('should handle various scoreUnit values', async () => {
  const date = getTestDate();
  const rows = [
    { timestamp: '2026-01-29T10:00:00Z', attributes: { 'gen_ai.evaluation.name': 'Test1', 'gen_ai.evaluation.score.value': 85, 'gen_ai.evaluation.score.unit': 'percentage' } },
    { timestamp: '2026-01-29T10:01:00Z', attributes: { 'gen_ai.evaluation.name': 'Test2', 'gen_ai.evaluation.score.value': 0.85, 'gen_ai.evaluation.score.unit': 'ratio_0_1' } },
    { timestamp: '2026-01-29T10:02:00Z', attributes: { 'gen_ai.evaluation.name': 'Test3', 'gen_ai.evaluation.score.value': 4, 'gen_ai.evaluation.score.unit': 'stars_1_5' } },
  ];
  writeJsonlFile(path.join(tempDir, `evaluations-${date}.jsonl`), rows);

  const matched = await backend.queryEvaluations({});

  assert.strictEqual(matched.length, 3);
  // Results may not be in insertion order
  const pct = matched.find((ev) => ev.evaluationName === 'Test1');
  const ratio = matched.find((ev) => ev.evaluationName === 'Test2');
  const stars = matched.find((ev) => ev.evaluationName === 'Test3');
  assert.strictEqual(pct?.scoreUnit, 'percentage');
  assert.strictEqual(ratio?.scoreUnit, 'ratio_0_1');
  assert.strictEqual(stars?.scoreUnit, 'stars_1_5');
});

// Absent unit attribute yields undefined scoreUnit, score still present.
it('should handle missing scoreUnit', async () => {
  const date = getTestDate();
  const rows = [
    { timestamp: '2026-01-29T10:00:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.score.value': 0.85 } },
  ];
  writeJsonlFile(path.join(tempDir, `evaluations-${date}.jsonl`), rows);

  const matched = await backend.queryEvaluations({});

  assert.strictEqual(matched.length, 1);
  assert.strictEqual(matched[0].scoreValue, 0.85);
  assert.strictEqual(matched[0].scoreUnit, undefined);
});

// Empty/whitespace-only unit strings normalize to undefined.
it('should handle empty scoreUnit string', async () => {
  const date = getTestDate();
  const rows = [
    { timestamp: '2026-01-29T10:00:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.score.value': 0.85, 'gen_ai.evaluation.score.unit': '' } },
    { timestamp: '2026-01-29T10:01:00Z', attributes: { 'gen_ai.evaluation.name': 'Test', 'gen_ai.evaluation.score.value': 0.90, 'gen_ai.evaluation.score.unit': ' ' } },
  ];
  writeJsonlFile(path.join(tempDir, `evaluations-${date}.jsonl`), rows);

  const matched = await backend.queryEvaluations({});

  assert.strictEqual(matched.length, 2);
  // Empty and whitespace-only should be undefined
  assert.ok(matched.every((ev) => ev.scoreUnit === undefined));
});
|
|
4649
|
-
});
|
|
4650
|
-
});
|
|
4651
|
-
//# sourceMappingURL=local-jsonl.test.js.map
|