evlog 2.14.1 → 2.15.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +4 -4
- package/dist/adapters/axiom.d.mts +18 -27
- package/dist/adapters/axiom.d.mts.map +1 -1
- package/dist/adapters/axiom.mjs +40 -30
- package/dist/adapters/axiom.mjs.map +1 -1
- package/dist/adapters/better-stack.d.mts +11 -24
- package/dist/adapters/better-stack.d.mts.map +1 -1
- package/dist/adapters/better-stack.mjs +34 -29
- package/dist/adapters/better-stack.mjs.map +1 -1
- package/dist/adapters/datadog.d.mts +1 -1
- package/dist/adapters/datadog.d.mts.map +1 -1
- package/dist/adapters/datadog.mjs +10 -4
- package/dist/adapters/datadog.mjs.map +1 -1
- package/dist/adapters/fs.d.mts +2 -2
- package/dist/adapters/fs.d.mts.map +1 -1
- package/dist/adapters/fs.mjs +19 -7
- package/dist/adapters/fs.mjs.map +1 -1
- package/dist/adapters/hyperdx.d.mts +1 -1
- package/dist/adapters/hyperdx.mjs +1 -2
- package/dist/adapters/hyperdx.mjs.map +1 -1
- package/dist/adapters/otlp.d.mts +1 -1
- package/dist/adapters/otlp.d.mts.map +1 -1
- package/dist/adapters/otlp.mjs +36 -31
- package/dist/adapters/otlp.mjs.map +1 -1
- package/dist/adapters/posthog.d.mts +50 -70
- package/dist/adapters/posthog.d.mts.map +1 -1
- package/dist/adapters/posthog.mjs +50 -85
- package/dist/adapters/posthog.mjs.map +1 -1
- package/dist/adapters/sentry.d.mts +1 -1
- package/dist/adapters/sentry.d.mts.map +1 -1
- package/dist/adapters/sentry.mjs +15 -5
- package/dist/adapters/sentry.mjs.map +1 -1
- package/dist/ai/index.d.mts +1 -1
- package/dist/{audit-DQoBo7Dl.mjs → audit--n0QRR2Y.mjs} +152 -13
- package/dist/audit--n0QRR2Y.mjs.map +1 -0
- package/dist/{audit-CTIviX3P.d.mts → audit-CJl-wZ10.d.mts} +134 -2
- package/dist/audit-CJl-wZ10.d.mts.map +1 -0
- package/dist/better-auth/index.d.mts +1 -1
- package/dist/browser.d.mts +1 -1
- package/dist/define-D6OJdSUH.mjs +63 -0
- package/dist/define-D6OJdSUH.mjs.map +1 -0
- package/dist/define-Fp8TrdEB.d.mts +57 -0
- package/dist/define-Fp8TrdEB.d.mts.map +1 -0
- package/dist/{dist-Do8P4zWd.mjs → dist-BIlS38vi.mjs} +1 -1
- package/dist/dist-BIlS38vi.mjs.map +1 -0
- package/dist/drain-ByWUeOQC.mjs +160 -0
- package/dist/drain-ByWUeOQC.mjs.map +1 -0
- package/dist/elysia/index.d.mts +25 -2
- package/dist/elysia/index.d.mts.map +1 -1
- package/dist/elysia/index.mjs +53 -20
- package/dist/elysia/index.mjs.map +1 -1
- package/dist/enricher-BA6viylF.mjs +95 -0
- package/dist/enricher-BA6viylF.mjs.map +1 -0
- package/dist/enricher-CLSnrzrr.d.mts +42 -0
- package/dist/enricher-CLSnrzrr.d.mts.map +1 -0
- package/dist/enrichers.d.mts +16 -9
- package/dist/enrichers.d.mts.map +1 -1
- package/dist/enrichers.mjs +81 -64
- package/dist/enrichers.mjs.map +1 -1
- package/dist/{error-C7gSQVqk.d.mts → error-C-66_G2M.d.mts} +2 -2
- package/dist/{error-C7gSQVqk.d.mts.map → error-C-66_G2M.d.mts.map} +1 -1
- package/dist/error.d.mts +1 -1
- package/dist/{errors-BJRXUfMg.mjs → errors-BQgyQ9xe.mjs} +1 -1
- package/dist/{errors-BJRXUfMg.mjs.map → errors-BQgyQ9xe.mjs.map} +1 -1
- package/dist/{errors-4MPmTzjY.d.mts → errors-DQoYsDW1.d.mts} +2 -2
- package/dist/{errors-4MPmTzjY.d.mts.map → errors-DQoYsDW1.d.mts.map} +1 -1
- package/dist/event-ef-5Dbxg.mjs +53 -0
- package/dist/event-ef-5Dbxg.mjs.map +1 -0
- package/dist/express/index.d.mts +2 -2
- package/dist/express/index.d.mts.map +1 -1
- package/dist/express/index.mjs +17 -15
- package/dist/express/index.mjs.map +1 -1
- package/dist/fastify/index.d.mts +2 -2
- package/dist/fastify/index.d.mts.map +1 -1
- package/dist/fastify/index.mjs +19 -20
- package/dist/fastify/index.mjs.map +1 -1
- package/dist/fork-D44V93-K.mjs +227 -0
- package/dist/fork-D44V93-K.mjs.map +1 -0
- package/dist/{headers-D74M0wsg.mjs → headers-CU-QqnYg.mjs} +19 -2
- package/dist/headers-CU-QqnYg.mjs.map +1 -0
- package/dist/hono/index.d.mts +2 -2
- package/dist/hono/index.d.mts.map +1 -1
- package/dist/hono/index.mjs +14 -10
- package/dist/hono/index.mjs.map +1 -1
- package/dist/http.d.mts +1 -1
- package/dist/index.d.mts +8 -7
- package/dist/index.mjs +3 -2
- package/dist/integration-Bz8X6_Lb.mjs +75 -0
- package/dist/integration-Bz8X6_Lb.mjs.map +1 -0
- package/dist/{logger-DttRJRGa.d.mts → logger-Brt5-WMK.d.mts} +9 -3
- package/dist/logger-Brt5-WMK.d.mts.map +1 -0
- package/dist/logger.d.mts +2 -2
- package/dist/logger.mjs +2 -2
- package/dist/middleware-CGM-bOvE.d.mts +72 -0
- package/dist/middleware-CGM-bOvE.d.mts.map +1 -0
- package/dist/nestjs/index.d.mts +2 -2
- package/dist/nestjs/index.mjs +3 -4
- package/dist/nestjs/index.mjs.map +1 -1
- package/dist/next/client.d.mts +1 -1
- package/dist/next/index.d.mts +4 -4
- package/dist/next/index.mjs +3 -3
- package/dist/next/instrumentation.d.mts +1 -1
- package/dist/next/instrumentation.mjs +1 -1
- package/dist/nitro/errorHandler.mjs +2 -2
- package/dist/nitro/module.d.mts +2 -2
- package/dist/nitro/plugin.mjs +21 -11
- package/dist/nitro/plugin.mjs.map +1 -1
- package/dist/nitro/v3/errorHandler.mjs +3 -3
- package/dist/nitro/v3/index.d.mts +2 -2
- package/dist/nitro/v3/module.d.mts +1 -1
- package/dist/nitro/v3/plugin.mjs +29 -17
- package/dist/nitro/v3/plugin.mjs.map +1 -1
- package/dist/nitro/v3/useLogger.d.mts +1 -1
- package/dist/{nitro-CPPRCPbG.d.mts → nitro-DHPb9dXG.d.mts} +2 -2
- package/dist/{nitro-CPPRCPbG.d.mts.map → nitro-DHPb9dXG.d.mts.map} +1 -1
- package/dist/{nitro-OmT_M4Pb.mjs → nitro-DavLelNz.mjs} +2 -2
- package/dist/nitro-DavLelNz.mjs.map +1 -0
- package/dist/{nitroConfigBridge-C37lXaNm.mjs → nitroConfigBridge-aZ1e5upQ.mjs} +1 -1
- package/dist/nitroConfigBridge-aZ1e5upQ.mjs.map +1 -0
- package/dist/nuxt/module.d.mts +1 -1
- package/dist/nuxt/module.mjs +2 -2
- package/dist/{parseError-o1GpZEOR.d.mts → parseError-B1zJZvQ5.d.mts} +2 -2
- package/dist/parseError-B1zJZvQ5.d.mts.map +1 -0
- package/dist/react-router/index.d.mts +2 -2
- package/dist/react-router/index.mjs +3 -4
- package/dist/react-router/index.mjs.map +1 -1
- package/dist/{routes-CGPmbzCZ.mjs → routes-B48wm7Pb.mjs} +1 -1
- package/dist/{routes-CGPmbzCZ.mjs.map → routes-B48wm7Pb.mjs.map} +1 -1
- package/dist/runtime/client/log.d.mts +1 -1
- package/dist/runtime/server/routes/_evlog/ingest.post.mjs +21 -10
- package/dist/runtime/server/routes/_evlog/ingest.post.mjs.map +1 -1
- package/dist/runtime/server/useLogger.d.mts +1 -1
- package/dist/runtime/utils/parseError.d.mts +2 -2
- package/dist/runtime/utils/parseError.mjs +1 -1
- package/dist/{_severity-CQijvfhU.mjs → severity-BYWZ96Sb.mjs} +6 -2
- package/dist/severity-BYWZ96Sb.mjs.map +1 -0
- package/dist/{source-location-DRvDDqfq.mjs → source-location-Dco0cRTz.mjs} +3 -3
- package/dist/source-location-Dco0cRTz.mjs.map +1 -0
- package/dist/storage-BT-3fT1-.mjs +27 -0
- package/dist/storage-BT-3fT1-.mjs.map +1 -0
- package/dist/sveltekit/index.d.mts +2 -2
- package/dist/sveltekit/index.mjs +5 -6
- package/dist/sveltekit/index.mjs.map +1 -1
- package/dist/toolkit.d.mts +288 -12
- package/dist/toolkit.d.mts.map +1 -1
- package/dist/toolkit.mjs +13 -7
- package/dist/types.d.mts +1 -1
- package/dist/{useLogger-CyPP1sVB.d.mts → useLogger-Cb1R6bQE.d.mts} +2 -2
- package/dist/{useLogger-CyPP1sVB.d.mts.map → useLogger-Cb1R6bQE.d.mts.map} +1 -1
- package/dist/{utils-Dmin7wVL.d.mts → utils-gQCeZMbg.d.mts} +2 -2
- package/dist/{utils-Dmin7wVL.d.mts.map → utils-gQCeZMbg.d.mts.map} +1 -1
- package/dist/utils.d.mts +1 -1
- package/dist/vite/index.d.mts +1 -1
- package/dist/vite/index.mjs +1 -1
- package/dist/workers.d.mts +1 -1
- package/dist/workers.mjs +1 -1
- package/package.json +22 -19
- package/dist/_drain-CmCtsuF6.mjs +0 -23
- package/dist/_drain-CmCtsuF6.mjs.map +0 -1
- package/dist/_http-BY1e9pwC.mjs +0 -78
- package/dist/_http-BY1e9pwC.mjs.map +0 -1
- package/dist/_severity-CQijvfhU.mjs.map +0 -1
- package/dist/audit-CTIviX3P.d.mts.map +0 -1
- package/dist/audit-DQoBo7Dl.mjs.map +0 -1
- package/dist/dist-Do8P4zWd.mjs.map +0 -1
- package/dist/fork-D1j1Fuzy.mjs +0 -72
- package/dist/fork-D1j1Fuzy.mjs.map +0 -1
- package/dist/headers-D74M0wsg.mjs.map +0 -1
- package/dist/logger-DttRJRGa.d.mts.map +0 -1
- package/dist/middleware-CTnDsST-.d.mts +0 -93
- package/dist/middleware-CTnDsST-.d.mts.map +0 -1
- package/dist/middleware-oAccqyPp.mjs +0 -123
- package/dist/middleware-oAccqyPp.mjs.map +0 -1
- package/dist/nitro-OmT_M4Pb.mjs.map +0 -1
- package/dist/nitroConfigBridge-C37lXaNm.mjs.map +0 -1
- package/dist/parseError-o1GpZEOR.d.mts.map +0 -1
- package/dist/source-location-DRvDDqfq.mjs.map +0 -1
- package/dist/storage-CFGTn37X.mjs +0 -46
- package/dist/storage-CFGTn37X.mjs.map +0 -1
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"dist-BIlS38vi.mjs","names":[],"sources":["../../../node_modules/.pnpm/ufo@1.6.4/node_modules/ufo/dist/index.mjs"],"sourcesContent":["const n = /[^\\0-\\x7E]/;\nconst t = /[\\x2E\\u3002\\uFF0E\\uFF61]/g;\nconst o = {\n overflow: \"Overflow Error\",\n \"not-basic\": \"Illegal Input\",\n \"invalid-input\": \"Invalid Input\"\n};\nconst e = Math.floor;\nconst r = String.fromCharCode;\nfunction s(n2) {\n throw new RangeError(o[n2]);\n}\nconst c = function(n2, t2) {\n return n2 + 22 + 75 * (n2 < 26) - ((t2 != 0) << 5);\n};\nconst u = function(n2, t2, o2) {\n let r2 = 0;\n for (n2 = o2 ? e(n2 / 700) : n2 >> 1, n2 += e(n2 / t2); n2 > 455; r2 += 36) {\n n2 = e(n2 / 35);\n }\n return e(r2 + 36 * n2 / (n2 + 38));\n};\nfunction toASCII(o2) {\n return (function(n2, o3) {\n const e2 = n2.split(\"@\");\n let r2 = \"\";\n e2.length > 1 && (r2 = e2[0] + \"@\", n2 = e2[1]);\n const s2 = (function(n3, t2) {\n const o4 = [];\n let e3 = n3.length;\n for (; e3--; ) {\n o4[e3] = t2(n3[e3]);\n }\n return o4;\n })((n2 = n2.replace(t, \".\")).split(\".\"), o3).join(\".\");\n return r2 + s2;\n })(o2, function(t2) {\n return n.test(t2) ? \"xn--\" + (function(n2) {\n const t3 = [];\n const o3 = (n2 = (function(n3) {\n const t4 = [];\n let o4 = 0;\n const e2 = n3.length;\n for (; o4 < e2; ) {\n const r2 = n3.charCodeAt(o4++);\n if (r2 >= 55296 && r2 <= 56319 && o4 < e2) {\n const e3 = n3.charCodeAt(o4++);\n (64512 & e3) == 56320 ? 
t4.push(((1023 & r2) << 10) + (1023 & e3) + 65536) : (t4.push(r2), o4--);\n } else {\n t4.push(r2);\n }\n }\n return t4;\n })(n2)).length;\n let f = 128;\n let i = 0;\n let l = 72;\n for (const o4 of n2) {\n o4 < 128 && t3.push(r(o4));\n }\n const h = t3.length;\n let p = h;\n for (h && t3.push(\"-\"); p < o3; ) {\n let o4 = 2147483647;\n for (const t4 of n2) {\n t4 >= f && t4 < o4 && (o4 = t4);\n }\n const a = p + 1;\n o4 - f > e((2147483647 - i) / a) && s(\"overflow\"), i += (o4 - f) * a, f = o4;\n for (const o5 of n2) {\n if (o5 < f && ++i > 2147483647 && s(\"overflow\"), o5 == f) {\n let n3 = i;\n for (let o6 = 36; ; o6 += 36) {\n const s2 = o6 <= l ? 1 : o6 >= l + 26 ? 26 : o6 - l;\n if (n3 < s2) {\n break;\n }\n const u2 = n3 - s2;\n const f2 = 36 - s2;\n t3.push(r(c(s2 + u2 % f2, 0))), n3 = e(u2 / f2);\n }\n t3.push(r(c(n3, 0))), l = u(i, a, p == h), i = 0, ++p;\n }\n }\n ++i, ++f;\n }\n return t3.join(\"\");\n })(t2) : t2;\n });\n}\n\nconst HASH_RE = /#/g;\nconst AMPERSAND_RE = /&/g;\nconst SLASH_RE = /\\//g;\nconst EQUAL_RE = /=/g;\nconst IM_RE = /\\?/g;\nconst PLUS_RE = /\\+/g;\nconst ENC_CARET_RE = /%5e/gi;\nconst ENC_BACKTICK_RE = /%60/gi;\nconst ENC_CURLY_OPEN_RE = /%7b/gi;\nconst ENC_PIPE_RE = /%7c/gi;\nconst ENC_CURLY_CLOSE_RE = /%7d/gi;\nconst ENC_SPACE_RE = /%20/gi;\nconst ENC_SLASH_RE = /%2f/gi;\nconst ENC_ENC_SLASH_RE = /%252f/gi;\nfunction encode(text) {\n return encodeURI(\"\" + text).replace(ENC_PIPE_RE, \"|\");\n}\nfunction encodeHash(text) {\n return encode(text).replace(ENC_CURLY_OPEN_RE, \"{\").replace(ENC_CURLY_CLOSE_RE, \"}\").replace(ENC_CARET_RE, \"^\");\n}\nfunction encodeQueryValue(input) {\n return encode(typeof input === \"string\" ? 
input : JSON.stringify(input)).replace(PLUS_RE, \"%2B\").replace(ENC_SPACE_RE, \"+\").replace(HASH_RE, \"%23\").replace(AMPERSAND_RE, \"%26\").replace(ENC_BACKTICK_RE, \"`\").replace(ENC_CARET_RE, \"^\").replace(SLASH_RE, \"%2F\");\n}\nfunction encodeQueryKey(text) {\n return encodeQueryValue(text).replace(EQUAL_RE, \"%3D\");\n}\nfunction encodePath(text) {\n return encode(text).replace(HASH_RE, \"%23\").replace(IM_RE, \"%3F\").replace(ENC_ENC_SLASH_RE, \"%2F\").replace(AMPERSAND_RE, \"%26\").replace(PLUS_RE, \"%2B\");\n}\nfunction encodeParam(text) {\n return encodePath(text).replace(SLASH_RE, \"%2F\");\n}\nfunction decode(text = \"\") {\n try {\n return decodeURIComponent(\"\" + text);\n } catch {\n return \"\" + text;\n }\n}\nfunction decodePath(text) {\n return decode(text.replace(ENC_SLASH_RE, \"%252F\"));\n}\nfunction decodeQueryKey(text) {\n return decode(text.replace(PLUS_RE, \" \"));\n}\nfunction decodeQueryValue(text) {\n return decode(text.replace(PLUS_RE, \" \"));\n}\nfunction encodeHost(name = \"\") {\n return toASCII(name);\n}\n\nfunction parseQuery(parametersString = \"\") {\n const object = /* @__PURE__ */ Object.create(null);\n if (parametersString[0] === \"?\") {\n parametersString = parametersString.slice(1);\n }\n for (const parameter of parametersString.split(\"&\")) {\n const s = parameter.match(/([^=]+)=?(.*)/) || [];\n if (s.length < 2) {\n continue;\n }\n const key = decodeQueryKey(s[1]);\n if (key === \"__proto__\" || key === \"constructor\") {\n continue;\n }\n const value = decodeQueryValue(s[2] || \"\");\n if (object[key] === void 0) {\n object[key] = value;\n } else if (Array.isArray(object[key])) {\n object[key].push(value);\n } else {\n object[key] = [object[key], value];\n }\n }\n return object;\n}\nfunction encodeQueryItem(key, value) {\n if (typeof value === \"number\" || typeof value === \"boolean\") {\n value = String(value);\n }\n if (!value) {\n return encodeQueryKey(key);\n }\n if (Array.isArray(value)) {\n return 
value.map(\n (_value) => `${encodeQueryKey(key)}=${encodeQueryValue(_value)}`\n ).join(\"&\");\n }\n return `${encodeQueryKey(key)}=${encodeQueryValue(value)}`;\n}\nfunction stringifyQuery(query) {\n return Object.keys(query).filter((k) => query[k] !== void 0).map((k) => encodeQueryItem(k, query[k])).filter(Boolean).join(\"&\");\n}\n\nconst PROTOCOL_STRICT_REGEX = /^[\\s\\w\\0+.-]{2,}:([/\\\\]{1,2})/;\nconst PROTOCOL_REGEX = /^[\\s\\w\\0+.-]{2,}:([/\\\\]{2})?/;\nconst PROTOCOL_RELATIVE_REGEX = /^([/\\\\]\\s*){2,}[^/\\\\]/;\nconst PROTOCOL_SCRIPT_RE = /^[\\s\\0]*(blob|data|javascript|vbscript):$/i;\nconst TRAILING_SLASH_RE = /\\/$|\\/\\?|\\/#/;\nconst JOIN_LEADING_SLASH_RE = /^\\.?\\//;\nfunction isRelative(inputString) {\n return [\"./\", \"../\"].some((string_) => inputString.startsWith(string_));\n}\nfunction hasProtocol(inputString, opts = {}) {\n if (typeof opts === \"boolean\") {\n opts = { acceptRelative: opts };\n }\n if (opts.strict) {\n return PROTOCOL_STRICT_REGEX.test(inputString);\n }\n return PROTOCOL_REGEX.test(inputString) || (opts.acceptRelative ? PROTOCOL_RELATIVE_REGEX.test(inputString) : false);\n}\nfunction isScriptProtocol(protocol) {\n return !!protocol && PROTOCOL_SCRIPT_RE.test(protocol);\n}\nfunction hasTrailingSlash(input = \"\", respectQueryAndFragment) {\n if (!respectQueryAndFragment) {\n return input.endsWith(\"/\");\n }\n return TRAILING_SLASH_RE.test(input);\n}\nfunction withoutTrailingSlash(input = \"\", respectQueryAndFragment) {\n if (!respectQueryAndFragment) {\n return (hasTrailingSlash(input) ? input.slice(0, -1) : input) || \"/\";\n }\n if (!hasTrailingSlash(input, true)) {\n return input || \"/\";\n }\n let path = input;\n let fragment = \"\";\n const fragmentIndex = input.indexOf(\"#\");\n if (fragmentIndex !== -1) {\n path = input.slice(0, fragmentIndex);\n fragment = input.slice(fragmentIndex);\n }\n const [s0, ...s] = path.split(\"?\");\n const cleanPath = s0.endsWith(\"/\") ? 
s0.slice(0, -1) : s0;\n return (cleanPath || \"/\") + (s.length > 0 ? `?${s.join(\"?\")}` : \"\") + fragment;\n}\nfunction withTrailingSlash(input = \"\", respectQueryAndFragment) {\n if (!respectQueryAndFragment) {\n return input.endsWith(\"/\") ? input : input + \"/\";\n }\n if (hasTrailingSlash(input, true)) {\n return input || \"/\";\n }\n let path = input;\n let fragment = \"\";\n const fragmentIndex = input.indexOf(\"#\");\n if (fragmentIndex !== -1) {\n path = input.slice(0, fragmentIndex);\n fragment = input.slice(fragmentIndex);\n if (!path) {\n return fragment;\n }\n }\n const [s0, ...s] = path.split(\"?\");\n return s0 + \"/\" + (s.length > 0 ? `?${s.join(\"?\")}` : \"\") + fragment;\n}\nfunction hasLeadingSlash(input = \"\") {\n return input.startsWith(\"/\");\n}\nfunction withoutLeadingSlash(input = \"\") {\n return (hasLeadingSlash(input) ? input.slice(1) : input) || \"/\";\n}\nfunction withLeadingSlash(input = \"\") {\n return hasLeadingSlash(input) ? input : \"/\" + input;\n}\nfunction cleanDoubleSlashes(input = \"\") {\n return input.split(\"://\").map((string_) => string_.replace(/\\/{2,}/g, \"/\")).join(\"://\");\n}\nfunction withBase(input, base) {\n if (isEmptyURL(base) || hasProtocol(input)) {\n return input;\n }\n const _base = withoutTrailingSlash(base);\n if (input.startsWith(_base)) {\n const nextChar = input[_base.length];\n if (!nextChar || nextChar === \"/\" || nextChar === \"?\") {\n return input;\n }\n }\n return joinURL(_base, input);\n}\nfunction withoutBase(input, base) {\n if (isEmptyURL(base)) {\n return input;\n }\n const _base = withoutTrailingSlash(base);\n if (!input.startsWith(_base)) {\n return input;\n }\n const nextChar = input[_base.length];\n if (nextChar && nextChar !== \"/\" && nextChar !== \"?\") {\n return input;\n }\n const trimmed = input.slice(_base.length).replace(/^\\/+/, \"\");\n return \"/\" + trimmed;\n}\nfunction withQuery(input, query) {\n const parsed = parseURL(input);\n const mergedQuery = { 
...parseQuery(parsed.search), ...query };\n parsed.search = stringifyQuery(mergedQuery);\n return stringifyParsedURL(parsed);\n}\nfunction filterQuery(input, predicate) {\n if (!input.includes(\"?\")) {\n return input;\n }\n const parsed = parseURL(input);\n const query = parseQuery(parsed.search);\n const filteredQuery = Object.fromEntries(\n Object.entries(query).filter(([key, value]) => predicate(key, value))\n );\n parsed.search = stringifyQuery(filteredQuery);\n return stringifyParsedURL(parsed);\n}\nfunction getQuery(input) {\n return parseQuery(parseURL(input).search);\n}\nfunction isEmptyURL(url) {\n return !url || url === \"/\";\n}\nfunction isNonEmptyURL(url) {\n return url && url !== \"/\";\n}\nfunction joinURL(base, ...input) {\n let url = base || \"\";\n for (const segment of input.filter((url2) => isNonEmptyURL(url2))) {\n if (url) {\n const _segment = segment.replace(JOIN_LEADING_SLASH_RE, \"\");\n url = withTrailingSlash(url) + _segment;\n } else {\n url = segment;\n }\n }\n return url;\n}\nfunction joinRelativeURL(..._input) {\n const JOIN_SEGMENT_SPLIT_RE = /\\/(?!\\/)/;\n const input = _input.filter(Boolean);\n const segments = [];\n let segmentsDepth = 0;\n for (const i of input) {\n if (!i || i === \"/\") {\n continue;\n }\n for (const [sindex, s] of i.split(JOIN_SEGMENT_SPLIT_RE).entries()) {\n if (!s || s === \".\") {\n continue;\n }\n if (s === \"..\") {\n if (segments.length === 1 && hasProtocol(segments[0])) {\n continue;\n }\n segments.pop();\n segmentsDepth--;\n continue;\n }\n if (sindex === 1 && segments[segments.length - 1]?.endsWith(\":/\")) {\n segments[segments.length - 1] += \"/\" + s;\n continue;\n }\n segments.push(s);\n segmentsDepth++;\n }\n }\n let url = segments.join(\"/\");\n if (segmentsDepth >= 0) {\n if (input[0]?.startsWith(\"/\") && !url.startsWith(\"/\")) {\n url = \"/\" + url;\n } else if (input[0]?.startsWith(\"./\") && !url.startsWith(\"./\")) {\n url = \"./\" + url;\n }\n } else {\n url = \"../\".repeat(-1 * 
segmentsDepth) + url;\n }\n if (input[input.length - 1]?.endsWith(\"/\") && !url.endsWith(\"/\")) {\n url += \"/\";\n }\n return url;\n}\nfunction withHttp(input) {\n return withProtocol(input, \"http://\");\n}\nfunction withHttps(input) {\n return withProtocol(input, \"https://\");\n}\nfunction withoutProtocol(input) {\n return withProtocol(input, \"\");\n}\nfunction withProtocol(input, protocol) {\n let match = input.match(PROTOCOL_REGEX);\n if (!match) {\n match = input.match(/^\\/{2,}/);\n }\n if (!match) {\n return protocol + input;\n }\n return protocol + input.slice(match[0].length);\n}\nfunction normalizeURL(input) {\n const parsed = parseURL(input);\n parsed.pathname = encodePath(decodePath(parsed.pathname));\n parsed.hash = encodeHash(decode(parsed.hash));\n parsed.host = encodeHost(decode(parsed.host));\n parsed.search = stringifyQuery(parseQuery(parsed.search));\n return stringifyParsedURL(parsed);\n}\nfunction resolveURL(base = \"\", ...inputs) {\n if (typeof base !== \"string\") {\n throw new TypeError(\n `URL input should be string received ${typeof base} (${base})`\n );\n }\n const filteredInputs = inputs.filter((input) => isNonEmptyURL(input));\n if (filteredInputs.length === 0) {\n return base;\n }\n const url = parseURL(base);\n for (const inputSegment of filteredInputs) {\n const urlSegment = parseURL(inputSegment);\n if (urlSegment.pathname) {\n url.pathname = withTrailingSlash(url.pathname) + withoutLeadingSlash(urlSegment.pathname);\n }\n if (urlSegment.hash && urlSegment.hash !== \"#\") {\n url.hash = urlSegment.hash;\n }\n if (urlSegment.search && urlSegment.search !== \"?\") {\n if (url.search && url.search !== \"?\") {\n const queryString = stringifyQuery({\n ...parseQuery(url.search),\n ...parseQuery(urlSegment.search)\n });\n url.search = queryString.length > 0 ? 
\"?\" + queryString : \"\";\n } else {\n url.search = urlSegment.search;\n }\n }\n }\n return stringifyParsedURL(url);\n}\nfunction isSamePath(p1, p2) {\n return decode(withoutTrailingSlash(p1)) === decode(withoutTrailingSlash(p2));\n}\nfunction isEqual(a, b, options = {}) {\n if (!options.trailingSlash) {\n a = withTrailingSlash(a);\n b = withTrailingSlash(b);\n }\n if (!options.leadingSlash) {\n a = withLeadingSlash(a);\n b = withLeadingSlash(b);\n }\n if (!options.encoding) {\n a = decode(a);\n b = decode(b);\n }\n return a === b;\n}\nfunction withFragment(input, hash) {\n if (!hash || hash === \"#\") {\n return input;\n }\n const parsed = parseURL(input);\n parsed.hash = hash === \"\" ? \"\" : \"#\" + encodeHash(hash);\n return stringifyParsedURL(parsed);\n}\nfunction withoutFragment(input) {\n return stringifyParsedURL({ ...parseURL(input), hash: \"\" });\n}\nfunction withoutHost(input) {\n const parsed = parseURL(input);\n return (parsed.pathname || \"/\") + parsed.search + parsed.hash;\n}\n\nconst protocolRelative = Symbol.for(\"ufo:protocolRelative\");\nfunction parseURL(input = \"\", defaultProto) {\n const _specialProtoMatch = input.match(\n /^[\\s\\0]*(blob:|data:|javascript:|vbscript:)(.*)/i\n );\n if (_specialProtoMatch) {\n const [, _proto, _pathname = \"\"] = _specialProtoMatch;\n return {\n protocol: _proto.toLowerCase(),\n pathname: _pathname,\n href: _proto + _pathname,\n auth: \"\",\n host: \"\",\n search: \"\",\n hash: \"\"\n };\n }\n if (!hasProtocol(input, { acceptRelative: true })) {\n return defaultProto ? 
parseURL(defaultProto + input) : parsePath(input);\n }\n const [, protocol = \"\", auth, hostAndPath = \"\"] = input.replace(/\\\\/g, \"/\").match(/^[\\s\\0]*([\\w+.-]{2,}:)?\\/\\/([^/@]+@)?(.*)/) || [];\n let [, host = \"\", path = \"\"] = hostAndPath.match(/([^#/?]*)(.*)?/) || [];\n if (protocol === \"file:\") {\n path = path.replace(/\\/(?=[A-Za-z]:)/, \"\");\n }\n const { pathname, search, hash } = parsePath(path);\n return {\n protocol: protocol.toLowerCase(),\n auth: auth ? auth.slice(0, Math.max(0, auth.length - 1)) : \"\",\n host,\n pathname,\n search,\n hash,\n [protocolRelative]: !protocol\n };\n}\nfunction parsePath(input = \"\") {\n const [pathname = \"\", search = \"\", hash = \"\"] = (input.match(/([^#?]*)(\\?[^#]*)?(#.*)?/) || []).splice(1);\n return {\n pathname,\n search,\n hash\n };\n}\nfunction parseAuth(input = \"\") {\n const [username, password] = input.split(\":\");\n return {\n username: decode(username),\n password: decode(password)\n };\n}\nfunction parseHost(input = \"\") {\n const [hostname, port] = (input.match(/([^/:]*):?(\\d+)?/) || []).splice(1);\n return {\n hostname: decode(hostname),\n port\n };\n}\nfunction stringifyParsedURL(parsed) {\n const pathname = parsed.pathname || \"\";\n const search = parsed.search ? (parsed.search.startsWith(\"?\") ? \"\" : \"?\") + parsed.search : \"\";\n const hash = parsed.hash || \"\";\n const auth = parsed.auth ? parsed.auth + \"@\" : \"\";\n const host = parsed.host || \"\";\n const proto = parsed.protocol || parsed[protocolRelative] ? (parsed.protocol || \"\") + \"//\" : \"\";\n return proto + auth + host + pathname + search + hash;\n}\nconst FILENAME_STRICT_REGEX = /\\/([^/]+\\.[^/]+)$/;\nconst FILENAME_REGEX = /\\/([^/]+)$/;\nfunction parseFilename(input = \"\", opts) {\n const { pathname } = parseURL(input);\n const matches = opts?.strict ? pathname.match(FILENAME_STRICT_REGEX) : pathname.match(FILENAME_REGEX);\n return matches ? 
matches[1] : void 0;\n}\n\nclass $URL {\n protocol;\n host;\n auth;\n pathname;\n query = {};\n hash;\n constructor(input = \"\") {\n if (typeof input !== \"string\") {\n throw new TypeError(\n `URL input should be string received ${typeof input} (${input})`\n );\n }\n const parsed = parseURL(input);\n this.protocol = decode(parsed.protocol);\n this.host = decode(parsed.host);\n this.auth = decode(parsed.auth);\n this.pathname = decodePath(parsed.pathname);\n this.query = parseQuery(parsed.search);\n this.hash = decode(parsed.hash);\n }\n get hostname() {\n return parseHost(this.host).hostname;\n }\n get port() {\n return parseHost(this.host).port || \"\";\n }\n get username() {\n return parseAuth(this.auth).username;\n }\n get password() {\n return parseAuth(this.auth).password || \"\";\n }\n get hasProtocol() {\n return this.protocol.length;\n }\n get isAbsolute() {\n return this.hasProtocol || this.pathname[0] === \"/\";\n }\n get search() {\n const q = stringifyQuery(this.query);\n return q.length > 0 ? \"?\" + q : \"\";\n }\n get searchParams() {\n const p = new URLSearchParams();\n for (const name in this.query) {\n const value = this.query[name];\n if (Array.isArray(value)) {\n for (const v of value) {\n p.append(name, v);\n }\n } else {\n p.append(\n name,\n typeof value === \"string\" ? value : JSON.stringify(value)\n );\n }\n }\n return p;\n }\n get origin() {\n return (this.protocol ? this.protocol + \"//\" : \"\") + encodeHost(this.host);\n }\n get fullpath() {\n return encodePath(this.pathname) + this.search + encodeHash(this.hash);\n }\n get encodedAuth() {\n if (!this.auth) {\n return \"\";\n }\n const { username, password } = parseAuth(this.auth);\n return encodeURIComponent(username) + (password ? \":\" + encodeURIComponent(password) : \"\");\n }\n get href() {\n const auth = this.encodedAuth;\n const originWithAuth = (this.protocol ? this.protocol + \"//\" : \"\") + (auth ? 
auth + \"@\" : \"\") + encodeHost(this.host);\n return this.hasProtocol && this.isAbsolute ? originWithAuth + this.fullpath : this.fullpath;\n }\n append(url) {\n if (url.hasProtocol) {\n throw new Error(\"Cannot append a URL with protocol\");\n }\n Object.assign(this.query, url.query);\n if (url.pathname) {\n this.pathname = withTrailingSlash(this.pathname) + withoutLeadingSlash(url.pathname);\n }\n if (url.hash) {\n this.hash = url.hash;\n }\n }\n toJSON() {\n return this.href;\n }\n toString() {\n return this.href;\n }\n}\nfunction createURL(input) {\n return new $URL(input);\n}\n\nexport { $URL, cleanDoubleSlashes, createURL, decode, decodePath, decodeQueryKey, decodeQueryValue, encode, encodeHash, encodeHost, encodeParam, encodePath, encodeQueryItem, encodeQueryKey, encodeQueryValue, filterQuery, getQuery, hasLeadingSlash, hasProtocol, hasTrailingSlash, isEmptyURL, isEqual, isNonEmptyURL, isRelative, isSamePath, isScriptProtocol, joinRelativeURL, joinURL, normalizeURL, parseAuth, parseFilename, parseHost, parsePath, parseQuery, parseURL, resolveURL, stringifyParsedURL, stringifyQuery, withBase, withFragment, withHttp, withHttps, withLeadingSlash, withProtocol, withQuery, withTrailingSlash, withoutBase, withoutFragment, withoutHost, withoutLeadingSlash, withoutProtocol, withoutTrailingSlash 
};\n"],"x_google_ignoreList":[0],"mappings":"AAQU,OAAO;AAkLjB,MAAM,wBAAwB;AAC9B,MAAM,iBAAiB;AACvB,MAAM,0BAA0B;AAOhC,SAAS,YAAY,aAAa,OAAO,EAAE,EAAE;AAC3C,KAAI,OAAO,SAAS,UAClB,QAAO,EAAE,gBAAgB,MAAM;AAEjC,KAAI,KAAK,OACP,QAAO,sBAAsB,KAAK,YAAY;AAEhD,QAAO,eAAe,KAAK,YAAY,KAAK,KAAK,iBAAiB,wBAAwB,KAAK,YAAY,GAAG;;AA0QhH,MAAM,mBAAmB,OAAO,IAAI,uBAAuB;AAC3D,SAAS,SAAS,QAAQ,IAAI,cAAc;CAC1C,MAAM,qBAAqB,MAAM,MAC/B,mDACD;AACD,KAAI,oBAAoB;EACtB,MAAM,GAAG,QAAQ,YAAY,MAAM;AACnC,SAAO;GACL,UAAU,OAAO,aAAa;GAC9B,UAAU;GACV,MAAM,SAAS;GACf,MAAM;GACN,MAAM;GACN,QAAQ;GACR,MAAM;GACP;;AAEH,KAAI,CAAC,YAAY,OAAO,EAAE,gBAAgB,MAAM,CAAC,CAC/C,QAAO,eAAe,SAAS,eAAe,MAAM,GAAG,UAAU,MAAM;CAEzE,MAAM,GAAG,WAAW,IAAI,MAAM,cAAc,MAAM,MAAM,QAAQ,OAAO,IAAI,CAAC,MAAM,4CAA4C,IAAI,EAAE;CACpI,IAAI,GAAG,OAAO,IAAI,OAAO,MAAM,YAAY,MAAM,iBAAiB,IAAI,EAAE;AACxE,KAAI,aAAa,QACf,QAAO,KAAK,QAAQ,mBAAmB,GAAG;CAE5C,MAAM,EAAE,UAAU,QAAQ,SAAS,UAAU,KAAK;AAClD,QAAO;EACL,UAAU,SAAS,aAAa;EAChC,MAAM,OAAO,KAAK,MAAM,GAAG,KAAK,IAAI,GAAG,KAAK,SAAS,EAAE,CAAC,GAAG;EAC3D;EACA;EACA;EACA;GACC,mBAAmB,CAAC;EACtB;;AAEH,SAAS,UAAU,QAAQ,IAAI;CAC7B,MAAM,CAAC,WAAW,IAAI,SAAS,IAAI,OAAO,OAAO,MAAM,MAAM,2BAA2B,IAAI,EAAE,EAAE,OAAO,EAAE;AACzG,QAAO;EACL;EACA;EACA;EACD"}
|
|
@@ -0,0 +1,160 @@
|
|
|
1
|
+
import { t as getNitroRuntimeConfigRecord } from "./nitroConfigBridge-aZ1e5upQ.mjs";
|
|
2
|
+
//#region src/shared/config.ts
|
|
3
|
+
/** Read the full Nitro `useRuntimeConfig()` record (or `undefined` outside Nitro). */
|
|
4
|
+
function getRuntimeConfig() {
|
|
5
|
+
return getNitroRuntimeConfigRecord();
|
|
6
|
+
}
|
|
7
|
+
/**
|
|
8
|
+
* Resolve adapter configuration with the standard priority chain:
|
|
9
|
+
*
|
|
10
|
+
* 1. `overrides` passed to the drain factory
|
|
11
|
+
* 2. `runtimeConfig.evlog.{namespace}.{key}` (Nitro)
|
|
12
|
+
* 3. `runtimeConfig.{namespace}.{key}` (Nitro)
|
|
13
|
+
* 4. `process.env[envKey]` for each env in `field.env`
|
|
14
|
+
*/
|
|
15
|
+
async function resolveAdapterConfig(namespace, fields, overrides) {
|
|
16
|
+
const runtimeConfig = shouldProbeRuntimeConfig(fields, overrides) ? await getRuntimeConfig() : void 0;
|
|
17
|
+
const evlogNs = runtimeConfig?.evlog?.[namespace];
|
|
18
|
+
const rootNs = runtimeConfig?.[namespace];
|
|
19
|
+
const config = {};
|
|
20
|
+
for (const { key, env } of fields) config[key] = overrides?.[key] ?? evlogNs?.[key] ?? rootNs?.[key] ?? resolveEnv(env);
|
|
21
|
+
return config;
|
|
22
|
+
}
|
|
23
|
+
function shouldProbeRuntimeConfig(fields, overrides) {
|
|
24
|
+
return fields.some(({ key, env }) => {
|
|
25
|
+
if (overrides?.[key] !== void 0) return false;
|
|
26
|
+
if (!env) return false;
|
|
27
|
+
return resolveEnv(env) === void 0;
|
|
28
|
+
});
|
|
29
|
+
}
|
|
30
|
+
function resolveEnv(envKeys) {
|
|
31
|
+
if (!envKeys) return void 0;
|
|
32
|
+
for (const key of envKeys) {
|
|
33
|
+
const val = process.env[key];
|
|
34
|
+
if (val) return val;
|
|
35
|
+
}
|
|
36
|
+
}
|
|
37
|
+
//#endregion
|
|
38
|
+
//#region src/shared/http.ts
|
|
39
|
+
function isRetryable(error) {
|
|
40
|
+
if (error instanceof DOMException && error.name === "AbortError") return true;
|
|
41
|
+
if (error instanceof TypeError) return true;
|
|
42
|
+
if (error instanceof Error) {
|
|
43
|
+
const match = error.message.match(/API error: (\d+)/);
|
|
44
|
+
if (match) return Number.parseInt(match[1]) >= 500;
|
|
45
|
+
}
|
|
46
|
+
return false;
|
|
47
|
+
}
|
|
48
|
+
/**
 * POST a body with timeout + retry. Throws label-prefixed errors with a
 * truncated response body. Safe to call from any drain `send()`.
 */
async function httpPost({ url, headers, body, timeout, label, retries = 2 }) {
	// Guard against NaN / negative / fractional retry counts; fall back to 2.
	const normalizedRetries = Number.isFinite(retries) && retries >= 0 ? Math.floor(retries) : 2;
	let lastError;
	// attempt 0 is the initial request; up to `normalizedRetries` extra attempts follow.
	for (let attempt = 0; attempt <= normalizedRetries; attempt++) {
		const controller = new AbortController();
		// Abort the in-flight fetch once `timeout` ms elapse.
		const timeoutId = setTimeout(() => controller.abort(), timeout);
		try {
			const response = await fetch(url, {
				method: "POST",
				headers,
				body,
				signal: controller.signal
			});
			if (!response.ok) {
				// Body read is best-effort; it may itself reject (or be aborted by the timer).
				const text = await response.text().catch(() => "Unknown error");
				// Cap the echoed response body so error messages stay log-friendly.
				const safeText = text.length > 200 ? `${text.slice(0, 200)}...[truncated]` : text;
				throw new Error(`${label} API error: ${response.status} ${response.statusText} - ${safeText}`);
			}
			clearTimeout(timeoutId);
			return;
		} catch (error) {
			// Timer is cleared on both paths (success above, failure here) — no leak.
			clearTimeout(timeoutId);
			// Translate our own abort into a clearer timeout message; keep other errors as-is.
			if (error instanceof DOMException && error.name === "AbortError") lastError = /* @__PURE__ */ new Error(`${label} request timed out after ${timeout}ms`);
			else lastError = error;
			// Non-retryable failures (4xx, programming errors) and the final attempt rethrow immediately.
			if (!isRetryable(error) || attempt === normalizedRetries) throw lastError;
			// Exponential backoff: 200ms, 400ms, 800ms, ...
			await new Promise((r) => setTimeout(r, 200 * 2 ** attempt));
		}
	}
	throw lastError;
}
|
|
82
|
+
//#endregion
|
|
83
|
+
//#region src/shared/drain.ts
|
|
84
|
+
/**
|
|
85
|
+
* Build a drain callback. Errors raised by `send` are logged with the drain
|
|
86
|
+
* name and swallowed, so a failing drain never breaks the request pipeline.
|
|
87
|
+
*
|
|
88
|
+
* @example
|
|
89
|
+
* ```ts
|
|
90
|
+
* export function createMyDrain(overrides?: Partial<MyConfig>) {
|
|
91
|
+
* return defineDrain<MyConfig>({
|
|
92
|
+
* name: 'my-drain',
|
|
93
|
+
* resolve: () => ({ url: process.env.MY_URL ?? null }),
|
|
94
|
+
* send: async (events, config) => { ... },
|
|
95
|
+
* })
|
|
96
|
+
* }
|
|
97
|
+
* ```
|
|
98
|
+
*/
|
|
99
|
+
function defineDrain(options) {
|
|
100
|
+
return async (ctx) => {
|
|
101
|
+
const contexts = Array.isArray(ctx) ? ctx : [ctx];
|
|
102
|
+
if (contexts.length === 0) return;
|
|
103
|
+
const config = await options.resolve();
|
|
104
|
+
if (!config) return;
|
|
105
|
+
try {
|
|
106
|
+
await options.send(contexts.map((c) => c.event), config);
|
|
107
|
+
} catch (error) {
|
|
108
|
+
console.error(`[evlog/${options.name}] Failed to send events:`, error);
|
|
109
|
+
}
|
|
110
|
+
};
|
|
111
|
+
}
|
|
112
|
+
/** Fallback request timeout (ms) when neither config nor options provide one. */
const DEFAULT_HTTP_TIMEOUT = 5e3;
/**
 * Build an HTTP drain on top of {@link defineDrain}: adapters supply config
 * resolution plus an `encode()` step, and the shared {@link httpPost}
 * transport handles timeouts and retries (tunable via `resolveTimeout` /
 * `resolveRetries`, the resolved config, or the static options).
 *
 * @example
 * ```ts
 * export function createMyDrain(overrides?: Partial<MyConfig>) {
 *   return defineHttpDrain<MyConfig>({
 *     name: 'my',
 *     resolve: async () => {
 *       const cfg = await resolveAdapterConfig<MyConfig>('my', FIELDS, overrides)
 *       return cfg.apiKey ? cfg as MyConfig : null
 *     },
 *     encode: (events, config) => ({
 *       url: `${config.endpoint ?? 'https://api.my.com'}/ingest`,
 *       headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${config.apiKey}` },
 *       body: JSON.stringify(events),
 *     }),
 *   })
 * }
 * ```
 */
function defineHttpDrain(options) {
	const send = async (events, config) => {
		if (!events.length) return;
		// `encode` may return null to skip a batch without raising.
		const request = options.encode(events, config);
		if (!request) return;
		// Priority: per-config hook → resolved config → static option → default.
		const timeout = options.resolveTimeout?.(config) ?? config.timeout ?? options.timeout ?? DEFAULT_HTTP_TIMEOUT;
		const retries = options.resolveRetries?.(config) ?? config.retries ?? options.retries;
		const { url, headers, body } = request;
		await httpPost({
			url,
			headers,
			body,
			timeout,
			retries,
			label: options.name
		});
	};
	return defineDrain({
		name: options.name,
		resolve: options.resolve,
		send
	});
}
|
|
157
|
+
//#endregion
|
|
158
|
+
export { resolveAdapterConfig as a, getRuntimeConfig as i, defineHttpDrain as n, httpPost as r, defineDrain as t };
|
|
159
|
+
|
|
160
|
+
//# sourceMappingURL=drain-ByWUeOQC.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"drain-ByWUeOQC.mjs","names":[],"sources":["../src/shared/config.ts","../src/shared/http.ts","../src/shared/drain.ts"],"sourcesContent":["import { getNitroRuntimeConfigRecord } from './nitroConfigBridge'\n\n/** Read the full Nitro `useRuntimeConfig()` record (or `undefined` outside Nitro). */\nexport function getRuntimeConfig(): Promise<Record<string, any> | undefined> {\n return getNitroRuntimeConfigRecord()\n}\n\n/**\n * Description of a single adapter config field. `env` is the ordered list of\n * environment variables to fall back to, e.g. `['NUXT_AXIOM_TOKEN', 'AXIOM_TOKEN']`.\n */\nexport interface ConfigField<T> {\n key: keyof T & string\n env?: string[]\n}\n\n/**\n * Resolve adapter configuration with the standard priority chain:\n *\n * 1. `overrides` passed to the drain factory\n * 2. `runtimeConfig.evlog.{namespace}.{key}` (Nitro)\n * 3. `runtimeConfig.{namespace}.{key}` (Nitro)\n * 4. `process.env[envKey]` for each env in `field.env`\n */\nexport async function resolveAdapterConfig<T>(\n namespace: string,\n fields: ConfigField<T>[],\n overrides?: Partial<T>,\n): Promise<Partial<T>> {\n const runtimeConfig = shouldProbeRuntimeConfig(fields, overrides)\n ? await getRuntimeConfig()\n : undefined\n const evlogNs = runtimeConfig?.evlog?.[namespace]\n const rootNs = runtimeConfig?.[namespace]\n\n const config: Record<string, unknown> = {}\n\n for (const { key, env } of fields) {\n config[key] =\n overrides?.[key]\n ?? evlogNs?.[key]\n ?? rootNs?.[key]\n ?? 
resolveEnv(env)\n }\n\n return config as Partial<T>\n}\n\n// Avoid the Nitro virtual-module import when env/overrides already resolve\n// every env-backed field — optional tuning fields (timeout, retries) should\n// not trigger a runtime probe in non-Nitro runtimes.\nfunction shouldProbeRuntimeConfig<T>(\n fields: ConfigField<T>[],\n overrides?: Partial<T>,\n): boolean {\n return fields.some(({ key, env }) => {\n if (overrides?.[key] !== undefined) return false\n if (!env) return false\n return resolveEnv(env) === undefined\n })\n}\n\nfunction resolveEnv(envKeys?: string[]): string | undefined {\n if (!envKeys) return undefined\n for (const key of envKeys) {\n const val = process.env[key]\n if (val) return val\n }\n return undefined\n}\n","/**\n * Minimal HTTP transport for drain adapters: abort-based timeouts, exponential\n * backoff on `5xx` / network errors, response bodies truncated in error messages.\n */\n\nexport interface HttpPostOptions {\n url: string\n /** Caller is responsible for `Content-Type`. */\n headers: Record<string, string>\n /** Pre-serialized request body. */\n body: string\n /** Abort the request after this many milliseconds. */\n timeout: number\n /** Prefix used in error messages. */\n label: string\n /**\n * Retries network errors, aborts, and `5xx` responses with exponential backoff.\n * @default 2\n */\n retries?: number\n}\n\nfunction isRetryable(error: unknown): boolean {\n if (error instanceof DOMException && error.name === 'AbortError') return true\n if (error instanceof TypeError) return true\n if (error instanceof Error) {\n const match = error.message.match(/API error: (\\d+)/)\n if (match) return Number.parseInt(match[1]) >= 500\n }\n return false\n}\n\n/**\n * POST a body with timeout + retry. Throws label-prefixed errors with a\n * truncated response body. 
Safe to call from any drain `send()`.\n */\nexport async function httpPost({ url, headers, body, timeout, label, retries = 2 }: HttpPostOptions): Promise<void> {\n const normalizedRetries = Number.isFinite(retries) && retries >= 0 ? Math.floor(retries) : 2\n\n let lastError: Error | undefined\n\n for (let attempt = 0; attempt <= normalizedRetries; attempt++) {\n const controller = new AbortController()\n const timeoutId = setTimeout(() => controller.abort(), timeout)\n\n try {\n const response = await fetch(url, {\n method: 'POST',\n headers,\n body,\n signal: controller.signal,\n })\n\n if (!response.ok) {\n const text = await response.text().catch(() => 'Unknown error')\n const safeText = text.length > 200 ? `${text.slice(0, 200)}...[truncated]` : text\n throw new Error(`${label} API error: ${response.status} ${response.statusText} - ${safeText}`)\n }\n\n clearTimeout(timeoutId)\n return\n } catch (error) {\n clearTimeout(timeoutId)\n\n if (error instanceof DOMException && error.name === 'AbortError') {\n lastError = new Error(`${label} request timed out after ${timeout}ms`)\n } else {\n lastError = error as Error\n }\n\n if (!isRetryable(error) || attempt === normalizedRetries) {\n throw lastError\n }\n\n await new Promise<void>(r => setTimeout(r, 200 * 2 ** attempt))\n }\n }\n\n throw lastError!\n}\n","import type { DrainContext, WideEvent } from '../types'\nimport { httpPost } from './http'\n\n/**\n * Drain definition backed by an arbitrary `send` function. Use this for\n * non-HTTP transports (filesystem, in-memory queue, native SDK). For HTTP\n * backends, use `defineHttpDrain` instead.\n */\nexport interface DrainOptions<TConfig> {\n /** Stable identifier used in error logs. */\n name: string\n /** Return `null` to skip draining (e.g. missing API key in dev). */\n resolve: () => TConfig | null | Promise<TConfig | null>\n send: (events: WideEvent[], config: TConfig) => Promise<void>\n}\n\n/**\n * Build a drain callback. 
Errors raised by `send` are logged with the drain\n * name and swallowed, so a failing drain never breaks the request pipeline.\n *\n * @example\n * ```ts\n * export function createMyDrain(overrides?: Partial<MyConfig>) {\n * return defineDrain<MyConfig>({\n * name: 'my-drain',\n * resolve: () => ({ url: process.env.MY_URL ?? null }),\n * send: async (events, config) => { ... },\n * })\n * }\n * ```\n */\nexport function defineDrain<TConfig>(options: DrainOptions<TConfig>): (ctx: DrainContext | DrainContext[]) => Promise<void> {\n return async (ctx: DrainContext | DrainContext[]) => {\n const contexts = Array.isArray(ctx) ? ctx : [ctx]\n if (contexts.length === 0) return\n\n const config = await options.resolve()\n if (!config) return\n\n try {\n await options.send(contexts.map(c => c.event), config)\n } catch (error) {\n console.error(`[evlog/${options.name}] Failed to send events:`, error)\n }\n }\n}\n\nexport interface HttpDrainRequest {\n url: string\n /** Caller is responsible for `Content-Type`. */\n headers: Record<string, string>\n body: string\n}\n\n/** Adapters only need to ship config + `encode()` — no manual `fetch`. */\nexport interface HttpDrainOptions<TConfig> {\n /** Stable identifier used in error logs. */\n name: string\n /** Return `null` to skip draining (e.g. missing API key in dev). */\n resolve: () => TConfig | null | Promise<TConfig | null>\n /** Return `null` to skip the batch without raising. */\n encode: (events: WideEvent[], config: TConfig) => HttpDrainRequest | null\n /** @default 5000 */\n timeout?: number\n /** @default 2 */\n retries?: number\n /** Read the timeout off the resolved config (falls back to `timeout`). */\n resolveTimeout?: (config: TConfig) => number | undefined\n /** Read the retry count off the resolved config (falls back to `retries`). */\n resolveRetries?: (config: TConfig) => number | undefined\n}\n\nconst DEFAULT_HTTP_TIMEOUT = 5000\n\n/**\n * Build an HTTP drain. 
Timeouts/retries are resolved from the config (with\n * overrides via `resolveTimeout` / `resolveRetries`) and forwarded to\n * {@link httpPost}.\n *\n * @example\n * ```ts\n * export function createMyDrain(overrides?: Partial<MyConfig>) {\n * return defineHttpDrain<MyConfig>({\n * name: 'my',\n * resolve: async () => {\n * const cfg = await resolveAdapterConfig<MyConfig>('my', FIELDS, overrides)\n * return cfg.apiKey ? cfg as MyConfig : null\n * },\n * encode: (events, config) => ({\n * url: `${config.endpoint ?? 'https://api.my.com'}/ingest`,\n * headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${config.apiKey}` },\n * body: JSON.stringify(events),\n * }),\n * })\n * }\n * ```\n */\nexport function defineHttpDrain<TConfig>(options: HttpDrainOptions<TConfig>): (ctx: DrainContext | DrainContext[]) => Promise<void> {\n return defineDrain<TConfig>({\n name: options.name,\n resolve: options.resolve,\n send: async (events, config) => {\n if (events.length === 0) return\n const request = options.encode(events, config)\n if (!request) return\n const timeout = options.resolveTimeout?.(config)\n ?? (config as { timeout?: number }).timeout\n ?? options.timeout\n ?? DEFAULT_HTTP_TIMEOUT\n const retries = options.resolveRetries?.(config)\n ?? (config as { retries?: number }).retries\n ?? 
options.retries\n await httpPost({\n url: request.url,\n headers: request.headers,\n body: request.body,\n timeout,\n retries,\n label: options.name,\n })\n },\n })\n}\n"],"mappings":";;;AAGA,SAAgB,mBAA6D;AAC3E,QAAO,6BAA6B;;;;;;;;;;AAoBtC,eAAsB,qBACpB,WACA,QACA,WACqB;CACrB,MAAM,gBAAgB,yBAAyB,QAAQ,UAAU,GAC7D,MAAM,kBAAkB,GACxB,KAAA;CACJ,MAAM,UAAU,eAAe,QAAQ;CACvC,MAAM,SAAS,gBAAgB;CAE/B,MAAM,SAAkC,EAAE;AAE1C,MAAK,MAAM,EAAE,KAAK,SAAS,OACzB,QAAO,OACL,YAAY,QACT,UAAU,QACV,SAAS,QACT,WAAW,IAAI;AAGtB,QAAO;;AAMT,SAAS,yBACP,QACA,WACS;AACT,QAAO,OAAO,MAAM,EAAE,KAAK,UAAU;AACnC,MAAI,YAAY,SAAS,KAAA,EAAW,QAAO;AAC3C,MAAI,CAAC,IAAK,QAAO;AACjB,SAAO,WAAW,IAAI,KAAK,KAAA;GAC3B;;AAGJ,SAAS,WAAW,SAAwC;AAC1D,KAAI,CAAC,QAAS,QAAO,KAAA;AACrB,MAAK,MAAM,OAAO,SAAS;EACzB,MAAM,MAAM,QAAQ,IAAI;AACxB,MAAI,IAAK,QAAO;;;;;AC5CpB,SAAS,YAAY,OAAyB;AAC5C,KAAI,iBAAiB,gBAAgB,MAAM,SAAS,aAAc,QAAO;AACzE,KAAI,iBAAiB,UAAW,QAAO;AACvC,KAAI,iBAAiB,OAAO;EAC1B,MAAM,QAAQ,MAAM,QAAQ,MAAM,mBAAmB;AACrD,MAAI,MAAO,QAAO,OAAO,SAAS,MAAM,GAAG,IAAI;;AAEjD,QAAO;;;;;;AAOT,eAAsB,SAAS,EAAE,KAAK,SAAS,MAAM,SAAS,OAAO,UAAU,KAAqC;CAClH,MAAM,oBAAoB,OAAO,SAAS,QAAQ,IAAI,WAAW,IAAI,KAAK,MAAM,QAAQ,GAAG;CAE3F,IAAI;AAEJ,MAAK,IAAI,UAAU,GAAG,WAAW,mBAAmB,WAAW;EAC7D,MAAM,aAAa,IAAI,iBAAiB;EACxC,MAAM,YAAY,iBAAiB,WAAW,OAAO,EAAE,QAAQ;AAE/D,MAAI;GACF,MAAM,WAAW,MAAM,MAAM,KAAK;IAChC,QAAQ;IACR;IACA;IACA,QAAQ,WAAW;IACpB,CAAC;AAEF,OAAI,CAAC,SAAS,IAAI;IAChB,MAAM,OAAO,MAAM,SAAS,MAAM,CAAC,YAAY,gBAAgB;IAC/D,MAAM,WAAW,KAAK,SAAS,MAAM,GAAG,KAAK,MAAM,GAAG,IAAI,CAAC,kBAAkB;AAC7E,UAAM,IAAI,MAAM,GAAG,MAAM,cAAc,SAAS,OAAO,GAAG,SAAS,WAAW,KAAK,WAAW;;AAGhG,gBAAa,UAAU;AACvB;WACO,OAAO;AACd,gBAAa,UAAU;AAEvB,OAAI,iBAAiB,gBAAgB,MAAM,SAAS,aAClD,6BAAY,IAAI,MAAM,GAAG,MAAM,2BAA2B,QAAQ,IAAI;OAEtE,aAAY;AAGd,OAAI,CAAC,YAAY,MAAM,IAAI,YAAY,kBACrC,OAAM;AAGR,SAAM,IAAI,SAAc,MAAK,WAAW,GAAG,MAAM,KAAK,QAAQ,CAAC;;;AAInE,OAAM;;;;;;;;;;;;;;;;;;;AC/CR,SAAgB,YAAqB,SAAuF;AAC1H,QAAO,OAAO,QAAuC;EACnD,MAAM,WAAW,MAAM,QAAQ,IAAI,GAAG,MAAM,CAAC,IAAI;AACjD,MAAI,SAAS,WAAW,EAAG;EAE3B,MAAM,SAAS,MAAM,QAAQ,SAAS;AACtC,MA
AI,CAAC,OAAQ;AAEb,MAAI;AACF,SAAM,QAAQ,KAAK,SAAS,KAAI,MAAK,EAAE,MAAM,EAAE,OAAO;WAC/C,OAAO;AACd,WAAQ,MAAM,UAAU,QAAQ,KAAK,2BAA2B,MAAM;;;;AA8B5E,MAAM,uBAAuB;;;;;;;;;;;;;;;;;;;;;;;;AAyB7B,SAAgB,gBAAyB,SAA2F;AAClI,QAAO,YAAqB;EAC1B,MAAM,QAAQ;EACd,SAAS,QAAQ;EACjB,MAAM,OAAO,QAAQ,WAAW;AAC9B,OAAI,OAAO,WAAW,EAAG;GACzB,MAAM,UAAU,QAAQ,OAAO,QAAQ,OAAO;AAC9C,OAAI,CAAC,QAAS;GACd,MAAM,UAAU,QAAQ,iBAAiB,OAAO,IAC1C,OAAgC,WACjC,QAAQ,WACR;GACL,MAAM,UAAU,QAAQ,iBAAiB,OAAO,IAC1C,OAAgC,WACjC,QAAQ;AACb,SAAM,SAAS;IACb,KAAK,QAAQ;IACb,SAAS,QAAQ;IACjB,MAAM,QAAQ;IACd;IACA;IACA,OAAO,QAAQ;IAChB,CAAC;;EAEL,CAAC"}
|
package/dist/elysia/index.d.mts
CHANGED
|
@@ -1,5 +1,5 @@
|
|
|
1
|
-
import { Y as RequestLogger } from "../audit-
|
|
2
|
-
import { t as BaseEvlogOptions } from "../middleware-
|
|
1
|
+
import { Y as RequestLogger } from "../audit-CJl-wZ10.mjs";
|
|
2
|
+
import { t as BaseEvlogOptions } from "../middleware-CGM-bOvE.mjs";
|
|
3
3
|
import { Elysia } from "elysia";
|
|
4
4
|
|
|
5
5
|
//#region src/elysia/index.d.ts
|
|
@@ -23,6 +23,29 @@ type EvlogElysiaOptions = BaseEvlogOptions;
|
|
|
23
23
|
* ```
|
|
24
24
|
*/
|
|
25
25
|
declare function useLogger<T extends object = Record<string, unknown>>(): RequestLogger<T>;
|
|
26
|
+
/**
|
|
27
|
+
* Create an evlog plugin for Elysia.
|
|
28
|
+
*
|
|
29
|
+
* @example
|
|
30
|
+
* ```ts
|
|
31
|
+
* import { Elysia } from 'elysia'
|
|
32
|
+
* import { evlog } from 'evlog/elysia'
|
|
33
|
+
* import { createAxiomDrain } from 'evlog/axiom'
|
|
34
|
+
*
|
|
35
|
+
* const app = new Elysia()
|
|
36
|
+
* .use(evlog({
|
|
37
|
+
* drain: createAxiomDrain(),
|
|
38
|
+
* enrich: (ctx) => {
|
|
39
|
+
* ctx.event.region = process.env.FLY_REGION
|
|
40
|
+
* },
|
|
41
|
+
* }))
|
|
42
|
+
* .get('/health', ({ log }) => {
|
|
43
|
+
* log.set({ route: 'health' })
|
|
44
|
+
* return { ok: true }
|
|
45
|
+
* })
|
|
46
|
+
* .listen(3000)
|
|
47
|
+
* ```
|
|
48
|
+
*/
|
|
26
49
|
declare function evlog(options?: EvlogElysiaOptions): Elysia<"", {
|
|
27
50
|
decorator: {};
|
|
28
51
|
store: {};
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.d.mts","names":[],"sources":["../../src/elysia/index.ts"],"mappings":";;;;;
|
|
1
|
+
{"version":3,"file":"index.d.mts","names":[],"sources":["../../src/elysia/index.ts"],"mappings":";;;;;KAWY,kBAAA,GAAqB,gBAAA;;AAAjC;;;;;AAoBA;;;;;;;;;;;;iBAAgB,SAAA,oBAA6B,MAAA,kBAAA,CAAA,GAA4B,aAAA,CAAc,CAAA;;AAuEvF;;;;;;;;;;;;;;;;;;;;;;iBAAgB,KAAA,CAAM,OAAA,GAAS,kBAAA,GAAuB,MAAA"}
|
package/dist/elysia/index.mjs
CHANGED
|
@@ -1,6 +1,5 @@
|
|
|
1
|
-
import {
|
|
2
|
-
import { t as
|
|
3
|
-
import { t as attachForkToLogger } from "../fork-D1j1Fuzy.mjs";
|
|
1
|
+
import { t as attachForkToLogger } from "../fork-D44V93-K.mjs";
|
|
2
|
+
import { t as defineFrameworkIntegration } from "../integration-Bz8X6_Lb.mjs";
|
|
4
3
|
import { AsyncLocalStorage } from "node:async_hooks";
|
|
5
4
|
import { Elysia } from "elysia";
|
|
6
5
|
//#region src/elysia/index.ts
|
|
@@ -29,29 +28,63 @@ function useLogger() {
|
|
|
29
28
|
if (!logger || !activeLoggers.has(logger)) throw new Error("[evlog] useLogger() was called outside of an evlog plugin context. Make sure app.use(evlog()) is registered before your routes.");
|
|
30
29
|
return logger;
|
|
31
30
|
}
|
|
31
|
+
const integration = defineFrameworkIntegration({
|
|
32
|
+
name: "elysia",
|
|
33
|
+
extractRequest: ({ request, path, headers }) => ({
|
|
34
|
+
method: request.method,
|
|
35
|
+
path,
|
|
36
|
+
headers,
|
|
37
|
+
requestId: headers["x-request-id"]
|
|
38
|
+
}),
|
|
39
|
+
attachLogger: ({ request, path, headers }, logger) => {
|
|
40
|
+
attachForkToLogger(storage, logger, {
|
|
41
|
+
method: request.method,
|
|
42
|
+
path,
|
|
43
|
+
requestId: headers["x-request-id"]
|
|
44
|
+
}, {
|
|
45
|
+
onChildEnter: (child) => {
|
|
46
|
+
activeLoggers.add(child);
|
|
47
|
+
},
|
|
48
|
+
onChildExit: (child) => {
|
|
49
|
+
activeLoggers.delete(child);
|
|
50
|
+
}
|
|
51
|
+
});
|
|
52
|
+
activeLoggers.add(logger);
|
|
53
|
+
}
|
|
54
|
+
});
|
|
55
|
+
/**
|
|
56
|
+
* Create an evlog plugin for Elysia.
|
|
57
|
+
*
|
|
58
|
+
* @example
|
|
59
|
+
* ```ts
|
|
60
|
+
* import { Elysia } from 'elysia'
|
|
61
|
+
* import { evlog } from 'evlog/elysia'
|
|
62
|
+
* import { createAxiomDrain } from 'evlog/axiom'
|
|
63
|
+
*
|
|
64
|
+
* const app = new Elysia()
|
|
65
|
+
* .use(evlog({
|
|
66
|
+
* drain: createAxiomDrain(),
|
|
67
|
+
* enrich: (ctx) => {
|
|
68
|
+
* ctx.event.region = process.env.FLY_REGION
|
|
69
|
+
* },
|
|
70
|
+
* }))
|
|
71
|
+
* .get('/health', ({ log }) => {
|
|
72
|
+
* log.set({ route: 'health' })
|
|
73
|
+
* return { ok: true }
|
|
74
|
+
* })
|
|
75
|
+
* .listen(3000)
|
|
76
|
+
* ```
|
|
77
|
+
*/
|
|
32
78
|
function evlog(options = {}) {
|
|
33
79
|
const emitted = /* @__PURE__ */ new WeakSet();
|
|
34
80
|
const requestState = /* @__PURE__ */ new WeakMap();
|
|
35
81
|
return new Elysia({ name: "evlog" }).derive({ as: "global" }, ({ request, path, headers }) => {
|
|
36
|
-
const
|
|
37
|
-
|
|
82
|
+
const ctx = {
|
|
83
|
+
request,
|
|
38
84
|
path,
|
|
39
|
-
|
|
40
|
-
headers: filterSafeHeaders(headers),
|
|
41
|
-
...options
|
|
85
|
+
headers
|
|
42
86
|
};
|
|
43
|
-
const { logger, finish, skipped } =
|
|
44
|
-
if (!skipped) {
|
|
45
|
-
attachForkToLogger(storage, logger, middlewareOpts, {
|
|
46
|
-
onChildEnter: (child) => {
|
|
47
|
-
activeLoggers.add(child);
|
|
48
|
-
},
|
|
49
|
-
onChildExit: (child) => {
|
|
50
|
-
activeLoggers.delete(child);
|
|
51
|
-
}
|
|
52
|
-
});
|
|
53
|
-
activeLoggers.add(logger);
|
|
54
|
-
}
|
|
87
|
+
const { logger, finish, skipped } = integration.start(ctx, options);
|
|
55
88
|
storage.enterWith(logger);
|
|
56
89
|
requestState.set(request, {
|
|
57
90
|
finish,
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.mjs","names":[],"sources":["../../src/elysia/index.ts"],"sourcesContent":["import { AsyncLocalStorage } from 'node:async_hooks'\nimport { Elysia } from 'elysia'\nimport type { RequestLogger } from '../types'\nimport {
|
|
1
|
+
{"version":3,"file":"index.mjs","names":[],"sources":["../../src/elysia/index.ts"],"sourcesContent":["import { AsyncLocalStorage } from 'node:async_hooks'\nimport { Elysia } from 'elysia'\nimport type { RequestLogger } from '../types'\nimport { defineFrameworkIntegration } from '../shared/integration'\nimport type { BaseEvlogOptions } from '../shared/middleware'\nimport { attachForkToLogger } from '../shared/fork'\n\nconst storage = new AsyncLocalStorage<RequestLogger>()\n\nconst activeLoggers = new WeakSet<RequestLogger>()\n\nexport type EvlogElysiaOptions = BaseEvlogOptions\n\n/**\n * Get the request-scoped logger from anywhere in the call stack.\n * Must be called inside a request handled by the `evlog()` plugin.\n *\n * Unlike other frameworks, Elysia uses `storage.enterWith()` which persists\n * beyond the request lifecycle. This accessor additionally checks `activeLoggers`\n * to ensure the logger belongs to an in-flight request.\n *\n * @example\n * ```ts\n * import { useLogger } from 'evlog/elysia'\n *\n * function findUser(id: string) {\n * const log = useLogger()\n * log.set({ user: { id } })\n * }\n * ```\n */\nexport function useLogger<T extends object = Record<string, unknown>>(): RequestLogger<T> {\n const logger = storage.getStore()\n if (!logger || !activeLoggers.has(logger)) {\n throw new Error(\n '[evlog] useLogger() was called outside of an evlog plugin context. 
'\n + 'Make sure app.use(evlog()) is registered before your routes.',\n )\n }\n return logger as RequestLogger<T>\n}\n\ninterface ElysiaContext {\n request: Request\n path: string\n headers: Record<string, string>\n}\n\nconst integration = defineFrameworkIntegration<ElysiaContext>({\n name: 'elysia',\n extractRequest: ({ request, path, headers }) => ({\n method: request.method,\n path,\n headers,\n requestId: headers['x-request-id'],\n }),\n attachLogger: ({ request, path, headers }, logger) => {\n attachForkToLogger(storage, logger, {\n method: request.method,\n path,\n requestId: headers['x-request-id'],\n }, {\n onChildEnter: (child) => {\n activeLoggers.add(child)\n },\n onChildExit: (child) => {\n activeLoggers.delete(child)\n },\n })\n activeLoggers.add(logger)\n },\n})\n\ninterface RequestState {\n finish: (opts?: { status?: number; error?: Error }) => Promise<unknown>\n skipped: boolean\n logger: RequestLogger\n}\n\n/**\n * Create an evlog plugin for Elysia.\n *\n * @example\n * ```ts\n * import { Elysia } from 'elysia'\n * import { evlog } from 'evlog/elysia'\n * import { createAxiomDrain } from 'evlog/axiom'\n *\n * const app = new Elysia()\n * .use(evlog({\n * drain: createAxiomDrain(),\n * enrich: (ctx) => {\n * ctx.event.region = process.env.FLY_REGION\n * },\n * }))\n * .get('/health', ({ log }) => {\n * log.set({ route: 'health' })\n * return { ok: true }\n * })\n * .listen(3000)\n * ```\n */\nexport function evlog(options: EvlogElysiaOptions = {}) {\n const emitted = new WeakSet<Request>()\n const requestState = new WeakMap<Request, RequestState>()\n\n return new Elysia({ name: 'evlog' })\n .derive({ as: 'global' }, ({ request, path, headers }) => {\n const ctx: ElysiaContext = { request, path, headers: headers as Record<string, string> }\n const { logger, finish, skipped } = integration.start(ctx, options)\n storage.enterWith(logger)\n requestState.set(request, { finish, skipped, logger })\n return { log: logger }\n })\n .onAfterResponse({ as: 
'global' }, async ({ request, set }) => {\n const state = requestState.get(request)\n if (!state || state.skipped || emitted.has(request)) return\n emitted.add(request)\n await state.finish({ status: set.status as number || 200 })\n activeLoggers.delete(state.logger)\n storage.enterWith(undefined as unknown as RequestLogger)\n })\n .onError({ as: 'global' }, async ({ request, error }) => {\n const state = requestState.get(request)\n if (!state || state.skipped || emitted.has(request)) return\n emitted.add(request)\n const err = error instanceof Error ? error : new Error(String(error))\n state.logger.error(err)\n await state.finish({ error: err })\n activeLoggers.delete(state.logger)\n storage.enterWith(undefined as unknown as RequestLogger)\n })\n}\n"],"mappings":";;;;;AAOA,MAAM,UAAU,IAAI,mBAAkC;AAEtD,MAAM,gCAAgB,IAAI,SAAwB;;;;;;;;;;;;;;;;;;;AAsBlD,SAAgB,YAA0E;CACxF,MAAM,SAAS,QAAQ,UAAU;AACjC,KAAI,CAAC,UAAU,CAAC,cAAc,IAAI,OAAO,CACvC,OAAM,IAAI,MACR,kIAED;AAEH,QAAO;;AAST,MAAM,cAAc,2BAA0C;CAC5D,MAAM;CACN,iBAAiB,EAAE,SAAS,MAAM,eAAe;EAC/C,QAAQ,QAAQ;EAChB;EACA;EACA,WAAW,QAAQ;EACpB;CACD,eAAe,EAAE,SAAS,MAAM,WAAW,WAAW;AACpD,qBAAmB,SAAS,QAAQ;GAClC,QAAQ,QAAQ;GAChB;GACA,WAAW,QAAQ;GACpB,EAAE;GACD,eAAe,UAAU;AACvB,kBAAc,IAAI,MAAM;;GAE1B,cAAc,UAAU;AACtB,kBAAc,OAAO,MAAM;;GAE9B,CAAC;AACF,gBAAc,IAAI,OAAO;;CAE5B,CAAC;;;;;;;;;;;;;;;;;;;;;;;;AA+BF,SAAgB,MAAM,UAA8B,EAAE,EAAE;CACtD,MAAM,0BAAU,IAAI,SAAkB;CACtC,MAAM,+BAAe,IAAI,SAAgC;AAEzD,QAAO,IAAI,OAAO,EAAE,MAAM,SAAS,CAAC,CACjC,OAAO,EAAE,IAAI,UAAU,GAAG,EAAE,SAAS,MAAM,cAAc;EACxD,MAAM,MAAqB;GAAE;GAAS;GAAe;GAAmC;EACxF,MAAM,EAAE,QAAQ,QAAQ,YAAY,YAAY,MAAM,KAAK,QAAQ;AACnE,UAAQ,UAAU,OAAO;AACzB,eAAa,IAAI,SAAS;GAAE;GAAQ;GAAS;GAAQ,CAAC;AACtD,SAAO,EAAE,KAAK,QAAQ;GACtB,CACD,gBAAgB,EAAE,IAAI,UAAU,EAAE,OAAO,EAAE,SAAS,UAAU;EAC7D,MAAM,QAAQ,aAAa,IAAI,QAAQ;AACvC,MAAI,CAAC,SAAS,MAAM,WAAW,QAAQ,IAAI,QAAQ,CAAE;AACrD,UAAQ,IAAI,QAAQ;AACpB,QAAM,MAAM,OAAO,EAAE,QAAQ,IAAI,UAAoB,KAAK,CAAC;AAC3D,gBAAc,OAAO,MAAM,OAAO;AAClC,UAAQ,UAAU,KAAA,EAAsC;GACxD,CACD,QAAQ,EAAE,IAAI,UAA
U,EAAE,OAAO,EAAE,SAAS,YAAY;EACvD,MAAM,QAAQ,aAAa,IAAI,QAAQ;AACvC,MAAI,CAAC,SAAS,MAAM,WAAW,QAAQ,IAAI,QAAQ,CAAE;AACrD,UAAQ,IAAI,QAAQ;EACpB,MAAM,MAAM,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,MAAM,CAAC;AACrE,QAAM,OAAO,MAAM,IAAI;AACvB,QAAM,MAAM,OAAO,EAAE,OAAO,KAAK,CAAC;AAClC,gBAAc,OAAO,MAAM,OAAO;AAClC,UAAQ,UAAU,KAAA,EAAsC;GACxD"}
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
import { t as mergeEventField } from "./event-ef-5Dbxg.mjs";
|
|
2
|
+
//#region src/shared/compose.ts
|
|
3
|
+
/**
|
|
4
|
+
* Compose enricher callbacks into one. Runs in registration order; errors are
|
|
5
|
+
* caught per-callback so one buggy enricher never blocks the others.
|
|
6
|
+
*/
|
|
7
|
+
function composeEnrichers(enrichers, options = {}) {
|
|
8
|
+
const label = options.name ?? "compose-enrichers";
|
|
9
|
+
return async (ctx) => {
|
|
10
|
+
for (const enricher of enrichers) try {
|
|
11
|
+
await enricher(ctx);
|
|
12
|
+
} catch (err) {
|
|
13
|
+
console.error(`[evlog/${label}] enrich failed:`, err);
|
|
14
|
+
}
|
|
15
|
+
};
|
|
16
|
+
}
|
|
17
|
+
/**
|
|
18
|
+
* Fan out to multiple drains concurrently (`Promise.allSettled`). A slow
|
|
19
|
+
* Sentry drain never blocks an Axiom drain on the same event.
|
|
20
|
+
*/
|
|
21
|
+
function composeDrains(drains, options = {}) {
|
|
22
|
+
const label = options.name ?? "compose-drains";
|
|
23
|
+
return async (ctx) => {
|
|
24
|
+
if (drains.length === 0) return;
|
|
25
|
+
await Promise.allSettled(drains.map(async (drain) => {
|
|
26
|
+
try {
|
|
27
|
+
await drain(ctx);
|
|
28
|
+
} catch (err) {
|
|
29
|
+
console.error(`[evlog/${label}] drain failed:`, err);
|
|
30
|
+
}
|
|
31
|
+
}));
|
|
32
|
+
};
|
|
33
|
+
}
|
|
34
|
+
/**
|
|
35
|
+
* Compose tail-sampling `keep` callbacks. `ctx.shouldKeep` is true after the
|
|
36
|
+
* run if any callback set it. Errors are isolated.
|
|
37
|
+
*/
|
|
38
|
+
function composeKeep(keepers, options = {}) {
|
|
39
|
+
const label = options.name ?? "compose-keep";
|
|
40
|
+
return async (ctx) => {
|
|
41
|
+
for (const keep of keepers) try {
|
|
42
|
+
await keep(ctx);
|
|
43
|
+
} catch (err) {
|
|
44
|
+
console.error(`[evlog/${label}] keep failed:`, err);
|
|
45
|
+
}
|
|
46
|
+
};
|
|
47
|
+
}
|
|
48
|
+
/** Merge plugin lists. When two plugins share a `name`, the later registration wins. */
function composePlugins(...lists) {
	// Keyed by plugin name; Map insertion order preserves first-seen position.
	const byName = new Map();
	for (const list of lists) {
		for (const plugin of list ?? []) {
			byName.set(plugin.name, plugin);
		}
	}
	return [...byName.values()];
}
|
|
57
|
+
//#endregion
|
|
58
|
+
//#region src/shared/enricher.ts
|
|
59
|
+
/**
 * Build an enricher from a named `compute` step.
 *
 * The returned callback skips enrichment when `compute` returns `undefined`,
 * merges the result into `ctx.event[field]` via {@link mergeEventField}
 * (honoring `options.overwrite`), and logs — never rethrows — any error that
 * `compute` raises, under `[evlog/{name}]`.
 *
 * @example
 * ```ts
 * export const tenantEnricher = defineEnricher<{ id: string }>({
 *   name: 'tenant',
 *   field: 'tenant',
 *   compute({ headers }) {
 *     const id = getHeader(headers, 'x-tenant-id')
 *     return id ? { id } : undefined
 *   },
 * })
 * ```
 */
function defineEnricher(def, options = {}) {
	const { name, field, compute } = def;
	return (ctx) => {
		let value;
		try {
			value = compute(ctx);
		} catch (err) {
			console.error(`[evlog/${name}] enrich failed:`, err);
			return;
		}
		// `undefined` means "nothing to add"; a missing `field` means `compute`
		// handles its own merging inside the event.
		if (value === undefined) return;
		if (!field) return;
		ctx.event[field] = mergeEventField(ctx.event[field], value, options.overwrite);
	};
}
|
|
92
|
+
//#endregion
|
|
93
|
+
export { composePlugins as a, composeKeep as i, composeDrains as n, composeEnrichers as r, defineEnricher as t };
|
|
94
|
+
|
|
95
|
+
//# sourceMappingURL=enricher-BA6viylF.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"enricher-BA6viylF.mjs","names":[],"sources":["../src/shared/compose.ts","../src/shared/enricher.ts"],"sourcesContent":["import type { DrainContext, EnrichContext, TailSamplingContext } from '../types'\nimport type { EvlogPlugin } from './plugin'\n\n/**\n * Compose enricher callbacks into one. Runs in registration order; errors are\n * caught per-callback so one buggy enricher never blocks the others.\n */\nexport function composeEnrichers(\n enrichers: Array<(ctx: EnrichContext) => void | Promise<void>>,\n options: { name?: string } = {},\n): (ctx: EnrichContext) => Promise<void> {\n const label = options.name ?? 'compose-enrichers'\n return async (ctx) => {\n for (const enricher of enrichers) {\n try {\n await enricher(ctx)\n } catch (err) {\n console.error(`[evlog/${label}] enrich failed:`, err)\n }\n }\n }\n}\n\n/**\n * Fan out to multiple drains concurrently (`Promise.allSettled`). A slow\n * Sentry drain never blocks an Axiom drain on the same event.\n */\nexport function composeDrains(\n drains: Array<(ctx: DrainContext) => void | Promise<void>>,\n options: { name?: string } = {},\n): (ctx: DrainContext) => Promise<void> {\n const label = options.name ?? 'compose-drains'\n return async (ctx) => {\n if (drains.length === 0) return\n await Promise.allSettled(\n drains.map(async (drain) => {\n try {\n await drain(ctx)\n } catch (err) {\n console.error(`[evlog/${label}] drain failed:`, err)\n }\n }),\n )\n }\n}\n\n/**\n * Compose tail-sampling `keep` callbacks. `ctx.shouldKeep` is true after the\n * run if any callback set it. Errors are isolated.\n */\nexport function composeKeep(\n keepers: Array<(ctx: TailSamplingContext) => void | Promise<void>>,\n options: { name?: string } = {},\n): (ctx: TailSamplingContext) => Promise<void> {\n const label = options.name ?? 
'compose-keep'\n return async (ctx) => {\n for (const keep of keepers) {\n try {\n await keep(ctx)\n } catch (err) {\n console.error(`[evlog/${label}] keep failed:`, err)\n }\n }\n }\n}\n\n/** Merge plugin lists. Later registrations override earlier ones by `name`. */\nexport function composePlugins(...lists: Array<EvlogPlugin[] | undefined>): EvlogPlugin[] {\n const merged = new Map<string, EvlogPlugin>()\n for (const list of lists) {\n if (!list) continue\n for (const plugin of list) {\n merged.set(plugin.name, plugin)\n }\n }\n return Array.from(merged.values())\n}\n","import type { EnrichContext, WideEvent } from '../types'\nimport { mergeEventField } from './event'\n\nexport interface EnricherOptions {\n /**\n * Replace existing event fields with the computed value. Defaults to `false`\n * so user-provided context (e.g. `log.set({ geo: ... })`) wins.\n */\n overwrite?: boolean\n}\n\nexport interface EnricherDefinition<T extends object> {\n /** Stable identifier used in error logs. */\n name: string\n /**\n * Top-level event field to merge into. Omit when the enricher writes to\n * multiple fields and handles its own merging inside `compute`.\n */\n field?: keyof WideEvent & string\n /** Return `undefined` to skip enrichment (e.g. when a required header is missing). */\n compute: (ctx: EnrichContext) => T | undefined\n}\n\n/**\n * Build an enricher: skips when `compute` returns `undefined`, merges with\n * {@link mergeEventField} respecting `overwrite`, and isolates errors under\n * `[evlog/{name}]`.\n *\n * @example\n * ```ts\n * export const tenantEnricher = defineEnricher<{ id: string }>({\n * name: 'tenant',\n * field: 'tenant',\n * compute({ headers }) {\n * const id = getHeader(headers, 'x-tenant-id')\n * return id ? 
{ id } : undefined\n * },\n * })\n * ```\n */\nexport function defineEnricher<T extends object>(\n def: EnricherDefinition<T>,\n options: EnricherOptions = {},\n): (ctx: EnrichContext) => void {\n const { name, field, compute } = def\n return (ctx) => {\n let computed: T | undefined\n try {\n computed = compute(ctx)\n } catch (err) {\n console.error(`[evlog/${name}] enrich failed:`, err)\n return\n }\n if (computed === undefined) return\n if (!field) return\n const target = ctx.event[field]\n ctx.event[field] = mergeEventField<T>(target, computed, options.overwrite)\n }\n}\n"],"mappings":";;;;;;AAOA,SAAgB,iBACd,WACA,UAA6B,EAAE,EACQ;CACvC,MAAM,QAAQ,QAAQ,QAAQ;AAC9B,QAAO,OAAO,QAAQ;AACpB,OAAK,MAAM,YAAY,UACrB,KAAI;AACF,SAAM,SAAS,IAAI;WACZ,KAAK;AACZ,WAAQ,MAAM,UAAU,MAAM,mBAAmB,IAAI;;;;;;;;AAU7D,SAAgB,cACd,QACA,UAA6B,EAAE,EACO;CACtC,MAAM,QAAQ,QAAQ,QAAQ;AAC9B,QAAO,OAAO,QAAQ;AACpB,MAAI,OAAO,WAAW,EAAG;AACzB,QAAM,QAAQ,WACZ,OAAO,IAAI,OAAO,UAAU;AAC1B,OAAI;AACF,UAAM,MAAM,IAAI;YACT,KAAK;AACZ,YAAQ,MAAM,UAAU,MAAM,kBAAkB,IAAI;;IAEtD,CACH;;;;;;;AAQL,SAAgB,YACd,SACA,UAA6B,EAAE,EACc;CAC7C,MAAM,QAAQ,QAAQ,QAAQ;AAC9B,QAAO,OAAO,QAAQ;AACpB,OAAK,MAAM,QAAQ,QACjB,KAAI;AACF,SAAM,KAAK,IAAI;WACR,KAAK;AACZ,WAAQ,MAAM,UAAU,MAAM,iBAAiB,IAAI;;;;;AAO3D,SAAgB,eAAe,GAAG,OAAwD;CACxF,MAAM,yBAAS,IAAI,KAA0B;AAC7C,MAAK,MAAM,QAAQ,OAAO;AACxB,MAAI,CAAC,KAAM;AACX,OAAK,MAAM,UAAU,KACnB,QAAO,IAAI,OAAO,MAAM,OAAO;;AAGnC,QAAO,MAAM,KAAK,OAAO,QAAQ,CAAC;;;;;;;;;;;;;;;;;;;;;ACnCpC,SAAgB,eACd,KACA,UAA2B,EAAE,EACC;CAC9B,MAAM,EAAE,MAAM,OAAO,YAAY;AACjC,SAAQ,QAAQ;EACd,IAAI;AACJ,MAAI;AACF,cAAW,QAAQ,IAAI;WAChB,KAAK;AACZ,WAAQ,MAAM,UAAU,KAAK,mBAAmB,IAAI;AACpD;;AAEF,MAAI,aAAa,KAAA,EAAW;AAC5B,MAAI,CAAC,MAAO;EACZ,MAAM,SAAS,IAAI,MAAM;AACzB,MAAI,MAAM,SAAS,gBAAmB,QAAQ,UAAU,QAAQ,UAAU"}
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
import { I as EnrichContext, it as WideEvent } from "./audit-CJl-wZ10.mjs";
|
|
2
|
+
|
|
3
|
+
//#region src/shared/enricher.d.ts
|
|
4
|
+
interface EnricherOptions {
|
|
5
|
+
/**
|
|
6
|
+
* Replace existing event fields with the computed value. Defaults to `false`
|
|
7
|
+
* so user-provided context (e.g. `log.set({ geo: ... })`) wins.
|
|
8
|
+
*/
|
|
9
|
+
overwrite?: boolean;
|
|
10
|
+
}
|
|
11
|
+
interface EnricherDefinition<T extends object> {
|
|
12
|
+
/** Stable identifier used in error logs. */
|
|
13
|
+
name: string;
|
|
14
|
+
/**
|
|
15
|
+
* Top-level event field to merge into. Omit when the enricher writes to
|
|
16
|
+
* multiple fields and handles its own merging inside `compute`.
|
|
17
|
+
*/
|
|
18
|
+
field?: keyof WideEvent & string;
|
|
19
|
+
/** Return `undefined` to skip enrichment (e.g. when a required header is missing). */
|
|
20
|
+
compute: (ctx: EnrichContext) => T | undefined;
|
|
21
|
+
}
|
|
22
|
+
/**
|
|
23
|
+
* Build an enricher: skips when `compute` returns `undefined`, merges with
|
|
24
|
+
* {@link mergeEventField} respecting `overwrite`, and isolates errors under
|
|
25
|
+
* `[evlog/{name}]`.
|
|
26
|
+
*
|
|
27
|
+
* @example
|
|
28
|
+
* ```ts
|
|
29
|
+
* export const tenantEnricher = defineEnricher<{ id: string }>({
|
|
30
|
+
* name: 'tenant',
|
|
31
|
+
* field: 'tenant',
|
|
32
|
+
* compute({ headers }) {
|
|
33
|
+
* const id = getHeader(headers, 'x-tenant-id')
|
|
34
|
+
* return id ? { id } : undefined
|
|
35
|
+
* },
|
|
36
|
+
* })
|
|
37
|
+
* ```
|
|
38
|
+
*/
|
|
39
|
+
declare function defineEnricher<T extends object>(def: EnricherDefinition<T>, options?: EnricherOptions): (ctx: EnrichContext) => void;
|
|
40
|
+
//#endregion
|
|
41
|
+
export { EnricherOptions as n, defineEnricher as r, EnricherDefinition as t };
|
|
42
|
+
//# sourceMappingURL=enricher-CLSnrzrr.d.mts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"enricher-CLSnrzrr.d.mts","names":[],"sources":["../src/shared/enricher.ts"],"mappings":";;;UAGiB,eAAA;;AAAjB;;;EAKE,SAAA;AAAA;AAAA,UAGe,kBAAA;EAAkB;EAEjC,IAAA;EAKc;;;;EAAd,KAAA,SAAc,SAAA;EAPoB;EASlC,OAAA,GAAU,GAAA,EAAK,aAAA,KAAkB,CAAA;AAAA;;;;;;;;AAoBnC;;;;;;;;;;iBAAgB,cAAA,kBAAA,CACd,GAAA,EAAK,kBAAA,CAAmB,CAAA,GACxB,OAAA,GAAS,eAAA,IACP,GAAA,EAAK,aAAA"}
|
package/dist/enrichers.d.mts
CHANGED
|
@@ -1,13 +1,7 @@
|
|
|
1
|
-
import { I as EnrichContext } from "./audit-
|
|
1
|
+
import { I as EnrichContext } from "./audit-CJl-wZ10.mjs";
|
|
2
|
+
import { n as EnricherOptions } from "./enricher-CLSnrzrr.mjs";
|
|
2
3
|
|
|
3
4
|
//#region src/enrichers/index.d.ts
|
|
4
|
-
interface EnricherOptions {
|
|
5
|
-
/**
|
|
6
|
-
* When true, overwrite any existing fields in the event.
|
|
7
|
-
* Defaults to false to preserve user-provided data.
|
|
8
|
-
*/
|
|
9
|
-
overwrite?: boolean;
|
|
10
|
-
}
|
|
11
5
|
interface UserAgentInfo {
|
|
12
6
|
raw: string;
|
|
13
7
|
browser?: {
|
|
@@ -69,6 +63,19 @@ declare function createRequestSizeEnricher(options?: EnricherOptions): (ctx: Enr
|
|
|
69
63
|
* Also sets `event.traceId` and `event.spanId` at the top level.
|
|
70
64
|
*/
|
|
71
65
|
declare function createTraceContextEnricher(options?: EnricherOptions): (ctx: EnrichContext) => void;
|
|
66
|
+
/**
|
|
67
|
+
* Compose every built-in enricher into a single async enricher, in the order
|
|
68
|
+
* `userAgent → geo → requestSize → traceContext`.
|
|
69
|
+
*
|
|
70
|
+
* Drop-in shorthand for the most common middleware setup:
|
|
71
|
+
*
|
|
72
|
+
* ```ts
|
|
73
|
+
* import { createDefaultEnrichers } from 'evlog/enrichers'
|
|
74
|
+
*
|
|
75
|
+
* app.use(evlog({ enrich: createDefaultEnrichers() }))
|
|
76
|
+
* ```
|
|
77
|
+
*/
|
|
78
|
+
declare function createDefaultEnrichers(options?: EnricherOptions): (ctx: EnrichContext) => Promise<void>;
|
|
72
79
|
//#endregion
|
|
73
|
-
export { EnricherOptions, GeoInfo, RequestSizeInfo, TraceContextInfo, UserAgentInfo, createGeoEnricher, createRequestSizeEnricher, createTraceContextEnricher, createUserAgentEnricher };
|
|
80
|
+
export { type EnricherOptions, GeoInfo, RequestSizeInfo, TraceContextInfo, UserAgentInfo, createDefaultEnrichers, createGeoEnricher, createRequestSizeEnricher, createTraceContextEnricher, createUserAgentEnricher };
|
|
74
81
|
//# sourceMappingURL=enrichers.d.mts.map
|
package/dist/enrichers.d.mts.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"enrichers.d.mts","names":[],"sources":["../src/enrichers/index.ts"],"mappings":"
|
|
1
|
+
{"version":3,"file":"enrichers.d.mts","names":[],"sources":["../src/enrichers/index.ts"],"mappings":";;;;UAOiB,aAAA;EACf,GAAA;EACA,OAAA;IAAY,IAAA;IAAc,OAAA;EAAA;EAC1B,EAAA;IAAO,IAAA;IAAc,OAAA;EAAA;EACrB,MAAA;IAAW,IAAA;EAAA;AAAA;AAAA,UAGI,OAAA;EACf,OAAA;EACA,MAAA;EACA,UAAA;EACA,IAAA;EACA,QAAA;EACA,SAAA;AAAA;AAAA,UAGe,eAAA;EACf,YAAA;EACA,aAAA;AAAA;AAAA,UAGe,gBAAA;EACf,WAAA;EACA,UAAA;EACA,OAAA;EACA,MAAA;AAAA;;AAJF;;;iBAwEgB,uBAAA,CAAwB,OAAA,GAAS,eAAA,IAAwB,GAAA,EAAK,aAAA;;;;;;;AAA9E;;;;;;iBAuBgB,iBAAA,CAAkB,OAAA,GAAS,eAAA,IAAwB,GAAA,EAAK,aAAA;;;;AAAxE;iBAuBgB,yBAAA,CAA0B,OAAA,GAAS,eAAA,IAAwB,GAAA,EAAK,aAAA;;;;;;iBAkBhE,0BAAA,CAA2B,OAAA,GAAS,eAAA,IAAwB,GAAA,EAAK,aAAA;;;AAlBjF;;;;;;;;;;iBA+DgB,sBAAA,CAAuB,OAAA,GAAS,eAAA,IAAwB,GAAA,EAAK,aAAA,KAAkB,OAAA"}
|