@l.x/logger 0.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.depcheckrc +9 -0
- package/.eslintrc.js +18 -0
- package/LICENSE +122 -0
- package/README.md +3 -0
- package/package.json +37 -0
- package/project.json +18 -0
- package/src/asyncContext.ts +24 -0
- package/src/clientLogger.ts +114 -0
- package/src/consoleLogger.ts +137 -0
- package/src/index.ts +24 -0
- package/src/requestContext.ts +54 -0
- package/src/server.ts +18 -0
- package/src/serverLogger.ts +70 -0
- package/src/sourceMapResolver.ts +99 -0
- package/src/structuredJsonLogger.ts +141 -0
- package/src/transports/buffered.ts +64 -0
- package/src/transports/console.ts +39 -0
- package/src/transports/pending.ts +42 -0
- package/src/transports/trpc.ts +20 -0
- package/src/types.ts +84 -0
- package/src/wideEvent.ts +128 -0
- package/tsconfig.json +29 -0
- package/tsconfig.lint.json +8 -0
package/.depcheckrc
ADDED
package/.eslintrc.js
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
// ESLint configuration for @l.x/logger — extends the shared library preset.
module.exports = {
  extends: ['@luxfi/eslint-config/lib'],
  parserOptions: {
    // Resolve the project tsconfig relative to this package, not the repo root.
    tsconfigRootDir: __dirname,
  },
  overrides: [
    {
      files: ['*.ts', '*.tsx'],
      rules: {
        'no-relative-import-paths/no-relative-import-paths': 'off',
        // Logger interface defines error(msg, error?, context?) — 3 params is inherent to the contract.
        // Factory functions take (transport, service, parentContext, minLevel) to match the child() pattern.
        // Restructuring these into options objects would make the API awkward for a logging library.
        'max-params': ['error', { max: 6 }],
      },
    },
  ],
}
|
package/LICENSE
ADDED
|
@@ -0,0 +1,122 @@
|
|
|
1
|
+
Lux Ecosystem License
|
|
2
|
+
Version 1.2, December 2025
|
|
3
|
+
|
|
4
|
+
Copyright (c) 2020-2025 Lux Industries Inc.
|
|
5
|
+
All rights reserved.
|
|
6
|
+
|
|
7
|
+
TECHNOLOGY PORTFOLIO - PATENT APPLICATIONS PLANNED
|
|
8
|
+
Contact: licensing@lux.network
|
|
9
|
+
|
|
10
|
+
================================================================================
|
|
11
|
+
TERMS AND CONDITIONS
|
|
12
|
+
================================================================================
|
|
13
|
+
|
|
14
|
+
1. DEFINITIONS
|
|
15
|
+
|
|
16
|
+
"Lux Primary Network" means the official Lux blockchain with Network ID=1
|
|
17
|
+
and EVM Chain ID=96369.
|
|
18
|
+
|
|
19
|
+
"Authorized Network" means the Lux Primary Network, official testnets/devnets,
|
|
20
|
+
and any L1/L2/L3 chain descending from the Lux Primary Network.
|
|
21
|
+
|
|
22
|
+
"Descending Chain" means an L1/L2/L3 chain built on, anchored to, or deriving
|
|
23
|
+
security from the Lux Primary Network or its authorized testnets.
|
|
24
|
+
|
|
25
|
+
"Research Use" means non-commercial academic research, education, personal
|
|
26
|
+
study, or evaluation purposes.
|
|
27
|
+
|
|
28
|
+
"Commercial Use" means any use in connection with a product or service
|
|
29
|
+
offered for sale or fee, internal use by a for-profit entity, or any use
|
|
30
|
+
to generate revenue.
|
|
31
|
+
|
|
32
|
+
2. GRANT OF LICENSE
|
|
33
|
+
|
|
34
|
+
Subject to these terms, Lux Industries Inc grants you a non-exclusive,
|
|
35
|
+
royalty-free license to:
|
|
36
|
+
|
|
37
|
+
(a) Use for Research Use without restriction;
|
|
38
|
+
|
|
39
|
+
(b) Operate on the Lux Primary Network (Network ID=1, EVM Chain ID=96369);
|
|
40
|
+
|
|
41
|
+
(c) Operate on official Lux testnets and devnets;
|
|
42
|
+
|
|
43
|
+
(d) Operate L1/L2/L3 chains descending from the Lux Primary Network;
|
|
44
|
+
|
|
45
|
+
(e) Build applications within the Lux ecosystem;
|
|
46
|
+
|
|
47
|
+
(f) Contribute improvements back to the original repositories.
|
|
48
|
+
|
|
49
|
+
3. RESTRICTIONS
|
|
50
|
+
|
|
51
|
+
Without a commercial license from Lux Industries Inc, you may NOT:
|
|
52
|
+
|
|
53
|
+
(a) Fork the Lux Network or any Lux software;
|
|
54
|
+
|
|
55
|
+
(b) Create competing networks not descending from Lux Primary Network;
|
|
56
|
+
|
|
57
|
+
(c) Use for Commercial Use outside the Lux ecosystem;
|
|
58
|
+
|
|
59
|
+
(d) Sublicense or transfer rights outside the Lux ecosystem;
|
|
60
|
+
|
|
61
|
+
(e) Use to create competing blockchain networks, exchanges, custody
|
|
62
|
+
services, or cryptographic systems outside the Lux ecosystem.
|
|
63
|
+
|
|
64
|
+
4. NO FORKS POLICY
|
|
65
|
+
|
|
66
|
+
Lux Industries Inc maintains ZERO TOLERANCE for unauthorized forks.
|
|
67
|
+
Any fork or deployment on an unauthorized network constitutes:
|
|
68
|
+
|
|
69
|
+
(a) Breach of this license;
|
|
70
|
+
(b) Grounds for immediate legal action.
|
|
71
|
+
|
|
72
|
+
5. RIGHTS RESERVATION
|
|
73
|
+
|
|
74
|
+
All rights not explicitly granted are reserved by Lux Industries Inc.
|
|
75
|
+
|
|
76
|
+
We plan to apply for patent protection for the technology in this
|
|
77
|
+
repository. Any implementation outside the Lux ecosystem may require
|
|
78
|
+
a separate commercial license.
|
|
79
|
+
|
|
80
|
+
6. DISCLAIMER OF WARRANTY
|
|
81
|
+
|
|
82
|
+
THIS SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
|
83
|
+
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
|
84
|
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
|
85
|
+
|
|
86
|
+
7. LIMITATION OF LIABILITY
|
|
87
|
+
|
|
88
|
+
IN NO EVENT SHALL LUX INDUSTRIES INC BE LIABLE FOR ANY CLAIM, DAMAGES
|
|
89
|
+
OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
|
|
90
|
+
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE.
|
|
91
|
+
|
|
92
|
+
8. TERMINATION
|
|
93
|
+
|
|
94
|
+
This license terminates immediately upon any breach, including but not
|
|
95
|
+
limited to deployment on unauthorized networks or creation of forks.
|
|
96
|
+
|
|
97
|
+
9. GOVERNING LAW
|
|
98
|
+
|
|
99
|
+
This License shall be governed by the laws of the State of Delaware.
|
|
100
|
+
|
|
101
|
+
10. COMMERCIAL LICENSING
|
|
102
|
+
|
|
103
|
+
For commercial use outside the Lux ecosystem:
|
|
104
|
+
|
|
105
|
+
Lux Industries Inc.
|
|
106
|
+
Email: licensing@lux.network
|
|
107
|
+
Subject: Commercial License Request
|
|
108
|
+
|
|
109
|
+
================================================================================
|
|
110
|
+
TL;DR
|
|
111
|
+
================================================================================
|
|
112
|
+
|
|
113
|
+
- Research/academic use = OK
|
|
114
|
+
- Lux Primary Network (Network ID=1, Chain ID=96369) = OK
|
|
115
|
+
- L1/L2/L3 chains descending from Lux Primary Network = OK
|
|
116
|
+
- Commercial products outside Lux ecosystem = Contact licensing@lux.network
|
|
117
|
+
- Forks = Absolutely not
|
|
118
|
+
|
|
119
|
+
================================================================================
|
|
120
|
+
|
|
121
|
+
See LP-0012 for full licensing documentation:
|
|
122
|
+
https://github.com/luxfi/lps/blob/main/LPs/lp-0012-ecosystem-licensing.md
|
package/README.md
ADDED
package/package.json
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@l.x/logger",
|
|
3
|
+
"version": "0.0.0",
|
|
4
|
+
"dependencies": {
|
|
5
|
+
"source-map-js": "1.2.1",
|
|
6
|
+
"@l.x/privacy": "0.0.0"
|
|
7
|
+
},
|
|
8
|
+
"devDependencies": {
|
|
9
|
+
"@types/node": "22.13.1",
|
|
10
|
+
"@typescript/native-preview": "7.0.0-dev.20260311.1",
|
|
11
|
+
"depcheck": "1.4.7",
|
|
12
|
+
"eslint": "8.57.1",
|
|
13
|
+
"typescript": "5.8.3",
|
|
14
|
+
"@luxfi/eslint-config": "^1.0.5"
|
|
15
|
+
},
|
|
16
|
+
"nx": {
|
|
17
|
+
"includedScripts": []
|
|
18
|
+
},
|
|
19
|
+
"main": "src/index.ts",
|
|
20
|
+
"exports": {
|
|
21
|
+
".": "./src/index.ts",
|
|
22
|
+
"./server": "./src/server.ts"
|
|
23
|
+
},
|
|
24
|
+
"private": false,
|
|
25
|
+
"sideEffects": false,
|
|
26
|
+
"scripts": {
|
|
27
|
+
"typecheck": "nx typecheck logger",
|
|
28
|
+
"typecheck:tsgo": "nx typecheck:tsgo logger",
|
|
29
|
+
"lint": "nx lint logger",
|
|
30
|
+
"lint:fix": "nx lint:fix logger",
|
|
31
|
+
"lint:biome": "nx lint:biome logger",
|
|
32
|
+
"lint:biome:fix": "nx lint:biome:fix logger",
|
|
33
|
+
"lint:eslint": "nx lint:eslint logger",
|
|
34
|
+
"lint:eslint:fix": "nx lint:eslint:fix logger",
|
|
35
|
+
"check:deps:usage": "nx check:deps:usage logger"
|
|
36
|
+
}
|
|
37
|
+
}
|
package/project.json
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@l.x/logger",
|
|
3
|
+
"$schema": "../../node_modules/nx/schemas/project-schema.json",
|
|
4
|
+
"sourceRoot": "pkgs/logger/src",
|
|
5
|
+
"projectType": "library",
|
|
6
|
+
"tags": [],
|
|
7
|
+
"targets": {
|
|
8
|
+
"typecheck": {},
|
|
9
|
+
"typecheck:tsgo": {},
|
|
10
|
+
"lint:biome": {},
|
|
11
|
+
"lint:biome:fix": {},
|
|
12
|
+
"lint:eslint": {},
|
|
13
|
+
"lint:eslint:fix": {},
|
|
14
|
+
"lint": {},
|
|
15
|
+
"lint:fix": {},
|
|
16
|
+
"check:deps:usage": {}
|
|
17
|
+
}
|
|
18
|
+
}
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Request Context Types
|
|
3
|
+
*
|
|
4
|
+
* Defines the RequestContext shape and the abstract RequestStore contract.
|
|
5
|
+
* The actual AsyncLocalStorage instance is created at the app boundary —
|
|
6
|
+
* this module has no Node.js dependencies.
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
import type { Logger } from './types'
|
|
10
|
+
import type { WideEvent } from './wideEvent'
|
|
11
|
+
|
|
12
|
+
/** Typed context stored per-request */
|
|
13
|
+
/** Typed context stored per-request. */
export interface RequestContext {
  // Correlation ID — presumably propagated via the x-trace-id header; see serverLogger.
  traceId: string
  // Request-scoped logger (typically a .child() carrying request context).
  logger: Logger
  // Accumulator for the request's wide event.
  wideEvent: WideEvent
  // Authenticated user, when known.
  userId?: string
}

/** Abstract store — matches AsyncLocalStorage's shape without importing node:async_hooks */
export interface RequestStore {
  /** Returns the context of the current async scope, or undefined outside run(). */
  getStore(): RequestContext | undefined
  /** Runs callback with `store` as the current context for its async scope. */
  run<R>(store: RequestContext, callback: () => R): R
}
|
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Client Logger
|
|
3
|
+
*
|
|
4
|
+
* Implements the Logger interface by converting log calls into LogEntry
|
|
5
|
+
* objects and forwarding them to a LogTransport.
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import type { LogContext, LogEntry, Logger, LoggerFactory, LogLevel, LogTransport } from './types'
|
|
9
|
+
import { LOG_LEVEL_ORDER } from './types'
|
|
10
|
+
|
|
11
|
+
function shouldLog(level: LogLevel, minLevel: LogLevel): boolean {
|
|
12
|
+
return LOG_LEVEL_ORDER[level] >= LOG_LEVEL_ORDER[minLevel]
|
|
13
|
+
}
|
|
14
|
+
|
|
15
|
+
function serializeError(err: unknown): LogEntry['error'] | undefined {
|
|
16
|
+
if (err == null) {
|
|
17
|
+
return undefined
|
|
18
|
+
}
|
|
19
|
+
if (err instanceof Error) {
|
|
20
|
+
return { message: err.message, stack: err.stack }
|
|
21
|
+
}
|
|
22
|
+
return { message: String(err) }
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
function buildEntry(
|
|
26
|
+
level: LogLevel,
|
|
27
|
+
message: string,
|
|
28
|
+
service: string | undefined,
|
|
29
|
+
context: LogContext | undefined,
|
|
30
|
+
error?: unknown,
|
|
31
|
+
): LogEntry {
|
|
32
|
+
return {
|
|
33
|
+
timestamp: new Date().toISOString(),
|
|
34
|
+
level,
|
|
35
|
+
...(service ? { service } : {}),
|
|
36
|
+
message,
|
|
37
|
+
...(context && Object.keys(context).length > 0 ? { context } : {}),
|
|
38
|
+
...(error != null ? { error: serializeError(error) } : {}),
|
|
39
|
+
}
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
export function createClientLogger(
|
|
43
|
+
transport: LogTransport,
|
|
44
|
+
service?: string,
|
|
45
|
+
parentContext?: LogContext,
|
|
46
|
+
minLevel: LogLevel = 'warn',
|
|
47
|
+
): Logger {
|
|
48
|
+
function mergeContext(context?: LogContext): LogContext | undefined {
|
|
49
|
+
if (!parentContext) {
|
|
50
|
+
return context
|
|
51
|
+
}
|
|
52
|
+
if (!context) {
|
|
53
|
+
return parentContext
|
|
54
|
+
}
|
|
55
|
+
return { ...parentContext, ...context }
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
return {
|
|
59
|
+
trace(message, context?): void {
|
|
60
|
+
if (!shouldLog('trace', minLevel)) {
|
|
61
|
+
return
|
|
62
|
+
}
|
|
63
|
+
transport.send([buildEntry('trace', message, service, mergeContext(context))])
|
|
64
|
+
},
|
|
65
|
+
|
|
66
|
+
debug(message, context?): void {
|
|
67
|
+
if (!shouldLog('debug', minLevel)) {
|
|
68
|
+
return
|
|
69
|
+
}
|
|
70
|
+
transport.send([buildEntry('debug', message, service, mergeContext(context))])
|
|
71
|
+
},
|
|
72
|
+
|
|
73
|
+
info(message, context?): void {
|
|
74
|
+
if (!shouldLog('info', minLevel)) {
|
|
75
|
+
return
|
|
76
|
+
}
|
|
77
|
+
transport.send([buildEntry('info', message, service, mergeContext(context))])
|
|
78
|
+
},
|
|
79
|
+
|
|
80
|
+
warn(message, context?): void {
|
|
81
|
+
if (!shouldLog('warn', minLevel)) {
|
|
82
|
+
return
|
|
83
|
+
}
|
|
84
|
+
transport.send([buildEntry('warn', message, service, mergeContext(context))])
|
|
85
|
+
},
|
|
86
|
+
|
|
87
|
+
error(message, error?, context?): void {
|
|
88
|
+
if (!shouldLog('error', minLevel)) {
|
|
89
|
+
return
|
|
90
|
+
}
|
|
91
|
+
transport.send([buildEntry('error', message, service, mergeContext(context), error)])
|
|
92
|
+
},
|
|
93
|
+
|
|
94
|
+
fatal(message, error?, context?): void {
|
|
95
|
+
if (!shouldLog('fatal', minLevel)) {
|
|
96
|
+
return
|
|
97
|
+
}
|
|
98
|
+
transport.send([buildEntry('fatal', message, service, mergeContext(context), error)])
|
|
99
|
+
},
|
|
100
|
+
|
|
101
|
+
child(context): Logger {
|
|
102
|
+
const merged = parentContext ? { ...parentContext, ...context } : context
|
|
103
|
+
return createClientLogger(transport, service, merged, minLevel)
|
|
104
|
+
},
|
|
105
|
+
}
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
export function createClientLoggerFactory(transport: LogTransport, minLevel: LogLevel = 'warn'): LoggerFactory {
|
|
109
|
+
return {
|
|
110
|
+
createLogger(service): Logger {
|
|
111
|
+
return createClientLogger(transport, service, undefined, minLevel)
|
|
112
|
+
},
|
|
113
|
+
}
|
|
114
|
+
}
|
|
@@ -0,0 +1,137 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Console Logger Implementation
|
|
3
|
+
*
|
|
4
|
+
* Simple console-based logger for development and debugging.
|
|
5
|
+
* Structured output with timestamps and context.
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import type { LogContext, Logger, LoggerFactory, LogLevel } from './types'
|
|
9
|
+
import { LOG_LEVEL_ORDER } from './types'
|
|
10
|
+
|
|
11
|
+
function shouldLog(level: LogLevel, minLevel: LogLevel): boolean {
|
|
12
|
+
return LOG_LEVEL_ORDER[level] >= LOG_LEVEL_ORDER[minLevel]
|
|
13
|
+
}
|
|
14
|
+
|
|
15
|
+
/**
|
|
16
|
+
* Format timestamp for log output
|
|
17
|
+
*/
|
|
18
|
+
function formatTimestamp(): string {
|
|
19
|
+
const now = new Date()
|
|
20
|
+
return now.toISOString().slice(11, 23) // HH:mm:ss.SSS
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
/**
|
|
24
|
+
* Format context for log output
|
|
25
|
+
*/
|
|
26
|
+
function formatContext(context?: LogContext): string {
|
|
27
|
+
if (!context || Object.keys(context).length === 0) {
|
|
28
|
+
return ''
|
|
29
|
+
}
|
|
30
|
+
return ` ${JSON.stringify(context)}`
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
/**
|
|
34
|
+
* Create a console logger with optional service prefix and inherited context
|
|
35
|
+
*/
|
|
36
|
+
export function createConsoleLogger(
|
|
37
|
+
service?: string,
|
|
38
|
+
parentContext?: LogContext,
|
|
39
|
+
minLevel: LogLevel = 'debug',
|
|
40
|
+
): Logger {
|
|
41
|
+
const prefix = service ? `[${service}]` : ''
|
|
42
|
+
|
|
43
|
+
function mergeContext(context?: LogContext): LogContext | undefined {
|
|
44
|
+
if (!parentContext) {
|
|
45
|
+
return context
|
|
46
|
+
}
|
|
47
|
+
if (!context) {
|
|
48
|
+
return parentContext
|
|
49
|
+
}
|
|
50
|
+
return { ...parentContext, ...context }
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
return {
|
|
54
|
+
trace(message: string, context?: LogContext): void {
|
|
55
|
+
if (!shouldLog('trace', minLevel)) {
|
|
56
|
+
return
|
|
57
|
+
}
|
|
58
|
+
// biome-ignore lint/suspicious/noConsole: Logger implementation
|
|
59
|
+
console.debug(`${formatTimestamp()} TRACE ${prefix} ${message}${formatContext(mergeContext(context))}`)
|
|
60
|
+
},
|
|
61
|
+
|
|
62
|
+
debug(message: string, context?: LogContext): void {
|
|
63
|
+
if (!shouldLog('debug', minLevel)) {
|
|
64
|
+
return
|
|
65
|
+
}
|
|
66
|
+
// biome-ignore lint/suspicious/noConsole: Logger implementation
|
|
67
|
+
console.debug(`${formatTimestamp()} DEBUG ${prefix} ${message}${formatContext(mergeContext(context))}`)
|
|
68
|
+
},
|
|
69
|
+
|
|
70
|
+
info(message: string, context?: LogContext): void {
|
|
71
|
+
if (!shouldLog('info', minLevel)) {
|
|
72
|
+
return
|
|
73
|
+
}
|
|
74
|
+
// biome-ignore lint/suspicious/noConsole: Logger implementation
|
|
75
|
+
console.info(`${formatTimestamp()} INFO ${prefix} ${message}${formatContext(mergeContext(context))}`)
|
|
76
|
+
},
|
|
77
|
+
|
|
78
|
+
warn(message: string, context?: LogContext): void {
|
|
79
|
+
if (!shouldLog('warn', minLevel)) {
|
|
80
|
+
return
|
|
81
|
+
}
|
|
82
|
+
// biome-ignore lint/suspicious/noConsole: Logger implementation
|
|
83
|
+
console.warn(`${formatTimestamp()} WARN ${prefix} ${message}${formatContext(mergeContext(context))}`)
|
|
84
|
+
},
|
|
85
|
+
|
|
86
|
+
error(message: string, error?: unknown, context?: LogContext): void {
|
|
87
|
+
if (!shouldLog('error', minLevel)) {
|
|
88
|
+
return
|
|
89
|
+
}
|
|
90
|
+
const errorDetails = error instanceof Error ? { name: error.name, message: error.message } : { error }
|
|
91
|
+
// biome-ignore lint/suspicious/noConsole: Logger implementation
|
|
92
|
+
console.error(`${formatTimestamp()} ERROR ${prefix} ${message}`, errorDetails, mergeContext(context) ?? {})
|
|
93
|
+
},
|
|
94
|
+
|
|
95
|
+
fatal(message: string, error?: unknown, context?: LogContext): void {
|
|
96
|
+
if (!shouldLog('fatal', minLevel)) {
|
|
97
|
+
return
|
|
98
|
+
}
|
|
99
|
+
const errorDetails = error instanceof Error ? { name: error.name, message: error.message } : { error }
|
|
100
|
+
// biome-ignore lint/suspicious/noConsole: Logger implementation
|
|
101
|
+
console.error(`${formatTimestamp()} FATAL ${prefix} ${message}`, errorDetails, mergeContext(context) ?? {})
|
|
102
|
+
},
|
|
103
|
+
|
|
104
|
+
child(context: LogContext): Logger {
|
|
105
|
+
const merged = parentContext ? { ...parentContext, ...context } : context
|
|
106
|
+
return createConsoleLogger(service, merged, minLevel)
|
|
107
|
+
},
|
|
108
|
+
}
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
/**
|
|
112
|
+
* Console logger factory — defaults to 'info' level to suppress DEBUG noise
|
|
113
|
+
*/
|
|
114
|
+
export function createConsoleLoggerFactory(minLevel: LogLevel = 'info'): LoggerFactory {
|
|
115
|
+
return {
|
|
116
|
+
createLogger(service: string): Logger {
|
|
117
|
+
return createConsoleLogger(service, undefined, minLevel)
|
|
118
|
+
},
|
|
119
|
+
}
|
|
120
|
+
}
|
|
121
|
+
|
|
122
|
+
/** Shared default console factory (info level) for app-wide reuse. */
export const consoleLoggerFactory: LoggerFactory = createConsoleLoggerFactory('info')
|
|
123
|
+
|
|
124
|
+
/**
|
|
125
|
+
* Default noop logger for when logging is disabled
|
|
126
|
+
*/
|
|
127
|
+
export const noopLogger: Logger = {
|
|
128
|
+
trace: () => {},
|
|
129
|
+
debug: () => {},
|
|
130
|
+
info: () => {},
|
|
131
|
+
warn: () => {},
|
|
132
|
+
error: () => {},
|
|
133
|
+
fatal: () => {},
|
|
134
|
+
child(): Logger {
|
|
135
|
+
return noopLogger
|
|
136
|
+
},
|
|
137
|
+
}
|
package/src/index.ts
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
// Client-safe exports — no Node.js dependencies.
// Server-only exports (createServerLogger, createStackResolver, etc.) live in @l.x/logger/server.
// NOTE(review): createStructuredJsonLogger writes via process.stdout (Node) —
// confirm whether exporting it from this "client-safe" barrel is intentional.

export { createClientLogger, createClientLoggerFactory } from './clientLogger'
export { consoleLoggerFactory, createConsoleLogger, createConsoleLoggerFactory, noopLogger } from './consoleLogger'
export type { RequestScopedContext } from './requestContext'
export { requestContext } from './requestContext'
export type { StackResolver, StackResolverCtx } from './sourceMapResolver'
export {
  createStructuredJsonLogger,
  createStructuredJsonLoggerFactory,
  structuredJsonLoggerFactory,
} from './structuredJsonLogger'
export type { BufferedTransportOptions } from './transports/buffered'
export { createBufferedTransport } from './transports/buffered'
export { createConsoleTransport } from './transports/console'
export type { PendingTransport } from './transports/pending'
export { createPendingTransport } from './transports/pending'
export type { LogIngestionClient } from './transports/trpc'
export { createTrpcLogTransport } from './transports/trpc'
export type { LogContext, LogEntry, Logger, LoggerFactory, LogLevel, LogTransport } from './types'
export { LOG_LEVEL_ORDER } from './types'
// Types only — value exports in @l.x/logger/server
export type { ErrorSource, SerializedError, WideEvent, WideEventFactory } from './wideEvent'
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Request-scoped context shared between Hono middleware and tRPC.
|
|
3
|
+
*
|
|
4
|
+
* Uses a Map keyed by trace_id (set as x-trace-id header on the request).
|
|
5
|
+
* WeakMap<Request> doesn't work because Hono sub-app routing may create
|
|
6
|
+
* new Request objects, breaking object identity.
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
import type { Logger } from './types'
|
|
10
|
+
import type { WideEvent } from './wideEvent'
|
|
11
|
+
|
|
12
|
+
export interface RequestScopedContext {
|
|
13
|
+
wideEvent: WideEvent
|
|
14
|
+
logger: Logger
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
interface StoredContext extends RequestScopedContext {
|
|
18
|
+
createdAt: number
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
/** Requests older than 5 minutes are considered dead */
const MAX_AGE_MS = 5 * 60 * 1000
/** Evict stale entries every 100 set() calls to bound Map growth */
const EVICTION_INTERVAL = 100

// Module-level registry keyed by trace ID; entries carry createdAt for eviction.
const contextMap = new Map<string, StoredContext>()
// Counts set() calls so eviction runs periodically instead of per-request.
let operationCount = 0
|
|
28
|
+
|
|
29
|
+
function evictStaleEntries(): void {
|
|
30
|
+
const now = Date.now()
|
|
31
|
+
for (const [key, value] of contextMap) {
|
|
32
|
+
if (now - value.createdAt > MAX_AGE_MS) {
|
|
33
|
+
contextMap.delete(key)
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
export const requestContext = {
  /** Register the context for a trace ID; periodically evicts stale entries. */
  set(traceId: string, ctx: RequestScopedContext): void {
    contextMap.set(traceId, { ...ctx, createdAt: Date.now() })
    // Amortized cleanup: run eviction once every EVICTION_INTERVAL set() calls.
    if (++operationCount % EVICTION_INTERVAL === 0) {
      evictStaleEntries()
    }
  },

  /** Look up the context registered for a trace ID, if still present. */
  get(traceId: string): RequestScopedContext | undefined {
    return contextMap.get(traceId)
  },

  /** Clean up after request completes to prevent memory leaks */
  delete(traceId: string): void {
    contextMap.delete(traceId)
  },
}
|
package/src/server.ts
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
// Server-only exports — requires Node.js.
// Re-exports everything from the client-safe barrel for convenience.
// NOTE(review): several names below (requestContext, the structuredJsonLogger
// exports, the StackResolver types) are already covered by `export * from
// './index'`; the explicit re-exports are redundant but harmless.

export type { RequestContext, RequestStore } from './asyncContext'
export * from './index'
export type { RequestScopedContext } from './requestContext'
export { requestContext } from './requestContext'
export type { ServerLogger, ServerLoggerCtx } from './serverLogger'
export { createServerLogger } from './serverLogger'
export type { StackResolver, StackResolverCtx } from './sourceMapResolver'
export { createStackResolver } from './sourceMapResolver'
export {
  createStructuredJsonLogger,
  createStructuredJsonLoggerFactory,
  structuredJsonLoggerFactory,
} from './structuredJsonLogger'
export type { ErrorSource, SerializedError, WideEvent, WideEventFactory } from './wideEvent'
export { createWideEvent, serializeErrorForWideEvent, wideEventFactory } from './wideEvent'
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Server Logger Factory
|
|
3
|
+
*
|
|
4
|
+
* createServerLogger(ctx) → ServerLogger
|
|
5
|
+
*
|
|
6
|
+
* The app boundary provides platform deps (ALS, write function).
|
|
7
|
+
* Returns a wired instance with .create(), .getRequestLogger(), .runWithContext().
|
|
8
|
+
*/
|
|
9
|
+
|
|
10
|
+
import type { RequestContext, RequestStore } from './asyncContext'
|
|
11
|
+
import type { RequestScopedContext } from './requestContext'
|
|
12
|
+
import { createStructuredJsonLoggerFactory } from './structuredJsonLogger'
|
|
13
|
+
import type { Logger, LoggerFactory, LogLevel } from './types'
|
|
14
|
+
|
|
15
|
+
/** Platform dependencies supplied by the app boundary when wiring the server logger. */
export interface ServerLoggerCtx {
  /** The AsyncLocalStorage instance — created by the app */
  requestStore: RequestStore
  /** Map-based fallback context (import { requestContext } from '@l.x/logger') */
  requestContext: { get(traceId: string): RequestScopedContext | undefined }
  /** Minimum log level (defaults to 'info') */
  minLevel?: LogLevel
  /** Service name for the fallback logger (defaults to 'unknown') */
  serviceName?: string
}

/** Wired server-side logging facade returned by createServerLogger(). */
export interface ServerLogger {
  /** Create a named logger: logger.create('dev-portal') */
  create: (service: string) => Logger
  /** Get the request-scoped logger (ALS → Map fallback → fallback logger) */
  getRequestLogger: (request: Request) => Logger
  /** Get the current request context from the ALS (if inside runWithContext) */
  getRequestContext: () => RequestContext | undefined
  /** Wrap a request in context scope */
  runWithContext: <R>(ctx: RequestContext, fn: () => R) => R
  /** The underlying factory (for passing to code that expects LoggerFactory) */
  loggerFactory: LoggerFactory
}
|
|
38
|
+
|
|
39
|
+
export function createServerLogger(ctx: ServerLoggerCtx): ServerLogger {
|
|
40
|
+
const factory = createStructuredJsonLoggerFactory(ctx.minLevel ?? 'info')
|
|
41
|
+
const fallbackLogger = factory.createLogger(ctx.serviceName ?? 'unknown')
|
|
42
|
+
|
|
43
|
+
function getRequestLogger(request: Request): Logger {
|
|
44
|
+
// Primary: ALS context
|
|
45
|
+
const asyncCtx = ctx.requestStore.getStore()
|
|
46
|
+
if (asyncCtx) {
|
|
47
|
+
return asyncCtx.logger
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
// Secondary: Map-based context via trace ID header
|
|
51
|
+
const traceId = request.headers.get('x-trace-id')
|
|
52
|
+
if (traceId) {
|
|
53
|
+
const mapCtx = ctx.requestContext.get(traceId)
|
|
54
|
+
if (mapCtx) {
|
|
55
|
+
return mapCtx.logger
|
|
56
|
+
}
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
fallbackLogger.warn('request_logger.fallback', { reason: 'missing_trace_id' })
|
|
60
|
+
return fallbackLogger
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
return {
|
|
64
|
+
create: (service: string) => factory.createLogger(service),
|
|
65
|
+
getRequestLogger,
|
|
66
|
+
getRequestContext: () => ctx.requestStore.getStore(),
|
|
67
|
+
runWithContext: <R>(reqCtx: RequestContext, fn: () => R): R => ctx.requestStore.run(reqCtx, fn),
|
|
68
|
+
loggerFactory: factory,
|
|
69
|
+
}
|
|
70
|
+
}
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
import type { RawSourceMap } from 'source-map-js'
|
|
2
|
+
import { SourceMapConsumer } from 'source-map-js'
|
|
3
|
+
|
|
4
|
+
/** Rewrites minified stack traces to original source positions. */
export interface StackResolver {
  /** Returns the stack with resolvable frames rewritten; unresolvable frames pass through unchanged. */
  resolve(stack: string): string
}

/** Platform deps injected by the app — keeps this module free of direct node:fs/node:path imports. */
export interface StackResolverCtx {
  // Primary directory containing *.map files.
  sourceMapDir: string
  // Extra directories searched after sourceMapDir (first directory wins per file).
  fallbackDirs?: string[]
  // Minimal fs surface (node:fs-compatible).
  fs: {
    readdirSync: (path: string) => string[]
    readFileSync: (path: string, encoding: BufferEncoding) => string
  }
  // Minimal path surface (node:path-compatible).
  path: {
    basename: (path: string) => string
    join: (...paths: string[]) => string
  }
}
|
|
20
|
+
|
|
21
|
+
export function createStackResolver(ctx: StackResolverCtx): StackResolver {
|
|
22
|
+
const { sourceMapDir, fallbackDirs = [], fs, path } = ctx
|
|
23
|
+
// Lazy-load: parse source maps on first use, not at startup
|
|
24
|
+
let consumers: Map<string, SourceMapConsumer> | undefined
|
|
25
|
+
|
|
26
|
+
function loadFromDir(dir: string, target: Map<string, SourceMapConsumer>): void {
|
|
27
|
+
try {
|
|
28
|
+
const files = fs.readdirSync(dir).filter((f) => f.endsWith('.map'))
|
|
29
|
+
for (const file of files) {
|
|
30
|
+
const jsFile = file.replace(/\.map$/, '')
|
|
31
|
+
if (target.has(jsFile)) {
|
|
32
|
+
continue
|
|
33
|
+
} // first dir wins
|
|
34
|
+
const mapContent = fs.readFileSync(path.join(dir, file), 'utf-8')
|
|
35
|
+
const rawMap = JSON.parse(mapContent) as RawSourceMap
|
|
36
|
+
target.set(jsFile, new SourceMapConsumer(rawMap))
|
|
37
|
+
}
|
|
38
|
+
} catch {
|
|
39
|
+
// Dir doesn't exist or isn't readable — skip
|
|
40
|
+
}
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
function getConsumers(): Map<string, SourceMapConsumer> {
|
|
44
|
+
if (consumers) {
|
|
45
|
+
return consumers
|
|
46
|
+
}
|
|
47
|
+
consumers = new Map()
|
|
48
|
+
loadFromDir(sourceMapDir, consumers)
|
|
49
|
+
for (const dir of fallbackDirs) {
|
|
50
|
+
loadFromDir(dir, consumers)
|
|
51
|
+
}
|
|
52
|
+
return consumers
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
// Parse stack frames like:
|
|
56
|
+
// " at functionName (https://host/assets/index-abc123.js:10:42)"
|
|
57
|
+
// " at https://host/assets/index-abc123.js:10:42"
|
|
58
|
+
// eslint-disable-next-line security/detect-unsafe-regex -- regex is applied to individual stack frame lines, not unbounded input
|
|
59
|
+
const FRAME_RE = /^(\s+at\s+(?:.*?\s+\()?)(.+?):(\d+):(\d+)(\)?\s*)$/
|
|
60
|
+
|
|
61
|
+
return {
|
|
62
|
+
resolve(stack: string): string {
|
|
63
|
+
const maps = getConsumers()
|
|
64
|
+
if (maps.size === 0) {
|
|
65
|
+
return stack
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
return stack
|
|
69
|
+
.split('\n')
|
|
70
|
+
.map((line) => {
|
|
71
|
+
const match = FRAME_RE.exec(line)
|
|
72
|
+
if (!match) {
|
|
73
|
+
return line
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
const [, prefix, filePath, lineStr, colStr, suffix] = match
|
|
77
|
+
if (!prefix || !filePath || !lineStr || !colStr || !suffix) {
|
|
78
|
+
return line
|
|
79
|
+
}
|
|
80
|
+
const fileName = path.basename(filePath)
|
|
81
|
+
const consumer = maps.get(fileName)
|
|
82
|
+
if (!consumer) {
|
|
83
|
+
return line
|
|
84
|
+
}
|
|
85
|
+
|
|
86
|
+
const pos = consumer.originalPositionFor({
|
|
87
|
+
line: parseInt(lineStr, 10),
|
|
88
|
+
column: parseInt(colStr, 10),
|
|
89
|
+
})
|
|
90
|
+
|
|
91
|
+
if (pos.source) {
|
|
92
|
+
return `${prefix}${pos.source}:${pos.line}:${pos.column}${suffix}`
|
|
93
|
+
}
|
|
94
|
+
return line
|
|
95
|
+
})
|
|
96
|
+
.join('\n')
|
|
97
|
+
},
|
|
98
|
+
}
|
|
99
|
+
}
|
|
@@ -0,0 +1,141 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Structured JSON Logger Implementation
|
|
3
|
+
*
|
|
4
|
+
* Writes single JSON lines to stdout for Datadog Agent sidecar ingestion.
|
|
5
|
+
* NOT console.log — raw process.stdout.write to avoid formatting overhead.
|
|
6
|
+
*
|
|
7
|
+
* Integrates PII scrubbing as the last step before serialization (single enforcement point).
|
|
8
|
+
* Adds Datadog correlation fields for trace/log correlation.
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
import type { Scrubber } from '@l.x/privacy'
|
|
12
|
+
import { createScrubber } from '@l.x/privacy'
|
|
13
|
+
import type { LogContext, Logger, LoggerFactory, LogLevel } from './types'
|
|
14
|
+
import { LOG_LEVEL_ORDER } from './types'
|
|
15
|
+
import { serializeErrorForWideEvent } from './wideEvent'
|
|
16
|
+
|
|
17
|
+
function shouldLog(level: LogLevel, minLevel: LogLevel): boolean {
|
|
18
|
+
return LOG_LEVEL_ORDER[level] >= LOG_LEVEL_ORDER[minLevel]
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
/** DD correlation defaults — trace/span IDs are empty until overridden by request-scoped .child() context. */
|
|
22
|
+
function getDDCorrelation(service: string | undefined): Record<string, string> {
|
|
23
|
+
return {
|
|
24
|
+
'dd.trace_id': '',
|
|
25
|
+
'dd.span_id': '',
|
|
26
|
+
'dd.service': process.env['DD_SERVICE'] || service || '',
|
|
27
|
+
'dd.version': process.env['DD_VERSION'] || '',
|
|
28
|
+
'dd.env': process.env['DD_ENV'] || '',
|
|
29
|
+
}
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
function writeLine(
|
|
33
|
+
level: LogLevel,
|
|
34
|
+
service: string | undefined,
|
|
35
|
+
message: string,
|
|
36
|
+
scrub: Scrubber,
|
|
37
|
+
context?: LogContext,
|
|
38
|
+
error?: unknown,
|
|
39
|
+
): void {
|
|
40
|
+
const line: Record<string, unknown> = {
|
|
41
|
+
timestamp: new Date().toISOString(),
|
|
42
|
+
level,
|
|
43
|
+
...(service ? { service } : {}),
|
|
44
|
+
message,
|
|
45
|
+
...getDDCorrelation(service),
|
|
46
|
+
...context,
|
|
47
|
+
}
|
|
48
|
+
if (error != null) {
|
|
49
|
+
line['error'] = serializeErrorForWideEvent(error)
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
// Scrub is the LAST step before serialization — single enforcement point
|
|
53
|
+
const scrubbed = scrub(line)
|
|
54
|
+
process.stdout.write(`${JSON.stringify(scrubbed)}\n`)
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
/**
|
|
58
|
+
* Create a structured JSON logger that writes newline-delimited JSON to stdout.
|
|
59
|
+
* Scrubber is injected explicitly — not imported as a side effect.
|
|
60
|
+
*/
|
|
61
|
+
export function createStructuredJsonLogger(
|
|
62
|
+
service?: string,
|
|
63
|
+
parentContext?: LogContext,
|
|
64
|
+
minLevel: LogLevel = 'info',
|
|
65
|
+
scrub?: Scrubber,
|
|
66
|
+
): Logger {
|
|
67
|
+
const scrubFn = scrub ?? createScrubber()
|
|
68
|
+
|
|
69
|
+
function mergeContext(context?: LogContext): LogContext | undefined {
|
|
70
|
+
if (!parentContext) {
|
|
71
|
+
return context
|
|
72
|
+
}
|
|
73
|
+
if (!context) {
|
|
74
|
+
return parentContext
|
|
75
|
+
}
|
|
76
|
+
return { ...parentContext, ...context }
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
return {
|
|
80
|
+
trace(message: string, context?: LogContext): void {
|
|
81
|
+
if (!shouldLog('trace', minLevel)) {
|
|
82
|
+
return
|
|
83
|
+
}
|
|
84
|
+
writeLine('trace', service, message, scrubFn, mergeContext(context))
|
|
85
|
+
},
|
|
86
|
+
|
|
87
|
+
debug(message: string, context?: LogContext): void {
|
|
88
|
+
if (!shouldLog('debug', minLevel)) {
|
|
89
|
+
return
|
|
90
|
+
}
|
|
91
|
+
writeLine('debug', service, message, scrubFn, mergeContext(context))
|
|
92
|
+
},
|
|
93
|
+
|
|
94
|
+
info(message: string, context?: LogContext): void {
|
|
95
|
+
if (!shouldLog('info', minLevel)) {
|
|
96
|
+
return
|
|
97
|
+
}
|
|
98
|
+
writeLine('info', service, message, scrubFn, mergeContext(context))
|
|
99
|
+
},
|
|
100
|
+
|
|
101
|
+
warn(message: string, context?: LogContext): void {
|
|
102
|
+
if (!shouldLog('warn', minLevel)) {
|
|
103
|
+
return
|
|
104
|
+
}
|
|
105
|
+
writeLine('warn', service, message, scrubFn, mergeContext(context))
|
|
106
|
+
},
|
|
107
|
+
|
|
108
|
+
error(message: string, error?: unknown, context?: LogContext): void {
|
|
109
|
+
if (!shouldLog('error', minLevel)) {
|
|
110
|
+
return
|
|
111
|
+
}
|
|
112
|
+
writeLine('error', service, message, scrubFn, mergeContext(context), error)
|
|
113
|
+
},
|
|
114
|
+
|
|
115
|
+
fatal(message: string, error?: unknown, context?: LogContext): void {
|
|
116
|
+
if (!shouldLog('fatal', minLevel)) {
|
|
117
|
+
return
|
|
118
|
+
}
|
|
119
|
+
writeLine('fatal', service, message, scrubFn, mergeContext(context), error)
|
|
120
|
+
},
|
|
121
|
+
|
|
122
|
+
child(context: LogContext): Logger {
|
|
123
|
+
const merged = parentContext ? { ...parentContext, ...context } : context
|
|
124
|
+
return createStructuredJsonLogger(service, merged, minLevel, scrubFn)
|
|
125
|
+
},
|
|
126
|
+
}
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
/**
|
|
130
|
+
* Structured JSON logger factory — use in production (ECS + Datadog)
|
|
131
|
+
*/
|
|
132
|
+
export function createStructuredJsonLoggerFactory(minLevel: LogLevel = 'info'): LoggerFactory {
|
|
133
|
+
const scrub = createScrubber()
|
|
134
|
+
return {
|
|
135
|
+
createLogger(service: string): Logger {
|
|
136
|
+
return createStructuredJsonLogger(service, undefined, minLevel, scrub)
|
|
137
|
+
},
|
|
138
|
+
}
|
|
139
|
+
}
|
|
140
|
+
|
|
141
|
+
/** Default production factory instance (minLevel 'info'), shared module-wide. */
export const structuredJsonLoggerFactory: LoggerFactory = createStructuredJsonLoggerFactory('info')
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* BufferedTransport
|
|
3
|
+
*
|
|
4
|
+
* Wraps any LogTransport with batching. Flushes when the buffer
|
|
5
|
+
* hits maxSize, the timer expires, or the page becomes hidden.
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import type { LogEntry, LogTransport } from '../types'
|
|
9
|
+
|
|
10
|
+
export interface BufferedTransportOptions {
|
|
11
|
+
maxSize?: number
|
|
12
|
+
flushIntervalMs?: number
|
|
13
|
+
}
|
|
14
|
+
|
|
15
|
+
const DEFAULT_MAX_SIZE = 10
|
|
16
|
+
const DEFAULT_FLUSH_INTERVAL_MS = 5_000
|
|
17
|
+
|
|
18
|
+
export function createBufferedTransport(inner: LogTransport, options?: BufferedTransportOptions): LogTransport {
|
|
19
|
+
const maxSize = options?.maxSize ?? DEFAULT_MAX_SIZE
|
|
20
|
+
const flushIntervalMs = options?.flushIntervalMs ?? DEFAULT_FLUSH_INTERVAL_MS
|
|
21
|
+
|
|
22
|
+
let buffer: LogEntry[] = []
|
|
23
|
+
let timer: ReturnType<typeof setTimeout> | null = null
|
|
24
|
+
|
|
25
|
+
function flush(): void {
|
|
26
|
+
if (timer !== null) {
|
|
27
|
+
clearTimeout(timer)
|
|
28
|
+
timer = null
|
|
29
|
+
}
|
|
30
|
+
if (buffer.length === 0) {
|
|
31
|
+
return
|
|
32
|
+
}
|
|
33
|
+
const batch = buffer
|
|
34
|
+
buffer = []
|
|
35
|
+
inner.send(batch)
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
function scheduleFlush(): void {
|
|
39
|
+
if (timer !== null) {
|
|
40
|
+
return
|
|
41
|
+
}
|
|
42
|
+
timer = setTimeout(flush, flushIntervalMs)
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
// Flush on page hide (tab switch, navigation, close)
|
|
46
|
+
if (typeof document !== 'undefined') {
|
|
47
|
+
document.addEventListener('visibilitychange', () => {
|
|
48
|
+
if (document.visibilityState === 'hidden') {
|
|
49
|
+
flush()
|
|
50
|
+
}
|
|
51
|
+
})
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
return {
|
|
55
|
+
send(entries): void {
|
|
56
|
+
buffer.push(...entries)
|
|
57
|
+
if (buffer.length >= maxSize) {
|
|
58
|
+
flush()
|
|
59
|
+
} else {
|
|
60
|
+
scheduleFlush()
|
|
61
|
+
}
|
|
62
|
+
},
|
|
63
|
+
}
|
|
64
|
+
}
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* ConsoleLogTransport
|
|
3
|
+
*
|
|
4
|
+
* Writes log entries to the browser console. Dev only.
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
import type { LogEntry, LogTransport } from '../types'
|
|
8
|
+
|
|
9
|
+
function formatEntry(entry: LogEntry): string {
|
|
10
|
+
const service = entry.service ? `[${entry.service}] ` : ''
|
|
11
|
+
return `${entry.timestamp} ${entry.level.toUpperCase()} ${service}${entry.message}`
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
export function createConsoleTransport(): LogTransport {
|
|
15
|
+
return {
|
|
16
|
+
send(entries): void {
|
|
17
|
+
for (const entry of entries) {
|
|
18
|
+
const formatted = formatEntry(entry)
|
|
19
|
+
const extra = entry.context ?? {}
|
|
20
|
+
|
|
21
|
+
switch (entry.level) {
|
|
22
|
+
case 'error':
|
|
23
|
+
case 'fatal':
|
|
24
|
+
// biome-ignore lint/suspicious/noConsole: Logger transport implementation
|
|
25
|
+
console.error(formatted, entry.error ?? '', extra)
|
|
26
|
+
break
|
|
27
|
+
case 'warn':
|
|
28
|
+
// biome-ignore lint/suspicious/noConsole: Logger transport implementation
|
|
29
|
+
console.warn(formatted, extra)
|
|
30
|
+
break
|
|
31
|
+
default:
|
|
32
|
+
// biome-ignore lint/suspicious/noConsole: Logger transport implementation
|
|
33
|
+
console.debug(formatted, extra)
|
|
34
|
+
break
|
|
35
|
+
}
|
|
36
|
+
}
|
|
37
|
+
},
|
|
38
|
+
}
|
|
39
|
+
}
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* PendingTransport
|
|
3
|
+
*
|
|
4
|
+
* Buffers log entries until a real transport is connected.
|
|
5
|
+
* Used as the initial transport so client code can log before
|
|
6
|
+
* the tRPC client (or any real transport) is available.
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
import type { LogTransport } from '../types'
|
|
10
|
+
|
|
11
|
+
export interface PendingTransport extends LogTransport {
|
|
12
|
+
connect(transport: LogTransport): void
|
|
13
|
+
}
|
|
14
|
+
|
|
15
|
+
const DEFAULT_MAX_BUFFER = 100
|
|
16
|
+
|
|
17
|
+
export function createPendingTransport(maxBuffer: number = DEFAULT_MAX_BUFFER): PendingTransport {
|
|
18
|
+
let buffer: Parameters<LogTransport['send']>[0] = []
|
|
19
|
+
let delegate: LogTransport | null = null
|
|
20
|
+
|
|
21
|
+
return {
|
|
22
|
+
send(entries): void {
|
|
23
|
+
if (delegate) {
|
|
24
|
+
delegate.send(entries)
|
|
25
|
+
return
|
|
26
|
+
}
|
|
27
|
+
buffer.push(...entries)
|
|
28
|
+
// Drop oldest entries if buffer exceeds max
|
|
29
|
+
if (buffer.length > maxBuffer) {
|
|
30
|
+
buffer = buffer.slice(buffer.length - maxBuffer)
|
|
31
|
+
}
|
|
32
|
+
},
|
|
33
|
+
|
|
34
|
+
connect(transport): void {
|
|
35
|
+
delegate = transport
|
|
36
|
+
if (buffer.length > 0) {
|
|
37
|
+
transport.send(buffer)
|
|
38
|
+
buffer = []
|
|
39
|
+
}
|
|
40
|
+
},
|
|
41
|
+
}
|
|
42
|
+
}
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* TrpcLogTransport
|
|
3
|
+
*
|
|
4
|
+
* Sends log entries to the server via a tRPC mutation.
|
|
5
|
+
* Fire-and-forget — errors are silently swallowed.
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import type { LogEntry, LogTransport } from '../types'
|
|
9
|
+
|
|
10
|
+
export interface LogIngestionClient {
|
|
11
|
+
logs: { ingest: { mutate: (input: { entries: LogEntry[] }) => Promise<unknown> } }
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
export function createTrpcLogTransport(client: LogIngestionClient): LogTransport {
|
|
15
|
+
return {
|
|
16
|
+
send(entries): void {
|
|
17
|
+
client.logs.ingest.mutate({ entries }).catch(() => {})
|
|
18
|
+
},
|
|
19
|
+
}
|
|
20
|
+
}
|
package/src/types.ts
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Logger Interface
|
|
3
|
+
*
|
|
4
|
+
* Simple logger contract for services and repositories.
|
|
5
|
+
* Allows swapping between console logging (dev) and production logging.
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
/**
 * Log context for structured logging.
 * Open-ended via the index signature: arbitrary structured fields are allowed
 * alongside the well-known ones below, and they flow straight into log lines.
 */
export interface LogContext {
  /** Service or component name */
  service?: string
  /** Operation being performed */
  operation?: string
  /** User ID if available */
  userId?: string
  /** Additional metadata */
  [key: string]: unknown
}

/**
 * Logger interface for dependency injection.
 * Implementations choose the sink (console in dev, structured JSON stdout in
 * production); callers depend only on this contract.
 */
export interface Logger {
  /** Trace level logging - finest granularity, usually off */
  trace(message: string, context?: LogContext): void

  /** Debug level logging - development only */
  debug(message: string, context?: LogContext): void

  /** Info level logging - general information */
  info(message: string, context?: LogContext): void

  /** Warning level logging - potential issues */
  warn(message: string, context?: LogContext): void

  /** Error level logging - errors and exceptions; `error` may be any thrown value */
  error(message: string, error?: unknown, context?: LogContext): void

  /** Fatal level logging - unrecoverable errors, process should exit */
  fatal(message: string, error?: unknown, context?: LogContext): void

  /** Create a child logger with preset context fields merged into every log call */
  child(context: LogContext): Logger
}

/**
 * Log levels in order of severity.
 * Loggers should only emit messages at or above their configured level.
 */
export type LogLevel = 'trace' | 'debug' | 'info' | 'warn' | 'error' | 'fatal'

/** Numeric severity ranks backing LogLevel comparisons (higher = more severe). */
export const LOG_LEVEL_ORDER: Record<LogLevel, number> = {
  trace: 0,
  debug: 1,
  info: 2,
  warn: 3,
  error: 4,
  fatal: 5,
}

/**
 * Factory to create a child logger with preset context
 */
export interface LoggerFactory {
  /** Create a logger with preset service context */
  createLogger(service: string): Logger
}

/** A single log entry ready for transport */
export interface LogEntry {
  timestamp: string
  level: LogLevel
  service?: string
  message: string
  context?: LogContext
  /** Pre-serialized error payload — kept flat so it survives JSON transport */
  error?: { message: string; stack?: string }
}

/** Moves log entries from point A to point B */
export interface LogTransport {
  send(entries: LogEntry[]): void
}
|
package/src/wideEvent.ts
ADDED
|
@@ -0,0 +1,128 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Wide Event
|
|
3
|
+
*
|
|
4
|
+
* Canonical log line pattern: accumulate fields throughout a request lifecycle,
|
|
5
|
+
* then emit one structured log line at completion.
|
|
6
|
+
*
|
|
7
|
+
* Goal: one log line per request with enough context to diagnose any error
|
|
8
|
+
* without cross-referencing other logs.
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
import type { Logger } from './types'
|
|
12
|
+
|
|
13
|
+
/** Where the error originated — for Datadog faceting/alerting */
|
|
14
|
+
export type ErrorSource = 'trpc_procedure' | 'loader' | 'upstream_5xx' | 'unhandled'
|
|
15
|
+
|
|
16
|
+
/** Recursive error shape that preserves cause chains */
|
|
17
|
+
export interface SerializedError {
|
|
18
|
+
class: string
|
|
19
|
+
message: string
|
|
20
|
+
stack?: string
|
|
21
|
+
code?: string
|
|
22
|
+
cause?: SerializedError
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
/**
|
|
26
|
+
* Recursively serialize an error, walking the .cause chain.
|
|
27
|
+
* Pure function — no side effects, no implicit deps.
|
|
28
|
+
*/
|
|
29
|
+
export function serializeErrorForWideEvent(err: unknown, depth = 0): SerializedError {
|
|
30
|
+
// Guard against infinite/absurd cause chains
|
|
31
|
+
if (depth > 5) {
|
|
32
|
+
return { class: 'TruncatedCauseChain', message: '(cause chain exceeded 5 levels)' }
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
if (err instanceof Error) {
|
|
36
|
+
return {
|
|
37
|
+
class: err.constructor.name,
|
|
38
|
+
message: err.message,
|
|
39
|
+
stack: err.stack,
|
|
40
|
+
...('code' in err && err.code ? { code: String(err.code) } : {}),
|
|
41
|
+
...(err.cause ? { cause: serializeErrorForWideEvent(err.cause, depth + 1) } : {}),
|
|
42
|
+
}
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
if (err !== undefined && err !== null) {
|
|
46
|
+
return { class: typeof err, message: String(err) }
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
return { class: 'Unknown', message: '(no error value)' }
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
export interface WideEvent {
|
|
53
|
+
/** Add fields that accumulate through the request lifecycle */
|
|
54
|
+
add(fields: Record<string, unknown>): void
|
|
55
|
+
/** Record a procedure span (name, duration, outcome) */
|
|
56
|
+
addProcedure(span: { name: string; duration_ms: number; outcome: 'success' | 'error'; error_code?: string }): void
|
|
57
|
+
/** Capture a fully serialized error with cause chain + source classification */
|
|
58
|
+
addError(error: unknown, source?: ErrorSource): void
|
|
59
|
+
/** Flush the canonical log line at request completion */
|
|
60
|
+
flush(logger: Logger, outcome: 'success' | 'error'): void
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
export interface WideEventFactory {
|
|
64
|
+
create(req: Request): WideEvent
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
/**
|
|
68
|
+
* Create a wide event for a request.
|
|
69
|
+
* Does NOT generate its own trace_id — relies on the logger's child context
|
|
70
|
+
* for trace correlation (single source of truth for trace_id).
|
|
71
|
+
*/
|
|
72
|
+
export function createWideEvent(req: Request): WideEvent {
|
|
73
|
+
const url = new URL(req.url)
|
|
74
|
+
const method = req.method
|
|
75
|
+
const path = url.pathname
|
|
76
|
+
const start_time = Date.now()
|
|
77
|
+
|
|
78
|
+
const fields: Record<string, unknown> = {}
|
|
79
|
+
const procedures: Array<{ name: string; duration_ms: number; outcome: 'success' | 'error'; error_code?: string }> = []
|
|
80
|
+
let capturedError: SerializedError | undefined
|
|
81
|
+
let errorSource: ErrorSource | undefined
|
|
82
|
+
|
|
83
|
+
return {
|
|
84
|
+
add(newFields: Record<string, unknown>): void {
|
|
85
|
+
Object.assign(fields, newFields)
|
|
86
|
+
},
|
|
87
|
+
|
|
88
|
+
addProcedure(span): void {
|
|
89
|
+
procedures.push(span)
|
|
90
|
+
},
|
|
91
|
+
|
|
92
|
+
addError(error: unknown, source?: ErrorSource): void {
|
|
93
|
+
capturedError = serializeErrorForWideEvent(error)
|
|
94
|
+
if (source) {
|
|
95
|
+
errorSource = source
|
|
96
|
+
}
|
|
97
|
+
},
|
|
98
|
+
|
|
99
|
+
flush(logger: Logger, outcome: 'success' | 'error'): void {
|
|
100
|
+
const duration_ms = Date.now() - start_time
|
|
101
|
+
const payload = {
|
|
102
|
+
method,
|
|
103
|
+
path,
|
|
104
|
+
outcome,
|
|
105
|
+
duration_ms,
|
|
106
|
+
...(procedures.length > 0 ? { procedures } : {}),
|
|
107
|
+
...(capturedError && outcome === 'error' ? { error: capturedError } : {}),
|
|
108
|
+
...(errorSource && outcome === 'error' ? { error_source: errorSource } : {}),
|
|
109
|
+
...fields,
|
|
110
|
+
}
|
|
111
|
+
|
|
112
|
+
// Flush at the right log level so Datadog can index/alert on severity.
|
|
113
|
+
// Error details are already in payload.error (pre-serialized) — don't pass
|
|
114
|
+
// as the error param to avoid double-serialization by the structured logger.
|
|
115
|
+
if (outcome === 'error') {
|
|
116
|
+
logger.error('request.complete', undefined, payload)
|
|
117
|
+
} else {
|
|
118
|
+
logger.info('request.complete', payload)
|
|
119
|
+
}
|
|
120
|
+
},
|
|
121
|
+
}
|
|
122
|
+
}
|
|
123
|
+
|
|
124
|
+
export const wideEventFactory: WideEventFactory = {
|
|
125
|
+
create(req: Request): WideEvent {
|
|
126
|
+
return createWideEvent(req)
|
|
127
|
+
},
|
|
128
|
+
}
|
package/tsconfig.json
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
{
|
|
2
|
+
"extends": "../../config/tsconfig/app.json",
|
|
3
|
+
"include": [
|
|
4
|
+
"src/**/*.ts",
|
|
5
|
+
"src/**/*.tsx",
|
|
6
|
+
"src/**/*.json",
|
|
7
|
+
"src/global.d.ts"
|
|
8
|
+
],
|
|
9
|
+
"exclude": [
|
|
10
|
+
"src/**/*.spec.ts",
|
|
11
|
+
"src/**/*.spec.tsx",
|
|
12
|
+
"src/**/*.test.ts",
|
|
13
|
+
"src/**/*.test.tsx"
|
|
14
|
+
],
|
|
15
|
+
"compilerOptions": {
|
|
16
|
+
"noEmit": false,
|
|
17
|
+
"emitDeclarationOnly": true,
|
|
18
|
+
"types": ["node"],
|
|
19
|
+
"paths": {}
|
|
20
|
+
},
|
|
21
|
+
"references": [
|
|
22
|
+
{
|
|
23
|
+
"path": "../privacy"
|
|
24
|
+
},
|
|
25
|
+
{
|
|
26
|
+
"path": "../eslint-config"
|
|
27
|
+
}
|
|
28
|
+
]
|
|
29
|
+
}
|