@ktuban/safe-json-loader 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +191 -0
- package/dist/cjs/index.js +21 -0
- package/dist/cjs/index.js.map +1 -0
- package/dist/cjs/logger.js +67 -0
- package/dist/cjs/logger.js.map +1 -0
- package/dist/cjs/safeJsonLoader.js +391 -0
- package/dist/cjs/safeJsonLoader.js.map +1 -0
- package/dist/cjs/types.js +4 -0
- package/dist/cjs/types.js.map +1 -0
- package/dist/esm/index.js +5 -0
- package/dist/esm/index.js.map +1 -0
- package/dist/esm/logger.js +58 -0
- package/dist/esm/logger.js.map +1 -0
- package/dist/esm/safeJsonLoader.js +380 -0
- package/dist/esm/safeJsonLoader.js.map +1 -0
- package/dist/esm/types.js +3 -0
- package/dist/esm/types.js.map +1 -0
- package/dist/types/index.d.ts +3 -0
- package/dist/types/logger.d.ts +12 -0
- package/dist/types/safeJsonLoader.d.ts +53 -0
- package/dist/types/types.d.ts +111 -0
- package/package.json +78 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2025 K Tuban
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,191 @@
|
|
|
1
|
+
Got it, K — let’s update your **README.md** so it reflects the new helpers (`parseSafeJsonString` and `sanitizeParsedJsonObject`) alongside the loader. This way, developers see clearly how to use the library in **all entry points**: files, URLs, raw strings, and already‑parsed objects (like Express `req.body`).
|
|
2
|
+
|
|
3
|
+
Here’s the polished, industry‑grade README update:
|
|
4
|
+
|
|
5
|
+
---
|
|
6
|
+
|
|
7
|
+
# **safe-json-loader**
|
|
8
|
+
|
|
9
|
+
A **security‑hardened JSON loader and sanitizer** for Node.js that protects against prototype pollution, excessive depth, oversized payloads, unsafe remote JSON, and directory‑based DoS attacks.
|
|
10
|
+
Supports:
|
|
11
|
+
|
|
12
|
+
- Local JSON files
|
|
13
|
+
- Local directories of JSON files
|
|
14
|
+
- Remote JSON URLs
|
|
15
|
+
- Remote JSON indexes (`[]` or `{ files: [] }`)
|
|
16
|
+
- Safe parsing of raw JSON strings
|
|
17
|
+
- Safe sanitization of already‑parsed JSON objects
|
|
18
|
+
- Safe serialization via `safe-json-stringify`
|
|
19
|
+
|
|
20
|
+
---
|
|
21
|
+
|
|
22
|
+
## **Features**
|
|
23
|
+
|
|
24
|
+
- 🔐 **Security‑first design**
|
|
25
|
+
- Strips `__proto__`, `constructor`, and `prototype`
|
|
26
|
+
- Rebuilds objects using `Object.create(null)`
|
|
27
|
+
- Enforces maximum JSON depth
|
|
28
|
+
- Enforces per‑file and total directory size limits
|
|
29
|
+
- Enforces maximum number of files
|
|
30
|
+
- Safe remote loading with timeout, content‑type validation, and concurrency limits
|
|
31
|
+
|
|
32
|
+
- 🧹 **Helpers for all entry points**
|
|
33
|
+
- `loadSafeJsonResources()` → load from file, directory, or URL
|
|
34
|
+
- `parseSafeJsonString()` → sanitize raw JSON strings
|
|
35
|
+
- `sanitizeParsedJsonObject()` → sanitize already‑parsed objects (e.g. Express `req.body`)
|
|
36
|
+
|
|
37
|
+
- 🧪 **TypeScript‑first**
|
|
38
|
+
- Full type definitions
|
|
39
|
+
- Strongly typed loader output
|
|
40
|
+
|
|
41
|
+
---
|
|
42
|
+
|
|
43
|
+
## **Installation**
|
|
44
|
+
|
|
45
|
+
```bash
|
|
46
|
+
npm install @ktuban/safe-json-loader safe-json-stringify
|
|
47
|
+
```
|
|
48
|
+
|
|
49
|
+
Node.js **18+** required.
|
|
50
|
+
|
|
51
|
+
---
|
|
52
|
+
|
|
53
|
+
## **Usage**
|
|
54
|
+
|
|
55
|
+
### **1. Load from file, directory, or URL**
|
|
56
|
+
|
|
57
|
+
```ts
|
|
58
|
+
import { loadSafeJsonResources } from "@ktuban/safe-json-loader";
|
|
59
|
+
|
|
60
|
+
const files = await loadSafeJsonResources("./configs");
|
|
61
|
+
|
|
62
|
+
for (const file of files) {
|
|
63
|
+
console.log(file.name);
|
|
64
|
+
console.log(file.data);
|
|
65
|
+
}
|
|
66
|
+
```
|
|
67
|
+
|
|
68
|
+
---
|
|
69
|
+
|
|
70
|
+
### **2. Parse and sanitize a raw JSON string**
|
|
71
|
+
|
|
72
|
+
```ts
|
|
73
|
+
import { parseSafeJsonString } from "@ktuban/safe-json-loader";
|
|
74
|
+
|
|
75
|
+
const safeObj = parseSafeJsonString('{"user":{"__proto__":{"polluted":true}}}', {
|
|
76
|
+
maxJsonDepth: 30,
|
|
77
|
+
});
|
|
78
|
+
|
|
79
|
+
console.log(safeObj);
|
|
80
|
+
// => { user: {} } // pollution stripped
|
|
81
|
+
```
|
|
82
|
+
|
|
83
|
+
---
|
|
84
|
+
|
|
85
|
+
### **3. Sanitize an already‑parsed JSON object (Express example)**
|
|
86
|
+
|
|
87
|
+
```ts
|
|
88
|
+
import express from "express";
|
|
89
|
+
import { sanitizeParsedJsonObject } from "@ktuban/safe-json-loader";
|
|
90
|
+
|
|
91
|
+
const app = express();
|
|
92
|
+
app.use(express.json());
|
|
93
|
+
|
|
94
|
+
app.post("/api/data", (req, res) => {
|
|
95
|
+
try {
|
|
96
|
+
const safeBody = sanitizeParsedJsonObject(req.body, { maxJsonDepth: 30 });
|
|
97
|
+
res.json({ ok: true, sanitized: safeBody });
|
|
98
|
+
} catch (err: any) {
|
|
99
|
+
res.status(400).json({ error: err.message, code: err.code });
|
|
100
|
+
}
|
|
101
|
+
});
|
|
102
|
+
```
|
|
103
|
+
|
|
104
|
+
---
|
|
105
|
+
|
|
106
|
+
### **4. Safe serialization**
|
|
107
|
+
|
|
108
|
+
```ts
|
|
109
|
+
import safeStringify from "safe-json-stringify";
|
|
110
|
+
|
|
111
|
+
const json = safeStringify(file.data);
|
|
112
|
+
```
|
|
113
|
+
|
|
114
|
+
---
|
|
115
|
+
|
|
116
|
+
## **Options**
|
|
117
|
+
|
|
118
|
+
```ts
|
|
119
|
+
interface SafeJsonLoaderOptions {
|
|
120
|
+
maxFiles?: number; // default 100
|
|
121
|
+
maxTotalBytes?: number; // default 10 MB
|
|
122
|
+
maxFileBytes?: number; // default 2 MB
|
|
123
|
+
httpTimeoutMs?: number; // default 8000
|
|
124
|
+
maxConcurrency?: number; // default 5
|
|
125
|
+
looseJsonContentType?: boolean; // default true
|
|
126
|
+
maxJsonDepth?: number; // default 50
|
|
127
|
+
logger?: JsonLoaderLogger;
|
|
128
|
+
onFileLoaded?: (file) => void;
|
|
129
|
+
onFileSkipped?: (info) => void;
|
|
130
|
+
}
|
|
131
|
+
```
|
|
132
|
+
|
|
133
|
+
---
|
|
134
|
+
|
|
135
|
+
## **Returned Structure**
|
|
136
|
+
|
|
137
|
+
Each loaded file has the shape:
|
|
138
|
+
|
|
139
|
+
```ts
|
|
140
|
+
interface LoadedJsonFile {
|
|
141
|
+
name: string; // file name or URL basename
|
|
142
|
+
data: JsonValue; // sanitized JSON
|
|
143
|
+
__source: string; // absolute path or URL
|
|
144
|
+
}
|
|
145
|
+
```
|
|
146
|
+
|
|
147
|
+
---
|
|
148
|
+
|
|
149
|
+
## **Security Guarantees**
|
|
150
|
+
|
|
151
|
+
- ✔ Prototype pollution prevented
|
|
152
|
+
- ✔ No inherited prototypes
|
|
153
|
+
- ✔ Depth‑limited
|
|
154
|
+
- ✔ Size‑limited
|
|
155
|
+
- ✔ Safe remote fetch
|
|
156
|
+
- ✔ Concurrency‑limited
|
|
157
|
+
- ✔ Sanitized before user code touches it
|
|
158
|
+
|
|
159
|
+
---
|
|
160
|
+
|
|
161
|
+
## **Error Handling**
|
|
162
|
+
|
|
163
|
+
All errors are thrown as:
|
|
164
|
+
|
|
165
|
+
```ts
|
|
166
|
+
class JsonLoaderError extends Error {
|
|
167
|
+
code: string;
|
|
168
|
+
}
|
|
169
|
+
```
|
|
170
|
+
|
|
171
|
+
Example:
|
|
172
|
+
|
|
173
|
+
```ts
|
|
174
|
+
try {
|
|
175
|
+
await loadSafeJsonResources("./bad.json");
|
|
176
|
+
} catch (err) {
|
|
177
|
+
console.error(err.code, err.message);
|
|
178
|
+
}
|
|
179
|
+
```
|
|
180
|
+
|
|
181
|
+
---
|
|
182
|
+
|
|
183
|
+
## **License**
|
|
184
|
+
|
|
185
|
+
MIT
|
|
186
|
+
|
|
187
|
+
---
|
|
188
|
+
|
|
189
|
+
This README documents all three entry points: the resource loader, the string parser, and the object sanitizer.
|
|
190
|
+
|
|
191
|
+
**Best practices:** always sanitize `req.body` before schema validation, and always set `maxJsonDepth` in production.
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
"use strict";
// CommonJS entry point emitted by the TypeScript compiler.
// __createBinding installs a live, read-only re-export binding on `o` for
// property `k` of module `m` (falls back to a plain copy on engines without
// Object.create support).
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    // Re-wrap in a getter unless the source module already exposes a safe accessor.
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// __exportStar re-exports every named (non-default) binding of `m` that is
// not already present on `exports`.
var __exportStar = (this && this.__exportStar) || function(m, exports) {
    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
// Public types
__exportStar(require("./types.js"), exports);
__exportStar(require("./logger.js"), exports);
__exportStar(require("./safeJsonLoader.js"), exports);
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;AACA,eAAe;AACf,6CAA2B;AAC3B,8CAA4B;AAC5B,sDAAoC"}
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
"use strict";
// Interop helper emitted by TypeScript: wraps CommonJS modules so `default` imports work.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ConsoleJsonLoaderLogger = void 0;
exports.mergeOptions = mergeOptions;
exports.logWith = logWith;
// logger.ts
const safe_json_stringify_1 = __importDefault(require("safe-json-stringify"));
// Default logger that discards everything; callers opt in to logging explicitly.
const NOOP_LOGGER = {
    log: () => {
        // no-op
    },
};
// Baseline resolved options used by mergeOptions when the caller omits a field.
const DEFAULT_OPTIONS = {
    maxFiles: 100,
    maxTotalBytes: 10 * 1024 * 1024, // 10 MB
    maxFileBytes: 2 * 1024 * 1024, // 2 MB
    httpTimeoutMs: 8000,
    maxConcurrency: 5,
    looseJsonContentType: true, // accept any content-type containing "json"
    maxJsonDepth: 50,
    logger: NOOP_LOGGER,
    onFileLoaded: () => { },
    onFileSkipped: () => { },
};
|
|
28
|
+
/**
 * Resolve user-supplied options against DEFAULT_OPTIONS.
 * Logger and callback fields fall back to their defaults only when
 * null/undefined, so explicit user values always win.
 */
function mergeOptions(opts) {
    const resolved = {
        ...DEFAULT_OPTIONS,
        ...opts,
    };
    resolved.logger = opts?.logger ?? DEFAULT_OPTIONS.logger;
    resolved.onFileLoaded = opts?.onFileLoaded ?? DEFAULT_OPTIONS.onFileLoaded;
    resolved.onFileSkipped = opts?.onFileSkipped ?? DEFAULT_OPTIONS.onFileSkipped;
    return resolved;
}
|
|
38
|
+
/**
 * Adapter that routes loader log entries to the global console,
 * serializing `meta` with safe-json-stringify when present.
 */
class ConsoleJsonLoaderLogger {
    log(level, message, meta) {
        const payload = meta
            ? `${message} | ${(0, safe_json_stringify_1.default)(meta)}`
            : message;
        // Dispatch to the matching console method; unknown levels are ignored,
        // mirroring a switch with no default case.
        if (level === "debug" || level === "info" || level === "warn" || level === "error") {
            console[level](payload);
        }
    }
}
|
|
60
|
+
exports.ConsoleJsonLoaderLogger = ConsoleJsonLoaderLogger;
|
|
61
|
+
/**
 * Forward a log entry to the logger configured in the resolved options.
 */
function logWith(options, level, message, meta) {
    const { logger } = options;
    logger.log(level, message, meta);
}
|
|
67
|
+
//# sourceMappingURL=logger.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"logger.js","sourceRoot":"","sources":["../../src/logger.ts"],"names":[],"mappings":";;;;;;AA4BA,oCAYC;AA4BD,0BAOC;AA3ED,YAAY;AACZ,8EAA4C;AAQ5C,MAAM,WAAW,GAAqB;IACpC,GAAG,EAAE,GAAG,EAAE;QACR,QAAQ;IACV,CAAC;CACF,CAAC;AAEF,MAAM,eAAe,GAAkC;IACrD,QAAQ,EAAE,GAAG;IACb,aAAa,EAAE,EAAE,GAAG,IAAI,GAAG,IAAI,EAAE,QAAQ;IACzC,YAAY,EAAE,CAAC,GAAG,IAAI,GAAG,IAAI,EAAI,OAAO;IACxC,aAAa,EAAE,IAAI;IACnB,cAAc,EAAE,CAAC;IACjB,oBAAoB,EAAE,IAAI;IAC1B,YAAY,EAAE,EAAE;IAChB,MAAM,EAAE,WAAW;IACnB,YAAY,EAAE,GAAG,EAAE,GAAE,CAAC;IACtB,aAAa,EAAE,GAAG,EAAE,GAAE,CAAC;CACxB,CAAC;AAEF,SAAgB,YAAY,CAC1B,IAA4B;IAE5B,MAAM,MAAM,GAAG,IAAI,EAAE,MAAM,IAAI,eAAe,CAAC,MAAM,CAAC;IAEtD,OAAO;QACL,GAAG,eAAe;QAClB,GAAG,IAAI;QACP,MAAM;QACN,YAAY,EAAE,IAAI,EAAE,YAAY,IAAI,eAAe,CAAC,YAAY;QAChE,aAAa,EAAE,IAAI,EAAE,aAAa,IAAI,eAAe,CAAC,aAAa;KACpE,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAa,uBAAuB;IAClC,GAAG,CAAC,KAAe,EAAE,OAAe,EAAE,IAA8B;QAClE,MAAM,OAAO,GAAG,IAAI,CAAC,CAAC,CAAC,GAAG,OAAO,MAAM,IAAA,6BAAS,EAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC;QACnE,QAAQ,KAAK,EAAE,CAAC;YACd,KAAK,OAAO;gBACV,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;gBACvB,MAAM;YACR,KAAK,MAAM;gBACT,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;gBACtB,MAAM;YACR,KAAK,MAAM;gBACT,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;gBACtB,MAAM;YACR,KAAK,OAAO;gBACV,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;gBACvB,MAAM;QACV,CAAC;IACH,CAAC;CACF;AAlBD,0DAkBC;AAED;;GAEG;AACH,SAAgB,OAAO,CACrB,OAAsC,EACtC,KAAe,EACf,OAAe,EACf,IAA8B;IAE9B,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE,OAAO,EAAE,IAAI,CAAC,CAAC;AAC3C,CAAC"}
|
|
@@ -0,0 +1,391 @@
|
|
|
1
|
+
"use strict";
// safeJsonLoader.ts
// Interop helper emitted by TypeScript: wraps CommonJS modules so `default` imports work.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.JsonLoaderError = void 0;
exports.sanitizePrototypePollution = sanitizePrototypePollution;
exports.loadSafeJsonResources = loadSafeJsonResources;
exports.parseSafeJsonString = parseSafeJsonString;
exports.sanitizeParsedJsonObject = sanitizeParsedJsonObject;
const promises_1 = __importDefault(require("fs/promises"));
const path_1 = __importDefault(require("path"));
const node_fetch_1 = __importDefault(require("node-fetch"));
const logger_js_1 = require("./logger.js");
/* -------------------------------------------------------------------------- */
/*                                 ERROR TYPE                                 */
/* -------------------------------------------------------------------------- */
|
|
19
|
+
/**
 * Error type thrown for every loader failure.
 * `code` carries a machine-readable identifier (e.g. "REMOTE_FETCH_ERROR")
 * so callers can branch without parsing the message.
 */
class JsonLoaderError extends Error {
    // Own "name" property, matching an explicit assignment in the constructor.
    name = "JsonLoaderError";
    constructor(message, code) {
        super(message);
        this.code = code;
    }
}
exports.JsonLoaderError = JsonLoaderError;
|
|
28
|
+
/* -------------------------------------------------------------------------- */
|
|
29
|
+
/* HELPERS */
|
|
30
|
+
/* -------------------------------------------------------------------------- */
|
|
31
|
+
/**
 * True when `str` begins with an http:// or https:// scheme (case-insensitive).
 */
function isHttpUrl(str) {
    const lowered = String(str).toLowerCase();
    return lowered.startsWith("http://") || lowered.startsWith("https://");
}
|
|
34
|
+
/**
 * True when the path's extension is .json (case-insensitive).
 */
function isJsonFile(file) {
    const ext = path_1.default.extname(file).toLowerCase();
    return ext === ".json";
}
|
|
37
|
+
/**
 * Normalize the loader input (string, URL instance, or URL-like object)
 * to a plain string.
 *
 * Fix: the original had two identical branches (`if (input.href) return
 * String(input); return String(input);` — dead code) and threw a raw
 * TypeError when reading `.href` on null/undefined. Null-ish input now
 * yields "" so the caller raises its consistent INPUT_VALIDATION_ERROR.
 */
function ensureStringInput(input) {
    if (typeof input === "string")
        return input;
    if (input == null)
        return "";
    // URL instances (and anything else) stringify via their toString().
    return String(input);
}
|
|
44
|
+
// Minimal concurrency limiter to avoid external deps.
// Returns a `run(task)` function: at most `maxConcurrency` tasks execute at
// once; the rest wait in FIFO order.
function createLimiter(maxConcurrency) {
    let active = 0;
    const waiting = [];
    // Called when a task settles: free the slot and start the next waiter.
    const release = () => {
        active--;
        const resume = waiting.shift();
        if (resume) {
            resume();
        }
    };
    return (task) => new Promise((resolve, reject) => {
        const start = () => {
            active++;
            task().then(
                (value) => {
                    release();
                    resolve(value);
                },
                (err) => {
                    release();
                    reject(err);
                }
            );
        };
        if (active < maxConcurrency) {
            start();
        }
        else {
            waiting.push(start);
        }
    });
}
|
|
77
|
+
/* -------------------------------------------------------------------------- */
|
|
78
|
+
/* SECURITY: PROTOTYPE POLLUTION SANITIZER */
|
|
79
|
+
/* -------------------------------------------------------------------------- */
|
|
80
|
+
// Keys that enable prototype-pollution attacks when merged into objects.
const POLLUTION_KEYS = new Set(["__proto__", "constructor", "prototype"]);
/**
 * Deeply clones a parsed JSON value, dropping `__proto__`, `constructor`,
 * and `prototype` keys and rebuilding every object on a null prototype so
 * nothing is inherited from Object.prototype.
 *
 * @param input   Parsed JSON value to clean.
 * @param options Optional { maxDepth } hard recursion cap (default 1_000);
 *                callers normally pass the much smaller options.maxJsonDepth.
 * @returns A sanitized deep copy of `input`.
 * @throws JsonLoaderError ("JSON_DEPTH_SANITATION_LIMIT") when nesting exceeds maxDepth.
 */
function sanitizePrototypePollution(input, options) {
    const depthCap = options?.maxDepth ?? 1_000; // extremely high default; the real limit is maxJsonDepth
    return deepSanitize(input, 0, depthCap);
}
// Recursive worker for sanitizePrototypePollution.
function deepSanitize(value, depth, maxDepth) {
    if (depth > maxDepth) {
        // Hard stop; caller should usually enforce a much smaller depth via options.maxJsonDepth.
        throw new JsonLoaderError(`Maximum JSON depth exceeded during sanitation (depth > ${maxDepth}).`, "JSON_DEPTH_SANITATION_LIMIT");
    }
    if (Array.isArray(value)) {
        return value.map((item) => deepSanitize(item, depth + 1, maxDepth));
    }
    if (value && typeof value === "object") {
        const cleaned = Object.create(null);
        for (const [key, child] of Object.entries(value)) {
            if (POLLUTION_KEYS.has(key)) {
                continue;
            }
            cleaned[key] = deepSanitize(child, depth + 1, maxDepth);
        }
        return cleaned;
    }
    // Primitives (string/number/boolean/null) pass through untouched.
    return value;
}
|
|
113
|
+
/**
 * Returns the maximum nesting depth of a JSON value.
 * Scalars, null, and empty containers at the root have depth 0; each level
 * of object/array nesting adds 1.
 */
function calculateDepth(value, currentDepth = 0) {
    if (value === null)
        return currentDepth;
    if (Array.isArray(value)) {
        return value.reduce(
            (deepest, item) => Math.max(deepest, calculateDepth(item, currentDepth + 1)),
            currentDepth
        );
    }
    if (typeof value === "object") {
        return Object.values(value).reduce(
            (deepest, child) => Math.max(deepest, calculateDepth(child, currentDepth + 1)),
            currentDepth
        );
    }
    return currentDepth;
}
|
|
139
|
+
/* -------------------------------------------------------------------------- */
|
|
140
|
+
/* REMOTE JSON LOADING */
|
|
141
|
+
/* -------------------------------------------------------------------------- */
|
|
142
|
+
/**
 * GET a URL with a hard timeout enforced via AbortController.
 * Network/abort failures are wrapped in JsonLoaderError ("REMOTE_FETCH_ERROR").
 *
 * Fix: the original duplicated `clearTimeout(timeoutId)` in both the success
 * and error paths; a `finally` block now guarantees the timer is cleared on
 * every path with no duplication.
 */
async function fetchWithTimeout(url, opts) {
    const controller = new AbortController();
    const timeoutId = setTimeout(() => controller.abort(), opts.httpTimeoutMs);
    try {
        // `await` here so the catch/finally apply to the fetch itself.
        return await (0, node_fetch_1.default)(url, { method: "GET", signal: controller.signal });
    }
    catch (err) {
        throw new JsonLoaderError(`Failed to fetch remote JSON at ${url}: ${err?.message ?? String(err)}`, "REMOTE_FETCH_ERROR");
    }
    finally {
        clearTimeout(timeoutId);
    }
}
|
|
155
|
+
/**
 * Throws unless the response advertises a JSON content-type.
 * With looseJsonContentType, any type containing "json" is accepted;
 * otherwise it must start with "application/json".
 */
function assertJsonContentType(url, res, opts) {
    const rawType = res.headers.get("content-type") ?? "";
    const normalized = rawType.toLowerCase();
    const acceptable = opts.looseJsonContentType
        ? normalized.includes("json")
        : normalized.startsWith("application/json");
    if (!acceptable) {
        // The error message reports the header exactly as received.
        throw new JsonLoaderError(`Remote URL does not advertise JSON content-type at ${url}: '${rawType}'`, "REMOTE_CONTENT_TYPE_ERROR");
    }
}
|
|
165
|
+
/**
 * Fetch a single remote JSON document, validate HTTP status and
 * content-type, and return a sanitized copy of the body.
 * Throws JsonLoaderError ("REMOTE_FETCH_STATUS_ERROR" on non-2xx,
 * "REMOTE_CONTENT_TYPE_ERROR" on wrong content-type).
 */
async function loadRemoteJson(url, opts) {
    const res = await fetchWithTimeout(url, opts);
    if (!res.ok) {
        throw new JsonLoaderError(`Remote fetch failed at ${url}: ${res.status} ${res.statusText}`, "REMOTE_FETCH_STATUS_ERROR");
    }
    assertJsonContentType(url, res, opts);
    const raw = (await res.json());
    // Strip pollution keys and enforce the configured depth cap before returning.
    const sanitized = sanitizePrototypePollution(raw, {
        maxDepth: opts.maxJsonDepth,
    });
    return sanitized;
}
|
|
177
|
+
/**
 * Load every JSON file listed by a remote index document.
 * The index may be a plain array of URL strings or `{ files: [...] }`.
 * Non-string entries are silently dropped; each remaining URL is fetched
 * through the shared concurrency limiter.
 */
async function loadRemoteIndex(indexUrl, indexJson, opts, limitRun) {
    let fileList;
    if (Array.isArray(indexJson)) {
        fileList = indexJson;
    }
    else if (indexJson &&
        typeof indexJson === "object" &&
        Array.isArray(indexJson.files)) {
        fileList = indexJson.files;
    }
    // fileList stays undefined for any other shape.
    if (!Array.isArray(fileList)) {
        throw new JsonLoaderError(`Remote directory index ${indexUrl} must return an array or { files: [] }.`, "REMOTE_INDEX_FORMAT_ERROR");
    }
    // Keep only string entries; anything else in the index is ignored.
    const urls = fileList.filter((item) => typeof item === "string");
    if (urls.length === 0) {
        return [];
    }
    if (urls.length > opts.maxFiles) {
        throw new JsonLoaderError(`Remote directory index at ${indexUrl} lists ${urls.length} files, exceeding maxFiles=${opts.maxFiles}.`, "REMOTE_INDEX_LIMIT_ERROR");
    }
    (0, logger_js_1.logWith)(opts, "debug", "Loading remote index", {
        indexUrl,
        fileCount: urls.length,
    });
    // Fetch all listed files in parallel, bounded by maxConcurrency.
    const tasks = urls.map((fileUrl) => limitRun(async () => {
        if (!isHttpUrl(fileUrl)) {
            throw new JsonLoaderError(`Invalid remote file URL in index at ${indexUrl}: ${fileUrl}`, "REMOTE_INDEX_URL_ERROR");
        }
        const data = await loadRemoteJson(fileUrl, opts);
        // Frozen record: name is the URL's basename, __source the full URL.
        const file = Object.freeze({
            name: path_1.default.basename(new URL(fileUrl).pathname),
            data,
            __source: fileUrl,
        });
        opts.onFileLoaded(file);
        (0, logger_js_1.logWith)(opts, "info", "Remote JSON file loaded", {
            url: fileUrl,
        });
        return file;
    }));
    // Promise.all is fail-fast: one bad file rejects the whole index load.
    return Promise.all(tasks);
}
|
|
219
|
+
/* -------------------------------------------------------------------------- */
|
|
220
|
+
/* LOCAL FILE JSON LOADER */
|
|
221
|
+
/* -------------------------------------------------------------------------- */
|
|
222
|
+
/**
 * Read, parse, and sanitize a single local .json file.
 * Throws JsonLoaderError ("LOCAL_FILE_SIZE_ERROR" when the file exceeds
 * maxFileBytes, "LOCAL_JSON_PARSE_ERROR" on invalid JSON).
 */
async function loadLocalJsonFile(filePath, opts) {
    // Check the size up front so oversized files are never read into memory.
    const stats = await promises_1.default.stat(filePath);
    if (stats.size > opts.maxFileBytes) {
        // The skip callback fires even though we also throw — observers see the
        // skip, while the caller's Promise still rejects.
        opts.onFileSkipped({
            source: filePath,
            reason: `File exceeds maxFileBytes=${opts.maxFileBytes}`,
        });
        (0, logger_js_1.logWith)(opts, "warn", "Skipping local file due to size limit", {
            filePath,
            size: stats.size,
            maxFileBytes: opts.maxFileBytes,
        });
        throw new JsonLoaderError(`Local file too large: ${filePath} (${stats.size} bytes > ${opts.maxFileBytes}).`, "LOCAL_FILE_SIZE_ERROR");
    }
    const content = await promises_1.default.readFile(filePath, "utf8");
    let raw;
    try {
        raw = JSON.parse(content);
    }
    catch (err) {
        throw new JsonLoaderError(`Invalid JSON in file ${filePath}: ${err?.message ?? String(err)}`, "LOCAL_JSON_PARSE_ERROR");
    }
    // Strip __proto__/constructor/prototype and enforce the depth cap.
    const sanitized = sanitizePrototypePollution(raw, {
        maxDepth: opts.maxJsonDepth,
    });
    const file = Object.freeze({
        name: path_1.default.basename(filePath),
        data: sanitized,
        __source: filePath,
    });
    opts.onFileLoaded(file);
    (0, logger_js_1.logWith)(opts, "info", "Local JSON file loaded", { filePath });
    return file;
}
|
|
256
|
+
/**
 * Load every .json file in a directory (non-recursive), subject to
 * maxFiles and maxTotalBytes limits, with I/O bounded by limitRun.
 */
async function loadLocalDirectory(dirPath, opts, limitRun) {
    const files = await promises_1.default.readdir(dirPath);
    const jsonFiles = files.filter(isJsonFile);
    if (jsonFiles.length === 0) {
        (0, logger_js_1.logWith)(opts, "debug", "No JSON files found in directory", { dirPath });
        return [];
    }
    if (jsonFiles.length > opts.maxFiles) {
        throw new JsonLoaderError(`Directory ${dirPath} contains ${jsonFiles.length} JSON files, exceeding maxFiles=${opts.maxFiles}.`, "LOCAL_DIR_LIMIT_ERROR");
    }
    // Enforce total size limit
    // Stats are gathered serially on purpose: the running total short-circuits
    // as soon as the cap is crossed, before any file content is read.
    let totalSize = 0;
    const filePaths = [];
    for (const file of jsonFiles) {
        const full = path_1.default.join(dirPath, file);
        const stats = await promises_1.default.stat(full);
        totalSize += stats.size;
        if (totalSize > opts.maxTotalBytes) {
            throw new JsonLoaderError(`Total size of JSON files in ${dirPath} exceeds maxTotalBytes=${opts.maxTotalBytes}.`, "LOCAL_DIR_TOTAL_SIZE_ERROR");
        }
        filePaths.push(full);
    }
    (0, logger_js_1.logWith)(opts, "debug", "Loading local directory", {
        dirPath,
        fileCount: filePaths.length,
        totalSize,
    });
    // Individual file loads run in parallel, bounded by the shared limiter.
    const tasks = filePaths.map((filePath) => limitRun(() => loadLocalJsonFile(filePath, opts)));
    return Promise.all(tasks);
}
|
|
286
|
+
/* -------------------------------------------------------------------------- */
|
|
287
|
+
/* PUBLIC API: SAFE JSON LOADER */
|
|
288
|
+
/* -------------------------------------------------------------------------- */
|
|
289
|
+
/**
|
|
290
|
+
* Load one or more JSON resources from:
|
|
291
|
+
* - Local file (.json)
|
|
292
|
+
* - Local directory (all .json files)
|
|
293
|
+
* - Remote URL returning JSON (object/array)
|
|
294
|
+
* - Remote URL acting as an index of JSON URLs (array or { files: [] })
|
|
295
|
+
*
|
|
296
|
+
* Security features:
|
|
297
|
+
* - Prototype‑pollution‑safe deep clone (strips __proto__, constructor, prototype)
|
|
298
|
+
* - Max depth for parsed JSON structures
|
|
299
|
+
* - Max file size and total directory size
|
|
300
|
+
* - Concurrency limit for I/O (local and remote)
|
|
301
|
+
* - HTTP timeout and content‑type checks
|
|
302
|
+
*
|
|
303
|
+
* This function does not enforce any domain/schema validation — callers
|
|
304
|
+
* should layer their own validation on top of the loaded `data`.
|
|
305
|
+
*/
|
|
306
|
+
async function loadSafeJsonResources(input, options) {
    const inputStr = ensureStringInput(input);
    if (!inputStr) {
        throw new JsonLoaderError("Input path/URL must be a non-empty string.", "INPUT_VALIDATION_ERROR");
    }
    const opts = (0, logger_js_1.mergeOptions)(options);
    const limitRun = createLimiter(opts.maxConcurrency);
    /* ---------------------------------- REMOTE --------------------------------- */
    if (isHttpUrl(inputStr)) {
        const json = await loadRemoteJson(inputStr, opts);
        // A top-level array, or an object with a `files` array, is treated as an
        // index of further URLs rather than as data.
        // NOTE(review): a remote document that is itself a plain JSON data array
        // is therefore always routed through the index path (non-string entries
        // are dropped, possibly yielding []) — confirm this is intended.
        if (Array.isArray(json) ||
            (json &&
                typeof json === "object" &&
                Array.isArray(json.files))) {
            return loadRemoteIndex(inputStr, json, opts, limitRun);
        }
        // Otherwise the URL itself is a single JSON document.
        const file = Object.freeze({
            name: path_1.default.basename(new URL(inputStr).pathname),
            data: json,
            __source: inputStr,
        });
        opts.onFileLoaded(file);
        (0, logger_js_1.logWith)(opts, "info", "Remote JSON file loaded", { url: inputStr });
        return [file];
    }
    /* ----------------------------------- LOCAL ---------------------------------- */
    const resolved = path_1.default.resolve(inputStr);
    let stats;
    try {
        stats = await promises_1.default.stat(resolved);
    }
    catch {
        throw new JsonLoaderError(`Local path does not exist: ${resolved}`, "LOCAL_PATH_NOT_FOUND");
    }
    if (stats.isFile()) {
        if (!isJsonFile(resolved)) {
            throw new JsonLoaderError(`File is not a .json file: ${resolved}`, "LOCAL_FILE_EXTENSION_ERROR");
        }
        return [await loadLocalJsonFile(resolved, opts)];
    }
    if (stats.isDirectory()) {
        return loadLocalDirectory(resolved, opts, limitRun);
    }
    // Anything else (socket, FIFO, device file, ...) is rejected.
    throw new JsonLoaderError(`Unsupported path type: ${resolved}`, "LOCAL_PATH_TYPE_ERROR");
}
|
|
351
|
+
/**
 * Safely parse and sanitize a raw JSON string.
 *
 * Parsing failures are wrapped in JsonLoaderError ("STRING_JSON_PARSE_ERROR");
 * the parsed value is then cleaned of __proto__/constructor/prototype keys
 * and depth-checked.
 *
 * @param input Raw JSON text.
 * @param opts  Optional loader options; only maxJsonDepth (default 50) is used.
 * @returns The sanitized, null-prototype JSON value.
 */
function parseSafeJsonString(input, opts) {
    let parsed;
    try {
        parsed = JSON.parse(input);
    }
    catch (err) {
        throw new JsonLoaderError(`Invalid JSON string: ${err?.message ?? String(err)}`, "STRING_JSON_PARSE_ERROR");
    }
    return sanitizePrototypePollution(parsed, {
        maxDepth: opts?.maxJsonDepth ?? 50,
    });
}
|
|
375
|
+
/**
 * Sanitize a JSON value that has already been parsed elsewhere
 * (e.g. Express `req.body`).
 *
 * Strips __proto__/constructor/prototype keys, rebuilds objects on a null
 * prototype, and enforces maxJsonDepth.
 *
 * @param input Parsed JSON value.
 * @param opts  Optional loader options; only maxJsonDepth (default 50) is used.
 * @returns The sanitized, null-prototype JSON value.
 */
function sanitizeParsedJsonObject(input, opts) {
    const depthLimit = opts?.maxJsonDepth ?? 50;
    return sanitizePrototypePollution(input, { maxDepth: depthLimit });
}
|
|
391
|
+
//# sourceMappingURL=safeJsonLoader.js.map
|