@map-protocol/map1 0.0.1 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/dist/constants.d.ts +12 -0
- package/dist/constants.js +16 -0
- package/dist/core.d.ts +12 -0
- package/dist/core.js +210 -0
- package/dist/errors.d.ts +13 -0
- package/dist/errors.js +21 -0
- package/dist/index.d.ts +15 -0
- package/dist/index.js +74 -0
- package/dist/json-adapter.d.ts +14 -0
- package/dist/json-adapter.js +224 -0
- package/dist/projection.d.ts +1 -0
- package/dist/projection.js +141 -0
- package/package.json +34 -13
- package/README.md +0 -7
- package/index.js +0 -24
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Aaron Gerard Davidson
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
/** Canonical header: "MAP1\0" */
|
|
2
|
+
export declare const CANON_HDR: Buffer;
|
|
3
|
+
/** MCF type tags */
|
|
4
|
+
export declare const TAG_STRING = 1;
|
|
5
|
+
export declare const TAG_BYTES = 2;
|
|
6
|
+
export declare const TAG_LIST = 3;
|
|
7
|
+
export declare const TAG_MAP = 4;
|
|
8
|
+
/** Structural limits */
|
|
9
|
+
export declare const MAX_CANON_BYTES = 1048576;
|
|
10
|
+
export declare const MAX_DEPTH = 32;
|
|
11
|
+
export declare const MAX_MAP_ENTRIES = 65535;
|
|
12
|
+
export declare const MAX_LIST_ENTRIES = 65535;
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// MAP v1.0 constants
|
|
3
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
4
|
+
exports.MAX_LIST_ENTRIES = exports.MAX_MAP_ENTRIES = exports.MAX_DEPTH = exports.MAX_CANON_BYTES = exports.TAG_MAP = exports.TAG_LIST = exports.TAG_BYTES = exports.TAG_STRING = exports.CANON_HDR = void 0;
|
|
5
|
+
/** Canonical header: "MAP1\0" */
|
|
6
|
+
exports.CANON_HDR = Buffer.from("MAP1\0", "binary");
|
|
7
|
+
/** MCF type tags */
|
|
8
|
+
exports.TAG_STRING = 0x01;
|
|
9
|
+
exports.TAG_BYTES = 0x02;
|
|
10
|
+
exports.TAG_LIST = 0x03;
|
|
11
|
+
exports.TAG_MAP = 0x04;
|
|
12
|
+
/** Structural limits */
|
|
13
|
+
exports.MAX_CANON_BYTES = 1048576;
|
|
14
|
+
exports.MAX_DEPTH = 32;
|
|
15
|
+
exports.MAX_MAP_ENTRIES = 65535;
|
|
16
|
+
exports.MAX_LIST_ENTRIES = 65535;
|
package/dist/core.d.ts
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
export declare function sha256Hex(buf: Buffer): string;
|
|
2
|
+
/** Unsigned byte-wise comparison (memcmp semantics). */
|
|
3
|
+
export declare function keyCmp(a: Buffer, b: Buffer): number;
|
|
4
|
+
/** Reject strings containing unpaired surrogates (lone high/low code units). */
|
|
5
|
+
export declare function rejectSurrogatesInString(s: string): void;
|
|
6
|
+
/** Validate a byte buffer as UTF-8 scalar values. Returns the decoded JS string. */
|
|
7
|
+
export declare function validateUtf8ScalarBytes(bytes: Uint8Array): string;
|
|
8
|
+
export declare function mcfEncodeValue(val: unknown, depth: number): Buffer;
|
|
9
|
+
/** Encode a descriptor value to canonical bytes (MAP1 header + MCF body). */
|
|
10
|
+
export declare function canonBytesFromValue(val: unknown): Buffer;
|
|
11
|
+
/** Compute MID from pre-formed canonical bytes. Validates header + MCF structure. */
|
|
12
|
+
export declare function midFromCanonBytes(canon: Buffer): string;
|
package/dist/core.js
ADDED
|
@@ -0,0 +1,210 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// MAP v1.0 core: MCF binary format encode/decode, key comparison, UTF-8 validation
|
|
3
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
4
|
+
exports.sha256Hex = sha256Hex;
|
|
5
|
+
exports.keyCmp = keyCmp;
|
|
6
|
+
exports.rejectSurrogatesInString = rejectSurrogatesInString;
|
|
7
|
+
exports.validateUtf8ScalarBytes = validateUtf8ScalarBytes;
|
|
8
|
+
exports.mcfEncodeValue = mcfEncodeValue;
|
|
9
|
+
exports.canonBytesFromValue = canonBytesFromValue;
|
|
10
|
+
exports.midFromCanonBytes = midFromCanonBytes;
|
|
11
|
+
const crypto_1 = require("crypto");
|
|
12
|
+
const constants_1 = require("./constants");
|
|
13
|
+
const errors_1 = require("./errors");
|
|
14
|
+
// ────────── helpers ──────────
|
|
15
|
+
function sha256Hex(buf) {
|
|
16
|
+
return (0, crypto_1.createHash)("sha256").update(buf).digest("hex");
|
|
17
|
+
}
|
|
18
|
+
/** Unsigned byte-wise comparison (memcmp semantics). */
|
|
19
|
+
function keyCmp(a, b) {
|
|
20
|
+
const m = Math.min(a.length, b.length);
|
|
21
|
+
for (let i = 0; i < m; i++) {
|
|
22
|
+
if (a[i] !== b[i])
|
|
23
|
+
return a[i] < b[i] ? -1 : 1;
|
|
24
|
+
}
|
|
25
|
+
if (a.length === b.length)
|
|
26
|
+
return 0;
|
|
27
|
+
return a.length < b.length ? -1 : 1;
|
|
28
|
+
}
|
|
29
|
+
/** Reject strings containing unpaired surrogates (lone high/low code units). */
|
|
30
|
+
function rejectSurrogatesInString(s) {
|
|
31
|
+
for (let i = 0; i < s.length; i++) {
|
|
32
|
+
const cu = s.charCodeAt(i);
|
|
33
|
+
if (cu >= 0xd800 && cu <= 0xdbff) {
|
|
34
|
+
if (i + 1 >= s.length)
|
|
35
|
+
throw new errors_1.MapError(errors_1.ERR_UTF8, "unpaired surrogate");
|
|
36
|
+
const cu2 = s.charCodeAt(i + 1);
|
|
37
|
+
if (!(cu2 >= 0xdc00 && cu2 <= 0xdfff))
|
|
38
|
+
throw new errors_1.MapError(errors_1.ERR_UTF8, "unpaired surrogate");
|
|
39
|
+
i++; // consume well-formed pair (astral scalar)
|
|
40
|
+
continue;
|
|
41
|
+
}
|
|
42
|
+
if (cu >= 0xdc00 && cu <= 0xdfff)
|
|
43
|
+
throw new errors_1.MapError(errors_1.ERR_UTF8, "unpaired surrogate");
|
|
44
|
+
}
|
|
45
|
+
}
|
|
46
|
+
/** Validate a byte buffer as UTF-8 scalar values. Returns the decoded JS string. */
|
|
47
|
+
function validateUtf8ScalarBytes(bytes) {
|
|
48
|
+
let s;
|
|
49
|
+
try {
|
|
50
|
+
const dec = new TextDecoder("utf-8", { fatal: true });
|
|
51
|
+
s = dec.decode(bytes);
|
|
52
|
+
}
|
|
53
|
+
catch {
|
|
54
|
+
throw new errors_1.MapError(errors_1.ERR_UTF8, "invalid utf8");
|
|
55
|
+
}
|
|
56
|
+
rejectSurrogatesInString(s);
|
|
57
|
+
return s;
|
|
58
|
+
}
|
|
59
|
+
function u32be(n) {
|
|
60
|
+
const b = Buffer.alloc(4);
|
|
61
|
+
b.writeUInt32BE(n >>> 0, 0);
|
|
62
|
+
return b;
|
|
63
|
+
}
|
|
64
|
+
function readU32BE(buf, off) {
|
|
65
|
+
if (off + 4 > buf.length)
|
|
66
|
+
throw new errors_1.MapError(errors_1.ERR_CANON_MCF, "truncated u32");
|
|
67
|
+
return [buf.readUInt32BE(off), off + 4];
|
|
68
|
+
}
|
|
69
|
+
function mcfDecodeOne(buf, off, depth) {
|
|
70
|
+
if (off >= buf.length)
|
|
71
|
+
throw new errors_1.MapError(errors_1.ERR_CANON_MCF, "truncated tag");
|
|
72
|
+
const tag = buf[off];
|
|
73
|
+
off += 1;
|
|
74
|
+
if (tag === constants_1.TAG_STRING) {
|
|
75
|
+
let n;
|
|
76
|
+
[n, off] = readU32BE(buf, off);
|
|
77
|
+
if (off + n > buf.length)
|
|
78
|
+
throw new errors_1.MapError(errors_1.ERR_CANON_MCF, "truncated string");
|
|
79
|
+
const sb = buf.subarray(off, off + n);
|
|
80
|
+
off += n;
|
|
81
|
+
const s = validateUtf8ScalarBytes(sb);
|
|
82
|
+
return [s, off, Buffer.from(sb)];
|
|
83
|
+
}
|
|
84
|
+
if (tag === constants_1.TAG_BYTES) {
|
|
85
|
+
let n;
|
|
86
|
+
[n, off] = readU32BE(buf, off);
|
|
87
|
+
if (off + n > buf.length)
|
|
88
|
+
throw new errors_1.MapError(errors_1.ERR_CANON_MCF, "truncated bytes");
|
|
89
|
+
const b = buf.subarray(off, off + n);
|
|
90
|
+
off += n;
|
|
91
|
+
return [Buffer.from(b), off, null];
|
|
92
|
+
}
|
|
93
|
+
if (tag === constants_1.TAG_LIST) {
|
|
94
|
+
if (depth + 1 > constants_1.MAX_DEPTH)
|
|
95
|
+
throw new errors_1.MapError(errors_1.ERR_LIMIT_DEPTH, "depth");
|
|
96
|
+
let count;
|
|
97
|
+
[count, off] = readU32BE(buf, off);
|
|
98
|
+
if (count > constants_1.MAX_LIST_ENTRIES)
|
|
99
|
+
throw new errors_1.MapError(errors_1.ERR_LIMIT_SIZE, "list entries");
|
|
100
|
+
const arr = [];
|
|
101
|
+
for (let i = 0; i < count; i++) {
|
|
102
|
+
const res = mcfDecodeOne(buf, off, depth + 1);
|
|
103
|
+
arr.push(res[0]);
|
|
104
|
+
off = res[1];
|
|
105
|
+
}
|
|
106
|
+
return [arr, off, null];
|
|
107
|
+
}
|
|
108
|
+
if (tag === constants_1.TAG_MAP) {
|
|
109
|
+
if (depth + 1 > constants_1.MAX_DEPTH)
|
|
110
|
+
throw new errors_1.MapError(errors_1.ERR_LIMIT_DEPTH, "depth");
|
|
111
|
+
let count;
|
|
112
|
+
[count, off] = readU32BE(buf, off);
|
|
113
|
+
if (count > constants_1.MAX_MAP_ENTRIES)
|
|
114
|
+
throw new errors_1.MapError(errors_1.ERR_LIMIT_SIZE, "map entries");
|
|
115
|
+
const obj = {};
|
|
116
|
+
let prevKeyBytes = null;
|
|
117
|
+
for (let i = 0; i < count; i++) {
|
|
118
|
+
if (off >= buf.length)
|
|
119
|
+
throw new errors_1.MapError(errors_1.ERR_CANON_MCF, "truncated map key tag");
|
|
120
|
+
if (buf[off] !== constants_1.TAG_STRING)
|
|
121
|
+
throw new errors_1.MapError(errors_1.ERR_SCHEMA, "map key not string");
|
|
122
|
+
const [k, offK, kBytes] = mcfDecodeOne(buf, off, depth + 1);
|
|
123
|
+
off = offK;
|
|
124
|
+
const kb = kBytes;
|
|
125
|
+
if (prevKeyBytes !== null) {
|
|
126
|
+
const c = keyCmp(prevKeyBytes, kb);
|
|
127
|
+
if (c === 0)
|
|
128
|
+
throw new errors_1.MapError(errors_1.ERR_DUP_KEY, "dup");
|
|
129
|
+
if (c > 0)
|
|
130
|
+
throw new errors_1.MapError(errors_1.ERR_KEY_ORDER, "order");
|
|
131
|
+
}
|
|
132
|
+
prevKeyBytes = kb;
|
|
133
|
+
const resV = mcfDecodeOne(buf, off, depth + 1);
|
|
134
|
+
off = resV[1];
|
|
135
|
+
obj[k] = resV[0];
|
|
136
|
+
}
|
|
137
|
+
return [obj, off, null];
|
|
138
|
+
}
|
|
139
|
+
throw new errors_1.MapError(errors_1.ERR_CANON_MCF, "unknown tag");
|
|
140
|
+
}
|
|
141
|
+
// ────────── MCF encode ──────────
|
|
142
|
+
function mcfEncodeValue(val, depth) {
|
|
143
|
+
if (typeof val === "string") {
|
|
144
|
+
const b = Buffer.from(val, "utf8");
|
|
145
|
+
validateUtf8ScalarBytes(b);
|
|
146
|
+
return Buffer.concat([Buffer.from([constants_1.TAG_STRING]), u32be(b.length), b]);
|
|
147
|
+
}
|
|
148
|
+
if (Buffer.isBuffer(val)) {
|
|
149
|
+
return Buffer.concat([Buffer.from([constants_1.TAG_BYTES]), u32be(val.length), val]);
|
|
150
|
+
}
|
|
151
|
+
if (Array.isArray(val)) {
|
|
152
|
+
if (depth + 1 > constants_1.MAX_DEPTH)
|
|
153
|
+
throw new errors_1.MapError(errors_1.ERR_LIMIT_DEPTH, "depth");
|
|
154
|
+
if (val.length > constants_1.MAX_LIST_ENTRIES)
|
|
155
|
+
throw new errors_1.MapError(errors_1.ERR_LIMIT_SIZE, "list entries");
|
|
156
|
+
const parts = [Buffer.from([constants_1.TAG_LIST]), u32be(val.length)];
|
|
157
|
+
for (const it of val)
|
|
158
|
+
parts.push(mcfEncodeValue(it, depth + 1));
|
|
159
|
+
return Buffer.concat(parts);
|
|
160
|
+
}
|
|
161
|
+
if (val && typeof val === "object") {
|
|
162
|
+
if (depth + 1 > constants_1.MAX_DEPTH)
|
|
163
|
+
throw new errors_1.MapError(errors_1.ERR_LIMIT_DEPTH, "depth");
|
|
164
|
+
const keys = Object.keys(val);
|
|
165
|
+
const sorted = keys.slice().sort((ka, kb) => keyCmp(Buffer.from(ka, "utf8"), Buffer.from(kb, "utf8")));
|
|
166
|
+
// enforce ordering and uniqueness
|
|
167
|
+
for (let i = 1; i < sorted.length; i++) {
|
|
168
|
+
const a = Buffer.from(sorted[i - 1], "utf8");
|
|
169
|
+
const b = Buffer.from(sorted[i], "utf8");
|
|
170
|
+
const c = keyCmp(a, b);
|
|
171
|
+
if (c === 0)
|
|
172
|
+
throw new errors_1.MapError(errors_1.ERR_DUP_KEY, "dup");
|
|
173
|
+
if (c > 0)
|
|
174
|
+
throw new errors_1.MapError(errors_1.ERR_KEY_ORDER, "order");
|
|
175
|
+
}
|
|
176
|
+
if (sorted.length > constants_1.MAX_MAP_ENTRIES)
|
|
177
|
+
throw new errors_1.MapError(errors_1.ERR_LIMIT_SIZE, "map entries");
|
|
178
|
+
const parts = [Buffer.from([constants_1.TAG_MAP]), u32be(sorted.length)];
|
|
179
|
+
for (const k of sorted) {
|
|
180
|
+
const kb = Buffer.from(k, "utf8");
|
|
181
|
+
parts.push(Buffer.concat([Buffer.from([constants_1.TAG_STRING]), u32be(kb.length), kb]));
|
|
182
|
+
parts.push(mcfEncodeValue(val[k], depth + 1));
|
|
183
|
+
}
|
|
184
|
+
return Buffer.concat(parts);
|
|
185
|
+
}
|
|
186
|
+
throw new errors_1.MapError(errors_1.ERR_SCHEMA, "unsupported type");
|
|
187
|
+
}
|
|
188
|
+
// ────────── public core API ──────────
|
|
189
|
+
/** Encode a descriptor value to canonical bytes (MAP1 header + MCF body). */
|
|
190
|
+
function canonBytesFromValue(val) {
|
|
191
|
+
const body = mcfEncodeValue(val, 0);
|
|
192
|
+
const canon = Buffer.concat([constants_1.CANON_HDR, body]);
|
|
193
|
+
if (canon.length > constants_1.MAX_CANON_BYTES)
|
|
194
|
+
throw new errors_1.MapError(errors_1.ERR_LIMIT_SIZE, "too big");
|
|
195
|
+
return canon;
|
|
196
|
+
}
|
|
197
|
+
/** Compute MID from pre-formed canonical bytes. Validates header + MCF structure. */
|
|
198
|
+
function midFromCanonBytes(canon) {
|
|
199
|
+
if (canon.length > constants_1.MAX_CANON_BYTES)
|
|
200
|
+
throw new errors_1.MapError(errors_1.ERR_LIMIT_SIZE, "canon exceeds");
|
|
201
|
+
if (canon.subarray(0, constants_1.CANON_HDR.length).compare(constants_1.CANON_HDR) !== 0) {
|
|
202
|
+
throw new errors_1.MapError(errors_1.ERR_CANON_HDR, "bad header");
|
|
203
|
+
}
|
|
204
|
+
let off = constants_1.CANON_HDR.length;
|
|
205
|
+
const res = mcfDecodeOne(canon, off, 0);
|
|
206
|
+
off = res[1];
|
|
207
|
+
if (off !== canon.length)
|
|
208
|
+
throw new errors_1.MapError(errors_1.ERR_CANON_MCF, "trailing bytes");
|
|
209
|
+
return "map1:" + sha256Hex(canon);
|
|
210
|
+
}
|
package/dist/errors.d.ts
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
export declare const ERR_CANON_HDR = "ERR_CANON_HDR";
|
|
2
|
+
export declare const ERR_CANON_MCF = "ERR_CANON_MCF";
|
|
3
|
+
export declare const ERR_SCHEMA = "ERR_SCHEMA";
|
|
4
|
+
export declare const ERR_TYPE = "ERR_TYPE";
|
|
5
|
+
export declare const ERR_UTF8 = "ERR_UTF8";
|
|
6
|
+
export declare const ERR_DUP_KEY = "ERR_DUP_KEY";
|
|
7
|
+
export declare const ERR_KEY_ORDER = "ERR_KEY_ORDER";
|
|
8
|
+
export declare const ERR_LIMIT_DEPTH = "ERR_LIMIT_DEPTH";
|
|
9
|
+
export declare const ERR_LIMIT_SIZE = "ERR_LIMIT_SIZE";
|
|
10
|
+
export declare class MapError extends Error {
|
|
11
|
+
readonly code: string;
|
|
12
|
+
constructor(code: string, msg?: string);
|
|
13
|
+
}
|
package/dist/errors.js
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// MAP v1.0 error codes and MapError class
|
|
3
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
4
|
+
exports.MapError = exports.ERR_LIMIT_SIZE = exports.ERR_LIMIT_DEPTH = exports.ERR_KEY_ORDER = exports.ERR_DUP_KEY = exports.ERR_UTF8 = exports.ERR_TYPE = exports.ERR_SCHEMA = exports.ERR_CANON_MCF = exports.ERR_CANON_HDR = void 0;
|
|
5
|
+
exports.ERR_CANON_HDR = "ERR_CANON_HDR";
|
|
6
|
+
exports.ERR_CANON_MCF = "ERR_CANON_MCF";
|
|
7
|
+
exports.ERR_SCHEMA = "ERR_SCHEMA";
|
|
8
|
+
exports.ERR_TYPE = "ERR_TYPE";
|
|
9
|
+
exports.ERR_UTF8 = "ERR_UTF8";
|
|
10
|
+
exports.ERR_DUP_KEY = "ERR_DUP_KEY";
|
|
11
|
+
exports.ERR_KEY_ORDER = "ERR_KEY_ORDER";
|
|
12
|
+
exports.ERR_LIMIT_DEPTH = "ERR_LIMIT_DEPTH";
|
|
13
|
+
exports.ERR_LIMIT_SIZE = "ERR_LIMIT_SIZE";
|
|
14
|
+
class MapError extends Error {
|
|
15
|
+
constructor(code, msg) {
|
|
16
|
+
super(msg || code);
|
|
17
|
+
this.code = code;
|
|
18
|
+
this.name = "MapError";
|
|
19
|
+
}
|
|
20
|
+
}
|
|
21
|
+
exports.MapError = MapError;
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
export { MapError } from "./errors";
|
|
2
|
+
export { ERR_CANON_HDR, ERR_CANON_MCF, ERR_SCHEMA, ERR_TYPE, ERR_UTF8, ERR_DUP_KEY, ERR_KEY_ORDER, ERR_LIMIT_DEPTH, ERR_LIMIT_SIZE, } from "./errors";
|
|
3
|
+
export { midFromCanonBytes } from "./core";
|
|
4
|
+
/** Compute the FULL MID from a JS descriptor (plain object/string/array/boolean tree). */
|
|
5
|
+
export declare function midFull(descriptor: Record<string, unknown>): string;
|
|
6
|
+
/** Compute the BIND MID from a JS descriptor + pointer list. */
|
|
7
|
+
export declare function midBind(descriptor: Record<string, unknown>, pointers: string[]): string;
|
|
8
|
+
/** Compute FULL canonical bytes from a JS descriptor. */
|
|
9
|
+
export declare function canonicalBytesFull(descriptor: Record<string, unknown>): Buffer;
|
|
10
|
+
/** Compute BIND canonical bytes from a JS descriptor + pointer list. */
|
|
11
|
+
export declare function canonicalBytesBind(descriptor: Record<string, unknown>, pointers: string[]): Buffer;
|
|
12
|
+
/** Compute FULL MID from raw JSON bytes. Detects duplicate keys after escape resolution. */
|
|
13
|
+
export declare function midFullJson(raw: Buffer): string;
|
|
14
|
+
/** Compute BIND MID from raw JSON bytes + pointer list. */
|
|
15
|
+
export declare function midBindJson(raw: Buffer, pointers: string[]): string;
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// MAP v1.0 — Node/TypeScript package public API
|
|
3
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
4
|
+
exports.midFromCanonBytes = exports.ERR_LIMIT_SIZE = exports.ERR_LIMIT_DEPTH = exports.ERR_KEY_ORDER = exports.ERR_DUP_KEY = exports.ERR_UTF8 = exports.ERR_TYPE = exports.ERR_SCHEMA = exports.ERR_CANON_MCF = exports.ERR_CANON_HDR = exports.MapError = void 0;
|
|
5
|
+
exports.midFull = midFull;
|
|
6
|
+
exports.midBind = midBind;
|
|
7
|
+
exports.canonicalBytesFull = canonicalBytesFull;
|
|
8
|
+
exports.canonicalBytesBind = canonicalBytesBind;
|
|
9
|
+
exports.midFullJson = midFullJson;
|
|
10
|
+
exports.midBindJson = midBindJson;
|
|
11
|
+
var errors_1 = require("./errors");
|
|
12
|
+
Object.defineProperty(exports, "MapError", { enumerable: true, get: function () { return errors_1.MapError; } });
|
|
13
|
+
var errors_2 = require("./errors");
|
|
14
|
+
Object.defineProperty(exports, "ERR_CANON_HDR", { enumerable: true, get: function () { return errors_2.ERR_CANON_HDR; } });
|
|
15
|
+
Object.defineProperty(exports, "ERR_CANON_MCF", { enumerable: true, get: function () { return errors_2.ERR_CANON_MCF; } });
|
|
16
|
+
Object.defineProperty(exports, "ERR_SCHEMA", { enumerable: true, get: function () { return errors_2.ERR_SCHEMA; } });
|
|
17
|
+
Object.defineProperty(exports, "ERR_TYPE", { enumerable: true, get: function () { return errors_2.ERR_TYPE; } });
|
|
18
|
+
Object.defineProperty(exports, "ERR_UTF8", { enumerable: true, get: function () { return errors_2.ERR_UTF8; } });
|
|
19
|
+
Object.defineProperty(exports, "ERR_DUP_KEY", { enumerable: true, get: function () { return errors_2.ERR_DUP_KEY; } });
|
|
20
|
+
Object.defineProperty(exports, "ERR_KEY_ORDER", { enumerable: true, get: function () { return errors_2.ERR_KEY_ORDER; } });
|
|
21
|
+
Object.defineProperty(exports, "ERR_LIMIT_DEPTH", { enumerable: true, get: function () { return errors_2.ERR_LIMIT_DEPTH; } });
|
|
22
|
+
Object.defineProperty(exports, "ERR_LIMIT_SIZE", { enumerable: true, get: function () { return errors_2.ERR_LIMIT_SIZE; } });
|
|
23
|
+
const core_1 = require("./core");
|
|
24
|
+
const json_adapter_1 = require("./json-adapter");
|
|
25
|
+
const projection_1 = require("./projection");
|
|
26
|
+
const errors_3 = require("./errors");
|
|
27
|
+
const errors_4 = require("./errors");
|
|
28
|
+
var core_2 = require("./core");
|
|
29
|
+
Object.defineProperty(exports, "midFromCanonBytes", { enumerable: true, get: function () { return core_2.midFromCanonBytes; } });
|
|
30
|
+
// ────────── Descriptor-based API (JS objects) ──────────
|
|
31
|
+
/** Compute the FULL MID from a JS descriptor (plain object/string/array/boolean tree). */
|
|
32
|
+
function midFull(descriptor) {
|
|
33
|
+
const canon = (0, core_1.canonBytesFromValue)(descriptor);
|
|
34
|
+
return "map1:" + (0, core_1.sha256Hex)(canon);
|
|
35
|
+
}
|
|
36
|
+
/** Compute the BIND MID from a JS descriptor + pointer list. */
|
|
37
|
+
function midBind(descriptor, pointers) {
|
|
38
|
+
const proj = (0, projection_1.bindProject)(descriptor, pointers);
|
|
39
|
+
const canon = (0, core_1.canonBytesFromValue)(proj);
|
|
40
|
+
return "map1:" + (0, core_1.sha256Hex)(canon);
|
|
41
|
+
}
|
|
42
|
+
/** Compute FULL canonical bytes from a JS descriptor. */
|
|
43
|
+
function canonicalBytesFull(descriptor) {
|
|
44
|
+
return (0, core_1.canonBytesFromValue)(descriptor);
|
|
45
|
+
}
|
|
46
|
+
/** Compute BIND canonical bytes from a JS descriptor + pointer list. */
|
|
47
|
+
function canonicalBytesBind(descriptor, pointers) {
|
|
48
|
+
const proj = (0, projection_1.bindProject)(descriptor, pointers);
|
|
49
|
+
return (0, core_1.canonBytesFromValue)(proj);
|
|
50
|
+
}
|
|
51
|
+
// ────────── JSON-STRICT API (raw bytes) ──────────
|
|
52
|
+
/** Compute FULL MID from raw JSON bytes. Detects duplicate keys after escape resolution. */
|
|
53
|
+
function midFullJson(raw) {
|
|
54
|
+
const parsed = (0, json_adapter_1.parseJsonStrictWithDups)(raw);
|
|
55
|
+
const val = (0, json_adapter_1.jsonToCanonValue)(parsed.v);
|
|
56
|
+
const canon = (0, core_1.canonBytesFromValue)(val);
|
|
57
|
+
if (parsed.dupFound)
|
|
58
|
+
throw new errors_3.MapError(errors_4.ERR_DUP_KEY, "dup key");
|
|
59
|
+
if (parsed.surrogateFound)
|
|
60
|
+
throw new errors_3.MapError(errors_4.ERR_UTF8, "surrogate escape");
|
|
61
|
+
return "map1:" + (0, core_1.sha256Hex)(canon);
|
|
62
|
+
}
|
|
63
|
+
/** Compute BIND MID from raw JSON bytes + pointer list. */
|
|
64
|
+
function midBindJson(raw, pointers) {
|
|
65
|
+
const parsed = (0, json_adapter_1.parseJsonStrictWithDups)(raw);
|
|
66
|
+
const val = (0, json_adapter_1.jsonToCanonValue)(parsed.v);
|
|
67
|
+
const proj = (0, projection_1.bindProject)(val, pointers);
|
|
68
|
+
const canon = (0, core_1.canonBytesFromValue)(proj);
|
|
69
|
+
if (parsed.dupFound)
|
|
70
|
+
throw new errors_3.MapError(errors_4.ERR_DUP_KEY, "dup key");
|
|
71
|
+
if (parsed.surrogateFound)
|
|
72
|
+
throw new errors_3.MapError(errors_4.ERR_UTF8, "surrogate escape");
|
|
73
|
+
return "map1:" + (0, core_1.sha256Hex)(canon);
|
|
74
|
+
}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
interface NumMarker {
|
|
2
|
+
__num__: string;
|
|
3
|
+
}
|
|
4
|
+
type JsonValue = string | boolean | null | NumMarker | JsonValue[] | {
|
|
5
|
+
[k: string]: JsonValue;
|
|
6
|
+
};
|
|
7
|
+
interface ParseResult {
|
|
8
|
+
v: JsonValue;
|
|
9
|
+
dupFound: boolean;
|
|
10
|
+
surrogateFound: boolean;
|
|
11
|
+
}
|
|
12
|
+
declare function parseJsonStrictWithDups(raw: Buffer): ParseResult;
|
|
13
|
+
export declare function jsonToCanonValue(x: JsonValue): unknown;
|
|
14
|
+
export { parseJsonStrictWithDups, ParseResult, JsonValue };
|
|
@@ -0,0 +1,224 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// MAP v1.0 JSON-STRICT adapter
|
|
3
|
+
// Hand-rolled RFC 8259 parser with duplicate-key detection after escape resolution.
|
|
4
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
5
|
+
exports.jsonToCanonValue = jsonToCanonValue;
|
|
6
|
+
exports.parseJsonStrictWithDups = parseJsonStrictWithDups;
|
|
7
|
+
const errors_1 = require("./errors");
|
|
8
|
+
const constants_1 = require("./constants");
|
|
9
|
+
const core_1 = require("./core");
|
|
10
|
+
// ────────── helpers ──────────
|
|
11
|
+
function isWS(ch) {
|
|
12
|
+
return ch === 0x20 || ch === 0x09 || ch === 0x0a || ch === 0x0d;
|
|
13
|
+
}
|
|
14
|
+
function rejectBomStrict(raw) {
|
|
15
|
+
let i = 0;
|
|
16
|
+
while (i < raw.length && isWS(raw[i]))
|
|
17
|
+
i++;
|
|
18
|
+
if (i + 3 <= raw.length && raw[i] === 0xef && raw[i + 1] === 0xbb && raw[i + 2] === 0xbf) {
|
|
19
|
+
throw new errors_1.MapError(errors_1.ERR_SCHEMA, "BOM");
|
|
20
|
+
}
|
|
21
|
+
}
|
|
22
|
+
function utf8DecodeStrict(raw) {
|
|
23
|
+
try {
|
|
24
|
+
const dec = new TextDecoder("utf-8", { fatal: true });
|
|
25
|
+
return dec.decode(raw);
|
|
26
|
+
}
|
|
27
|
+
catch {
|
|
28
|
+
throw new errors_1.MapError(errors_1.ERR_UTF8, "json utf8");
|
|
29
|
+
}
|
|
30
|
+
}
|
|
31
|
+
// ────────── parser ──────────
|
|
32
|
+
function parseJsonStrictWithDups(raw) {
|
|
33
|
+
if (raw.length > constants_1.MAX_CANON_BYTES)
|
|
34
|
+
throw new errors_1.MapError(errors_1.ERR_LIMIT_SIZE, "input too big");
|
|
35
|
+
rejectBomStrict(raw);
|
|
36
|
+
const text = utf8DecodeStrict(raw);
|
|
37
|
+
let i = 0;
|
|
38
|
+
let dupFound = false;
|
|
39
|
+
let surrogateFound = false;
|
|
40
|
+
function skipWS() {
|
|
41
|
+
while (i < text.length && /\s/.test(text[i]))
|
|
42
|
+
i++;
|
|
43
|
+
}
|
|
44
|
+
function expect(ch) {
|
|
45
|
+
skipWS();
|
|
46
|
+
if (text[i] !== ch)
|
|
47
|
+
throw new errors_1.MapError(errors_1.ERR_CANON_MCF, "json parse");
|
|
48
|
+
i++;
|
|
49
|
+
}
|
|
50
|
+
function parseString() {
|
|
51
|
+
if (text[i] !== '"')
|
|
52
|
+
throw new errors_1.MapError(errors_1.ERR_CANON_MCF, "json parse");
|
|
53
|
+
i++;
|
|
54
|
+
let out = "";
|
|
55
|
+
let closed = false;
|
|
56
|
+
while (i < text.length) {
|
|
57
|
+
const ch = text[i++];
|
|
58
|
+
if (ch.charCodeAt(0) < 0x20)
|
|
59
|
+
throw new errors_1.MapError(errors_1.ERR_CANON_MCF, "json parse");
|
|
60
|
+
if (ch === '"') {
|
|
61
|
+
closed = true;
|
|
62
|
+
break;
|
|
63
|
+
}
|
|
64
|
+
if (ch === "\\") {
|
|
65
|
+
if (i >= text.length)
|
|
66
|
+
throw new errors_1.MapError(errors_1.ERR_CANON_MCF, "json parse");
|
|
67
|
+
const esc = text[i++];
|
|
68
|
+
if (esc === '"' || esc === "\\" || esc === "/")
|
|
69
|
+
out += esc;
|
|
70
|
+
else if (esc === "b")
|
|
71
|
+
out += "\b";
|
|
72
|
+
else if (esc === "f")
|
|
73
|
+
out += "\f";
|
|
74
|
+
else if (esc === "n")
|
|
75
|
+
out += "\n";
|
|
76
|
+
else if (esc === "r")
|
|
77
|
+
out += "\r";
|
|
78
|
+
else if (esc === "t")
|
|
79
|
+
out += "\t";
|
|
80
|
+
else if (esc === "u") {
|
|
81
|
+
const hex = text.slice(i, i + 4);
|
|
82
|
+
if (!/^[0-9a-fA-F]{4}$/.test(hex))
|
|
83
|
+
throw new errors_1.MapError(errors_1.ERR_CANON_MCF, "json parse");
|
|
84
|
+
const code = parseInt(hex, 16);
|
|
85
|
+
i += 4;
|
|
86
|
+
if (code >= 0xd800 && code <= 0xdfff) {
|
|
87
|
+
surrogateFound = true; // flag, do not append
|
|
88
|
+
}
|
|
89
|
+
else {
|
|
90
|
+
out += String.fromCodePoint(code);
|
|
91
|
+
}
|
|
92
|
+
}
|
|
93
|
+
else {
|
|
94
|
+
throw new errors_1.MapError(errors_1.ERR_CANON_MCF, "json parse");
|
|
95
|
+
}
|
|
96
|
+
}
|
|
97
|
+
else {
|
|
98
|
+
out += ch;
|
|
99
|
+
}
|
|
100
|
+
}
|
|
101
|
+
if (!closed)
|
|
102
|
+
throw new errors_1.MapError(errors_1.ERR_CANON_MCF, "json parse");
|
|
103
|
+
(0, core_1.rejectSurrogatesInString)(out);
|
|
104
|
+
return out;
|
|
105
|
+
}
|
|
106
|
+
function parseValue() {
|
|
107
|
+
skipWS();
|
|
108
|
+
if (i >= text.length)
|
|
109
|
+
throw new errors_1.MapError(errors_1.ERR_CANON_MCF, "json parse");
|
|
110
|
+
const c = text[i];
|
|
111
|
+
if (c === "{")
|
|
112
|
+
return parseObject();
|
|
113
|
+
if (c === "[")
|
|
114
|
+
return parseArray();
|
|
115
|
+
if (c === '"')
|
|
116
|
+
return parseString();
|
|
117
|
+
if (c === "t" && text.startsWith("true", i)) {
|
|
118
|
+
i += 4;
|
|
119
|
+
return true;
|
|
120
|
+
}
|
|
121
|
+
if (c === "f" && text.startsWith("false", i)) {
|
|
122
|
+
i += 5;
|
|
123
|
+
return false;
|
|
124
|
+
}
|
|
125
|
+
if (c === "n" && text.startsWith("null", i)) {
|
|
126
|
+
i += 4;
|
|
127
|
+
return null;
|
|
128
|
+
}
|
|
129
|
+
if (c === "-" || (c >= "0" && c <= "9")) {
|
|
130
|
+
const m = text.slice(i).match(/^-?(0|[1-9]\d*)(\.\d+)?([eE][+-]?\d+)?/);
|
|
131
|
+
if (!m)
|
|
132
|
+
throw new errors_1.MapError(errors_1.ERR_CANON_MCF, "json parse");
|
|
133
|
+
i += m[0].length;
|
|
134
|
+
return { __num__: m[0] };
|
|
135
|
+
}
|
|
136
|
+
throw new errors_1.MapError(errors_1.ERR_CANON_MCF, "json parse");
|
|
137
|
+
}
|
|
138
|
+
function parseObject() {
|
|
139
|
+
const obj = {};
|
|
140
|
+
const seen = new Set();
|
|
141
|
+
i++; // skip {
|
|
142
|
+
skipWS();
|
|
143
|
+
if (text[i] === "}") {
|
|
144
|
+
i++;
|
|
145
|
+
return obj;
|
|
146
|
+
}
|
|
147
|
+
while (true) {
|
|
148
|
+
skipWS();
|
|
149
|
+
const key = parseString();
|
|
150
|
+
(0, core_1.rejectSurrogatesInString)(key);
|
|
151
|
+
if (seen.has(key)) {
|
|
152
|
+
dupFound = true;
|
|
153
|
+
skipWS();
|
|
154
|
+
expect(":");
|
|
155
|
+
parseValue();
|
|
156
|
+
skipWS();
|
|
157
|
+
if (text[i] === "}") {
|
|
158
|
+
i++;
|
|
159
|
+
break;
|
|
160
|
+
}
|
|
161
|
+
expect(",");
|
|
162
|
+
continue;
|
|
163
|
+
}
|
|
164
|
+
seen.add(key);
|
|
165
|
+
skipWS();
|
|
166
|
+
expect(":");
|
|
167
|
+
const val = parseValue();
|
|
168
|
+
obj[key] = val;
|
|
169
|
+
skipWS();
|
|
170
|
+
if (text[i] === "}") {
|
|
171
|
+
i++;
|
|
172
|
+
break;
|
|
173
|
+
}
|
|
174
|
+
expect(",");
|
|
175
|
+
}
|
|
176
|
+
return obj;
|
|
177
|
+
}
|
|
178
|
+
function parseArray() {
|
|
179
|
+
const arr = [];
|
|
180
|
+
i++; // skip [
|
|
181
|
+
skipWS();
|
|
182
|
+
if (text[i] === "]") {
|
|
183
|
+
i++;
|
|
184
|
+
return arr;
|
|
185
|
+
}
|
|
186
|
+
while (true) {
|
|
187
|
+
const v = parseValue();
|
|
188
|
+
arr.push(v);
|
|
189
|
+
skipWS();
|
|
190
|
+
if (text[i] === "]") {
|
|
191
|
+
i++;
|
|
192
|
+
break;
|
|
193
|
+
}
|
|
194
|
+
expect(",");
|
|
195
|
+
}
|
|
196
|
+
return arr;
|
|
197
|
+
}
|
|
198
|
+
const v = parseValue();
|
|
199
|
+
skipWS();
|
|
200
|
+
if (i !== text.length)
|
|
201
|
+
throw new errors_1.MapError(errors_1.ERR_CANON_MCF, "trailing json");
|
|
202
|
+
return { v, dupFound, surrogateFound };
|
|
203
|
+
}
|
|
204
|
+
// ────────── JSON-to-canon value conversion ──────────
|
|
205
|
+
function jsonToCanonValue(x) {
|
|
206
|
+
if (x && typeof x === "object" && "__num__" in x)
|
|
207
|
+
throw new errors_1.MapError(errors_1.ERR_TYPE, "number");
|
|
208
|
+
if (x === null)
|
|
209
|
+
throw new errors_1.MapError(errors_1.ERR_TYPE, "null");
|
|
210
|
+
if (typeof x === "boolean")
|
|
211
|
+
return x ? "true" : "false";
|
|
212
|
+
if (typeof x === "string")
|
|
213
|
+
return x;
|
|
214
|
+
if (Array.isArray(x))
|
|
215
|
+
return x.map(jsonToCanonValue);
|
|
216
|
+
if (typeof x === "object") {
|
|
217
|
+
const out = {};
|
|
218
|
+
for (const k of Object.keys(x)) {
|
|
219
|
+
out[k] = jsonToCanonValue(x[k]);
|
|
220
|
+
}
|
|
221
|
+
return out;
|
|
222
|
+
}
|
|
223
|
+
throw new errors_1.MapError(errors_1.ERR_TYPE, "unknown");
|
|
224
|
+
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export declare function bindProject(descriptor: Record<string, unknown>, pointers: string[]): Record<string, unknown>;
|
|
@@ -0,0 +1,141 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// MAP v1.0 BIND projection (RFC 6901 JSON Pointers)
|
|
3
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
4
|
+
exports.bindProject = bindProject;
|
|
5
|
+
const errors_1 = require("./errors");
|
|
6
|
+
// ────────── JSON Pointer parsing (RFC 6901) ──────────
|
|
7
|
+
// Parse an RFC 6901 JSON Pointer into its reference tokens.
// "" addresses the whole document (empty token list); any other pointer
// must begin with "/". Escapes: "~0" decodes to "~" and "~1" to "/";
// every other occurrence of "~" (including a trailing one) is malformed
// and rejected fail-closed with ERR_SCHEMA.
function parseJsonPointer(ptr) {
    if (ptr === "")
        return [];
    if (ptr[0] !== "/")
        throw new errors_1.MapError(errors_1.ERR_SCHEMA, "pointer must start with /");
    return ptr.slice(1).split("/").map(decodeToken);
}
// Decode one reference token per RFC 6901 §4.
function decodeToken(token) {
    // Any "~" not followed by "0" or "1" (the lookahead also fails for a
    // trailing "~") is an invalid escape.
    if (/~(?![01])/.test(token))
        throw new errors_1.MapError(errors_1.ERR_SCHEMA, "invalid ~ escape in pointer");
    // Replace "~1" before "~0" so that "~01" correctly decodes to "~1"
    // instead of "/" (the order mandated by RFC 6901).
    return token.replace(/~1/g, "/").replace(/~0/g, "~");
}
|
|
35
|
+
// ────────── BIND projection ──────────
|
|
36
|
+
// BIND projection: project a descriptor (the MAP root) down to the
// sub-tree(s) addressed by a set of RFC 6901 JSON Pointers.
//
// Fail-closed throughout: a malformed pointer, a duplicate pointer, an
// unmatched pointer (when any other pointer matched), or any traversal
// through an array raises MapError(ERR_SCHEMA) instead of producing a
// partial result.
//
// descriptor — root object; must be a plain object, never an array
// pointers   — JSON Pointer strings selecting sub-trees to keep
// returns    — a new object containing only the selected sub-trees
//              ({} when no pointer matched; the descriptor itself when
//              the empty pointer "" is present and matched). Grafted
//              sub-trees are shared references into descriptor, not copies.
function bindProject(descriptor, pointers) {
    // Root must be MAP (object, not array)
    if (!descriptor ||
        typeof descriptor !== "object" ||
        Array.isArray(descriptor)) {
        throw new errors_1.MapError(errors_1.ERR_SCHEMA, "descriptor");
    }
    // Duplicate pointers → fail-closed
    const seen = new Set();
    for (const p of pointers) {
        if (seen.has(p))
            throw new errors_1.MapError(errors_1.ERR_SCHEMA, "duplicate pointers");
        seen.add(p);
    }
    // Parse all pointers (fail-closed on parse error)
    const parsed = pointers.map(p => ({ ptr: p, toks: parseJsonPointer(p) }));
    // Determine matches: walk each pointer against the descriptor without
    // building anything yet, recording which pointers resolve.
    let anyMatch = false;
    let anyUnmatched = false;
    const matchedToks = [];
    for (const { ptr, toks } of parsed) {
        if (ptr === "") {
            anyMatch = true;
            continue;
        } // empty pointer always matches MAP root
        let cur = descriptor;
        let ok = true;
        for (const tok of toks) {
            // Stepping into a list is a hard schema error, not a mere mismatch.
            if (Array.isArray(cur))
                throw new errors_1.MapError(errors_1.ERR_SCHEMA, "list traversal");
            if (!cur || typeof cur !== "object") {
                ok = false;
                break;
            }
            if (!(tok in cur)) {
                ok = false;
                break;
            }
            cur = cur[tok];
        }
        if (ok) {
            anyMatch = true;
            matchedToks.push(toks);
        }
        else {
            anyUnmatched = true;
        }
    }
    // Unmatched pointers (fail-closed, with one exception)
    if (!anyMatch)
        return {}; // nothing matched at all → empty projection, not an error
    if (anyUnmatched)
        throw new errors_1.MapError(errors_1.ERR_SCHEMA, "unmatched pointer");
    // Empty pointer "" => FULL-equivalent over MAP root
    if (parsed.some(p => p.ptr === ""))
        return descriptor;
    // Overlapping pointers subsumption: discard those strictly within another.
    // A pointer whose token path has a strict prefix among the matched set is
    // already covered by that shallower pointer.
    function isSubsumed(toks) {
        for (const other of matchedToks) {
            if (other.length < toks.length) {
                let same = true;
                for (let i = 0; i < other.length; i++) {
                    if (toks[i] !== other[i]) {
                        same = false;
                        break;
                    }
                }
                if (same)
                    return true;
            }
        }
        return false;
    }
    const effective = matchedToks.filter(t => !isSubsumed(t));
    // Build projection: re-walk each effective path and graft the value it
    // reaches onto a fresh skeleton of nested output objects.
    const projected = {};
    for (const toks of effective) {
        let cur = descriptor;
        const path = [];
        for (const tok of toks) {
            if (Array.isArray(cur))
                throw new errors_1.MapError(errors_1.ERR_SCHEMA, "list traversal");
            if (!cur || typeof cur !== "object")
                throw new errors_1.MapError(errors_1.ERR_SCHEMA, "cannot traverse");
            if (!(tok in cur))
                throw new errors_1.MapError(errors_1.ERR_SCHEMA, "cannot traverse");
            path.push(tok);
            cur = cur[tok];
        }
        let outCur = projected;
        for (let idx = 0; idx < path.length; idx++) {
            const tok = path[idx];
            if (idx === path.length - 1) {
                // Leaf: graft the matched sub-tree (shared reference, not a copy).
                outCur[tok] = cur;
            }
            else {
                if (!(tok in outCur))
                    outCur[tok] = {};
                // Defensive guard: subsumption removed strict-prefix overlaps,
                // so a previously grafted array should be unreachable here.
                if (Array.isArray(outCur[tok]))
                    throw new errors_1.MapError(errors_1.ERR_SCHEMA, "bind conflict");
                outCur = outCur[tok];
            }
        }
    }
    return projected;
}
|
package/package.json
CHANGED
|
@@ -1,13 +1,34 @@
|
|
|
1
|
-
{
|
|
2
|
-
"name": "@map-protocol/map1",
|
|
3
|
-
"version": "0.0
|
|
4
|
-
"description": "MAP v1: deterministic
|
|
5
|
-
"main": "index.js",
|
|
6
|
-
"
|
|
7
|
-
"
|
|
8
|
-
|
|
9
|
-
"
|
|
10
|
-
"
|
|
11
|
-
|
|
12
|
-
"
|
|
13
|
-
|
|
1
|
+
{
|
|
2
|
+
"name": "@map-protocol/map1",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"description": "MAP v1: deterministic identifiers for structured descriptors",
|
|
5
|
+
"main": "dist/index.js",
|
|
6
|
+
"types": "dist/index.d.ts",
|
|
7
|
+
"files": [
|
|
8
|
+
"dist/**/*.js",
|
|
9
|
+
"dist/**/*.d.ts",
|
|
10
|
+
"LICENSE"
|
|
11
|
+
],
|
|
12
|
+
"scripts": {
|
|
13
|
+
"build": "tsc",
|
|
14
|
+
"test": "node --test tests/*.test.js",
|
|
15
|
+
"test:conformance": "node --test tests/conformance.test.js",
|
|
16
|
+
"test:api": "node --test tests/api.test.js"
|
|
17
|
+
},
|
|
18
|
+
"keywords": [
|
|
19
|
+
"map1",
|
|
20
|
+
"canonical",
|
|
21
|
+
"deterministic",
|
|
22
|
+
"hash",
|
|
23
|
+
"identity",
|
|
24
|
+
"mcf",
|
|
25
|
+
"conformance"
|
|
26
|
+
],
|
|
27
|
+
"license": "MIT",
|
|
28
|
+
"homepage": "https://github.com/map-protocol/map1",
|
|
29
|
+
"engines": {
|
|
30
|
+
"node": ">=18"
|
|
31
|
+
},
|
|
32
|
+
"devDependencies": {},
|
|
33
|
+
"dependencies": {}
|
|
34
|
+
}
|
package/README.md
DELETED
package/index.js
DELETED
|
@@ -1,24 +0,0 @@
|
|
|
1
|
-
throw new Error("map1 v1.0 has not been published yet. See https://github.com/map-protocol/map1");
|
|
2
|
-
```
|
|
3
|
-
|
|
4
|
-
And a file called `README.md` with:
|
|
5
|
-
```
|
|
6
|
-
# map1
|
|
7
|
-
|
|
8
|
-
MAP v1: deterministic identity for structured data.
|
|
9
|
-
|
|
10
|
-
**This is a name reservation. The full package has not been published yet.**
|
|
11
|
-
|
|
12
|
-
See: https://github.com/map-protocol/map1
|
|
13
|
-
```
|
|
14
|
-
|
|
15
|
-
Then back in PowerShell:
|
|
16
|
-
```
|
|
17
|
-
npm login
|
|
18
|
-
```
|
|
19
|
-
|
|
20
|
-
It'll open your browser to sign in to npmjs.com. If you don't have an npm account yet, go to https://www.npmjs.com/signup first and create one.
|
|
21
|
-
|
|
22
|
-
After login succeeds:
|
|
23
|
-
```
|
|
24
|
-
npm publish
|