@gaearon/lex-builder 0.0.13
This diff shows the content of publicly available package versions as published to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +144 -0
- package/dist/filter.d.ts +7 -0
- package/dist/filter.d.ts.map +1 -0
- package/dist/filter.js +30 -0
- package/dist/filter.js.map +1 -0
- package/dist/filtered-indexer.d.ts +2100 -0
- package/dist/filtered-indexer.d.ts.map +1 -0
- package/dist/filtered-indexer.js +56 -0
- package/dist/filtered-indexer.js.map +1 -0
- package/dist/formatter.d.ts +13 -0
- package/dist/formatter.d.ts.map +1 -0
- package/dist/formatter.js +34 -0
- package/dist/formatter.js.map +1 -0
- package/dist/index.d.ts +8 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +16 -0
- package/dist/index.js.map +1 -0
- package/dist/lex-builder.d.ts +36 -0
- package/dist/lex-builder.d.ts.map +1 -0
- package/dist/lex-builder.js +144 -0
- package/dist/lex-builder.js.map +1 -0
- package/dist/lex-def-builder.d.ts +69 -0
- package/dist/lex-def-builder.d.ts.map +1 -0
- package/dist/lex-def-builder.js +734 -0
- package/dist/lex-def-builder.js.map +1 -0
- package/dist/lexicon-directory-indexer.d.ts +11 -0
- package/dist/lexicon-directory-indexer.d.ts.map +1 -0
- package/dist/lexicon-directory-indexer.js +46 -0
- package/dist/lexicon-directory-indexer.js.map +1 -0
- package/dist/polyfill.d.ts +1 -0
- package/dist/polyfill.d.ts.map +1 -0
- package/dist/polyfill.js +7 -0
- package/dist/polyfill.js.map +1 -0
- package/dist/ref-resolver.d.ts +53 -0
- package/dist/ref-resolver.d.ts.map +1 -0
- package/dist/ref-resolver.js +277 -0
- package/dist/ref-resolver.js.map +1 -0
- package/dist/ts-lang.d.ts +6 -0
- package/dist/ts-lang.d.ts.map +1 -0
- package/dist/ts-lang.js +150 -0
- package/dist/ts-lang.js.map +1 -0
- package/dist/util.d.ts +12 -0
- package/dist/util.d.ts.map +1 -0
- package/dist/util.js +72 -0
- package/dist/util.js.map +1 -0
- package/package.json +53 -0
- package/src/filter.ts +41 -0
- package/src/filtered-indexer.test.ts +84 -0
- package/src/filtered-indexer.ts +60 -0
- package/src/formatter.ts +42 -0
- package/src/index.ts +23 -0
- package/src/lex-builder.ts +186 -0
- package/src/lex-def-builder.ts +980 -0
- package/src/lexicon-directory-indexer.ts +52 -0
- package/src/polyfill.ts +7 -0
- package/src/ref-resolver.test.ts +75 -0
- package/src/ref-resolver.ts +368 -0
- package/src/ts-lang.ts +150 -0
- package/src/util.ts +72 -0
- package/tsconfig.build.json +13 -0
- package/tsconfig.json +7 -0
- package/tsconfig.tests.json +9 -0
package/dist/ts-lang.js
ADDED
@@ -0,0 +1,150 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.isJsKeyword = isJsKeyword;
exports.isGlobalIdentifier = isGlobalIdentifier;
exports.isSafeLocalIdentifier = isSafeLocalIdentifier;
exports.isValidJsIdentifier = isValidJsIdentifier;
exports.asNamespaceExport = asNamespaceExport;
/**
 * JavaScript keywords
 * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Lexical_grammar}
 */
const JS_KEYWORDS = new Set([
    'abstract',
    'arguments',
    'as',
    'async',
    'await',
    'boolean',
    'break',
    'byte',
    'case',
    'catch',
    'char',
    'class',
    'const',
    'continue',
    'debugger',
    'default',
    'delete',
    'do',
    'double',
    'else',
    'enum',
    'eval',
    'export',
    'extends',
    'false',
    'final',
    'finally',
    'float',
    'for',
    'from',
    'function',
    'get',
    'goto',
    'if',
    'implements',
    'import',
    'in',
    'instanceof',
    'int',
    'interface',
    'let',
    'long',
    'native',
    'new',
    'null',
    'of',
    'package',
    'private',
    'protected',
    'public',
    'return',
    'set',
    'short',
    'static',
    'super',
    'switch',
    'synchronized',
    'this',
    'throw',
    'throws',
    'transient',
    'true',
    'try',
    'typeof',
    'undefined',
    'using',
    'var',
    'void',
    'volatile',
    'while',
    'with',
    'yield',
]);
function isJsKeyword(word) {
    return JS_KEYWORDS.has(word);
}
// Only important to list var/type names that are likely to be used in the
// generated code files.
const GLOBAL_IDENTIFIERS = new Set([
    // import { l } from "@atproto/lex-schema"
    'l',
    // JS Globals
    'self',
    'globalThis',
    // ESM
    'import',
    // CommonJS
    '__dirname',
    '__filename',
    'require',
    'module',
    'exports',
    // TS Primitives
    'any',
    'bigint',
    'boolean',
    'declare',
    'never',
    'null',
    'number',
    'object',
    'string',
    'symbol',
    'undefined',
    'unknown',
    'void',
    // TS Utility types
    'Record',
    'Partial',
    'Readonly',
    'Pick',
    'Omit',
    'Exclude',
    'Extract',
    'InstanceType',
    'ReturnType',
    'Required',
    'ThisType',
    'Uppercase',
    'Lowercase',
    'Capitalize',
    'Uncapitalize',
]);
function isGlobalIdentifier(word) {
    return GLOBAL_IDENTIFIERS.has(word);
}
function isSafeLocalIdentifier(name) {
    return !isGlobalIdentifier(name) && isValidJsIdentifier(name);
}
function isValidJsIdentifier(name) {
    return (name.length > 0 &&
        !isJsKeyword(name) &&
        /^[a-zA-Z_$][a-zA-Z0-9_$]*$/.test(name));
}
function asNamespaceExport(name) {
    return isValidJsIdentifier(name) ? name : JSON.stringify(name);
}
//# sourceMappingURL=ts-lang.js.map
package/dist/ts-lang.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"ts-lang.js","sourceRoot":"","sources":["../src/ts-lang.ts"],"names":[],"mappings":";;AA+EA,kCAEC;AAkDD,gDAEC;AAED,sDAEC;AAED,kDAMC;AAED,8CAEC;AArJD;;;GAGG;AACH,MAAM,WAAW,GAAG,IAAI,GAAG,CAAC;IAC1B,UAAU;IACV,WAAW;IACX,IAAI;IACJ,OAAO;IACP,OAAO;IACP,SAAS;IACT,OAAO;IACP,MAAM;IACN,MAAM;IACN,OAAO;IACP,MAAM;IACN,OAAO;IACP,OAAO;IACP,UAAU;IACV,UAAU;IACV,SAAS;IACT,QAAQ;IACR,IAAI;IACJ,QAAQ;IACR,MAAM;IACN,MAAM;IACN,MAAM;IACN,QAAQ;IACR,SAAS;IACT,OAAO;IACP,OAAO;IACP,SAAS;IACT,OAAO;IACP,KAAK;IACL,MAAM;IACN,UAAU;IACV,KAAK;IACL,MAAM;IACN,IAAI;IACJ,YAAY;IACZ,QAAQ;IACR,IAAI;IACJ,YAAY;IACZ,KAAK;IACL,WAAW;IACX,KAAK;IACL,MAAM;IACN,QAAQ;IACR,KAAK;IACL,MAAM;IACN,IAAI;IACJ,SAAS;IACT,SAAS;IACT,WAAW;IACX,QAAQ;IACR,QAAQ;IACR,KAAK;IACL,OAAO;IACP,QAAQ;IACR,OAAO;IACP,QAAQ;IACR,cAAc;IACd,MAAM;IACN,OAAO;IACP,QAAQ;IACR,WAAW;IACX,MAAM;IACN,KAAK;IACL,QAAQ;IACR,WAAW;IACX,OAAO;IACP,KAAK;IACL,MAAM;IACN,UAAU;IACV,OAAO;IACP,MAAM;IACN,OAAO;CACR,CAAC,CAAA;AAEF,SAAgB,WAAW,CAAC,IAAY;IACtC,OAAO,WAAW,CAAC,GAAG,CAAC,IAAI,CAAC,CAAA;AAC9B,CAAC;AAED,0EAA0E;AAC1E,wBAAwB;AACxB,MAAM,kBAAkB,GAAG,IAAI,GAAG,CAAC;IACjC,0CAA0C;IAC1C,GAAG;IACH,aAAa;IACb,MAAM;IACN,YAAY;IACZ,MAAM;IACN,QAAQ;IACR,WAAW;IACX,WAAW;IACX,YAAY;IACZ,SAAS;IACT,QAAQ;IACR,SAAS;IACT,gBAAgB;IAChB,KAAK;IACL,QAAQ;IACR,SAAS;IACT,SAAS;IACT,OAAO;IACP,MAAM;IACN,QAAQ;IACR,QAAQ;IACR,QAAQ;IACR,QAAQ;IACR,WAAW;IACX,SAAS;IACT,MAAM;IACN,mBAAmB;IACnB,QAAQ;IACR,SAAS;IACT,UAAU;IACV,MAAM;IACN,MAAM;IACN,SAAS;IACT,SAAS;IACT,cAAc;IACd,YAAY;IACZ,UAAU;IACV,UAAU;IACV,WAAW;IACX,WAAW;IACX,YAAY;IACZ,cAAc;CACf,CAAC,CAAA;AAEF,SAAgB,kBAAkB,CAAC,IAAY;IAC7C,OAAO,kBAAkB,CAAC,GAAG,CAAC,IAAI,CAAC,CAAA;AACrC,CAAC;AAED,SAAgB,qBAAqB,CAAC,IAAY;IAChD,OAAO,CAAC,kBAAkB,CAAC,IAAI,CAAC,IAAI,mBAAmB,CAAC,IAAI,CAAC,CAAA;AAC/D,CAAC;AAED,SAAgB,mBAAmB,CAAC,IAAY;IAC9C,OAAO,CACL,IAAI,CAAC,MAAM,GAAG,CAAC;QACf,CAAC,WAAW,CAAC,IAAI,CAAC;QAClB,4BAA4B,CAAC,IAAI,CAAC,IAAI,CAAC,CACxC,CAAA;AACH,CAAC;AAED,SAAgB,iBAAiB,CAAC,IAAY;IAC5C,OAAO,mBAAmB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAA;AAChE,CAAC","sourcesContent":["/**\n * JavaScript keywords\n * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Lexical_grammar}\n */\nconst JS_KEYWORDS = new Set([\n 'abstract',\n 'arguments',\n 'as',\n 'async',\n 'await',\n 'boolean',\n 'break',\n 'byte',\n 'case',\n 'catch',\n 'char',\n 'class',\n 'const',\n 'continue',\n 'debugger',\n 'default',\n 'delete',\n 'do',\n 'double',\n 'else',\n 'enum',\n 'eval',\n 'export',\n 'extends',\n 'false',\n 'final',\n 'finally',\n 'float',\n 'for',\n 'from',\n 'function',\n 'get',\n 'goto',\n 'if',\n 'implements',\n 'import',\n 'in',\n 'instanceof',\n 'int',\n 'interface',\n 'let',\n 'long',\n 'native',\n 'new',\n 'null',\n 'of',\n 'package',\n 'private',\n 'protected',\n 'public',\n 'return',\n 'set',\n 'short',\n 'static',\n 'super',\n 'switch',\n 'synchronized',\n 'this',\n 'throw',\n 'throws',\n 'transient',\n 'true',\n 'try',\n 'typeof',\n 'undefined',\n 'using',\n 'var',\n 'void',\n 'volatile',\n 'while',\n 'with',\n 'yield',\n])\n\nexport function isJsKeyword(word: string) {\n return JS_KEYWORDS.has(word)\n}\n\n// Only important to list var/type names that are likely to be used in the\n// generated code files.\nconst GLOBAL_IDENTIFIERS = new Set([\n // import { l } from \"@atproto/lex-schema\"\n 'l',\n // JS Globals\n 'self',\n 'globalThis',\n // ESM\n 'import',\n // CommonJS\n '__dirname',\n '__filename',\n 'require',\n 'module',\n 'exports',\n // TS Primitives\n 'any',\n 'bigint',\n 'boolean',\n 'declare',\n 'never',\n 
'null',\n 'number',\n 'object',\n 'string',\n 'symbol',\n 'undefined',\n 'unknown',\n 'void',\n // TS Utility types\n 'Record',\n 'Partial',\n 'Readonly',\n 'Pick',\n 'Omit',\n 'Exclude',\n 'Extract',\n 'InstanceType',\n 'ReturnType',\n 'Required',\n 'ThisType',\n 'Uppercase',\n 'Lowercase',\n 'Capitalize',\n 'Uncapitalize',\n])\n\nexport function isGlobalIdentifier(word: string) {\n return GLOBAL_IDENTIFIERS.has(word)\n}\n\nexport function isSafeLocalIdentifier(name: string) {\n return !isGlobalIdentifier(name) && isValidJsIdentifier(name)\n}\n\nexport function isValidJsIdentifier(name: string) {\n return (\n name.length > 0 &&\n !isJsKeyword(name) &&\n /^[a-zA-Z_$][a-zA-Z0-9_$]*$/.test(name)\n )\n}\n\nexport function asNamespaceExport(name: string) {\n return isValidJsIdentifier(name) ? name : JSON.stringify(name)\n}\n"]}
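For orientation, here is a hedged sketch of what the identifier helpers above return. These functions are internal to the builder (ts-lang is not re-exported from the package entry point), so the bare calls below assume the module is already in scope:

  isJsKeyword('await')              // true: listed in JS_KEYWORDS
  isGlobalIdentifier('Record')      // true: reserved for use in generated code
  isValidJsIdentifier('get-main')   // false: '-' fails the identifier regex
  isSafeLocalIdentifier('profile')  // true: valid identifier, not a global
  asNamespaceExport('defs')         // 'defs' (already a valid identifier)
  asNamespaceExport('get-main')     // '"get-main"' (quoted via JSON.stringify)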
package/dist/util.d.ts
ADDED
@@ -0,0 +1,12 @@
export declare function memoize<T extends (arg: string) => NonNullable<unknown> | null>(fn: T): T;
export declare function startsWithLower(str: string): boolean;
export declare function ucFirst(str: string): string;
export declare function lcFirst(str: string): string;
export declare function toPascalCase(str: string): string;
export declare function toCamelCase(str: string): string;
export declare function toConstantCase(str: string): string;
export declare function toLowerCase(str: string): string;
export declare function toUpperCase(str: string): string;
export declare function asRelativePath(from: string, to: string): string;
export declare function startsWithDigit(str: string): boolean;
//# sourceMappingURL=util.d.ts.map
package/dist/util.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"util.d.ts","sourceRoot":"","sources":["../src/util.ts"],"names":[],"mappings":"AAEA,wBAAgB,OAAO,CAAC,CAAC,SAAS,CAAC,GAAG,EAAE,MAAM,KAAK,WAAW,CAAC,OAAO,CAAC,GAAG,IAAI,EAC5E,EAAE,EAAE,CAAC,GACJ,CAAC,CASH;AAED,wBAAgB,eAAe,CAAC,GAAG,EAAE,MAAM,WAG1C;AAED,wBAAgB,OAAO,CAAC,GAAG,EAAE,MAAM,UAElC;AAED,wBAAgB,OAAO,CAAC,GAAG,EAAE,MAAM,UAElC;AAED,wBAAgB,YAAY,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAEhD;AAED,wBAAgB,WAAW,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAE/C;AAED,wBAAgB,cAAc,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAElD;AAED,wBAAgB,WAAW,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAE/C;AAED,wBAAgB,WAAW,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAE/C;AAeD,wBAAgB,cAAc,CAAC,IAAI,EAAE,MAAM,EAAE,EAAE,EAAE,MAAM,UAKtD;AAED,wBAAgB,eAAe,CAAC,GAAG,EAAE,MAAM,WAG1C"}
package/dist/util.js
ADDED
@@ -0,0 +1,72 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.memoize = memoize;
exports.startsWithLower = startsWithLower;
exports.ucFirst = ucFirst;
exports.lcFirst = lcFirst;
exports.toPascalCase = toPascalCase;
exports.toCamelCase = toCamelCase;
exports.toConstantCase = toConstantCase;
exports.toLowerCase = toLowerCase;
exports.toUpperCase = toUpperCase;
exports.asRelativePath = asRelativePath;
exports.startsWithDigit = startsWithDigit;
const node_path_1 = require("node:path");
function memoize(fn) {
    const cache = new Map();
    return ((arg) => {
        const cached = cache.get(arg);
        if (cached !== undefined)
            return cached;
        const result = fn(arg);
        cache.set(arg, result);
        return result;
    });
}
function startsWithLower(str) {
    const code = str.charCodeAt(0);
    return code >= 97 && code <= 122; // 'a' to 'z'
}
function ucFirst(str) {
    return str.charAt(0).toUpperCase() + str.slice(1);
}
function lcFirst(str) {
    return str.charAt(0).toLowerCase() + str.slice(1);
}
function toPascalCase(str) {
    return extractWords(str).map(toLowerCase).map(ucFirst).join('');
}
function toCamelCase(str) {
    return lcFirst(toPascalCase(str));
}
function toConstantCase(str) {
    return extractWords(str).map(toUpperCase).join('_');
}
function toLowerCase(str) {
    return str.toLowerCase();
}
function toUpperCase(str) {
    return str.toUpperCase();
}
function extractWords(str) {
    const processedStr = str
        .replace(/([a-z0-9])([A-Z])/g, '$1 $2') // split camelCase
        .replace(/([A-Z])([A-Z][a-z])/g, '$1 $2') // split ALLCAPSWords
        .replace(/([0-9])([A-Za-z])/g, '$1 $2') // split number followed by letter
        .replace(/[^a-zA-Z0-9]+/g, ' ') // replace non-alphanumeric with space
        .trim(); // trim leading/trailing spaces
    return processedStr
        ? processedStr.split(/\s+/) // split by spaces
        : []; // Avoid returning [''] for empty strings
}
function asRelativePath(from, to) {
    const relPath = (0, node_path_1.relative)(from, to);
    return relPath.startsWith('./') || relPath.startsWith('../')
        ? relPath
        : `./${relPath}`;
}
function startsWithDigit(str) {
    const code = str.charCodeAt(0);
    return code >= 48 && code <= 57; // '0' to '9'
}
//# sourceMappingURL=util.js.map
package/dist/util.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"util.js","sourceRoot":"","sources":["../src/util.ts"],"names":[],"mappings":";;AAEA,0BAWC;AAED,0CAGC;AAED,0BAEC;AAED,0BAEC;AAED,oCAEC;AAED,kCAEC;AAED,wCAEC;AAED,kCAEC;AAED,kCAEC;AAeD,wCAKC;AAED,0CAGC;AAvED,yCAAoC;AAEpC,SAAgB,OAAO,CACrB,EAAK;IAEL,MAAM,KAAK,GAAG,IAAI,GAAG,EAAuC,CAAA;IAC5D,OAAO,CAAC,CAAC,GAAW,EAAE,EAAE;QACtB,MAAM,MAAM,GAAG,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,CAAA;QAC7B,IAAI,MAAM,KAAK,SAAS;YAAE,OAAO,MAAM,CAAA;QACvC,MAAM,MAAM,GAAG,EAAE,CAAC,GAAG,CAAC,CAAA;QACtB,KAAK,CAAC,GAAG,CAAC,GAAG,EAAE,MAAM,CAAC,CAAA;QACtB,OAAO,MAAM,CAAA;IACf,CAAC,CAAM,CAAA;AACT,CAAC;AAED,SAAgB,eAAe,CAAC,GAAW;IACzC,MAAM,IAAI,GAAG,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAA;IAC9B,OAAO,IAAI,IAAI,EAAE,IAAI,IAAI,IAAI,GAAG,CAAA,CAAC,aAAa;AAChD,CAAC;AAED,SAAgB,OAAO,CAAC,GAAW;IACjC,OAAO,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,GAAG,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;AACnD,CAAC;AAED,SAAgB,OAAO,CAAC,GAAW;IACjC,OAAO,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,GAAG,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;AACnD,CAAC;AAED,SAAgB,YAAY,CAAC,GAAW;IACtC,OAAO,YAAY,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAA;AACjE,CAAC;AAED,SAAgB,WAAW,CAAC,GAAW;IACrC,OAAO,OAAO,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,CAAA;AACnC,CAAC;AAED,SAAgB,cAAc,CAAC,GAAW;IACxC,OAAO,YAAY,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;AACrD,CAAC;AAED,SAAgB,WAAW,CAAC,GAAW;IACrC,OAAO,GAAG,CAAC,WAAW,EAAE,CAAA;AAC1B,CAAC;AAED,SAAgB,WAAW,CAAC,GAAW;IACrC,OAAO,GAAG,CAAC,WAAW,EAAE,CAAA;AAC1B,CAAC;AAED,SAAS,YAAY,CAAC,GAAW;IAC/B,MAAM,YAAY,GAAG,GAAG;SACrB,OAAO,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC,kBAAkB;SACzD,OAAO,CAAC,sBAAsB,EAAE,OAAO,CAAC,CAAC,qBAAqB;SAC9D,OAAO,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC,kCAAkC;SACzE,OAAO,CAAC,gBAAgB,EAAE,GAAG,CAAC,CAAC,sCAAsC;SACrE,IAAI,EAAE,CAAA,CAAC,+BAA+B;IAEzC,OAAO,YAAY;QACjB,CAAC,CAAC,YAAY,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,kBAAkB;QAC9C,CAAC,CAAC,EAAE,CAAA,CAAC,yCAAyC;AAClD,CAAC;AAED,SAAgB,cAAc,CAAC,IAAY,EAAE,EAAU;IACrD,MAAM,OAAO,GAAG,IAAA,oBAAQ,EAAC,IAAI,EAAE,EAAE,CAAC,CAAA;IAClC,OAAO,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,OAAO,CAAC,UAAU,CAAC,KAAK,CAAC;QAC1D,CAAC,CAAC,OAAO;QACT,CAAC,CAAC,KAAK,OAAO,EAAE,CAAA;AACpB,CAAC;AAED,SAAgB,eAAe,CAAC,GAAW;IACzC,MAAM,IAAI,GAAG,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAA;IAC9B,OAAO,IAAI,IAAI,EAAE,IAAI,IAAI,IAAI,EAAE,CAAA,CAAC,aAAa;AAC/C,CAAC","sourcesContent":["import { relative } from 'node:path'\n\nexport function memoize<T extends (arg: string) => NonNullable<unknown> | null>(\n fn: T,\n): T {\n const cache = new Map<string, NonNullable<unknown> | null>()\n return ((arg: string) => {\n const cached = cache.get(arg)\n if (cached !== undefined) return cached\n const result = fn(arg)\n cache.set(arg, result)\n return result\n }) as T\n}\n\nexport function startsWithLower(str: string) {\n const code = str.charCodeAt(0)\n return code >= 97 && code <= 122 // 'a' to 'z'\n}\n\nexport function ucFirst(str: string) {\n return str.charAt(0).toUpperCase() + str.slice(1)\n}\n\nexport function lcFirst(str: string) {\n return str.charAt(0).toLowerCase() + str.slice(1)\n}\n\nexport function toPascalCase(str: string): string {\n return extractWords(str).map(toLowerCase).map(ucFirst).join('')\n}\n\nexport function toCamelCase(str: string): string {\n return lcFirst(toPascalCase(str))\n}\n\nexport function toConstantCase(str: string): string {\n return extractWords(str).map(toUpperCase).join('_')\n}\n\nexport function toLowerCase(str: string): string {\n return str.toLowerCase()\n}\n\nexport function toUpperCase(str: string): string {\n return 
str.toUpperCase()\n}\n\nfunction extractWords(str: string): string[] {\n const processedStr = str\n .replace(/([a-z0-9])([A-Z])/g, '$1 $2') // split camelCase\n .replace(/([A-Z])([A-Z][a-z])/g, '$1 $2') // split ALLCAPSWords\n .replace(/([0-9])([A-Za-z])/g, '$1 $2') // split number followed by letter\n .replace(/[^a-zA-Z0-9]+/g, ' ') // replace non-alphanumeric with space\n .trim() // trim leading/trailing spaces\n\n return processedStr\n ? processedStr.split(/\\s+/) // split by spaces\n : [] // Avoid returning [''] for empty strings\n}\n\nexport function asRelativePath(from: string, to: string) {\n const relPath = relative(from, to)\n return relPath.startsWith('./') || relPath.startsWith('../')\n ? relPath\n : `./${relPath}`\n}\n\nexport function startsWithDigit(str: string) {\n const code = str.charCodeAt(0)\n return code >= 48 && code <= 57 // '0' to '9'\n}\n"]}
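Similarly, a hedged sketch of the case-conversion helpers in util.js, with outputs traced by hand from the code above (these helpers are also internal; the calls assume they are in scope):

  toPascalCase('com.example.getProfile')  // 'ComExampleGetProfile'
  toCamelCase('feed-generator')           // 'feedGenerator'
  toConstantCase('getProfile')            // 'GET_PROFILE'
  asRelativePath('.', 'dist/index.js')    // './dist/index.js' (POSIX paths)
  startsWithDigit('3d')                   // true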
package/package.json
ADDED
@@ -0,0 +1,53 @@
{
  "name": "@gaearon/lex-builder",
  "version": "0.0.13",
  "license": "MIT",
  "description": "TypeScript schema builder for AT Lexicons",
  "keywords": [
    "atproto",
    "lexicon",
    "build",
    "lex"
  ],
  "homepage": "https://atproto.com",
  "repository": {
    "type": "git",
    "url": "https://github.com/bluesky-social/atproto",
    "directory": "packages/lex/lex-builder"
  },
  "files": [
    "./src",
    "./tsconfig.build.json",
    "./tsconfig.tests.json",
    "./tsconfig.json",
    "./dist",
    "./CHANGELOG.md"
  ],
  "sideEffects": false,
  "type": "commonjs",
  "main": "./dist/index.js",
  "types": "./dist/index.d.ts",
  "exports": {
    ".": {
      "types": "./dist/index.d.ts",
      "browser": "./dist/index.js",
      "require": "./dist/index.js",
      "import": "./dist/index.js"
    }
  },
  "dependencies": {
    "@atproto/lex-document": "workspace:^",
    "@atproto/lex-schema": "workspace:^",
    "prettier": "^3.2.5",
    "ts-morph": "^27.0.0",
    "tslib": "^2.8.1"
  },
  "devDependencies": {
    "@ts-morph/common": "^0.28.0",
    "vitest": "^4.0.16"
  },
  "scripts": {
    "build": "tsc --build tsconfig.build.json",
    "test": "vitest run"
  }
}
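Note that the "exports" map above only exposes the package root, so consumers resolve the bare specifier to ./dist/index.js (with types from ./dist/index.d.ts); deep imports of individual dist files are not declared as subpath exports. A minimal sketch (build is the entry point's exported helper, shown at the end of this diff):

  import { build } from '@gaearon/lex-builder' // resolves to ./dist/index.js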
package/src/filter.ts
ADDED
@@ -0,0 +1,41 @@
export type BuildFilterOptions = {
  include?: string | string[]
  exclude?: string | string[]
}

export type Filter = (input: string) => boolean

export function buildFilter(options: BuildFilterOptions): Filter {
  const include = createMatcher(options.include, () => true)
  const exclude = createMatcher(options.exclude, () => false)

  return (id) => include(id) && !exclude(id)
}

function createMatcher(
  pattern: undefined | string | string[],
  fallback: Filter,
): Filter {
  if (!pattern?.length) {
    return fallback
  } else if (Array.isArray(pattern)) {
    return pattern.map(buildMatcher).reduce(combineFilters)
  } else {
    return buildMatcher(pattern)
  }
}

function combineFilters(a: Filter, b: Filter): Filter {
  return (input: string) => a(input) || b(input)
}

function buildMatcher(pattern: string): Filter {
  if (pattern.includes('*')) {
    const regex = new RegExp(
      `^${pattern.replaceAll('.', '\\.').replaceAll('*', '.+')}$`,
    )
    return (input: string) => regex.test(input)
  }

  return (input: string) => pattern === input
}
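A short usage sketch of buildFilter as defined above (the NSIDs are made-up examples): '*' patterns become regular expressions, include patterns are OR-ed together, and exclude wins over include:

  const filter = buildFilter({
    include: ['app.bsky.*', 'com.atproto.repo.getRecord'],
    exclude: 'app.bsky.unspecced.*',
  })

  filter('app.bsky.feed.post')           // true  (matches 'app.bsky.*')
  filter('com.atproto.repo.getRecord')   // true  (exact match)
  filter('app.bsky.unspecced.getConfig') // false (excluded)
  filter('com.atproto.sync.getRepo')     // false (not included)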
package/src/filtered-indexer.test.ts
ADDED
@@ -0,0 +1,84 @@
import { describe, expect, it } from 'vitest'
import { LexiconDocument, LexiconIndexer } from '@atproto/lex-document'
import { FilteredIndexer } from './filtered-indexer.js'

class DummyIndexer implements LexiconIndexer, AsyncIterable<LexiconDocument> {
  readonly docs: Map<string, LexiconDocument>

  constructor(docs: LexiconDocument[]) {
    this.docs = new Map(docs.map((doc) => [doc.id, doc]))
  }

  async get(id: string): Promise<LexiconDocument> {
    const doc = this.docs.get(id)
    if (!doc) {
      throw new Error(`Document not found: ${id}`)
    }
    return doc
  }

  async *[Symbol.asyncIterator]() {
    for (const doc of this.docs.values()) {
      yield doc
    }
  }
}

describe('FilteredIndexer', () => {
  const docs: LexiconDocument[] = [
    {
      lexicon: 1,
      id: 'com.example.alpha',
      defs: {},
    },
    {
      lexicon: 1,
      id: 'com.example.beta',
      defs: {},
    },
    {
      lexicon: 1,
      id: 'org.sample.gamma',
      defs: {},
    },
  ]

  it('yields only filtered documents', async () => {
    const indexer = new DummyIndexer(docs)
    const filter = (id: string) => id.startsWith('com.example.')
    const filteredIndexer = new FilteredIndexer(indexer, filter)

    const yieldedDocs = []
    for await (const doc of filteredIndexer) {
      yieldedDocs.push(doc)
    }

    expect(yieldedDocs).toHaveLength(2)
    expect(yieldedDocs.map((d) => d.id)).toEqual([
      'com.example.alpha',
      'com.example.beta',
    ])
  })

  it('bypasses filter for requested documents', async () => {
    const indexer = new DummyIndexer(docs)
    const filter = (id: string) => id.startsWith('com.example.')
    const filteredIndexer = new FilteredIndexer(indexer, filter)

    // Request a document that would normally be filtered out
    const requestedDoc = await filteredIndexer.get('org.sample.gamma')
    expect(requestedDoc.id).toBe('org.sample.gamma')

    const yieldedDocs = []
    for await (const doc of filteredIndexer) {
      yieldedDocs.push(doc)
    }

    expect(yieldedDocs).toHaveLength(3)
    expect(yieldedDocs.map((d) => d.id)).toEqual([
      'com.example.alpha',
      'com.example.beta',
      'org.sample.gamma',
    ])
  })
})
package/src/filtered-indexer.ts
ADDED
@@ -0,0 +1,60 @@
import { LexiconDocument, LexiconIndexer } from '@atproto/lex-document'
import { Filter } from './filter.js'

/**
 * A lexicon indexer that filters documents based on a provided filter.
 *
 * If a document was filtered out but later requested via `get()`, the filter
 * will be bypassed for that document.
 */
export class FilteredIndexer implements LexiconIndexer, AsyncDisposable {
  protected readonly returned = new Set<string>()

  constructor(
    readonly indexer: LexiconIndexer & AsyncIterable<LexiconDocument>,
    readonly filter: Filter,
  ) {}

  async get(id: string): Promise<LexiconDocument> {
    this.returned.add(id)
    return this.indexer.get(id)
  }

  async *[Symbol.asyncIterator]() {
    const returned = new Set<string>()

    for await (const doc of this.indexer) {
      if (returned.has(doc.id)) {
        // Should never happen
        throw new Error(`Duplicate lexicon document id: ${doc.id}`)
      }

      if (this.returned.has(doc.id) || this.filter(doc.id)) {
        this.returned.add(doc.id)
        returned.add(doc.id)
        yield doc
      }
    }

    // When we yield control back to the caller, there may be requests (.get())
    // for documents that were initially ignored (filtered out). We won't be
    // done iterating until every document that may have been requested when the
    // control was yielded to the caller has been returned.

    let returnedAny: boolean
    do {
      returnedAny = false
      for (const id of this.returned) {
        if (!returned.has(id)) {
          yield await this.indexer.get(id)
          returned.add(id)
          returnedAny = true
        }
      }
    } while (returnedAny)
  }

  async [Symbol.asyncDispose](): Promise<void> {
    await this.indexer[Symbol.asyncDispose]?.()
  }
}
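A hedged sketch of how the two pieces compose: any indexer that is both a LexiconIndexer and async-iterable (e.g. the DummyIndexer from the test above) can be wrapped so that iteration yields only matching documents, while explicit get() calls still resolve and are re-yielded before iteration completes:

  const filtered = new FilteredIndexer(
    new DummyIndexer(docs),
    buildFilter({ include: 'com.example.*' }),
  )

  for await (const doc of filtered) {
    // 'com.example.alpha', 'com.example.beta', plus any document fetched
    // through filtered.get() while iterating, even if the filter rejects it
  }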
package/src/formatter.ts
ADDED
@@ -0,0 +1,42 @@
import { Options as PrettierOptions, format as prettierFormat } from 'prettier'

const DEFAULT_FORMAT_OPTIONS: PrettierOptions = {
  parser: 'typescript',
  tabWidth: 2,
  semi: false,
  singleQuote: true,
  trailingComma: 'all',
}

const DEFAULT_BANNER = `/*
 * THIS FILE WAS GENERATED BY "@atproto/lex". DO NOT EDIT.
 */`

export type FormatterOptions = {
  /** @default false */
  pretty?: boolean | PrettierOptions
  banner?: string
}

export class Formatter {
  readonly banner: string
  readonly prettierOptions: PrettierOptions | null

  constructor(options: FormatterOptions = {}) {
    this.banner = options?.banner ?? DEFAULT_BANNER

    this.prettierOptions =
      options?.pretty === true
        ? DEFAULT_FORMAT_OPTIONS
        : options?.pretty || null
  }

  async format(code: string) {
    const bannerPadding =
      this.banner && !this.banner.endsWith('\n') ? '\n\n' : ''
    const codePretty = this.prettierOptions
      ? await prettierFormat(code, this.prettierOptions)
      : code
    return `${this.banner}${bannerPadding}${codePretty}`
  }
}
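A small usage sketch of the Formatter above: with pretty: true the code is run through prettier with the default options, and the generated-file banner is prepended:

  const formatter = new Formatter({ pretty: true })
  const output = await formatter.format('export const answer=42')
  // output starts with the 'THIS FILE WAS GENERATED' banner, followed by a
  // blank line and the prettier-formatted code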
package/src/index.ts
ADDED
@@ -0,0 +1,23 @@
// Must be first
import './polyfill.js'

import {
  LexBuilder,
  LexBuilderLoadOptions,
  LexBuilderOptions,
  LexBuilderSaveOptions,
} from './lex-builder.js'

export * from './lex-builder.js'
export * from './lex-def-builder.js'
export * from './lexicon-directory-indexer.js'

export type TsProjectBuildOptions = LexBuilderOptions &
  LexBuilderLoadOptions &
  LexBuilderSaveOptions

export async function build(options: TsProjectBuildOptions) {
  const builder = new LexBuilder(options)
  await builder.load(options)
  await builder.save(options)
}