@atcute/lex-cli 2.4.0 → 2.5.0
This diff shows the changes between publicly released versions of this package, as they appear in their public registries, and is provided for informational purposes only.
- package/README.md +40 -8
- package/dist/cli.js +10 -168
- package/dist/cli.js.map +1 -1
- package/dist/codegen.d.ts.map +1 -1
- package/dist/codegen.js +76 -78
- package/dist/codegen.js.map +1 -1
- package/dist/commands/export.d.ts +13 -0
- package/dist/commands/export.d.ts.map +1 -0
- package/dist/commands/export.js +76 -0
- package/dist/commands/export.js.map +1 -0
- package/dist/commands/generate.d.ts +13 -0
- package/dist/commands/generate.d.ts.map +1 -0
- package/dist/commands/generate.js +136 -0
- package/dist/commands/generate.js.map +1 -0
- package/dist/commands/pull.d.ts +13 -0
- package/dist/commands/pull.d.ts.map +1 -0
- package/dist/{pull.js → commands/pull.js} +35 -81
- package/dist/commands/pull.js.map +1 -0
- package/dist/config.d.ts +68 -6
- package/dist/config.d.ts.map +1 -1
- package/dist/config.js +54 -3
- package/dist/config.js.map +1 -1
- package/dist/lexicon-loader.d.ts +17 -0
- package/dist/lexicon-loader.d.ts.map +1 -0
- package/dist/lexicon-loader.js +167 -0
- package/dist/lexicon-loader.js.map +1 -0
- package/dist/pull-sources/atproto.d.ts +9 -0
- package/dist/pull-sources/atproto.d.ts.map +1 -0
- package/dist/pull-sources/atproto.js +192 -0
- package/dist/pull-sources/atproto.js.map +1 -0
- package/dist/pull-sources/git.d.ts +11 -0
- package/dist/pull-sources/git.d.ts.map +1 -0
- package/dist/pull-sources/git.js +80 -0
- package/dist/pull-sources/git.js.map +1 -0
- package/dist/pull-sources/types.d.ts +16 -0
- package/dist/pull-sources/types.d.ts.map +1 -0
- package/dist/pull-sources/types.js +2 -0
- package/dist/pull-sources/types.js.map +1 -0
- package/dist/shared-options.d.ts +6 -0
- package/dist/shared-options.d.ts.map +1 -0
- package/dist/shared-options.js +11 -0
- package/dist/shared-options.js.map +1 -0
- package/package.json +10 -7
- package/src/cli.ts +9 -210
- package/src/codegen.ts +90 -88
- package/src/commands/export.ts +106 -0
- package/src/commands/generate.ts +170 -0
- package/src/{pull.ts → commands/pull.ts} +49 -116
- package/src/config.ts +67 -4
- package/src/lexicon-loader.ts +199 -0
- package/src/pull-sources/atproto.ts +243 -0
- package/src/pull-sources/git.ts +103 -0
- package/src/pull-sources/types.ts +18 -0
- package/src/shared-options.ts +13 -0
- package/dist/pull.d.ts +0 -7
- package/dist/pull.d.ts.map +0 -1
- package/dist/pull.js.map +0 -1
package/src/codegen.ts
CHANGED
@@ -49,6 +49,33 @@ type Literal = string | number | boolean;
 
 const lit: (val: Literal | Literal[]) => string = JSON.stringify;
 
+interface LexPath {
+	nsid: string;
+	defId: string;
+}
+
+const toLexUri = (path: LexPath): string => {
+	const { nsid, defId } = path;
+	return defId === 'main' ? nsid : `${nsid}#${defId}`;
+};
+
+const resolvePath = (from: LexPath, ref: string): LexPath => {
+	const index = ref.indexOf('#');
+
+	// nsid (no hash)
+	if (index === -1) {
+		return { nsid: ref, defId: 'main' };
+	}
+
+	// #defId (local ref)
+	if (index === 0) {
+		return { nsid: from.nsid, defId: ref.slice(1) };
+	}
+
+	// nsid#defId (full ref)
+	return { nsid: ref.slice(0, index), defId: ref.slice(index + 1) };
+};
+
 const resolveExternalImport = (nsid: string, mappings: ImportMapping[]): ImportMapping | undefined => {
 	return mappings.find((mapping) => {
 		return mapping.nsid.some((pattern) => {
@@ -111,7 +138,7 @@ export const generateLexiconApi = async (opts: LexiconApiOptions): Promise<Lexic
 
 	for (const defId of sortedDefIds) {
 		const def = doc.defs[defId];
-		const …
+		const path: LexPath = { nsid: doc.id, defId };
 
 		const camelcased = toCamelCase(defId);
 		const varname = `${camelcased}Schema`;
@@ -119,59 +146,59 @@ export const generateLexiconApi = async (opts: LexiconApiOptions): Promise<Lexic
 
 		let result: string;
 		switch (def.type) {
 			case 'query': {
-				result = generateXrpcQuery(imports, …
+				result = generateXrpcQuery(imports, path, def);
 
 				file.imports += `import type {} from '@atcute/lexicons/ambient';\n`;
 
 				file.ambients += `declare module '@atcute/lexicons/ambient' {\n`;
 				file.ambients += ` interface XRPCQueries {\n`;
-				file.ambients += ` ${lit( …
+				file.ambients += ` ${lit(toLexUri(path))}: ${camelcased}Schema;\n`;
 				file.ambients += ` }\n`;
 				file.ambients += `}`;
 				break;
 			}
 			case 'procedure': {
-				result = generateXrpcProcedure(imports, …
+				result = generateXrpcProcedure(imports, path, def);
 
 				file.imports += `import type {} from '@atcute/lexicons/ambient';\n`;
 
 				file.ambients += `declare module '@atcute/lexicons/ambient' {\n`;
 				file.ambients += ` interface XRPCProcedures {\n`;
-				file.ambients += ` ${lit( …
+				file.ambients += ` ${lit(toLexUri(path))}: ${camelcased}Schema;\n`;
 				file.ambients += ` }\n`;
 				file.ambients += `}`;
 				break;
 			}
 			case 'subscription': {
-				result = generateXrpcSubscription(imports, …
+				result = generateXrpcSubscription(imports, path, def);
 
 				file.imports += `import type {} from '@atcute/lexicons/ambient';\n`;
 
 				file.ambients += `declare module '@atcute/lexicons/ambient' {\n`;
 				file.ambients += ` interface XRPCSubscriptions {\n`;
-				file.ambients += ` ${lit( …
+				file.ambients += ` ${lit(toLexUri(path))}: ${camelcased}Schema;\n`;
 				file.ambients += ` }\n`;
 				file.ambients += `}`;
 				break;
 			}
 			case 'object': {
-				result = generateObject(imports, …
+				result = generateObject(imports, path, def);
 				break;
 			}
 			case 'record': {
-				result = generateRecord(imports, …
+				result = generateRecord(imports, path, def);
 
 				file.imports += `import type {} from '@atcute/lexicons/ambient';\n`;
 
 				file.ambients += `declare module '@atcute/lexicons/ambient' {\n`;
 				file.ambients += ` interface Records {\n`;
-				file.ambients += ` ${lit( …
+				file.ambients += ` ${lit(toLexUri(path))}: ${camelcased}Schema;\n`;
 				file.ambients += ` }\n`;
 				file.ambients += `}`;
 				break;
 			}
 			case 'token': {
-				result = `${PURE} v.literal(${lit( …
+				result = `${PURE} v.literal(${lit(toLexUri(path))})`;
 				break;
 			}
 			case 'permission-set': {
@@ -179,7 +206,7 @@ export const generateLexiconApi = async (opts: LexiconApiOptions): Promise<Lexic
 				continue;
 			}
 			default: {
-				result = generateType(imports, …
+				result = generateType(imports, path, def);
 				break;
 			}
 		}
@@ -372,42 +399,42 @@ export const generateLexiconApi = async (opts: LexiconApiOptions): Promise<Lexic
 	return { files };
 };
 
-const generateXrpcQuery = (imports: ImportSet, …
-	const params = generateXrpcParameters(imports, …
-	const output = generateXrpcBody(imports, …
+const generateXrpcQuery = (imports: ImportSet, path: LexPath, spec: LexXrpcQuery): string => {
+	const params = generateXrpcParameters(imports, path, spec.parameters);
+	const output = generateXrpcBody(imports, path, spec.output);
 
-	return `${PURE} v.query(${lit( …
+	return `${PURE} v.query(${lit(toLexUri(path))}, {\n"params": ${params}, "output": ${output} })`;
 };
 
-const generateXrpcProcedure = (imports: ImportSet, …
-	const params = generateXrpcParameters(imports, …
-	const input = generateXrpcBody(imports, …
-	const output = generateXrpcBody(imports, …
+const generateXrpcProcedure = (imports: ImportSet, path: LexPath, spec: LexXrpcProcedure): string => {
+	const params = generateXrpcParameters(imports, path, spec.parameters);
+	const input = generateXrpcBody(imports, path, spec.input);
+	const output = generateXrpcBody(imports, path, spec.output);
 
-	return `${PURE} v.procedure(${lit( …
+	return `${PURE} v.procedure(${lit(toLexUri(path))}, {\n"params": ${params}, "input": ${input}, "output": ${output} })`;
 };
 
-const generateXrpcSubscription = (imports: ImportSet, …
+const generateXrpcSubscription = (imports: ImportSet, path: LexPath, spec: LexXrpcSubscription): string => {
 	const schema = spec.message?.schema;
 
-	const params = generateXrpcParameters(imports, …
+	const params = generateXrpcParameters(imports, path, spec.parameters);
 
 	let inner = ``;
 
 	inner += `"params": ${params},`;
 
 	if (schema) {
-		const res = generateType(imports, …
+		const res = generateType(imports, path, schema);
 
 		inner += `get "message" () { return ${res} },`;
 	} else {
 		inner += `"message": null,`;
 	}
 
-	return `${PURE} v.subscription(${lit( …
+	return `${PURE} v.subscription(${lit(toLexUri(path))}, {\n${inner}})`;
 };
 
-const generateXrpcBody = (imports: ImportSet, …
+const generateXrpcBody = (imports: ImportSet, path: LexPath, spec: LexXrpcBody | undefined): string => {
 	if (spec === undefined) {
 		return `null`;
 	}
@@ -421,11 +448,11 @@ const generateXrpcBody = (imports: ImportSet, defUri: string, spec: LexXrpcBody
 	inner += `"type": "lex",`;
 
 	if (schema.type === 'object') {
-		const res = generateObject(imports, …
+		const res = generateObject(imports, path, schema, 'none');
 
 		inner += `"schema": ${res},`;
 	} else {
-		const res = generateType(imports, …
+		const res = generateType(imports, path, schema);
 
 		inner += `get "schema" () { return ${res} },`;
 	}
@@ -452,7 +479,7 @@ const generateXrpcBody = (imports: ImportSet, defUri: string, spec: LexXrpcBody
 
 const generateXrpcParameters = (
 	imports: ImportSet,
-	…
+	path: LexPath,
 	spec: LexXrpcParameters | undefined,
 ): string => {
 	if (spec === undefined) {
@@ -489,11 +516,11 @@ const generateXrpcParameters = (
 		properties: transformedProperties ?? originalProperties,
 	};
 
-	return generateObject(imports, …
+	return generateObject(imports, path, mask, 'none');
 };
 
-const generateRecord = (imports: ImportSet, …
-	const schema = generateObject(imports, …
+const generateRecord = (imports: ImportSet, path: LexPath, spec: LexRecord): string => {
+	const schema = generateObject(imports, path, spec.record, 'required');
 
 	let key = `${PURE} v.string()`;
 	if (spec.key) {
@@ -511,7 +538,7 @@ const generateRecord = (imports: ImportSet, defUri: string, spec: LexRecord): st
 
 const generateObject = (
 	imports: ImportSet,
-	…
+	path: LexPath,
 	spec: LexObject,
 	writeType: 'required' | 'optional' | 'none' = 'optional',
 ): string => {
@@ -522,11 +549,11 @@ const generateObject = (
 
 	switch (writeType) {
 		case 'optional': {
-			inner += `"$type": ${PURE} v.optional(${PURE} v.literal(${lit( …
+			inner += `"$type": ${PURE} v.optional(${PURE} v.literal(${lit(toLexUri(path))})),`;
 			break;
 		}
 		case 'required': {
-			inner += `"$type": ${PURE} v.literal(${lit( …
+			inner += `"$type": ${PURE} v.literal(${lit(toLexUri(path))}),`;
 			break;
 		}
 	}
@@ -544,10 +571,10 @@ const generateObject = (
 
 	for (const [prop, propSpec] of sortedEntries) {
 		const lazy = isRefVariant(propSpec.type === 'array' ? propSpec.items : propSpec);
-		const optional = !required.has(prop) && !('default' in propSpec);
+		const optional = !required.has(prop) && !('default' in propSpec && propSpec.default !== undefined);
 		const nulled = nullable.has(prop);
 
-		let call = generateType(imports, …
+		let call = generateType(imports, path, propSpec, lazy);
 
 		if (nulled) {
 			call = `${PURE} v.nullable(${call})`;
@@ -678,62 +705,50 @@ const generateJsdocField = (spec: LexUserType | LexRefVariant | LexUnknown) => {
 	return res;
 };
 
-const generateType = (imports: ImportSet, …
+const generateType = (imports: ImportSet, path: LexPath, spec: LexDefinableField, lazy = false): string => {
 	switch (spec.type) {
 		// LexRefVariant
 		case 'ref': {
-			const …
-
-			if (ref.startsWith('#')) {
-				const id = ref.slice(1);
-
-				return `${toCamelCase(id)}Schema`;
-			} else {
-				const [ns, id = 'main'] = ref.split('#');
-				if (ns === stripHash(defUri)) {
-					return `${toCamelCase(id)}Schema`;
-				}
-
-				imports.add(ns);
+			const refPath = resolvePath(path, spec.ref);
 
-				…
+			if (refPath.nsid === path.nsid) {
+				return `${toCamelCase(refPath.defId)}Schema`;
 			}
+
+			imports.add(refPath.nsid);
+			return `${toTitleCase(refPath.nsid)}.${toCamelCase(refPath.defId)}Schema`;
 		}
 		case 'union': {
-			const …
-				.map((ref) …
-
-
+			const refs = spec.refs
+				.map((ref) => {
+					const refPath = resolvePath(path, ref);
+					return { path: refPath, uri: toLexUri(refPath) };
+				})
+				.sort((a, b) => {
+					if (a.uri < b.uri) {
+						return -1;
 					}
-
-
-					if (ns === stripHash(defUri)) {
-						return `#${id}`;
+					if (a.uri > b.uri) {
+						return 1;
 					}
 
-					return …
+					return 0;
 				})
-				. …
-
-
-
-					const id = ref.slice(1);
-
-					return `${toCamelCase(id)}Schema`;
-				} else {
-					const [ns, id = 'main'] = ref.split('#');
-					imports.add(ns);
+				.map(({ path: refPath }): string => {
+					if (refPath.nsid === path.nsid) {
+						return `${toCamelCase(refPath.defId)}Schema`;
+					}
 
-
-
-
+					imports.add(refPath.nsid);
+					return `${toTitleCase(refPath.nsid)}.${toCamelCase(refPath.defId)}Schema`;
+				});
 
 			return `${PURE} v.variant([${refs.join(', ')}]${spec.closed ? `, true` : ``})`;
 		}
 
 		// LexArray
 		case 'array': {
-			let item = generateType(imports, …
+			let item = generateType(imports, path, spec.items);
 			if (!lazy && (spec.items.type === 'ref' || spec.items.type === 'union')) {
 				item = `(() => { return ${item}; })`;
 			}
@@ -924,19 +939,6 @@ const isRefVariant = (spec: LexDefinableField): spec is LexRefVariant => {
 	return type === 'ref' || type === 'union';
 };
 
-const stripHash = (defUri: string): string => {
-	const index = defUri.indexOf('#');
-	if (index === -1) {
-		return defUri;
-	}
-
-	return defUri.slice(0, index);
-};
-
-const stripMainHash = (defUri: string): string => {
-	return defUri.endsWith('#main') ? defUri.slice(0, -'#main'.length) : defUri;
-};
-
 const toTitleCase = (v: string): string => {
 	v = v.replace(/^([a-z])/gi, (_, g) => g.toUpperCase());
 	v = v.replace(/[.#-]([a-z])/gi, (_, g) => g.toUpperCase());
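Taken together, the codegen.ts changes replace the old string-based defUri / stripHash handling with a structured LexPath ({ nsid, defId }) that is resolved once per definition and only turned back into a lexicon URI at emit time. A minimal sketch of how the new helpers behave, assuming the LexPath, resolvePath, and toLexUri definitions added in the first hunk are in scope (the NSIDs below are illustrative):

// local ref: inherits the NSID of the referencing document
resolvePath({ nsid: 'app.bsky.feed.post', defId: 'main' }, '#replyRef');
// → { nsid: 'app.bsky.feed.post', defId: 'replyRef' }

// bare NSID: points at that document's 'main' definition
resolvePath({ nsid: 'app.bsky.feed.post', defId: 'main' }, 'com.atproto.repo.strongRef');
// → { nsid: 'com.atproto.repo.strongRef', defId: 'main' }

// full ref: NSID plus fragment
resolvePath({ nsid: 'app.bsky.feed.post', defId: 'main' }, 'app.bsky.feed.defs#postView');
// → { nsid: 'app.bsky.feed.defs', defId: 'postView' }

// toLexUri drops the '#main' suffix, matching how lexicon URIs are written
toLexUri({ nsid: 'app.bsky.feed.defs', defId: 'postView' }); // → 'app.bsky.feed.defs#postView'
toLexUri({ nsid: 'app.bsky.feed.post', defId: 'main' });     // → 'app.bsky.feed.post'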
package/src/commands/export.ts
ADDED
@@ -0,0 +1,106 @@
+import * as fs from 'node:fs/promises';
+import * as path from 'node:path';
+
+import type { LexiconDoc } from '@atcute/lexicon-doc';
+import { merge, object } from '@optique/core/constructs';
+import { message } from '@optique/core/message';
+import { type InferValue } from '@optique/core/parser';
+import { command, constant } from '@optique/core/primitives';
+import pc from 'picocolors';
+import prettier from 'prettier';
+
+import { loadConfig, type ExportConfig, type NormalizedConfig } from '../config.js';
+import { loadLexicons } from '../lexicon-loader.js';
+import { sharedOptions } from '../shared-options.js';
+
+export const exportCommandSchema = command(
+	'export',
+	merge(
+		object({
+			type: constant('export'),
+		}),
+		sharedOptions,
+	),
+	{
+		brief: message`export lexicon documents as JSON files`,
+		description: message`exports lexicon documents (from JSON or builder files) to JSON format for publishing or distribution.`,
+	},
+);
+
+export type ExportCommand = InferValue<typeof exportCommandSchema>;
+
+/**
+ * ensures export configuration is present
+ * @param config the normalized config
+ * @returns the export config
+ */
+const ensureExportConfig = (config: NormalizedConfig): ExportConfig => {
+	if (!config.export) {
+		console.error(pc.bold(pc.red(`export configuration missing`)));
+		process.exit(1);
+	}
+
+	return config.export;
+};
+
+/**
+ * writes a lexicon document to disk as formatted JSON
+ * @param outdir output directory
+ * @param nsid the NSID of the lexicon
+ * @param doc the lexicon document
+ * @param prettierConfig prettier configuration
+ */
+const writeLexicon = async (
+	outdir: string,
+	nsid: string,
+	doc: LexiconDoc,
+	prettierConfig: prettier.Options | null,
+): Promise<void> => {
+	const nsidPath = nsid.replaceAll('.', '/');
+	const target = path.join(outdir, `${nsidPath}.json`);
+	const dirname = path.dirname(target);
+
+	const code = await prettier.format(JSON.stringify(doc, null, 2), {
+		...(prettierConfig ?? {}),
+		parser: 'json',
+	});
+
+	await fs.mkdir(dirname, { recursive: true });
+	await fs.writeFile(target, code);
+};
+
+/**
+ * runs the export command to write lexicon documents as JSON files
+ * @param args parsed command arguments
+ */
+export const runExport = async (args: ExportCommand): Promise<void> => {
+	const config = await loadConfig(args.config);
+	const exportConfig = ensureExportConfig(config);
+
+	// use export.files if specified, otherwise fall back to root files config
+	const files = exportConfig.files ?? config.files;
+	const outdir = path.resolve(config.root, exportConfig.outdir);
+	const prettierConfig = await prettier.resolveConfig(config.root, { editorconfig: true });
+
+	// load lexicons from files
+	const loaded = await loadLexicons(files, config.root);
+
+	if (loaded.length === 0) {
+		console.warn(pc.yellow(`warning: no lexicons found to export`));
+		return;
+	}
+
+	// clean output directory if requested
+	if (exportConfig.clean) {
+		await fs.rm(outdir, { recursive: true, force: true });
+	}
+
+	await fs.mkdir(outdir, { recursive: true });
+
+	// write each lexicon as JSON
+	for (const { nsid, doc } of loaded) {
+		await writeLexicon(outdir, nsid, doc, prettierConfig);
+	}
+
+	console.log(pc.green(`exported ${loaded.length} lexicon(s) to ${outdir}`));
+};
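One detail worth calling out from runExport and writeLexicon above: the output layout mirrors the NSID, with each dot becoming a directory level. A small sketch of that path derivation (the outdir and NSID values are illustrative):

import * as path from 'node:path';

// mirrors writeLexicon above: 'com.example.getProfile' lands at <outdir>/com/example/getProfile.json
const outdir = 'dist/lexicons';
const nsid = 'com.example.getProfile';
const target = path.join(outdir, `${nsid.replaceAll('.', '/')}.json`);
// → 'dist/lexicons/com/example/getProfile.json'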
package/src/commands/generate.ts
ADDED
@@ -0,0 +1,170 @@
+import * as fs from 'node:fs/promises';
+import * as path from 'node:path';
+
+import { merge, object } from '@optique/core/constructs';
+import { message } from '@optique/core/message';
+import { type InferValue } from '@optique/core/parser';
+import { command, constant } from '@optique/core/primitives';
+import pc from 'picocolors';
+
+import { generateLexiconApi, type ImportMapping } from '../codegen.js';
+import { loadConfig } from '../config.js';
+import { loadLexicons } from '../lexicon-loader.js';
+import { packageJsonSchema } from '../lexicon-metadata.js';
+import { sharedOptions } from '../shared-options.js';
+
+/**
+ * resolves package imports to ImportMapping[]
+ */
+const resolveImportsToMappings = async (
+	imports: string[],
+	configDirname: string,
+): Promise<ImportMapping[]> => {
+	const mappings: ImportMapping[] = [];
+
+	for (const packageName of imports) {
+		// walk up from config directory to find package in node_modules
+		let packageJson: unknown;
+		let currentDir = configDirname;
+		let found = false;
+
+		while (currentDir !== path.dirname(currentDir)) {
+			const candidatePath = path.join(currentDir, 'node_modules', packageName, 'package.json');
+			try {
+				const content = await fs.readFile(candidatePath, 'utf8');
+				packageJson = JSON.parse(content);
+				found = true;
+				break;
+			} catch (err: any) {
+				// only continue to parent if file not found
+				if (err.code !== 'ENOENT') {
+					console.error(pc.bold(pc.red(`failed to read package.json for "${packageName}":`)));
+					console.error(err);
+					process.exit(1);
+				}
+
+				// not found, try parent directory
+				currentDir = path.dirname(currentDir);
+			}
+		}
+
+		if (!found) {
+			console.error(pc.bold(pc.red(`failed to resolve package "${packageName}"`)));
+			console.error(`Could not find package in node_modules starting from ${configDirname}`);
+			process.exit(1);
+		}
+
+		// validate package.json
+		const result = packageJsonSchema.try(packageJson, { mode: 'passthrough' });
+		if (!result.ok) {
+			console.error(pc.bold(pc.red(`invalid atcute:lexicons in "${packageName}":`)));
+			console.error(result.message);
+
+			for (const issue of result.issues) {
+				console.log(`- ${issue.code} at .${issue.path.join('.')}`);
+			}
+
+			process.exit(1);
+		}
+
+		const lexicons = result.value['atcute:lexicons'];
+		if (!lexicons?.mappings) {
+			continue;
+		}
+
+		// convert mapping to ImportMapping[]
+		for (const [pattern, entry] of Object.entries(lexicons.mappings)) {
+			const isWildcard = pattern.endsWith('.*');
+
+			mappings.push({
+				nsid: [pattern],
+				imports: (nsid: string) => {
+					// check if pattern matches
+					if (isWildcard) {
+						if (!nsid.startsWith(pattern.slice(0, -1))) {
+							throw new Error(`NSID ${nsid} does not match pattern ${pattern}`);
+						}
+					} else {
+						if (nsid !== pattern) {
+							throw new Error(`NSID ${nsid} does not match pattern ${pattern}`);
+						}
+					}
+
+					const nsidPrefix = isWildcard ? pattern.slice(0, -2) : pattern;
+					const nsidRemainder = isWildcard ? nsid.slice(nsidPrefix.length + 1) : '';
+
+					let expandedPath = entry.path
+						.replaceAll('{{nsid}}', nsid.replaceAll('.', '/'))
+						.replaceAll('{{nsid_remainder}}', nsidRemainder.replaceAll('.', '/'))
+						.replaceAll('{{nsid_prefix}}', nsidPrefix.replaceAll('.', '/'));
+
+					if (expandedPath === '.') {
+						expandedPath = packageName;
+					} else if (expandedPath.startsWith('./')) {
+						expandedPath = `${packageName}/${expandedPath.slice(2)}`;
+					}
+
+					return {
+						type: entry.type,
+						from: expandedPath,
+					};
+				},
+			});
+		}
+	}
+
+	return mappings;
+};
+
+export const generateCommandSchema = command(
+	'generate',
+	merge(
+		object({
+			type: constant('generate'),
+		}),
+		sharedOptions,
+	),
+	{
+		brief: message`generate type definitions from lexicon documents`,
+		description: message`reads lexicon documents from the configured files and generates TypeScript type definitions and runtime validators.`,
+	},
+);
+
+export type GenerateCommand = InferValue<typeof generateCommandSchema>;
+
+/**
+ * runs the generate command to create type definitions from lexicon documents
+ * @param args parsed command arguments
+ */
+export const runGenerate = async (args: GenerateCommand): Promise<void> => {
+	const config = await loadConfig(args.config);
+
+	// resolve imports to mappings
+	const importMappings = config.imports ? await resolveImportsToMappings(config.imports, config.root) : [];
+	const allMappings = [...importMappings, ...(config.mappings ?? [])];
+
+	// load lexicons from files
+	const loaded = await loadLexicons(config.files, config.root);
+	const documents = loaded.map((l) => l.doc);
+
+	const generationResult = await generateLexiconApi({
+		documents: documents,
+		mappings: allMappings,
+		modules: {
+			importSuffix: config.modules?.importSuffix ?? '.js',
+		},
+		prettier: {
+			cwd: process.cwd(),
+		},
+	});
+
+	const outdir = path.join(config.root, config.outdir);
+
+	for (const file of generationResult.files) {
+		const filename = path.join(outdir, file.filename);
+		const dirname = path.dirname(filename);
+
+		await fs.mkdir(dirname, { recursive: true });
+		await fs.writeFile(filename, file.code);
+	}
+};
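The placeholder expansion in resolveImportsToMappings is the part that benefits most from a worked example. For a wildcard mapping, {{nsid_prefix}} is the pattern without its trailing '.*', {{nsid_remainder}} is whatever follows that prefix, and {{nsid}} is the full NSID, all with dots turned into slashes; a leading './' in the expanded path is then rewritten into an import specifier rooted at the providing package. A sketch under assumed values (the package name, pattern, and mapping path here are hypothetical, not taken from any real package):

// hypothetical "atcute:lexicons" mapping in some-pkg/package.json:
//   "com.example.*": { "path": "./lexicons/{{nsid_remainder}}.js", ... }
const packageName = 'some-pkg';
const pattern = 'com.example.*';
const nsid = 'com.example.feed.getTimeline';

const nsidPrefix = pattern.slice(0, -2);                  // 'com.example'
const nsidRemainder = nsid.slice(nsidPrefix.length + 1);  // 'feed.getTimeline'

let expandedPath = './lexicons/{{nsid_remainder}}.js'
	.replaceAll('{{nsid}}', nsid.replaceAll('.', '/'))
	.replaceAll('{{nsid_remainder}}', nsidRemainder.replaceAll('.', '/'))
	.replaceAll('{{nsid_prefix}}', nsidPrefix.replaceAll('.', '/'));
// → './lexicons/feed/getTimeline.js'

if (expandedPath.startsWith('./')) {
	expandedPath = `${packageName}/${expandedPath.slice(2)}`;
}
// final import specifier handed to codegen: 'some-pkg/lexicons/feed/getTimeline.js'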