@atcute/lex-cli 2.4.0 → 2.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. package/README.md +107 -10
  2. package/dist/cli.js +10 -168
  3. package/dist/cli.js.map +1 -1
  4. package/dist/codegen.d.ts.map +1 -1
  5. package/dist/codegen.js +76 -78
  6. package/dist/codegen.js.map +1 -1
  7. package/dist/commands/export.d.ts +17 -0
  8. package/dist/commands/export.d.ts.map +1 -0
  9. package/dist/commands/export.js +76 -0
  10. package/dist/commands/export.js.map +1 -0
  11. package/dist/commands/generate.d.ts +17 -0
  12. package/dist/commands/generate.d.ts.map +1 -0
  13. package/dist/commands/generate.js +136 -0
  14. package/dist/commands/generate.js.map +1 -0
  15. package/dist/commands/pull.d.ts +17 -0
  16. package/dist/commands/pull.d.ts.map +1 -0
  17. package/dist/{pull.js → commands/pull.js} +35 -81
  18. package/dist/commands/pull.js.map +1 -0
  19. package/dist/config.d.ts +68 -6
  20. package/dist/config.d.ts.map +1 -1
  21. package/dist/config.js +54 -3
  22. package/dist/config.js.map +1 -1
  23. package/dist/git.d.ts.map +1 -1
  24. package/dist/git.js.map +1 -1
  25. package/dist/index.d.ts +65 -1
  26. package/dist/index.d.ts.map +1 -1
  27. package/dist/index.js.map +1 -1
  28. package/dist/lexicon-loader.d.ts +17 -0
  29. package/dist/lexicon-loader.d.ts.map +1 -0
  30. package/dist/lexicon-loader.js +167 -0
  31. package/dist/lexicon-loader.js.map +1 -0
  32. package/dist/lexicon-metadata.js.map +1 -1
  33. package/dist/pull-sources/atproto.d.ts +17 -0
  34. package/dist/pull-sources/atproto.d.ts.map +1 -0
  35. package/dist/pull-sources/atproto.js +192 -0
  36. package/dist/pull-sources/atproto.js.map +1 -0
  37. package/dist/pull-sources/git.d.ts +15 -0
  38. package/dist/pull-sources/git.d.ts.map +1 -0
  39. package/dist/pull-sources/git.js +80 -0
  40. package/dist/pull-sources/git.js.map +1 -0
  41. package/dist/pull-sources/types.d.ts +16 -0
  42. package/dist/pull-sources/types.d.ts.map +1 -0
  43. package/dist/pull-sources/types.js +2 -0
  44. package/dist/pull-sources/types.js.map +1 -0
  45. package/dist/shared-options.d.ts +6 -0
  46. package/dist/shared-options.d.ts.map +1 -0
  47. package/dist/shared-options.js +11 -0
  48. package/dist/shared-options.js.map +1 -0
  49. package/package.json +12 -9
  50. package/src/cli.ts +9 -210
  51. package/src/codegen.ts +90 -88
  52. package/src/commands/export.ts +106 -0
  53. package/src/commands/generate.ts +170 -0
  54. package/src/{pull.ts → commands/pull.ts} +49 -116
  55. package/src/config.ts +67 -4
  56. package/src/lexicon-loader.ts +201 -0
  57. package/src/pull-sources/atproto.ts +243 -0
  58. package/src/pull-sources/git.ts +103 -0
  59. package/src/pull-sources/types.ts +18 -0
  60. package/src/shared-options.ts +13 -0
  61. package/dist/pull.d.ts +0 -7
  62. package/dist/pull.d.ts.map +0 -1
  63. package/dist/pull.js.map +0 -1
@@ -0,0 +1,106 @@
1
+ import * as fs from 'node:fs/promises';
2
+ import * as path from 'node:path';
3
+
4
+ import type { LexiconDoc } from '@atcute/lexicon-doc';
5
+ import { merge, object } from '@optique/core/constructs';
6
+ import { message } from '@optique/core/message';
7
+ import { type InferValue } from '@optique/core/parser';
8
+ import { command, constant } from '@optique/core/primitives';
9
+ import pc from 'picocolors';
10
+ import prettier from 'prettier';
11
+
12
+ import { loadConfig, type ExportConfig, type NormalizedConfig } from '../config.js';
13
+ import { loadLexicons } from '../lexicon-loader.js';
14
+ import { sharedOptions } from '../shared-options.js';
15
+
16
+ export const exportCommandSchema = command(
17
+ 'export',
18
+ merge(
19
+ object({
20
+ type: constant('export'),
21
+ }),
22
+ sharedOptions,
23
+ ),
24
+ {
25
+ brief: message`export lexicon documents as JSON files`,
26
+ description: message`exports lexicon documents (from JSON or builder files) to JSON format for publishing or distribution.`,
27
+ },
28
+ );
29
+
30
+ export type ExportCommand = InferValue<typeof exportCommandSchema>;
31
+
32
+ /**
33
+ * ensures export configuration is present
34
+ * @param config the normalized config
35
+ * @returns the export config
36
+ */
37
+ const ensureExportConfig = (config: NormalizedConfig): ExportConfig => {
38
+ if (!config.export) {
39
+ console.error(pc.bold(pc.red(`export configuration missing`)));
40
+ process.exit(1);
41
+ }
42
+
43
+ return config.export;
44
+ };
45
+
46
+ /**
47
+ * writes a lexicon document to disk as formatted JSON
48
+ * @param outdir output directory
49
+ * @param nsid the NSID of the lexicon
50
+ * @param doc the lexicon document
51
+ * @param prettierConfig prettier configuration
52
+ */
53
+ const writeLexicon = async (
54
+ outdir: string,
55
+ nsid: string,
56
+ doc: LexiconDoc,
57
+ prettierConfig: prettier.Options | null,
58
+ ): Promise<void> => {
59
+ const nsidPath = nsid.replaceAll('.', '/');
60
+ const target = path.join(outdir, `${nsidPath}.json`);
61
+ const dirname = path.dirname(target);
62
+
63
+ const code = await prettier.format(JSON.stringify(doc, null, 2), {
64
+ ...(prettierConfig ?? {}),
65
+ parser: 'json',
66
+ });
67
+
68
+ await fs.mkdir(dirname, { recursive: true });
69
+ await fs.writeFile(target, code);
70
+ };
71
+
72
+ /**
73
+ * runs the export command to write lexicon documents as JSON files
74
+ * @param args parsed command arguments
75
+ */
76
+ export const runExport = async (args: ExportCommand): Promise<void> => {
77
+ const config = await loadConfig(args.config);
78
+ const exportConfig = ensureExportConfig(config);
79
+
80
+ // use export.files if specified, otherwise fall back to root files config
81
+ const files = exportConfig.files ?? config.files;
82
+ const outdir = path.resolve(config.root, exportConfig.outdir);
83
+ const prettierConfig = await prettier.resolveConfig(config.root, { editorconfig: true });
84
+
85
+ // load lexicons from files
86
+ const loaded = await loadLexicons(files, config.root);
87
+
88
+ if (loaded.length === 0) {
89
+ console.warn(pc.yellow(`warning: no lexicons found to export`));
90
+ return;
91
+ }
92
+
93
+ // clean output directory if requested
94
+ if (exportConfig.clean) {
95
+ await fs.rm(outdir, { recursive: true, force: true });
96
+ }
97
+
98
+ await fs.mkdir(outdir, { recursive: true });
99
+
100
+ // write each lexicon as JSON
101
+ for (const { nsid, doc } of loaded) {
102
+ await writeLexicon(outdir, nsid, doc, prettierConfig);
103
+ }
104
+
105
+ console.log(pc.green(`exported ${loaded.length} lexicon(s) to ${outdir}`));
106
+ };
@@ -0,0 +1,170 @@
1
+ import * as fs from 'node:fs/promises';
2
+ import * as path from 'node:path';
3
+
4
+ import { merge, object } from '@optique/core/constructs';
5
+ import { message } from '@optique/core/message';
6
+ import { type InferValue } from '@optique/core/parser';
7
+ import { command, constant } from '@optique/core/primitives';
8
+ import pc from 'picocolors';
9
+
10
+ import { generateLexiconApi, type ImportMapping } from '../codegen.js';
11
+ import { loadConfig } from '../config.js';
12
+ import { loadLexicons } from '../lexicon-loader.js';
13
+ import { packageJsonSchema } from '../lexicon-metadata.js';
14
+ import { sharedOptions } from '../shared-options.js';
15
+
16
+ /**
17
+ * resolves package imports to ImportMapping[]
18
+ */
19
+ const resolveImportsToMappings = async (
20
+ imports: string[],
21
+ configDirname: string,
22
+ ): Promise<ImportMapping[]> => {
23
+ const mappings: ImportMapping[] = [];
24
+
25
+ for (const packageName of imports) {
26
+ // walk up from config directory to find package in node_modules
27
+ let packageJson: unknown;
28
+ let currentDir = configDirname;
29
+ let found = false;
30
+
31
+ while (currentDir !== path.dirname(currentDir)) {
32
+ const candidatePath = path.join(currentDir, 'node_modules', packageName, 'package.json');
33
+ try {
34
+ const content = await fs.readFile(candidatePath, 'utf8');
35
+ packageJson = JSON.parse(content);
36
+ found = true;
37
+ break;
38
+ } catch (err: any) {
39
+ // only continue to parent if file not found
40
+ if (err.code !== 'ENOENT') {
41
+ console.error(pc.bold(pc.red(`failed to read package.json for "${packageName}":`)));
42
+ console.error(err);
43
+ process.exit(1);
44
+ }
45
+
46
+ // not found, try parent directory
47
+ currentDir = path.dirname(currentDir);
48
+ }
49
+ }
50
+
51
+ if (!found) {
52
+ console.error(pc.bold(pc.red(`failed to resolve package "${packageName}"`)));
53
+ console.error(`Could not find package in node_modules starting from ${configDirname}`);
54
+ process.exit(1);
55
+ }
56
+
57
+ // validate package.json
58
+ const result = packageJsonSchema.try(packageJson, { mode: 'passthrough' });
59
+ if (!result.ok) {
60
+ console.error(pc.bold(pc.red(`invalid atcute:lexicons in "${packageName}":`)));
61
+ console.error(result.message);
62
+
63
+ for (const issue of result.issues) {
64
+ console.log(`- ${issue.code} at .${issue.path.join('.')}`);
65
+ }
66
+
67
+ process.exit(1);
68
+ }
69
+
70
+ const lexicons = result.value['atcute:lexicons'];
71
+ if (!lexicons?.mappings) {
72
+ continue;
73
+ }
74
+
75
+ // convert mapping to ImportMapping[]
76
+ for (const [pattern, entry] of Object.entries(lexicons.mappings)) {
77
+ const isWildcard = pattern.endsWith('.*');
78
+
79
+ mappings.push({
80
+ nsid: [pattern],
81
+ imports: (nsid: string) => {
82
+ // check if pattern matches
83
+ if (isWildcard) {
84
+ if (!nsid.startsWith(pattern.slice(0, -1))) {
85
+ throw new Error(`NSID ${nsid} does not match pattern ${pattern}`);
86
+ }
87
+ } else {
88
+ if (nsid !== pattern) {
89
+ throw new Error(`NSID ${nsid} does not match pattern ${pattern}`);
90
+ }
91
+ }
92
+
93
+ const nsidPrefix = isWildcard ? pattern.slice(0, -2) : pattern;
94
+ const nsidRemainder = isWildcard ? nsid.slice(nsidPrefix.length + 1) : '';
95
+
96
+ let expandedPath = entry.path
97
+ .replaceAll('{{nsid}}', nsid.replaceAll('.', '/'))
98
+ .replaceAll('{{nsid_remainder}}', nsidRemainder.replaceAll('.', '/'))
99
+ .replaceAll('{{nsid_prefix}}', nsidPrefix.replaceAll('.', '/'));
100
+
101
+ if (expandedPath === '.') {
102
+ expandedPath = packageName;
103
+ } else if (expandedPath.startsWith('./')) {
104
+ expandedPath = `${packageName}/${expandedPath.slice(2)}`;
105
+ }
106
+
107
+ return {
108
+ type: entry.type,
109
+ from: expandedPath,
110
+ };
111
+ },
112
+ });
113
+ }
114
+ }
115
+
116
+ return mappings;
117
+ };
118
+
119
+ export const generateCommandSchema = command(
120
+ 'generate',
121
+ merge(
122
+ object({
123
+ type: constant('generate'),
124
+ }),
125
+ sharedOptions,
126
+ ),
127
+ {
128
+ brief: message`generate type definitions from lexicon documents`,
129
+ description: message`reads lexicon documents from the configured files and generates TypeScript type definitions and runtime validators.`,
130
+ },
131
+ );
132
+
133
+ export type GenerateCommand = InferValue<typeof generateCommandSchema>;
134
+
135
+ /**
136
+ * runs the generate command to create type definitions from lexicon documents
137
+ * @param args parsed command arguments
138
+ */
139
+ export const runGenerate = async (args: GenerateCommand): Promise<void> => {
140
+ const config = await loadConfig(args.config);
141
+
142
+ // resolve imports to mappings
143
+ const importMappings = config.imports ? await resolveImportsToMappings(config.imports, config.root) : [];
144
+ const allMappings = [...importMappings, ...(config.mappings ?? [])];
145
+
146
+ // load lexicons from files
147
+ const loaded = await loadLexicons(config.files, config.root);
148
+ const documents = loaded.map((l) => l.doc);
149
+
150
+ const generationResult = await generateLexiconApi({
151
+ documents: documents,
152
+ mappings: allMappings,
153
+ modules: {
154
+ importSuffix: config.modules?.importSuffix ?? '.js',
155
+ },
156
+ prettier: {
157
+ cwd: process.cwd(),
158
+ },
159
+ });
160
+
161
+ const outdir = path.join(config.root, config.outdir);
162
+
163
+ for (const file of generationResult.files) {
164
+ const filename = path.join(outdir, file.filename);
165
+ const dirname = path.dirname(filename);
166
+
167
+ await fs.mkdir(dirname, { recursive: true });
168
+ await fs.writeFile(filename, file.code);
169
+ }
170
+ };
@@ -1,34 +1,39 @@
1
1
  import * as fs from 'node:fs/promises';
2
- import * as os from 'node:os';
3
2
  import * as path from 'node:path';
4
3
 
5
4
  import { lexiconDoc, refineLexiconDoc, type LexiconDoc } from '@atcute/lexicon-doc';
6
- import prettier from 'prettier';
5
+ import { merge, object } from '@optique/core/constructs';
6
+ import { message } from '@optique/core/message';
7
+ import { type InferValue } from '@optique/core/parser';
8
+ import { command, constant } from '@optique/core/primitives';
7
9
  import pc from 'picocolors';
10
+ import prettier from 'prettier';
8
11
 
9
- import { runGit, GitError } from './git.js';
10
- import type { NormalizedConfig, PullConfig, SourceConfig } from './config.js';
12
+ import { loadConfig, type NormalizedConfig, type PullConfig, type SourceConfig } from '../config.js';
13
+ import { pullAtprotoSource } from '../pull-sources/atproto.js';
14
+ import { pullGitSource } from '../pull-sources/git.js';
15
+ import type { PullResult, PulledLexicon, SourceLocation } from '../pull-sources/types.js';
16
+ import { sharedOptions } from '../shared-options.js';
17
+
18
+ export const pullCommandSchema = command(
19
+ 'pull',
20
+ merge(
21
+ object({
22
+ type: constant('pull'),
23
+ }),
24
+ sharedOptions,
25
+ ),
26
+ {
27
+ brief: message`pull lexicon documents from configured sources`,
28
+ description: message`fetches lexicon documents from configured git repositories and writes them to the output directory.`,
29
+ },
30
+ );
31
+
32
+ export type PullCommand = InferValue<typeof pullCommandSchema>;
11
33
 
12
34
  interface SourceRevision {
13
35
  source: SourceConfig;
14
- rev: string;
15
- }
16
-
17
- interface SourceLocation {
18
- absolutePath: string;
19
- relativePath: string;
20
- sourceDescription: string;
21
- }
22
-
23
- interface PulledLexicon {
24
- nsid: string;
25
- doc: LexiconDoc;
26
- location: SourceLocation;
27
- }
28
-
29
- interface PullResult {
30
- pulled: Map<string, PulledLexicon>;
31
- rev: string;
36
+ rev?: string;
32
37
  }
33
38
 
34
39
  const ensurePullConfig = (config: NormalizedConfig): PullConfig => {
@@ -119,99 +124,13 @@ const writeLexicon = async (
119
124
  await fs.writeFile(target, code);
120
125
  };
121
126
 
122
- /**
123
- * pulls lexicon documents from a git repository source
124
- * @param source git source configuration
125
- * @returns pulled lexicons and commit hash
126
- */
127
- const pullGitSource = async (source: SourceConfig & { type: 'git' }): Promise<PullResult> => {
128
- const tempParent = await fs.mkdtemp(path.join(os.tmpdir(), 'lex-cli-pull-'));
129
-
130
- const cloneDir = path.join(tempParent, 'repo');
131
-
132
- try {
133
- await runGit(
134
- [
135
- 'clone',
136
- '--filter=blob:none',
137
- '--depth',
138
- '1',
139
- '--sparse',
140
- ...(source.ref ? ['--branch', source.ref, '--single-branch'] : []),
141
- source.remote,
142
- cloneDir,
143
- ],
144
- { timeoutMs: 60_000 },
145
- );
146
- } catch (err) {
147
- if (err instanceof GitError) {
148
- console.error(pc.bold(pc.red(`git clone failed for ${source.remote}:`)));
149
- console.error(err.stderr || err.message);
150
- process.exit(1);
151
- }
152
-
153
- throw err;
154
- }
155
-
156
- try {
157
- await runGit(['-C', cloneDir, 'sparse-checkout', 'set', '--no-cone', ...source.pattern], {
158
- timeoutMs: 30_000,
159
- });
160
- } catch (err) {
161
- if (err instanceof GitError) {
162
- console.error(pc.bold(pc.red(`git sparse-checkout failed for ${source.remote}:`)));
163
- console.error(err.stderr || err.message);
164
- process.exit(1);
165
- }
166
-
167
- throw err;
168
- }
169
-
170
- const pulled = new Map<string, PulledLexicon>();
171
-
172
- for await (const filename of fs.glob(source.pattern, { cwd: cloneDir })) {
173
- const absolute = path.join(cloneDir, filename);
174
- const stat = await fs.stat(absolute);
175
-
176
- if (!stat.isFile()) {
177
- continue;
178
- }
179
-
180
- const location: SourceLocation = {
181
- absolutePath: absolute,
182
- relativePath: filename,
183
- sourceDescription: source.remote,
184
- };
185
-
186
- const doc = await parseLexiconFile(location);
187
-
188
- pulled.set(doc.id, { nsid: doc.id, doc, location });
189
- }
190
-
191
- // get the commit hash
192
- let rev: string;
193
- try {
194
- const result = await runGit(['-C', cloneDir, 'rev-parse', 'HEAD'], { timeoutMs: 10_000 });
195
- rev = result.stdout.trim();
196
- } catch (err) {
197
- if (err instanceof GitError) {
198
- console.error(pc.bold(pc.red(`git rev-parse failed for ${source.remote}:`)));
199
- console.error(err.stderr || err.message);
200
- process.exit(1);
201
- }
202
-
203
- throw err;
204
- }
205
-
206
- await fs.rm(tempParent, { recursive: true, force: true });
207
-
208
- return { pulled, rev };
209
- };
210
-
211
127
  const pullSource = async (source: SourceConfig): Promise<PullResult> => {
212
128
  switch (source.type) {
213
129
  case 'git': {
214
- return pullGitSource(source);
130
+ return pullGitSource(source, parseLexiconFile);
131
+ }
132
+ case 'atproto': {
133
+ return pullAtprotoSource(source);
215
134
  }
216
135
  }
217
136
  };
@@ -232,7 +151,19 @@ const writeSourceReadme = async (
232
151
  switch (source.type) {
233
152
  case 'git': {
234
153
  lines.push(`- ${source.remote}${source.ref ? ` (ref: ${source.ref})` : ``}`);
235
- lines.push(` - commit: ${rev}`);
154
+ if (rev) {
155
+ lines.push(` - commit: ${rev}`);
156
+ }
157
+ break;
158
+ }
159
+ case 'atproto': {
160
+ if (source.mode === 'nsids') {
161
+ lines.push(`- atproto (nsids: ${source.nsids.join(', ')})`);
162
+ } else {
163
+ lines.push(
164
+ `- atproto (authority: ${source.authority}${source.pattern ? `, pattern: ${source.pattern.join(', ')}` : ''})`,
165
+ );
166
+ }
236
167
  break;
237
168
  }
238
169
  }
@@ -250,11 +181,13 @@ const writeSourceReadme = async (
250
181
  };
251
182
 
252
183
  /**
253
- * pulls lexicon documents from configured sources and writes them to disk using nsid-based paths.
254
- * @param config normalized lex-cli configuration
184
+ * runs the pull command to fetch lexicon documents from configured sources
185
+ * @param args parsed command arguments
255
186
  */
256
- export const runPull = async (config: NormalizedConfig): Promise<void> => {
187
+ export const runPull = async (args: PullCommand): Promise<void> => {
188
+ const config = await loadConfig(args.config);
257
189
  const pullConfig = ensurePullConfig(config);
190
+
258
191
  const outdir = path.resolve(config.root, pullConfig.outdir);
259
192
  const prettierConfig = await prettier.resolveConfig(config.root, { editorconfig: true });
260
193
 
package/src/config.ts CHANGED
@@ -1,10 +1,12 @@
1
+ import * as fs from 'node:fs/promises';
1
2
  import * as path from 'node:path';
2
3
  import * as url from 'node:url';
3
4
 
4
5
  import * as v from '@badrap/valita';
5
6
  import pc from 'picocolors';
6
7
 
7
- import { isNsid } from '@atcute/lexicons/syntax';
8
+ import { isAtprotoDid } from '@atcute/identity';
9
+ import { isHandle, isNsid } from '@atcute/lexicons/syntax';
8
10
 
9
11
  import type { ImportMapping } from './codegen.js';
10
12
 
@@ -20,7 +22,32 @@ const gitSourceConfigSchema = v.object({
20
22
  .assert((value) => value.length > 0, `must include at least one glob pattern`),
21
23
  });
22
24
 
23
- const sourceConfigSchema = v.union(gitSourceConfigSchema);
25
+ const atprotoNsidsSourceConfigSchema = v.object({
26
+ type: v.literal('atproto'),
27
+ mode: v.literal('nsids'),
28
+ nsids: v
29
+ .array(v.string().assert((value) => isNsid(value), `must be valid nsid`))
30
+ .assert((value) => value.length > 0, `must include at least one nsid`),
31
+ });
32
+
33
+ const atprotoAuthoritySourceConfigSchema = v.object({
34
+ type: v.literal('atproto'),
35
+ mode: v.literal('authority'),
36
+ authority: v
37
+ .string()
38
+ .assert((value) => isHandle(value) || isAtprotoDid(value), `must be a valid at-identifier`),
39
+ pattern: v
40
+ .array(
41
+ v
42
+ .string()
43
+ .assert((value) => isValidLexiconPattern(value), `must be valid nsid or pattern ending with .*`),
44
+ )
45
+ .optional(),
46
+ });
47
+
48
+ const atprotoSourceConfigSchema = v.union(atprotoNsidsSourceConfigSchema, atprotoAuthoritySourceConfigSchema);
49
+
50
+ const sourceConfigSchema = v.union(gitSourceConfigSchema, atprotoSourceConfigSchema);
24
51
 
25
52
  const pullConfigSchema = v.object({
26
53
  outdir: v.string().assert((value) => value.length > 0, `must not be empty`),
@@ -30,9 +57,19 @@ const pullConfigSchema = v.object({
30
57
  .assert((value) => value.length > 0, `must include at least one source`),
31
58
  });
32
59
 
60
+ const exportConfigSchema = v.object({
61
+ outdir: v.string().assert((value) => value.length > 0, `must not be empty`),
62
+ files: v.array(v.string().assert((value) => value.length > 0, `must not be empty`)).optional(),
63
+ clean: v.boolean().optional(),
64
+ });
65
+
33
66
  export type GitSourceConfig = v.Infer<typeof gitSourceConfigSchema>;
67
+ export type AtprotoNsidsSourceConfig = v.Infer<typeof atprotoNsidsSourceConfigSchema>;
68
+ export type AtprotoAuthoritySourceConfig = v.Infer<typeof atprotoAuthoritySourceConfigSchema>;
69
+ export type AtprotoSourceConfig = v.Infer<typeof atprotoSourceConfigSchema>;
34
70
  export type SourceConfig = v.Infer<typeof sourceConfigSchema>;
35
71
  export type PullConfig = v.Infer<typeof pullConfigSchema>;
72
+ export type ExportConfig = v.Infer<typeof exportConfigSchema>;
36
73
 
37
74
  const isValidLexiconPattern = (pattern: string): boolean => {
38
75
  if (pattern.endsWith('.*')) {
@@ -90,6 +127,7 @@ export const lexiconConfigSchema = v.object({
90
127
  .partial()
91
128
  .optional(),
92
129
  pull: pullConfigSchema.optional(),
130
+ export: exportConfigSchema.optional(),
93
131
  });
94
132
 
95
133
  export type LexiconConfig = v.Infer<typeof lexiconConfigSchema>;
@@ -98,8 +136,33 @@ export interface NormalizedConfig extends LexiconConfig {
98
136
  root: string;
99
137
  }
100
138
 
101
- export const loadConfig = async (configPath: string): Promise<NormalizedConfig> => {
102
- const configFilename = path.resolve(configPath);
139
+ export const loadConfig = async (configPath?: string): Promise<NormalizedConfig> => {
140
+ let configFilename: string | undefined;
141
+
142
+ if (configPath) {
143
+ configFilename = path.resolve(configPath);
144
+ } else {
145
+ // try to find lex.config.js or lex.config.ts in the current directory
146
+ const candidates = ['lex.config.js', 'lex.config.ts'];
147
+
148
+ for (const candidate of candidates) {
149
+ const candidatePath = path.resolve(candidate);
150
+ try {
151
+ await fs.access(candidatePath);
152
+ configFilename = candidatePath;
153
+ break;
154
+ } catch {
155
+ // file doesn't exist, try next candidate
156
+ }
157
+ }
158
+
159
+ if (!configFilename) {
160
+ console.error(pc.bold(pc.red(`config file not found`)));
161
+ console.error(`looked for: ${candidates.join(', ')}`);
162
+ process.exit(1);
163
+ }
164
+ }
165
+
103
166
  const configDirname = path.dirname(configFilename);
104
167
 
105
168
  let rawConfig: unknown;