@ryanatkn/gro 0.178.0 → 0.180.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (108)
  1. package/dist/build.task.d.ts +2 -0
  2. package/dist/build.task.d.ts.map +1 -1
  3. package/dist/build.task.js +14 -10
  4. package/dist/build_cache.d.ts +3 -3
  5. package/dist/build_cache.d.ts.map +1 -1
  6. package/dist/build_cache.js +53 -43
  7. package/dist/changeset.task.js +9 -9
  8. package/dist/clean_fs.d.ts +1 -1
  9. package/dist/clean_fs.d.ts.map +1 -1
  10. package/dist/clean_fs.js +3 -4
  11. package/dist/cli.d.ts +4 -4
  12. package/dist/cli.d.ts.map +1 -1
  13. package/dist/cli.js +11 -12
  14. package/dist/deploy.task.d.ts +7 -0
  15. package/dist/deploy.task.d.ts.map +1 -1
  16. package/dist/deploy.task.js +27 -14
  17. package/dist/esbuild_plugin_external_worker.js +1 -1
  18. package/dist/esbuild_plugin_svelte.js +4 -4
  19. package/dist/esbuild_plugin_sveltekit_local_imports.js +2 -2
  20. package/dist/filer.d.ts.map +1 -1
  21. package/dist/filer.js +103 -52
  22. package/dist/format_file.js +1 -1
  23. package/dist/gen.d.ts +1 -1
  24. package/dist/gen.d.ts.map +1 -1
  25. package/dist/gen.js +28 -22
  26. package/dist/gen.task.js +1 -1
  27. package/dist/gro.config.default.js +1 -1
  28. package/dist/gro_config.js +2 -2
  29. package/dist/gro_plugin_gen.js +1 -1
  30. package/dist/gro_plugin_server.js +2 -2
  31. package/dist/gro_plugin_sveltekit_app.d.ts.map +1 -1
  32. package/dist/gro_plugin_sveltekit_app.js +40 -36
  33. package/dist/gro_plugin_sveltekit_library.js +2 -1
  34. package/dist/input_path.d.ts +3 -3
  35. package/dist/input_path.d.ts.map +1 -1
  36. package/dist/input_path.js +16 -14
  37. package/dist/invoke_task.js +2 -2
  38. package/dist/lint.task.js +1 -1
  39. package/dist/loader.js +1 -1
  40. package/dist/modules.js +2 -2
  41. package/dist/package_json.d.ts +4 -4
  42. package/dist/package_json.d.ts.map +1 -1
  43. package/dist/package_json.js +12 -14
  44. package/dist/publish.task.js +6 -6
  45. package/dist/release.task.js +1 -1
  46. package/dist/resolve.task.js +2 -2
  47. package/dist/resolve_specifier.d.ts +1 -1
  48. package/dist/resolve_specifier.d.ts.map +1 -1
  49. package/dist/resolve_specifier.js +5 -4
  50. package/dist/run.task.js +2 -2
  51. package/dist/run_gen.d.ts.map +1 -1
  52. package/dist/run_gen.js +9 -8
  53. package/dist/run_task.js +4 -4
  54. package/dist/source_json.d.ts +2 -2
  55. package/dist/source_json.d.ts.map +1 -1
  56. package/dist/source_json.js +16 -15
  57. package/dist/sveltekit_helpers.js +3 -3
  58. package/dist/sveltekit_shim_env.js +1 -1
  59. package/dist/task.d.ts +1 -1
  60. package/dist/task.d.ts.map +1 -1
  61. package/dist/task.js +4 -4
  62. package/dist/test.task.d.ts.map +1 -1
  63. package/dist/test.task.js +5 -4
  64. package/dist/typecheck.task.js +3 -3
  65. package/dist/upgrade.task.js +4 -4
  66. package/package.json +7 -7
  67. package/src/lib/build.task.ts +15 -10
  68. package/src/lib/build_cache.ts +79 -63
  69. package/src/lib/changeset.task.ts +10 -10
  70. package/src/lib/clean_fs.ts +4 -4
  71. package/src/lib/cli.ts +15 -14
  72. package/src/lib/deploy.task.ts +30 -13
  73. package/src/lib/esbuild_plugin_external_worker.ts +1 -1
  74. package/src/lib/esbuild_plugin_svelte.ts +4 -4
  75. package/src/lib/esbuild_plugin_sveltekit_local_imports.ts +2 -2
  76. package/src/lib/filer.ts +111 -52
  77. package/src/lib/format_file.ts +1 -1
  78. package/src/lib/gen.task.ts +1 -1
  79. package/src/lib/gen.ts +52 -46
  80. package/src/lib/gro.config.default.ts +1 -1
  81. package/src/lib/gro_config.ts +2 -2
  82. package/src/lib/gro_plugin_gen.ts +1 -1
  83. package/src/lib/gro_plugin_server.ts +2 -2
  84. package/src/lib/gro_plugin_sveltekit_app.ts +49 -41
  85. package/src/lib/gro_plugin_sveltekit_library.ts +2 -2
  86. package/src/lib/input_path.ts +20 -21
  87. package/src/lib/invoke_task.ts +2 -2
  88. package/src/lib/lint.task.ts +1 -1
  89. package/src/lib/loader.ts +1 -1
  90. package/src/lib/modules.ts +2 -2
  91. package/src/lib/package_json.ts +16 -20
  92. package/src/lib/publish.task.ts +6 -6
  93. package/src/lib/release.task.ts +1 -1
  94. package/src/lib/resolve.task.ts +2 -2
  95. package/src/lib/resolve_specifier.ts +8 -4
  96. package/src/lib/run.task.ts +2 -2
  97. package/src/lib/run_gen.ts +15 -10
  98. package/src/lib/run_task.ts +4 -4
  99. package/src/lib/source_json.ts +25 -19
  100. package/src/lib/sveltekit_helpers.ts +3 -3
  101. package/src/lib/task.ts +11 -9
  102. package/src/lib/test.task.ts +4 -3
  103. package/src/lib/typecheck.task.ts +3 -3
  104. package/src/lib/upgrade.task.ts +4 -4
  105. package/dist/search_fs.d.ts +0 -26
  106. package/dist/search_fs.d.ts.map +0 -1
  107. package/dist/search_fs.js +0 -52
  108. package/src/lib/search_fs.ts +0 -100

package/src/lib/build_cache.ts CHANGED
@@ -1,16 +1,10 @@
-import {
-	existsSync,
-	mkdirSync,
-	readdirSync,
-	readFileSync,
-	rmSync,
-	statSync,
-	writeFileSync,
-} from 'node:fs';
+import {mkdir, readdir, readFile, rm, stat, writeFile} from 'node:fs/promises';
 import {join} from 'node:path';
 import type {Logger} from '@ryanatkn/belt/log.js';
 import {styleText as st} from 'node:util';
 import {git_current_commit_hash} from '@ryanatkn/belt/git.js';
+import {fs_exists} from '@ryanatkn/belt/fs.js';
+import {map_concurrent} from '@ryanatkn/belt/async.js';
 import {z} from 'zod';
 
 import {to_hash} from './hash.ts';
@@ -89,15 +83,15 @@ export const compute_build_cache_key = async (
  * Loads build cache metadata from .gro/ directory.
  * Invalid or corrupted cache files are automatically deleted.
  */
-export const load_build_cache_metadata = (): BuildCacheMetadata | null => {
+export const load_build_cache_metadata = async (): Promise<BuildCacheMetadata | null> => {
 	const metadata_path = join(paths.build, BUILD_CACHE_METADATA_FILENAME);
 
-	if (!existsSync(metadata_path)) {
+	if (!(await fs_exists(metadata_path))) {
 		return null;
 	}
 
 	try {
-		const contents = readFileSync(metadata_path, 'utf-8');
+		const contents = await readFile(metadata_path, 'utf-8');
 		const parsed = JSON.parse(contents);
 
 		// Validate structure with Zod
@@ -107,7 +101,7 @@ export const load_build_cache_metadata = (): BuildCacheMetadata | null => {
 		if (metadata.version !== BUILD_CACHE_VERSION) {
 			// Clean up stale cache with old schema version
 			try {
-				rmSync(metadata_path, {force: true});
+				await rm(metadata_path, {force: true});
 			} catch {
 				// Ignore cleanup errors
 			}
@@ -119,7 +113,7 @@ export const load_build_cache_metadata = (): BuildCacheMetadata | null => {
 		// Clean up corrupted/invalid cache file
 		// (catches JSON.parse, Zod validation, and version errors)
 		try {
-			rmSync(metadata_path, {force: true});
+			await rm(metadata_path, {force: true});
 		} catch {
 			// Ignore cleanup errors
 		}
@@ -131,13 +125,16 @@ export const load_build_cache_metadata = (): BuildCacheMetadata | null => {
  * Saves build cache metadata to .gro/ directory.
  * Errors are logged but don't fail the build (cache is optional).
  */
-export const save_build_cache_metadata = (metadata: BuildCacheMetadata, log?: Logger): void => {
+export const save_build_cache_metadata = async (
+	metadata: BuildCacheMetadata,
+	log?: Logger,
+): Promise<void> => {
 	try {
 		// Ensure .gro directory exists
-		mkdirSync(paths.build, {recursive: true});
+		await mkdir(paths.build, {recursive: true});
 
 		const metadata_path = join(paths.build, BUILD_CACHE_METADATA_FILENAME);
-		writeFileSync(metadata_path, JSON.stringify(metadata, null, '\t'), 'utf-8');
+		await writeFile(metadata_path, JSON.stringify(metadata, null, '\t'), 'utf-8');
 	} catch (error) {
 		// Cache writes are optional - log warning but don't fail the build
 		log?.warn(
@@ -154,33 +151,38 @@ export const save_build_cache_metadata = (metadata: BuildCacheMetadata, log?: Lo
  */
 export const validate_build_cache = async (metadata: BuildCacheMetadata): Promise<boolean> => {
 	// Verify all tracked output files exist and have matching size
+	// Sequential checks with early return for performance
 	for (const output of metadata.outputs) {
-		if (!existsSync(output.path)) {
+		// eslint-disable-next-line no-await-in-loop
+		if (!(await fs_exists(output.path))) {
 			return false;
 		}
 
 		// Fast negative check: size mismatch = definitely invalid
 		// This avoids expensive file reads and hashing for files that have clearly changed
-		const stats = statSync(output.path);
+		// eslint-disable-next-line no-await-in-loop
+		const stats = await stat(output.path);
 		if (stats.size !== output.size) {
 			return false;
 		}
 	}
 
 	// Size matches for all files - now verify content with cryptographic hashing
-	// Hash all files in parallel for performance
-	const hash_promises = metadata.outputs.map(async (output) => {
-		try {
-			const contents = readFileSync(output.path);
-			const actual_hash = await to_hash(contents);
-			return actual_hash === output.hash;
-		} catch {
-			// File deleted/inaccessible between checks = cache invalid
-			return false;
-		}
-	});
-
-	const results = await Promise.all(hash_promises);
+	// Hash files with controlled concurrency (could be 10k+ files)
+	const results = await map_concurrent(
+		metadata.outputs,
+		async (output) => {
+			try {
+				const contents = await readFile(output.path);
+				const actual_hash = await to_hash(contents);
+				return actual_hash === output.hash;
+			} catch {
+				// File deleted/inaccessible between checks = cache invalid
+				return false;
+			}
+		},
+		20,
+	);
 	return results.every((valid) => valid);
 };
 
@@ -198,7 +200,7 @@ export const is_build_cache_valid = async (
 	git_commit?: string | null,
 ): Promise<boolean> => {
 	// Load existing metadata
-	const metadata = load_build_cache_metadata();
+	const metadata = await load_build_cache_metadata();
 	if (!metadata) {
 		log.debug('No build cache metadata found');
 		return false;
@@ -250,8 +252,12 @@ export const collect_build_outputs = async (
 	const files_to_hash: Array<FileEntry> = [];
 
 	// Recursively collect files
-	const collect_files = (dir: string, relative_base: string, dir_prefix: string): void => {
-		const entries = readdirSync(dir, {withFileTypes: true});
+	const collect_files = async (
+		dir: string,
+		relative_base: string,
+		dir_prefix: string,
+	): Promise<void> => {
+		const entries = await readdir(dir, {withFileTypes: true});
 
 		for (const entry of entries) {
 			// Skip metadata file itself
@@ -264,7 +270,8 @@ export const collect_build_outputs = async (
 			const cache_key = join(dir_prefix, relative_path);
 
 			if (entry.isDirectory()) {
-				collect_files(full_path, relative_path, dir_prefix);
+				// eslint-disable-next-line no-await-in-loop
+				await collect_files(full_path, relative_path, dir_prefix);
 			} else if (entry.isFile()) {
 				files_to_hash.push({full_path, cache_key});
 			}
@@ -272,19 +279,22 @@ export const collect_build_outputs = async (
 		}
 	};
 
-	// Collect files from all build directories
+	// Collect files from all build directories sequentially
 	for (const build_dir of build_dirs) {
-		if (!existsSync(build_dir)) {
+		// eslint-disable-next-line no-await-in-loop
+		if (!(await fs_exists(build_dir))) {
 			continue; // Skip non-existent directories
 		}
-		collect_files(build_dir, '', build_dir);
+		// eslint-disable-next-line no-await-in-loop
+		await collect_files(build_dir, '', build_dir);
 	}
 
-	// Hash all files in parallel and collect stats
-	const hash_promises = files_to_hash.map(
+	// Hash files with controlled concurrency and collect stats (could be 10k+ files)
+	return map_concurrent(
+		files_to_hash,
 		async ({full_path, cache_key}): Promise<BuildOutputEntry> => {
-			const stats = statSync(full_path);
-			const contents = readFileSync(full_path);
+			const stats = await stat(full_path);
+			const contents = await readFile(full_path);
 			const hash = await to_hash(contents);
 
 			return {
@@ -296,40 +306,46 @@ export const collect_build_outputs = async (
 				mode: stats.mode,
 			};
 		},
+		20,
 	);
-
-	return await Promise.all(hash_promises);
 };
 
 /**
  * Discovers all build output directories in the current working directory.
  * Returns an array of directory names that exist: build/, dist/, dist_*
 */
-export const discover_build_output_dirs = (): Array<string> => {
+export const discover_build_output_dirs = async (): Promise<Array<string>> => {
 	const build_dirs: Array<string> = [];
 
-	// Check for SvelteKit app output (build/)
-	if (existsSync(SVELTEKIT_BUILD_DIRNAME)) {
+	// Check for SvelteKit app output (build/) and library output (dist/) in parallel
+	const [build_exists, dist_exists] = await Promise.all([
+		fs_exists(SVELTEKIT_BUILD_DIRNAME),
+		fs_exists(SVELTEKIT_DIST_DIRNAME),
+	]);
+
+	if (build_exists) {
 		build_dirs.push(SVELTEKIT_BUILD_DIRNAME);
 	}
-
-	// Check for SvelteKit library output (dist/)
-	if (existsSync(SVELTEKIT_DIST_DIRNAME)) {
+	if (dist_exists) {
 		build_dirs.push(SVELTEKIT_DIST_DIRNAME);
 	}
 
 	// Check for server and other plugin outputs (dist_*)
-	const root_entries = readdirSync('.');
-	const dist_dirs = root_entries.filter((p) => {
-		if (!p.startsWith(GRO_DIST_PREFIX)) return false;
-		try {
-			return statSync(p).isDirectory();
-		} catch {
-			// File was deleted/moved during iteration - skip it
-			return false;
-		}
-	});
-	build_dirs.push(...dist_dirs);
+	const root_entries = await readdir('.');
+	const dist_dir_checks = await Promise.all(
+		root_entries
+			.filter((p) => p.startsWith(GRO_DIST_PREFIX))
+			.map(async (p) => {
+				try {
+					const s = await stat(p);
+					return s.isDirectory() ? p : null;
+				} catch {
+					// File was deleted/moved during iteration - skip it
+					return null;
+				}
+			}),
+	);
+	build_dirs.push(...dist_dir_checks.filter((p): p is string => p !== null));
 
 	return build_dirs;
 };
@@ -350,7 +366,7 @@ export const create_build_cache_metadata = async (
 	build_dirs?: Array<string>,
 ): Promise<BuildCacheMetadata> => {
 	const cache_key = await compute_build_cache_key(config, log, git_commit);
-	const dirs = build_dirs ?? discover_build_output_dirs();
+	const dirs = build_dirs ?? (await discover_build_output_dirs());
 	const outputs = await collect_build_outputs(dirs);
 
 	return {
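
The build cache now hashes outputs through `map_concurrent` with a limit of 20 instead of an unbounded `Promise.all`, which keeps the number of open file handles bounded when a project has thousands of build outputs. A minimal sketch of a bounded-concurrency mapper with the `(items, fn, concurrency)` call shape used above; the real helper lives in `@ryanatkn/belt/async.js` and may differ in details such as error handling:

```ts
// Sketch only: map items with at most `concurrency` calls to `fn` in flight at once.
const map_concurrent_sketch = async <T, R>(
	items: Array<T>,
	fn: (item: T) => Promise<R>,
	concurrency: number,
): Promise<Array<R>> => {
	const results: Array<R> = new Array(items.length);
	let next = 0;
	const worker = async (): Promise<void> => {
		while (next < items.length) {
			const i = next++; // claim an index before awaiting so workers never collide
			results[i] = await fn(items[i]); // eslint-disable-line no-await-in-loop
		}
	};
	await Promise.all(Array.from({length: Math.min(concurrency, items.length)}, () => worker()));
	return results;
};
```

With the limit set to 20, as in the hunks above, at most 20 output files are read and hashed at a time while the order of results is preserved.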

package/src/lib/changeset.task.ts CHANGED
@@ -2,9 +2,9 @@ import {z} from 'zod';
 import {spawn} from '@ryanatkn/belt/process.js';
 import {styleText as st} from 'node:util';
 import type {WrittenConfig} from '@changesets/types';
-import {readFile, writeFile} from 'node:fs/promises';
+import {readdir, readFile, writeFile} from 'node:fs/promises';
 import {join} from 'node:path';
-import {existsSync, readdirSync} from 'node:fs';
+import {fs_exists} from '@ryanatkn/belt/fs.js';
 import {
 	GitOrigin,
 	git_check_fully_staged_workspace,
@@ -92,14 +92,14 @@ export const task: Task<Args> = {
 
 		const bump: ChangesetBump = minor ? 'minor' : major ? 'major' : 'patch';
 
-		const found_changeset_cli = find_cli(changeset_cli);
+		const found_changeset_cli = await find_cli(changeset_cli);
 		if (!found_changeset_cli) {
 			throw new TaskError(
 				'changeset command not found: install @changesets/cli locally or globally',
 			);
 		}
 
-		const package_json = load_package_json();
+		const package_json = await load_package_json();
 
 		const has_sveltekit_library_result = await has_sveltekit_library(package_json, svelte_config);
 		if (!has_sveltekit_library_result.ok) {
@@ -110,7 +110,7 @@ export const task: Task<Args> = {
 
 		const path = join(dir, 'config.json');
 
-		const inited = existsSync(path);
+		const inited = await fs_exists(path);
 
 		if (!inited) {
 			await spawn_cli(found_changeset_cli, ['init'], log);
@@ -143,7 +143,7 @@ export const task: Task<Args> = {
 
 		if (message) {
 			// TODO see the helper below, simplify this to CLI flags when support is added to Changesets
-			const changeset_adder = create_changeset_adder(package_json.name, dir, message, bump);
+			const changeset_adder = await create_changeset_adder(package_json.name, dir, message, bump);
 			await spawn_cli(found_changeset_cli, ['add', '--empty'], log);
 			await changeset_adder();
 			if (!(await git_check_fully_staged_workspace())) {
@@ -161,15 +161,15 @@ export const task: Task<Args> = {
 * TODO ideally this wouldn't exist and we'd use CLI flags, but they doesn't exist yet
 * @see https://github.com/changesets/changesets/pull/1121
 */
-const create_changeset_adder = (
+const create_changeset_adder = async (
 	repo_name: string,
 	dir: string,
 	message: string,
 	bump: ChangesetBump,
-) => {
-	const filenames_before = readdirSync(dir);
+): Promise<() => Promise<void>> => {
+	const filenames_before = await readdir(dir);
 	return async () => {
-		const filenames_after = readdirSync(dir);
+		const filenames_after = await readdir(dir);
 		const filenames_added = filenames_after.filter((p) => !filenames_before.includes(p));
 		if (!filenames_added.length) {
 			throw Error('expected to find a new changeset file');
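
Several files in this release replace `existsSync` with `fs_exists` from `@ryanatkn/belt/fs.js`, awaited at every call site shown in these hunks. A minimal sketch of an equivalent promise-based existence check, assuming an implementation on top of `node:fs/promises` (the real helper may differ):

```ts
import {access} from 'node:fs/promises';

// Sketch only: resolves true if the path is accessible, false otherwise.
const fs_exists_sketch = async (path: string): Promise<boolean> => {
	try {
		await access(path);
		return true;
	} catch {
		return false;
	}
};
```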

package/src/lib/clean_fs.ts CHANGED
@@ -1,5 +1,5 @@
-import {rm} from 'node:fs/promises';
-import {readdirSync, type RmOptions} from 'node:fs';
+import {readdir, rm} from 'node:fs/promises';
+import type {RmOptions} from 'node:fs';
 
 import {paths} from './paths.ts';
 import {
@@ -35,8 +35,8 @@ export const clean_fs = async (
 		promises.push(rm(paths.build_dev, rm_options));
 	}
 	if (build || build_dist) {
-		const paths = readdirSync('.').filter((p) => p.startsWith(GRO_DIST_PREFIX));
-		for (const path of paths) {
+		const dir_paths = (await readdir('.')).filter((p) => p.startsWith(GRO_DIST_PREFIX));
+		for (const path of dir_paths) {
 			promises.push(rm(path, rm_options));
 		}
 	}
package/src/lib/cli.ts CHANGED
@@ -1,12 +1,13 @@
-import {spawnSync, type SpawnOptions} from 'node:child_process';
+import type {SpawnOptions} from 'node:child_process';
 import {
 	spawn,
+	spawn_out,
 	spawn_process,
 	type SpawnResult,
 	type SpawnedProcess,
 } from '@ryanatkn/belt/process.js';
 import {join} from 'node:path';
-import {existsSync} from 'node:fs';
+import {fs_exists} from '@ryanatkn/belt/fs.js';
 import {fileURLToPath, type URL} from 'node:url';
 import type {Logger} from '@ryanatkn/belt/log.js';
 import type {PathId} from '@ryanatkn/belt/path.js';
@@ -24,18 +25,18 @@ export type Cli =
  * Searches the filesystem for the CLI `name`, first local to the cwd and then globally.
  * @returns `null` if not found locally or globally
  */
-export const find_cli = (
+export const find_cli = async (
 	name: string,
 	cwd: string | URL = process.cwd(),
 	options?: SpawnOptions,
-): Cli | null => {
+): Promise<Cli | null> => {
 	const final_cwd = typeof cwd === 'string' ? cwd : fileURLToPath(cwd);
 	const local_id = join(final_cwd, NODE_MODULES_DIRNAME, `.bin/${name}`);
-	if (existsSync(local_id)) {
+	if (await fs_exists(local_id)) {
 		return {name, id: local_id, kind: 'local'};
 	}
-	const {stdout} = spawnSync('which', [name], options);
-	const global_id = stdout.toString().trim();
+	const {stdout} = await spawn_out('which', [name], options);
+	const global_id = stdout?.trim();
 	if (!global_id) return null;
 	return {name, id: global_id, kind: 'global'};
 };
@@ -51,7 +52,7 @@ export const spawn_cli = async (
 	log?: Logger,
 	options?: SpawnOptions,
 ): Promise<SpawnResult | undefined> => {
-	const cli = resolve_cli(name_or_cli, args, options?.cwd, log, options);
+	const cli = await resolve_cli(name_or_cli, args, options?.cwd, log, options);
 	if (!cli) return;
 	return spawn(cli.id, args, options);
 };
@@ -61,27 +62,27 @@ export const spawn_cli = async (
 * If a string is provided for `name_or_cli`, it checks first local to the cwd and then globally.
 * @returns `undefined` if no CLI is found, or the spawn result
 */
-export const spawn_cli_process = (
+export const spawn_cli_process = async (
 	name_or_cli: string | Cli,
 	args: Array<string> = [],
 	log?: Logger,
 	options?: SpawnOptions,
-): SpawnedProcess | undefined => {
-	const cli = resolve_cli(name_or_cli, args, options?.cwd, log, options);
+): Promise<SpawnedProcess | undefined> => {
+	const cli = await resolve_cli(name_or_cli, args, options?.cwd, log, options);
 	if (!cli) return;
 	return spawn_process(cli.id, args, options);
 };
 
-export const resolve_cli = (
+export const resolve_cli = async (
 	name_or_cli: string | Cli,
 	args: Array<string> = [],
 	cwd: string | URL | undefined,
 	log?: Logger,
 	options?: SpawnOptions,
-): Cli | undefined => {
+): Promise<Cli | undefined> => {
 	let final_cli;
 	if (typeof name_or_cli === 'string') {
-		const found = find_cli(name_or_cli, cwd, options);
+		const found = await find_cli(name_or_cli, cwd, options);
 		if (!found) return;
 		final_cli = found;
 	} else {
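
Since `find_cli`, `resolve_cli`, and `spawn_cli_process` are now async, every caller has to await them (the changeset and deploy tasks in this release already do). A hypothetical caller as a sketch, importing in the repo's own style and using `eslint` purely as an example CLI name:

```ts
import {find_cli, spawn_cli} from './cli.ts';

// Hypothetical usage: locate a CLI (local node_modules/.bin first, then `which`) and run it.
const cli = await find_cli('eslint');
if (!cli) throw new Error('eslint CLI not found locally or globally');
await spawn_cli(cli, ['--version']);
```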

package/src/lib/deploy.task.ts CHANGED
@@ -2,9 +2,9 @@ import {spawn} from '@ryanatkn/belt/process.js';
 import {print_error} from '@ryanatkn/belt/print.js';
 import {styleText as st} from 'node:util';
 import {z} from 'zod';
-import {cp, mkdir, rm} from 'node:fs/promises';
+import {cp, mkdir, readdir, rm} from 'node:fs/promises';
 import {join, resolve} from 'node:path';
-import {existsSync, readdirSync} from 'node:fs';
+import {fs_exists, fs_empty_dir} from '@ryanatkn/belt/fs.js';
 import {
 	git_check_clean_workspace,
 	git_checkout,
@@ -21,7 +21,6 @@ import {
 	git_clone_locally,
 	git_current_branch_name,
 } from '@ryanatkn/belt/git.js';
-import {fs_empty_dir} from '@ryanatkn/belt/fs.js';
 
 import {TaskError, type Task} from './task.ts';
 import {print_path} from './paths.ts';
@@ -72,6 +71,19 @@ export const Args = z.strictObject({
 		.default(false),
 	build: z.boolean().meta({description: 'dual of no-build'}).default(true),
 	'no-build': z.boolean().meta({description: 'opt out of building'}).default(false),
+	sync: z.boolean().meta({description: 'dual of no-sync'}).default(true),
+	'no-sync': z.boolean().meta({description: 'opt out of gro sync in build'}).default(false),
+	gen: z.boolean().meta({description: 'dual of no-gen'}).default(true),
+	'no-gen': z.boolean().meta({description: 'opt out of gro gen in build'}).default(false),
+	install: z.boolean().meta({description: 'dual of no-install'}).default(true),
+	'no-install': z
+		.boolean()
+		.meta({description: 'opt out of installing packages before building'})
+		.default(false),
+	force_build: z
+		.boolean()
+		.meta({description: 'force a fresh build, ignoring the cache'})
+		.default(false),
 	pull: z.boolean().meta({description: 'dual of no-pull'}).default(true),
 	'no-pull': z.boolean().meta({description: 'opt out of git pull'}).default(false),
 });
@@ -93,6 +105,10 @@ export const task: Task<Args> = {
 			dangerous,
 			reset,
 			build,
+			sync,
+			gen,
+			install,
+			force_build,
 			pull,
 		} = args;
 
@@ -154,7 +170,7 @@ export const task: Task<Args> = {
 		// First, check if the deploy dir exists, and if so, attempt to sync it.
 		// If anything goes wrong, delete the directory and we'll initialize it
 		// using the same code path as if it didn't exist in the first place.
-		if (existsSync(resolved_deploy_dir)) {
+		if (await fs_exists(resolved_deploy_dir)) {
 			if (target !== (await git_current_branch_name(target_spawn_options))) {
 				// We're in a bad state because the target branch has changed,
 				// so delete the directory and continue as if it wasn't there.
@@ -175,7 +191,7 @@ export const task: Task<Args> = {
 
 		// Second, initialize the deploy dir if needed.
 		// It may not exist, or it may have been deleted after failing to sync above.
-		if (!existsSync(resolved_deploy_dir)) {
+		if (!(await fs_exists(resolved_deploy_dir))) {
 			const local_deploy_branch_exists = await git_local_branch_exists(target);
 			await git_fetch(origin, ('+' + target + ':' + target) as GitBranch); // fetch+merge and allow non-fastforward updates with the +
 			await git_clone_locally(origin, target, dir, resolved_deploy_dir);
@@ -193,7 +209,7 @@ export const task: Task<Args> = {
 			// Remote target branch does not exist, so start from scratch
 
 			// Delete the deploy dir and recreate it
-			if (existsSync(resolved_deploy_dir)) {
+			if (await fs_exists(resolved_deploy_dir)) {
 				await rm(resolved_deploy_dir, {recursive: true});
 				await mkdir(resolved_deploy_dir, {recursive: true});
 			}
@@ -222,14 +238,14 @@ export const task: Task<Args> = {
 		// Build
 		try {
 			if (build) {
-				await invoke_task('build');
+				await invoke_task('build', {sync, gen, install, force_build});
 			}
-		} catch (err) {
+		} catch (error) {
 			log.error(
 				st('red', 'build failed'),
 				'but',
 				st('green', 'no changes were made to git'),
-				print_error(err),
+				print_error(error),
 			);
 			if (dry) {
 				log.info(st('red', 'dry deploy failed'));
@@ -238,13 +254,14 @@ export const task: Task<Args> = {
 		}
 
 		// Verify build output exists
-		if (!existsSync(build_dir)) {
+		if (!(await fs_exists(build_dir))) {
 			throw new TaskError(`Directory to deploy does not exist after building: ${build_dir}`);
 		}
 
 		// Copy the build
+		const build_entries = await readdir(build_dir);
 		await Promise.all(
-			readdirSync(build_dir).map((path) =>
+			build_entries.map((path) =>
 				cp(join(build_dir, path), join(resolved_deploy_dir, path), {recursive: true}),
 			),
 		);
@@ -260,8 +277,8 @@ export const task: Task<Args> = {
 			await spawn('git', ['add', '.', '-f'], target_spawn_options);
 			await spawn('git', ['commit', '-m', 'deployment'], target_spawn_options);
 			await spawn('git', ['push', origin, target, '-f'], target_spawn_options); // force push because we may be resetting the branch, see the checks above to make this safer
-		} catch (err) {
-			log.error(st('red', 'updating git failed:'), print_error(err));
+		} catch (error) {
+			log.error(st('red', 'updating git failed:'), print_error(error));
 			throw new TaskError(`Deploy failed in a bad state: built but not pushed, see error above.`);
 		}
 
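The deploy task gains `sync`, `gen`, `install`, and `force_build` flags (each with a `no-*` dual) and forwards them into `invoke_task('build', ...)`. A sketch of the dual-flag schema shape only — how gro actually reconciles `--no-*` flags happens in its arg-parsing layer, which is not part of this diff, so the reconciliation line below is hypothetical:

```ts
import {z} from 'zod';

// Dual-flag pair mirroring the new deploy Args above (schema shape only).
const Args = z.strictObject({
	install: z.boolean().default(true),
	'no-install': z.boolean().default(false),
});

const parsed = Args.parse({'no-install': true});
// Hypothetical reconciliation: an explicit `no-install` wins over the positive default.
const install = parsed['no-install'] ? false : parsed.install;
console.log(install); // false
```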

package/src/lib/esbuild_plugin_external_worker.ts CHANGED
@@ -84,7 +84,7 @@ export const esbuild_plugin_external_worker = ({
 		};
 
 		build.onResolve({filter: /\.worker(|\.js|\.ts)$/}, async ({path, resolveDir}) => {
-			const parsed = resolve_specifier(path, resolveDir);
+			const parsed = await resolve_specifier(path, resolveDir);
 			const {specifier, path_id, namespace} = parsed;
 			const build_result = await build_worker(path_id);
 			if (log) print_build_result(log, build_result);
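
Making `resolve_specifier` async works in these plugins because esbuild's `onResolve` callbacks may return a promise. A standalone sketch of that capability (not gro's plugin, just the esbuild API shape):

```ts
import type * as esbuild from 'esbuild';

// Sketch only: an onResolve callback can be async and await work before returning a result.
const example_async_resolve_plugin: esbuild.Plugin = {
	name: 'example_async_resolve',
	setup: (build) => {
		build.onResolve({filter: /^\.{1,2}\//}, async (args) => {
			await Promise.resolve(); // stand-in for async work such as filesystem checks
			return {path: args.path, external: true}; // external keeps the sketch simple
		});
	},
};
```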

package/src/lib/esbuild_plugin_svelte.ts CHANGED
@@ -71,8 +71,8 @@ export const esbuild_plugin_svelte = (options: EsbuildPluginSvelteOptions): esbu
 				contents,
 				warnings: warnings.map((w) => convert_svelte_message_to_esbuild(filename, source, w)),
 			};
-		} catch (err) {
-			return {errors: [convert_svelte_message_to_esbuild(path, source, err)]};
+		} catch (error) {
+			return {errors: [convert_svelte_message_to_esbuild(path, source, error)]};
 		}
 	});
 
@@ -90,8 +90,8 @@ export const esbuild_plugin_svelte = (options: EsbuildPluginSvelteOptions): esbu
 				contents,
 				warnings: warnings.map((w) => convert_svelte_message_to_esbuild(filename, source, w)),
 			};
-		} catch (err) {
-			return {errors: [convert_svelte_message_to_esbuild(path, source, err)]};
+		} catch (error) {
+			return {errors: [convert_svelte_message_to_esbuild(path, source, error)]};
 		}
 	});
 },

package/src/lib/esbuild_plugin_sveltekit_local_imports.ts CHANGED
@@ -13,10 +13,10 @@ import {EVERYTHING_MATCHER} from './constants.ts';
 export const esbuild_plugin_sveltekit_local_imports = (): esbuild.Plugin => ({
 	name: 'sveltekit_local_imports',
 	setup: (build) => {
-		build.onResolve({filter: /^(\/|\.)/}, (args) => {
+		build.onResolve({filter: /^(\/|\.)/}, async (args) => {
 			const {path, importer} = args;
 			if (!importer) return {path};
-			const {path_id, namespace} = resolve_specifier(path, dirname(importer));
+			const {path_id, namespace} = await resolve_specifier(path, dirname(importer));
 			return {path: path_id, namespace}; // `namespace` may be `undefined`, but esbuild needs the absolute path for json etc
 		});
 		build.onLoad(