@ryanatkn/gro 0.170.0 → 0.171.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. package/dist/build.task.d.ts +6 -1
  2. package/dist/build.task.d.ts.map +1 -1
  3. package/dist/build.task.js +86 -5
  4. package/dist/build_cache.d.ts +100 -0
  5. package/dist/build_cache.d.ts.map +1 -0
  6. package/dist/build_cache.js +289 -0
  7. package/dist/deploy.task.d.ts.map +1 -1
  8. package/dist/deploy.task.js +13 -10
  9. package/dist/esbuild_plugin_svelte.js +1 -1
  10. package/dist/gen.d.ts.map +1 -1
  11. package/dist/gro_config.d.ts +30 -1
  12. package/dist/gro_config.d.ts.map +1 -1
  13. package/dist/gro_config.js +28 -4
  14. package/dist/hash.d.ts +1 -1
  15. package/dist/hash.d.ts.map +1 -1
  16. package/dist/hash.js +1 -2
  17. package/dist/invoke_task.d.ts.map +1 -1
  18. package/dist/invoke_task.js +2 -1
  19. package/dist/package.d.ts.map +1 -1
  20. package/dist/package.js +23 -14
  21. package/dist/package_json.js +1 -1
  22. package/package.json +3 -3
  23. package/src/lib/build.task.ts +110 -6
  24. package/src/lib/build_cache.ts +362 -0
  25. package/src/lib/changelog.ts +1 -1
  26. package/src/lib/changeset.task.ts +1 -1
  27. package/src/lib/commit.task.ts +1 -1
  28. package/src/lib/deploy.task.ts +14 -10
  29. package/src/lib/esbuild_plugin_svelte.ts +1 -1
  30. package/src/lib/gen.ts +2 -1
  31. package/src/lib/gro_config.ts +62 -3
  32. package/src/lib/hash.ts +2 -4
  33. package/src/lib/invoke_task.ts +5 -2
  34. package/src/lib/package.ts +23 -14
  35. package/src/lib/package_json.ts +2 -2
  36. package/src/lib/parse_exports_context.ts +2 -2
  37. package/src/lib/parse_imports.ts +1 -1
  38. package/src/lib/upgrade.task.ts +1 -1
  39. package/dist/test_helpers.d.ts +0 -22
  40. package/dist/test_helpers.d.ts.map +0 -1
  41. package/dist/test_helpers.js +0 -123
  42. package/src/lib/test_helpers.ts +0 -161
package/src/lib/build_cache.ts ADDED
@@ -0,0 +1,362 @@
+import {
+	existsSync,
+	mkdirSync,
+	readdirSync,
+	readFileSync,
+	rmSync,
+	statSync,
+	writeFileSync,
+} from 'node:fs';
+import {join} from 'node:path';
+import type {Logger} from '@ryanatkn/belt/log.js';
+import {styleText as st} from 'node:util';
+import {git_current_commit_hash} from '@ryanatkn/belt/git.js';
+import {z} from 'zod';
+
+import {to_hash} from './hash.ts';
+import type {Gro_Config} from './gro_config.ts';
+import {paths} from './paths.ts';
+import {SVELTEKIT_BUILD_DIRNAME, SVELTEKIT_DIST_DIRNAME, GRO_DIST_PREFIX} from './constants.ts';
+
+export const BUILD_CACHE_METADATA_FILENAME = 'build.json';
+export const BUILD_CACHE_VERSION = '1';
+
+/**
+ * Metadata about a single build output file.
+ * Includes cryptographic hash for validation plus filesystem stats for debugging and optimization.
+ */
+export const Build_Output_Entry = z.strictObject({
+	path: z
+		.string()
+		.meta({description: "relative path from project root (e.g., 'build/index.html')."}),
+	hash: z.string().meta({description: 'SHA-256 hash of file contents'}),
+	size: z.number().meta({description: 'file size in bytes'}),
+	mtime: z.number().meta({description: 'modification time in milliseconds since epoch'}),
+	ctime: z.number().meta({
+		description: 'POSIX change time in milliseconds since epoch',
+	}),
+	mode: z.number().meta({description: 'unix file permission mode (e.g., 33188 = 0644)'}),
+});
+export type Build_Output_Entry = z.infer<typeof Build_Output_Entry>;
+
+/**
+ * Metadata stored in .gro/ directory to track build cache validity.
+ * Schema validates structure at load time to catch corrupted cache files.
+ */
+export const Build_Cache_Metadata = z.strictObject({
+	version: z.string().meta({description: 'schema version for future compatibility'}),
+	git_commit: z.string().nullable().meta({description: 'git commit hash at time of build'}),
+	build_cache_config_hash: z
+		.string()
+		.meta({description: "hash of user's custom build_cache_config from gro.config.ts."}),
+	timestamp: z.string().meta({description: 'timestamp when build completed'}),
+	outputs: z
+		.array(Build_Output_Entry)
+		.meta({description: 'build output files with hashes and filesystem stats'}),
+});
+export type Build_Cache_Metadata = z.infer<typeof Build_Cache_Metadata>;
+
+/**
+ * Computes the cache key components for a build.
+ * This determines whether a cached build can be reused.
+ *
+ * @param config Gro config (build_cache_config_hash is already computed during config load)
+ * @param log Logger
+ * @param git_commit Optional pre-computed git commit hash (optimization to avoid re-reading)
+ */
+export const compute_build_cache_key = async (
+	config: Gro_Config,
+	log: Logger,
+	git_commit?: string | null,
+): Promise<{
+	git_commit: string | null;
+	build_cache_config_hash: string;
+}> => {
+	// 1. Git commit hash - primary cache key
+	const commit = git_commit !== undefined ? git_commit : await git_current_commit_hash();
+	if (!commit) {
+		log.warn('Not in a git repository - build cache will use null git commit');
+	}
+
+	// 2. Build cache config hash - already computed during config normalization
+	return {
+		git_commit: commit,
+		build_cache_config_hash: config.build_cache_config_hash,
+	};
+};
+
+/**
+ * Loads build cache metadata from .gro/ directory.
+ * Invalid or corrupted cache files are automatically deleted.
+ */
+export const load_build_cache_metadata = (): Build_Cache_Metadata | null => {
+	const metadata_path = join(paths.build, BUILD_CACHE_METADATA_FILENAME);
+
+	if (!existsSync(metadata_path)) {
+		return null;
+	}
+
+	try {
+		const contents = readFileSync(metadata_path, 'utf-8');
+		const parsed = JSON.parse(contents);
+
+		// Validate structure with Zod
+		const metadata = Build_Cache_Metadata.parse(parsed);
+
+		// Validate version
+		if (metadata.version !== BUILD_CACHE_VERSION) {
+			// Clean up stale cache with old schema version
+			try {
+				rmSync(metadata_path, {force: true});
+			} catch {
+				// Ignore cleanup errors
+			}
+			return null;
+		}
+
+		return metadata;
+	} catch {
+		// Clean up corrupted/invalid cache file
+		// (catches JSON.parse, Zod validation, and version errors)
+		try {
+			rmSync(metadata_path, {force: true});
+		} catch {
+			// Ignore cleanup errors
+		}
+		return null;
+	}
+};
+
+/**
+ * Saves build cache metadata to .gro/ directory.
+ * Errors are logged but don't fail the build (cache is optional).
+ */
+export const save_build_cache_metadata = (metadata: Build_Cache_Metadata, log?: Logger): void => {
+	try {
+		// Ensure .gro directory exists
+		mkdirSync(paths.build, {recursive: true});
+
+		const metadata_path = join(paths.build, BUILD_CACHE_METADATA_FILENAME);
+		writeFileSync(metadata_path, JSON.stringify(metadata, null, '\t'), 'utf-8');
+	} catch (error) {
+		// Cache writes are optional - log warning but don't fail the build
+		log?.warn(
+			st('yellow', 'Failed to save build cache'),
+			st('dim', `(${error instanceof Error ? error.message : String(error)})`),
+		);
+	}
+};
+
+/**
+ * Validates that a cached build is still valid by checking stats and hashing outputs.
+ * Uses size as a fast negative check before expensive hashing.
+ * This is comprehensive validation to catch manual tampering or corruption.
+ */
+export const validate_build_cache = async (metadata: Build_Cache_Metadata): Promise<boolean> => {
+	// Verify all tracked output files exist and have matching size
+	for (const output of metadata.outputs) {
+		if (!existsSync(output.path)) {
+			return false;
+		}
+
+		// Fast negative check: size mismatch = definitely invalid
+		// This avoids expensive file reads and hashing for files that have clearly changed
+		const stats = statSync(output.path);
+		if (stats.size !== output.size) {
+			return false;
+		}
+	}
+
+	// Size matches for all files - now verify content with cryptographic hashing
+	// Hash all files in parallel for performance
+	const hash_promises = metadata.outputs.map(async (output) => {
+		try {
+			const contents = readFileSync(output.path);
+			const actual_hash = await to_hash(contents);
+			return actual_hash === output.hash;
+		} catch {
+			// File deleted/inaccessible between checks = cache invalid
+			return false;
+		}
+	});
+
+	const results = await Promise.all(hash_promises);
+	return results.every((valid) => valid);
+};
+
+/**
+ * Main function to check if the build cache is valid.
+ * Returns true if the cached build can be used, false if a fresh build is needed.
+ *
+ * @param config Gro config
+ * @param log Logger
+ * @param git_commit Optional pre-computed git commit hash (optimization)
+ */
+export const is_build_cache_valid = async (
+	config: Gro_Config,
+	log: Logger,
+	git_commit?: string | null,
+): Promise<boolean> => {
+	// Load existing metadata
+	const metadata = load_build_cache_metadata();
+	if (!metadata) {
+		log.debug('No build cache metadata found');
+		return false;
+	}
+
+	// Compute current cache key
+	const current = await compute_build_cache_key(config, log, git_commit);
+
+	// Check if cache keys have changed
+	if (metadata.git_commit !== current.git_commit) {
+		log.debug('Build cache invalid: git commit changed');
+		return false;
+	}
+
+	if (metadata.build_cache_config_hash !== current.build_cache_config_hash) {
+		log.debug('Build cache invalid: build_cache_config changed');
+		return false;
+	}
+
+	// Comprehensive validation: verify output files
+	const outputs_valid = await validate_build_cache(metadata);
+	if (!outputs_valid) {
+		log.debug('Build cache invalid: output files missing or corrupted');
+		return false;
+	}
+
+	log.info(st('green', 'Build cache valid'), st('dim', `(from ${metadata.timestamp})`));
+	return true;
+};
+
+/**
+ * Collects information about all files in build output directories.
+ * Returns an array of entries with path, hash, size, mtime, ctime, and mode.
+ *
+ * Files are hashed in parallel for performance. For very large builds (10k+ files),
+ * this may take several seconds but ensures complete cache validation.
+ *
+ * @param build_dirs Array of output directories to scan (e.g., ['build', 'dist', 'dist_server'])
+ */
+export const collect_build_outputs = async (
+	build_dirs: Array<string>,
+): Promise<Array<Build_Output_Entry>> => {
+	// Collect all files to hash first
+	interface File_Entry {
+		full_path: string;
+		cache_key: string;
+	}
+
+	const files_to_hash: Array<File_Entry> = [];
+
+	// Recursively collect files
+	const collect_files = (dir: string, relative_base: string, dir_prefix: string): void => {
+		const entries = readdirSync(dir, {withFileTypes: true});
+
+		for (const entry of entries) {
+			// Skip metadata file itself
+			if (entry.name === BUILD_CACHE_METADATA_FILENAME) {
+				continue;
+			}
+
+			const full_path = join(dir, entry.name);
+			const relative_path = relative_base ? join(relative_base, entry.name) : entry.name;
+			const cache_key = join(dir_prefix, relative_path);
+
+			if (entry.isDirectory()) {
+				collect_files(full_path, relative_path, dir_prefix);
+			} else if (entry.isFile()) {
+				files_to_hash.push({full_path, cache_key});
+			}
+			// Symlinks are intentionally ignored - we only hash regular files
+		}
+	};
+
+	// Collect files from all build directories
+	for (const build_dir of build_dirs) {
+		if (!existsSync(build_dir)) {
+			continue; // Skip non-existent directories
+		}
+		collect_files(build_dir, '', build_dir);
+	}
+
+	// Hash all files in parallel and collect stats
+	const hash_promises = files_to_hash.map(
+		async ({full_path, cache_key}): Promise<Build_Output_Entry> => {
+			const stats = statSync(full_path);
+			const contents = readFileSync(full_path);
+			const hash = await to_hash(contents);
+
+			return {
+				path: cache_key,
+				hash,
+				size: stats.size,
+				mtime: stats.mtimeMs,
+				ctime: stats.ctimeMs,
+				mode: stats.mode,
+			};
+		},
+	);
+
+	return await Promise.all(hash_promises);
+};
+
+/**
+ * Discovers all build output directories in the current working directory.
+ * Returns an array of directory names that exist: build/, dist/, dist_*
+ */
+export const discover_build_output_dirs = (): Array<string> => {
+	const build_dirs: Array<string> = [];
+
+	// Check for SvelteKit app output (build/)
+	if (existsSync(SVELTEKIT_BUILD_DIRNAME)) {
+		build_dirs.push(SVELTEKIT_BUILD_DIRNAME);
+	}
+
+	// Check for SvelteKit library output (dist/)
+	if (existsSync(SVELTEKIT_DIST_DIRNAME)) {
+		build_dirs.push(SVELTEKIT_DIST_DIRNAME);
+	}
+
+	// Check for server and other plugin outputs (dist_*)
+	const root_entries = readdirSync('.');
+	const dist_dirs = root_entries.filter((p) => {
+		if (!p.startsWith(GRO_DIST_PREFIX)) return false;
+		try {
+			return statSync(p).isDirectory();
+		} catch {
+			// File was deleted/moved during iteration - skip it
+			return false;
+		}
+	});
+	build_dirs.push(...dist_dirs);
+
+	return build_dirs;
+};
+
+/**
+ * Creates build cache metadata after a successful build.
+ * Automatically discovers all build output directories (build/, dist/, dist_*).
+ *
+ * @param config Gro config
+ * @param log Logger
+ * @param git_commit Optional pre-computed git commit hash (optimization)
+ * @param build_dirs Optional pre-discovered build directories (optimization to avoid redundant filesystem scans)
+ */
+export const create_build_cache_metadata = async (
+	config: Gro_Config,
+	log: Logger,
+	git_commit?: string | null,
+	build_dirs?: Array<string>,
+): Promise<Build_Cache_Metadata> => {
+	const cache_key = await compute_build_cache_key(config, log, git_commit);
+	const dirs = build_dirs ?? discover_build_output_dirs();
+	const outputs = await collect_build_outputs(dirs);
+
+	return {
+		version: BUILD_CACHE_VERSION,
+		...cache_key,
+		timestamp: new Date().toISOString(),
+		outputs,
+	};
+};
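The module above composes into a check-then-create flow: a build task can ask is_build_cache_valid before doing any work, and record fresh metadata after a successful build. A minimal sketch of that flow (illustrative only; the real wiring lives in build.task.ts, whose diff is summarized in the file list above, and run_build here is a hypothetical stand-in for the actual build step):

import type {Logger} from '@ryanatkn/belt/log.js';
import type {Gro_Config} from './gro_config.ts';
import {
	is_build_cache_valid,
	create_build_cache_metadata,
	save_build_cache_metadata,
} from './build_cache.ts';

declare const run_build: () => Promise<void>; // hypothetical: whatever produces build/, dist/, dist_*

const build_with_cache = async (config: Gro_Config, log: Logger): Promise<void> => {
	if (await is_build_cache_valid(config, log)) {
		return; // outputs on disk already match the recorded hashes, sizes, and git commit
	}
	await run_build();
	const metadata = await create_build_cache_metadata(config, log);
	save_build_cache_metadata(metadata, log);
};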
package/src/lib/changelog.ts CHANGED
@@ -51,7 +51,7 @@ const map_changelog = async (
 	for (const line of parsed) {
 		const matches = LINE_WITH_SHA_MATCHER.exec(line);
 		if (matches) {
-			const commit_sha = matches[1];
+			const commit_sha = matches[1]!;
 			const l = '- ' + line.substring(commit_sha.length + 4);
 			const prs = await github_fetch_commit_prs(owner, repo, commit_sha, token, log, cache); // eslint-disable-line no-await-in-loop
 			if (prs?.length) {
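This hunk and several below (changeset.task.ts, commit.task.ts, gen.ts, invoke_task.ts, esbuild_plugin_svelte.ts) add non-null assertions or fallbacks to values the compiler now treats as possibly undefined. That is the churn typically produced by enabling TypeScript's noUncheckedIndexedAccess, under which indexing returns T | undefined; the tsconfig change itself isn't part of this diff, so treat that as an inference. A minimal illustration:

declare const matches: RegExpExecArray;
const a = matches[1]; // string | undefined when noUncheckedIndexedAccess is on
const b = matches[1]!; // non-null assertion narrows back to string
const c = matches[1] ?? ''; // or supply a fallback, as esbuild_plugin_svelte.ts does below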
package/src/lib/changeset.task.ts CHANGED
@@ -174,7 +174,7 @@ const create_changeset_adder = (
 	if (filenames_added.length !== 1) {
 		throw Error('expected to find exactly one new changeset file');
 	}
-	const path = join(dir, filenames_added[0]);
+	const path = join(dir, filenames_added[0]!);
 	const contents = create_new_changeset(repo_name, message, bump);
 	await writeFile(path, contents, 'utf8');
 	await spawn('git', ['add', path]);
package/src/lib/commit.task.ts CHANGED
@@ -26,7 +26,7 @@ export const task: Task<Args> = {
 
 		const branch = await git_current_branch_name();
 
-		await spawn('git', ['commit', '-a', '-m', message]);
+		await spawn('git', ['commit', '-a', '-m', message!]);
 		await git_push(origin, branch, undefined, true);
 	},
 };
package/src/lib/deploy.task.ts CHANGED
@@ -27,7 +27,7 @@ import {print_path} from './paths.ts';
 import {GRO_DIRNAME, GIT_DIRNAME, SVELTEKIT_BUILD_DIRNAME} from './constants.ts';
 import {empty_dir} from './fs.ts';
 
-// docs at ./docs/deploy.md
+// docs at ../docs/deploy.md
 
 // terminal command for testing:
 // npm run bootstrap && rm -rf .gro && clear && gro deploy --source no-git-workspace --no-build --dry
@@ -159,11 +159,14 @@ export const task: Task<Args> = {
 			await rm(resolved_deploy_dir, {recursive: true});
 		} else {
 			await spawn('git', ['reset', '--hard'], target_spawn_options); // in case it's dirty
-			await git_pull(origin, target, target_spawn_options);
-			if (await git_check_clean_workspace(target_spawn_options)) {
-				// We're in a bad state because the local branch lost continuity with the remote,
-				// so delete the directory and continue as if it wasn't there.
-				await rm(resolved_deploy_dir, {recursive: true});
+			// Skip pulling target branch when resetting (optimization - we reset after anyway)
+			if (!reset) {
+				await git_pull(origin, target, target_spawn_options);
+				if (await git_check_clean_workspace(target_spawn_options)) {
+					// We're in a bad state because the local branch lost continuity with the remote,
+					// so delete the directory and continue as if it wasn't there.
+					await rm(resolved_deploy_dir, {recursive: true});
+				}
 			}
 		}
 	}
@@ -219,10 +222,6 @@ export const task: Task<Args> = {
 		if (build) {
 			await invoke_task('build');
 		}
-		if (!existsSync(build_dir)) {
-			log.error(st('red', 'directory to deploy does not exist after building:'), build_dir);
-			return;
-		}
 	} catch (err) {
 		log.error(
 			st('red', 'build failed'),
@@ -236,6 +235,11 @@ export const task: Task<Args> = {
 		throw new Task_Error(`Deploy safely canceled due to build failure. See the error above.`);
 	}
 
+	// Verify build output exists
+	if (!existsSync(build_dir)) {
+		throw new Task_Error(`Directory to deploy does not exist after building: ${build_dir}`);
+	}
+
 	// Copy the build
 	await Promise.all(
 		readdirSync(build_dir).map((path) =>
package/src/lib/esbuild_plugin_svelte.ts CHANGED
@@ -109,7 +109,7 @@ const convert_svelte_message_to_esbuild = (
 ): esbuild.PartialMessage => {
 	let location: esbuild.PartialMessage['location'] = null;
 	if (start && end) {
-		const lineText = source.split(/\r\n|\r|\n/g)[start.line - 1];
+		const lineText = source.split(/\r\n|\r|\n/g)[start.line - 1] ?? '';
 		const lineEnd = start.line === end.line ? end.column : lineText.length;
 		location = {
 			file: path,
package/src/lib/gen.ts CHANGED
@@ -53,6 +53,7 @@ export type Gen = Gen_Function | Gen_Config;
 
 export type Gen_Function = (ctx: Gen_Context) => Raw_Gen_Result | Promise<Raw_Gen_Result>;
 
+// TODO add a Gen_Config_Raw variant and change `normalize_gen_config` to `gen_cook_config`
 export interface Gen_Config {
 	generate: Gen_Function;
 	dependencies?: Gen_Dependencies;
@@ -180,7 +181,7 @@ export const to_output_file_name = (filename: string): string => {
 	for (let i = 0; i < length; i++) {
 		if (i === gen_pattern_index) continue; // skip the `.gen.` pattern
 		if (i === length - 1 && parts[i] === '') continue; // allow empty extension
-		final_parts.push(parts[i]);
+		final_parts.push(parts[i]!);
 	}
 	return final_parts.join('.');
 };
package/src/lib/gro_config.ts CHANGED
@@ -2,6 +2,7 @@ import {join, resolve} from 'node:path';
 import {existsSync} from 'node:fs';
 import {identity} from '@ryanatkn/belt/function.js';
 import type {Path_Filter, Path_Id} from '@ryanatkn/belt/path.js';
+import {json_stringify_deterministic} from '@ryanatkn/belt/json.js';
 
 import {GRO_DIST_DIR, IS_THIS_GRO, paths} from './paths.ts';
 import {
@@ -17,6 +18,14 @@ import create_default_config from './gro.config.default.ts';
 import type {Create_Config_Plugins} from './plugin.ts';
 import type {Map_Package_Json} from './package_json.ts';
 import type {Parsed_Svelte_Config} from './svelte_config.ts';
+import {to_hash} from './hash.ts';
+
+/**
+ * SHA-256 hash of empty string, used for configs without build_cache_config.
+ * This ensures consistent cache behavior when no custom config is provided.
+ */
+export const EMPTY_BUILD_CACHE_CONFIG_HASH =
+	'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855';
 
 /**
  * The config that users can extend via `gro.config.ts`.
@@ -54,6 +63,13 @@ export interface Gro_Config extends Raw_Gro_Config {
 	pm_cli: string;
 	/** @default SVELTE_CONFIG_FILENAME */
 	svelte_config_filename?: string;
+	/**
+	 * SHA-256 hash of the user's `build_cache_config` from `gro.config.ts`.
+	 * This is computed during config normalization and the raw value is immediately deleted.
+	 * If no `build_cache_config` was provided, this is the hash of an empty string.
+	 * @see Raw_Gro_Config.build_cache_config
+	 */
+	build_cache_config_hash: string;
 }
 
 /**
@@ -68,6 +84,24 @@ export interface Raw_Gro_Config {
 	search_filters?: Path_Filter | Array<Path_Filter> | null;
 	js_cli?: string;
 	pm_cli?: string;
+	/**
+	 * Optional object defining custom build inputs for cache invalidation.
+	 * This value is hashed during config normalization and used to detect
+	 * when builds need to be regenerated due to non-source changes.
+	 *
+	 * Use cases:
+	 * - Environment variables baked into build: `{api_url: process.env.PUBLIC_API_URL}`
+	 * - External data files: `{data: fs.readFileSync('data.json', 'utf-8')}`
+	 * - Build feature flags: `{enable_analytics: true}`
+	 *
+	 * Can be a static object or an async function that returns an object.
+	 *
+	 * IMPORTANT: It's safe to include secrets here because they are hashed and `delete`d
+	 * during config normalization. The raw value is never logged or persisted.
+	 */
+	build_cache_config?:
+		| Record<string, unknown>
+		| (() => Record<string, unknown> | Promise<Record<string, unknown>>);
 }
 
 export type Create_Gro_Config = (
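For reference, a hypothetical gro.config.ts exercising the new option (assuming the plain-object default-export form that load_gro_config accepts below; the import path is a guess based on the package layout, not confirmed by this diff):

// gro.config.ts (hypothetical user project)
import type {Raw_Gro_Config} from '@ryanatkn/gro/gro_config.js'; // assumed export path

const config: Raw_Gro_Config = {
	build_cache_config: {
		// rebuild when these change, even if no source file did:
		api_url: process.env.PUBLIC_API_URL,
		enable_analytics: true,
	},
};

export default config;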
@@ -87,6 +121,7 @@ export const create_empty_gro_config = (): Gro_Config => ({
 	search_filters: [(id) => !SEARCH_EXCLUDER_DEFAULT.test(id)],
 	js_cli: JS_CLI_DEFAULT,
 	pm_cli: PM_CLI_DEFAULT,
+	build_cache_config_hash: EMPTY_BUILD_CACHE_CONFIG_HASH,
 });
 
 /**
@@ -112,8 +147,9 @@ export const EXPORTS_EXCLUDER_DEFAULT = /(\.md|\.(test|ignore)\.|\/(test|fixture
 /**
  * Transforms a `Raw_Gro_Config` to the more strict `Gro_Config`.
  * This allows users to provide a more relaxed config.
+ * Hashes the `build_cache_config` and deletes the raw value for security.
  */
-export const cook_gro_config = (raw_config: Raw_Gro_Config): Gro_Config => {
+export const cook_gro_config = async (raw_config: Raw_Gro_Config): Promise<Gro_Config> => {
 	const empty_config = create_empty_gro_config();
 
 	// All of the raw config properties are optional,
@@ -125,8 +161,28 @@ export const cook_gro_config = (raw_config: Raw_Gro_Config): Gro_Config => {
 		search_filters = empty_config.search_filters,
 		js_cli = empty_config.js_cli,
 		pm_cli = empty_config.pm_cli,
+		build_cache_config,
 	} = raw_config;
 
+	// Hash build_cache_config and delete the raw value
+	// IMPORTANT: Raw value may contain secrets - hash it and delete immediately
+	let build_cache_config_hash: string;
+	if (!build_cache_config) {
+		build_cache_config_hash = EMPTY_BUILD_CACHE_CONFIG_HASH;
+	} else {
+		// Resolve if it's a function
+		const resolved =
+			typeof build_cache_config === 'function' ? await build_cache_config() : build_cache_config;
+
+		// Hash the JSON representation with deterministic key ordering
+		build_cache_config_hash = await to_hash(
+			new TextEncoder().encode(json_stringify_deterministic(resolved)),
+		);
+	}
+
+	// Delete the raw value to ensure it doesn't persist in memory
+	delete (raw_config as any).build_cache_config;
+
 	return {
 		plugins,
 		map_package_json,
@@ -138,6 +194,7 @@
 			: [],
 		js_cli,
 		pm_cli,
+		build_cache_config_hash,
 	};
 };
 
@@ -146,7 +203,9 @@ export interface Gro_Config_Module {
 }
 
 export const load_gro_config = async (dir = paths.root): Promise<Gro_Config> => {
-	const default_config = cook_gro_config(await create_default_config(create_empty_gro_config()));
+	const default_config = await cook_gro_config(
+		await create_default_config(create_empty_gro_config()),
+	);
 
 	const config_path = join(dir, GRO_CONFIG_FILENAME);
 	if (!existsSync(config_path)) {
@@ -159,7 +218,7 @@ export const load_gro_config = async (dir = paths.root): Promise<Gro_Config> =>
 
 	validate_gro_config_module(config_module, config_path);
 
-	return cook_gro_config(
+	return await cook_gro_config(
 		typeof config_module.default === 'function'
 			? await config_module.default(default_config)
 			: config_module.default,
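A note on the json_stringify_deterministic call in cook_gro_config above: plain JSON.stringify preserves property insertion order, so two configs with identical values could hash differently. A deterministic stringify (assumed here to sort keys, per the helper's name; its exact behavior isn't shown in this diff) removes that source of spurious cache invalidation:

// Insertion order leaks into the hash input with plain JSON.stringify:
JSON.stringify({a: 1, b: 2}); // '{"a":1,"b":2}'
JSON.stringify({b: 2, a: 1}); // '{"b":2,"a":1}' - different string, so a different hash
// A key-sorting stringify maps both objects to the same string, so equal configs hash equally.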
package/src/lib/hash.ts CHANGED
@@ -1,12 +1,10 @@
-import {webcrypto} from 'node:crypto';
-
-const {subtle} = webcrypto;
+const {subtle} = globalThis.crypto;
 
 /**
  * @see https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto
  */
 export const to_hash = async (
-	data: Buffer,
+	data: BufferSource,
 	algorithm: 'SHA-1' | 'SHA-256' | 'SHA-384' | 'SHA-512' = 'SHA-256',
 ): Promise<string> => {
 	const digested = await subtle.digest(algorithm, data);
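Two effects of this change: hash.ts no longer needs node:crypto, since globalThis.crypto is available in modern Node (unflagged since Node 19), and widening the parameter from Buffer to BufferSource accepts any ArrayBuffer or typed-array view. A usage sketch:

import {readFileSync} from 'node:fs';
import {to_hash} from './hash.ts';

const a = await to_hash(new TextEncoder().encode('some content')); // Uint8Array is a BufferSource
const b = await to_hash(readFileSync('package.json')); // Buffer still works - it subclasses Uint8Array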
package/src/lib/invoke_task.ts CHANGED
@@ -96,7 +96,10 @@ export const invoke_task = async (
 	}
 	const loaded_tasks = loaded.value;
 
-	if (resolved_input_files.length > 1 || resolved_input_files[0].resolved_input_path.is_directory) {
+	if (
+		resolved_input_files.length > 1 ||
+		resolved_input_files[0]!.resolved_input_path.is_directory
+	) {
 		// The input path matches a directory. Log the tasks but don't run them.
 		log_tasks(log, loaded_tasks);
 		await finish();
@@ -105,7 +108,7 @@ export const invoke_task = async (
 
 	// The input path matches a file that's presumable a task, so load and run it.
 	if (loaded_tasks.modules.length !== 1) throw Error('expected one loaded task'); // run only one task at a time
-	const task = loaded_tasks.modules[0];
+	const task = loaded_tasks.modules[0]!;
 	log.info(
 		`→ ${st('cyan', task.name)} ${(task.mod.task.summary && st('gray', task.mod.task.summary)) ?? ''}`,
 	);