@ryanatkn/gro 0.121.1 → 0.122.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/config.js +7 -11
- package/dist/gen.d.ts +1 -6
- package/dist/gen.js +3 -8
- package/dist/gen.test.js +0 -1
- package/dist/gro_plugin_gen.js +1 -1
- package/dist/input_path.d.ts +2 -4
- package/dist/input_path.js +27 -47
- package/dist/input_path.test.js +5 -23
- package/dist/package.js +8 -8
- package/dist/path_constants.js +1 -1
- package/dist/task.d.ts +1 -6
- package/dist/task.js +3 -8
- package/package.json +7 -7
package/dist/config.js
CHANGED
@@ -20,17 +20,13 @@ export const create_empty_config = () => ({
 * Customize via `search_filters` in the `Gro_Config`.
 * See the test cases for the exact behavior.
 */
-export const DEFAULT_SEARCH_EXCLUDER = new RegExp(`(${[
-
-
-
-
-
-
-SERVER_DIST_PATH,
-]
-.map((p) => '(^|/)' + p)
-.join('|')})($|/)`, 'u');
+export const DEFAULT_SEARCH_EXCLUDER = new RegExp(`(${'(^|/)\\.[^/]+' + // exclude all `.`-prefixed directories
+// TODO probably change to `pkg.name` instead of this catch-all (also `gro` below)
+`|(^|/)${NODE_MODULES_DIRNAME}(?!/(@[^/]+/)?gro/${SVELTEKIT_DIST_DIRNAME})` + // exclude `node_modules` unless it's to the Gro directory
+`|(^|/)${SVELTEKIT_BUILD_DIRNAME}` + // exclude the SvelteKit build directory
+`|(^|/)(?<!(^|/)gro/)${SVELTEKIT_DIST_DIRNAME}` + // exclude the SvelteKit dist directory unless it's in the Gro directory
+`|(^|/)${SERVER_DIST_PATH}` // exclude the Gro server plugin dist directory
+})($|/)`, 'u');
 const default_map_package_json = async (package_json) => {
 if (package_json.exports) {
 package_json.exports = Object.fromEntries(Object.entries(package_json.exports).filter(([k]) => !DEFAULT_EXPORTS_EXCLUDER.test(k)));
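The old excluder mapped a flat list of directory names into one alternation; the new one hand-writes the pattern so `node_modules` and the SvelteKit dist directory can carve out exceptions for Gro itself via lookarounds. A minimal sketch of the resulting behavior, with the imported constants inlined as assumed literals (`node_modules`, `build`, and `dist` are guesses for `NODE_MODULES_DIRNAME`, `SVELTEKIT_BUILD_DIRNAME`, and `SVELTEKIT_DIST_DIRNAME`; `dist_server` comes from `path_constants.js` below):

```ts
// Sketch only: the real DEFAULT_SEARCH_EXCLUDER interpolates constants from path_constants.js.
const excluder = new RegExp(
	`(${
		'(^|/)\\.[^/]+' + // `.`-prefixed directories
		'|(^|/)node_modules(?!/(@[^/]+/)?gro/dist)' + // node_modules, except Gro's own dist
		'|(^|/)build' + // SvelteKit build directory
		'|(^|/)(?<!(^|/)gro/)dist' + // SvelteKit dist directory, except inside gro/
		'|(^|/)dist_server' // Gro server plugin dist directory
	})($|/)`,
	'u',
);

console.log(excluder.test('node_modules/foo/index.js')); // true - excluded
console.log(excluder.test('node_modules/@ryanatkn/gro/dist/gen.js')); // false - Gro's dist stays searchable
console.log(excluder.test('src/lib/.cache/file.ts')); // true - dot-directory excluded
console.log(excluder.test('src/lib/thing.gen.ts')); // false - ordinary source is kept
```

The negative lookahead and lookbehind are what let Gro's own published dist stay visible while everything else under `node_modules` is skipped.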
package/dist/gen.d.ts
CHANGED
@@ -85,10 +85,8 @@ export declare const analyze_gen_result: (file: Gen_File) => Promise<Analyzed_Ge
 export declare const write_gen_results: (gen_results: Gen_Results, analyzed_gen_results: Analyzed_Gen_Result[], log: Logger) => Promise<void>;
 export interface Found_Genfiles {
 resolved_input_files: Resolved_Input_File[];
-resolved_input_files_by_input_path: Map<Input_Path, Resolved_Input_File[]>;
 resolved_input_files_by_root_dir: Map<Path_Id, Resolved_Input_File[]>;
 resolved_input_paths: Resolved_Input_Path[];
-resolved_input_paths_by_input_path: Map<Input_Path, Resolved_Input_Path[]>;
 }
 export type Find_Genfiles_Result = Result<{
 value: Found_Genfiles;
@@ -97,16 +95,13 @@ export type Find_Genfiles_Failure = {
 type: 'unmapped_input_paths';
 unmapped_input_paths: Input_Path[];
 resolved_input_paths: Resolved_Input_Path[];
-resolved_input_paths_by_input_path: Map<Input_Path, Resolved_Input_Path[]>;
 reasons: string[];
 } | {
 type: 'input_directories_with_no_files';
-input_directories_with_no_files:
+input_directories_with_no_files: Input_Path[];
 resolved_input_files: Resolved_Input_File[];
-resolved_input_files_by_input_path: Map<Input_Path, Resolved_Input_File[]>;
 resolved_input_files_by_root_dir: Map<Path_Id, Resolved_Input_File[]>;
 resolved_input_paths: Resolved_Input_Path[];
-resolved_input_paths_by_input_path: Map<Input_Path, Resolved_Input_Path[]>;
 reasons: string[];
 };
 /**
package/dist/gen.js
CHANGED
@@ -136,7 +136,7 @@ export const find_genfiles = async (input_paths, root_dirs, config, timings) =>
 const extensions = [GEN_FILE_PATTERN];
 // Check which extension variation works - if it's a directory, prefer others first!
 const timing_to_resolve_input_paths = timings?.start('resolve input paths');
-const { resolved_input_paths,
+const { resolved_input_paths, unmapped_input_paths } = resolve_input_paths(input_paths, root_dirs, extensions);
 timing_to_resolve_input_paths?.();
 // Error if any input path could not be mapped.
 if (unmapped_input_paths.length) {
@@ -145,13 +145,12 @@ export const find_genfiles = async (input_paths, root_dirs, config, timings) =>
 type: 'unmapped_input_paths',
 unmapped_input_paths,
 resolved_input_paths,
-resolved_input_paths_by_input_path,
 reasons: unmapped_input_paths.map((input_path) => red(`Input path ${print_path(input_path)} cannot be mapped to a file or directory.`)),
 };
 }
 // Find all of the files for any directories.
 const timing_to_search_fs = timings?.start('find files');
-const { resolved_input_files,
+const { resolved_input_files, resolved_input_files_by_root_dir, input_directories_with_no_files } = resolve_input_files(resolved_input_paths, (id) => search_fs(id, {
 filter: config.search_filters,
 file_filter: (p) => extensions.some((e) => p.includes(e)),
 }));
@@ -163,21 +162,17 @@ export const find_genfiles = async (input_paths, root_dirs, config, timings) =>
 type: 'input_directories_with_no_files',
 input_directories_with_no_files,
 resolved_input_files,
-resolved_input_files_by_input_path,
 resolved_input_files_by_root_dir,
 resolved_input_paths,
-
-reasons: input_directories_with_no_files.map(({ input_path }) => red(`Input directory contains no matching files: ${print_path(input_path)}`)),
+reasons: input_directories_with_no_files.map((input_path) => red(`Input directory contains no matching files: ${print_path(input_path)}`)),
 };
 }
 return {
 ok: true,
 value: {
 resolved_input_files,
-resolved_input_files_by_input_path,
 resolved_input_files_by_root_dir,
 resolved_input_paths,
-resolved_input_paths_by_input_path,
 },
 };
 };
package/dist/gen.test.js
CHANGED
@@ -247,6 +247,5 @@ test('find_genfiles_result finds gen modules in a directory', async () => {
 const find_genfiles_result = await find_genfiles(['docs'], [paths.lib], create_empty_config());
 assert.ok(find_genfiles_result.ok);
 assert.ok(find_genfiles_result.value.resolved_input_paths.length);
-assert.ok(find_genfiles_result.value.resolved_input_paths_by_input_path.size);
 });
 test.run();
package/dist/gro_plugin_gen.js
CHANGED
@@ -45,7 +45,7 @@ export const plugin = () => {
 // making us miss `build` events for gen dependencies,
 // so we run `gen` here even if it's usually wasteful.
 const found = await find_genfiles([paths.source], root_dirs, config);
-if (found.ok && found.
+if (found.ok && found.value.resolved_input_files.size > 0) {
 await gen();
 }
 // Do we need to just generate everything once and exit?
package/dist/input_path.d.ts
CHANGED
@@ -42,7 +42,6 @@ export interface Resolved_Input_File {
 }
 export interface Resolved_Input_Paths {
 resolved_input_paths: Resolved_Input_Path[];
-resolved_input_paths_by_input_path: Map<Input_Path, Resolved_Input_Path[]>;
 possible_paths_by_input_path: Map<Input_Path, Possible_Path[]>;
 unmapped_input_paths: Input_Path[];
 }
@@ -54,12 +53,11 @@ export interface Resolved_Input_Paths {
 export declare const resolve_input_paths: (input_paths: Input_Path[], root_dirs: Path_Id[], extensions: string[]) => Resolved_Input_Paths;
 export interface Resolved_Input_Files {
 resolved_input_files: Resolved_Input_File[];
-resolved_input_files_by_input_path: Map<Input_Path, Resolved_Input_File[]>;
 resolved_input_files_by_root_dir: Map<Path_Id, Resolved_Input_File[]>;
-input_directories_with_no_files:
+input_directories_with_no_files: Input_Path[];
 }
 /**
 * Finds all of the matching files for the given input paths.
 * De-dupes source ids.
 */
-export declare const resolve_input_files: (resolved_input_paths: Resolved_Input_Path[],
+export declare const resolve_input_files: (resolved_input_paths: Resolved_Input_Path[], search?: (dir: string) => Resolved_Path[]) => Resolved_Input_Files;
package/dist/input_path.js
CHANGED
@@ -122,15 +122,6 @@ export const resolve_input_paths = (input_paths, root_dirs, extensions) => {
 }
 return {
 resolved_input_paths,
-resolved_input_paths_by_input_path: resolved_input_paths.reduce((map, resolved_input_path) => {
-if (map.has(resolved_input_path.input_path)) {
-map.get(resolved_input_path.input_path).push(resolved_input_path);
-}
-else {
-map.set(resolved_input_path.input_path, [resolved_input_path]);
-}
-return map;
-}, new Map()),
 possible_paths_by_input_path,
 unmapped_input_paths,
 };
@@ -139,61 +130,50 @@ export const resolve_input_paths = (input_paths, root_dirs, extensions) => {
 * Finds all of the matching files for the given input paths.
 * De-dupes source ids.
 */
-export const resolve_input_files = (resolved_input_paths,
+export const resolve_input_files = (resolved_input_paths, search = search_fs) => {
 const resolved_input_files = [];
-
-const input_directories_with_no_files = [];
+// Add all input paths initially, and remove each when resolved to a file.
 const existing_path_ids = new Set();
 // TODO parallelize but would need to de-dupe and retain order
 for (const resolved_input_path of resolved_input_paths) {
 const { input_path, id, is_directory } = resolved_input_path;
 if (is_directory) {
-
-
-
-
-
-
-
-
-
-
-
-
-}
-}
-if (path_ids.length) {
-const resolved_input_files_for_input_path = [];
-for (const path_id of path_ids) {
-const resolved_input_file = {
-id: path_id,
-input_path,
-resolved_input_path,
-};
-resolved_input_files.push(resolved_input_file);
-resolved_input_files_for_input_path.push(resolved_input_file);
-}
-resolved_input_files_by_input_path.set(input_path, resolved_input_files_for_input_path);
-}
-if (!has_files) {
-input_directories_with_no_files.push(resolved_input_path);
+// Handle input paths that resolve to directories.
+const files = search(id);
+if (!files.length)
+continue;
+const path_ids = [];
+for (const { path, is_directory } of files) {
+if (is_directory)
+continue;
+const path_id = join(id, path);
+if (!existing_path_ids.has(path_id)) {
+existing_path_ids.add(path_id);
+path_ids.push(path_id);
 }
-// do callers ever need `input_directories_with_duplicate_files`?
 }
-
-
+if (!path_ids.length)
+continue;
+const resolved_input_files_for_input_path = [];
+for (const path_id of path_ids) {
+const resolved_input_file = {
+id: path_id,
+input_path,
+resolved_input_path,
+};
+resolved_input_files.push(resolved_input_file);
+resolved_input_files_for_input_path.push(resolved_input_file);
 }
 }
 else if (!existing_path_ids.has(id)) {
+// Handle input paths that resolve to files.
 existing_path_ids.add(id);
 const resolved_input_file = { id, input_path, resolved_input_path };
 resolved_input_files.push(resolved_input_file);
-resolved_input_files_by_input_path.set(input_path, [resolved_input_file]);
 }
 }
 return {
 resolved_input_files,
-resolved_input_files_by_input_path,
 resolved_input_files_by_root_dir: resolved_input_files.reduce((map, resolved_input_file) => {
 const { root_dir } = resolved_input_file.resolved_input_path;
 if (map.has(root_dir)) {
@@ -204,6 +184,6 @@ export const resolve_input_files = (resolved_input_paths, custom_search_fs = sea
 }
 return map;
 }, new Map()),
-input_directories_with_no_files,
+input_directories_with_no_files: Array.from(new Set(resolved_input_paths.map((p) => p.input_path)).difference(new Set(resolved_input_files.map((f) => [f.input_path, f.id]).flat()))),
 };
 };
package/dist/input_path.test.js
CHANGED
@@ -131,45 +131,27 @@ test('resolve_input_files', async () => {
 root_dir: process.cwd(),
 };
 const d = {
-id: 'fake/',
-is_directory: true,
-input_path: 'fake/',
-root_dir: process.cwd(),
-};
-const e = {
 id: 'fake',
 is_directory: true,
 input_path: 'fake',
 root_dir: process.cwd(),
 };
-const
+const e = {
 id: 'fake/nomatches',
 is_directory: true,
 input_path: 'fake/nomatches',
 root_dir: process.cwd(),
 };
-const result = resolve_input_files([a, b, c, d, e
+const result = resolve_input_files([a, b, c, d, e], (id) => test_files[id]);
 const resolved_input_files = [
 { id: a.id, input_path: a.input_path, resolved_input_path: a },
 { id: b.id, input_path: b.input_path, resolved_input_path: b },
 { id: 'fake/test3/a.ts', input_path: c.input_path, resolved_input_path: c },
 { id: 'fake/test3/b.ts', input_path: c.input_path, resolved_input_path: c },
-{ id: 'fake/test3/c.ts', input_path:
+{ id: 'fake/test3/c.ts', input_path: d.input_path, resolved_input_path: d },
 ];
 assert.equal(result, {
 resolved_input_files,
-resolved_input_files_by_input_path: new Map([
-['fake/test1.ext.ts', [{ id: a.id, input_path: a.input_path, resolved_input_path: a }]],
-['fake/test2', [{ id: b.id, input_path: b.input_path, resolved_input_path: b }]],
-[
-'fake/test3',
-[
-{ id: 'fake/test3/a.ts', input_path: c.input_path, resolved_input_path: c },
-{ id: 'fake/test3/b.ts', input_path: c.input_path, resolved_input_path: c },
-],
-],
-['fake', [{ id: 'fake/test3/c.ts', input_path: e.input_path, resolved_input_path: e }]],
-]),
 resolved_input_files_by_root_dir: new Map([
 [
 process.cwd(),
@@ -178,11 +160,11 @@ test('resolve_input_files', async () => {
 { id: 'fake/test2.ext.ts', input_path: 'fake/test2', resolved_input_path: b },
 { id: 'fake/test3/a.ts', input_path: 'fake/test3', resolved_input_path: c },
 { id: 'fake/test3/b.ts', input_path: 'fake/test3', resolved_input_path: c },
-{ id: 'fake/test3/c.ts', input_path: 'fake', resolved_input_path:
+{ id: 'fake/test3/c.ts', input_path: 'fake', resolved_input_path: d },
 ],
 ],
 ]),
-input_directories_with_no_files: [
+input_directories_with_no_files: [e.input_path],
 });
 });
 test.run();
package/dist/package.js
CHANGED
@@ -1,7 +1,7 @@
 // generated by src/lib/package.gen.ts
 export const package_json = {
 name: '@ryanatkn/gro',
-version: '0.121.1',
+version: '0.122.0',
 description: 'task runner and toolkit extending SvelteKit',
 motto: 'generate, run, optimize',
 icon: '🌰',
@@ -33,10 +33,10 @@ export const package_json = {
 ],
 files: ['dist'],
 dependencies: {
-'@ryanatkn/belt': '^0.
+'@ryanatkn/belt': '^0.21.0',
 chokidar: '^3.6.0',
 dotenv: '^16.4.5',
-'es-module-lexer': '^1.5.
+'es-module-lexer': '^1.5.4',
 kleur: '^4.1.5',
 mri: '^1.2.0',
 prettier: '^3.3.2',
@@ -50,8 +50,8 @@ export const package_json = {
 '@changesets/changelog-git': '^0.2.0',
 '@changesets/types': '^6.0.0',
 '@ryanatkn/eslint-config': '^0.1.3',
-'@ryanatkn/fuz': '^0.
-'@ryanatkn/moss': '^0.
+'@ryanatkn/fuz': '^0.104.1',
+'@ryanatkn/moss': '^0.5.0',
 '@sveltejs/adapter-static': '^3.0.2',
 '@sveltejs/kit': '^2.5.17',
 '@sveltejs/package': '^2.3.2',
@@ -62,9 +62,9 @@ export const package_json = {
 '@typescript-eslint/parser': '^7.13.1',
 esbuild: '^0.20.2',
 eslint: '^8.57.0',
-'eslint-plugin-svelte': '^2.
+'eslint-plugin-svelte': '^2.41.0',
 svelte: '^5.0.0-next.164',
-'svelte-check': '^3.8.
+'svelte-check': '^3.8.2',
 typescript: '^5.5.2',
 uvu: '^0.5.6',
 },
@@ -256,7 +256,7 @@ export const package_json = {
 };
 export const src_json = {
 name: '@ryanatkn/gro',
-version: '0.121.1',
+version: '0.122.0',
 modules: {
 '.': {
 path: 'index.ts',
package/dist/path_constants.js
CHANGED
@@ -8,7 +8,7 @@ If any of these become customizable from SvelteKit or Gro's configs, move them t
 export const SOURCE_DIRNAME = 'src';
 export const GRO_DIRNAME = '.gro';
 export const GRO_DIST_PREFIX = 'dist_'; //
-export const SERVER_DIST_PATH = 'dist_server'; // TODO should all of these be `_PATH` or should this be `DIRNAME`?
+export const SERVER_DIST_PATH = 'dist_server'; // TODO should all of these be `_PATH` or should this be `DIRNAME`? also, add `_PLUGIN` to this name?
 export const GRO_DEV_DIRNAME = GRO_DIRNAME + '/dev';
 export const SOURCE_DIR = SOURCE_DIRNAME + '/';
 export const GRO_DIR = GRO_DIRNAME + '/';
package/dist/task.d.ts
CHANGED
@@ -41,10 +41,8 @@ export interface Found_Task {
 }
 export interface Found_Tasks {
 resolved_input_files: Resolved_Input_File[];
-resolved_input_files_by_input_path: Map<Input_Path, Resolved_Input_File[]>;
 resolved_input_files_by_root_dir: Map<Path_Id, Resolved_Input_File[]>;
 resolved_input_paths: Resolved_Input_Path[];
-resolved_input_paths_by_input_path: Map<Input_Path, Resolved_Input_Path[]>;
 input_paths: Input_Path[];
 task_root_dirs: Path_Id[];
 }
@@ -55,18 +53,15 @@ export type Find_Modules_Failure = {
 type: 'unmapped_input_paths';
 unmapped_input_paths: Input_Path[];
 resolved_input_paths: Resolved_Input_Path[];
-resolved_input_paths_by_input_path: Map<Input_Path, Resolved_Input_Path[]>;
 input_paths: Input_Path[];
 task_root_dirs: Path_Id[];
 reasons: string[];
 } | {
 type: 'input_directories_with_no_files';
-input_directories_with_no_files:
+input_directories_with_no_files: Input_Path[];
 resolved_input_files: Resolved_Input_File[];
-resolved_input_files_by_input_path: Map<Input_Path, Resolved_Input_File[]>;
 resolved_input_files_by_root_dir: Map<Path_Id, Resolved_Input_File[]>;
 resolved_input_paths: Resolved_Input_Path[];
-resolved_input_paths_by_input_path: Map<Input_Path, Resolved_Input_Path[]>;
 input_paths: Input_Path[];
 task_root_dirs: Path_Id[];
 reasons: string[];
package/dist/task.js
CHANGED
@@ -30,7 +30,7 @@ export class Task_Error extends Error {
 export const find_tasks = (input_paths, task_root_dirs, config, timings) => {
 // Check which extension variation works - if it's a directory, prefer others first!
 const timing_to_resolve_input_paths = timings?.start('resolve input paths');
-const { resolved_input_paths,
+const { resolved_input_paths, unmapped_input_paths } = resolve_input_paths(input_paths, task_root_dirs, TASK_FILE_SUFFIXES);
 timing_to_resolve_input_paths?.();
 // Error if any input path could not be mapped.
 if (unmapped_input_paths.length) {
@@ -39,7 +39,6 @@ export const find_tasks = (input_paths, task_root_dirs, config, timings) => {
 type: 'unmapped_input_paths',
 unmapped_input_paths,
 resolved_input_paths,
-resolved_input_paths_by_input_path,
 input_paths,
 task_root_dirs,
 reasons: unmapped_input_paths.map((input_path) => red(`Input path ${print_path(input_path)} cannot be mapped to a file or directory.`)),
@@ -47,7 +46,7 @@ export const find_tasks = (input_paths, task_root_dirs, config, timings) => {
 }
 // Find all of the files for any directories.
 const timing_to_resolve_input_files = timings?.start('resolve input files');
-const { resolved_input_files,
+const { resolved_input_files, resolved_input_files_by_root_dir, input_directories_with_no_files } = resolve_input_files(resolved_input_paths, (id) => search_fs(id, {
 filter: config.search_filters,
 file_filter: (p) => TASK_FILE_SUFFIXES.some((s) => p.endsWith(s)),
 }));
@@ -59,23 +58,19 @@ export const find_tasks = (input_paths, task_root_dirs, config, timings) => {
 type: 'input_directories_with_no_files',
 input_directories_with_no_files,
 resolved_input_files,
-resolved_input_files_by_input_path,
 resolved_input_files_by_root_dir,
 resolved_input_paths,
-resolved_input_paths_by_input_path,
 input_paths,
 task_root_dirs,
-reasons: input_directories_with_no_files.map((
+reasons: input_directories_with_no_files.map((input_path) => red(`Input directory contains no matching files: ${print_path(input_path)}`)),
 };
 }
 return {
 ok: true,
 value: {
 resolved_input_files,
-resolved_input_files_by_input_path,
 resolved_input_files_by_root_dir,
 resolved_input_paths,
-resolved_input_paths_by_input_path,
 input_paths,
 task_root_dirs,
 },
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "@ryanatkn/gro",
-"version": "0.121.1",
+"version": "0.122.0",
 "description": "task runner and toolkit extending SvelteKit",
 "motto": "generate, run, optimize",
 "icon": "🌰",
@@ -45,10 +45,10 @@
 "dist"
 ],
 "dependencies": {
-"@ryanatkn/belt": "^0.
+"@ryanatkn/belt": "^0.21.0",
 "chokidar": "^3.6.0",
 "dotenv": "^16.4.5",
-"es-module-lexer": "^1.5.
+"es-module-lexer": "^1.5.4",
 "kleur": "^4.1.5",
 "mri": "^1.2.0",
 "prettier": "^3.3.2",
@@ -65,8 +65,8 @@
 "@changesets/changelog-git": "^0.2.0",
 "@changesets/types": "^6.0.0",
 "@ryanatkn/eslint-config": "^0.1.3",
-"@ryanatkn/fuz": "^0.
-"@ryanatkn/moss": "^0.
+"@ryanatkn/fuz": "^0.104.1",
+"@ryanatkn/moss": "^0.5.0",
 "@sveltejs/adapter-static": "^3.0.2",
 "@sveltejs/kit": "^2.5.17",
 "@sveltejs/package": "^2.3.2",
@@ -77,9 +77,9 @@
 "@typescript-eslint/parser": "^7.13.1",
 "esbuild": "^0.20.2",
 "eslint": "^8.57.0",
-"eslint-plugin-svelte": "^2.
+"eslint-plugin-svelte": "^2.41.0",
 "svelte": "^5.0.0-next.164",
-"svelte-check": "^3.8.
+"svelte-check": "^3.8.2",
 "typescript": "^5.5.2",
 "uvu": "^0.5.6"
 },