@fuzdev/fuz_gitops 0.57.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +119 -0
- package/dist/ModulesDetail.svelte +180 -0
- package/dist/ModulesDetail.svelte.d.ts +10 -0
- package/dist/ModulesDetail.svelte.d.ts.map +1 -0
- package/dist/ModulesNav.svelte +43 -0
- package/dist/ModulesNav.svelte.d.ts +11 -0
- package/dist/ModulesNav.svelte.d.ts.map +1 -0
- package/dist/ModulesPage.svelte +50 -0
- package/dist/ModulesPage.svelte.d.ts +9 -0
- package/dist/ModulesPage.svelte.d.ts.map +1 -0
- package/dist/PageFooter.svelte +15 -0
- package/dist/PageFooter.svelte.d.ts +19 -0
- package/dist/PageFooter.svelte.d.ts.map +1 -0
- package/dist/PageHeader.svelte +35 -0
- package/dist/PageHeader.svelte.d.ts +19 -0
- package/dist/PageHeader.svelte.d.ts.map +1 -0
- package/dist/PullRequestsDetail.svelte +53 -0
- package/dist/PullRequestsDetail.svelte.d.ts +10 -0
- package/dist/PullRequestsDetail.svelte.d.ts.map +1 -0
- package/dist/PullRequestsPage.svelte +47 -0
- package/dist/PullRequestsPage.svelte.d.ts +11 -0
- package/dist/PullRequestsPage.svelte.d.ts.map +1 -0
- package/dist/ReposTable.svelte +189 -0
- package/dist/ReposTable.svelte.d.ts +9 -0
- package/dist/ReposTable.svelte.d.ts.map +1 -0
- package/dist/ReposTree.svelte +88 -0
- package/dist/ReposTree.svelte.d.ts +11 -0
- package/dist/ReposTree.svelte.d.ts.map +1 -0
- package/dist/ReposTreeNav.svelte +55 -0
- package/dist/ReposTreeNav.svelte.d.ts +11 -0
- package/dist/ReposTreeNav.svelte.d.ts.map +1 -0
- package/dist/TablePage.svelte +46 -0
- package/dist/TablePage.svelte.d.ts +9 -0
- package/dist/TablePage.svelte.d.ts.map +1 -0
- package/dist/TreeItemPage.svelte +75 -0
- package/dist/TreeItemPage.svelte.d.ts +10 -0
- package/dist/TreeItemPage.svelte.d.ts.map +1 -0
- package/dist/TreePage.svelte +64 -0
- package/dist/TreePage.svelte.d.ts +9 -0
- package/dist/TreePage.svelte.d.ts.map +1 -0
- package/dist/changeset_generator.d.ts +38 -0
- package/dist/changeset_generator.d.ts.map +1 -0
- package/dist/changeset_generator.js +110 -0
- package/dist/changeset_reader.d.ts +75 -0
- package/dist/changeset_reader.d.ts.map +1 -0
- package/dist/changeset_reader.js +167 -0
- package/dist/constants.d.ts +9 -0
- package/dist/constants.d.ts.map +1 -0
- package/dist/constants.js +8 -0
- package/dist/dependency_graph.d.ts +120 -0
- package/dist/dependency_graph.d.ts.map +1 -0
- package/dist/dependency_graph.js +341 -0
- package/dist/dependency_updater.d.ts +46 -0
- package/dist/dependency_updater.d.ts.map +1 -0
- package/dist/dependency_updater.js +213 -0
- package/dist/fetch_repo_data.d.ts +19 -0
- package/dist/fetch_repo_data.d.ts.map +1 -0
- package/dist/fetch_repo_data.js +49 -0
- package/dist/fs_fetch_value_cache.d.ts +24 -0
- package/dist/fs_fetch_value_cache.d.ts.map +1 -0
- package/dist/fs_fetch_value_cache.js +61 -0
- package/dist/git_operations.d.ts +54 -0
- package/dist/git_operations.d.ts.map +1 -0
- package/dist/git_operations.js +144 -0
- package/dist/github.d.ts +91 -0
- package/dist/github.d.ts.map +1 -0
- package/dist/github.js +94 -0
- package/dist/github_helpers.d.ts +10 -0
- package/dist/github_helpers.d.ts.map +1 -0
- package/dist/github_helpers.js +13 -0
- package/dist/gitops_analyze.task.d.ts +17 -0
- package/dist/gitops_analyze.task.d.ts.map +1 -0
- package/dist/gitops_analyze.task.js +188 -0
- package/dist/gitops_config.d.ts +56 -0
- package/dist/gitops_config.d.ts.map +1 -0
- package/dist/gitops_config.js +63 -0
- package/dist/gitops_plan.task.d.ts +28 -0
- package/dist/gitops_plan.task.d.ts.map +1 -0
- package/dist/gitops_plan.task.js +217 -0
- package/dist/gitops_publish.task.d.ts +29 -0
- package/dist/gitops_publish.task.d.ts.map +1 -0
- package/dist/gitops_publish.task.js +178 -0
- package/dist/gitops_sync.task.d.ts +18 -0
- package/dist/gitops_sync.task.d.ts.map +1 -0
- package/dist/gitops_sync.task.js +95 -0
- package/dist/gitops_task_helpers.d.ts +63 -0
- package/dist/gitops_task_helpers.d.ts.map +1 -0
- package/dist/gitops_task_helpers.js +84 -0
- package/dist/gitops_validate.task.d.ts +12 -0
- package/dist/gitops_validate.task.d.ts.map +1 -0
- package/dist/gitops_validate.task.js +210 -0
- package/dist/graph_validation.d.ts +39 -0
- package/dist/graph_validation.d.ts.map +1 -0
- package/dist/graph_validation.js +79 -0
- package/dist/local_repo.d.ts +84 -0
- package/dist/local_repo.d.ts.map +1 -0
- package/dist/local_repo.js +213 -0
- package/dist/log_helpers.d.ts +43 -0
- package/dist/log_helpers.d.ts.map +1 -0
- package/dist/log_helpers.js +98 -0
- package/dist/multi_repo_publisher.d.ts +34 -0
- package/dist/multi_repo_publisher.d.ts.map +1 -0
- package/dist/multi_repo_publisher.js +364 -0
- package/dist/npm_install_helpers.d.ts +23 -0
- package/dist/npm_install_helpers.d.ts.map +1 -0
- package/dist/npm_install_helpers.js +60 -0
- package/dist/npm_registry.d.ts +46 -0
- package/dist/npm_registry.d.ts.map +1 -0
- package/dist/npm_registry.js +96 -0
- package/dist/operations.d.ts +409 -0
- package/dist/operations.d.ts.map +1 -0
- package/dist/operations.js +34 -0
- package/dist/operations_defaults.d.ts +19 -0
- package/dist/operations_defaults.d.ts.map +1 -0
- package/dist/operations_defaults.js +279 -0
- package/dist/output_helpers.d.ts +27 -0
- package/dist/output_helpers.d.ts.map +1 -0
- package/dist/output_helpers.js +39 -0
- package/dist/paths.d.ts +11 -0
- package/dist/paths.d.ts.map +1 -0
- package/dist/paths.js +10 -0
- package/dist/preflight_checks.d.ts +47 -0
- package/dist/preflight_checks.d.ts.map +1 -0
- package/dist/preflight_checks.js +181 -0
- package/dist/publishing_plan.d.ts +100 -0
- package/dist/publishing_plan.d.ts.map +1 -0
- package/dist/publishing_plan.js +353 -0
- package/dist/publishing_plan_helpers.d.ts +30 -0
- package/dist/publishing_plan_helpers.d.ts.map +1 -0
- package/dist/publishing_plan_helpers.js +112 -0
- package/dist/publishing_plan_logging.d.ts +18 -0
- package/dist/publishing_plan_logging.d.ts.map +1 -0
- package/dist/publishing_plan_logging.js +342 -0
- package/dist/repo.svelte.d.ts +52 -0
- package/dist/repo.svelte.d.ts.map +1 -0
- package/dist/repo.svelte.js +70 -0
- package/dist/repo_ops.d.ts +57 -0
- package/dist/repo_ops.d.ts.map +1 -0
- package/dist/repo_ops.js +167 -0
- package/dist/resolved_gitops_config.d.ts +9 -0
- package/dist/resolved_gitops_config.d.ts.map +1 -0
- package/dist/resolved_gitops_config.js +12 -0
- package/dist/semver.d.ts +24 -0
- package/dist/semver.d.ts.map +1 -0
- package/dist/semver.js +140 -0
- package/dist/serialization_types.d.ts +57 -0
- package/dist/serialization_types.d.ts.map +1 -0
- package/dist/serialization_types.js +40 -0
- package/dist/version_utils.d.ts +48 -0
- package/dist/version_utils.d.ts.map +1 -0
- package/dist/version_utils.js +125 -0
- package/package.json +107 -0
- package/src/lib/changeset_generator.ts +162 -0
- package/src/lib/changeset_reader.ts +218 -0
- package/src/lib/constants.ts +8 -0
- package/src/lib/dependency_graph.ts +423 -0
- package/src/lib/dependency_updater.ts +297 -0
- package/src/lib/fetch_repo_data.ts +64 -0
- package/src/lib/fs_fetch_value_cache.ts +75 -0
- package/src/lib/git_operations.ts +208 -0
- package/src/lib/github.ts +128 -0
- package/src/lib/github_helpers.ts +31 -0
- package/src/lib/gitops_analyze.task.ts +261 -0
- package/src/lib/gitops_config.ts +123 -0
- package/src/lib/gitops_plan.task.ts +272 -0
- package/src/lib/gitops_publish.task.ts +227 -0
- package/src/lib/gitops_sync.task.ts +109 -0
- package/src/lib/gitops_task_helpers.ts +126 -0
- package/src/lib/gitops_validate.task.ts +248 -0
- package/src/lib/graph_validation.ts +109 -0
- package/src/lib/local_repo.ts +359 -0
- package/src/lib/log_helpers.ts +147 -0
- package/src/lib/multi_repo_publisher.ts +464 -0
- package/src/lib/npm_install_helpers.ts +85 -0
- package/src/lib/npm_registry.ts +143 -0
- package/src/lib/operations.ts +334 -0
- package/src/lib/operations_defaults.ts +335 -0
- package/src/lib/output_helpers.ts +64 -0
- package/src/lib/paths.ts +11 -0
- package/src/lib/preflight_checks.ts +269 -0
- package/src/lib/publishing_plan.ts +531 -0
- package/src/lib/publishing_plan_helpers.ts +145 -0
- package/src/lib/publishing_plan_logging.ts +470 -0
- package/src/lib/repo.svelte.ts +95 -0
- package/src/lib/repo_ops.ts +213 -0
- package/src/lib/resolved_gitops_config.ts +27 -0
- package/src/lib/semver.ts +166 -0
- package/src/lib/serialization_types.ts +90 -0
- package/src/lib/version_utils.ts +150 -0
|
@@ -0,0 +1,341 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Dependency graph data structure and algorithms for multi-repo publishing.
|
|
3
|
+
*
|
|
4
|
+
* Provides `DependencyGraph` class with topological sort and cycle detection.
|
|
5
|
+
* For validation workflow and publishing order computation, see `graph_validation.ts`.
|
|
6
|
+
*/
|
|
7
|
+
import { EMPTY_OBJECT } from '@fuzdev/fuz_util/object.js';
|
|
8
|
+
/**
 * Kinds of package.json dependency, from strongest to weakest constraint.
 * Stored on each graph edge spec as `{type, version}`.
 */
export const DEPENDENCY_TYPE = {
    PROD: 'prod', // `dependencies`
    PEER: 'peer', // `peerDependencies`
    DEV: 'dev', // `devDependencies`
};
|
|
13
|
+
export class DependencyGraph {
|
|
14
|
+
nodes;
|
|
15
|
+
edges; // pkg -> dependents
|
|
16
|
+
constructor() {
|
|
17
|
+
this.nodes = new Map();
|
|
18
|
+
this.edges = new Map();
|
|
19
|
+
}
|
|
20
|
+
init_from_repos(repos) {
|
|
21
|
+
// First pass: create nodes
|
|
22
|
+
for (const repo of repos) {
|
|
23
|
+
const { library } = repo;
|
|
24
|
+
const node = {
|
|
25
|
+
name: library.name,
|
|
26
|
+
version: library.package_json.version || '0.0.0',
|
|
27
|
+
repo,
|
|
28
|
+
dependencies: new Map(),
|
|
29
|
+
dependents: new Set(),
|
|
30
|
+
publishable: !!library.package_json.private === false, // eslint-disable-line @typescript-eslint/no-unnecessary-boolean-literal-compare
|
|
31
|
+
};
|
|
32
|
+
// Extract dependencies
|
|
33
|
+
const deps = library.package_json.dependencies || EMPTY_OBJECT;
|
|
34
|
+
const dev_deps = library.package_json.devDependencies || EMPTY_OBJECT;
|
|
35
|
+
const peer_deps = library.package_json.peerDependencies || EMPTY_OBJECT;
|
|
36
|
+
// Add dependencies, prioritizing prod/peer over dev
|
|
37
|
+
// (if a package appears in multiple dep types, use the stronger constraint)
|
|
38
|
+
for (const [name, version] of Object.entries(deps)) {
|
|
39
|
+
node.dependencies.set(name, { type: DEPENDENCY_TYPE.PROD, version });
|
|
40
|
+
}
|
|
41
|
+
for (const [name, version] of Object.entries(peer_deps)) {
|
|
42
|
+
node.dependencies.set(name, { type: DEPENDENCY_TYPE.PEER, version });
|
|
43
|
+
}
|
|
44
|
+
for (const [name, version] of Object.entries(dev_deps)) {
|
|
45
|
+
// Only add dev deps if not already present as prod/peer
|
|
46
|
+
if (!node.dependencies.has(name)) {
|
|
47
|
+
node.dependencies.set(name, { type: DEPENDENCY_TYPE.DEV, version });
|
|
48
|
+
}
|
|
49
|
+
}
|
|
50
|
+
this.nodes.set(library.name, node);
|
|
51
|
+
this.edges.set(library.name, new Set());
|
|
52
|
+
}
|
|
53
|
+
// Second pass: build edges (dependents)
|
|
54
|
+
for (const node of this.nodes.values()) {
|
|
55
|
+
for (const [dep_name] of node.dependencies) {
|
|
56
|
+
if (this.nodes.has(dep_name)) {
|
|
57
|
+
// Internal dependency
|
|
58
|
+
const dep_node = this.nodes.get(dep_name);
|
|
59
|
+
dep_node.dependents.add(node.name);
|
|
60
|
+
this.edges.get(dep_name).add(node.name);
|
|
61
|
+
}
|
|
62
|
+
}
|
|
63
|
+
}
|
|
64
|
+
}
|
|
65
|
+
get_node(name) {
|
|
66
|
+
return this.nodes.get(name);
|
|
67
|
+
}
|
|
68
|
+
get_dependents(name) {
|
|
69
|
+
return this.edges.get(name) || new Set();
|
|
70
|
+
}
|
|
71
|
+
get_dependencies(name) {
|
|
72
|
+
const node = this.nodes.get(name);
|
|
73
|
+
return node ? node.dependencies : new Map();
|
|
74
|
+
}
|
|
75
|
+
/**
|
|
76
|
+
* Computes topological sort order for dependency graph.
|
|
77
|
+
*
|
|
78
|
+
* Uses Kahn's algorithm with alphabetical ordering within tiers for
|
|
79
|
+
* deterministic results. Throws if cycles detected.
|
|
80
|
+
*
|
|
81
|
+
* @param exclude_dev if true, excludes dev dependencies to break cycles.
|
|
82
|
+
* Publishing uses exclude_dev=true to handle circular dev deps.
|
|
83
|
+
* @returns array of package names in dependency order (dependencies before dependents)
|
|
84
|
+
* @throws {Error} if circular dependencies detected in included dependency types
|
|
85
|
+
*/
|
|
86
|
+
topological_sort(exclude_dev = false) {
|
|
87
|
+
const visited = new Set();
|
|
88
|
+
const result = [];
|
|
89
|
+
// Count incoming edges for each node
|
|
90
|
+
const in_degree = new Map();
|
|
91
|
+
for (const name of this.nodes.keys()) {
|
|
92
|
+
in_degree.set(name, 0);
|
|
93
|
+
}
|
|
94
|
+
for (const node of this.nodes.values()) {
|
|
95
|
+
for (const [dep_name, spec] of node.dependencies) {
|
|
96
|
+
// Skip dev dependencies if requested
|
|
97
|
+
if (exclude_dev && spec.type === DEPENDENCY_TYPE.DEV)
|
|
98
|
+
continue;
|
|
99
|
+
if (this.nodes.has(dep_name)) {
|
|
100
|
+
in_degree.set(node.name, in_degree.get(node.name) + 1);
|
|
101
|
+
}
|
|
102
|
+
}
|
|
103
|
+
}
|
|
104
|
+
// Start with nodes that have no dependencies
|
|
105
|
+
const queue = [];
|
|
106
|
+
for (const [name, degree] of in_degree) {
|
|
107
|
+
if (degree === 0) {
|
|
108
|
+
queue.push(name);
|
|
109
|
+
}
|
|
110
|
+
}
|
|
111
|
+
// Sort initial queue alphabetically for deterministic ordering within tier
|
|
112
|
+
queue.sort();
|
|
113
|
+
// Process nodes
|
|
114
|
+
while (queue.length > 0) {
|
|
115
|
+
const name = queue.shift();
|
|
116
|
+
result.push(name);
|
|
117
|
+
visited.add(name);
|
|
118
|
+
// Reduce in-degree for dependents
|
|
119
|
+
const node = this.nodes.get(name);
|
|
120
|
+
if (node) {
|
|
121
|
+
// Find packages that depend on this one
|
|
122
|
+
// Sort nodes to ensure deterministic iteration order
|
|
123
|
+
const sorted_nodes = Array.from(this.nodes.values()).sort((a, b) => a.name.localeCompare(b.name));
|
|
124
|
+
for (const other_node of sorted_nodes) {
|
|
125
|
+
for (const [dep_name, spec] of other_node.dependencies) {
|
|
126
|
+
// Skip dev dependencies if requested
|
|
127
|
+
if (exclude_dev && spec.type === DEPENDENCY_TYPE.DEV)
|
|
128
|
+
continue;
|
|
129
|
+
if (dep_name === name) {
|
|
130
|
+
const new_degree = in_degree.get(other_node.name) - 1;
|
|
131
|
+
in_degree.set(other_node.name, new_degree);
|
|
132
|
+
if (new_degree === 0) {
|
|
133
|
+
queue.push(other_node.name);
|
|
134
|
+
}
|
|
135
|
+
}
|
|
136
|
+
}
|
|
137
|
+
}
|
|
138
|
+
}
|
|
139
|
+
}
|
|
140
|
+
// Check for cycles
|
|
141
|
+
if (result.length !== this.nodes.size) {
|
|
142
|
+
const unvisited = Array.from(this.nodes.keys()).filter((n) => !visited.has(n));
|
|
143
|
+
throw new Error(`Circular dependency detected involving: ${unvisited.join(', ')}`);
|
|
144
|
+
}
|
|
145
|
+
return result;
|
|
146
|
+
}
|
|
147
|
+
detect_cycles() {
|
|
148
|
+
const cycles = [];
|
|
149
|
+
const visited = new Set();
|
|
150
|
+
const rec_stack = new Set();
|
|
151
|
+
const dfs = (name, path) => {
|
|
152
|
+
visited.add(name);
|
|
153
|
+
rec_stack.add(name);
|
|
154
|
+
path.push(name);
|
|
155
|
+
const node = this.nodes.get(name);
|
|
156
|
+
if (node) {
|
|
157
|
+
for (const [dep_name] of node.dependencies) {
|
|
158
|
+
if (this.nodes.has(dep_name)) {
|
|
159
|
+
if (!visited.has(dep_name)) {
|
|
160
|
+
dfs(dep_name, [...path]);
|
|
161
|
+
}
|
|
162
|
+
else if (rec_stack.has(dep_name)) {
|
|
163
|
+
// Found a cycle
|
|
164
|
+
const cycle_start = path.indexOf(dep_name);
|
|
165
|
+
cycles.push(path.slice(cycle_start).concat(dep_name));
|
|
166
|
+
}
|
|
167
|
+
}
|
|
168
|
+
}
|
|
169
|
+
}
|
|
170
|
+
rec_stack.delete(name);
|
|
171
|
+
};
|
|
172
|
+
for (const name of this.nodes.keys()) {
|
|
173
|
+
if (!visited.has(name)) {
|
|
174
|
+
dfs(name, []);
|
|
175
|
+
}
|
|
176
|
+
}
|
|
177
|
+
return cycles;
|
|
178
|
+
}
|
|
179
|
+
/**
|
|
180
|
+
* Detects circular dependencies, categorized by severity.
|
|
181
|
+
*
|
|
182
|
+
* Production/peer cycles prevent publishing (impossible to order packages).
|
|
183
|
+
* Dev cycles are normal (test utils, shared configs) and safely ignored.
|
|
184
|
+
*
|
|
185
|
+
* Uses DFS traversal with recursion stack to identify back edges.
|
|
186
|
+
* Deduplicates cycles using sorted cycle keys.
|
|
187
|
+
*
|
|
188
|
+
* @returns object with production_cycles (errors) and dev_cycles (info)
|
|
189
|
+
*/
|
|
190
|
+
detect_cycles_by_type() {
|
|
191
|
+
const production_cycles = [];
|
|
192
|
+
const dev_cycles = [];
|
|
193
|
+
const visited_prod = new Set();
|
|
194
|
+
const visited_dev = new Set();
|
|
195
|
+
const rec_stack_prod = new Set();
|
|
196
|
+
const rec_stack_dev = new Set();
|
|
197
|
+
// DFS for production/peer dependencies only
|
|
198
|
+
const dfs_prod = (name, path) => {
|
|
199
|
+
visited_prod.add(name);
|
|
200
|
+
rec_stack_prod.add(name);
|
|
201
|
+
path.push(name);
|
|
202
|
+
const node = this.nodes.get(name);
|
|
203
|
+
if (node) {
|
|
204
|
+
for (const [dep_name, spec] of node.dependencies) {
|
|
205
|
+
// Skip dev dependencies
|
|
206
|
+
if (spec.type === DEPENDENCY_TYPE.DEV)
|
|
207
|
+
continue;
|
|
208
|
+
if (this.nodes.has(dep_name)) {
|
|
209
|
+
if (!visited_prod.has(dep_name)) {
|
|
210
|
+
dfs_prod(dep_name, [...path]);
|
|
211
|
+
}
|
|
212
|
+
else if (rec_stack_prod.has(dep_name)) {
|
|
213
|
+
// Found a production cycle
|
|
214
|
+
const cycle_start = path.indexOf(dep_name);
|
|
215
|
+
const cycle = path.slice(cycle_start).concat(dep_name);
|
|
216
|
+
// Check if this cycle is unique
|
|
217
|
+
const cycle_key = [...cycle].sort().join(',');
|
|
218
|
+
const exists = production_cycles.some((c) => [...c].sort().join(',') === cycle_key);
|
|
219
|
+
if (!exists) {
|
|
220
|
+
production_cycles.push(cycle);
|
|
221
|
+
}
|
|
222
|
+
}
|
|
223
|
+
}
|
|
224
|
+
}
|
|
225
|
+
}
|
|
226
|
+
rec_stack_prod.delete(name);
|
|
227
|
+
};
|
|
228
|
+
// DFS for dev dependencies only
|
|
229
|
+
const dfs_dev = (name, path) => {
|
|
230
|
+
visited_dev.add(name);
|
|
231
|
+
rec_stack_dev.add(name);
|
|
232
|
+
path.push(name);
|
|
233
|
+
const node = this.nodes.get(name);
|
|
234
|
+
if (node) {
|
|
235
|
+
for (const [dep_name, spec] of node.dependencies) {
|
|
236
|
+
// Only check dev dependencies
|
|
237
|
+
if (spec.type !== DEPENDENCY_TYPE.DEV)
|
|
238
|
+
continue;
|
|
239
|
+
if (this.nodes.has(dep_name)) {
|
|
240
|
+
if (!visited_dev.has(dep_name)) {
|
|
241
|
+
dfs_dev(dep_name, [...path]);
|
|
242
|
+
}
|
|
243
|
+
else if (rec_stack_dev.has(dep_name)) {
|
|
244
|
+
// Found a dev cycle
|
|
245
|
+
const cycle_start = path.indexOf(dep_name);
|
|
246
|
+
const cycle = path.slice(cycle_start).concat(dep_name);
|
|
247
|
+
// Check if this cycle is unique
|
|
248
|
+
const cycle_key = [...cycle].sort().join(',');
|
|
249
|
+
const exists = dev_cycles.some((c) => [...c].sort().join(',') === cycle_key);
|
|
250
|
+
if (!exists) {
|
|
251
|
+
dev_cycles.push(cycle);
|
|
252
|
+
}
|
|
253
|
+
}
|
|
254
|
+
}
|
|
255
|
+
}
|
|
256
|
+
}
|
|
257
|
+
rec_stack_dev.delete(name);
|
|
258
|
+
};
|
|
259
|
+
// Check for production/peer cycles
|
|
260
|
+
for (const name of this.nodes.keys()) {
|
|
261
|
+
if (!visited_prod.has(name)) {
|
|
262
|
+
dfs_prod(name, []);
|
|
263
|
+
}
|
|
264
|
+
}
|
|
265
|
+
// Check for dev cycles
|
|
266
|
+
for (const name of this.nodes.keys()) {
|
|
267
|
+
if (!visited_dev.has(name)) {
|
|
268
|
+
dfs_dev(name, []);
|
|
269
|
+
}
|
|
270
|
+
}
|
|
271
|
+
return { production_cycles, dev_cycles };
|
|
272
|
+
}
|
|
273
|
+
toJSON() {
|
|
274
|
+
const nodes = Array.from(this.nodes.values()).map((node) => ({
|
|
275
|
+
name: node.name,
|
|
276
|
+
version: node.version,
|
|
277
|
+
dependencies: Array.from(node.dependencies.entries()).map(([name, spec]) => ({
|
|
278
|
+
name,
|
|
279
|
+
spec,
|
|
280
|
+
})),
|
|
281
|
+
dependents: Array.from(node.dependents),
|
|
282
|
+
publishable: node.publishable,
|
|
283
|
+
}));
|
|
284
|
+
const edges = [];
|
|
285
|
+
for (const [from, tos] of this.edges) {
|
|
286
|
+
for (const to of tos) {
|
|
287
|
+
edges.push({ from, to });
|
|
288
|
+
}
|
|
289
|
+
}
|
|
290
|
+
return { nodes, edges };
|
|
291
|
+
}
|
|
292
|
+
}
|
|
293
|
+
/**
 * Builder for creating and analyzing dependency graphs.
 */
export class DependencyGraphBuilder {
    /**
     * Constructs dependency graph from local repos.
     *
     * Delegates to `DependencyGraph.init_from_repos`, which runs a two-pass
     * algorithm (nodes first, then dependent edges) and prioritizes prod/peer
     * deps over dev deps when a package appears under multiple dependency
     * types (stronger constraint wins).
     *
     * @returns fully initialized dependency graph with all nodes and edges
     */
    build_from_repos(repos) {
        const result = new DependencyGraph();
        result.init_from_repos(repos);
        return result;
    }
    /**
     * Computes publishing order using topological sort with dev deps excluded.
     *
     * Excluding dev dependencies breaks circular dev dependency cycles while
     * preserving production/peer ordering, which permits patterns like shared
     * test utilities that depend on each other for development.
     *
     * @returns package names in safe publishing order (dependencies before dependents)
     * @throws {Error} if production/peer cycles detected (cannot be resolved by exclusion)
     */
    compute_publishing_order(graph) {
        const exclude_dev = true; // dev-only cycles must not block publishing
        return graph.topological_sort(exclude_dev);
    }
    /**
     * Analyzes the graph for potential problems.
     *
     * @returns cycles grouped by severity, plus wildcard ('*') version specs
     * and peer dependencies that are not nodes in the graph (external peers)
     */
    analyze(graph) {
        const { production_cycles, dev_cycles } = graph.detect_cycles_by_type();
        const wildcard_deps = [];
        const missing_peers = [];
        for (const { name: pkg, dependencies } of graph.nodes.values()) {
            for (const [dep, spec] of dependencies) {
                if (spec.version === '*') {
                    wildcard_deps.push({ pkg, dep, version: spec.version });
                }
                // A peer dep with no node in the graph is satisfied externally.
                const is_external_peer = spec.type === DEPENDENCY_TYPE.PEER && !graph.nodes.has(dep);
                if (is_external_peer) {
                    missing_peers.push({ pkg, dep });
                }
            }
        }
        return { production_cycles, dev_cycles, wildcard_deps, missing_peers };
    }
}
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
import type { Logger } from '@fuzdev/fuz_util/log.js';
|
|
2
|
+
import type { LocalRepo } from './local_repo.js';
|
|
3
|
+
import type { PublishedVersion } from './multi_repo_publisher.js';
|
|
4
|
+
import type { GitOperations, FsOperations } from './operations.js';
|
|
5
|
+
/** Range-prefix policy for rewritten dependency versions: '' | '^' | '~' | '>=' respectively. */
export type VersionStrategy = 'exact' | 'caret' | 'tilde' | 'gte';
/** Options for `update_package_json`. */
export interface UpdatePackageJsonOptions {
    /** Range-prefix policy; defaults to 'caret'. */
    strategy?: VersionStrategy;
    /** Newly published versions; when provided, an auto-changeset is created for the updates. */
    published_versions?: Map<string, PublishedVersion>;
    /** Optional logger for progress output. */
    log?: Logger;
    /** Git operations; defaults to the real implementations. */
    git_ops?: GitOperations;
    /** Filesystem operations; defaults to the real implementations. */
    fs_ops?: FsOperations;
}
/**
 * Updates package.json dependencies and creates changeset if needed.
 *
 * Workflow:
 * 1. Updates all dependency types (dependencies, devDependencies, peerDependencies)
 * 2. Writes updated package.json with tabs formatting
 * 3. Creates auto-changeset if published_versions provided (for transitive updates)
 * 4. Commits both package.json and changeset with standard message
 *
 * Uses version strategy to determine prefix (exact, caret, tilde) while preserving
 * existing prefixes when possible.
 *
 * @throws {Error} if file operations or git operations fail
 */
export declare const update_package_json: (repo: LocalRepo, updates: Map<string, string>, options?: UpdatePackageJsonOptions) => Promise<void>;
/** Options for `update_all_repos`. */
export interface UpdateAllReposOptions {
    /** Range-prefix policy; defaults to 'caret'. */
    strategy?: VersionStrategy;
    /** Optional logger for progress output. */
    log?: Logger;
    /** Git operations; defaults to the real implementations. */
    git_ops?: GitOperations;
    /** Filesystem operations; defaults to the real implementations. */
    fs_ops?: FsOperations;
}
/**
 * Applies `update_package_json` across `repos` for each dependency that
 * appears in `published` (name -> new version), counting successes and
 * collecting per-repo failures rather than aborting the whole batch.
 */
export declare const update_all_repos: (repos: Array<LocalRepo>, published: Map<string, string>, options?: UpdateAllReposOptions) => Promise<{
    updated: number;
    failed: Array<{
        repo: string;
        error: Error;
    }>;
}>;
/**
 * Computes which of `repo`'s dependencies would change given `published`
 * versions, keyed by dependency name with current/new versions and the
 * dependency type each was found under.
 * NOTE(review): implementation not visible in this file — confirm exact
 * matching semantics against `src/lib/dependency_updater.ts`.
 */
export declare const find_updates_needed: (repo: LocalRepo, published: Map<string, string>) => Map<string, {
    current: string;
    new: string;
    type: "dependencies" | "devDependencies" | "peerDependencies";
}>;
//# sourceMappingURL=dependency_updater.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"dependency_updater.d.ts","sourceRoot":"../src/lib/","sources":["../src/lib/dependency_updater.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,MAAM,EAAC,MAAM,yBAAyB,CAAC;AAGpD,OAAO,KAAK,EAAC,SAAS,EAAC,MAAM,iBAAiB,CAAC;AAC/C,OAAO,KAAK,EAAC,gBAAgB,EAAC,MAAM,2BAA2B,CAAC;AAMhE,OAAO,KAAK,EAAC,aAAa,EAAE,YAAY,EAAC,MAAM,iBAAiB,CAAC;AAGjE,MAAM,MAAM,eAAe,GAAG,OAAO,GAAG,OAAO,GAAG,OAAO,GAAG,KAAK,CAAC;AAElE,MAAM,WAAW,wBAAwB;IACxC,QAAQ,CAAC,EAAE,eAAe,CAAC;IAC3B,kBAAkB,CAAC,EAAE,GAAG,CAAC,MAAM,EAAE,gBAAgB,CAAC,CAAC;IACnD,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,OAAO,CAAC,EAAE,aAAa,CAAC;IACxB,MAAM,CAAC,EAAE,YAAY,CAAC;CACtB;AAED;;;;;;;;;;;;;GAaG;AACH,eAAO,MAAM,mBAAmB,GAC/B,MAAM,SAAS,EACf,SAAS,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,EAC5B,UAAS,wBAA6B,KACpC,OAAO,CAAC,IAAI,CAkId,CAAC;AAEF,MAAM,WAAW,qBAAqB;IACrC,QAAQ,CAAC,EAAE,eAAe,CAAC;IAC3B,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,OAAO,CAAC,EAAE,aAAa,CAAC;IACxB,MAAM,CAAC,EAAE,YAAY,CAAC;CACtB;AAED,eAAO,MAAM,gBAAgB,GAC5B,OAAO,KAAK,CAAC,SAAS,CAAC,EACvB,WAAW,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,EAC9B,UAAS,qBAA0B,KACjC,OAAO,CAAC;IAAC,OAAO,EAAE,MAAM,CAAC;IAAC,MAAM,EAAE,KAAK,CAAC;QAAC,IAAI,EAAE,MAAM,CAAC;QAAC,KAAK,EAAE,KAAK,CAAA;KAAC,CAAC,CAAA;CAAC,CAuDxE,CAAC;AAEF,eAAO,MAAM,mBAAmB,GAC/B,MAAM,SAAS,EACf,WAAW,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,KAC5B,GAAG,CACL,MAAM,EACN;IAAC,OAAO,EAAE,MAAM,CAAC;IAAC,GAAG,EAAE,MAAM,CAAC;IAAC,IAAI,EAAE,cAAc,GAAG,iBAAiB,GAAG,kBAAkB,CAAA;CAAC,CAkD7F,CAAC"}
|
|
@@ -0,0 +1,213 @@
|
|
|
1
|
+
import { join } from 'node:path';
|
|
2
|
+
import { create_changeset_for_dependency_updates, create_dependency_updates, } from './changeset_generator.js';
|
|
3
|
+
import { needs_update, get_update_prefix } from './version_utils.js';
|
|
4
|
+
import { default_git_operations, default_fs_operations } from './operations_defaults.js';
|
|
5
|
+
/**
 * Updates package.json dependencies and creates changeset if needed.
 *
 * Workflow:
 * 1. Updates all dependency types (dependencies, devDependencies, peerDependencies)
 * 2. Writes updated package.json with tabs formatting
 * 3. Creates auto-changeset if published_versions provided (for transitive updates)
 * 4. Commits both package.json and changeset with standard message
 *
 * Uses version strategy to determine prefix (exact, caret, tilde) while preserving
 * existing prefixes when possible.
 *
 * @throws {Error} if file operations or git operations fail
 */
export const update_package_json = async (repo, updates, options = {}) => {
    const { strategy = 'caret', published_versions, log, git_ops = default_git_operations, fs_ops = default_fs_operations, } = options;
    if (updates.size === 0)
        return;
    const package_json_path = join(repo.repo_dir, 'package.json');
    // Read current package.json
    const content_result = await fs_ops.readFile({ path: package_json_path, encoding: 'utf8' });
    if (!content_result.ok) {
        throw new Error(`Failed to read package.json: ${content_result.message}`);
    }
    const package_json = JSON.parse(content_result.value);
    // Map the version strategy to its semver range prefix.
    const prefix = strategy === 'exact' ? '' : strategy === 'caret' ? '^' : strategy === 'gte' ? '>=' : '~';
    // Applies `updates` to one dependency record in place, preserving each
    // entry's existing prefix when possible; returns true if anything changed.
    const apply_updates = (dep_record) => {
        if (!dep_record)
            return false;
        let changed = false;
        for (const [name, version] of updates) {
            if (name in dep_record) {
                dep_record[name] = get_update_prefix(dep_record[name], prefix) + version;
                changed = true;
            }
        }
        return changed;
    };
    // Update all three dependency types (map, not some, so every record is processed).
    const updated = [
        package_json.dependencies,
        package_json.devDependencies,
        package_json.peerDependencies,
    ].map(apply_updates).includes(true);
    if (!updated)
        return;
    // Write updated package.json (tabs formatting, trailing newline)
    const write_result = await fs_ops.writeFile({
        path: package_json_path,
        content: JSON.stringify(package_json, null, '\t') + '\n',
    });
    if (!write_result.ok) {
        throw new Error(`Failed to write package.json: ${write_result.message}`);
    }
    // Create changeset if we have published version info
    if (published_versions && published_versions.size > 0) {
        // Collect the current version of every dependency being updated,
        // across all three dependency maps on the repo.
        const all_deps = new Map();
        const collect = (source) => {
            if (!source)
                return;
            for (const [name, version] of source) {
                if (updates.has(name)) {
                    all_deps.set(name, version);
                }
            }
        };
        collect(repo.dependencies);
        collect(repo.dev_dependencies);
        collect(repo.peer_dependencies);
        const dependency_updates = create_dependency_updates(all_deps, published_versions);
        if (dependency_updates.length > 0) {
            const changeset_path = await create_changeset_for_dependency_updates(repo, dependency_updates, { log });
            // Stage the changeset so it lands in the same commit as package.json.
            const add_result = await git_ops.add({ files: changeset_path, cwd: repo.repo_dir });
            if (!add_result.ok) {
                throw new Error(`Failed to stage changeset: ${add_result.message}`);
            }
        }
    }
    // Commit the changes (including both package.json and changeset)
    const add_pkg_result = await git_ops.add({ files: 'package.json', cwd: repo.repo_dir });
    if (!add_pkg_result.ok) {
        throw new Error(`Failed to stage package.json: ${add_pkg_result.message}`);
    }
    const commit_result = await git_ops.commit({
        message: `update dependencies after publishing`,
        cwd: repo.repo_dir,
    });
    if (!commit_result.ok) {
        throw new Error(`Failed to commit: ${commit_result.message}`);
    }
};
|
|
125
|
+
/**
 * Updates `package.json` dependencies across all repos after a publish run.
 *
 * For each repo, collects every dependency (regular, dev, and peer) whose name
 * appears in `published`, then delegates to `update_package_json` to rewrite
 * the manifest, create a changeset, and commit. Repos are processed
 * sequentially because each update mutates that repo's working tree and git index.
 *
 * Error handling: a failure in one repo is recorded and logged, and processing
 * continues with the remaining repos.
 *
 * @param repos repos to scan and update
 * @param published map of package name -> newly published version
 * @param options `strategy` (default `'caret'`), optional `log`, and injectable
 *   `git_ops`/`fs_ops` for testing
 * @returns `{updated, failed}` — count of updated repos and per-repo failures
 */
export const update_all_repos = async (repos, published, options = {}) => {
    const { strategy = 'caret', log, git_ops = default_git_operations, fs_ops = default_fs_operations, } = options;
    let updated_count = 0;
    const failed = [];
    for (const repo of repos) {
        // Collect name -> new version for every dependency of any kind that was just published.
        // The three dependency maps share identical collection logic, so iterate them together.
        const updates = new Map();
        for (const dep_map of [repo.dependencies, repo.dev_dependencies, repo.peer_dependencies]) {
            if (!dep_map)
                continue;
            for (const [dep_name] of dep_map) {
                const new_version = published.get(dep_name);
                if (new_version) {
                    updates.set(dep_name, new_version);
                }
            }
        }
        if (updates.size === 0)
            continue;
        try {
            await update_package_json(repo, updates, { strategy, log, git_ops, fs_ops }); // eslint-disable-line no-await-in-loop
            updated_count++;
            log?.info(` Updated ${updates.size} dependencies in ${repo.library.name}`);
        }
        catch (error) {
            const err = error instanceof Error ? error : new Error(String(error));
            failed.push({ repo: repo.library.name, error: err });
            log?.error(` Failed to update ${repo.library.name}: ${err.message}`);
        }
    }
    return { updated: updated_count, failed };
};
|
|
171
|
+
/**
 * Computes which of a repo's dependencies need updating to newly published versions.
 *
 * Checks regular, dev, and peer dependencies. A dependency needs an update when
 * its name appears in `published` and `needs_update(current, new)` is true.
 *
 * Note: if the same name appears in multiple dependency kinds, the later kind
 * (dev over regular, peer over dev) wins, matching iteration order.
 *
 * @param repo repo whose dependency maps are inspected
 * @param published map of package name -> newly published version
 * @returns map of dependency name -> `{current, new, type}` where `type` is the
 *   `package.json` field name (`dependencies` | `devDependencies` | `peerDependencies`)
 */
export const find_updates_needed = (repo, published) => {
    const updates = new Map();
    // Each dependency map is paired with its package.json field name so the
    // identical scan logic runs once, and callers know which field to patch.
    const sources = [
        [repo.dependencies, 'dependencies'],
        [repo.dev_dependencies, 'devDependencies'],
        [repo.peer_dependencies, 'peerDependencies'],
    ];
    for (const [dep_map, type] of sources) {
        if (!dep_map)
            continue;
        for (const [dep_name, current_version] of dep_map) {
            const new_version = published.get(dep_name);
            if (new_version && needs_update(current_version, new_version)) {
                updates.set(dep_name, {
                    current: current_version,
                    new: new_version,
                    type,
                });
            }
        }
    }
    return updates;
};
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import type { Logger } from '@fuzdev/fuz_util/log.js';
import type { FetchValueCache } from '@fuzdev/fuz_util/fetch.js';
import type { RepoJson } from './repo.svelte.js';
import type { LocalRepo } from './local_repo.js';
/**
 * Fetches GitHub metadata (CI status, pull requests) for all repos.
 *
 * Fetches sequentially with a delay between requests to respect GitHub API
 * rate limits; awaiting inside the loop is intentional so requests never
 * run in parallel and overwhelm the API.
 *
 * Error handling: logs fetch failures but continues processing remaining repos.
 * Repos with failed fetches will have `null` for check_runs or pull_requests.
 *
 * @param resolved_repos local repos to fetch GitHub metadata for
 * @param token optional GitHub API token passed through to the fetch helpers
 * @param cache optional cache from `@fuzdev/fuz_util/fetch.js` for response memoization
 * @param log optional logger for error reporting
 * @param delay milliseconds between API requests (default: 33ms)
 * @param github_api_version optional GitHub REST API version passed to the fetch helpers
 * @returns array of RepoJson objects with GitHub metadata attached
 */
export declare const fetch_repo_data: (resolved_repos: Array<LocalRepo>, token?: string, cache?: FetchValueCache, log?: Logger, delay?: number, github_api_version?: string) => Promise<Array<RepoJson>>;
//# sourceMappingURL=fetch_repo_data.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"fetch_repo_data.d.ts","sourceRoot":"../src/lib/","sources":["../src/lib/fetch_repo_data.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,MAAM,EAAC,MAAM,yBAAyB,CAAC;AAEpD,OAAO,KAAK,EAAC,eAAe,EAAC,MAAM,2BAA2B,CAAC;AAG/D,OAAO,KAAK,EAAC,QAAQ,EAAC,MAAM,kBAAkB,CAAC;AAC/C,OAAO,KAAK,EAAC,SAAS,EAAC,MAAM,iBAAiB,CAAC;AAI/C;;;;;;;;;;;;GAYG;AACH,eAAO,MAAM,eAAe,GAC3B,gBAAgB,KAAK,CAAC,SAAS,CAAC,EAChC,QAAQ,MAAM,EACd,QAAQ,eAAe,EACvB,MAAM,MAAM,EACZ,cAAU,EACV,qBAAqB,MAAM,KACzB,OAAO,CAAC,KAAK,CAAC,QAAQ,CAAC,CAiCzB,CAAC"}
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
import { wait } from '@fuzdev/fuz_util/async.js';
|
|
2
|
+
import { fetch_github_check_runs, fetch_github_pull_requests } from './github.js';
|
|
3
|
+
/* eslint-disable no-await-in-loop */
|
|
4
|
+
/**
 * Fetches GitHub metadata (CI status, PRs) for all repos.
 *
 * Fetches sequentially with delay between requests to respect GitHub API rate limits.
 * Awaiting inside the loop is intentional to avoid parallel requests overwhelming the API.
 *
 * Error handling: Logs fetch failures but continues processing remaining repos.
 * Repos with failed fetches will have `null` for check_runs or pull_requests.
 *
 * @param resolved_repos local repos to fetch GitHub metadata for
 * @param token optional GitHub API token passed to the fetch helpers
 * @param cache optional cache from fuz_util's fetch.js for response memoization
 * @param log optional logger for error reporting
 * @param delay milliseconds between API requests (default: 33ms)
 * @param github_api_version optional GitHub REST API version passed to the fetch helpers
 * @returns array of Repo objects with GitHub metadata attached
 */
export const fetch_repo_data = async (resolved_repos, token, cache, log, delay = 33, github_api_version) => {
    const repos = [];
    for (const { library, library_json, repo_config } of resolved_repos) {
        const repo_url = library.repo_url;
        // CI status — space out requests to stay under rate limits.
        await wait(delay);
        const check_runs = await fetch_github_check_runs(library, {
            cache,
            log,
            token,
            api_version: github_api_version,
            ref: repo_config.branch,
        });
        if (!check_runs)
            log?.error('failed to fetch CI status: ' + repo_url);
        // pull requests
        await wait(delay);
        const pull_requests = await fetch_github_pull_requests(library, {
            cache,
            log,
            token,
            api_version: github_api_version,
        });
        // Fixed log message: this fetch is for pull requests, not issues.
        if (!pull_requests)
            log?.error('failed to fetch pull requests: ' + repo_url);
        repos.push({
            library_json,
            check_runs,
            pull_requests,
        });
    }
    return repos;
};
|