@fuzdev/fuz_gitops 0.57.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +119 -0
- package/dist/ModulesDetail.svelte +180 -0
- package/dist/ModulesDetail.svelte.d.ts +10 -0
- package/dist/ModulesDetail.svelte.d.ts.map +1 -0
- package/dist/ModulesNav.svelte +43 -0
- package/dist/ModulesNav.svelte.d.ts +11 -0
- package/dist/ModulesNav.svelte.d.ts.map +1 -0
- package/dist/ModulesPage.svelte +50 -0
- package/dist/ModulesPage.svelte.d.ts +9 -0
- package/dist/ModulesPage.svelte.d.ts.map +1 -0
- package/dist/PageFooter.svelte +15 -0
- package/dist/PageFooter.svelte.d.ts +19 -0
- package/dist/PageFooter.svelte.d.ts.map +1 -0
- package/dist/PageHeader.svelte +35 -0
- package/dist/PageHeader.svelte.d.ts +19 -0
- package/dist/PageHeader.svelte.d.ts.map +1 -0
- package/dist/PullRequestsDetail.svelte +53 -0
- package/dist/PullRequestsDetail.svelte.d.ts +10 -0
- package/dist/PullRequestsDetail.svelte.d.ts.map +1 -0
- package/dist/PullRequestsPage.svelte +47 -0
- package/dist/PullRequestsPage.svelte.d.ts +11 -0
- package/dist/PullRequestsPage.svelte.d.ts.map +1 -0
- package/dist/ReposTable.svelte +189 -0
- package/dist/ReposTable.svelte.d.ts +9 -0
- package/dist/ReposTable.svelte.d.ts.map +1 -0
- package/dist/ReposTree.svelte +88 -0
- package/dist/ReposTree.svelte.d.ts +11 -0
- package/dist/ReposTree.svelte.d.ts.map +1 -0
- package/dist/ReposTreeNav.svelte +55 -0
- package/dist/ReposTreeNav.svelte.d.ts +11 -0
- package/dist/ReposTreeNav.svelte.d.ts.map +1 -0
- package/dist/TablePage.svelte +46 -0
- package/dist/TablePage.svelte.d.ts +9 -0
- package/dist/TablePage.svelte.d.ts.map +1 -0
- package/dist/TreeItemPage.svelte +75 -0
- package/dist/TreeItemPage.svelte.d.ts +10 -0
- package/dist/TreeItemPage.svelte.d.ts.map +1 -0
- package/dist/TreePage.svelte +64 -0
- package/dist/TreePage.svelte.d.ts +9 -0
- package/dist/TreePage.svelte.d.ts.map +1 -0
- package/dist/changeset_generator.d.ts +38 -0
- package/dist/changeset_generator.d.ts.map +1 -0
- package/dist/changeset_generator.js +110 -0
- package/dist/changeset_reader.d.ts +75 -0
- package/dist/changeset_reader.d.ts.map +1 -0
- package/dist/changeset_reader.js +167 -0
- package/dist/constants.d.ts +9 -0
- package/dist/constants.d.ts.map +1 -0
- package/dist/constants.js +8 -0
- package/dist/dependency_graph.d.ts +120 -0
- package/dist/dependency_graph.d.ts.map +1 -0
- package/dist/dependency_graph.js +341 -0
- package/dist/dependency_updater.d.ts +46 -0
- package/dist/dependency_updater.d.ts.map +1 -0
- package/dist/dependency_updater.js +213 -0
- package/dist/fetch_repo_data.d.ts +19 -0
- package/dist/fetch_repo_data.d.ts.map +1 -0
- package/dist/fetch_repo_data.js +49 -0
- package/dist/fs_fetch_value_cache.d.ts +24 -0
- package/dist/fs_fetch_value_cache.d.ts.map +1 -0
- package/dist/fs_fetch_value_cache.js +61 -0
- package/dist/git_operations.d.ts +54 -0
- package/dist/git_operations.d.ts.map +1 -0
- package/dist/git_operations.js +144 -0
- package/dist/github.d.ts +91 -0
- package/dist/github.d.ts.map +1 -0
- package/dist/github.js +94 -0
- package/dist/github_helpers.d.ts +10 -0
- package/dist/github_helpers.d.ts.map +1 -0
- package/dist/github_helpers.js +13 -0
- package/dist/gitops_analyze.task.d.ts +17 -0
- package/dist/gitops_analyze.task.d.ts.map +1 -0
- package/dist/gitops_analyze.task.js +188 -0
- package/dist/gitops_config.d.ts +56 -0
- package/dist/gitops_config.d.ts.map +1 -0
- package/dist/gitops_config.js +63 -0
- package/dist/gitops_plan.task.d.ts +28 -0
- package/dist/gitops_plan.task.d.ts.map +1 -0
- package/dist/gitops_plan.task.js +217 -0
- package/dist/gitops_publish.task.d.ts +29 -0
- package/dist/gitops_publish.task.d.ts.map +1 -0
- package/dist/gitops_publish.task.js +178 -0
- package/dist/gitops_sync.task.d.ts +18 -0
- package/dist/gitops_sync.task.d.ts.map +1 -0
- package/dist/gitops_sync.task.js +95 -0
- package/dist/gitops_task_helpers.d.ts +63 -0
- package/dist/gitops_task_helpers.d.ts.map +1 -0
- package/dist/gitops_task_helpers.js +84 -0
- package/dist/gitops_validate.task.d.ts +12 -0
- package/dist/gitops_validate.task.d.ts.map +1 -0
- package/dist/gitops_validate.task.js +210 -0
- package/dist/graph_validation.d.ts +39 -0
- package/dist/graph_validation.d.ts.map +1 -0
- package/dist/graph_validation.js +79 -0
- package/dist/local_repo.d.ts +84 -0
- package/dist/local_repo.d.ts.map +1 -0
- package/dist/local_repo.js +213 -0
- package/dist/log_helpers.d.ts +43 -0
- package/dist/log_helpers.d.ts.map +1 -0
- package/dist/log_helpers.js +98 -0
- package/dist/multi_repo_publisher.d.ts +34 -0
- package/dist/multi_repo_publisher.d.ts.map +1 -0
- package/dist/multi_repo_publisher.js +364 -0
- package/dist/npm_install_helpers.d.ts +23 -0
- package/dist/npm_install_helpers.d.ts.map +1 -0
- package/dist/npm_install_helpers.js +60 -0
- package/dist/npm_registry.d.ts +46 -0
- package/dist/npm_registry.d.ts.map +1 -0
- package/dist/npm_registry.js +96 -0
- package/dist/operations.d.ts +409 -0
- package/dist/operations.d.ts.map +1 -0
- package/dist/operations.js +34 -0
- package/dist/operations_defaults.d.ts +19 -0
- package/dist/operations_defaults.d.ts.map +1 -0
- package/dist/operations_defaults.js +279 -0
- package/dist/output_helpers.d.ts +27 -0
- package/dist/output_helpers.d.ts.map +1 -0
- package/dist/output_helpers.js +39 -0
- package/dist/paths.d.ts +11 -0
- package/dist/paths.d.ts.map +1 -0
- package/dist/paths.js +10 -0
- package/dist/preflight_checks.d.ts +47 -0
- package/dist/preflight_checks.d.ts.map +1 -0
- package/dist/preflight_checks.js +181 -0
- package/dist/publishing_plan.d.ts +100 -0
- package/dist/publishing_plan.d.ts.map +1 -0
- package/dist/publishing_plan.js +353 -0
- package/dist/publishing_plan_helpers.d.ts +30 -0
- package/dist/publishing_plan_helpers.d.ts.map +1 -0
- package/dist/publishing_plan_helpers.js +112 -0
- package/dist/publishing_plan_logging.d.ts +18 -0
- package/dist/publishing_plan_logging.d.ts.map +1 -0
- package/dist/publishing_plan_logging.js +342 -0
- package/dist/repo.svelte.d.ts +52 -0
- package/dist/repo.svelte.d.ts.map +1 -0
- package/dist/repo.svelte.js +70 -0
- package/dist/repo_ops.d.ts +57 -0
- package/dist/repo_ops.d.ts.map +1 -0
- package/dist/repo_ops.js +167 -0
- package/dist/resolved_gitops_config.d.ts +9 -0
- package/dist/resolved_gitops_config.d.ts.map +1 -0
- package/dist/resolved_gitops_config.js +12 -0
- package/dist/semver.d.ts +24 -0
- package/dist/semver.d.ts.map +1 -0
- package/dist/semver.js +140 -0
- package/dist/serialization_types.d.ts +57 -0
- package/dist/serialization_types.d.ts.map +1 -0
- package/dist/serialization_types.js +40 -0
- package/dist/version_utils.d.ts +48 -0
- package/dist/version_utils.d.ts.map +1 -0
- package/dist/version_utils.js +125 -0
- package/package.json +107 -0
- package/src/lib/changeset_generator.ts +162 -0
- package/src/lib/changeset_reader.ts +218 -0
- package/src/lib/constants.ts +8 -0
- package/src/lib/dependency_graph.ts +423 -0
- package/src/lib/dependency_updater.ts +297 -0
- package/src/lib/fetch_repo_data.ts +64 -0
- package/src/lib/fs_fetch_value_cache.ts +75 -0
- package/src/lib/git_operations.ts +208 -0
- package/src/lib/github.ts +128 -0
- package/src/lib/github_helpers.ts +31 -0
- package/src/lib/gitops_analyze.task.ts +261 -0
- package/src/lib/gitops_config.ts +123 -0
- package/src/lib/gitops_plan.task.ts +272 -0
- package/src/lib/gitops_publish.task.ts +227 -0
- package/src/lib/gitops_sync.task.ts +109 -0
- package/src/lib/gitops_task_helpers.ts +126 -0
- package/src/lib/gitops_validate.task.ts +248 -0
- package/src/lib/graph_validation.ts +109 -0
- package/src/lib/local_repo.ts +359 -0
- package/src/lib/log_helpers.ts +147 -0
- package/src/lib/multi_repo_publisher.ts +464 -0
- package/src/lib/npm_install_helpers.ts +85 -0
- package/src/lib/npm_registry.ts +143 -0
- package/src/lib/operations.ts +334 -0
- package/src/lib/operations_defaults.ts +335 -0
- package/src/lib/output_helpers.ts +64 -0
- package/src/lib/paths.ts +11 -0
- package/src/lib/preflight_checks.ts +269 -0
- package/src/lib/publishing_plan.ts +531 -0
- package/src/lib/publishing_plan_helpers.ts +145 -0
- package/src/lib/publishing_plan_logging.ts +470 -0
- package/src/lib/repo.svelte.ts +95 -0
- package/src/lib/repo_ops.ts +213 -0
- package/src/lib/resolved_gitops_config.ts +27 -0
- package/src/lib/semver.ts +166 -0
- package/src/lib/serialization_types.ts +90 -0
- package/src/lib/version_utils.ts +150 -0
|
@@ -0,0 +1,364 @@
|
|
|
1
|
+
import { TaskError } from '@ryanatkn/gro';
|
|
2
|
+
import { join } from 'node:path';
|
|
3
|
+
import { styleText as st } from 'node:util';
|
|
4
|
+
import { update_package_json } from './dependency_updater.js';
|
|
5
|
+
import { validate_dependency_graph } from './graph_validation.js';
|
|
6
|
+
import {} from './preflight_checks.js';
|
|
7
|
+
import { needs_update, is_breaking_change, detect_bump_type } from './version_utils.js';
|
|
8
|
+
import { default_gitops_operations } from './operations_defaults.js';
|
|
9
|
+
import { MAX_ITERATIONS } from './constants.js';
|
|
10
|
+
import { install_with_cache_healing } from './npm_install_helpers.js';
|
|
11
|
+
/**
 * Publishes a set of local repos in dependency order, iterating to a fixed
 * point so that changesets generated by dependency updates are picked up.
 *
 * Flow: preflight checks (skipped for dry runs) → dependency-graph validation →
 * iterated publish / registry-wait / dependent-update (Phase 1, 1b) →
 * dev-dependency updates (Phase 2, 2b) → optional deploys (Phase 3) → summary.
 * Publish failures are fail-fast within an iteration; install failures are not.
 *
 * @param repos - local repos to publish (shape per local_repo.js)
 * @param options - `{dry_run, update_deps, deploy, skip_install, max_wait,
 *   version_strategy, log, ops}`; `ops` defaults to `default_gitops_operations`
 *   so every external effect can be injected/stubbed.
 * @returns {Promise<{ok: boolean, published: Array, failed: Array<{name, error}>, duration: number}>}
 * @throws {TaskError} when preflight checks fail (real runs only)
 */
export const publish_repos = async (repos, options) => {
    const start_time = Date.now();
    const { dry_run, update_deps, log, ops = default_gitops_operations } = options;
    // Preflight checks (skip for dry runs since we're not actually publishing)
    if (!dry_run) {
        const preflight_options = {
            skip_changesets: false, // Always check for changesets
            required_branch: 'main',
            log,
        };
        const preflight = await ops.preflight.run_preflight_checks({
            repos,
            preflight_options,
            git_ops: ops.git,
            npm_ops: ops.npm,
            build_ops: ops.build,
            changeset_ops: ops.changeset,
        });
        if (!preflight.ok) {
            throw new TaskError(`Preflight checks failed: ${preflight.errors.join(', ')}`);
        }
    }
    else {
        log?.info('⏭️ Skipping preflight checks for dry run');
    }
    // Build dependency graph and validate
    const { publishing_order: order } = validate_dependency_graph(repos, {
        log,
        throw_on_prod_cycles: true,
        log_cycles: true,
        log_order: true,
    });
    // published: name -> version metadata from publish_single_repo;
    // failed: name -> Error; both feed the final summary/return value.
    const published = new Map();
    const failed = new Map();
    const changed_repos = new Set(); // Track repos with any changes for selective deployment
    // Fixed-point iteration: keep publishing until no new changesets are created
    // This handles transitive dependency updates (auto-generated changesets)
    let iteration = 0;
    let converged = false;
    while (!converged && iteration < MAX_ITERATIONS) {
        iteration++;
        log?.info(st('cyan', `\n🚀 Publishing iteration ${iteration}/${MAX_ITERATIONS}...\n`));
        // Track if any packages were published in this iteration
        let published_in_iteration = false;
        let published_count = 0;
        // Track repos changed in THIS iteration only (for batch install)
        const changed_in_iteration = new Set();
        // Phase 1: Publish each package and immediately update dependents
        for (let i = 0; i < order.length; i++) {
            const pkg_name = order[i];
            const repo = repos.find((r) => r.library.name === pkg_name);
            if (!repo)
                continue;
            // Skip if already published in a previous iteration
            if (published.has(pkg_name)) {
                continue;
            }
            // Check for changesets (both dry and real runs)
            const has_result = await ops.changeset.has_changesets({ repo });
            if (!has_result.ok) {
                // Failed to check changesets
                const err = new Error(`Failed to check changesets: ${has_result.message}`);
                failed.set(pkg_name, err);
                log?.error(st('red', ` ❌ ${err.message}`));
                break;
            }
            if (!has_result.value) {
                // Skip packages without changesets
                // In real publish: They might get auto-changesets during dependency updates
                // In dry run: We can't simulate auto-changesets, so just skip
                if (dry_run) {
                    // Silent skip in dry run - plan shows which packages get auto-changesets
                    continue;
                }
                else {
                    log?.info(st('yellow', ` ⚠️ Skipping ${pkg_name} - no changesets`));
                    continue;
                }
            }
            try {
                // 1. Publish this package
                log?.info(st('dim', ` [${i + 1}/${order.length}] Publishing ${pkg_name}...`));
                const version = await publish_single_repo(repo, options, ops);
                published.set(pkg_name, version);
                changed_repos.add(pkg_name); // Mark as changed for deployment
                // Note: don't add to changed_in_iteration - published packages don't need install
                // (their dependencies didn't change, only their version)
                published_in_iteration = true;
                published_count++;
                log?.info(st('green', ` ✅ Published ${pkg_name}@${version.new_version}`));
                if (!dry_run) {
                    // 2. Wait for this package to be available on NPM
                    log?.info(` ⏳ Waiting for ${pkg_name}@${version.new_version} on NPM...`);
                    const wait_result = await ops.npm.wait_for_package({
                        pkg: pkg_name,
                        version: version.new_version,
                        wait_options: {
                            max_attempts: 30,
                            initial_delay: 1000,
                            max_delay: 60000,
                            timeout: options.max_wait || 600000, // 10 minutes default
                        },
                        log,
                    });
                    if (!wait_result.ok) {
                        throw new Error(`Failed to wait for package: ${wait_result.message}${wait_result.timeout ? ' (timeout)' : ''}`);
                    }
                    // 3. Update all repos that have prod/peer deps on this package
                    if (update_deps) {
                        for (const dependent_repo of repos) {
                            const updates = new Map();
                            // Check prod dependencies
                            if (dependent_repo.dependencies?.has(pkg_name)) {
                                const current = dependent_repo.dependencies.get(pkg_name);
                                if (needs_update(current, version.new_version)) {
                                    updates.set(pkg_name, version.new_version);
                                }
                            }
                            // Check peer dependencies
                            if (dependent_repo.peer_dependencies?.has(pkg_name)) {
                                const current = dependent_repo.peer_dependencies.get(pkg_name);
                                if (needs_update(current, version.new_version)) {
                                    updates.set(pkg_name, version.new_version);
                                }
                            }
                            // Apply updates if any
                            if (updates.size > 0) {
                                log?.info(` Updating ${dependent_repo.library.name}'s dependency on ${pkg_name}`);
                                changed_repos.add(dependent_repo.library.name); // Mark as changed for deployment
                                changed_in_iteration.add(dependent_repo.library.name); // Track for batch install
                                await update_package_json(dependent_repo, updates, {
                                    strategy: options.version_strategy || 'caret',
                                    published_versions: published,
                                    log,
                                    git_ops: ops.git,
                                });
                            }
                        }
                    }
                }
            }
            catch (error) {
                const err = error instanceof Error ? error : new Error(String(error));
                failed.set(pkg_name, err);
                log?.error(st('red', ` ❌ Failed to publish ${pkg_name}: ${err.message}`));
                break; // Always fail fast on error
            }
        }
        // Phase 1b: Batch install dependencies for repos with updated package.json
        // This ensures workspace stays consistent before next iteration
        if (!dry_run && !options.skip_install && changed_in_iteration.size > 0) {
            log?.info(st('cyan', '\n📦 Installing dependencies for updated repos...\n'));
            for (const pkg_name of changed_in_iteration) {
                const repo = repos.find((r) => r.library.name === pkg_name);
                if (!repo)
                    continue;
                try {
                    log?.info(` Installing ${pkg_name}...`);
                    await install_with_cache_healing(repo, ops, log);
                    log?.info(st('green', ` ✅ Installed ${pkg_name}`));
                }
                catch (error) {
                    const err = error instanceof Error ? error : new Error(String(error));
                    failed.set(pkg_name, err);
                    log?.error(st('red', ` ❌ Failed to install ${pkg_name}: ${err.message}`));
                    // Continue with other installs instead of breaking
                }
            }
        }
        // Log iteration summary
        if (published_count > 0) {
            log?.info(st('dim', `\nIteration ${iteration}: ${published_count} package(s) published\n`));
        }
        // Check for convergence: no packages published in this iteration
        if (!published_in_iteration) {
            converged = true;
            log?.info(st('green', `\n✓ Converged after ${iteration} iteration(s) - no new changesets\n`));
        }
        else if (iteration === MAX_ITERATIONS) {
            // Count packages that still have changesets (not yet published)
            const pending_count = order.length - published.size;
            const estimated_iterations = Math.ceil(pending_count / 2); // Rough estimate
            log?.warn(st('yellow', `\n⚠️ Reached maximum iterations (${MAX_ITERATIONS}) without full convergence\n` +
                ` ${pending_count} package(s) may still have changesets to process\n` +
                ` Estimated ${estimated_iterations} more iteration(s) needed - run 'gro gitops_publish' again\n`));
        }
    }
    // Phase 2: Update all dev dependencies (can have cycles)
    // Dev dep changes require deployment even without version bumps (rebuild needed)
    const dev_updated_repos = new Set();
    if (update_deps && published.size > 0 && !dry_run) {
        log?.info(st('cyan', '\n🔄 Updating dev dependencies...\n'));
        for (const repo of repos) {
            const dev_updates = new Map();
            // Check dev dependencies only
            if (repo.dev_dependencies) {
                for (const [dep_name, current_version] of repo.dev_dependencies) {
                    const published_version = published.get(dep_name);
                    if (published_version && needs_update(current_version, published_version.new_version)) {
                        dev_updates.set(dep_name, published_version.new_version);
                    }
                }
            }
            if (dev_updates.size > 0) {
                log?.info(` Updating ${dev_updates.size} dev dependencies in ${repo.library.name}`);
                changed_repos.add(repo.library.name); // Mark as changed for deployment
                dev_updated_repos.add(repo.library.name); // Track for batch install
                await update_package_json(repo, dev_updates, {
                    strategy: options.version_strategy || 'caret',
                    published_versions: published,
                    log,
                    git_ops: ops.git,
                });
            }
        }
    }
    // Phase 2b: Install dev dependencies for repos with dev dep updates
    if (!dry_run && !options.skip_install && dev_updated_repos.size > 0) {
        log?.info(st('cyan', '\n📦 Installing dev dependencies for updated repos...\n'));
        for (const pkg_name of dev_updated_repos) {
            const repo = repos.find((r) => r.library.name === pkg_name);
            if (!repo)
                continue;
            try {
                log?.info(` Installing ${pkg_name}...`);
                await install_with_cache_healing(repo, ops, log);
                log?.info(st('green', ` ✅ Installed ${pkg_name}`));
            }
            catch (error) {
                const err = error instanceof Error ? error : new Error(String(error));
                failed.set(pkg_name, err);
                log?.error(st('red', ` ❌ Failed to install ${pkg_name}: ${err.message}`));
                // Continue with other installs instead of breaking
            }
        }
    }
    // Phase 3: Deploy repos with changes (optional)
    // Deploys only repos that were: published, had prod/peer deps updated, or had dev deps updated
    if (options.deploy && !dry_run) {
        const repos_to_deploy = repos.filter((r) => changed_repos.has(r.library.name));
        log?.info(st('cyan', `\n🚢 Deploying ${repos_to_deploy.length}/${repos.length} repos with changes...\n`));
        for (const repo of repos_to_deploy) {
            try {
                log?.info(` Deploying ${repo.library.name}...`);
                const deploy_result = await ops.process.spawn({
                    cmd: 'gro',
                    args: ['deploy', '--no-build'],
                    spawn_options: { cwd: repo.repo_dir },
                });
                if (deploy_result.ok) {
                    log?.info(st('green', ` ✅ Deployed ${repo.library.name}`));
                }
                else {
                    log?.warn(st('yellow', ` ⚠️ Failed to deploy ${repo.library.name}`));
                }
            }
            catch (error) {
                // Deploy failures are logged but do not mark the run as failed.
                log?.error(st('red', ` ❌ Error deploying ${repo.library.name}: ${error}`));
            }
        }
    }
    // Summary
    const duration = Date.now() - start_time;
    const ok = failed.size === 0;
    log?.info(st('cyan', '\n📋 Publishing Summary\n'));
    log?.info(` Duration: ${(duration / 1000).toFixed(1)}s`);
    log?.info(` Published: ${published.size} packages`);
    if (failed.size > 0) {
        log?.info(` Failed: ${failed.size} packages`);
    }
    if (ok) {
        log?.info(st('green', '\n✨ All packages published successfully!\n'));
    }
    else {
        log?.error(st('red', '\n❌ Some packages failed to publish\n'));
    }
    return {
        ok,
        published: Array.from(published.values()),
        failed: Array.from(failed.entries()).map(([name, error]) => ({ name, error })),
        duration,
    };
};
|
|
294
|
+
/**
 * Publishes a single repo using gro publish.
 *
 * Dry run mode: Predicts version from changesets without side effects.
 * Real mode: Runs `gro publish --no-build` (builds already validated in preflight),
 * reads new version from package.json, and returns metadata.
 *
 * @throws {Error} if changeset prediction fails (dry run) or publish fails (real)
 */
const publish_single_repo = async (repo, options, ops = default_gitops_operations) => {
    const { dry_run, log } = options;
    const repo_name = repo.library.name;
    const old_version = repo.library.package_json.version || '0.0.0';
    // Both paths return the same shape; only how the new version and commit
    // are obtained differs.
    const to_result = (new_version, bump_type, commit) => ({
        name: repo_name,
        old_version,
        new_version,
        bump_type,
        breaking: is_breaking_change(old_version, bump_type),
        commit,
        tag: `v${new_version}`,
    });
    if (dry_run) {
        // Derive the next version from pending changesets; no side effects.
        const prediction = await ops.changeset.predict_next_version({ repo, log });
        if (!prediction) {
            // No changesets found, skip this repo
            throw new Error(`No changesets found for ${repo.library.name}`);
        }
        if (!prediction.ok) {
            // Error reading changesets
            throw new Error(`Failed to predict version: ${prediction.message}`);
        }
        return to_result(prediction.version, prediction.bump_type, 'dry_run');
    }
    // Real publish: builds were already validated in the preflight checks,
    // so skip them here with --no-build.
    const spawned = await ops.process.spawn({
        cmd: 'gro',
        args: ['publish', '--no-build'],
        spawn_options: { cwd: repo.repo_dir },
    });
    if (!spawned.ok) {
        throw new Error(`Failed to publish ${repo.library.name}: ${spawned.message}`);
    }
    // gro publish bumped package.json on disk; read it back for the new version.
    const read_result = await ops.fs.readFile({
        path: join(repo.repo_dir, 'package.json'),
        encoding: 'utf8',
    });
    if (!read_result.ok) {
        throw new Error(`Failed to read package.json: ${read_result.message}`);
    }
    const next_version = JSON.parse(read_result.value).version;
    // Classify the bump (major/minor/patch) by comparing old and new versions.
    const bump = detect_bump_type(old_version, next_version);
    // Record the exact commit the publish landed on.
    const commit_lookup = await ops.git.current_commit_hash({ cwd: repo.repo_dir });
    if (!commit_lookup.ok) {
        throw new Error(`Failed to get commit hash: ${commit_lookup.message}`);
    }
    return to_result(next_version, bump, commit_lookup.value);
};
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
import type { Logger } from '@fuzdev/fuz_util/log.js';
|
|
2
|
+
import type { LocalRepo } from './local_repo.js';
|
|
3
|
+
import type { GitopsOperations } from './operations.js';
|
|
4
|
+
/**
 * Installs npm dependencies with cache healing on ETARGET errors.
 *
 * **Strategy:**
 * 1. First attempt: regular `npm install`
 * 2. On ETARGET error (stale cache): `npm cache clean --force` then retry (at most one retry)
 * 3. On other errors: fail immediately
 *
 * **Why ETARGET errors occur:**
 * After publishing a package and waiting for NPM registry propagation,
 * npm's local cache may still have stale "404" metadata. This healing
 * strategy clears the cache to force fresh metadata fetch.
 *
 * @param repo - The repository to install dependencies for
 * @param ops - Gitops operations (for dependency injection)
 * @param log - Optional logger
 * @throws Error if install fails (with details about cache healing attempts)
 */
export declare const install_with_cache_healing: (repo: LocalRepo, ops: GitopsOperations, log?: Logger) => Promise<void>;
|
|
23
|
+
//# sourceMappingURL=npm_install_helpers.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"npm_install_helpers.d.ts","sourceRoot":"../src/lib/","sources":["../src/lib/npm_install_helpers.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,MAAM,EAAC,MAAM,yBAAyB,CAAC;AAGpD,OAAO,KAAK,EAAC,SAAS,EAAC,MAAM,iBAAiB,CAAC;AAC/C,OAAO,KAAK,EAAC,gBAAgB,EAAC,MAAM,iBAAiB,CAAC;AAmBtD;;;;;;;;;;;;;;;;;GAiBG;AACH,eAAO,MAAM,0BAA0B,GACtC,MAAM,SAAS,EACf,KAAK,gBAAgB,EACrB,MAAM,MAAM,KACV,OAAO,CAAC,IAAI,CAuCd,CAAC"}
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
import { styleText as st } from 'node:util';
|
|
2
|
+
/**
|
|
3
|
+
* Checks if an npm install error is caused by stale cache (ETARGET).
|
|
4
|
+
* Detects various error message formats:
|
|
5
|
+
* - "code ETARGET"
|
|
6
|
+
* - "ETARGET"
|
|
7
|
+
* - "notarget"
|
|
8
|
+
* - "No matching version found"
|
|
9
|
+
*/
|
|
10
|
+
const is_etarget_error = (message, stderr) => {
|
|
11
|
+
const combined = `${message} ${stderr}`.toLowerCase();
|
|
12
|
+
return (combined.includes('etarget') ||
|
|
13
|
+
combined.includes('notarget') ||
|
|
14
|
+
combined.includes('no matching version found'));
|
|
15
|
+
};
|
|
16
|
+
/**
|
|
17
|
+
* Installs npm dependencies with cache healing on ETARGET errors.
|
|
18
|
+
*
|
|
19
|
+
* **Strategy:**
|
|
20
|
+
* 1. First attempt: regular `npm install`
|
|
21
|
+
* 2. On ETARGET error (stale cache): `npm cache clean --force` then retry
|
|
22
|
+
* 3. On other errors: fail immediately
|
|
23
|
+
*
|
|
24
|
+
* **Why ETARGET errors occur:**
|
|
25
|
+
* After publishing a package and waiting for NPM registry propagation,
|
|
26
|
+
* npm's local cache may still have stale "404" metadata. This healing
|
|
27
|
+
* strategy clears the cache to force fresh metadata fetch.
|
|
28
|
+
*
|
|
29
|
+
* @param repo - The repository to install dependencies for
|
|
30
|
+
* @param ops - Gitops operations (for dependency injection)
|
|
31
|
+
* @param log - Optional logger
|
|
32
|
+
* @throws Error if install fails (with details about cache healing attempts)
|
|
33
|
+
*/
|
|
34
|
+
export const install_with_cache_healing = async (repo, ops, log) => {
|
|
35
|
+
// First attempt
|
|
36
|
+
const install_result = await ops.npm.install({ cwd: repo.repo_dir });
|
|
37
|
+
if (install_result.ok) {
|
|
38
|
+
return; // Success
|
|
39
|
+
}
|
|
40
|
+
// Check if error is ETARGET (package not found due to stale cache)
|
|
41
|
+
const stderr = install_result.stderr || '';
|
|
42
|
+
const message = install_result.message || '';
|
|
43
|
+
if (!is_etarget_error(message, stderr)) {
|
|
44
|
+
// Different error - fail immediately without cache healing
|
|
45
|
+
throw new Error(`Failed to install dependencies in ${repo.library.name}: ${install_result.message}${stderr ? `\n${stderr}` : ''}`);
|
|
46
|
+
}
|
|
47
|
+
// ETARGET error - try cache healing
|
|
48
|
+
log?.warn(st('yellow', ` ⚠️ ETARGET error detected - cleaning npm cache...`));
|
|
49
|
+
const cache_result = await ops.npm.cache_clean();
|
|
50
|
+
if (!cache_result.ok) {
|
|
51
|
+
throw new Error(`Failed to clean npm cache: ${cache_result.message}`);
|
|
52
|
+
}
|
|
53
|
+
log?.info(' ✓ Cache cleaned, retrying install...');
|
|
54
|
+
// Retry install after cache clean
|
|
55
|
+
const retry_result = await ops.npm.install({ cwd: repo.repo_dir });
|
|
56
|
+
if (!retry_result.ok) {
|
|
57
|
+
throw new Error(`Failed to install dependencies after cache clean in ${repo.library.name}: ${retry_result.message}${retry_result.stderr ? `\n${retry_result.stderr}` : ''}`);
|
|
58
|
+
}
|
|
59
|
+
log?.info(st('green', ` ✓ Dependencies installed successfully after cache heal`));
|
|
60
|
+
};
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
import type { Logger } from '@fuzdev/fuz_util/log.js';
|
|
2
|
+
/** Tuning knobs for polling the npm registry (defaults listed on wait_for_package). */
export interface WaitOptions {
    log?: Logger;
    /** Max poll attempts (default 30). */
    max_attempts?: number;
    /** Starting delay between polls in ms (default 1000). */
    initial_delay?: number;
    /** Upper bound on the backoff delay in ms (default 60000). */
    max_delay?: number;
    /** Total time budget in ms (default 300000 = 5min). */
    timeout?: number;
}
/** Name and latest version of a package as reported by the registry. */
export interface PackageInfo {
    name: string;
    version: string;
}
/**
 * Resolves true when `pkg@version` is available on the npm registry.
 * NOTE(review): behavior inferred from name/signature - confirm against npm_registry.ts.
 */
export declare const check_package_available: (pkg: string, version: string, options?: {
    log?: Logger;
}) => Promise<boolean>;
/**
 * Waits for package version to propagate to NPM registry.
 *
 * Uses exponential backoff with jitter to avoid hammering registry.
 * Logs progress every 5 attempts. Respects timeout to avoid infinite waits.
 *
 * Critical for multi-repo publishing: ensures published packages are available
 * before updating dependent packages.
 *
 * @param options.max_attempts max poll attempts (default 30)
 * @param options.initial_delay starting delay in ms (default 1000)
 * @param options.max_delay max delay between attempts (default 60000)
 * @param options.timeout total timeout in ms (default 300000 = 5min)
 * @throws {Error} if timeout reached or max attempts exceeded
 */
export declare const wait_for_package: (pkg: string, version: string, options?: WaitOptions) => Promise<void>;
/**
 * Fetches package metadata from NPM registry.
 *
 * Returns name and latest version. Returns null if package doesn't exist
 * or registry is unreachable.
 *
 * @returns package info or null on error/not found
 */
export declare const get_package_info: (pkg: string, options?: {
    log?: Logger;
}) => Promise<PackageInfo | null>;
/**
 * Resolves true when the package exists on the registry.
 * NOTE(review): behavior inferred from name/signature - confirm against npm_registry.ts.
 */
export declare const package_exists: (pkg: string, options?: {
    log?: Logger;
}) => Promise<boolean>;
|
|
46
|
+
//# sourceMappingURL=npm_registry.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"npm_registry.d.ts","sourceRoot":"../src/lib/","sources":["../src/lib/npm_registry.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,MAAM,EAAC,MAAM,yBAAyB,CAAC;AAKpD,MAAM,WAAW,WAAW;IAC3B,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,OAAO,CAAC,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,WAAW,WAAW;IAC3B,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,MAAM,CAAC;CAChB;AAED,eAAO,MAAM,uBAAuB,GACnC,KAAK,MAAM,EACX,SAAS,MAAM,EACf,UAAS;IAAC,GAAG,CAAC,EAAE,MAAM,CAAA;CAAM,KAC1B,OAAO,CAAC,OAAO,CAiBjB,CAAC;AAEF;;;;;;;;;;;;;;GAcG;AACH,eAAO,MAAM,gBAAgB,GAC5B,KAAK,MAAM,EACX,SAAS,MAAM,EACf,UAAS,WAAgB,KACvB,OAAO,CAAC,IAAI,CA2Cd,CAAC;AAEF;;;;;;;GAOG;AACH,eAAO,MAAM,gBAAgB,GAC5B,KAAK,MAAM,EACX,UAAS;IAAC,GAAG,CAAC,EAAE,MAAM,CAAA;CAAM,KAC1B,OAAO,CAAC,WAAW,GAAG,IAAI,CAkB5B,CAAC;AAEF,eAAO,MAAM,cAAc,GAC1B,KAAK,MAAM,EACX,UAAS;IAAC,GAAG,CAAC,EAAE,MAAM,CAAA;CAAM,KAC1B,OAAO,CAAC,OAAO,CAGjB,CAAC"}
|
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
import { spawn_out } from '@fuzdev/fuz_util/process.js';
|
|
2
|
+
import { wait } from '@fuzdev/fuz_util/async.js';
|
|
3
|
+
import { styleText as st } from 'node:util';
|
|
4
|
+
/**
 * Checks whether a specific `pkg@version` is visible on the NPM registry.
 *
 * Never throws: any lookup failure (missing package, network error)
 * resolves to `false` and is logged at debug level.
 */
export const check_package_available = async (pkg, version, options = {}) => {
    const { log } = options;
    try {
        // `npm view <pkg>@<version> version` prints the version iff that exact version exists.
        const { stdout } = await spawn_out('npm', ['view', `${pkg}@${version}`, 'version']);
        if (!stdout) return false;
        return stdout.trim() === version;
    }
    catch (error) {
        log?.debug(`Failed to check ${pkg}@${version}: ${error}`);
        return false;
    }
};
|
|
21
|
+
/**
 * Waits for package version to propagate to NPM registry.
 *
 * Uses exponential backoff with jitter to avoid hammering registry.
 * Logs progress every 5 attempts. Respects timeout to avoid infinite waits.
 *
 * Critical for multi-repo publishing: ensures published packages are available
 * before updating dependent packages.
 *
 * @param options.max_attempts max poll attempts (default 30)
 * @param options.initial_delay starting delay in ms (default 1000)
 * @param options.max_delay max delay between attempts (default 60000)
 * @param options.timeout total timeout in ms (default 300000 = 5min)
 * @throws {Error} if timeout reached or max attempts exceeded
 */
export const wait_for_package = async (pkg, version, options = {}) => {
    const { log, max_attempts = 30, initial_delay = 1000, max_delay = 60000, timeout = 300000, // 5 minutes default
    } = options;
    const start_time = Date.now();
    let delay = initial_delay;
    for (let attempt = 1; attempt <= max_attempts; attempt++) {
        // Give up once the overall deadline has passed, regardless of attempts left.
        if (Date.now() - start_time > timeout) {
            throw new Error(`Timeout waiting for ${pkg}@${version} after ${timeout}ms`);
        }
        // Check if package is available
        // eslint-disable-next-line no-await-in-loop
        if (await check_package_available(pkg, version, { log })) {
            log?.info(st('green', ` ✓ ${pkg}@${version} is now available on NPM`));
            return;
        }
        // Log progress occasionally
        if (attempt % 5 === 0) {
            log?.info(st('dim', ` Still waiting... (attempt ${attempt}/${max_attempts})`));
        }
        // Fix: skip the backoff sleep after the final failed attempt — previously
        // this slept up to max_delay ms only to immediately throw afterwards.
        if (attempt < max_attempts) {
            // Wait with exponential backoff + jitter
            const jitter = Math.random() * delay * 0.1; // 10% jitter
            const actual_delay = Math.min(delay + jitter, max_delay);
            await wait(actual_delay); // eslint-disable-line no-await-in-loop
            // Exponential backoff
            delay = Math.min(delay * 1.5, max_delay);
        }
    }
    throw new Error(`${pkg}@${version} not available after ${max_attempts} attempts`);
};
|
|
67
|
+
/**
 * Fetches package metadata from NPM registry.
 *
 * Returns name and latest version. Returns null if package doesn't exist
 * or registry is unreachable.
 *
 * @returns package info or null on error/not found
 */
export const get_package_info = async (pkg, options = {}) => {
    const { log } = options;
    try {
        const { stdout } = await spawn_out('npm', ['view', pkg, '--json']);
        if (!stdout) return null;
        // `npm view --json` emits the full registry document; keep only what callers need.
        const { name, version } = JSON.parse(stdout);
        return { name, version };
    }
    catch (error) {
        log?.debug(`Failed to get package info for ${pkg}: ${error}`);
        return null;
    }
};
|
|
93
|
+
/**
 * Resolves to `true` when the package has metadata on the NPM registry,
 * i.e. `get_package_info` returned non-null.
 */
export const package_exists = async (pkg, options = {}) =>
    (await get_package_info(pkg, options)) !== null;
|