@underpostnet/underpost 2.95.7 → 2.96.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -2
- package/baremetal/commission-workflows.json +44 -0
- package/baremetal/packer-workflows.json +13 -0
- package/cli.md +28 -31
- package/manifests/deployment/dd-default-development/deployment.yaml +2 -2
- package/manifests/deployment/dd-test-development/deployment.yaml +2 -2
- package/package.json +1 -1
- package/packer/images/Rocky9Amd64/Makefile +62 -0
- package/packer/images/Rocky9Amd64/QUICKSTART.md +113 -0
- package/packer/images/Rocky9Amd64/README.md +122 -0
- package/packer/images/Rocky9Amd64/http/rocky9.ks.pkrtpl.hcl +114 -0
- package/packer/images/Rocky9Amd64/rocky9.pkr.hcl +160 -0
- package/packer/scripts/fuse-nbd +64 -0
- package/packer/scripts/fuse-tar-root +63 -0
- package/scripts/maas-setup.sh +13 -2
- package/scripts/maas-upload-boot-resource.sh +183 -0
- package/scripts/packer-init-vars-file.sh +30 -0
- package/scripts/packer-setup.sh +52 -0
- package/src/cli/baremetal.js +243 -55
- package/src/cli/cloud-init.js +1 -1
- package/src/cli/env.js +24 -3
- package/src/cli/index.js +15 -0
- package/src/cli/repository.js +164 -0
- package/src/index.js +1 -1
- package/src/client/ssr/pages/404.js +0 -12
- package/src/client/ssr/pages/500.js +0 -12
- package/src/client/ssr/pages/maintenance.js +0 -14
- package/src/client/ssr/pages/offline.js +0 -21
package/src/cli/baremetal.js
CHANGED
|
@@ -4,15 +4,19 @@
|
|
|
4
4
|
* @namespace UnderpostBaremetal
|
|
5
5
|
*/
|
|
6
6
|
|
|
7
|
+
import { fileURLToPath } from 'url';
|
|
7
8
|
import { getNpmRootPath, getUnderpostRootPath } from '../server/conf.js';
|
|
8
9
|
import { openTerminal, pbcopy, shellExec } from '../server/process.js';
|
|
9
10
|
import dotenv from 'dotenv';
|
|
10
11
|
import { loggerFactory } from '../server/logger.js';
|
|
11
12
|
import { getLocalIPv4Address } from '../server/dns.js';
|
|
12
13
|
import fs from 'fs-extra';
|
|
14
|
+
import path from 'path';
|
|
13
15
|
import Downloader from '../server/downloader.js';
|
|
14
16
|
import UnderpostCloudInit from './cloud-init.js';
|
|
17
|
+
import UnderpostRepository from './repository.js';
|
|
15
18
|
import { s4, timer } from '../client/components/core/CommonJs.js';
|
|
19
|
+
import { spawnSync } from 'child_process';
|
|
16
20
|
|
|
17
21
|
const logger = loggerFactory(import.meta);
|
|
18
22
|
|
|
@@ -25,6 +29,19 @@ const logger = loggerFactory(import.meta);
|
|
|
25
29
|
*/
|
|
26
30
|
class UnderpostBaremetal {
|
|
27
31
|
static API = {
|
|
32
|
+
/**
|
|
33
|
+
* @method installPacker
|
|
34
|
+
* @description Installs Packer CLI.
|
|
35
|
+
* @memberof UnderpostBaremetal
|
|
36
|
+
* @returns {Promise<void>}
|
|
37
|
+
*/
|
|
38
|
+
async installPacker(underpostRoot) {
|
|
39
|
+
const scriptPath = `${underpostRoot}/scripts/packer-setup.sh`;
|
|
40
|
+
logger.info(`Installing Packer using script: ${scriptPath}`);
|
|
41
|
+
shellExec(`sudo chmod +x ${scriptPath}`);
|
|
42
|
+
shellExec(`sudo ${scriptPath}`);
|
|
43
|
+
},
|
|
44
|
+
|
|
28
45
|
/**
|
|
29
46
|
* @method callback
|
|
30
47
|
* @description Initiates a baremetal provisioning workflow based on the provided options.
|
|
@@ -40,6 +57,11 @@ class UnderpostBaremetal {
|
|
|
40
57
|
* @param {boolean} [options.controlServerUninstall=false] - Flag to uninstall the control server.
|
|
41
58
|
* @param {boolean} [options.controlServerDbInstall=false] - Flag to install the control server's database.
|
|
42
59
|
* @param {boolean} [options.controlServerDbUninstall=false] - Flag to uninstall the control server's database.
|
|
60
|
+
* @param {boolean} [options.installPacker=false] - Flag to install Packer CLI.
|
|
61
|
+
* @param {string} [options.packerMaasImageTemplate] - Template path from canonical/packer-maas to extract (requires workflow-id).
|
|
62
|
+
* @param {string} [options.packerWorkflowId] - Workflow ID for Packer MAAS image operations (used with --packer-maas-image-build or --packer-maas-image-upload).
|
|
63
|
+
* @param {boolean} [options.packerMaasImageBuild=false] - Flag to build a Packer MAAS image for the workflow specified by packerWorkflowId.
|
|
64
|
+
* @param {boolean} [options.packerMaasImageUpload=false] - Flag to upload a Packer MAAS image artifact without rebuilding for the workflow specified by packerWorkflowId.
|
|
43
65
|
* @param {boolean} [options.cloudInitUpdate=false] - Flag to update cloud-init configuration on the baremetal machine.
|
|
44
66
|
* @param {boolean} [options.commission=false] - Flag to commission the baremetal machine.
|
|
45
67
|
* @param {boolean} [options.nfsBuild=false] - Flag to build the NFS root filesystem.
|
|
@@ -60,6 +82,11 @@ class UnderpostBaremetal {
|
|
|
60
82
|
controlServerUninstall: false,
|
|
61
83
|
controlServerDbInstall: false,
|
|
62
84
|
controlServerDbUninstall: false,
|
|
85
|
+
installPacker: false,
|
|
86
|
+
packerMaasImageTemplate: false,
|
|
87
|
+
packerWorkflowId: '',
|
|
88
|
+
packerMaasImageBuild: false,
|
|
89
|
+
packerMaasImageUpload: false,
|
|
63
90
|
cloudInitUpdate: false,
|
|
64
91
|
commission: false,
|
|
65
92
|
nfsBuild: false,
|
|
@@ -108,6 +135,164 @@ class UnderpostBaremetal {
|
|
|
108
135
|
// Log the initiation of the baremetal callback with relevant metadata.
|
|
109
136
|
logger.info('Baremetal callback', callbackMetaData);
|
|
110
137
|
|
|
138
|
+
if (options.installPacker) {
|
|
139
|
+
await UnderpostBaremetal.API.installPacker(underpostRoot);
|
|
140
|
+
return;
|
|
141
|
+
}
|
|
142
|
+
|
|
143
|
+
if (options.packerMaasImageTemplate) {
|
|
144
|
+
if (!workflowId) {
|
|
145
|
+
throw new Error('workflow-id is required when using --packer-maas-image-template');
|
|
146
|
+
}
|
|
147
|
+
|
|
148
|
+
const templatePath = options.packerMaasImageTemplate;
|
|
149
|
+
const targetDir = `${underpostRoot}/packer/images/${workflowId}`;
|
|
150
|
+
|
|
151
|
+
logger.info(`Creating new Packer MAAS image template for workflow: ${workflowId}`);
|
|
152
|
+
logger.info(`Template path: ${templatePath}`);
|
|
153
|
+
logger.info(`Target directory: ${targetDir}`);
|
|
154
|
+
|
|
155
|
+
try {
|
|
156
|
+
// Use UnderpostRepository to copy files from GitHub
|
|
157
|
+
const result = await UnderpostRepository.API.copyGitUrlDirectoryRecursive({
|
|
158
|
+
gitUrl: 'https://github.com/canonical/packer-maas',
|
|
159
|
+
directoryPath: templatePath,
|
|
160
|
+
targetPath: targetDir,
|
|
161
|
+
branch: 'main',
|
|
162
|
+
overwrite: false,
|
|
163
|
+
});
|
|
164
|
+
|
|
165
|
+
logger.info(`\nSuccessfully copied ${result.filesCount} files`);
|
|
166
|
+
|
|
167
|
+
// Create empty workflow configuration entry
|
|
168
|
+
const workflowConfig = {
|
|
169
|
+
dir: `packer/images/${workflowId}`,
|
|
170
|
+
maas: {
|
|
171
|
+
name: `custom/${workflowId.toLowerCase()}`,
|
|
172
|
+
title: `${workflowId} Custom`,
|
|
173
|
+
architecture: 'amd64/generic',
|
|
174
|
+
base_image: 'ubuntu/22.04',
|
|
175
|
+
filetype: 'tgz',
|
|
176
|
+
content: `${workflowId.toLowerCase()}.tar.gz`,
|
|
177
|
+
},
|
|
178
|
+
};
|
|
179
|
+
|
|
180
|
+
const workflows = UnderpostBaremetal.API.loadPackerMaasImageBuildWorkflows();
|
|
181
|
+
workflows[workflowId] = workflowConfig;
|
|
182
|
+
UnderpostBaremetal.API.writePackerMaasImageBuildWorkflows(workflows);
|
|
183
|
+
|
|
184
|
+
logger.info('\nTemplate extracted successfully!');
|
|
185
|
+
logger.info(`\nAdded configuration for ${workflowId} to engine/baremetal/packer-workflows.json`);
|
|
186
|
+
logger.info('\nNext steps:');
|
|
187
|
+
logger.info(`1. Review and customize the Packer template files in: ${targetDir}`);
|
|
188
|
+
logger.info(`2. Review the workflow configuration in engine/baremetal/packer-workflows.json`);
|
|
189
|
+
logger.info(
|
|
190
|
+
`3. Build the image with: underpost baremetal ${workflowId} --packer-maas-image-build ${workflowId}`,
|
|
191
|
+
);
|
|
192
|
+
} catch (error) {
|
|
193
|
+
throw new Error(`Failed to extract template: ${error.message}`);
|
|
194
|
+
}
|
|
195
|
+
|
|
196
|
+
return;
|
|
197
|
+
}
|
|
198
|
+
|
|
199
|
+
if (options.packerMaasImageBuild || options.packerMaasImageUpload) {
|
|
200
|
+
// Use the workflow ID from --packer-workflow-id option
|
|
201
|
+
if (!options.packerWorkflowId) {
|
|
202
|
+
throw new Error('Workflow ID is required. Please specify using --packer-workflow-id <workflow-id>');
|
|
203
|
+
}
|
|
204
|
+
|
|
205
|
+
workflowId = options.packerWorkflowId;
|
|
206
|
+
|
|
207
|
+
const workflow = UnderpostBaremetal.API.loadPackerMaasImageBuildWorkflows()[workflowId];
|
|
208
|
+
if (!workflow) {
|
|
209
|
+
throw new Error(`Packer MAAS image build workflow not found: ${workflowId}`);
|
|
210
|
+
}
|
|
211
|
+
const packerDir = `${underpostRoot}/${workflow.dir}`;
|
|
212
|
+
const tarballPath = `${packerDir}/${workflow.maas.content}`;
|
|
213
|
+
|
|
214
|
+
// Build phase (skip if upload-only mode)
|
|
215
|
+
if (options.packerMaasImageBuild) {
|
|
216
|
+
if (shellExec('packer version', { silent: true }).code !== 0) {
|
|
217
|
+
throw new Error('Packer is not installed. Please install Packer to proceed.');
|
|
218
|
+
}
|
|
219
|
+
|
|
220
|
+
logger.info(`Building Packer image for ${workflowId} in ${packerDir}...`);
|
|
221
|
+
const artifacts = ['output-rocky9', 'packer_cache', 'x86_64_VARS.fd', 'rocky9.tar.gz'];
|
|
222
|
+
shellExec(`cd packer/images/${workflowId}
|
|
223
|
+
rm -rf ${artifacts.join(' ')}`);
|
|
224
|
+
shellExec(`chmod +x ${underpostRoot}/scripts/packer-init-vars-file.sh`);
|
|
225
|
+
shellExec(`${underpostRoot}/scripts/packer-init-vars-file.sh`);
|
|
226
|
+
|
|
227
|
+
const init = spawnSync('packer', ['init', '.'], { stdio: 'inherit', cwd: packerDir });
|
|
228
|
+
if (init.status !== 0) {
|
|
229
|
+
throw new Error('Packer init failed');
|
|
230
|
+
}
|
|
231
|
+
|
|
232
|
+
const build = spawnSync('packer', ['build', '.'], {
|
|
233
|
+
stdio: 'inherit',
|
|
234
|
+
cwd: packerDir,
|
|
235
|
+
env: { ...process.env, PACKER_LOG: '1' },
|
|
236
|
+
});
|
|
237
|
+
|
|
238
|
+
if (build.status !== 0) {
|
|
239
|
+
throw new Error('Packer build failed');
|
|
240
|
+
}
|
|
241
|
+
} else {
|
|
242
|
+
// Upload-only mode: verify tarball exists
|
|
243
|
+
logger.info(`Upload-only mode: checking for existing build artifact...`);
|
|
244
|
+
if (!fs.existsSync(tarballPath)) {
|
|
245
|
+
throw new Error(
|
|
246
|
+
`Build artifact not found: ${tarballPath}\n` +
|
|
247
|
+
`Please build first with: --packer-maas-image-build ${workflowId}`,
|
|
248
|
+
);
|
|
249
|
+
}
|
|
250
|
+
const stats = fs.statSync(tarballPath);
|
|
251
|
+
logger.info(`Found existing artifact: ${tarballPath} (${(stats.size / 1024 / 1024 / 1024).toFixed(2)} GB)`);
|
|
252
|
+
}
|
|
253
|
+
|
|
254
|
+
logger.info(`Uploading image to MAAS...`);
|
|
255
|
+
|
|
256
|
+
// Detect MAAS profile from 'maas list' output
|
|
257
|
+
let maasProfile = process.env.MAAS_ADMIN_USERNAME;
|
|
258
|
+
if (!maasProfile) {
|
|
259
|
+
const profileList = shellExec('maas list', { silent: true, stdout: true });
|
|
260
|
+
if (profileList) {
|
|
261
|
+
const firstLine = profileList.trim().split('\n')[0];
|
|
262
|
+
const match = firstLine.match(/^(\S+)\s+http/);
|
|
263
|
+
if (match) {
|
|
264
|
+
maasProfile = match[1];
|
|
265
|
+
logger.info(`Detected MAAS profile: ${maasProfile}`);
|
|
266
|
+
}
|
|
267
|
+
}
|
|
268
|
+
}
|
|
269
|
+
|
|
270
|
+
if (!maasProfile) {
|
|
271
|
+
throw new Error(
|
|
272
|
+
'MAAS profile not found. Please run "maas login" first or set MAAS_ADMIN_USERNAME environment variable.',
|
|
273
|
+
);
|
|
274
|
+
}
|
|
275
|
+
|
|
276
|
+
// Use the upload script to avoid MAAS CLI bugs
|
|
277
|
+
const uploadScript = `${underpostRoot}/scripts/maas-upload-boot-resource.sh`;
|
|
278
|
+
const uploadCmd = `${uploadScript} ${maasProfile} "${workflow.maas.name}" "${workflow.maas.title}" "${workflow.maas.architecture}" "${workflow.maas.base_image}" "${workflow.maas.filetype}" "${tarballPath}"`;
|
|
279
|
+
|
|
280
|
+
logger.info(`Uploading to MAAS using: ${uploadScript}`);
|
|
281
|
+
const uploadResult = shellExec(uploadCmd);
|
|
282
|
+
if (uploadResult.code !== 0) {
|
|
283
|
+
logger.error(`Upload failed with exit code: ${uploadResult.code}`);
|
|
284
|
+
if (uploadResult.stdout) {
|
|
285
|
+
logger.error(`Upload output:\n${uploadResult.stdout}`);
|
|
286
|
+
}
|
|
287
|
+
if (uploadResult.stderr) {
|
|
288
|
+
logger.error(`Upload error output:\n${uploadResult.stderr}`);
|
|
289
|
+
}
|
|
290
|
+
throw new Error('MAAS upload failed - see output above for details');
|
|
291
|
+
}
|
|
292
|
+
logger.info(`Successfully uploaded ${workflow.maas.name} to MAAS!`);
|
|
293
|
+
return;
|
|
294
|
+
}
|
|
295
|
+
|
|
111
296
|
// Handle various log display options.
|
|
112
297
|
if (options.logs === 'dhcp') {
|
|
113
298
|
shellExec(`journalctl -f -t dhcpd -u snap.maas.pebble.service`);
|
|
@@ -131,7 +316,11 @@ class UnderpostBaremetal {
|
|
|
131
316
|
|
|
132
317
|
// Handle NFS shell access option.
|
|
133
318
|
if (options.nfsSh === true) {
|
|
134
|
-
const
|
|
319
|
+
const workflowsConfig = UnderpostBaremetal.API.loadWorkflowsConfig();
|
|
320
|
+
if (!workflowsConfig[workflowId]) {
|
|
321
|
+
throw new Error(`Workflow configuration not found for ID: ${workflowId}`);
|
|
322
|
+
}
|
|
323
|
+
const { debootstrap } = workflowsConfig[workflowId];
|
|
135
324
|
// Copy the chroot command to the clipboard for easy execution.
|
|
136
325
|
if (debootstrap.image.architecture !== callbackMetaData.runnerHost.architecture)
|
|
137
326
|
switch (debootstrap.image.architecture) {
|
|
@@ -196,9 +385,14 @@ class UnderpostBaremetal {
|
|
|
196
385
|
return;
|
|
197
386
|
}
|
|
198
387
|
|
|
388
|
+
const workflowsConfig = UnderpostBaremetal.API.loadWorkflowsConfig();
|
|
389
|
+
if (!workflowsConfig[workflowId]) {
|
|
390
|
+
throw new Error(`Workflow configuration not found for ID: ${workflowId}`);
|
|
391
|
+
}
|
|
392
|
+
|
|
199
393
|
// Set debootstrap architecture.
|
|
200
394
|
{
|
|
201
|
-
const { architecture } =
|
|
395
|
+
const { architecture } = workflowsConfig[workflowId].debootstrap.image;
|
|
202
396
|
debootstrapArch = architecture;
|
|
203
397
|
}
|
|
204
398
|
|
|
@@ -228,7 +422,7 @@ class UnderpostBaremetal {
|
|
|
228
422
|
|
|
229
423
|
// Perform the first stage of debootstrap.
|
|
230
424
|
{
|
|
231
|
-
const { architecture, name } =
|
|
425
|
+
const { architecture, name } = workflowsConfig[workflowId].debootstrap.image;
|
|
232
426
|
shellExec(
|
|
233
427
|
[
|
|
234
428
|
`sudo debootstrap`,
|
|
@@ -273,7 +467,7 @@ class UnderpostBaremetal {
|
|
|
273
467
|
|
|
274
468
|
// Apply system provisioning steps (base, user, timezone, keyboard).
|
|
275
469
|
{
|
|
276
|
-
const { systemProvisioning, kernelLibVersion, chronyc } =
|
|
470
|
+
const { systemProvisioning, kernelLibVersion, chronyc } = workflowsConfig[workflowId];
|
|
277
471
|
const { timezone, chronyConfPath } = chronyc;
|
|
278
472
|
|
|
279
473
|
UnderpostBaremetal.API.crossArchRunner({
|
|
@@ -327,8 +521,7 @@ class UnderpostBaremetal {
|
|
|
327
521
|
|
|
328
522
|
// Handle commissioning tasks (placeholder for future implementation).
|
|
329
523
|
if (options.commission === true) {
|
|
330
|
-
const { firmwares, networkInterfaceName, maas, netmask, menuentryStr } =
|
|
331
|
-
UnderpostBaremetal.API.workflowsConfig[workflowId];
|
|
524
|
+
const { firmwares, networkInterfaceName, maas, netmask, menuentryStr } = workflowsConfig[workflowId];
|
|
332
525
|
const resource = resources.find(
|
|
333
526
|
(o) => o.architecture === maas.image.architecture && o.name === maas.image.name,
|
|
334
527
|
);
|
|
@@ -490,7 +683,7 @@ menuentry '${menuentryStr}' {
|
|
|
490
683
|
|
|
491
684
|
// Final commissioning steps.
|
|
492
685
|
if (options.commission === true || options.cloudInitUpdate === true) {
|
|
493
|
-
const { debootstrap, networkInterfaceName, chronyc, maas } =
|
|
686
|
+
const { debootstrap, networkInterfaceName, chronyc, maas } = workflowsConfig[workflowId];
|
|
494
687
|
const { timezone, chronyConfPath } = chronyc;
|
|
495
688
|
|
|
496
689
|
// Build cloud-init tools.
|
|
@@ -742,7 +935,7 @@ menuentry '${menuentryStr}' {
|
|
|
742
935
|
// Install necessary packages for debootstrap and QEMU.
|
|
743
936
|
shellExec(`sudo dnf install -y iptables-legacy`);
|
|
744
937
|
shellExec(`sudo dnf install -y debootstrap`);
|
|
745
|
-
shellExec(`sudo dnf install kernel-modules-extra-$(uname -r)`);
|
|
938
|
+
shellExec(`sudo dnf install -y kernel-modules-extra-$(uname -r)`);
|
|
746
939
|
// Reset QEMU user-static binfmt for proper cross-architecture execution.
|
|
747
940
|
shellExec(`sudo podman run --rm --privileged docker.io/multiarch/qemu-user-static:latest --reset -p yes`);
|
|
748
941
|
// Mount binfmt_misc filesystem.
|
|
@@ -914,9 +1107,13 @@ EOF`);
|
|
|
914
1107
|
*/
|
|
915
1108
|
nfsMountCallback({ hostname, workflowId, mount, unmount }) {
|
|
916
1109
|
let isMounted = false;
|
|
1110
|
+
const workflowsConfig = UnderpostBaremetal.API.loadWorkflowsConfig();
|
|
1111
|
+
if (!workflowsConfig[workflowId]) {
|
|
1112
|
+
throw new Error(`Workflow configuration not found for ID: ${workflowId}`);
|
|
1113
|
+
}
|
|
917
1114
|
// Iterate through defined NFS mounts in the workflow configuration.
|
|
918
|
-
for (const mountCmd of Object.keys(
|
|
919
|
-
for (const mountPath of
|
|
1115
|
+
for (const mountCmd of Object.keys(workflowsConfig[workflowId].nfs.mounts)) {
|
|
1116
|
+
for (const mountPath of workflowsConfig[workflowId].nfs.mounts[mountCmd]) {
|
|
920
1117
|
const hostMountPath = `${process.env.NFS_EXPORT_PATH}/${hostname}${mountPath}`;
|
|
921
1118
|
// Check if the path is already mounted using `mountpoint` command.
|
|
922
1119
|
const isPathMounted = !shellExec(`mountpoint ${hostMountPath}`, { silent: true, stdout: true }).match(
|
|
@@ -1262,55 +1459,46 @@ GATEWAY=${gateway}`;
|
|
|
1262
1459
|
},
|
|
1263
1460
|
|
|
1264
1461
|
/**
|
|
1265
|
-
* @
|
|
1266
|
-
* @
|
|
1462
|
+
* @method loadWorkflowsConfig
|
|
1463
|
+
* @namespace UnderpostBaremetal.API
|
|
1464
|
+
* @description Loads the commission workflows configuration from commission-workflows.json.
|
|
1267
1465
|
* Each workflow defines specific parameters like system provisioning type,
|
|
1268
1466
|
* kernel version, Chrony settings, debootstrap image details, and NFS mounts.
|
|
1269
1467
|
* @memberof UnderpostBaremetal
|
|
1270
1468
|
*/
|
|
1271
|
-
|
|
1272
|
-
|
|
1273
|
-
|
|
1274
|
-
|
|
1275
|
-
|
|
1276
|
-
|
|
1277
|
-
|
|
1278
|
-
|
|
1279
|
-
|
|
1280
|
-
|
|
1281
|
-
|
|
1282
|
-
|
|
1283
|
-
|
|
1284
|
-
|
|
1285
|
-
|
|
1286
|
-
|
|
1287
|
-
|
|
1288
|
-
|
|
1289
|
-
|
|
1290
|
-
|
|
1291
|
-
|
|
1292
|
-
|
|
1293
|
-
|
|
1294
|
-
|
|
1295
|
-
|
|
1296
|
-
|
|
1297
|
-
|
|
1298
|
-
|
|
1299
|
-
|
|
1300
|
-
|
|
1301
|
-
|
|
1302
|
-
|
|
1303
|
-
|
|
1304
|
-
},
|
|
1305
|
-
},
|
|
1306
|
-
nfs: {
|
|
1307
|
-
mounts: {
|
|
1308
|
-
// Define NFS mount points and their types (bind, rbind).
|
|
1309
|
-
bind: ['/proc', '/sys', '/run'], // Standard bind mounts.
|
|
1310
|
-
rbind: ['/dev'], // Recursive bind mount for /dev.
|
|
1311
|
-
},
|
|
1312
|
-
},
|
|
1313
|
-
},
|
|
1469
|
+
loadWorkflowsConfig() {
|
|
1470
|
+
if (this._workflowsConfig) return this._workflowsConfig;
|
|
1471
|
+
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
|
1472
|
+
const configPath = path.resolve(__dirname, '../../baremetal/commission-workflows.json');
|
|
1473
|
+
this._workflowsConfig = fs.readJsonSync(configPath);
|
|
1474
|
+
return this._workflowsConfig;
|
|
1475
|
+
},
|
|
1476
|
+
|
|
1477
|
+
/**
|
|
1478
|
+
* @method loadPackerMaasImageBuildWorkflows
|
|
1479
|
+
* @description Loads the Packer MAAS image build workflows configuration from packer-workflows.json.
|
|
1480
|
+
* @memberof UnderpostBaremetal
|
|
1481
|
+
*/
|
|
1482
|
+
loadPackerMaasImageBuildWorkflows() {
|
|
1483
|
+
if (this._packerMaasImageBuildWorkflows) return this._packerMaasImageBuildWorkflows;
|
|
1484
|
+
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
|
1485
|
+
const configPath = path.resolve(__dirname, '../../baremetal/packer-workflows.json');
|
|
1486
|
+
this._packerMaasImageBuildWorkflows = fs.readJsonSync(configPath);
|
|
1487
|
+
return this._packerMaasImageBuildWorkflows;
|
|
1488
|
+
},
|
|
1489
|
+
|
|
1490
|
+
/**
|
|
1491
|
+
* Write Packer MAAS image build workflows configuration to file
|
|
1492
|
+
* @param {object} workflows - The workflows configuration object
|
|
1493
|
+
* @description Writes the Packer MAAS image build workflows to packer-workflows.json
|
|
1494
|
+
* @memberof UnderpostBaremetal
|
|
1495
|
+
*/
|
|
1496
|
+
writePackerMaasImageBuildWorkflows(workflows) {
|
|
1497
|
+
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
|
1498
|
+
const configPath = path.resolve(__dirname, '../../baremetal/packer-workflows.json');
|
|
1499
|
+
fs.writeJsonSync(configPath, workflows, { spaces: 2 });
|
|
1500
|
+
this._packerMaasImageBuildWorkflows = workflows;
|
|
1501
|
+
return configPath;
|
|
1314
1502
|
},
|
|
1315
1503
|
};
|
|
1316
1504
|
}
|
package/src/cli/cloud-init.js
CHANGED
|
@@ -43,7 +43,7 @@ class UnderpostCloudInit {
|
|
|
43
43
|
buildTools({ workflowId, nfsHostPath, hostname, callbackMetaData, dev }) {
|
|
44
44
|
// Destructure workflow configuration for easier access.
|
|
45
45
|
const { systemProvisioning, chronyc, networkInterfaceName, debootstrap } =
|
|
46
|
-
UnderpostBaremetal.API.
|
|
46
|
+
UnderpostBaremetal.API.loadWorkflowsConfig()[workflowId];
|
|
47
47
|
const { timezone, chronyConfPath } = chronyc;
|
|
48
48
|
// Define the specific directory for underpost tools within the NFS host path.
|
|
49
49
|
const nfsHostToolsPath = `${nfsHostPath}/underpost`;
|
package/src/cli/env.js
CHANGED
|
@@ -74,17 +74,38 @@ class UnderpostRootEnv {
|
|
|
74
74
|
/**
|
|
75
75
|
* @method list
|
|
76
76
|
* @description Lists all environment variables in the underpost root environment.
|
|
77
|
+
* @param {string} key - Not used for list operation.
|
|
78
|
+
* @param {string} value - Not used for list operation.
|
|
79
|
+
* @param {object} options - Options for listing environment variables.
|
|
80
|
+
* @param {string} [options.filter] - Filter keyword to match against keys or values.
|
|
77
81
|
* @memberof UnderpostEnv
|
|
78
82
|
*/
|
|
79
|
-
list() {
|
|
83
|
+
list(key, value, options = {}) {
|
|
80
84
|
const exeRootPath = `${getNpmRootPath()}/underpost`;
|
|
81
85
|
const envPath = `${exeRootPath}/.env`;
|
|
82
86
|
if (!fs.existsSync(envPath)) {
|
|
83
87
|
logger.warn(`Empty environment variables`);
|
|
84
88
|
return {};
|
|
85
89
|
}
|
|
86
|
-
|
|
87
|
-
|
|
90
|
+
let env = dotenv.parse(fs.readFileSync(envPath, 'utf8'));
|
|
91
|
+
|
|
92
|
+
// Apply filter if provided
|
|
93
|
+
if (options.filter) {
|
|
94
|
+
const filterKeyword = options.filter.toLowerCase();
|
|
95
|
+
const filtered = {};
|
|
96
|
+
for (const [envKey, envValue] of Object.entries(env)) {
|
|
97
|
+
const keyMatch = envKey.toLowerCase().includes(filterKeyword);
|
|
98
|
+
const valueMatch = String(envValue).toLowerCase().includes(filterKeyword);
|
|
99
|
+
if (keyMatch || valueMatch) {
|
|
100
|
+
filtered[envKey] = envValue;
|
|
101
|
+
}
|
|
102
|
+
}
|
|
103
|
+
env = filtered;
|
|
104
|
+
logger.info(`underpost root (filtered by: ${options.filter})`, env);
|
|
105
|
+
} else {
|
|
106
|
+
logger.info('underpost root', env);
|
|
107
|
+
}
|
|
108
|
+
|
|
88
109
|
return env;
|
|
89
110
|
},
|
|
90
111
|
/**
|
package/src/cli/index.js
CHANGED
|
@@ -181,6 +181,7 @@ program
|
|
|
181
181
|
.argument('[key]', 'Optional: The specific configuration key to manage.')
|
|
182
182
|
.argument('[value]', 'Optional: The value to set for the configuration key.')
|
|
183
183
|
.option('--plain', 'Prints the configuration value in plain text.')
|
|
184
|
+
.option('--filter <keyword>', 'Filters the list by matching key or value (only for list operation).')
|
|
184
185
|
.description(`Manages Underpost configurations using various operators.`)
|
|
185
186
|
.action((...args) => Underpost.env[args[0]](args[1], args[2], args[3]));
|
|
186
187
|
|
|
@@ -610,6 +611,20 @@ program
|
|
|
610
611
|
.option('--control-server-uninstall', 'Uninstalls the baremetal control server.')
|
|
611
612
|
.option('--control-server-db-install', 'Installs up the database for the baremetal control server.')
|
|
612
613
|
.option('--control-server-db-uninstall', 'Uninstalls the database for the baremetal control server.')
|
|
614
|
+
.option('--install-packer', 'Installs Packer CLI.')
|
|
615
|
+
.option(
|
|
616
|
+
'--packer-maas-image-template <template-path>',
|
|
617
|
+
'Creates a new image folder from canonical/packer-maas template path (requires workflow-id).',
|
|
618
|
+
)
|
|
619
|
+
.option('--packer-workflow-id <workflow-id>', 'Specifies the workflow ID for Packer MAAS image operations.')
|
|
620
|
+
.option(
|
|
621
|
+
'--packer-maas-image-build',
|
|
622
|
+
'Builds a MAAS image using Packer for the workflow specified by --packer-workflow-id.',
|
|
623
|
+
)
|
|
624
|
+
.option(
|
|
625
|
+
'--packer-maas-image-upload',
|
|
626
|
+
'Uploads an existing MAAS image artifact without rebuilding for the workflow specified by --packer-workflow-id.',
|
|
627
|
+
)
|
|
613
628
|
.option('--commission', 'Init workflow for commissioning a physical machine.')
|
|
614
629
|
.option('--nfs-build', 'Builds an NFS root filesystem for a workflow id config architecture using QEMU emulation.')
|
|
615
630
|
.option('--nfs-mount', 'Mounts the NFS root filesystem for a workflow id config architecture.')
|
package/src/cli/repository.js
CHANGED
|
@@ -601,6 +601,170 @@ Prevent build private config repo.`,
|
|
|
601
601
|
shellExec(`cd ${path} && git clean -f -d`, { silent: true });
|
|
602
602
|
}
|
|
603
603
|
},
|
|
604
|
+
|
|
605
|
+
/**
|
|
606
|
+
* Copies files recursively from a Git repository URL directory path.
|
|
607
|
+
* @param {object} options - Configuration options for copying files.
|
|
608
|
+
* @param {string} options.gitUrl - The GitHub repository URL (e.g., 'https://github.com/canonical/packer-maas').
|
|
609
|
+
* @param {string} options.directoryPath - The directory path within the repository to copy (e.g., 'rocky-9').
|
|
610
|
+
* @param {string} options.targetPath - The local target path where files should be copied.
|
|
611
|
+
* @param {string} [options.branch='main'] - The git branch to use (default: 'main').
|
|
612
|
+
* @param {boolean} [options.overwrite=false] - Whether to overwrite existing target directory.
|
|
613
|
+
* @returns {Promise<object>} A promise that resolves with copied files information.
|
|
614
|
+
* @memberof UnderpostRepository
|
|
615
|
+
*/
|
|
616
|
+
async copyGitUrlDirectoryRecursive(options) {
|
|
617
|
+
const { gitUrl, directoryPath, targetPath, branch = 'main', overwrite = false } = options;
|
|
618
|
+
|
|
619
|
+
// Validate inputs
|
|
620
|
+
if (!gitUrl) {
|
|
621
|
+
throw new Error('gitUrl is required');
|
|
622
|
+
}
|
|
623
|
+
if (!directoryPath) {
|
|
624
|
+
throw new Error('directoryPath is required');
|
|
625
|
+
}
|
|
626
|
+
if (!targetPath) {
|
|
627
|
+
throw new Error('targetPath is required');
|
|
628
|
+
}
|
|
629
|
+
|
|
630
|
+
// Parse GitHub URL to extract owner and repo
|
|
631
|
+
const urlMatch = gitUrl.match(/github\.com\/([^\/]+)\/([^\/\.]+)/);
|
|
632
|
+
if (!urlMatch) {
|
|
633
|
+
throw new Error(`Invalid GitHub URL: ${gitUrl}`);
|
|
634
|
+
}
|
|
635
|
+
const [, owner, repo] = urlMatch;
|
|
636
|
+
|
|
637
|
+
logger.info(`Copying from ${owner}/${repo}/${directoryPath} to ${targetPath}`);
|
|
638
|
+
|
|
639
|
+
// Check if target directory exists
|
|
640
|
+
if (fs.existsSync(targetPath) && !overwrite) {
|
|
641
|
+
throw new Error(`Target directory already exists: ${targetPath}. Use overwrite option to replace.`);
|
|
642
|
+
}
|
|
643
|
+
|
|
644
|
+
// Create target directory
|
|
645
|
+
fs.mkdirSync(targetPath, { recursive: true });
|
|
646
|
+
|
|
647
|
+
// GitHub API base URL
|
|
648
|
+
const githubApiBase = 'https://api.github.com/repos';
|
|
649
|
+
const apiUrl = `${githubApiBase}/${owner}/${repo}/contents/${directoryPath}`;
|
|
650
|
+
|
|
651
|
+
logger.info(`Fetching directory contents from: ${apiUrl}`);
|
|
652
|
+
|
|
653
|
+
try {
|
|
654
|
+
// Fetch directory contents recursively
|
|
655
|
+
const copiedFiles = await this._fetchAndCopyGitHubDirectory({
|
|
656
|
+
apiUrl,
|
|
657
|
+
targetPath,
|
|
658
|
+
basePath: directoryPath,
|
|
659
|
+
branch,
|
|
660
|
+
});
|
|
661
|
+
|
|
662
|
+
logger.info(`Successfully copied ${copiedFiles.length} files to ${targetPath}`);
|
|
663
|
+
|
|
664
|
+
return {
|
|
665
|
+
success: true,
|
|
666
|
+
filesCount: copiedFiles.length,
|
|
667
|
+
files: copiedFiles,
|
|
668
|
+
targetPath,
|
|
669
|
+
};
|
|
670
|
+
} catch (error) {
|
|
671
|
+
// Clean up on error
|
|
672
|
+
if (fs.existsSync(targetPath)) {
|
|
673
|
+
fs.removeSync(targetPath);
|
|
674
|
+
logger.warn(`Cleaned up target directory after error: ${targetPath}`);
|
|
675
|
+
}
|
|
676
|
+
throw new Error(`Failed to copy directory: ${error.message}`);
|
|
677
|
+
}
|
|
678
|
+
},
|
|
679
|
+
|
|
680
|
+
/**
|
|
681
|
+
* Internal method to recursively fetch and copy files from GitHub API.
|
|
682
|
+
* @private
|
|
683
|
+
* @param {object} options - Fetch options.
|
|
684
|
+
* @param {string} options.apiUrl - The GitHub API URL.
|
|
685
|
+
* @param {string} options.targetPath - The local target path.
|
|
686
|
+
* @param {string} options.basePath - The base path in the repository.
|
|
687
|
+
* @param {string} options.branch - The git branch.
|
|
688
|
+
* @returns {Promise<array>} Array of copied file paths.
|
|
689
|
+
* @memberof UnderpostRepository
|
|
690
|
+
*/
|
|
691
|
+
async _fetchAndCopyGitHubDirectory(options) {
|
|
692
|
+
const { apiUrl, targetPath, basePath, branch } = options;
|
|
693
|
+
const copiedFiles = [];
|
|
694
|
+
|
|
695
|
+
const response = await fetch(apiUrl, {
|
|
696
|
+
headers: {
|
|
697
|
+
Accept: 'application/vnd.github.v3+json',
|
|
698
|
+
'User-Agent': 'underpost-cli',
|
|
699
|
+
},
|
|
700
|
+
});
|
|
701
|
+
|
|
702
|
+
if (!response.ok) {
|
|
703
|
+
const errorBody = await response.text();
|
|
704
|
+
logger.error(`GitHub API request failed for: ${apiUrl}`);
|
|
705
|
+
logger.error(`Status: ${response.status} ${response.statusText}`);
|
|
706
|
+
logger.error(`Response: ${errorBody}`);
|
|
707
|
+
throw new Error(`GitHub API request failed: ${response.status} ${response.statusText} - ${errorBody}`);
|
|
708
|
+
}
|
|
709
|
+
|
|
710
|
+
const contents = await response.json();
|
|
711
|
+
|
|
712
|
+
if (!Array.isArray(contents)) {
|
|
713
|
+
logger.error(`Expected directory but got: ${typeof contents}`);
|
|
714
|
+
logger.error(`API URL: ${apiUrl}`);
|
|
715
|
+
logger.error(`Response keys: ${Object.keys(contents).join(', ')}`);
|
|
716
|
+
if (contents.message) {
|
|
717
|
+
logger.error(`GitHub message: ${contents.message}`);
|
|
718
|
+
}
|
|
719
|
+
throw new Error(
|
|
720
|
+
`Path is not a directory: ${basePath}. Response: ${JSON.stringify(contents).substring(0, 200)}`,
|
|
721
|
+
);
|
|
722
|
+
}
|
|
723
|
+
|
|
724
|
+
logger.info(`Found ${contents.length} items in directory: ${basePath}`);
|
|
725
|
+
|
|
726
|
+
// Process each item in the directory
|
|
727
|
+
for (const item of contents) {
|
|
728
|
+
const itemTargetPath = `${targetPath}/${item.name}`;
|
|
729
|
+
|
|
730
|
+
if (item.type === 'file') {
|
|
731
|
+
logger.info(`Downloading file: ${item.path}`);
|
|
732
|
+
|
|
733
|
+
// Download file content
|
|
734
|
+
const fileResponse = await fetch(item.download_url);
|
|
735
|
+
if (!fileResponse.ok) {
|
|
736
|
+
logger.error(`Failed to download: ${item.download_url}`);
|
|
737
|
+
throw new Error(`Failed to download file: ${item.path} (${fileResponse.status})`);
|
|
738
|
+
}
|
|
739
|
+
|
|
740
|
+
const fileContent = await fileResponse.text();
|
|
741
|
+
fs.writeFileSync(itemTargetPath, fileContent);
|
|
742
|
+
|
|
743
|
+
logger.info(`✓ Saved: ${itemTargetPath}`);
|
|
744
|
+
copiedFiles.push(itemTargetPath);
|
|
745
|
+
} else if (item.type === 'dir') {
|
|
746
|
+
logger.info(`📁 Processing directory: ${item.path}`);
|
|
747
|
+
|
|
748
|
+
// Create subdirectory
|
|
749
|
+
fs.mkdirSync(itemTargetPath, { recursive: true });
|
|
750
|
+
|
|
751
|
+
// Recursively process subdirectory
|
|
752
|
+
const subFiles = await this._fetchAndCopyGitHubDirectory({
|
|
753
|
+
apiUrl: item.url,
|
|
754
|
+
targetPath: itemTargetPath,
|
|
755
|
+
basePath: item.path,
|
|
756
|
+
branch,
|
|
757
|
+
});
|
|
758
|
+
|
|
759
|
+
copiedFiles.push(...subFiles);
|
|
760
|
+
logger.info(`✓ Completed directory: ${item.path} (${subFiles.length} files)`);
|
|
761
|
+
} else {
|
|
762
|
+
logger.warn(`Skipping unknown item type '${item.type}': ${item.path}`);
|
|
763
|
+
}
|
|
764
|
+
}
|
|
765
|
+
|
|
766
|
+
return copiedFiles;
|
|
767
|
+
},
|
|
604
768
|
};
|
|
605
769
|
}
|
|
606
770
|
|