@underpostnet/underpost 2.95.8 → 2.96.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -2
- package/baremetal/commission-workflows.json +44 -0
- package/baremetal/packer-workflows.json +24 -0
- package/cli.md +29 -31
- package/manifests/deployment/dd-default-development/deployment.yaml +2 -2
- package/manifests/deployment/dd-test-development/deployment.yaml +2 -2
- package/package.json +1 -1
- package/packer/images/Rocky9Amd64/Makefile +62 -0
- package/packer/images/Rocky9Amd64/QUICKSTART.md +113 -0
- package/packer/images/Rocky9Amd64/README.md +122 -0
- package/packer/images/Rocky9Amd64/http/rocky9.ks.pkrtpl.hcl +114 -0
- package/packer/images/Rocky9Amd64/rocky9.pkr.hcl +164 -0
- package/packer/images/Rocky9Arm64/Makefile +69 -0
- package/packer/images/Rocky9Arm64/README.md +122 -0
- package/packer/images/Rocky9Arm64/http/rocky9.ks.pkrtpl.hcl +114 -0
- package/packer/images/Rocky9Arm64/rocky9.pkr.hcl +171 -0
- package/packer/scripts/fuse-nbd +64 -0
- package/packer/scripts/fuse-tar-root +63 -0
- package/scripts/maas-setup.sh +13 -2
- package/scripts/maas-upload-boot-resource.sh +183 -0
- package/scripts/packer-init-vars-file.sh +40 -0
- package/scripts/packer-setup.sh +289 -0
- package/src/cli/baremetal.js +342 -55
- package/src/cli/cloud-init.js +1 -1
- package/src/cli/env.js +24 -3
- package/src/cli/index.js +19 -0
- package/src/cli/repository.js +164 -0
- package/src/index.js +2 -1
- package/manifests/mariadb/config.yaml +0 -10
- package/manifests/mariadb/secret.yaml +0 -8
- package/src/client/ssr/pages/404.js +0 -12
- package/src/client/ssr/pages/500.js +0 -12
- package/src/client/ssr/pages/maintenance.js +0 -14
- package/src/client/ssr/pages/offline.js +0 -21
package/src/cli/repository.js
CHANGED

@@ -601,6 +601,170 @@ Prevent build private config repo.`,
       shellExec(`cd ${path} && git clean -f -d`, { silent: true });
     }
   },
+
+  /**
+   * Copies files recursively from a Git repository URL directory path.
+   * @param {object} options - Configuration options for copying files.
+   * @param {string} options.gitUrl - The GitHub repository URL (e.g., 'https://github.com/canonical/packer-maas').
+   * @param {string} options.directoryPath - The directory path within the repository to copy (e.g., 'rocky-9').
+   * @param {string} options.targetPath - The local target path where files should be copied.
+   * @param {string} [options.branch='main'] - The git branch to use (default: 'main').
+   * @param {boolean} [options.overwrite=false] - Whether to overwrite existing target directory.
+   * @returns {Promise<object>} A promise that resolves with copied files information.
+   * @memberof UnderpostRepository
+   */
+  async copyGitUrlDirectoryRecursive(options) {
+    const { gitUrl, directoryPath, targetPath, branch = 'main', overwrite = false } = options;
+
+    // Validate inputs
+    if (!gitUrl) {
+      throw new Error('gitUrl is required');
+    }
+    if (!directoryPath) {
+      throw new Error('directoryPath is required');
+    }
+    if (!targetPath) {
+      throw new Error('targetPath is required');
+    }
+
+    // Parse GitHub URL to extract owner and repo
+    const urlMatch = gitUrl.match(/github\.com\/([^\/]+)\/([^\/\.]+)/);
+    if (!urlMatch) {
+      throw new Error(`Invalid GitHub URL: ${gitUrl}`);
+    }
+    const [, owner, repo] = urlMatch;
+
+    logger.info(`Copying from ${owner}/${repo}/${directoryPath} to ${targetPath}`);
+
+    // Check if target directory exists
+    if (fs.existsSync(targetPath) && !overwrite) {
+      throw new Error(`Target directory already exists: ${targetPath}. Use overwrite option to replace.`);
+    }
+
+    // Create target directory
+    fs.mkdirSync(targetPath, { recursive: true });
+
+    // GitHub API base URL
+    const githubApiBase = 'https://api.github.com/repos';
+    const apiUrl = `${githubApiBase}/${owner}/${repo}/contents/${directoryPath}`;
+
+    logger.info(`Fetching directory contents from: ${apiUrl}`);
+
+    try {
+      // Fetch directory contents recursively
+      const copiedFiles = await this._fetchAndCopyGitHubDirectory({
+        apiUrl,
+        targetPath,
+        basePath: directoryPath,
+        branch,
+      });
+
+      logger.info(`Successfully copied ${copiedFiles.length} files to ${targetPath}`);
+
+      return {
+        success: true,
+        filesCount: copiedFiles.length,
+        files: copiedFiles,
+        targetPath,
+      };
+    } catch (error) {
+      // Clean up on error
+      if (fs.existsSync(targetPath)) {
+        fs.removeSync(targetPath);
+        logger.warn(`Cleaned up target directory after error: ${targetPath}`);
+      }
+      throw new Error(`Failed to copy directory: ${error.message}`);
+    }
+  },
+
+  /**
+   * Internal method to recursively fetch and copy files from GitHub API.
+   * @private
+   * @param {object} options - Fetch options.
+   * @param {string} options.apiUrl - The GitHub API URL.
+   * @param {string} options.targetPath - The local target path.
+   * @param {string} options.basePath - The base path in the repository.
+   * @param {string} options.branch - The git branch.
+   * @returns {Promise<array>} Array of copied file paths.
+   * @memberof UnderpostRepository
+   */
+  async _fetchAndCopyGitHubDirectory(options) {
+    const { apiUrl, targetPath, basePath, branch } = options;
+    const copiedFiles = [];
+
+    const response = await fetch(apiUrl, {
+      headers: {
+        Accept: 'application/vnd.github.v3+json',
+        'User-Agent': 'underpost-cli',
+      },
+    });
+
+    if (!response.ok) {
+      const errorBody = await response.text();
+      logger.error(`GitHub API request failed for: ${apiUrl}`);
+      logger.error(`Status: ${response.status} ${response.statusText}`);
+      logger.error(`Response: ${errorBody}`);
+      throw new Error(`GitHub API request failed: ${response.status} ${response.statusText} - ${errorBody}`);
+    }
+
+    const contents = await response.json();
+
+    if (!Array.isArray(contents)) {
+      logger.error(`Expected directory but got: ${typeof contents}`);
+      logger.error(`API URL: ${apiUrl}`);
+      logger.error(`Response keys: ${Object.keys(contents).join(', ')}`);
+      if (contents.message) {
+        logger.error(`GitHub message: ${contents.message}`);
+      }
+      throw new Error(
+        `Path is not a directory: ${basePath}. Response: ${JSON.stringify(contents).substring(0, 200)}`,
+      );
+    }
+
+    logger.info(`Found ${contents.length} items in directory: ${basePath}`);
+
+    // Process each item in the directory
+    for (const item of contents) {
+      const itemTargetPath = `${targetPath}/${item.name}`;
+
+      if (item.type === 'file') {
+        logger.info(`Downloading file: ${item.path}`);
+
+        // Download file content
+        const fileResponse = await fetch(item.download_url);
+        if (!fileResponse.ok) {
+          logger.error(`Failed to download: ${item.download_url}`);
+          throw new Error(`Failed to download file: ${item.path} (${fileResponse.status})`);
+        }
+
+        const fileContent = await fileResponse.text();
+        fs.writeFileSync(itemTargetPath, fileContent);
+
+        logger.info(`✓ Saved: ${itemTargetPath}`);
+        copiedFiles.push(itemTargetPath);
+      } else if (item.type === 'dir') {
+        logger.info(`📁 Processing directory: ${item.path}`);
+
+        // Create subdirectory
+        fs.mkdirSync(itemTargetPath, { recursive: true });
+
+        // Recursively process subdirectory
+        const subFiles = await this._fetchAndCopyGitHubDirectory({
+          apiUrl: item.url,
+          targetPath: itemTargetPath,
+          basePath: item.path,
+          branch,
+        });
+
+        copiedFiles.push(...subFiles);
+        logger.info(`✓ Completed directory: ${item.path} (${subFiles.length} files)`);
+      } else {
+        logger.warn(`Skipping unknown item type '${item.type}': ${item.path}`);
+      }
+    }
+
+    return copiedFiles;
+  },
 };
 }
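For orientation, here is a minimal usage sketch of the new `copyGitUrlDirectoryRecursive` helper, based only on the JSDoc in the diff above. The package specifier, the `API` accessor, and the local target path are assumptions made for illustration; this diff does not show how the method is exposed to callers.

```js
// Hypothetical usage of the copy helper added in 2.96.1 (sketch, not confirmed by this diff).
// Assumes the package entry re-exports UnderpostRepository and that the new method
// lives on its static `API` object (the accessor name is an assumption).
import { UnderpostRepository } from '@underpostnet/underpost';

const result = await UnderpostRepository.API.copyGitUrlDirectoryRecursive({
  gitUrl: 'https://github.com/canonical/packer-maas', // example URL from the JSDoc
  directoryPath: 'rocky-9', // directory inside the repository, per the JSDoc example
  targetPath: './tmp/packer-maas-rocky-9', // hypothetical local destination
  branch: 'main',
  overwrite: true, // replace the target directory if it already exists
});

console.log(`${result.filesCount} files copied to ${result.targetPath}`);
```

On success the method resolves with `{ success, filesCount, files, targetPath }`; on failure it removes the partially copied target directory before rethrowing, so callers need no extra cleanup.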
package/src/index.js
CHANGED

@@ -36,7 +36,7 @@ class Underpost {
    * @type {String}
    * @memberof Underpost
    */
-  static version = 'v2.95.8';
+  static version = 'v2.96.1';
   /**
    * Repository cli API
    * @static
@@ -193,6 +193,7 @@ export {
   UnderpostRootEnv,
   UnderpostFileStorage,
   UnderpostImage,
+  UnderpostStatic,
   UnderpostLxd,
   UnderpostMonitor,
   UnderpostRepository,
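The entry-point change is small: the version constant is bumped and `UnderpostStatic` joins the named exports. A consumption sketch follows, assuming the `@underpostnet/underpost` specifier resolves to this entry file and that the `Underpost` class is also the default export (not visible in this hunk).

```js
// Sketch only: the default export and the package specifier are assumptions.
import Underpost, { UnderpostStatic, UnderpostRepository } from '@underpostnet/underpost';

console.log(Underpost.version); // 'v2.96.1' after this release
console.log(typeof UnderpostStatic); // newly re-exported named member in 2.96.1
```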
package/src/client/ssr/pages/404.js
REMOVED

@@ -1,12 +0,0 @@
-import { e404 } from '../common/Alert.js';
-import { append } from '../common/SsrCore.js';
-import { Translate } from '../common/Translate.js';
-import { Worker } from '../common/Worker.js';
-/*imports*/
-
-window.onload = () =>
-  Worker.instance({
-    render: async () => {
-      append('.page-render', await e404({ Translate }));
-    },
-  });
package/src/client/ssr/pages/500.js
REMOVED

@@ -1,12 +0,0 @@
-import { e500 } from '../common/Alert.js';
-import { append } from '../common/SsrCore.js';
-import { Translate } from '../common/Translate.js';
-import { Worker } from '../common/Worker.js';
-/*imports*/
-
-window.onload = () =>
-  Worker.instance({
-    render: async () => {
-      append('.page-render', await e500({ Translate }));
-    },
-  });
package/src/client/ssr/pages/maintenance.js
REMOVED

@@ -1,14 +0,0 @@
-import { htmls, loggerFactory } from '../common/SsrCore.js';
-import { Alert } from '../common/Alert.js';
-import { Translate } from '../common/Translate.js';
-import { Worker } from '../common/Worker.js';
-/*imports*/
-
-const logger = loggerFactory({ url: location.toString() });
-
-window.onload = () =>
-  Worker.instance({
-    render: async () => {
-      htmls(`.page-render`, html`${await Alert.maintenance({ Translate })}`);
-    },
-  });
package/src/client/ssr/pages/offline.js
REMOVED

@@ -1,21 +0,0 @@
-import { htmls, loggerFactory } from '../common/SsrCore.js';
-import { Alert } from '../common/Alert.js';
-import { Translate } from '../common/Translate.js';
-import { Worker } from '../common/Worker.js';
-/*imports*/
-
-const logger = loggerFactory({ url: location.toString() });
-
-window.onload = () =>
-  Worker.instance({
-    render: async () => {
-      window.ononline = async () => {
-        location.href = location.pathname.split('/')[1] ? `/${location.pathname.split('/')[1].split('.')[0]}` : '/';
-      };
-      window.onoffline = async () => {
-        htmls(`.page-render`, html`${await Alert.noInternet({ Translate })}`);
-      };
-      if (navigator.onLine && !location.hostname.match('localhost')) window.ononline();
-      else window.onoffline();
-    },
-  });