@underpostnet/underpost 2.96.1 → 2.97.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.dockerignore +1 -2
- package/.env.development +0 -3
- package/.env.production +0 -3
- package/.env.test +0 -3
- package/.prettierignore +1 -2
- package/README.md +31 -31
- package/baremetal/commission-workflows.json +94 -17
- package/bin/deploy.js +1 -1
- package/cli.md +75 -41
- package/conf.js +1 -0
- package/manifests/deployment/dd-default-development/deployment.yaml +2 -2
- package/manifests/deployment/dd-test-development/deployment.yaml +4 -4
- package/package.json +3 -2
- package/packer/scripts/fuse-tar-root +3 -3
- package/scripts/disk-clean.sh +128 -187
- package/scripts/gpu-diag.sh +2 -2
- package/scripts/ip-info.sh +11 -11
- package/scripts/ipxe-setup.sh +197 -0
- package/scripts/maas-upload-boot-resource.sh +1 -1
- package/scripts/nvim.sh +1 -1
- package/scripts/packer-setup.sh +13 -13
- package/scripts/ports-ls.sh +31 -0
- package/scripts/quick-tftp.sh +19 -0
- package/scripts/rocky-setup.sh +2 -2
- package/scripts/rpmfusion-ffmpeg-setup.sh +4 -4
- package/scripts/ssl.sh +7 -7
- package/src/api/document/document.controller.js +15 -0
- package/src/api/document/document.model.js +44 -1
- package/src/api/document/document.router.js +2 -0
- package/src/api/document/document.service.js +398 -26
- package/src/cli/baremetal.js +2001 -463
- package/src/cli/cloud-init.js +354 -231
- package/src/cli/cluster.js +51 -53
- package/src/cli/db.js +22 -0
- package/src/cli/deploy.js +7 -3
- package/src/cli/image.js +1 -0
- package/src/cli/index.js +40 -37
- package/src/cli/lxd.js +3 -3
- package/src/cli/run.js +78 -12
- package/src/cli/ssh.js +1 -1
- package/src/client/components/core/Css.js +16 -2
- package/src/client/components/core/Input.js +3 -1
- package/src/client/components/core/Modal.js +125 -159
- package/src/client/components/core/Panel.js +436 -31
- package/src/client/components/core/PanelForm.js +222 -37
- package/src/client/components/core/SearchBox.js +801 -0
- package/src/client/components/core/Translate.js +11 -0
- package/src/client/services/document/document.service.js +42 -0
- package/src/index.js +1 -1
- package/src/server/dns.js +12 -6
- package/src/server/start.js +14 -6
package/scripts/packer-setup.sh
CHANGED
@@ -39,13 +39,13 @@ print "Distro detected: $PRETTY_NAME"
 # Enable helpful repos and install helpers
 print "Installing dnf-plugins-core and enabling CRB/PowerTools (if available)"
 set +e
-dnf install -y dnf-plugins-core >/dev/null 2>&1
-dnf config-manager --set-enabled crb >/dev/null 2>&1
-dnf config-manager --set-enabled powertools >/dev/null 2>&1
+dnf install -y dnf-plugins-core >/dev/null 2>&1
+dnf config-manager --set-enabled crb >/dev/null 2>&1
+dnf config-manager --set-enabled powertools >/dev/null 2>&1
 # EPEL
 if ! rpm -q epel-release >/dev/null 2>&1; then
 print "Installing epel-release"
-dnf install -y epel-release
+dnf install -y epel-release
 fi
 set -e

@@ -86,7 +86,7 @@ done

 print "Enabling and starting libvirtd"
 systemctl enable --now libvirtd || err "Failed to enable/start libvirtd"
-systemctl status libvirtd --no-pager
+systemctl status libvirtd --no-pager

 # 3) Install NBD and filesystem tools required for MAAS image creation
 print "Installing NBD and filesystem tooling: libnbd, nbdkit, e2fsprogs, kmod packages (best-effort)"
@@ -125,7 +125,7 @@ if command -v podman >/dev/null 2>&1; then
 fi
 done

-podman rm "$CONTAINER_ID" >/dev/null 2>&1
+podman rm "$CONTAINER_ID" >/dev/null 2>&1
 fi
 else
 print "podman not available. Install podman to register binfmt for container/chroot convenience."
@@ -181,8 +181,8 @@ else
 print "qemu-system-aarch64 now available after package install"
 else
 print "Compiling QEMU with aarch64-softmmu target. Installing build deps..."
-dnf groupinstall -y 'Development Tools'
-dnf install -y git libaio-devel libgcrypt-devel libfdt-devel glib2-devel zlib-devel pixman-devel libseccomp-devel libusb1-devel openssl-devel bison flex python3 python3-pip ninja-build
+dnf groupinstall -y 'Development Tools'
+dnf install -y git libaio-devel libgcrypt-devel libfdt-devel glib2-devel zlib-devel pixman-devel libseccomp-devel libusb1-devel openssl-devel bison flex python3 python3-pip ninja-build

 # Enforce libslirp-devel for user networking
 if ! dnf install -y libslirp-devel; then
@@ -192,8 +192,8 @@ else

 # Install required Python packages for QEMU build
 print "Installing Python dependencies for QEMU build"
-python3 -m pip install --upgrade pip
-python3 -m pip install tomli meson
+python3 -m pip install --upgrade pip
+python3 -m pip install tomli meson

 TMPDIR=$(mktemp -d)
 print "Cloning QEMU source to $TMPDIR/qemu"
@@ -209,7 +209,7 @@ else
 make install || err "QEMU install failed"
 # Update PATH to include /usr/local/bin where QEMU was installed
 export PATH="/usr/local/bin:$PATH"
-hash -r
+hash -r
 else
 err "QEMU build (make) failed"
 fi
@@ -224,7 +224,7 @@ else
 fi

 cd /
-rm -rf "$TMPDIR"
+rm -rf "$TMPDIR"
 print "Removed build directory $TMPDIR"
 fi
 fi
@@ -242,7 +242,7 @@ else
 fi
 if command -v qemu-aarch64-static >/dev/null 2>&1; then print "qemu-aarch64-static: $(command -v qemu-aarch64-static)"; else print "qemu-aarch64-static: NOT INSTALLED"; fi
 print "libvirtd status:"
-systemctl status libvirtd --no-pager
+systemctl status libvirtd --no-pager

 cat <<'EOF'

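After packer-setup.sh runs, the state it configures (CRB/PowerTools/EPEL repos, qemu-system-aarch64, libvirtd) can be spot-checked with standard tooling; a sketch, not part of the script itself:

dnf repolist --enabled | grep -Ei 'crb|powertools|epel'   # repos the script enables
command -v qemu-system-aarch64 && qemu-system-aarch64 --version
systemctl is-active libvirtd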
package/scripts/ports-ls.sh
ADDED
@@ -0,0 +1,31 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+BASHRC="$HOME/.bashrc"
+
+# Check whether a "ports" function is already defined
+if grep -Eq '^\s*(function\s+ports|ports\s*\(\))' "$BASHRC"; then
+  echo "The 'ports' function already exists in $BASHRC. Nothing was changed."
+  exit 0
+fi
+
+# Append the function to the end of ~/.bashrc
+cat >> "$BASHRC" <<'EOF'
+
+# >>> ports function added by script >>>
+ports() {
+  # no arguments: show listening TCP+UDP sockets
+  if [ -z "$1" ]; then
+    sudo ss -ltnup
+    return
+  fi
+
+  # with an argument: print the header and lines that exactly match the given port
+  local p="$1"
+  sudo ss -tunap | awk -v p="$p" 'NR==1 || $0 ~ (":"p"($| )")'
+}
+# <<< ports function added by script <<<
+EOF
+
+echo "Function 'ports' was added to $BASHRC."
+echo "Load it now with: source ~/.bashrc (or open a new terminal)."
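Once ~/.bashrc is re-sourced, the new helper wraps ss; a usage sketch (the port number is only an illustrative value):

source ~/.bashrc
ports          # no argument: list listening TCP/UDP sockets (sudo ss -ltnup)
ports 5432     # with a port: print the ss header plus socket lines matching ":5432"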
package/scripts/quick-tftp.sh
ADDED
@@ -0,0 +1,19 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+url="${1:-}"
+[[ "$url" =~ ^tftp://([^/]+)(/.+)$ ]] || { echo "Usage: $0 tftp://host/path"; exit 2; }
+host="${BASH_REMATCH[1]}"
+path="${BASH_REMATCH[2]}"
+outfile="/tmp/$(basename "$path")"
+
+if command -v curl >/dev/null 2>&1; then
+  curl -f --silent --output "$outfile" "$url" && echo "OK: $outfile ($(stat -c%s "$outfile") bytes)" || { echo "curl: failed"; exit 3; }
+elif command -v tftp >/dev/null 2>&1; then
+  printf "get %s %s\nquit\n" "$path" "$outfile" | tftp "$host" \
+    && [[ -s "$outfile" ]] \
+    && echo "OK: $outfile ($(stat -c%s "$outfile") bytes)" \
+    || { echo "tftp: failed"; exit 3; }
+else
+  echo "Install 'curl' or 'tftp-client' (sudo dnf install -y curl tftp-client)"; exit 4
+fi
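A usage sketch for the script above, handy for verifying that a PXE/TFTP host actually serves a boot artifact (host and file name are placeholder values):

./quick-tftp.sh tftp://192.168.1.10/grubx64.efi
# on success the script prints: OK: /tmp/grubx64.efi (<size> bytes)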
package/scripts/rocky-setup.sh
CHANGED
@@ -9,19 +9,19 @@ if [ "${EUID:-$(id -u)}" -ne 0 ]; then
 fi

 echo "1) Ensure dnf-plugins-core is available (for config-manager)"
-dnf -y install dnf-plugins-core
+dnf -y install dnf-plugins-core

 echo "2) Enable CodeReady / CRB (needed for some deps, e.g. ladspa)"
 # On RHEL you'd use subscription-manager; on CentOS/Rocky/Alma use config-manager
-dnf config-manager --set-enabled crb
+dnf config-manager --set-enabled crb

 echo "3) Install EPEL release (required by some ffmpeg deps)"
-dnf -y install https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm
+dnf -y install https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm

 echo "4) Add RPM Fusion (free + nonfree) repositories"
 # Using mirrors.rpmfusion.org recommended links
 dnf -y install https://mirrors.rpmfusion.org/free/el/rpmfusion-free-release-9.noarch.rpm \
-https://mirrors.rpmfusion.org/nonfree/el/rpmfusion-nonfree-release-9.noarch.rpm
+https://mirrors.rpmfusion.org/nonfree/el/rpmfusion-nonfree-release-9.noarch.rpm

 echo "5) Refresh metadata and update system"
 dnf -y makecache
package/scripts/ssl.sh
CHANGED
@@ -48,7 +48,7 @@ info "Outputs: $CERT_FILE, $KEY_FILE, $FULLCHAIN_FILE, $ROOT_PEM"

 # Install prerequisites
 if ! command -v dnf >/dev/null 2>&1; then err "dnf not found. This script expects RHEL/Rocky with dnf."; exit 1; fi
-sudo dnf install -y curl nss-tools ca-certificates
+sudo dnf install -y curl nss-tools ca-certificates

 # Download and install mkcert binary (no 'go install')
 download_mkcert_binary() {
@@ -60,10 +60,10 @@ download_mkcert_binary() {
 esac
 info "Searching mkcert release for $ARCH_STR"
 ASSET_URL=$(curl -sS "https://api.github.com/repos/FiloSottile/mkcert/releases/latest" | \
-grep -E '"browser_download_url":' | grep -i "$ARCH_STR" | head -n1 | sed -E 's/.*"(https:[^"]+)".*/\1/'
+grep -E '"browser_download_url":' | grep -i "$ARCH_STR" | head -n1 | sed -E 's/.*"(https:[^"]+)".*/\1/')
 if [[ -z "$ASSET_URL" ]]; then
 ASSET_URL=$(curl -sS "https://api.github.com/repos/FiloSottile/mkcert/releases/latest" | \
-grep -E '"browser_download_url":' | grep -i 'linux' | grep -i 'amd64' | head -n1 | sed -E 's/.*"(https:[^"]+)".*/\1/'
+grep -E '"browser_download_url":' | grep -i 'linux' | grep -i 'amd64' | head -n1 | sed -E 's/.*"(https:[^"]+)".*/\1/')
 fi
 if [[ -z "$ASSET_URL" ]]; then err "Could not find mkcert asset for your arch"; return 1; fi
 TMP_BIN="$(mktemp -u /tmp/mkcert.XXXXXX)"
@@ -93,7 +93,7 @@ use_mkcert() {
 info "Generating cert+key with mkcert"
 if ! "$MKCERT_BIN" -cert-file "$CERT_FILE" -key-file "$KEY_FILE" "${MK_ARGS[@]}"; then err "mkcert failed to generate"; return 1; fi
 # copy root CA from mkcert CAROOT into TARGET_DIR
-if ROOT_FROM_MKCERT="$($MKCERT_BIN -CAROOT 2>/dev/null
+if ROOT_FROM_MKCERT="$($MKCERT_BIN -CAROOT 2>/dev/null)"; then
 if [[ -f "$ROOT_FROM_MKCERT/rootCA.pem" ]]; then
 cp "$ROOT_FROM_MKCERT/rootCA.pem" "$ROOT_PEM"
 info "Copied mkcert root CA to $ROOT_PEM"
@@ -135,11 +135,11 @@ use_openssl() {
 mv -f "$CSR_KEY" "$KEY_FILE"
 # create fullchain: leaf + root
 cat "$CERT_FILE" "$ROOT_PEM" > "$FULLCHAIN_FILE"
-sudo cp "$ROOT_PEM" /etc/pki/ca-trust/source/anchors/
-sudo update-ca-trust extract
+sudo cp "$ROOT_PEM" /etc/pki/ca-trust/source/anchors/
+sudo update-ca-trust extract
 if command -v certutil >/dev/null 2>&1; then
 mkdir -p "$HOME/.pki/nssdb"
-certutil -d sql:$HOME/.pki/nssdb -A -t "CT,C,C" -n "Local Dev Root CA" -i "$ROOT_PEM"
+certutil -d sql:$HOME/.pki/nssdb -A -t "CT,C,C" -n "Local Dev Root CA" -i "$ROOT_PEM"
 fi
 info "OpenSSL created cert, key and fullchain"
 return 0
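Several of the ssl.sh hunks above terminate command substitutions that were previously left open; an unclosed $( ... ) breaks parsing of everything that follows it. A general way to catch this class of error without executing the script (shellcheck is optional and not something the package requires):

bash -n package/scripts/ssl.sh && echo "syntax OK"
shellcheck package/scripts/ssl.sh   # extra linting, if shellcheck is installed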
package/src/api/document/document.controller.js
CHANGED
@@ -61,6 +61,21 @@ const DocumentController = {
       });
     }
   },
+  patch: async (req, res, options) => {
+    try {
+      const result = await DocumentService.patch(req, res, options);
+      return res.status(200).json({
+        status: 'success',
+        data: result,
+      });
+    } catch (error) {
+      logger.error(error, error.stack);
+      return res.status(400).json({
+        status: 'error',
+        message: error.message,
+      });
+    }
+  },
 };

 export { DocumentController };
package/src/api/document/document.model.js
CHANGED
@@ -11,6 +11,7 @@ const DocumentSchema = new Schema(
     location: { type: String },
     title: { type: String },
     tags: [{ type: String }],
+    isPublic: { type: Boolean, default: false },
     fileId: {
       type: Schema.Types.ObjectId,
       ref: 'File',
@@ -19,6 +20,16 @@ const DocumentSchema = new Schema(
       type: Schema.Types.ObjectId,
       ref: 'File',
     },
+    share: {
+      copyShareLinkEvent: [
+        {
+          year: { type: Number },
+          month: { type: Number },
+          day: { type: Number },
+          count: { type: Number, default: 0 },
+        },
+      ],
+    },
   },
   {
     timestamps: true,
@@ -49,10 +60,42 @@ const DocumentDto = {
       return {
         path: 'userId',
         model: 'User',
-        select: '_id email username',
+        select: '_id email username profileImageId',
+        populate: {
+          path: 'profileImageId',
+          model: 'File',
+          select: '_id name mimetype',
+        },
       };
     },
   },
+  getTotalCopyShareLinkCount: (document) => {
+    if (!document.share || !document.share.copyShareLinkEvent) return 0;
+    return document.share.copyShareLinkEvent.reduce((total, event) => total + (event.count || 0), 0);
+  },
+  /**
+   * Filter 'public' tag from tags array
+   * The 'public' tag is internal and should not be rendered to users
+   * @param {string[]} tags - Array of tags
+   * @returns {string[]} - Filtered tags without 'public'
+   */
+  filterPublicTag: (tags) => {
+    if (!tags || !Array.isArray(tags)) return [];
+    return tags.filter((tag) => tag !== 'public');
+  },
+  /**
+   * Extract isPublic boolean from tags array and return cleaned tags
+   * @param {string[]} tags - Array of tags potentially containing 'public'
+   * @returns {{ isPublic: boolean, tags: string[] }} - Object with isPublic flag and cleaned tags
+   */
+  extractPublicFromTags: (tags) => {
+    if (!tags || !Array.isArray(tags)) {
+      return { isPublic: false, tags: [] };
+    }
+    const hasPublicTag = tags.includes('public');
+    const cleanedTags = tags.filter((tag) => tag !== 'public');
+    return { isPublic: hasPublicTag, tags: cleanedTags };
+  },
 };

 export { DocumentSchema, DocumentModel, ProviderSchema, DocumentDto };
package/src/api/document/document.router.js
CHANGED
@@ -9,11 +9,13 @@ const DocumentRouter = (options) => {
   const authMiddleware = options.authMiddleware;
   router.post(`/:id`, authMiddleware, async (req, res) => await DocumentController.post(req, res, options));
   router.post(`/`, authMiddleware, async (req, res) => await DocumentController.post(req, res, options));
+  router.get(`/public/high`, async (req, res) => await DocumentController.get(req, res, options));
   router.get(`/public`, async (req, res) => await DocumentController.get(req, res, options));
   router.get(`/:id`, authMiddleware, async (req, res) => await DocumentController.get(req, res, options));
   router.get(`/`, authMiddleware, async (req, res) => await DocumentController.get(req, res, options));
   router.put(`/:id`, authMiddleware, async (req, res) => await DocumentController.put(req, res, options));
   router.put(`/`, authMiddleware, async (req, res) => await DocumentController.put(req, res, options));
+  router.patch(`/:id/copy-share-link`, async (req, res) => await DocumentController.patch(req, res, options));
   router.delete(`/:id`, authMiddleware, async (req, res) => await DocumentController.delete(req, res, options));
   router.delete(`/`, authMiddleware, async (req, res) => await DocumentController.delete(req, res, options));
   return router;
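The two added routes are registered without authMiddleware, so they can be exercised with plain HTTP calls. A minimal sketch, assuming the router is mounted under /api/document on a local development server (host, port, and document id are placeholders):

# increment the copy-share-link counter for a document
curl -X PATCH "http://localhost:<port>/api/document/<documentId>/copy-share-link"

# list public documents via the new /public/high variant (it reuses the same GET controller as /public)
curl "http://localhost:<port>/api/document/public/high"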