@underundre/undev 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +148 -0
- package/README.ru.md +148 -0
- package/configs/.editorconfig +28 -0
- package/configs/commitlint.config.js +36 -0
- package/configs/eslint.config.js +58 -0
- package/configs/prettier.config.js +23 -0
- package/configs/tsconfig.base.json +23 -0
- package/package.json +53 -0
- package/scripts/common.sh +74 -0
- package/scripts/db/backup.sh +53 -0
- package/scripts/db/restore.sh +46 -0
- package/scripts/deploy/deploy.sh +102 -0
- package/scripts/deploy/logs.sh +40 -0
- package/scripts/deploy/rollback.sh +46 -0
- package/scripts/dev/setup.sh +69 -0
- package/scripts/docker/cleanup.sh +49 -0
- package/scripts/monitoring/security-audit.sh +76 -0
- package/scripts/server/health-check.sh +83 -0
- package/scripts/server/setup-ssl.sh +30 -0
- package/scripts/server/setup-vps.sh +92 -0
- package/templates/.env.example +32 -0
- package/templates/.github/workflows/ci.yml +48 -0
- package/templates/docker-compose.dev.yml +41 -0
- package/templates/package-scripts.jsonc +52 -0
|
#!/bin/bash
# ─────────────────────────────────────────────────
# PostgreSQL database restore from backup.
#
# Usage:
#   ./scripts/db/restore.sh backups/mydb_20260409.dump
#   POSTGRES_DB=mydb ./scripts/db/restore.sh latest
# ─────────────────────────────────────────────────

source "$(dirname "$0")/../common.sh"
load_env

require_cmd pg_restore

# POSTGRES_DB has no sane default — abort with a message if unset.
: "${POSTGRES_DB:?Set POSTGRES_DB}"

DB_HOST="${POSTGRES_HOST:-localhost}"
DB_PORT="${POSTGRES_PORT:-5432}"
DB_USER="${POSTGRES_USER:-postgres}"
BACKUP_DIR="${BACKUP_DIR:-$REPO_ROOT/backups}"

BACKUP_FILE="${1:?Usage: restore.sh <backup-file|latest>}"

if [[ "$BACKUP_FILE" == "latest" ]]; then
  # Pick the most recently modified *.dump. A glob + `-nt` comparison is
  # used instead of parsing `ls -t` output (ShellCheck SC2012): `ls`
  # parsing breaks on filenames with whitespace/newlines.
  BACKUP_FILE=""
  for candidate in "$BACKUP_DIR"/*.dump; do
    [[ -e "$candidate" ]] || continue   # glob didn't match anything
    if [[ -z "$BACKUP_FILE" || "$candidate" -nt "$BACKUP_FILE" ]]; then
      BACKUP_FILE="$candidate"
    fi
  done
  if [[ -z "$BACKUP_FILE" ]]; then
    error "No backups found in $BACKUP_DIR"
    exit 1
  fi
  info "Using latest: $BACKUP_FILE"
fi

if [[ ! -f "$BACKUP_FILE" ]]; then
  error "Backup file not found: $BACKUP_FILE"
  exit 1
fi

# Destructive operation — require explicit confirmation.
warn "This will DROP and recreate $POSTGRES_DB!"
confirm "Restore $POSTGRES_DB from $BACKUP_FILE?" || exit 0

step "Restoring..."
# --clean --if-exists: drop existing objects before recreating them.
# --no-owner: skip ownership restore (target role may differ from source).
pg_restore -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" \
  --clean --if-exists --no-owner \
  -d "$POSTGRES_DB" "$BACKUP_FILE"

log "Restore complete"
#!/bin/bash
# ─────────────────────────────────────────────────
# Zero-downtime deployment via SSH.
#
# Config (env vars or .env.production):
#   PROD_SSH_HOST  — SSH host alias or user@host
#   REMOTE_APP_DIR — App directory on server (e.g., /home/deploy/myapp)
#   REMOTE_SCRIPT  — Server-side deploy script name (default: server-deploy.sh)
#
# Usage:
#   ./scripts/deploy/deploy.sh               # Full deploy with checks
#   ./scripts/deploy/deploy.sh --fast        # Skip all prompts
#   ./scripts/deploy/deploy.sh --skip-tests  # Skip test step
# ─────────────────────────────────────────────────

source "$(dirname "$0")/../common.sh"
load_env ".env.production"

# Parse flags
SKIP_TESTS=false
FAST_MODE=false
for arg in "$@"; do
  case "$arg" in   # quoted: safe even if an argument contains spaces/globs
    --skip-tests) SKIP_TESTS=true ;;
    --fast) FAST_MODE=true; SKIP_TESTS=true ;;
    --yes) YES=true ;;   # presumably consumed by confirm() in common.sh — TODO confirm
    -h|--help)
      echo "Usage: deploy.sh [--fast] [--skip-tests] [--yes]"
      exit 0 ;;
  esac
done

# Required config
: "${PROD_SSH_HOST:?Set PROD_SSH_HOST in .env.production or env}"
: "${REMOTE_APP_DIR:?Set REMOTE_APP_DIR in .env.production or env}"
REMOTE_SCRIPT="${REMOTE_SCRIPT:-server-deploy.sh}"

BRANCH=$(git_branch)
COMMIT=$(git_commit)
VERSION=$(git_version)
TIMESTAMP=$(date '+%Y-%m-%d %H:%M:%S')   # currently unused; kept for log templating

echo -e "${BLUE}================================${NC}"
echo -e "${BLUE} Deploy to Production${NC}"
echo -e "${BLUE}================================${NC}"
echo ""
info "Branch: $BRANCH"
info "Commit: $COMMIT"
info "Version: $VERSION"
info "Host: $PROD_SSH_HOST"
echo ""

notify_telegram "🚀 *Deploy Started*
👤 $(whoami)
🌿 $BRANCH ($COMMIT)
📦 v$VERSION"

# Pre-flight: SSH
step "Checking SSH connection..."
if ! ssh -q "$PROD_SSH_HOST" exit 2>/dev/null; then
  error "Cannot connect to $PROD_SSH_HOST"
  exit 1
fi
log "SSH OK"

# Pre-flight: Dirty working tree
if git_dirty; then
  warn "Working tree has uncommitted changes"
  if [[ "$FAST_MODE" != "true" ]]; then
    confirm "Deploy anyway?" || exit 1
  fi
fi

# Pre-flight: Tests
if [[ "$SKIP_TESTS" != "true" ]]; then
  step "Running validate..."
  npm run validate 2>&1 || { error "Validation failed"; exit 1; }
  log "Validation passed"
fi

# Push
step "Pushing to origin..."
git push origin "$BRANCH" 2>&1 || { error "Git push failed"; exit 1; }
log "Pushed"

# Deploy on server
step "Running remote deploy..."
# Capture the exit status with `|| DEPLOY_EXIT=$?` rather than a bare
# `$?` on the next line: if common.sh enables `set -e`, a failing ssh
# would otherwise abort the script before the failure notification below
# ever fires.
DEPLOY_EXIT=0
ssh "$PROD_SSH_HOST" "cd $REMOTE_APP_DIR && bash scripts/$REMOTE_SCRIPT" 2>&1 || DEPLOY_EXIT=$?

if [[ $DEPLOY_EXIT -eq 0 ]]; then
  log "Deploy successful"
  notify_telegram "✅ *Deploy Complete*
📦 v$VERSION ($COMMIT)
🌿 $BRANCH"
else
  error "Deploy failed (exit code $DEPLOY_EXIT)"
  notify_telegram "❌ *Deploy Failed*
📦 v$VERSION ($COMMIT)
⚠️ Exit code: $DEPLOY_EXIT"
  exit $DEPLOY_EXIT
fi
#!/bin/bash
# ─────────────────────────────────────────────────
# Tail production logs via SSH.
#
# Config: PROD_SSH_HOST, REMOTE_APP_DIR
#
# Usage:
#   ./scripts/deploy/logs.sh            # Tail app logs (pm2)
#   ./scripts/deploy/logs.sh --docker   # Tail docker compose logs
#   ./scripts/deploy/logs.sh --nginx    # Tail nginx access log
#   ./scripts/deploy/logs.sh --error    # Tail nginx error log
# ─────────────────────────────────────────────────

source "$(dirname "$0")/../common.sh"
load_env ".env.production"

: "${PROD_SSH_HOST:?Set PROD_SSH_HOST}"
: "${REMOTE_APP_DIR:?Set REMOTE_APP_DIR}"

MODE="${1:---pm2}"
LINES="${LINES:-100}"

# Resolve the requested mode into a description and a remote command,
# then make a single interactive (-t) SSH call at the bottom.
if [[ "$MODE" == "--docker" ]]; then
  DESCRIPTION="Tailing Docker Compose logs..."
  REMOTE_CMD="cd $REMOTE_APP_DIR && docker compose logs -f --tail=$LINES"
elif [[ "$MODE" == "--nginx" ]]; then
  DESCRIPTION="Tailing Nginx access log..."
  REMOTE_CMD="tail -f -n $LINES /var/log/nginx/access.log"
elif [[ "$MODE" == "--error" ]]; then
  DESCRIPTION="Tailing Nginx error log..."
  REMOTE_CMD="tail -f -n $LINES /var/log/nginx/error.log"
else
  # Default: --pm2, or any unrecognized argument, tails application logs.
  DESCRIPTION="Tailing PM2 logs..."
  REMOTE_CMD="cd $REMOTE_APP_DIR && pm2 logs --lines $LINES"
fi

info "$DESCRIPTION"
ssh -t "$PROD_SSH_HOST" "$REMOTE_CMD"
#!/bin/bash
# ─────────────────────────────────────────────────
# Rollback to previous deployment.
#
# Config: PROD_SSH_HOST, REMOTE_APP_DIR (same as deploy.sh)
#
# Usage:
#   ./scripts/deploy/rollback.sh            # Rollback to previous
#   ./scripts/deploy/rollback.sh <commit>   # Rollback to specific commit
# ─────────────────────────────────────────────────

source "$(dirname "$0")/../common.sh"
load_env ".env.production"

: "${PROD_SSH_HOST:?Set PROD_SSH_HOST}"
: "${REMOTE_APP_DIR:?Set REMOTE_APP_DIR}"

TARGET_COMMIT="${1:-}"

# $TARGET_COMMIT is interpolated into a remote shell command below, so
# reject anything that is not a plausible git ref/SHA — otherwise
# `rollback.sh '; rm -rf /'` would execute arbitrary commands on the
# production server.
if [[ -n "$TARGET_COMMIT" && ! "$TARGET_COMMIT" =~ ^[A-Za-z0-9._/-]+$ ]]; then
  error "Invalid commit/ref: $TARGET_COMMIT"
  exit 1
fi

echo -e "${YELLOW}================================${NC}"
echo -e "${YELLOW} Rollback Production${NC}"
echo -e "${YELLOW}================================${NC}"
echo ""

if [[ -n "$TARGET_COMMIT" ]]; then
  info "Rolling back to commit: $TARGET_COMMIT"
else
  info "Rolling back to previous deployment"
fi

confirm "Are you sure you want to rollback production?" || exit 0

notify_telegram "⚠️ *Rollback Started*
👤 $(whoami)
🎯 ${TARGET_COMMIT:-previous}"

step "Running remote rollback..."
# NOTE(review): `git checkout` leaves the remote checkout detached; a
# later deploy.sh run is expected to check the branch out again — verify
# against server-deploy.sh.
if [[ -n "$TARGET_COMMIT" ]]; then
  ssh "$PROD_SSH_HOST" "cd $REMOTE_APP_DIR && git fetch && git checkout $TARGET_COMMIT && npm ci --production && pm2 restart all"
else
  ssh "$PROD_SSH_HOST" "cd $REMOTE_APP_DIR && git checkout HEAD~1 && npm ci --production && pm2 restart all"
fi

log "Rollback complete"
notify_telegram "✅ *Rollback Complete*
🎯 ${TARGET_COMMIT:-previous deployment}"
#!/bin/bash
# ─────────────────────────────────────────────────
# Local dev environment setup.
# Run after cloning a project to get up and running.
#
# What it does:
#   1. Check Node.js version
#   2. Install dependencies
#   3. Copy .env.example → .env (if missing)
#   4. Run DB migrations (if drizzle detected)
#   5. Verify build
#
# Usage:
#   ./scripts/dev/setup.sh
# ─────────────────────────────────────────────────

source "$(dirname "$0")/../common.sh"

echo -e "${BLUE}=== Dev Environment Setup ===${NC}"
echo ""

# 1. Node.js version check
step "Checking Node.js..."
require_cmd node npm
NODE_VERSION=$(node -v | tr -d 'v')
NODE_MAJOR=${NODE_VERSION%%.*}   # e.g. "20.11.1" -> "20"
if [[ $NODE_MAJOR -lt 20 ]]; then
  error "Node.js 20+ required (found: $NODE_VERSION)"
  info "Install via: nvm install 20"
  exit 1
fi
log "Node.js $NODE_VERSION"

# 2. Dependencies — pick the package manager from the lockfile present.
step "Installing dependencies..."
if [[ -f "$REPO_ROOT/package-lock.json" ]]; then
  npm ci --silent
elif [[ -f "$REPO_ROOT/pnpm-lock.yaml" ]]; then
  require_cmd pnpm
  pnpm install --frozen-lockfile
elif [[ -f "$REPO_ROOT/yarn.lock" ]]; then
  require_cmd yarn
  yarn install --frozen-lockfile
else
  npm install   # no lockfile — plain install
fi
log "Dependencies installed"

# 3. Environment file. Branch order matters so each message is truthful:
# the previous version printed ".env already exists" even when neither
# .env nor .env.example existed.
if [[ -f "$REPO_ROOT/.env" ]]; then
  log ".env already exists"
elif [[ -f "$REPO_ROOT/.env.example" ]]; then
  step "Creating .env from .env.example..."
  cp "$REPO_ROOT/.env.example" "$REPO_ROOT/.env"
  warn "Edit .env with your local values before starting"
else
  warn "No .env or .env.example found — create .env manually if needed"
fi

# 4. DB migrations (if drizzle config found)
if [[ -f "$REPO_ROOT/drizzle.config.ts" ]] || [[ -f "$REPO_ROOT/drizzle.config.js" ]]; then
  step "Running DB migrations..."
  # Best-effort: a stopped local database should not abort setup.
  npm run db:push 2>/dev/null || warn "DB push failed (is database running?)"
fi

# 5. Build check — warn only; a broken build should not block dev setup.
step "Checking build..."
npm run build 2>&1 || { warn "Build has issues (non-fatal for dev)"; }

echo ""
log "Setup complete! Run 'npm run dev' to start."
#!/bin/bash
# ─────────────────────────────────────────────────
# Docker cleanup: dangling images, stopped containers, unused volumes.
#
# Usage:
#   ./scripts/docker/cleanup.sh          # Safe cleanup (dangling only)
#   ./scripts/docker/cleanup.sh --all    # Aggressive (all unused)
# ─────────────────────────────────────────────────

source "$(dirname "$0")/../common.sh"

require_cmd docker

MODE="${1:---safe}"

echo -e "${BLUE}=== Docker Cleanup ===${NC}"

if [[ "$MODE" == "--all" ]]; then
  # Aggressive path: prune everything Docker considers unused,
  # including tagged-but-unreferenced images (-a).
  warn "Aggressive mode: removing ALL unused images, containers, volumes, networks"
  confirm "This will free significant space but may remove cached build layers. Continue?" || exit 0

  step "Removing stopped containers..."
  docker container prune -f

  step "Removing unused images (all)..."
  docker image prune -a -f

  step "Removing unused volumes..."
  docker volume prune -f

  step "Removing unused networks..."
  docker network prune -f
else
  # Safe path (--safe, or any other argument): dangling resources only.
  step "Removing dangling images..."
  docker image prune -f

  step "Removing stopped containers..."
  docker container prune -f

  step "Removing dangling volumes..."
  docker volume prune -f
fi

echo ""
info "Space usage after cleanup:"
docker system df
#!/bin/bash
# ─────────────────────────────────────────────────
# Quick security audit for Node.js projects.
#
# Checks:
#   1. npm audit (known vulnerabilities)
#   2. Outdated dependencies
#   3. .env files accidentally committed
#   4. Secrets in git history
#   5. File permissions
#
# Usage:
#   ./scripts/monitoring/security-audit.sh
# ─────────────────────────────────────────────────

source "$(dirname "$0")/../common.sh"

echo -e "${BLUE}=== Security Audit ===${NC}"
echo ""
ISSUES=0

# 1. npm audit — parse counts out of the JSON report; "?" means the
# report could not be parsed (e.g. npm missing or offline).
step "Checking npm vulnerabilities..."
AUDIT_OUTPUT=$(npm audit --json 2>/dev/null || true)
VULNS=$(echo "$AUDIT_OUTPUT" | node -pe "JSON.parse(require('fs').readFileSync('/dev/stdin','utf8')).metadata?.vulnerabilities?.high || 0" 2>/dev/null || echo "?")
CRITICAL=$(echo "$AUDIT_OUTPUT" | node -pe "JSON.parse(require('fs').readFileSync('/dev/stdin','utf8')).metadata?.vulnerabilities?.critical || 0" 2>/dev/null || echo "?")

if [[ "$CRITICAL" != "0" ]] && [[ "$CRITICAL" != "?" ]]; then
  error "Critical vulnerabilities: $CRITICAL"
  ISSUES=$((ISSUES + 1))
elif [[ "$VULNS" != "0" ]] && [[ "$VULNS" != "?" ]]; then
  warn "High vulnerabilities: $VULNS (run: npm audit fix)"
else
  log "No critical/high vulnerabilities"
fi

# 2. .env files in git
step "Checking for committed .env files..."
# -F: fixed-string match — the previous regex form treated the dots as
# wildcards and would also exclude files like ".env-example".
ENV_FILES=$(git ls-files '*.env' '.env.*' 2>/dev/null | grep -vF '.env.example' || true)
if [[ -n "$ENV_FILES" ]]; then
  error "Found .env files in git:"
  echo "$ENV_FILES" | while read -r f; do echo "  - $f"; done
  ISSUES=$((ISSUES + 1))
else
  log "No .env files in git"
fi

# 3. Secrets in code — OpenAI keys, GitHub PATs, PEM private keys.
step "Scanning for hardcoded secrets..."
SECRET_HITS=$(grep -rn --include="*.ts" --include="*.js" --include="*.json" \
  -E "(sk-[a-zA-Z0-9]{20,}|ghp_[a-zA-Z0-9]{36}|-----BEGIN.*PRIVATE KEY)" \
  "$REPO_ROOT/src" "$REPO_ROOT/server" 2>/dev/null | grep -v node_modules | head -5 || true)
if [[ -n "$SECRET_HITS" ]]; then
  error "Possible hardcoded secrets found:"
  echo "$SECRET_HITS"
  ISSUES=$((ISSUES + 1))
else
  log "No hardcoded secrets detected"
fi

# 4. Outdated deps — informational only; does not count as an issue.
step "Checking outdated dependencies..."
OUTDATED=$(npm outdated --json 2>/dev/null | node -pe "Object.keys(JSON.parse(require('fs').readFileSync('/dev/stdin','utf8'))).length" 2>/dev/null || echo "?")
if [[ "$OUTDATED" != "0" ]] && [[ "$OUTDATED" != "?" ]]; then
  info "$OUTDATED outdated packages (run: npm outdated)"
else
  log "All dependencies up to date"
fi

echo ""
if [[ $ISSUES -gt 0 ]]; then
  error "$ISSUES issue(s) found. Review above."
  exit 1
else
  log "Security audit passed"
fi
#!/bin/bash
# ─────────────────────────────────────────────────
# Server health check: disk, memory, CPU, services.
#
# NOTE: relies on /proc, free, nproc, systemctl — Linux only.
#
# Usage:
#   ssh deploy@server < scripts/server/health-check.sh
#   ./scripts/server/health-check.sh     # Run locally
# ─────────────────────────────────────────────────

set -euo pipefail

RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'

# Status line helpers: green check / yellow warning / red cross.
ok() { echo -e "${GREEN}✓${NC} $1"; }
warn() { echo -e "${YELLOW}⚠${NC} $1"; }
fail() { echo -e "${RED}✗${NC} $1"; }

echo "=== Health Check: $(hostname) ==="
echo "Time: $(date)"
echo ""

# Disk — used % of the root filesystem.
DISK_PCT=$(df / | tail -1 | awk '{print $5}' | tr -d '%')
if [[ $DISK_PCT -lt 80 ]]; then
  ok "Disk: ${DISK_PCT}%"
elif [[ $DISK_PCT -lt 90 ]]; then
  warn "Disk: ${DISK_PCT}% (getting full)"
else
  fail "Disk: ${DISK_PCT}% (CRITICAL)"
fi

# Memory — used/total from `free`, rounded to whole percent.
MEM_PCT=$(free | awk '/Mem:/ {printf "%.0f", $3/$2 * 100}')
if [[ $MEM_PCT -lt 80 ]]; then
  ok "Memory: ${MEM_PCT}%"
elif [[ $MEM_PCT -lt 90 ]]; then
  warn "Memory: ${MEM_PCT}%"
else
  fail "Memory: ${MEM_PCT}% (CRITICAL)"
fi

# Load average — 1-minute load normalized by core count.
# (awk reads /proc/loadavg directly; no need to pipe through cat.)
LOAD=$(awk '{print $1}' /proc/loadavg)
CORES=$(nproc)
LOAD_PCT=$(echo "$LOAD $CORES" | awk '{printf "%.0f", ($1/$2)*100}')
if [[ $LOAD_PCT -lt 70 ]]; then
  ok "CPU load: ${LOAD} (${LOAD_PCT}% of ${CORES} cores)"
else
  warn "CPU load: ${LOAD} (${LOAD_PCT}% of ${CORES} cores)"
fi

# Swap — only reported when a swap device is active.
if swapon --show | grep -q /; then
  SWAP_USED=$(free | awk '/Swap:/ {if($2>0) printf "%.0f", $3/$2*100; else print "0"}')
  if [[ $SWAP_USED -lt 50 ]]; then
    ok "Swap: ${SWAP_USED}%"
  else
    warn "Swap: ${SWAP_USED}% (heavy swapping)"
  fi
fi

# Services — checked only when installed; considered running if either
# systemd reports active or a matching process exists.
# NOTE(review): `pgrep -x pm2` assumes the PM2 daemon's process name is
# exactly "pm2" — verify on the target server.
echo ""
for svc in nginx docker pm2; do
  if command -v $svc &>/dev/null; then
    if systemctl is-active --quiet $svc 2>/dev/null || pgrep -x $svc >/dev/null 2>&1; then
      ok "$svc: running"
    else
      fail "$svc: not running"
    fi
  fi
done

# Docker containers (if docker available)
if command -v docker &>/dev/null; then
  echo ""
  RUNNING=$(docker ps -q | wc -l)
  TOTAL=$(docker ps -aq | wc -l)
  ok "Docker: $RUNNING/$TOTAL containers running"
fi
#!/bin/bash
# ─────────────────────────────────────────────────
# SSL setup via Let's Encrypt + auto-renewal.
#
# Usage:
#   ./scripts/server/setup-ssl.sh example.com
#   ./scripts/server/setup-ssl.sh example.com www.example.com
# ─────────────────────────────────────────────────

set -euo pipefail

DOMAINS=("$@")
if [[ ${#DOMAINS[@]} -eq 0 ]]; then
  echo "Usage: setup-ssl.sh <domain> [domain2...]"
  exit 1
fi

# Build `-d <domain>` pairs as an array, not a whitespace-joined string:
# an unquoted string expansion would word-split/glob and break under a
# stricter IFS.
DOMAIN_FLAGS=()
for d in "${DOMAINS[@]}"; do
  DOMAIN_FLAGS+=(-d "$d")
done

echo "▸ Obtaining SSL certificate for: ${DOMAINS[*]}"
# --redirect: also rewrite the nginx vhost to force HTTPS.
# SSL_EMAIL may be exported by the caller; defaults to admin@<first domain>.
certbot --nginx "${DOMAIN_FLAGS[@]}" --non-interactive --agree-tos --redirect \
  --email "${SSL_EMAIL:-admin@${DOMAINS[0]}}"

echo "▸ Verifying auto-renewal..."
certbot renew --dry-run

echo "✓ SSL configured with auto-renewal"
#!/bin/bash
# ─────────────────────────────────────────────────
# Fresh VPS setup: deploy user, SSH hardening, swap, firewall.
#
# Run as root on a fresh Ubuntu/Debian VPS:
#   curl -sL https://raw.githubusercontent.com/UnderUndre/undev/main/scripts/server/setup-vps.sh | bash -s -- <deploy_user>
#
# Or locally:
#   ssh root@<server> < scripts/server/setup-vps.sh
# ─────────────────────────────────────────────────

set -euo pipefail

# $1: name of the non-root deploy user to create (default: deploy)
# $2: swap file size in fallocate syntax (default: 2G)
DEPLOY_USER="${1:-deploy}"
SWAP_SIZE="${2:-2G}"

echo "=== VPS Setup ==="
echo "Deploy user: $DEPLOY_USER"
echo "Swap size: $SWAP_SIZE"
echo ""

# 1. System updates
echo "▸ Updating system..."
apt-get update -qq && apt-get upgrade -y -qq

# 2. Essential packages
echo "▸ Installing essentials..."
apt-get install -y -qq \
  curl wget git unzip htop \
  ufw fail2ban \
  nginx certbot python3-certbot-nginx

# 3. Create deploy user — idempotent: skipped if the user already exists.
if ! id "$DEPLOY_USER" &>/dev/null; then
  echo "▸ Creating user: $DEPLOY_USER"
  adduser --disabled-password --gecos "" "$DEPLOY_USER"
  usermod -aG sudo "$DEPLOY_USER"
  # NOTE(review): passwordless sudo for the deploy user is a deliberate
  # convenience/security trade-off — confirm it matches your threat model.
  echo "$DEPLOY_USER ALL=(ALL) NOPASSWD:ALL" > "/etc/sudoers.d/$DEPLOY_USER"
fi

# 4. SSH hardening
echo "▸ Hardening SSH..."
mkdir -p "/home/$DEPLOY_USER/.ssh"
chmod 700 "/home/$DEPLOY_USER/.ssh"

# Copy root authorized_keys to deploy user if exists
if [[ -f /root/.ssh/authorized_keys ]]; then
  cp /root/.ssh/authorized_keys "/home/$DEPLOY_USER/.ssh/"
  chown -R "$DEPLOY_USER:$DEPLOY_USER" "/home/$DEPLOY_USER/.ssh"
fi

# Disable root login and password auth
# NOTE(review): after this block only key-based login as $DEPLOY_USER
# works — verify key auth succeeds BEFORE closing your root session.
sed -i 's/^#\?PermitRootLogin.*/PermitRootLogin no/' /etc/ssh/sshd_config
sed -i 's/^#\?PasswordAuthentication.*/PasswordAuthentication no/' /etc/ssh/sshd_config
# Restarting sshd does not kill already-established SSH sessions.
systemctl restart sshd

# 5. Firewall — default-deny inbound, allow SSH + HTTP/HTTPS (nginx).
echo "▸ Configuring firewall..."
ufw --force reset
ufw default deny incoming
ufw default allow outgoing
ufw allow ssh
ufw allow 'Nginx Full'
ufw --force enable

# 6. Swap — idempotent: skipped when /swapfile is already active.
# NOTE(review): fallocate can fail on some filesystems (e.g. older ext3);
# a dd fallback may be needed — confirm against target distros.
if ! swapon --show | grep -q /swapfile; then
  echo "▸ Creating ${SWAP_SIZE} swap..."
  fallocate -l "$SWAP_SIZE" /swapfile
  chmod 600 /swapfile
  mkswap /swapfile
  swapon /swapfile
  echo '/swapfile none swap sw 0 0' >> /etc/fstab
  # Prefer RAM over swap (default swappiness is 60).
  sysctl vm.swappiness=10
  echo 'vm.swappiness=10' >> /etc/sysctl.conf
fi

# 7. Node.js via nvm (for deploy user)
# Single-quoted so $HOME/$NVM_DIR expand in the deploy user's shell,
# not in this root shell.
echo "▸ Installing Node.js 20..."
su - "$DEPLOY_USER" -c '
  curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.40.0/install.sh | bash
  export NVM_DIR="$HOME/.nvm"
  [ -s "$NVM_DIR/nvm.sh" ] && . "$NVM_DIR/nvm.sh"
  nvm install 20
  nvm alias default 20
  npm i -g pm2
'

echo ""
echo "=== VPS Setup Complete ==="
echo "SSH as: ssh $DEPLOY_USER@<server-ip>"
echo "Next: copy your SSH key to /home/$DEPLOY_USER/.ssh/authorized_keys"
# ─────────────────────────────────────────────────
# Environment Variables Template
# Copy to .env and fill in your values:
#   cp .env.example .env
# NOTE: never commit the filled-in .env — it holds secrets.
# ─────────────────────────────────────────────────

# App
NODE_ENV=development
PORT=3000

# Database (PostgreSQL)
DATABASE_URL=postgresql://postgres:postgres@localhost:5432/mydb
POSTGRES_HOST=localhost
POSTGRES_PORT=5432
POSTGRES_USER=postgres
POSTGRES_PASSWORD=<your-password>
POSTGRES_DB=mydb

# Auth (if using)
# AUTH_SECRET=<generate: openssl rand -hex 32>

# External APIs
# OPENAI_API_KEY=sk-...
# ANTHROPIC_API_KEY=sk-ant-...

# Deploy (for scripts/deploy/)
# PROD_SSH_HOST=deploy@your-server.com
# REMOTE_APP_DIR=/home/deploy/myapp

# Notifications (optional)
# TELEGRAM_BOT_TOKEN=
# TELEGRAM_CHAT_ID=