@panoptic-it-solutions/coolify-setup 1.0.0

package/README.md ADDED
@@ -0,0 +1,92 @@
1
+ # @panoptic-it-solutions/coolify-setup
2
+
3
+ CLI tool for setting up Coolify deployments for Panoptic projects.
4
+
5
+ ## Usage
6
+
7
+ ```bash
8
+ npx @panoptic-it-solutions/coolify-setup
9
+ ```
10
+
11
+ Or install globally:
12
+
13
+ ```bash
14
+ npm i -g @panoptic-it-solutions/coolify-setup
15
+ coolify-setup
16
+ ```
17
+
18
+ ## What it does
19
+
20
+ This CLI tool automates the setup of Coolify deployment for your project:
21
+
22
+ 1. **Detects your project type** (Next.js or Node.js)
23
+ 2. **Detects your package manager** (pnpm, npm, or yarn) - see the detection sketch after this list
24
+ 3. **Generates deployment files**:
25
+ - `Dockerfile` - Multi-stage build optimized for your project
26
+ - `docker-compose.yml` - Production configuration with optional services
27
+ - `docker-compose.build.yml` - Build configuration for CI
28
+ - `.github/workflows/build-deploy.yml` - GitHub Actions workflow
29
+ - `entrypoint.sh` - Container startup script with migration support
30
+ - `.claude/rules/coolify.md` - Coolify deployment rules for Claude
31
+ 4. **Sets up GitHub repository** (optional):
32
+ - Creates repo in Panoptic-IT-Solutions org
33
+ - Pushes main, staging, and develop branches
34
+ - Sets develop as default branch
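+
+ Detection is purely file-based. A condensed TypeScript sketch of the logic shipped in `dist/detector.js` (same file names and fallbacks as the published code):
+
+ ```ts
+ import { existsSync } from 'fs';
+ import { join } from 'path';
+
+ // Condensed from dist/detector.js: Next.js is inferred from a next.config.* file,
+ // and the package manager from whichever lockfile is present (npm is the fallback).
+ function detect(cwd = process.cwd()) {
+   const exists = (f: string) => existsSync(join(cwd, f));
+   const type = ['next.config.ts', 'next.config.js', 'next.config.mjs'].some(exists) ? 'nextjs' : 'node';
+   const packageManager = exists('pnpm-lock.yaml') ? 'pnpm' : exists('yarn.lock') ? 'yarn' : 'npm';
+   return { type, packageManager };
+ }
+ ```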
35
+
36
+ ## Hardcoded Panoptic Defaults
37
+
38
+ - **Registry**: `10.0.0.2:5000`
39
+ - **GitHub Org**: `Panoptic-IT-Solutions`
40
+ - **CI Runner**: `self-hosted`
41
+ - **Branch Strategy**:
42
+ - `develop` - Default branch for development
43
+ - `staging` - Deploy target (CI opens a PR into it from every built branch)
44
+ - `main` - Production (manual promotion)
45
+
46
+ ## Optional Services
47
+
48
+ The CLI will prompt you to include (defaults are shown in the sketch after this list):
49
+ - PostgreSQL (with automatic migration support)
50
+ - Redis
51
+ - MinIO (S3-compatible storage)
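+
+ The prompts map one-to-one to the `prompts` questions in `dist/index.js`. A trimmed sketch showing the defaults:
+
+ ```ts
+ import prompts from 'prompts';
+
+ // Trimmed from dist/index.js: PostgreSQL and Redis default to yes, MinIO to no.
+ const answers = await prompts([
+   { type: 'confirm', name: 'includePostgres', message: 'Include PostgreSQL?', initial: true },
+   { type: 'confirm', name: 'includeRedis', message: 'Include Redis?', initial: true },
+   { type: 'confirm', name: 'includeMinio', message: 'Include MinIO (S3-compatible storage)?', initial: false },
+ ]);
+ ```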
52
+
53
+ ## Migration Handling
54
+
55
+ If PostgreSQL is included, the CLI generates migration infrastructure:
56
+
57
+ ### Next.js Projects (with standalone output)
58
+
59
+ Next.js uses `output: 'standalone'`, which produces a minimal deployment without the full `node_modules` tree, so the migration script cannot rely on packages being resolvable at runtime. To handle migrations:
60
+
61
+ 1. **esbuild is added** to devDependencies (automatically)
62
+ 2. **Migration script is bundled** at Docker build time into a single JS file with all of its dependencies inlined (see the sketch below)
63
+ 3. **Bundled JS runs** with plain Node.js at container startup (no tsx needed)
64
+
65
+ Files generated:
66
+ - `lib/db/migrate.ts` - Migration script source (created only if it does not already exist)
67
+ - `lib/db/migrate.bundle.js` - Bundled migration (created during Docker build)
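+
+ The bundling step in the generated Dockerfile is a single esbuild CLI invocation (`esbuild lib/db/migrate.ts --bundle --platform=node --target=node22 --outfile=lib/db/migrate.bundle.js --external:dotenv`). For illustration only, the equivalent call through esbuild's JS API would look roughly like this (a sketch; the CLI does not generate this file):
+
+ ```ts
+ import { build } from 'esbuild';
+
+ // Same flags the generated Dockerfile passes to esbuild: bundle the Drizzle
+ // migration script into one file that plain Node can run in the standalone image.
+ await build({
+   entryPoints: ['lib/db/migrate.ts'],
+   bundle: true,
+   platform: 'node',
+   target: 'node22',
+   outfile: 'lib/db/migrate.bundle.js',
+   external: ['dotenv'], // dotenv is only imported in development, so it stays external
+ });
+ ```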
68
+
69
+ ### Node.js Projects
70
+
71
+ Node.js projects ship the full `node_modules` tree in the image, so migrations run with `tsx`:
72
+
73
+ Files generated:
74
+ - `scripts/migrate.ts` - Migration script
75
+ - Uses `npx tsx` at runtime
76
+
77
+ ### Migration Behavior
78
+
79
+ - **Skips during Docker build** - Detects placeholder database URLs and exits cleanly (see the snippet after this list)
80
+ - **Runs on container startup** - Before the application starts
81
+ - **Idempotent** - Drizzle tracks applied migrations
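+
+ The build-time skip is a small guard at the top of the generated migration script; condensed from the template:
+
+ ```ts
+ // Condensed from the generated migrate script: exit cleanly when there is no real
+ // database, e.g. during `docker build` where only placeholder URLs are set.
+ if (
+   !process.env.POSTGRES_URL ||
+   process.env.SKIP_ENV_VALIDATION === 'true' ||
+   process.env.POSTGRES_URL.includes('placeholder')
+ ) {
+   console.log('Skipping migrations (build-time or no database configured)');
+   process.exit(0);
+ }
+ ```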
82
+
83
+ ## Requirements
84
+
85
+ - Node.js 18+
86
+ - GitHub CLI (`gh`) installed and authenticated (for repo creation)
87
+ - Git initialized in your project
88
+ - **For Next.js + PostgreSQL**: esbuild (automatically added to devDependencies)
89
+
90
+ ## License
91
+
92
+ MIT
package/dist/detector.d.ts ADDED
@@ -0,0 +1,8 @@
1
+ export interface ProjectInfo {
2
+ type: 'nextjs' | 'node';
3
+ packageManager: 'pnpm' | 'npm' | 'yarn';
4
+ hasDockerfile: boolean;
5
+ hasDockerCompose: boolean;
6
+ name: string;
7
+ }
8
+ export declare function detectProject(): Promise<ProjectInfo>;
package/dist/detector.js ADDED
@@ -0,0 +1,34 @@
1
+ import { existsSync, readFileSync } from 'fs';
2
+ import { join } from 'path';
3
+ function exists(filename) {
4
+ return existsSync(join(process.cwd(), filename));
5
+ }
6
+ function getProjectName() {
7
+ try {
8
+ const packageJson = JSON.parse(readFileSync(join(process.cwd(), 'package.json'), 'utf-8'));
9
+ return packageJson.name?.replace(/^@[^/]+\//, '') || 'my-project';
10
+ }
11
+ catch {
12
+ return 'my-project';
13
+ }
14
+ }
15
+ export async function detectProject() {
16
+ const hasPackageJson = exists('package.json');
17
+ const hasNextConfig = exists('next.config.ts') || exists('next.config.js') || exists('next.config.mjs');
18
+ const hasPnpmLock = exists('pnpm-lock.yaml');
19
+ const hasYarnLock = exists('yarn.lock');
20
+ const hasDockerfile = exists('Dockerfile');
21
+ const hasDockerCompose = exists('docker-compose.yml');
22
+ let packageManager = 'npm';
23
+ if (hasPnpmLock)
24
+ packageManager = 'pnpm';
25
+ else if (hasYarnLock)
26
+ packageManager = 'yarn';
27
+ return {
28
+ type: hasNextConfig ? 'nextjs' : 'node',
29
+ packageManager,
30
+ hasDockerfile,
31
+ hasDockerCompose,
32
+ name: hasPackageJson ? getProjectName() : 'my-project',
33
+ };
34
+ }
package/dist/generator.d.ts ADDED
@@ -0,0 +1,9 @@
1
+ export interface GenerateOptions {
2
+ projectName: string;
3
+ projectType: 'nextjs' | 'node';
4
+ packageManager: 'pnpm' | 'npm' | 'yarn';
5
+ includePostgres: boolean;
6
+ includeRedis: boolean;
7
+ includeMinio: boolean;
8
+ }
9
+ export declare function generateFiles(options: GenerateOptions): Promise<void>;
package/dist/generator.js ADDED
@@ -0,0 +1,97 @@
1
+ import { existsSync, mkdirSync, writeFileSync, readFileSync } from 'fs';
2
+ import { join, dirname } from 'path';
3
+ import { generateDockerfile, generateDockerCompose, generateDockerComposeBuild, generateWorkflow, generateEntrypoint, generateMigrateScript, generateClaudeRules, } from './templates/index.js';
4
+ function ensureDir(filePath) {
5
+ const dir = dirname(filePath);
6
+ if (!existsSync(dir)) {
7
+ mkdirSync(dir, { recursive: true });
8
+ }
9
+ }
10
+ function writeFile(relativePath, content) {
11
+ const fullPath = join(process.cwd(), relativePath);
12
+ ensureDir(fullPath);
13
+ writeFileSync(fullPath, content, 'utf-8');
14
+ }
15
+ function addEsbuildToPackageJson() {
16
+ const packageJsonPath = join(process.cwd(), 'package.json');
17
+ if (!existsSync(packageJsonPath)) {
18
+ console.log('Warning: package.json not found, skipping esbuild addition');
19
+ return false;
20
+ }
21
+ try {
22
+ const packageJson = JSON.parse(readFileSync(packageJsonPath, 'utf-8'));
23
+ // Ensure devDependencies exists
24
+ if (!packageJson.devDependencies) {
25
+ packageJson.devDependencies = {};
26
+ }
27
+ // Add esbuild if not already present
28
+ if (!packageJson.devDependencies.esbuild) {
29
+ packageJson.devDependencies.esbuild = '^0.25.0';
30
+ writeFileSync(packageJsonPath, JSON.stringify(packageJson, null, 2) + '\n', 'utf-8');
31
+ return true;
32
+ }
33
+ return false;
34
+ }
35
+ catch (error) {
36
+ console.log('Warning: Failed to update package.json:', error);
37
+ return false;
38
+ }
39
+ }
40
+ export async function generateFiles(options) {
41
+ const { projectName, projectType, packageManager, includePostgres, includeRedis, includeMinio, } = options;
42
+ // Generate Dockerfile
43
+ const dockerfile = generateDockerfile({
44
+ projectType,
45
+ packageManager,
46
+ includePostgres,
47
+ });
48
+ writeFile('Dockerfile', dockerfile);
49
+ // Generate docker-compose.yml
50
+ const dockerCompose = generateDockerCompose({
51
+ projectName,
52
+ includePostgres,
53
+ includeRedis,
54
+ includeMinio,
55
+ });
56
+ writeFile('docker-compose.yml', dockerCompose);
57
+ // Generate docker-compose.build.yml
58
+ const dockerComposeBuild = generateDockerComposeBuild({
59
+ projectName,
60
+ });
61
+ writeFile('docker-compose.build.yml', dockerComposeBuild);
62
+ // Generate GitHub workflow
63
+ const workflow = generateWorkflow({
64
+ projectName,
65
+ });
66
+ writeFile('.github/workflows/build-deploy.yml', workflow);
67
+ // Generate entrypoint.sh
68
+ const entrypoint = generateEntrypoint({
69
+ projectType,
70
+ includePostgres,
71
+ });
72
+ writeFile('entrypoint.sh', entrypoint);
73
+ // Generate Claude rules
74
+ const claudeRules = generateClaudeRules();
75
+ writeFile('.claude/rules/coolify.md', claudeRules);
76
+ // Generate migrate.ts if postgres is included
77
+ if (includePostgres) {
78
+ const migrateScript = generateMigrateScript({ projectType });
79
+ // For Next.js standalone projects, we need esbuild to bundle the migration script
80
+ if (projectType === 'nextjs') {
81
+ const added = addEsbuildToPackageJson();
82
+ if (added) {
83
+ console.log('Added esbuild to devDependencies (required for migration bundling)');
84
+ console.log('Run your package manager install command to install it');
85
+ }
86
+ // Write migrate.ts to lib/db/ for Next.js projects (will be bundled at build time)
87
+ const existingMigratePath = join(process.cwd(), 'lib/db/migrate.ts');
88
+ if (!existsSync(existingMigratePath)) {
89
+ writeFile('lib/db/migrate.ts', migrateScript);
90
+ }
91
+ }
92
+ else {
93
+ // For Node.js projects, write to scripts/ directory
94
+ writeFile('scripts/migrate.ts', migrateScript);
95
+ }
96
+ }
97
+ }
package/dist/git.d.ts ADDED
@@ -0,0 +1 @@
1
+ export declare function setupGitHub(projectName: string): Promise<void>;
package/dist/git.js ADDED
@@ -0,0 +1,94 @@
1
+ import { execSync, exec } from 'child_process';
2
+ import { promisify } from 'util';
3
+ const execAsync = promisify(exec);
4
+ const ORG = 'Panoptic-IT-Solutions';
5
+ function run(command) {
6
+ return execSync(command, { encoding: 'utf-8', stdio: 'pipe' }).trim();
7
+ }
8
+ async function runAsync(command) {
9
+ const { stdout } = await execAsync(command);
10
+ return stdout.trim();
11
+ }
12
+ function hasGitRemote() {
13
+ try {
14
+ run('git remote get-url origin');
15
+ return true;
16
+ }
17
+ catch {
18
+ return false;
19
+ }
20
+ }
21
+ function isGitRepo() {
22
+ try {
23
+ run('git rev-parse --git-dir');
24
+ return true;
25
+ }
26
+ catch {
27
+ return false;
28
+ }
29
+ }
30
+ export async function setupGitHub(projectName) {
31
+ // Initialize git if not already a repo
32
+ if (!isGitRepo()) {
33
+ run('git init');
34
+ run('git add .');
35
+ run('git commit -m "Initial commit"');
36
+ }
37
+ // Check if gh CLI is available
38
+ try {
39
+ run('gh --version');
40
+ }
41
+ catch {
42
+ throw new Error('GitHub CLI (gh) is not installed. Install it from https://cli.github.com/');
43
+ }
44
+ // Check if authenticated
45
+ try {
46
+ run('gh auth status');
47
+ }
48
+ catch {
49
+ throw new Error('Not authenticated with GitHub CLI. Run: gh auth login');
50
+ }
51
+ // Create repo if no remote exists
52
+ if (!hasGitRemote()) {
53
+ // Create the repository
54
+ await runAsync(`gh repo create ${ORG}/${projectName} --private --source=. --push`);
55
+ }
56
+ else {
57
+ // Remote exists, just push
58
+ run('git push origin HEAD');
59
+ }
60
+ // Ensure we're on main and push
61
+ const currentBranch = run('git branch --show-current');
62
+ if (currentBranch !== 'main') {
63
+ run('git checkout -b main 2>/dev/null || git checkout main');
64
+ }
65
+ // Push main
66
+ try {
67
+ run('git push origin main');
68
+ }
69
+ catch {
70
+ // May already be pushed
71
+ }
72
+ // Create staging branch from main
73
+ try {
74
+ run('git push origin main:staging');
75
+ }
76
+ catch {
77
+ // May already exist
78
+ }
79
+ // Create and checkout develop branch
80
+ try {
81
+ run('git checkout -b develop 2>/dev/null || git checkout develop');
82
+ run('git push origin develop -u');
83
+ }
84
+ catch {
85
+ // May already exist
86
+ }
87
+ // Set develop as default branch
88
+ try {
89
+ await runAsync(`gh repo edit ${ORG}/${projectName} --default-branch develop`);
90
+ }
91
+ catch {
92
+ // May fail if not owner or already set
93
+ }
94
+ }
package/dist/index.d.ts ADDED
@@ -0,0 +1,2 @@
1
+ #!/usr/bin/env node
2
+ export {};
package/dist/index.js ADDED
@@ -0,0 +1,102 @@
1
+ #!/usr/bin/env node
2
+ import chalk from 'chalk';
3
+ import ora from 'ora';
4
+ import prompts from 'prompts';
5
+ import { detectProject } from './detector.js';
6
+ import { generateFiles } from './generator.js';
7
+ import { setupGitHub } from './git.js';
8
+ async function main() {
9
+ console.log(chalk.bold.cyan('\n🚀 Panoptic Coolify Setup\n'));
10
+ // Detect project type
11
+ const spinner = ora('Scanning project...').start();
12
+ const project = await detectProject();
13
+ spinner.succeed(`Detected: ${chalk.green(project.type)} project (${project.packageManager})`);
14
+ if (project.hasDockerfile) {
15
+ console.log(chalk.yellow(' ⚠️ Existing Dockerfile found - will be overwritten'));
16
+ }
17
+ if (project.hasDockerCompose) {
18
+ console.log(chalk.yellow(' ⚠️ Existing docker-compose.yml found - will be overwritten'));
19
+ }
20
+ // Get project name
21
+ const response = await prompts([
22
+ {
23
+ type: 'text',
24
+ name: 'projectName',
25
+ message: 'Project name:',
26
+ initial: project.name,
27
+ validate: (value) => value.length > 0 || 'Project name is required',
28
+ },
29
+ {
30
+ type: 'confirm',
31
+ name: 'includePostgres',
32
+ message: 'Include PostgreSQL?',
33
+ initial: true,
34
+ },
35
+ {
36
+ type: 'confirm',
37
+ name: 'includeRedis',
38
+ message: 'Include Redis?',
39
+ initial: true,
40
+ },
41
+ {
42
+ type: 'confirm',
43
+ name: 'includeMinio',
44
+ message: 'Include MinIO (S3-compatible storage)?',
45
+ initial: false,
46
+ },
47
+ {
48
+ type: 'confirm',
49
+ name: 'createRepo',
50
+ message: 'Create GitHub repo in Panoptic-IT-Solutions?',
51
+ initial: true,
52
+ },
53
+ ]);
54
+ if (!response.projectName) {
55
+ console.log(chalk.red('\n❌ Setup cancelled'));
56
+ process.exit(1);
57
+ }
58
+ // Generate files
59
+ console.log(chalk.bold('\nGenerating files...'));
60
+ await generateFiles({
61
+ projectName: response.projectName,
62
+ projectType: project.type,
63
+ packageManager: project.packageManager,
64
+ includePostgres: response.includePostgres,
65
+ includeRedis: response.includeRedis,
66
+ includeMinio: response.includeMinio,
67
+ });
68
+ console.log(chalk.green(' ✓ Dockerfile'));
69
+ console.log(chalk.green(' ✓ docker-compose.yml'));
70
+ console.log(chalk.green(' ✓ docker-compose.build.yml'));
71
+ console.log(chalk.green(' ✓ .github/workflows/build-deploy.yml'));
72
+ console.log(chalk.green(' ✓ .claude/rules/coolify.md'));
73
+ console.log(chalk.green(' ✓ entrypoint.sh'));
74
+ if (response.includePostgres) {
75
+ console.log(chalk.green(' ✓ scripts/migrate.ts'));
76
+ }
77
+ // Setup GitHub
78
+ if (response.createRepo) {
79
+ console.log(chalk.bold('\nSetting up GitHub...'));
80
+ try {
81
+ await setupGitHub(response.projectName);
82
+ console.log(chalk.green(` ✓ Created repo: Panoptic-IT-Solutions/${response.projectName}`));
83
+ console.log(chalk.green(' ✓ Pushed main branch'));
84
+ console.log(chalk.green(' ✓ Created staging branch'));
85
+ console.log(chalk.green(' ✓ Created develop branch (default)'));
86
+ }
87
+ catch (error) {
88
+ console.log(chalk.red(` ✗ GitHub setup failed: ${error}`));
89
+ console.log(chalk.yellow(' You can manually run: gh repo create Panoptic-IT-Solutions/' + response.projectName));
90
+ }
91
+ }
92
+ console.log(chalk.bold.green('\n✅ Setup complete!\n'));
93
+ console.log('Next steps:');
94
+ console.log(chalk.cyan(' 1. Review the generated files'));
95
+ console.log(chalk.cyan(' 2. Update .env with your secrets'));
96
+ console.log(chalk.cyan(' 3. Push to develop to trigger first build'));
97
+ console.log(chalk.cyan(' 4. Configure Coolify to deploy from staging branch\n'));
98
+ }
99
+ main().catch((error) => {
100
+ console.error(chalk.red('Error:'), error);
101
+ process.exit(1);
102
+ });
package/dist/templates/claude-rules.d.ts ADDED
@@ -0,0 +1 @@
1
+ export declare function generateClaudeRules(): string;
package/dist/templates/claude-rules.js ADDED
@@ -0,0 +1,105 @@
1
+ export function generateClaudeRules() {
2
+ return `# Coolify Deployment Rules
3
+
4
+ ## Overview
5
+
6
+ This project uses Coolify for deployment. Coolify has specific behaviors that require careful handling of Docker Compose configurations.
7
+
8
+ ## Critical Coolify Behaviors
9
+
10
+ ### 1. Helper Container Architecture
11
+
12
+ Coolify runs \`docker compose\` inside an ephemeral helper container, not directly on the Docker host. This means:
13
+
14
+ - **Bind mounts with relative paths will fail** - The helper container clones the repo to \`/artifacts/<deployment-uuid>/\`, but Docker resolves paths on the host where these files don't exist
15
+ - **Docker Compose \`configs\` feature will fail** - Same issue as bind mounts; config file paths are resolved on the host, not inside the helper container
16
+ - **Volume mounts work fine** - Named volumes are managed by Docker and work correctly
17
+
18
+ ### 2. Path Transformation
19
+
20
+ Coolify transforms relative bind mount paths like \`./config/file.yaml\` to \`/data/coolify/applications/<app-uuid>/config/file.yaml\`, but it does NOT copy the actual files to that location.
21
+
22
+ ## Rules for Configuration Files
23
+
24
+ ### DO: Bake configs into custom images
25
+
26
+ If a service needs configuration files (like Loki, Promtail, Grafana, Nginx, etc.), create a custom Dockerfile that COPYs the config into the image:
27
+
28
+ \`\`\`dockerfile
29
+ # Dockerfile.servicename
30
+ FROM base-image:version
31
+ COPY config/servicename.yaml /etc/servicename/config.yaml
32
+ \`\`\`
33
+
34
+ Then reference the custom image in docker-compose.yml:
35
+
36
+ \`\`\`yaml
37
+ servicename:
38
+ image: \${REGISTRY}/project-servicename:\${TAG}
39
+ # NO bind mounts or configs for config files
40
+ \`\`\`
41
+
42
+ ### DON'T: Use bind mounts for config files
43
+
44
+ \`\`\`yaml
45
+ # THIS WILL FAIL IN COOLIFY
46
+ servicename:
47
+ volumes:
48
+ - ./config/servicename.yaml:/etc/servicename/config.yaml
49
+ \`\`\`
50
+
51
+ ### DON'T: Use Docker Compose configs feature
52
+
53
+ \`\`\`yaml
54
+ # THIS WILL FAIL IN COOLIFY
55
+ servicename:
56
+ configs:
57
+ - source: myconfig
58
+ target: /etc/servicename/config.yaml
59
+
60
+ configs:
61
+ myconfig:
62
+ file: ./config/servicename.yaml
63
+ \`\`\`
64
+
65
+ ### DO: Use named volumes for persistent data
66
+
67
+ \`\`\`yaml
68
+ # This works correctly
69
+ servicename:
70
+ volumes:
71
+ - servicename-data:/var/lib/servicename
72
+
73
+ volumes:
74
+ servicename-data:
75
+ driver: local
76
+ \`\`\`
77
+
78
+ ## Build Workflow Requirements
79
+
80
+ When adding services that need baked-in configs:
81
+
82
+ 1. Create a \`Dockerfile.servicename\` that COPYs the config
83
+ 2. Add the service to \`docker-compose.build.yml\` with build context
84
+ 3. Update the GitHub Actions workflow to:
85
+ - Build the new image
86
+ - Tag and push as both \`\${COMMIT_SHA}\` and \`latest\`
87
+ - Sync the new Dockerfile to the deploy branch
88
+ - Update image tags in docker-compose.yml via sed
89
+
90
+ ## Private Registry Configuration
91
+
92
+ This project uses a private registry at \`10.0.0.2:5000\`. The staging server has \`insecure-registries\` configured, but Coolify's helper container may not inherit this. Pre-pulling images on the target server can help avoid registry issues.
93
+
94
+ ## Debugging Failed Deployments
95
+
96
+ 1. Check deployment logs: \`coolify deploy get <uuid> --format pretty\`
97
+ 2. Look for "bind source path does not exist" errors - indicates config file issue
98
+ 3. SSH to staging and check container logs: \`docker logs <container-name>\`
99
+ 4. Verify images exist in registry and can be pulled manually
100
+
101
+ ## Migration Handling
102
+
103
+ Database migrations are copied to \`/app/scripts/\` (not \`/app/lib/db/\`) to avoid conflicts with Next.js standalone output. The entrypoint.sh runs migrations before starting the server.
104
+ `;
105
+ }
package/dist/templates/docker-compose-build.d.ts ADDED
@@ -0,0 +1,4 @@
1
+ export interface DockerComposeBuildOptions {
2
+ projectName: string;
3
+ }
4
+ export declare function generateDockerComposeBuild(options: DockerComposeBuildOptions): string;
package/dist/templates/docker-compose-build.js ADDED
@@ -0,0 +1,16 @@
1
+ const REGISTRY = '10.0.0.2:5000';
2
+ export function generateDockerComposeBuild(options) {
3
+ const { projectName } = options;
4
+ return `# Used by GitHub Actions to build and push images
5
+ # Do not use this file with Coolify directly
6
+
7
+ services:
8
+ app:
9
+ build:
10
+ context: .
11
+ dockerfile: Dockerfile
12
+ args:
13
+ SKIP_ENV_VALIDATION: "true"
14
+ image: ${REGISTRY}/\${COMPOSE_PROJECT_NAME:-${projectName}}-app:\${COMMIT_SHA:-latest}
15
+ `;
16
+ }
package/dist/templates/docker-compose.d.ts ADDED
@@ -0,0 +1,7 @@
1
+ export interface DockerComposeOptions {
2
+ projectName: string;
3
+ includePostgres: boolean;
4
+ includeRedis: boolean;
5
+ includeMinio: boolean;
6
+ }
7
+ export declare function generateDockerCompose(options: DockerComposeOptions): string;
package/dist/templates/docker-compose.js ADDED
@@ -0,0 +1,86 @@
1
+ const REGISTRY = '10.0.0.2:5000';
2
+ export function generateDockerCompose(options) {
3
+ const { projectName, includePostgres, includeRedis, includeMinio } = options;
4
+ const services = [];
5
+ const volumes = [];
6
+ // App service
7
+ services.push(` app:
8
+ image: ${REGISTRY}/${projectName}-app:latest
9
+ restart: unless-stopped
10
+ ports:
11
+ - "3000:3000"
12
+ environment:
13
+ - NODE_ENV=production${includePostgres ? `
14
+ - POSTGRES_URL=postgres://\${POSTGRES_USER:-postgres}:\${POSTGRES_PASSWORD:-postgres}@postgres:5432/\${POSTGRES_DB:-app}` : ''}${includeRedis ? `
15
+ - REDIS_URL=redis://redis:6379` : ''}${includeMinio ? `
16
+ - MINIO_ENDPOINT=http://minio:9000
17
+ - MINIO_ACCESS_KEY=\${MINIO_ACCESS_KEY:-minioadmin}
18
+ - MINIO_SECRET_KEY=\${MINIO_SECRET_KEY:-minioadmin}
19
+ - MINIO_BUCKET=\${MINIO_BUCKET:-uploads}` : ''}
20
+ labels:
21
+ - coolify.autoUpdate=true
22
+ - coolify.preserveRepository=true${includePostgres || includeRedis || includeMinio ? `
23
+ depends_on:${includePostgres ? `
24
+ postgres:
25
+ condition: service_healthy` : ''}${includeRedis ? `
26
+ redis:
27
+ condition: service_healthy` : ''}${includeMinio ? `
28
+ minio:
29
+ condition: service_started` : ''}` : ''}`);
30
+ // PostgreSQL service
31
+ if (includePostgres) {
32
+ services.push(` postgres:
33
+ image: postgres:16-alpine
34
+ restart: unless-stopped
35
+ environment:
36
+ - POSTGRES_USER=\${POSTGRES_USER:-postgres}
37
+ - POSTGRES_PASSWORD=\${POSTGRES_PASSWORD:-postgres}
38
+ - POSTGRES_DB=\${POSTGRES_DB:-app}
39
+ volumes:
40
+ - postgres-data:/var/lib/postgresql/data
41
+ healthcheck:
42
+ test: ["CMD-SHELL", "pg_isready -U \${POSTGRES_USER:-postgres}"]
43
+ interval: 10s
44
+ timeout: 5s
45
+ retries: 5`);
46
+ volumes.push(' postgres-data:');
47
+ }
48
+ // Redis service
49
+ if (includeRedis) {
50
+ services.push(` redis:
51
+ image: redis:7-alpine
52
+ restart: unless-stopped
53
+ volumes:
54
+ - redis-data:/data
55
+ healthcheck:
56
+ test: ["CMD", "redis-cli", "ping"]
57
+ interval: 10s
58
+ timeout: 5s
59
+ retries: 5`);
60
+ volumes.push(' redis-data:');
61
+ }
62
+ // MinIO service
63
+ if (includeMinio) {
64
+ services.push(` minio:
65
+ image: minio/minio:latest
66
+ restart: unless-stopped
67
+ command: server /data --console-address ":9001"
68
+ environment:
69
+ - MINIO_ROOT_USER=\${MINIO_ACCESS_KEY:-minioadmin}
70
+ - MINIO_ROOT_PASSWORD=\${MINIO_SECRET_KEY:-minioadmin}
71
+ volumes:
72
+ - minio-data:/data
73
+ ports:
74
+ - "9001:9001"`);
75
+ volumes.push(' minio-data:');
76
+ }
77
+ let compose = `services:
78
+ ${services.join('\n\n')}`;
79
+ if (volumes.length > 0) {
80
+ compose += `
81
+
82
+ volumes:
83
+ ${volumes.join('\n')}`;
84
+ }
85
+ return compose + '\n';
86
+ }
package/dist/templates/dockerfile.d.ts ADDED
@@ -0,0 +1,6 @@
1
+ export interface DockerfileOptions {
2
+ projectType: 'nextjs' | 'node';
3
+ packageManager: 'pnpm' | 'npm' | 'yarn';
4
+ includePostgres: boolean;
5
+ }
6
+ export declare function generateDockerfile(options: DockerfileOptions): string;
package/dist/templates/dockerfile.js ADDED
@@ -0,0 +1,175 @@
1
+ export function generateDockerfile(options) {
2
+ if (options.projectType === 'nextjs') {
3
+ return generateNextjsDockerfile(options);
4
+ }
5
+ return generateNodeDockerfile(options);
6
+ }
7
+ function generateNextjsDockerfile(options) {
8
+ const { packageManager, includePostgres } = options;
9
+ const installCmd = packageManager === 'pnpm'
10
+ ? 'pnpm install --frozen-lockfile'
11
+ : packageManager === 'yarn'
12
+ ? 'yarn install --frozen-lockfile'
13
+ : 'npm ci';
14
+ const buildCmd = packageManager === 'pnpm'
15
+ ? 'pnpm build'
16
+ : packageManager === 'yarn'
17
+ ? 'yarn build'
18
+ : 'npm run build';
19
+ const esbuildCmd = packageManager === 'pnpm'
20
+ ? 'pnpm exec esbuild'
21
+ : packageManager === 'yarn'
22
+ ? 'yarn esbuild'
23
+ : 'npx esbuild';
24
+ const packageFiles = packageManager === 'pnpm'
25
+ ? 'COPY package.json pnpm-lock.yaml ./'
26
+ : packageManager === 'yarn'
27
+ ? 'COPY package.json yarn.lock ./'
28
+ : 'COPY package.json package-lock.json ./';
29
+ const corepackSetup = packageManager === 'pnpm'
30
+ ? 'RUN corepack enable && corepack prepare pnpm@latest --activate'
31
+ : packageManager === 'yarn'
32
+ ? 'RUN corepack enable'
33
+ : '';
34
+ // For Next.js standalone, we bundle the migration script with esbuild at build time
35
+ // This avoids module resolution issues in the minimal standalone container
36
+ const migrationBundle = includePostgres ? `
37
+
38
+ # Build the migration bundle (single JS file with all deps baked in)
39
+ # This avoids module resolution issues in the standalone container
40
+ RUN ${esbuildCmd} lib/db/migrate.ts --bundle --platform=node --target=node22 --outfile=lib/db/migrate.bundle.js --external:dotenv` : '';
41
+ const migrationCopy = includePostgres ? `
42
+
43
+ # Copy database migrations and bundled migration script
44
+ COPY --from=builder --chown=nextjs:nodejs /app/lib/db/migrations ./lib/db/migrations
45
+ COPY --from=builder --chown=nextjs:nodejs /app/lib/db/migrate.bundle.js ./lib/db/migrate.bundle.js` : '';
46
+ return `# syntax=docker.io/docker/dockerfile:1
47
+
48
+ FROM node:22-alpine AS base
49
+
50
+ # Install package manager
51
+ ${corepackSetup}
52
+
53
+ # Dependencies stage
54
+ FROM base AS deps
55
+ WORKDIR /app
56
+
57
+ # Copy package files
58
+ ${packageFiles}
59
+
60
+ # Install dependencies
61
+ RUN ${installCmd}
62
+
63
+ # Builder stage
64
+ FROM base AS builder
65
+ WORKDIR /app
66
+
67
+ # Copy dependencies
68
+ COPY --from=deps /app/node_modules ./node_modules
69
+ COPY . .
70
+
71
+ # Build-time environment configuration
72
+ # These are placeholder values used only during the build process
73
+ # Real values are provided at runtime via environment variables
74
+ ENV SKIP_ENV_VALIDATION=true
75
+ ENV NEXT_TELEMETRY_DISABLED=1
76
+ ENV NODE_ENV=production
77
+
78
+ # Placeholder values for build (required by Next.js build process)
79
+ # These are overridden at runtime via docker-compose environment
80
+ ENV POSTGRES_URL=postgres://placeholder:placeholder@localhost:5432/placeholder
81
+ ENV REDIS_URL=redis://localhost:6379
82
+
83
+ # Build the application
84
+ RUN ${buildCmd}
85
+ ${migrationBundle}
86
+
87
+ # Runner stage
88
+ FROM base AS runner
89
+ WORKDIR /app
90
+
91
+ ENV NODE_ENV=production
92
+ ENV NEXT_TELEMETRY_DISABLED=1
93
+
94
+ # Create non-root user for security
95
+ RUN addgroup --system --gid 1001 nodejs
96
+ RUN adduser --system --uid 1001 nextjs
97
+
98
+ # Copy public assets
99
+ COPY --from=builder /app/public ./public
100
+
101
+ # Copy built application (standalone output)
102
+ COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./
103
+ COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static
104
+ ${migrationCopy}
105
+
106
+ # Copy entrypoint script
107
+ COPY --chown=nextjs:nodejs entrypoint.sh ./entrypoint.sh
108
+ RUN chmod +x ./entrypoint.sh
109
+
110
+ USER nextjs
111
+
112
+ EXPOSE 3000
113
+
114
+ ENV PORT=3000
115
+ ENV HOSTNAME="0.0.0.0"
116
+
117
+ CMD ["./entrypoint.sh"]
118
+ `;
119
+ }
120
+ function generateNodeDockerfile(options) {
121
+ const { packageManager, includePostgres } = options;
122
+ const installCmd = packageManager === 'pnpm'
123
+ ? 'pnpm install --frozen-lockfile --prod'
124
+ : packageManager === 'yarn'
125
+ ? 'yarn install --frozen-lockfile --production'
126
+ : 'npm ci --only=production';
127
+ const packageFiles = packageManager === 'pnpm'
128
+ ? 'COPY package.json pnpm-lock.yaml ./'
129
+ : packageManager === 'yarn'
130
+ ? 'COPY package.json yarn.lock ./'
131
+ : 'COPY package.json package-lock.json ./';
132
+ const corepackSetup = packageManager === 'pnpm'
133
+ ? 'RUN corepack enable && corepack prepare pnpm@latest --activate'
134
+ : packageManager === 'yarn'
135
+ ? 'RUN corepack enable'
136
+ : '';
137
+ const migrationCopy = includePostgres ? `
138
+ # Copy migration files
139
+ COPY --chown=node:node lib/db/migrations ./scripts/migrations
140
+ COPY --chown=node:node lib/db/migrate.ts ./scripts/migrate.ts` : '';
141
+ const tsxInstall = includePostgres ? `
142
+ # Install tsx globally for running TypeScript migration scripts
143
+ RUN npm install -g tsx` : '';
144
+ return `FROM node:22-alpine
145
+
146
+ # Install package manager
147
+ ${corepackSetup}
148
+
149
+ WORKDIR /app
150
+
151
+ # Copy package files
152
+ ${packageFiles}
153
+
154
+ # Install production dependencies
155
+ RUN ${installCmd}
156
+ ${tsxInstall}
157
+
158
+ # Copy application code
159
+ COPY --chown=node:node . .
160
+ ${migrationCopy}
161
+
162
+ # Copy entrypoint script
163
+ COPY --chown=node:node entrypoint.sh ./entrypoint.sh
164
+ RUN chmod +x ./entrypoint.sh
165
+
166
+ USER node
167
+
168
+ EXPOSE 3000
169
+
170
+ ENV NODE_ENV=production
171
+ ENV PORT=3000
172
+
173
+ CMD ["./entrypoint.sh"]
174
+ `;
175
+ }
package/dist/templates/entrypoint.d.ts ADDED
@@ -0,0 +1,5 @@
1
+ export interface EntrypointOptions {
2
+ projectType: 'nextjs' | 'node';
3
+ includePostgres: boolean;
4
+ }
5
+ export declare function generateEntrypoint(options: EntrypointOptions): string;
package/dist/templates/entrypoint.js ADDED
@@ -0,0 +1,30 @@
1
+ export function generateEntrypoint(options) {
2
+ const { projectType, includePostgres } = options;
3
+ // For Next.js standalone, we run the bundled JS file (no tsx needed)
4
+ // For Node.js, we use tsx since full node_modules is available
5
+ const migrationStep = includePostgres
6
+ ? projectType === 'nextjs'
7
+ ? `
8
+ # Run database migrations (bundled JS with all deps baked in)
9
+ echo "⏳ Running database migrations..."
10
+ node lib/db/migrate.bundle.js
11
+ `
12
+ : `
13
+ # Run database migrations
14
+ echo "⏳ Running database migrations..."
15
+ npx tsx scripts/migrate.ts
16
+ `
17
+ : '';
18
+ const startCmd = projectType === 'nextjs'
19
+ ? 'exec node server.js'
20
+ : 'exec node dist/index.js';
21
+ return `#!/bin/sh
22
+ set -e
23
+
24
+ echo "🚀 Starting application..."
25
+ ${migrationStep}
26
+ # Start the application
27
+ echo "✅ Starting server..."
28
+ ${startCmd}
29
+ `;
30
+ }
package/dist/templates/index.d.ts ADDED
@@ -0,0 +1,7 @@
1
+ export { generateDockerfile, type DockerfileOptions } from './dockerfile.js';
2
+ export { generateDockerCompose, type DockerComposeOptions } from './docker-compose.js';
3
+ export { generateDockerComposeBuild, type DockerComposeBuildOptions } from './docker-compose-build.js';
4
+ export { generateWorkflow, type WorkflowOptions } from './workflow.js';
5
+ export { generateEntrypoint, type EntrypointOptions } from './entrypoint.js';
6
+ export { generateMigrateScript, type MigrateScriptOptions } from './migrate.js';
7
+ export { generateClaudeRules } from './claude-rules.js';
package/dist/templates/index.js ADDED
@@ -0,0 +1,7 @@
1
+ export { generateDockerfile } from './dockerfile.js';
2
+ export { generateDockerCompose } from './docker-compose.js';
3
+ export { generateDockerComposeBuild } from './docker-compose-build.js';
4
+ export { generateWorkflow } from './workflow.js';
5
+ export { generateEntrypoint } from './entrypoint.js';
6
+ export { generateMigrateScript } from './migrate.js';
7
+ export { generateClaudeRules } from './claude-rules.js';
package/dist/templates/migrate.d.ts ADDED
@@ -0,0 +1,4 @@
1
+ export interface MigrateScriptOptions {
2
+ projectType: 'nextjs' | 'node';
3
+ }
4
+ export declare function generateMigrateScript(options?: MigrateScriptOptions): string;
package/dist/templates/migrate.js ADDED
@@ -0,0 +1,48 @@
1
+ export function generateMigrateScript(options) {
2
+ const projectType = options?.projectType ?? 'nextjs';
3
+ // For Next.js, migrations are in lib/db/migrations (bundled with esbuild)
4
+ // For Node.js, migrations are in scripts/migrations (run with tsx)
5
+ const migrationsFolder = projectType === 'nextjs'
6
+ ? './lib/db/migrations'
7
+ : './scripts/migrations';
8
+ return `import { drizzle } from 'drizzle-orm/postgres-js';
9
+ import { migrate } from 'drizzle-orm/postgres-js/migrator';
10
+ import postgres from 'postgres';
11
+
12
+ const runMigrate = async () => {
13
+ // Only load dotenv in development (not needed in Docker where env vars are injected)
14
+ if (process.env.NODE_ENV !== 'production') {
15
+ const { config } = await import('dotenv');
16
+ config({ path: '.env.local' });
17
+ }
18
+
19
+ // Skip migrations during Docker build (placeholder values or explicit skip)
20
+ if (
21
+ !process.env.POSTGRES_URL ||
22
+ process.env.SKIP_ENV_VALIDATION === 'true' ||
23
+ process.env.POSTGRES_URL.includes('placeholder')
24
+ ) {
25
+ console.log('⏭️ Skipping migrations (build-time or no database configured)');
26
+ process.exit(0);
27
+ }
28
+
29
+ const connection = postgres(process.env.POSTGRES_URL, { max: 1 });
30
+ const db = drizzle(connection);
31
+
32
+ console.log('⏳ Running migrations...');
33
+
34
+ const start = Date.now();
35
+ await migrate(db, { migrationsFolder: '${migrationsFolder}' });
36
+ const end = Date.now();
37
+
38
+ console.log('✅ Migrations completed in', end - start, 'ms');
39
+ process.exit(0);
40
+ };
41
+
42
+ runMigrate().catch((err) => {
43
+ console.error('❌ Migration failed');
44
+ console.error(err);
45
+ process.exit(1);
46
+ });
47
+ `;
48
+ }
package/dist/templates/workflow.d.ts ADDED
@@ -0,0 +1,4 @@
1
+ export interface WorkflowOptions {
2
+ projectName: string;
3
+ }
4
+ export declare function generateWorkflow(options: WorkflowOptions): string;
package/dist/templates/workflow.js ADDED
@@ -0,0 +1,77 @@
1
+ const REGISTRY = '10.0.0.2:5000';
2
+ export function generateWorkflow(options) {
3
+ const { projectName } = options;
4
+ return `name: Build and Deploy
5
+
6
+ on:
7
+ push:
8
+ branches:
9
+ - develop
10
+ - 'feature/**'
11
+ - 'fix/**'
12
+ - 'hotfix/**'
13
+
14
+ env:
15
+ REGISTRY: ${REGISTRY}
16
+ PROJECT_NAME: ${projectName}
17
+
18
+ jobs:
19
+ build-and-push:
20
+ runs-on: self-hosted
21
+ permissions:
22
+ contents: write
23
+ pull-requests: write
24
+
25
+ steps:
26
+ - name: Checkout
27
+ uses: actions/checkout@v4
28
+
29
+ - name: Get commit SHA
30
+ id: sha
31
+ run: echo "sha=\$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
32
+
33
+ - name: Build and push images
34
+ run: |
35
+ export COMMIT_SHA=\${{ steps.sha.outputs.sha }}
36
+ export COMPOSE_PROJECT_NAME=\${{ env.PROJECT_NAME }}
37
+
38
+ docker compose -f docker-compose.build.yml build
39
+ docker compose -f docker-compose.build.yml push
40
+
41
+ # Also tag and push as latest
42
+ docker tag \${{ env.REGISTRY }}/\${{ env.PROJECT_NAME }}-app:\${{ steps.sha.outputs.sha }} \${{ env.REGISTRY }}/\${{ env.PROJECT_NAME }}-app:latest
43
+ docker push \${{ env.REGISTRY }}/\${{ env.PROJECT_NAME }}-app:latest
44
+
45
+ - name: Determine target branch
46
+ id: target
47
+ run: |
48
+ # All branches deploy to staging
49
+ # Promotion to main is a manual step
50
+ echo "branch=staging" >> $GITHUB_OUTPUT
51
+
52
+ - name: Create deploy PR
53
+ env:
54
+ GH_TOKEN: \${{ secrets.GITHUB_TOKEN }}
55
+ run: |
56
+ TARGET_BRANCH=\${{ steps.target.outputs.branch }}
57
+ SOURCE_BRANCH=\${{ github.ref_name }}
58
+
59
+ # Check if PR already exists
60
+ EXISTING_PR=\$(gh pr list --head "\$SOURCE_BRANCH" --base "\$TARGET_BRANCH" --json number -q '.[0].number' || echo "")
61
+
62
+ if [ -n "\$EXISTING_PR" ]; then
63
+ echo "PR #\$EXISTING_PR already exists for \$SOURCE_BRANCH -> \$TARGET_BRANCH"
64
+ else
65
+ gh pr create \\
66
+ --title "Deploy: \$SOURCE_BRANCH -> \$TARGET_BRANCH" \\
67
+ --body "Automated deployment PR from \$SOURCE_BRANCH to \$TARGET_BRANCH.
68
+
69
+ **Commit:** \${{ steps.sha.outputs.sha }}
70
+ **Image:** \${{ env.REGISTRY }}/\${{ env.PROJECT_NAME }}-app:\${{ steps.sha.outputs.sha }}
71
+
72
+ Merge this PR to trigger deployment." \\
73
+ --base "\$TARGET_BRANCH" \\
74
+ --head "\$SOURCE_BRANCH" || echo "PR creation skipped (may already exist or no changes)"
75
+ fi
76
+ `;
77
+ }
package/package.json ADDED
@@ -0,0 +1,46 @@
1
+ {
2
+ "name": "@panoptic-it-solutions/coolify-setup",
3
+ "version": "1.0.0",
4
+ "description": "CLI tool for setting up Coolify deployment on Panoptic projects",
5
+ "type": "module",
6
+ "main": "dist/index.js",
7
+ "bin": {
8
+ "coolify-setup": "dist/index.js"
9
+ },
10
+ "scripts": {
11
+ "build": "tsc",
12
+ "dev": "tsx src/index.ts",
13
+ "start": "node dist/index.js",
14
+ "prepublishOnly": "npm run build"
15
+ },
16
+ "keywords": [
17
+ "coolify",
18
+ "docker",
19
+ "deployment",
20
+ "panoptic",
21
+ "cli"
22
+ ],
23
+ "author": "Panoptic IT Solutions",
24
+ "license": "MIT",
25
+ "repository": {
26
+ "type": "git",
27
+ "url": "https://github.com/Panoptic-IT-Solutions/coolify-setup.git"
28
+ },
29
+ "files": [
30
+ "dist"
31
+ ],
32
+ "dependencies": {
33
+ "chalk": "^5.3.0",
34
+ "ora": "^8.0.0",
35
+ "prompts": "^2.4.2"
36
+ },
37
+ "devDependencies": {
38
+ "@types/node": "^22.0.0",
39
+ "@types/prompts": "^2.4.9",
40
+ "tsx": "^4.19.0",
41
+ "typescript": "^5.6.0"
42
+ },
43
+ "engines": {
44
+ "node": ">=18"
45
+ }
46
+ }