@hraza01/skyhook 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +75 -0
- package/package.json +39 -0
- package/shell/deploy_core.sh +27 -0
- package/src/cli.js +53 -0
- package/src/config.js +42 -0
- package/src/dag_selection.js +46 -0
- package/src/deploy.js +153 -0
- package/src/errors.js +20 -0
- package/src/git_validation.js +75 -0
- package/src/index.js +86 -0
- package/src/logger.js +36 -0
- package/src/quotes.js +17 -0
- package/src/utils.js +19 -0
package/README.md
ADDED
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
# Skyhook 🪂
|
|
2
|
+
|
|
3
|
+
**Skyhook** is a robust, interactive CLI tool for deploying Airflow DAGs to Google Cloud Composer. It replaces legacy bash scripts with a modern Node.js interface featuring Git validation, synchronized spinners, and a professional UI.
|
|
4
|
+
|
|
5
|
+
## About
|
|
6
|
+
|
|
7
|
+
> **Use Case**: Skyhook is optimized for development environments and teams where a full CI/CD pipeline is not yet established. While automated CI/CD remains the industry standard for production deployments, this utility provides a safe, structured, and efficient alternative for manual deployments, helping smaller teams iterate faster without sacrificing validation or safety.
|
|
8
|
+
|
|
9
|
+
## Features
|
|
10
|
+
|
|
11
|
+
- **🛡️ Safety First**: Automatically validates that your DAG folder is a Git repository, is on the `main` branch, and is fully synced with remote.
|
|
12
|
+
- **✨ Modern UI**: Built with `@clack/prompts` for a minimal, clean aesthetic.
|
|
13
|
+
- **🚀 Live Feedback**: Shows real-time file upload progress from `gsutil`.
|
|
14
|
+
- **🔌 Cross-Platform**: Works seamlessly on macOS, Linux, and Windows (WSL).
|
|
15
|
+
|
|
16
|
+
## Prerequisites
|
|
17
|
+
|
|
18
|
+
- **Node.js**: v20.5.0 or higher
|
|
19
|
+
- **Google Cloud SDK**: `gsutil` must be installed and **authenticated**.
|
|
20
|
+
- **Git**: Must be installed and available in the PATH.
|
|
21
|
+
- **Environment**: You must be inside your Composer's local Airflow development environment (project root) to run this tool.
|
|
22
|
+
|
|
23
|
+
## Installation
|
|
24
|
+
|
|
25
|
+
Install globally via NPM:
|
|
26
|
+
|
|
27
|
+
```console
|
|
28
|
+
$ npm install -g @hraza01/skyhook
|
|
29
|
+
```
|
|
30
|
+
|
|
31
|
+
## Updating
|
|
32
|
+
|
|
33
|
+
To update to the latest version:
|
|
34
|
+
|
|
35
|
+
```console
|
|
36
|
+
$ npm update -g @hraza01/skyhook
|
|
37
|
+
```
|
|
38
|
+
|
|
39
|
+
## Configuration
|
|
40
|
+
|
|
41
|
+
Skyhook requires two environment variables to function. It will **exit** if these are not provided.
|
|
42
|
+
|
|
43
|
+
| Variable | Description |
|
|
44
|
+
| :------------------ | :--------------------------------------------- |
|
|
45
|
+
| `GCS_BUCKET_NAME` | The name of your Composer GCS bucket. |
|
|
46
|
+
| `COMPOSER_URL_BASE` | The base URL for your environment's webserver. |
|
|
47
|
+
|
|
48
|
+
## Usage
|
|
49
|
+
|
|
50
|
+
### Option 1: VS Code Task (Recommended)
|
|
51
|
+
|
|
52
|
+
1. Copy the example task configuration from `vscode-example/tasks.json` to your project's `.vscode/tasks.json`.
|
|
53
|
+
2. Update the `env` variables in `tasks.json`.
|
|
54
|
+
3. Run the task **"Deploy DAG to Cloud Composer"**.
|
|
55
|
+
|
|
56
|
+
### Option 2: CLI (Manual)
|
|
57
|
+
|
|
58
|
+
Navigate to your Airflow project root (where the `dags/` folder is) and run:
|
|
59
|
+
|
|
60
|
+
```console
|
|
61
|
+
$ export GCS_BUCKET_NAME="<your-bucket-name>"
|
|
62
|
+
$ export COMPOSER_URL_BASE="<your-composer-webserver-url-base>"
|
|
63
|
+
|
|
64
|
+
$ skyhook
|
|
65
|
+
```
|
|
66
|
+
|
|
67
|
+
## Project Structure
|
|
68
|
+
|
|
69
|
+
For a detailed overview of the project structure and module descriptions, see:
|
|
70
|
+
|
|
71
|
+
📄 **[Project Structure Documentation](https://github.com/hraza01/skyhook/blob/main/docs/project-structure.md)**
|
|
72
|
+
|
|
73
|
+
## Thank You
|
|
74
|
+
|
|
75
|
+
Thank you for your attention and for taking the time to read the documentation! We hope this tool helps streamline your workflow.
|
package/package.json
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@hraza01/skyhook",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"description": "Interactive CLI for Cloud Composer DAG deployment",
|
|
5
|
+
"main": "src/index.js",
|
|
6
|
+
"bin": {
|
|
7
|
+
"skyhook": "src/index.js"
|
|
8
|
+
},
|
|
9
|
+
"scripts": {
|
|
10
|
+
"test": "echo \"Error: no test specified\" && exit 1"
|
|
11
|
+
},
|
|
12
|
+
"keywords": [
|
|
13
|
+
"airflow",
|
|
14
|
+
"cloud-composer",
|
|
15
|
+
"dag",
|
|
16
|
+
"deployment",
|
|
17
|
+
"cli",
|
|
18
|
+
"gcp"
|
|
19
|
+
],
|
|
20
|
+
"author": "Hasan Raza",
|
|
21
|
+
"license": "MIT",
|
|
22
|
+
"type": "module",
|
|
23
|
+
"repository": {
|
|
24
|
+
"type": "git",
|
|
25
|
+
"url": "https://github.com/hraza01/skyhook.git"
|
|
26
|
+
},
|
|
27
|
+
"dependencies": {
|
|
28
|
+
"@clack/prompts": "^1.0.0",
|
|
29
|
+
"chalk": "^5.6.2",
|
|
30
|
+
"execa": "^9.6.1",
|
|
31
|
+
"terminal-link": "^5.0.0"
|
|
32
|
+
},
|
|
33
|
+
"engines": {
|
|
34
|
+
"node": ">=20.5.0"
|
|
35
|
+
},
|
|
36
|
+
"devDependencies": {
|
|
37
|
+
"prettier": "^3.8.1"
|
|
38
|
+
}
|
|
39
|
+
}
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
#!/bin/bash
# deploy_core.sh — low-level GCS sync step for Skyhook.
#
# Usage: deploy_core.sh <SOURCE_PATH> <DEST_PATH>
# Expects GCS_BUCKET_NAME to be exported by the caller (src/deploy.js).
#
# NOTE(review): stdout of this script is parsed by src/deploy.js to drive the
# spinner (the "Uploading N files..." line and gsutil's "Copying ..." lines) —
# keep the wording stable or update the parser in deploy.js alongside it.
set -e

# Configuration
if [ -z "$GCS_BUCKET_NAME" ]; then
  echo "Error: GCS_BUCKET_NAME environment variable is not set."
  exit 1
fi

# Input arguments
SOURCE_PATH="$1"
DEST_PATH="$2"

if [ -z "$SOURCE_PATH" ] || [ -z "$DEST_PATH" ]; then
  echo "Error: Missing arguments. Usage: $0 <SOURCE_PATH> <DEST_PATH>"
  exit 1
fi

# Validation is handled by the calling Node.js script. We assume valid inputs here.
echo "Syncing $SOURCE_PATH to $DEST_PATH..."

# Calculate file count (informational)
# Excludes .git and __pycache__ paths to mirror the rsync exclusion below.
FILE_COUNT=$(find "$SOURCE_PATH" -type f -not -path '*/.git/*' -not -path '*/__pycache__/*' | wc -l | tr -d ' ')
echo "Uploading $FILE_COUNT files..."

# Perform Sync
# -m: parallel transfers; -r: recursive; -x: Python-regex exclusion of VCS
# metadata and bytecode caches (paths are relative to SOURCE_PATH).
gsutil -m rsync -r -x "\.git/.*|__pycache__/.*" "$SOURCE_PATH" "$DEST_PATH"
|
package/src/cli.js
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
import chalk from "chalk"
|
|
2
|
+
import { intro, spinner } from "@clack/prompts"
|
|
3
|
+
|
|
4
|
+
/**
 * Print the CLI version string and terminate with a success exit code.
 * NOTE(review): keep this in sync with the "version" field in package.json.
 */
export function showVersionInfo() {
  const version = "1.0.0"
  console.log(`skyhook v${version}`)
  process.exit(0)
}
|
|
11
|
+
|
|
12
|
+
/**
 * Print usage, flags, and required environment variables, then exit
 * successfully.
 */
export function showHelpInfo() {
  console.log(
    chalk.cyan.bold("\nSkyhook - Cloud Composer DAG Deployment Utility\n"),
  )

  const helpLines = [
    "Usage:",
    " skyhook [options] [path]\n",
    "Options:",
    " -h, --help Show this help message",
    " --version Show version number",
    " -v, --verbose Enable verbose logging\n",
    "Environment Variables (Required):",
    " GCS_BUCKET_NAME Your Composer GCS bucket name",
    " COMPOSER_URL_BASE Your Composer webserver base URL\n",
    "For more information, visit:",
    " https://github.com/hraza01/skyhook\n",
  ]
  for (const line of helpLines) {
    console.log(line)
  }

  process.exit(0)
}
|
|
32
|
+
|
|
33
|
+
/**
 * Render the Skyhook banner using clack's intro element: black text on a
 * cyan background to match the rest of the UI.
 */
export function showIntro() {
  const banner = chalk.black(" Skyhook / Cloud Composer Deployment Utility ")
  intro(chalk.bgCyan(banner))
}
|
|
43
|
+
|
|
44
|
+
/**
 * Build a clack spinner whose braille animation frames are tinted cyan so
 * it matches the banner styling.
 */
export function createSpinner() {
  const brailleFrames = ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"]
  return spinner({
    frames: brailleFrames.map((frame) => chalk.cyan(frame)),
  })
}
|
package/src/config.js
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
import { parseArgs } from "util"
|
|
2
|
+
import path from "path"
|
|
3
|
+
import { ConfigError } from "./errors.js"
|
|
4
|
+
|
|
5
|
+
// CLI flag definitions: --verbose/-v, --version, --help/-h.
const cliOptions = {
  verbose: {
    type: "boolean",
    short: "v",
  },
  version: {
    type: "boolean",
  },
  help: {
    type: "boolean",
    short: "h",
  },
}

const parsed = parseArgs({ options: cliOptions, allowPositionals: true })

export const verbose = parsed.values.verbose
export const showVersion = parsed.values.version
export const showHelp = parsed.values.help

// Root directory: first positional argument (resolved to an absolute path)
// or, when none is given, the current working directory.
const requestedRoot = parsed.positionals[0]
export const ROOT_DIR = requestedRoot
  ? path.resolve(requestedRoot)
  : process.cwd()
export const DAGS_DIR = path.join(ROOT_DIR, "dags")
|
|
31
|
+
|
|
32
|
+
/**
 * Assert that every environment variable Skyhook requires is present
 * (and non-empty).
 *
 * Improvement over the previous version: the error names exactly which
 * variable(s) are missing instead of listing both, so the user knows
 * precisely what to export.
 *
 * @throws {ConfigError} when one or more required variables are unset/empty
 */
export function validateEnv() {
  const required = ["GCS_BUCKET_NAME", "COMPOSER_URL_BASE"]
  const missing = required.filter((name) => !process.env[name])
  if (missing.length > 0) {
    throw new ConfigError(
      `Missing required environment variable(s): ${missing.join(", ")}.`,
    )
  }
}
|
|
39
|
+
|
|
40
|
+
// Snapshot of the required environment variables, captured once at import
// time. validateEnv() must run before these are trusted — if a variable is
// unset these are undefined (and BUCKET_URL would literally contain the
// string "undefined").
export const GCS_BUCKET_NAME = process.env.GCS_BUCKET_NAME
export const COMPOSER_URL_BASE = process.env.COMPOSER_URL_BASE
// Destination prefix for DAG uploads, e.g. gs://<bucket>/dags
export const BUCKET_URL = `gs://${GCS_BUCKET_NAME}/dags`
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
import fs from "fs"
|
|
2
|
+
import path from "path"
|
|
3
|
+
import { select, isCancel } from "@clack/prompts"
|
|
4
|
+
import { logger } from "./logger.js"
|
|
5
|
+
import { UserCancellationError } from "./errors.js"
|
|
6
|
+
|
|
7
|
+
/**
 * Discover deployable DAG folders directly under `dagsDir`.
 *
 * Hidden entries (leading dot) and plain files are skipped. Aborts the
 * spinner and raises a cancellation when the directory is missing or empty.
 *
 * @param {string} dagsDir - directory expected to contain one folder per DAG
 * @param {object} s - active clack spinner
 * @returns {string[]} folder names found
 * @throws {UserCancellationError} when nothing deployable is found
 */
export function scanDags(dagsDir, s) {
  logger.info("SCAN", `Scanning directory: ${dagsDir}`)
  s.start(`Looking for DAGs in: ${path.relative(process.cwd(), dagsDir)}`)

  if (!fs.existsSync(dagsDir)) {
    s.stop(`Directory '${dagsDir}' not found.`, 1)
    throw new UserCancellationError("Operation cancelled.")
  }

  const folders = []
  for (const entry of fs.readdirSync(dagsDir)) {
    const isDirectory = fs.statSync(path.join(dagsDir, entry)).isDirectory()
    if (isDirectory && !entry.startsWith(".")) {
      folders.push(entry)
    }
  }

  if (folders.length === 0) {
    s.stop(`No folders found in ${dagsDir}`, 1)
    throw new UserCancellationError("Operation cancelled.")
  }

  s.stop(`Found ${folders.length} Airflow DAGs.`)
  logger.info("SCAN", `Found ${folders.length} valid DAG folders.`)
  return folders
}
|
|
32
|
+
|
|
33
|
+
/**
 * Prompt the user to pick one DAG folder from the scanned list.
 *
 * @param {string[]} folders - candidate DAG folder names
 * @returns {Promise<string>} the chosen folder name
 * @throws {UserCancellationError} when the prompt is cancelled
 */
export async function selectDag(folders) {
  const choice = await select({
    message: "Select an Airflow DAG to deploy:",
    options: folders.map((folder) => ({ label: folder, value: folder })),
  })

  if (isCancel(choice)) {
    logger.warn("SELECT", "User cancelled selection.")
    throw new UserCancellationError("Deployment cancelled.")
  }

  logger.info("SELECT", `User selected: ${choice}`)
  return choice
}
|
package/src/deploy.js
ADDED
|
@@ -0,0 +1,153 @@
|
|
|
1
|
+
import { execa } from "execa"
|
|
2
|
+
import chalk from "chalk"
|
|
3
|
+
import path from "path"
|
|
4
|
+
import { cancel, outro } from "@clack/prompts"
|
|
5
|
+
import terminalLink from "terminal-link"
|
|
6
|
+
import { fileURLToPath } from "url"
|
|
7
|
+
import { countFiles } from "./utils.js"
|
|
8
|
+
import { logger } from "./logger.js"
|
|
9
|
+
import { BUCKET_URL, COMPOSER_URL_BASE } from "./config.js"
|
|
10
|
+
|
|
11
|
+
// Recreate CommonJS-style __filename/__dirname in this ES module so the
// bundled shell script can be resolved relative to the installed package,
// regardless of the user's working directory.
const __filename = fileURLToPath(import.meta.url)
const __dirname = path.dirname(__filename)

/**
 * Upload the selected DAG folder to the Composer GCS bucket via the bundled
 * shell script (shell/deploy_core.sh), stream gsutil's progress into the
 * spinner, then print a deployment summary and outro.
 *
 * On failure: logs the subprocess output, cancels the prompt session, and
 * terminates the whole process with exit code 1.
 *
 * @param {string} selectedFolder - DAG folder name (basename under dags/)
 * @param {string} sourcePath - local filesystem path of that folder
 * @param {object} s - spinner instance created by the caller
 * @param {boolean} verbose - verbosity flag; currently unused here (file
 *   logging is toggled via the logger module) — TODO confirm before removing
 */
export async function deployDag(selectedFolder, sourcePath, s, verbose) {
  s.start("Starting GCS Upload...")
  logger.info(
    "DEPLOY",
    `Starting deployment for ${selectedFolder} to ${BUCKET_URL}`,
  )

  try {
    // The heavy lifting (file counting + gsutil rsync) lives in a shell
    // script shipped with the package, resolved relative to this module.
    const deployScriptPath = path.resolve(
      __dirname,
      "../shell/deploy_core.sh",
    )
    const destination = `${BUCKET_URL}/${selectedFolder}`
    // all: true interleaves stdout+stderr into one stream so gsutil's
    // progress lines (which go to stderr) can be parsed below.
    const subprocess = execa(
      "bash",
      [deployScriptPath, sourcePath, destination],
      {
        all: true,
      },
    )

    // Known gsutil noise that should never surface in the spinner.
    const ignorePatterns = [
      "WARNING: gsutil rsync uses hashes",
      "module's C extension",
      "checksumming will run very slowly",
      "gsutil help crcmod",
      "Building synchronization state...",
      "problems with multiprocessing",
      "python.org/issue33725",
      "parallel_process_count",
      "multithreading is still available",
    ]

    // Translate raw subprocess output into concise spinner updates.
    // NOTE(review): a logical line could in principle be split across two
    // chunks; cosmetic only, since messages are advisory.
    subprocess.all.on("data", (chunk) => {
      const lines = chunk.toString().split("\n")
      for (const line of lines) {
        if (!line.trim()) continue

        const shouldIgnore = ignorePatterns.some((pattern) =>
          line.includes(pattern),
        )
        if (shouldIgnore) continue

        // "Uploading N files..." is emitted by deploy_core.sh's file count.
        if (line.includes("Uploading") && line.includes("files...")) {
          s.message(line.trim())
        } else if (line.includes("Copying")) {
          // Output looks like: "Copying file://.../dags/my_folder/sub/file.py [Content-Type=...]"
          // We want to extract: "my_folder/sub/file.py"

          // 1. Remove metadata suffix (e.g. [Content-Type...])
          let cleanLine = line.split("[")[0].trim()

          // 2. Find the index where 'selectedFolder' starts
          const folderIndex = cleanLine.indexOf(selectedFolder)

          if (folderIndex !== -1) {
            // Extract substring starting from selectedFolder
            const relativePath = cleanLine.substring(folderIndex)
            s.message(`Syncing: ${relativePath}...`)
            logger.info("SYNC", `Syncing: ${relativePath}`)
          } else {
            // Fallback if folder name not found (unlikely but safe)
            const match = cleanLine.match(/([^/]+)$/) // Get filename
            if (match && match[1]) {
              s.message(`Syncing: ${match[1]}...`)
              logger.info("SYNC", `Syncing: ${match[1]}`)
            } else {
              s.message(`Syncing...`)
            }
          }
        }
      }
    })

    // Await completion; execa rejects on non-zero exit (handled below).
    await subprocess
    s.stop("GCS sync complete.")

    // Show Summary
    // Local recount — may differ from the shell's count if files changed
    // during the sync.
    const fileCount = countFiles(sourcePath)

    // Simple padding helper
    const pad = (str, len) => str.padEnd(len)
    const labelWidth = 15

    const productionUrl = `${COMPOSER_URL_BASE}/dags/${selectedFolder}`
    // Clickable hyperlink in terminals that support OSC 8; falls back to
    // plain text elsewhere (terminal-link behavior).
    const link = terminalLink(
      chalk.cyan.underline("Open in Cloud Console"),
      productionUrl,
    )

    console.log("")
    console.log(chalk.green("Deployment Summary"))
    console.log("")

    console.log(
      `${chalk.dim(pad("Source", labelWidth))} ${chalk.reset(
        `dags/${selectedFolder}`,
      )}`,
    )
    console.log(
      `${chalk.dim(pad("Destination", labelWidth))} ${chalk.reset(
        `${BUCKET_URL}/${selectedFolder}`,
      )}`,
    )
    console.log(
      `${chalk.dim(pad("Files Synced", labelWidth))} ${chalk.reset(
        `${fileCount} files`,
      )}`,
    )

    console.log("")
    console.log(
      chalk.white(
        `${chalk.bold(
          selectedFolder,
        )} is now in sync with git + Cloud Composer.`,
      ),
    )
    console.log("")

    // Print URL in original location but single line
    console.log(chalk.dim(pad("Composer URL", labelWidth)) + link)
    console.log("")

    outro(chalk.green.bold("Deployment Successful!"))
    logger.info("DEPLOY", "Deployment steps completed successfully.")
  } catch (error) {
    logger.error("DEPLOY", `Deployment failed: ${error.message}`)
    s.stop("Deployment Failed ❌", 1)
    console.log(chalk.red("\nError Logs:"))
    // error.all carries the merged stdout+stderr captured by execa.
    if (error.all) {
      console.log(error.all)
    } else {
      console.log(error.message)
      console.log(error)
    }
    cancel("Correct the errors above and try again.")
    process.exit(1)
  }
}
|
package/src/errors.js
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
/**
 * Thrown when the user deliberately aborts an interactive prompt
 * (Ctrl+C / clack cancel). Treated as a graceful exit by the caller.
 * Now accepts the standard ES2022 `options` bag so callers may attach a
 * `cause` — fully backward compatible.
 */
export class UserCancellationError extends Error {
  /**
   * @param {string} [message] - human-readable reason
   * @param {{cause?: unknown}} [options] - optional underlying cause
   */
  constructor(message = "Operation cancelled by user", options = {}) {
    super(message, options)
    this.name = "UserCancellationError"
  }
}
|
|
7
|
+
|
|
8
|
+
/**
 * Thrown when a pre-deployment check fails (e.g. Git state is unsafe).
 * Now accepts the standard ES2022 `options` bag so callers may attach a
 * `cause` — fully backward compatible.
 */
export class ValidationError extends Error {
  /**
   * @param {string} message - what check failed and why
   * @param {{cause?: unknown}} [options] - optional underlying cause
   */
  constructor(message, options = {}) {
    super(message, options)
    this.name = "ValidationError"
  }
}
|
|
14
|
+
|
|
15
|
+
/**
 * Thrown when required configuration (environment variables) is missing.
 * Now accepts the standard ES2022 `options` bag so callers may attach a
 * `cause` — fully backward compatible.
 */
export class ConfigError extends Error {
  /**
   * @param {string} message - which configuration is missing or invalid
   * @param {{cause?: unknown}} [options] - optional underlying cause
   */
  constructor(message, options = {}) {
    super(message, options)
    this.name = "ConfigError"
  }
}
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
import { execa } from "execa"
|
|
2
|
+
import chalk from "chalk"
|
|
3
|
+
import { ValidationError } from "./errors.js"
|
|
4
|
+
|
|
5
|
+
/**
 * Ensure the DAG folder is in a safe Git state to deploy:
 *   A. it is inside a Git work tree,
 *   B. the checked-out branch is the deployment branch,
 *   C. the local branch is exactly in sync with its origin counterpart.
 *
 * Generalized: the deployment branch is now a parameter defaulting to
 * "main", so callers with a different trunk name can reuse this check —
 * default behavior is unchanged.
 *
 * @param {string} sourcePath - path of the DAG folder (a Git work tree)
 * @param {object} s - active clack spinner
 * @param {string} [branch="main"] - required deployment branch
 * @throws {ValidationError} when any of the checks fails
 */
export async function validateGit(sourcePath, s, branch = "main") {
  s.start("Validating Git status...")

  // A. Check if valid git repo
  try {
    await execa("git", [
      "-C",
      sourcePath,
      "rev-parse",
      "--is-inside-work-tree",
    ])
  } catch (e) {
    s.stop("Validation Failed: Not a git repository.", 1)
    throw new ValidationError("The DAG folder must be version controlled.")
  }

  // B. Check Branch Name
  const { stdout: currentBranch } = await execa("git", [
    "-C",
    sourcePath,
    "rev-parse",
    "--abbrev-ref",
    "HEAD",
  ])
  if (currentBranch.trim() !== branch) {
    s.stop(`Validation Failed: You are on branch '${currentBranch.trim()}'.`, 1)
    throw new ValidationError(`You must be on the '${branch}' branch to deploy.`)
  }

  // C. Check Sync Status
  s.message("Checking remote sync status...")
  await execa("git", ["-C", sourcePath, "fetch", "origin", branch])

  const { stdout: localHash } = await execa("git", [
    "-C",
    sourcePath,
    "rev-parse",
    branch,
  ])
  const { stdout: remoteHash } = await execa("git", [
    "-C",
    sourcePath,
    "rev-parse",
    `origin/${branch}`,
  ])

  if (localHash.trim() !== remoteHash.trim()) {
    s.stop(`Validation Failed: Branch is out of sync with origin/${branch}.`, 1)

    // Optional: Show ahead/behind info (best-effort only; failures here
    // must not mask the real validation error below).
    try {
      const { stdout: counts } = await execa("git", [
        "-C",
        sourcePath,
        "rev-list",
        "--left-right",
        "--count",
        `${branch}...origin/${branch}`,
      ])
      const [ahead, behind] = counts.trim().split(/\s+/).map(Number)
      if (ahead > 0)
        console.log(chalk.yellow(` - Ahead by ${ahead} commit(s)`))
      if (behind > 0)
        console.log(chalk.yellow(` - Behind by ${behind} commit(s)`))
    } catch (ignored) {}

    throw new ValidationError("Please pull/push changes before deploying.")
  }

  s.stop(`Git validation passed (${branch} branch, in sync).`)
}
|
package/src/index.js
ADDED
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
import chalk from "chalk"
|
|
4
|
+
import { cancel } from "@clack/prompts"
|
|
5
|
+
import {
|
|
6
|
+
validateEnv,
|
|
7
|
+
DAGS_DIR,
|
|
8
|
+
verbose,
|
|
9
|
+
showVersion,
|
|
10
|
+
showHelp,
|
|
11
|
+
} from "./config.js"
|
|
12
|
+
import {
|
|
13
|
+
showVersionInfo,
|
|
14
|
+
showHelpInfo,
|
|
15
|
+
showIntro,
|
|
16
|
+
createSpinner,
|
|
17
|
+
} from "./cli.js"
|
|
18
|
+
import { scanDags, selectDag } from "./dag_selection.js"
|
|
19
|
+
import { validateGit } from "./git_validation.js"
|
|
20
|
+
import { deployDag } from "./deploy.js"
|
|
21
|
+
import { fetchQuote } from "./quotes.js"
|
|
22
|
+
import { initLogger, logger } from "./logger.js"
|
|
23
|
+
import path from "path"
|
|
24
|
+
|
|
25
|
+
/**
 * Orchestrate the full deployment flow: handle flags, validate environment,
 * scan and select a DAG, validate Git state, deploy, then print a quote.
 *
 * Exit codes: 0 on success or deliberate user cancellation; 1 on validation
 * or configuration failure so wrapper scripts and CI can detect a failed
 * deploy (previously these paths wrongly exited 0).
 */
async function main() {
  try {
    console.clear()
    initLogger(verbose)
    logger.info("INIT", "Skyhook started")

    // Handle --version flag
    if (showVersion) showVersionInfo()

    // Handle --help flag
    if (showHelp) showHelpInfo()

    validateEnv()
    showIntro()

    const s = createSpinner()

    // 1. Scan
    const folders = scanDags(DAGS_DIR, s)

    // 2. Select
    const selectedFolder = await selectDag(folders)
    const sourcePath = path.join(DAGS_DIR, selectedFolder)

    // 3. Validate
    await validateGit(sourcePath, s)

    // 4. Deploy
    await deployDag(selectedFolder, sourcePath, s, verbose)

    // 5. Post-Deployment Polish
    const quote = await fetchQuote()
    if (quote) {
      console.log(chalk.italic.dim(`\n${quote}\n`))
    }
  } catch (error) {
    if (error.name === "UserCancellationError") {
      // Deliberate abort by the user — not a failure.
      cancel(error.message)
      process.exit(0)
    } else if (error.name === "ValidationError") {
      // BUGFIX: a failed validation is an error — exit non-zero so callers
      // (scripts, CI) do not mistake it for a successful deployment.
      cancel(chalk.red(error.message))
      process.exit(1)
    } else if (error.name === "ConfigError") {
      console.log(chalk.red("Configuration Error:"))
      console.log(chalk.yellow(error.message))
      process.exit(1)
    } else {
      throw error // Re-throw unexpected errors
    }
  }
}
|
|
76
|
+
|
|
77
|
+
// Last-resort handler for anything main() re-threw: print a readable error,
// optionally the stack in verbose mode, and exit with a failure code.
main().catch((err) => {
  const description = err.message || err

  console.error(chalk.red("\nUnexpected Error:"))
  console.error(chalk.red(description))

  // Stack traces are noisy; only surface them when the user opted in.
  if (verbose) {
    console.error(chalk.dim(err.stack))
  }

  process.exit(1)
})
|
package/src/logger.js
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import fs from "fs"
|
|
2
|
+
import path from "path"
|
|
3
|
+
import { ROOT_DIR } from "./config.js"
|
|
4
|
+
|
|
5
|
+
// Log file lives in the CLI's target directory (ROOT_DIR is the positional
// argument or the current working directory).
const LOG_FILE = path.join(ROOT_DIR, "skyhook.log")

// Module-level switch set once by initLogger(); every log call is a no-op
// until verbose mode is enabled.
let isVerbose = false

/**
 * Initialize the file logger.
 *
 * In verbose mode the previous run's log file is truncated so each session
 * starts clean; write failures are deliberately ignored (logging is
 * best-effort and must never break the CLI).
 *
 * @param {boolean} [verbose=false] - enable file logging for this run
 */
export function initLogger(verbose = false) {
  isVerbose = verbose
  if (isVerbose) {
    try {
      fs.writeFileSync(LOG_FILE, "")
    } catch (e) {
      // Warning: unable to write logs
    }
  }
}
|
|
19
|
+
|
|
20
|
+
/**
 * Append one timestamped, formatted line to the log file.
 *
 * No-op unless verbose mode was enabled via initLogger(). Write errors are
 * swallowed on purpose — logging must never crash the CLI.
 *
 * @param {string} step - pipeline step tag (e.g. "SCAN", "DEPLOY")
 * @param {string} level - severity tag ("INFO" | "WARN" | "ERROR")
 * @param {string} message - free-form log text
 */
export function log(step, level, message) {
  if (!isVerbose) {
    return
  }

  const line = `[${new Date().toISOString()}][${step}][${level}] ${message}\n`

  try {
    fs.appendFileSync(LOG_FILE, line)
  } catch (e) {
    // Fail silently if logging fails
  }
}
|
|
31
|
+
|
|
32
|
+
/**
 * Convenience wrappers around log() for the three supported severities.
 */
export const logger = {
  info(step, message) {
    log(step, "INFO", message)
  },
  warn(step, message) {
    log(step, "WARN", message)
  },
  error(step, message) {
    log(step, "ERROR", message)
  },
}
|
package/src/quotes.js
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
/**
 * Fetch a short motivational quote from zenquotes.io (post-deployment
 * flourish).
 *
 * Best-effort by design: any network, HTTP, timeout, or parsing failure
 * resolves to null so the deployment flow is never blocked. Improvement:
 * the request now aborts after 3 seconds — previously a hanging API call
 * could stall the CLI indefinitely at exit — and non-2xx responses are
 * rejected before parsing.
 *
 * @returns {Promise<string|null>} formatted `"quote" — author`, or null
 */
export async function fetchQuote() {
  const categories = ["inspiration", "excellence", "truth", "success"]
  const category = categories[Math.floor(Math.random() * categories.length)]

  try {
    const response = await fetch(
      `https://zenquotes.io/api/random/${category}`,
      // Abort after 3s so a slow/hanging API cannot stall the CLI.
      { signal: AbortSignal.timeout(3000) },
    )
    if (!response.ok) {
      return null
    }
    const data = await response.json()
    if (data && data[0] && data[0].q && data[0].a) {
      return `"${data[0].q}" — ${data[0].a}`
    }
  } catch (e) {
    // Quotes are purely cosmetic; swallow failures deliberately.
  }
  return null
}
|
package/src/utils.js
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import fs from "fs"
|
|
2
|
+
import path from "path"
|
|
3
|
+
|
|
4
|
+
export function countFiles(dir) {
|
|
5
|
+
let results = 0
|
|
6
|
+
const list = fs.readdirSync(dir)
|
|
7
|
+
list.forEach((file) => {
|
|
8
|
+
file = path.resolve(dir, file)
|
|
9
|
+
const stat = fs.statSync(file)
|
|
10
|
+
if (stat && stat.isDirectory()) {
|
|
11
|
+
if (!file.includes(".git") && !file.includes("__pycache__")) {
|
|
12
|
+
results += countFiles(file)
|
|
13
|
+
}
|
|
14
|
+
} else {
|
|
15
|
+
results += 1
|
|
16
|
+
}
|
|
17
|
+
})
|
|
18
|
+
return results
|
|
19
|
+
}
|