@orkify/cli 1.0.0-beta.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +191 -0
- package/README.md +1701 -0
- package/bin/orkify +3 -0
- package/boot/systemd/orkify@.service +30 -0
- package/dist/agent-name.d.ts +4 -0
- package/dist/agent-name.js +42 -0
- package/dist/alerts/AlertEvaluator.d.ts +14 -0
- package/dist/alerts/AlertEvaluator.js +135 -0
- package/dist/cli/commands/autostart.d.ts +3 -0
- package/dist/cli/commands/autostart.js +11 -0
- package/dist/cli/commands/crash-test.d.ts +3 -0
- package/dist/cli/commands/crash-test.js +17 -0
- package/dist/cli/commands/daemon-reload.d.ts +3 -0
- package/dist/cli/commands/daemon-reload.js +72 -0
- package/dist/cli/commands/delete.d.ts +3 -0
- package/dist/cli/commands/delete.js +37 -0
- package/dist/cli/commands/deploy.d.ts +6 -0
- package/dist/cli/commands/deploy.js +266 -0
- package/dist/cli/commands/down.d.ts +3 -0
- package/dist/cli/commands/down.js +36 -0
- package/dist/cli/commands/flush.d.ts +3 -0
- package/dist/cli/commands/flush.js +28 -0
- package/dist/cli/commands/kill.d.ts +3 -0
- package/dist/cli/commands/kill.js +35 -0
- package/dist/cli/commands/list.d.ts +14 -0
- package/dist/cli/commands/list.js +361 -0
- package/dist/cli/commands/logs.d.ts +3 -0
- package/dist/cli/commands/logs.js +107 -0
- package/dist/cli/commands/mcp.d.ts +3 -0
- package/dist/cli/commands/mcp.js +151 -0
- package/dist/cli/commands/reload.d.ts +3 -0
- package/dist/cli/commands/reload.js +54 -0
- package/dist/cli/commands/restart.d.ts +3 -0
- package/dist/cli/commands/restart.js +43 -0
- package/dist/cli/commands/restore.d.ts +3 -0
- package/dist/cli/commands/restore.js +88 -0
- package/dist/cli/commands/run.d.ts +8 -0
- package/dist/cli/commands/run.js +212 -0
- package/dist/cli/commands/snap.d.ts +3 -0
- package/dist/cli/commands/snap.js +30 -0
- package/dist/cli/commands/up.d.ts +3 -0
- package/dist/cli/commands/up.js +125 -0
- package/dist/cli/crash-recovery.d.ts +2 -0
- package/dist/cli/crash-recovery.js +67 -0
- package/dist/cli/index.d.ts +3 -0
- package/dist/cli/index.js +46 -0
- package/dist/cli/parse.d.ts +28 -0
- package/dist/cli/parse.js +97 -0
- package/dist/cluster/ClusterWrapper.d.ts +18 -0
- package/dist/cluster/ClusterWrapper.js +602 -0
- package/dist/config/ConfigStore.d.ts +11 -0
- package/dist/config/ConfigStore.js +21 -0
- package/dist/config/schema.d.ts +103 -0
- package/dist/config/schema.js +49 -0
- package/dist/constants.d.ts +83 -0
- package/dist/constants.js +289 -0
- package/dist/cron/CronScheduler.d.ts +25 -0
- package/dist/cron/CronScheduler.js +149 -0
- package/dist/daemon/GracefulManager.d.ts +8 -0
- package/dist/daemon/GracefulManager.js +29 -0
- package/dist/daemon/ManagedProcess.d.ts +71 -0
- package/dist/daemon/ManagedProcess.js +1020 -0
- package/dist/daemon/Orchestrator.d.ts +51 -0
- package/dist/daemon/Orchestrator.js +416 -0
- package/dist/daemon/RotatingWriter.d.ts +27 -0
- package/dist/daemon/RotatingWriter.js +264 -0
- package/dist/daemon/index.d.ts +2 -0
- package/dist/daemon/index.js +106 -0
- package/dist/daemon/startDaemon.d.ts +30 -0
- package/dist/daemon/startDaemon.js +693 -0
- package/dist/deploy/CommandPoller.d.ts +13 -0
- package/dist/deploy/CommandPoller.js +53 -0
- package/dist/deploy/DeployExecutor.d.ts +33 -0
- package/dist/deploy/DeployExecutor.js +340 -0
- package/dist/deploy/config.d.ts +20 -0
- package/dist/deploy/config.js +161 -0
- package/dist/deploy/env.d.ts +2 -0
- package/dist/deploy/env.js +17 -0
- package/dist/deploy/tarball.d.ts +32 -0
- package/dist/deploy/tarball.js +243 -0
- package/dist/detect/framework.d.ts +2 -0
- package/dist/detect/framework.js +24 -0
- package/dist/ipc/DaemonClient.d.ts +31 -0
- package/dist/ipc/DaemonClient.js +248 -0
- package/dist/ipc/DaemonServer.d.ts +28 -0
- package/dist/ipc/DaemonServer.js +166 -0
- package/dist/ipc/MultiUserClient.d.ts +27 -0
- package/dist/ipc/MultiUserClient.js +203 -0
- package/dist/ipc/protocol.d.ts +7 -0
- package/dist/ipc/protocol.js +53 -0
- package/dist/ipc/restoreDaemon.d.ts +8 -0
- package/dist/ipc/restoreDaemon.js +19 -0
- package/dist/machine-id.d.ts +11 -0
- package/dist/machine-id.js +51 -0
- package/dist/mcp/auth.d.ts +118 -0
- package/dist/mcp/auth.js +245 -0
- package/dist/mcp/http.d.ts +20 -0
- package/dist/mcp/http.js +229 -0
- package/dist/mcp/index.d.ts +3 -0
- package/dist/mcp/index.js +8 -0
- package/dist/mcp/server.d.ts +37 -0
- package/dist/mcp/server.js +413 -0
- package/dist/probe/compute-fingerprint.d.ts +27 -0
- package/dist/probe/compute-fingerprint.js +65 -0
- package/dist/probe/parse-frames.d.ts +21 -0
- package/dist/probe/parse-frames.js +57 -0
- package/dist/probe/resolve-sourcemaps.d.ts +25 -0
- package/dist/probe/resolve-sourcemaps.js +281 -0
- package/dist/state/StateStore.d.ts +11 -0
- package/dist/state/StateStore.js +78 -0
- package/dist/telemetry/TelemetryReporter.d.ts +49 -0
- package/dist/telemetry/TelemetryReporter.js +451 -0
- package/dist/types/index.d.ts +373 -0
- package/dist/types/index.js +2 -0
- package/package.json +148 -0
- package/packages/cache/README.md +114 -0
- package/packages/cache/dist/CacheClient.d.ts +26 -0
- package/packages/cache/dist/CacheClient.d.ts.map +1 -0
- package/packages/cache/dist/CacheClient.js +174 -0
- package/packages/cache/dist/CacheClient.js.map +1 -0
- package/packages/cache/dist/CacheFileStore.d.ts +45 -0
- package/packages/cache/dist/CacheFileStore.d.ts.map +1 -0
- package/packages/cache/dist/CacheFileStore.js +446 -0
- package/packages/cache/dist/CacheFileStore.js.map +1 -0
- package/packages/cache/dist/CachePersistence.d.ts +9 -0
- package/packages/cache/dist/CachePersistence.d.ts.map +1 -0
- package/packages/cache/dist/CachePersistence.js +67 -0
- package/packages/cache/dist/CachePersistence.js.map +1 -0
- package/packages/cache/dist/CachePrimary.d.ts +25 -0
- package/packages/cache/dist/CachePrimary.d.ts.map +1 -0
- package/packages/cache/dist/CachePrimary.js +155 -0
- package/packages/cache/dist/CachePrimary.js.map +1 -0
- package/packages/cache/dist/CacheStore.d.ts +50 -0
- package/packages/cache/dist/CacheStore.d.ts.map +1 -0
- package/packages/cache/dist/CacheStore.js +271 -0
- package/packages/cache/dist/CacheStore.js.map +1 -0
- package/packages/cache/dist/constants.d.ts +6 -0
- package/packages/cache/dist/constants.d.ts.map +1 -0
- package/packages/cache/dist/constants.js +9 -0
- package/packages/cache/dist/constants.js.map +1 -0
- package/packages/cache/dist/index.d.ts +16 -0
- package/packages/cache/dist/index.d.ts.map +1 -0
- package/packages/cache/dist/index.js +86 -0
- package/packages/cache/dist/index.js.map +1 -0
- package/packages/cache/dist/serialize.d.ts +9 -0
- package/packages/cache/dist/serialize.d.ts.map +1 -0
- package/packages/cache/dist/serialize.js +40 -0
- package/packages/cache/dist/serialize.js.map +1 -0
- package/packages/cache/dist/types.d.ts +123 -0
- package/packages/cache/dist/types.d.ts.map +1 -0
- package/packages/cache/dist/types.js +2 -0
- package/packages/cache/dist/types.js.map +1 -0
- package/packages/cache/package.json +27 -0
- package/packages/cache/src/CacheClient.ts +227 -0
- package/packages/cache/src/CacheFileStore.ts +528 -0
- package/packages/cache/src/CachePersistence.ts +89 -0
- package/packages/cache/src/CachePrimary.ts +172 -0
- package/packages/cache/src/CacheStore.ts +308 -0
- package/packages/cache/src/constants.ts +10 -0
- package/packages/cache/src/index.ts +100 -0
- package/packages/cache/src/serialize.ts +49 -0
- package/packages/cache/src/types.ts +156 -0
- package/packages/cache/tsconfig.json +18 -0
- package/packages/cache/tsconfig.tsbuildinfo +1 -0
- package/packages/next/README.md +166 -0
- package/packages/next/dist/error-capture.d.ts +34 -0
- package/packages/next/dist/error-capture.d.ts.map +1 -0
- package/packages/next/dist/error-capture.js +130 -0
- package/packages/next/dist/error-capture.js.map +1 -0
- package/packages/next/dist/error-handler.d.ts +10 -0
- package/packages/next/dist/error-handler.d.ts.map +1 -0
- package/packages/next/dist/error-handler.js +186 -0
- package/packages/next/dist/error-handler.js.map +1 -0
- package/packages/next/dist/isr-cache.d.ts +9 -0
- package/packages/next/dist/isr-cache.d.ts.map +1 -0
- package/packages/next/dist/isr-cache.js +86 -0
- package/packages/next/dist/isr-cache.js.map +1 -0
- package/packages/next/dist/stream.d.ts +5 -0
- package/packages/next/dist/stream.d.ts.map +1 -0
- package/packages/next/dist/stream.js +22 -0
- package/packages/next/dist/stream.js.map +1 -0
- package/packages/next/dist/types.d.ts +33 -0
- package/packages/next/dist/types.d.ts.map +1 -0
- package/packages/next/dist/types.js +6 -0
- package/packages/next/dist/types.js.map +1 -0
- package/packages/next/dist/use-cache.d.ts +4 -0
- package/packages/next/dist/use-cache.d.ts.map +1 -0
- package/packages/next/dist/use-cache.js +86 -0
- package/packages/next/dist/use-cache.js.map +1 -0
- package/packages/next/dist/utils.d.ts +32 -0
- package/packages/next/dist/utils.d.ts.map +1 -0
- package/packages/next/dist/utils.js +88 -0
- package/packages/next/dist/utils.js.map +1 -0
- package/packages/next/package.json +52 -0
- package/packages/next/src/error-capture.ts +177 -0
- package/packages/next/src/error-handler.ts +221 -0
- package/packages/next/src/isr-cache.ts +100 -0
- package/packages/next/src/stream.ts +23 -0
- package/packages/next/src/types.ts +33 -0
- package/packages/next/src/use-cache.ts +99 -0
- package/packages/next/src/utils.ts +102 -0
- package/packages/next/tsconfig.json +19 -0
- package/packages/next/tsconfig.tsbuildinfo +1 -0
|
@@ -0,0 +1,243 @@
|
|
|
1
|
+
import ig from 'ignore';
|
|
2
|
+
import { createWriteStream, existsSync, mkdirSync, readdirSync, readFileSync, statSync, } from 'node:fs';
|
|
3
|
+
import { dirname, join, relative, resolve, sep } from 'node:path';
|
|
4
|
+
import { pipeline } from 'node:stream/promises';
|
|
5
|
+
import { createGzip } from 'node:zlib';
|
|
6
|
+
import { pack } from 'tar-stream';
|
|
7
|
+
import { ORKIFY_HOME } from '../constants.js';
|
|
8
|
+
// Always exclude these patterns from deploy tarballs: dependency trees,
// VCS metadata, local env files, and orkify's own state directory are
// never shipped.
const ALWAYS_EXCLUDE = [
    'node_modules',
    '.git',
    '.gitignore',
    '.env',
    '.env.local',
    '.env.production',
    '.env.staging',
    '.orkify',
];
|
|
19
|
+
/**
 * Ascend from `startDir` toward the filesystem root and return the first
 * directory that contains a `.git` entry, or null if none is found.
 */
function findGitRoot(startDir) {
    let current = startDir;
    while (true) {
        if (existsSync(join(current, '.git'))) {
            return current;
        }
        const up = dirname(current);
        if (up === current) {
            return null; // reached the filesystem root
        }
        current = up;
    }
}
|
|
31
|
+
/** Read a .gitignore file and return its non-empty, non-comment lines. */
function readGitignorePatterns(filePath) {
    if (!existsSync(filePath)) {
        return [];
    }
    const patterns = [];
    for (const rawLine of readFileSync(filePath, 'utf-8').split('\n')) {
        const line = rawLine.trim();
        if (line !== '' && !line.startsWith('#')) {
            patterns.push(line);
        }
    }
    return patterns;
}
|
|
40
|
+
/**
 * Collect .gitignore filters from ancestor directories (gitRoot up to,
 * but not including, projectDir). projectDir's own .gitignore is
 * handled during the walk, so it is deliberately not loaded here.
 */
function collectAncestorFilters(projectDir) {
    const gitRoot = findGitRoot(projectDir);
    if (!gitRoot) {
        return [];
    }
    const relPath = relative(gitRoot, projectDir);
    if (!relPath) {
        return []; // projectDir IS the git root
    }
    // Build the ancestor chain: the git root, then every intermediate
    // directory down to (but excluding) projectDir itself.
    const ancestors = [gitRoot];
    const segments = relPath.split(sep);
    let walk = gitRoot;
    for (let i = 0; i < segments.length - 1; i++) {
        walk = join(walk, segments[i]);
        ancestors.push(walk);
    }
    const filters = [];
    for (const dir of ancestors) {
        const patterns = readGitignorePatterns(join(dir, '.gitignore'));
        if (patterns.length > 0) {
            filters.push({ dir, ig: ig().add(patterns) });
        }
    }
    return filters;
}
|
|
70
|
+
/**
 * Return true when absPath matches any of the gitignore filters.
 * A filter only applies to paths strictly inside its base directory;
 * directories are matched with a trailing slash.
 */
function isIgnoredByFilters(absPath, isDir, filters) {
    return filters.some((filter) => {
        const rel = relative(filter.dir, absPath);
        if (rel === '' || rel.startsWith('..')) {
            return false; // outside this filter's scope
        }
        const posixRel = rel.split(sep).join('/');
        return filter.ig.ignores(isDir ? `${posixRel}/` : posixRel);
    });
}
|
|
82
|
+
/**
 * Recursively walk `dir`, returning absolute paths of every regular file,
 * honoring .gitignore files found along the way plus the built-in
 * exclude list (paths tested relative to projectDir).
 */
function walkDirectory(dir, projectDir, parentFilters, builtinIgnore) {
    // Extend the filter chain with this directory's own .gitignore, if any.
    const ownPatterns = readGitignorePatterns(join(dir, '.gitignore'));
    let filters = parentFilters;
    if (ownPatterns.length > 0) {
        filters = [...parentFilters, { dir, ig: ig().add(ownPatterns) }];
    }
    const collected = [];
    for (const entry of readdirSync(dir, { withFileTypes: true })) {
        const absPath = join(dir, entry.name);
        const isDir = entry.isDirectory();
        const relToProject = relative(projectDir, absPath).split(sep).join('/');
        // Built-in excludes (node_modules, .git, env files, …)
        if (builtinIgnore.ignores(isDir ? `${relToProject}/` : relToProject)) {
            continue;
        }
        // .gitignore chains
        if (isIgnoredByFilters(absPath, isDir, filters)) {
            continue;
        }
        if (isDir) {
            collected.push(...walkDirectory(absPath, projectDir, filters, builtinIgnore));
        }
        else if (entry.isFile()) {
            collected.push(absPath);
        }
    }
    return collected;
}
|
|
110
|
+
/**
 * Scan package.json for `file:` dependencies. For each one, walk the
 * referenced directory and collect its files. Returns a rewritten
 * package.json string with paths pointing to `.file-deps/<name>/` and
 * a map of the collected files — or null if there are no file deps.
 *
 * Throws if a `file:` spec points at a directory that does not exist.
 */
export function bundleFileDeps(projectDir) {
    const pkgPath = join(projectDir, 'package.json');
    if (!existsSync(pkgPath))
        return null;
    const pkg = JSON.parse(readFileSync(pkgPath, 'utf-8'));
    // name → { dir, files[] } for every bundled file: dependency
    const fileDeps = new Map();
    // Track original relative path → new .file-deps path for lock file rewriting
    const pathRewrites = new Map();
    for (const section of ['dependencies', 'devDependencies']) {
        const deps = pkg[section];
        if (!deps)
            continue;
        for (const [name, spec] of Object.entries(deps)) {
            if (!spec.startsWith('file:'))
                continue;
            // Strip the 'file:' scheme to get the path portion of the spec.
            const rawRelPath = spec.slice(5);
            const depDir = resolve(projectDir, rawRelPath);
            if (!existsSync(depDir)) {
                throw new Error(`file: dependency "${name}" points to "${depDir}" which does not exist`);
            }
            // Walk the dep dir with the same ignore rules used for the
            // project itself (ancestor .gitignore chains + built-ins).
            const ancestorFilters = collectAncestorFilters(depDir);
            const builtinIgnore = ig().add(ALWAYS_EXCLUDE);
            const files = walkDirectory(depDir, depDir, ancestorFilters, builtinIgnore);
            // Normalize the relative path (strip leading ./) to match lock file keys
            const normalizedRel = relative(projectDir, depDir).split(sep).join('/');
            const newPath = `.file-deps/${name}`;
            // Mutates pkg in place; serialized below as rewrittenPkg.
            deps[name] = `file:${newPath}`;
            pathRewrites.set(normalizedRel, newPath);
            fileDeps.set(name, { dir: depDir, files });
        }
    }
    if (fileDeps.size === 0)
        return null;
    // Rewrite package-lock.json to match the new file: paths
    let rewrittenLock = null;
    const lockPath = join(projectDir, 'package-lock.json');
    if (existsSync(lockPath)) {
        const lock = JSON.parse(readFileSync(lockPath, 'utf-8'));
        const packages = lock.packages;
        if (packages) {
            // Collect key renames first and apply after the scan, so we
            // never mutate the keys of `packages` while iterating it.
            const renames = [];
            for (const [oldRel, newRel] of pathRewrites) {
                // Rewrite the package entry key (e.g. "packages/cache" → ".file-deps/@orkify/cache")
                if (packages[oldRel]) {
                    renames.push([oldRel, newRel]);
                }
                // Rewrite node_modules link entries that resolve to the old path
                for (const [key, val] of Object.entries(packages)) {
                    if (key.startsWith('node_modules/') && val.resolved === oldRel) {
                        val.resolved = newRel;
                    }
                }
            }
            for (const [oldKey, newKey] of renames) {
                packages[newKey] = packages[oldKey];
                delete packages[oldKey];
            }
            // Also rewrite any file: specifiers in nested dependencies
            // NOTE(review): the spec path is compared verbatim against the
            // normalized pathRewrites keys — a "file:./x" spec with a
            // leading "./" would not match. Confirm npm lock files never
            // carry a "./" prefix in these nested specifiers.
            for (const val of Object.values(packages)) {
                const deps = val.dependencies;
                if (!deps)
                    continue;
                for (const [depName, depSpec] of Object.entries(deps)) {
                    if (typeof depSpec === 'string' && depSpec.startsWith('file:')) {
                        const rel = depSpec.slice(5);
                        if (pathRewrites.has(rel)) {
                            deps[depName] = `file:${pathRewrites.get(rel)}`;
                        }
                    }
                }
            }
        }
        rewrittenLock = JSON.stringify(lock, null, 2) + '\n';
    }
    return { rewrittenPkg: JSON.stringify(pkg, null, 2) + '\n', rewrittenLock, fileDeps };
}
|
|
192
|
+
/**
 * Build a gzipped tarball of projectDir under $ORKIFY_HOME/tmp and return
 * its path. Honors .gitignore chains plus ALWAYS_EXCLUDE, and — when the
 * project has `file:` dependencies — bundles them under `.file-deps/`
 * with rewritten package.json / package-lock.json entries.
 *
 * @param projectDir absolute path of the project to archive
 * @param options optional; `excludeSourceMaps` additionally drops *.map files
 * @returns path of the created .tar.gz
 */
export async function createTarball(projectDir, options) {
    const artifactsDir = join(ORKIFY_HOME, 'tmp');
    if (!existsSync(artifactsDir)) {
        mkdirSync(artifactsDir, { recursive: true });
    }
    // Timestamped name keeps concurrent deploys from clobbering each other.
    const tarPath = join(artifactsDir, `deploy-${Date.now()}.tar.gz`);
    const ancestorFilters = collectAncestorFilters(projectDir);
    const excludes = options?.excludeSourceMaps ? [...ALWAYS_EXCLUDE, '*.map'] : ALWAYS_EXCLUDE;
    const builtinIgnore = ig().add(excludes);
    const files = walkDirectory(projectDir, projectDir, ancestorFilters, builtinIgnore);
    // Check for file: deps to bundle
    const bundle = bundleFileDeps(projectDir);
    // tar entries → gzip → file on disk; `done` settles when the whole
    // pipeline flushes (awaited after finalize()).
    const p = pack();
    const gzip = createGzip();
    const output = createWriteStream(tarPath);
    const done = pipeline(p, gzip, output);
    for (const file of files) {
        // Tar entry names are always posix-style, relative to projectDir.
        const rel = relative(projectDir, file).split(sep).join('/');
        // Skip package.json and package-lock.json if we need to rewrite them
        if (bundle && (rel === 'package.json' || (rel === 'package-lock.json' && bundle.rewrittenLock)))
            continue;
        const stat = statSync(file);
        // NOTE(review): each file is read fully into memory (readFileSync)
        // before being added — fine for typical project sizes.
        p.entry({ name: rel, size: stat.size, mode: stat.mode, mtime: stat.mtime }, readFileSync(file));
    }
    if (bundle) {
        // Add rewritten package.json
        const content = Buffer.from(bundle.rewrittenPkg);
        p.entry({ name: 'package.json', size: content.length }, content);
        // Add rewritten package-lock.json
        if (bundle.rewrittenLock) {
            const lockContent = Buffer.from(bundle.rewrittenLock);
            p.entry({ name: 'package-lock.json', size: lockContent.length }, lockContent);
        }
        // Add bundled file: dep contents
        for (const [name, dep] of bundle.fileDeps) {
            for (const file of dep.files) {
                const rel = relative(dep.dir, file).split(sep).join('/');
                const stat = statSync(file);
                p.entry({
                    name: `.file-deps/${name}/${rel}`,
                    size: stat.size,
                    mode: stat.mode,
                    mtime: stat.mtime,
                }, readFileSync(file));
            }
        }
    }
    p.finalize();
    await done;
    return tarPath;
}
|
|
243
|
+
//# sourceMappingURL=tarball.js.map
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import { existsSync, readFileSync } from 'node:fs';
|
|
2
|
+
import { join } from 'node:path';
|
|
3
|
+
const NEXT_CONFIG_FILES = ['next.config.ts', 'next.config.js', 'next.config.mjs'];
/**
 * Detect whether `cwd` hosts a Next.js project. Primary signal is a
 * `next` entry in package.json dependencies/devDependencies; fallback
 * is the presence of a next.config file. Returns 'nextjs' or undefined.
 */
export function detectFramework(cwd) {
    let pkg;
    try {
        pkg = JSON.parse(readFileSync(join(cwd, 'package.json'), 'utf-8'));
    }
    catch {
        pkg = undefined; // missing or malformed package.json — use fallback
    }
    if (pkg && (pkg.dependencies?.next || pkg.devDependencies?.next)) {
        return 'nextjs';
    }
    // Fallback: any next.config.{ts,js,mjs} on disk
    const hasConfig = NEXT_CONFIG_FILES.some((file) => existsSync(join(cwd, file)));
    return hasConfig ? 'nextjs' : undefined;
}
|
|
24
|
+
//# sourceMappingURL=framework.js.map
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
import type { IPCRequest, IPCResponse } from '../types/index.js';
|
|
2
|
+
import { IPCMessageType } from '../constants.js';
|
|
3
|
+
/**
 * IPC client used by the CLI to talk to the orkify daemon over the
 * daemon socket, spawning the daemon on demand when it is not running.
 */
export declare class DaemonClient {
    private socket;
    private messageParser;
    private pendingRequests;
    private streamHandlers;
    private progressHandlers;
    private spawnEnv;
    /** Connect to the daemon, starting it first if necessary. */
    connect(): Promise<void>;
    private configureTelemetry;
    private isDaemonRunning;
    /**
     * Attempt to acquire an exclusive lock for daemon startup.
     * Uses O_EXCL for atomic create-if-not-exists.
     * Returns true if lock acquired, false if another process holds it.
     */
    private acquireLock;
    private releaseLock;
    private startDaemon;
    private handleMessage;
    /** Send a prebuilt request and await its matching response. */
    send(request: IPCRequest, timeoutMs?: number): Promise<IPCResponse>;
    /** Connect (if needed), then send a request of the given type. */
    request(type: (typeof IPCMessageType)[keyof typeof IPCMessageType], payload?: IPCRequest['payload'], timeoutMs?: number): Promise<IPCResponse>;
    /** Like request(), but routes interim progress messages to onProgress. */
    requestWithProgress(type: (typeof IPCMessageType)[keyof typeof IPCMessageType], payload: IPCRequest['payload'], onProgress: (data: unknown) => void, timeoutMs?: number): Promise<IPCResponse>;
    /** Follow a process's log stream; resolves to an unsubscribe function. */
    streamLogs(target: number | string | undefined, onData: (data: unknown) => void): Promise<() => void>;
    private cleanup;
    /** Set extra env vars applied when this client spawns the daemon. */
    setSpawnEnv(env: Record<string, string>): void;
    disconnect(): void;
}
/** Shared client instance used across CLI commands. */
export declare const daemonClient: DaemonClient;
|
|
31
|
+
//# sourceMappingURL=DaemonClient.d.ts.map
|
|
@@ -0,0 +1,248 @@
|
|
|
1
|
+
import { spawn } from 'node:child_process';
|
|
2
|
+
import { closeSync, existsSync, mkdirSync, openSync, readFileSync, unlinkSync, writeFileSync, } from 'node:fs';
|
|
3
|
+
import { connect } from 'node:net';
|
|
4
|
+
import { dirname, join } from 'node:path';
|
|
5
|
+
import { fileURLToPath } from 'node:url';
|
|
6
|
+
import { DAEMON_LOCK_FILE, DAEMON_LOG_FILE, DAEMON_PID_FILE, DAEMON_STARTUP_TIMEOUT, IPC_CONNECT_TIMEOUT, IPC_RESPONSE_TIMEOUT, IPCMessageType, ORKIFY_HOME, SOCKET_PATH, TELEMETRY_DEFAULT_API_HOST, } from '../constants.js';
|
|
7
|
+
import { createMessageParser, createRequest, serialize } from './protocol.js';
|
|
8
|
+
const __filename = fileURLToPath(import.meta.url);
|
|
9
|
+
const __dirname = dirname(__filename);
|
|
10
|
+
/**
 * IPC client for the orkify daemon. Connects over the unix socket at
 * SOCKET_PATH, spawning the daemon (with an exclusive startup lock) if
 * it is not already running, and multiplexes request/response,
 * deploy-progress, and log-stream messages over one connection.
 */
export class DaemonClient {
    // Active socket to the daemon, or null when disconnected.
    socket = null;
    messageParser = createMessageParser();
    // Request id → { resolve, reject, timeout }; settled by handleMessage.
    pendingRequests = new Map();
    // Request id → log-stream callback; long-lived until caller unsubscribes.
    streamHandlers = new Map();
    // Request id → deploy-progress callback; removed with its request.
    progressHandlers = new Map();
    // Extra env vars applied to the next daemon spawn, then reset.
    spawnEnv = {};
    /**
     * Connect to the daemon socket, starting the daemon first when it is
     * not running. Resolves once the socket is connected; rejects on
     * connect error or after IPC_CONNECT_TIMEOUT.
     */
    async connect() {
        if (this.socket) {
            return;
        }
        // Check if daemon is running
        const daemonAlreadyRunning = this.isDaemonRunning();
        if (!daemonAlreadyRunning) {
            await this.startDaemon();
        }
        await new Promise((resolve, reject) => {
            const timeout = setTimeout(() => {
                this.socket?.destroy();
                this.cleanup();
                reject(new Error('Connection timeout'));
            }, IPC_CONNECT_TIMEOUT);
            this.socket = connect(SOCKET_PATH);
            this.socket.on('connect', () => {
                clearTimeout(timeout);
                resolve();
            });
            this.socket.on('data', (chunk) => {
                // The parser reassembles framed messages across chunks.
                const messages = this.messageParser(chunk);
                for (const message of messages) {
                    this.handleMessage(message);
                }
            });
            this.socket.on('error', (err) => {
                clearTimeout(timeout);
                this.cleanup();
                reject(err);
            });
            this.socket.on('close', () => {
                this.cleanup();
            });
        });
        // If we connected to an already-running daemon and the CLI has
        // ORKIFY_API_KEY, push the telemetry config so the daemon can
        // enable telemetry even if it started without it.
        if (daemonAlreadyRunning && process.env.ORKIFY_API_KEY) {
            this.configureTelemetry();
        }
    }
    /** Push telemetry credentials to the daemon (fire-and-forget). */
    configureTelemetry() {
        if (!this.socket)
            return;
        const req = createRequest(IPCMessageType.CONFIGURE_TELEMETRY, {
            apiKey: process.env.ORKIFY_API_KEY,
            apiHost: process.env.ORKIFY_API_HOST || TELEMETRY_DEFAULT_API_HOST,
        });
        // Fire-and-forget — don't block the caller waiting for a response
        this.socket.write(serialize(req));
    }
    /**
     * True when the daemon socket exists AND the PID recorded in the PID
     * file belongs to a live process (signal 0 probe).
     */
    isDaemonRunning() {
        if (!existsSync(SOCKET_PATH)) {
            return false;
        }
        if (existsSync(DAEMON_PID_FILE)) {
            try {
                const pid = parseInt(readFileSync(DAEMON_PID_FILE, 'utf-8').trim(), 10);
                process.kill(pid, 0); // throws if the process is gone
                return true;
            }
            catch {
                return false;
            }
        }
        return false;
    }
    /**
     * Attempt to acquire an exclusive lock for daemon startup.
     * Uses O_EXCL for atomic create-if-not-exists.
     * Returns true if lock acquired, false if another process holds it.
     */
    acquireLock() {
        try {
            const fd = openSync(DAEMON_LOCK_FILE, 'wx');
            writeFileSync(fd, String(process.pid));
            closeSync(fd);
            return true;
        }
        catch {
            // Lock file exists — check if holder is still alive
            try {
                const holderPid = parseInt(readFileSync(DAEMON_LOCK_FILE, 'utf-8').trim(), 10);
                process.kill(holderPid, 0); // throws if dead
                return false; // holder is alive
            }
            catch {
                // Holder is dead — take over stale lock
                try {
                    unlinkSync(DAEMON_LOCK_FILE);
                    const fd = openSync(DAEMON_LOCK_FILE, 'wx');
                    writeFileSync(fd, String(process.pid));
                    closeSync(fd);
                    return true;
                }
                catch {
                    return false; // race with another takeover
                }
            }
        }
    }
    /** Remove the startup lock file, ignoring errors. */
    releaseLock() {
        try {
            unlinkSync(DAEMON_LOCK_FILE);
        }
        catch {
            // Ignore — may already be cleaned up
        }
    }
    /**
     * Spawn the daemon as a detached child and wait for its socket to
     * appear. If another process already holds the startup lock, just
     * wait for that process's daemon socket instead.
     */
    async startDaemon() {
        if (!existsSync(ORKIFY_HOME)) {
            mkdirSync(ORKIFY_HOME, { recursive: true });
        }
        if (!this.acquireLock()) {
            // Another process is spawning the daemon — wait for socket instead
            const startTime = Date.now();
            while (Date.now() - startTime < DAEMON_STARTUP_TIMEOUT) {
                if (existsSync(SOCKET_PATH)) {
                    // Brief grace period so the daemon finishes binding.
                    await new Promise((resolve) => setTimeout(resolve, 100));
                    return;
                }
                await new Promise((resolve) => setTimeout(resolve, 50));
            }
            throw new Error('Daemon failed to start (waited for lock holder)');
        }
        try {
            const daemonScript = join(__dirname, '..', 'daemon', 'index.js');
            const logFd = openSync(DAEMON_LOG_FILE, 'a');
            const child = spawn(process.execPath, [daemonScript], {
                detached: true,
                stdio: ['ignore', logFd, logFd],
                env: { ...process.env, ...this.spawnEnv },
            });
            this.spawnEnv = {}; // one-shot: consumed by this spawn
            child.unref();
            // Wait for socket to be available
            const startTime = Date.now();
            while (Date.now() - startTime < DAEMON_STARTUP_TIMEOUT) {
                if (existsSync(SOCKET_PATH)) {
                    await new Promise((resolve) => setTimeout(resolve, 100));
                    return;
                }
                await new Promise((resolve) => setTimeout(resolve, 50));
            }
            throw new Error('Daemon failed to start');
        }
        finally {
            this.releaseLock();
        }
    }
    /**
     * Route an incoming message: progress update, pending-request
     * response, or log-stream data — in that order of precedence.
     */
    handleMessage(message) {
        const response = message;
        // Deploy progress — emit to handler without resolving the pending request
        if (message.type === IPCMessageType.DEPLOY_PROGRESS) {
            const handler = this.progressHandlers.get(response.id);
            handler?.(response.data);
            return;
        }
        const pending = this.pendingRequests.get(response.id);
        if (pending) {
            clearTimeout(pending.timeout);
            this.pendingRequests.delete(response.id);
            this.progressHandlers.delete(response.id);
            pending.resolve(response);
            return;
        }
        // Check for stream handlers (for logs)
        const streamHandler = this.streamHandlers.get(response.id);
        if (streamHandler && message.type === IPCMessageType.LOG_DATA) {
            streamHandler(response.data);
        }
    }
    /**
     * Send a prebuilt request over the connected socket and await the
     * matching response. Rejects with 'Request timeout' after timeoutMs
     * (default IPC_RESPONSE_TIMEOUT).
     */
    async send(request, timeoutMs) {
        const socket = this.socket;
        if (!socket) {
            throw new Error('Not connected to daemon');
        }
        return new Promise((resolve, reject) => {
            const timeout = setTimeout(() => {
                this.pendingRequests.delete(request.id);
                // Fix: drop any progress handler registered for this request,
                // otherwise an abandoned requestWithProgress() leaks its entry.
                this.progressHandlers.delete(request.id);
                reject(new Error('Request timeout'));
            }, timeoutMs ?? IPC_RESPONSE_TIMEOUT);
            this.pendingRequests.set(request.id, { resolve, reject, timeout });
            socket.write(serialize(request));
        });
    }
    /** Connect (if needed), then send a request of the given type. */
    async request(type, payload, timeoutMs) {
        await this.connect();
        const request = createRequest(type, payload);
        return this.send(request, timeoutMs);
    }
    /**
     * Like request(), but interim DEPLOY_PROGRESS messages for this
     * request id are routed to onProgress until the final response.
     */
    async requestWithProgress(type, payload, onProgress, timeoutMs) {
        await this.connect();
        const request = createRequest(type, payload);
        this.progressHandlers.set(request.id, onProgress);
        return this.send(request, timeoutMs);
    }
    /**
     * Subscribe to a log stream for `target` (pid or name; undefined for
     * all). Returns an unsubscribe function that stops delivery.
     */
    async streamLogs(target, onData) {
        await this.connect();
        const socket = this.socket;
        if (!socket) {
            throw new Error('Not connected to daemon');
        }
        const request = createRequest(IPCMessageType.LOGS, { target, follow: true });
        this.streamHandlers.set(request.id, onData);
        socket.write(serialize(request));
        return () => {
            this.streamHandlers.delete(request.id);
        };
    }
    /** Tear down connection state and fail all in-flight requests. */
    cleanup() {
        for (const pending of this.pendingRequests.values()) {
            clearTimeout(pending.timeout);
            pending.reject(new Error('Connection closed'));
        }
        this.pendingRequests.clear();
        this.streamHandlers.clear();
        // Fix: progress handlers can never fire without a socket — clear
        // them too so abandoned entries don't accumulate across reconnects.
        this.progressHandlers.clear();
        this.socket = null;
    }
    /** Set extra env vars applied to the next daemon spawn. */
    setSpawnEnv(env) {
        this.spawnEnv = env;
    }
    /** Gracefully close the socket and reset client state. */
    disconnect() {
        if (this.socket) {
            this.socket.end();
            this.cleanup();
        }
    }
}
|
|
247
|
+
// Module-level singleton shared by all importers of this file.
export const daemonClient = new DaemonClient();
|
|
248
|
+
//# sourceMappingURL=DaemonClient.js.map
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
import { type Socket } from 'node:net';
|
|
2
|
+
import type { IPCRequest, IPCResponse } from '../types/index.js';
|
|
3
|
+
/** Handler invoked per parsed IPC request; may respond asynchronously. */
export type RequestHandler = (request: IPCRequest, client: ClientConnection) => IPCResponse | Promise<IPCResponse>;
/**
 * Wrapper around one connected client socket: serializes outgoing
 * responses and parses incoming request messages.
 */
export declare class ClientConnection {
    private socket;
    private messageParser;
    constructor(socket: Socket);
    /** Serialize and write a response to this client. */
    send(message: IPCResponse): void;
    /** Register a callback receiving each batch of parsed requests. */
    onData(handler: (messages: IPCRequest[]) => void): void;
    onClose(handler: () => void): void;
    onError(handler: (err: Error) => void): void;
    close(): void;
}
|
|
14
|
+
/**
 * IPC server run inside the daemon: dispatches requests to registered
 * per-type handlers and fans log data out to subscribed clients.
 */
export declare class DaemonServer {
    private server;
    private clients;
    private handlers;
    private logSubscribers;
    /** Register the handler for a given request type string. */
    registerHandler(type: string, handler: RequestHandler): void;
    /** Subscribe a client connection to a process's log stream. */
    subscribeToLogs(processName: string, client: ClientConnection, requestId: string): void;
    unsubscribeFromLogs(processName: string, client: ClientConnection): void;
    /** Push a log payload to every subscriber of processName. */
    broadcastLog(processName: string, data: unknown): void;
    start(): Promise<void>;
    private handleRequest;
    /** Send a message to every connected client. */
    broadcast(message: IPCResponse): void;
    stop(): Promise<void>;
}
|
|
28
|
+
//# sourceMappingURL=DaemonServer.d.ts.map
|