trigger.dev 3.0.0-beta.2 → 3.0.0-beta.21
This diff compares the contents of publicly released versions of the package as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions exactly as they appear in their public registries.
- package/dist/Containerfile.prod +15 -4
- package/dist/index.js +1748 -608
- package/dist/index.js.map +1 -1
- package/dist/templates/trigger.config.ts.template +2 -1
- package/dist/workers/dev/worker-facade.js +51 -54
- package/dist/workers/dev/worker-setup.js +8 -3
- package/dist/workers/prod/entry-point.js +99 -43
- package/dist/workers/prod/worker-facade.js +62 -54
- package/dist/workers/prod/worker-setup.js +5 -3
- package/package.json +7 -13
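Among the changes visible in the index.js diff below, the login flow now short-circuits when a personal access token is supplied via the TRIGGER_ACCESS_TOKEN environment variable (with TRIGGER_API_URL as an optional override), which allows non-interactive use. Below is a minimal sketch of driving the CLI that way from a script; the wrapper itself and the choice of subcommand are illustrative assumptions, and only the environment variable names come from the diff.

// Hypothetical wrapper: run the CLI with a token taken from the environment instead
// of the browser-based login flow. Only TRIGGER_ACCESS_TOKEN and TRIGGER_API_URL are
// taken from the diff below; this script and the subcommand are illustrative only.
import { execa } from "execa";

async function runCli() {
  await execa("npx", ["trigger.dev@beta", "whoami"], {
    env: {
      TRIGGER_ACCESS_TOKEN: process.env.TRIGGER_ACCESS_TOKEN ?? "",
      TRIGGER_API_URL: process.env.TRIGGER_API_URL ?? "https://api.trigger.dev",
    },
    stdio: "inherit", // stream the CLI output straight through
  });
}

runCli().catch((error) => {
  console.error(error);
  process.exit(1);
});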
package/dist/index.js
CHANGED
|
@@ -57,7 +57,7 @@ var require_XDGAppPaths = __commonJS({
|
|
|
57
57
|
return typeof t;
|
|
58
58
|
}
|
|
59
59
|
function Adapt(adapter_) {
|
|
60
|
-
var meta = adapter_.meta,
|
|
60
|
+
var meta = adapter_.meta, path7 = adapter_.path, xdg = adapter_.xdg;
|
|
61
61
|
var XDGAppPaths_ = /* @__PURE__ */ function() {
|
|
62
62
|
function XDGAppPaths_2(options_) {
|
|
63
63
|
if (options_ === void 0) {
|
|
@@ -79,7 +79,7 @@ var require_XDGAppPaths = __commonJS({
|
|
|
79
79
|
meta.mainFilename()
|
|
80
80
|
];
|
|
81
81
|
var nameFallback = "$eval";
|
|
82
|
-
var name =
|
|
82
|
+
var name = path7.parse(((_c = namePriorityList.find(function(e) {
|
|
83
83
|
return isString(e);
|
|
84
84
|
})) !== null && _c !== void 0 ? _c : nameFallback) + suffix).name;
|
|
85
85
|
XDGAppPaths.$name = function $name() {
|
|
@@ -98,28 +98,28 @@ var require_XDGAppPaths = __commonJS({
|
|
|
98
98
|
return isIsolated(dirOptions) ? name : "";
|
|
99
99
|
}
|
|
100
100
|
XDGAppPaths.cache = function cache(dirOptions) {
|
|
101
|
-
return
|
|
101
|
+
return path7.join(xdg.cache(), finalPathSegment(dirOptions));
|
|
102
102
|
};
|
|
103
103
|
XDGAppPaths.config = function config(dirOptions) {
|
|
104
|
-
return
|
|
104
|
+
return path7.join(xdg.config(), finalPathSegment(dirOptions));
|
|
105
105
|
};
|
|
106
106
|
XDGAppPaths.data = function data(dirOptions) {
|
|
107
|
-
return
|
|
107
|
+
return path7.join(xdg.data(), finalPathSegment(dirOptions));
|
|
108
108
|
};
|
|
109
109
|
XDGAppPaths.runtime = function runtime(dirOptions) {
|
|
110
|
-
return xdg.runtime() ?
|
|
110
|
+
return xdg.runtime() ? path7.join(xdg.runtime(), finalPathSegment(dirOptions)) : void 0;
|
|
111
111
|
};
|
|
112
112
|
XDGAppPaths.state = function state(dirOptions) {
|
|
113
|
-
return
|
|
113
|
+
return path7.join(xdg.state(), finalPathSegment(dirOptions));
|
|
114
114
|
};
|
|
115
115
|
XDGAppPaths.configDirs = function configDirs(dirOptions) {
|
|
116
116
|
return xdg.configDirs().map(function(s) {
|
|
117
|
-
return
|
|
117
|
+
return path7.join(s, finalPathSegment(dirOptions));
|
|
118
118
|
});
|
|
119
119
|
};
|
|
120
120
|
XDGAppPaths.dataDirs = function dataDirs(dirOptions) {
|
|
121
121
|
return xdg.dataDirs().map(function(s) {
|
|
122
|
-
return
|
|
122
|
+
return path7.join(s, finalPathSegment(dirOptions));
|
|
123
123
|
});
|
|
124
124
|
};
|
|
125
125
|
return XDGAppPaths;
|
|
@@ -144,14 +144,14 @@ var require_XDG = __commonJS({
|
|
|
144
144
|
exports.__esModule = true;
|
|
145
145
|
exports.Adapt = void 0;
|
|
146
146
|
function Adapt(adapter_) {
|
|
147
|
-
var env = adapter_.env, osPaths = adapter_.osPaths,
|
|
147
|
+
var env = adapter_.env, osPaths = adapter_.osPaths, path7 = adapter_.path;
|
|
148
148
|
var isMacOS = /^darwin$/i.test(adapter_.process.platform);
|
|
149
149
|
var isWinOS = /^win/i.test(adapter_.process.platform);
|
|
150
150
|
function baseDir() {
|
|
151
151
|
return osPaths.home() || osPaths.temp();
|
|
152
152
|
}
|
|
153
153
|
function valOrPath(val, pathSegments) {
|
|
154
|
-
return val ||
|
|
154
|
+
return val || path7.join.apply(path7, pathSegments);
|
|
155
155
|
}
|
|
156
156
|
var linux = function() {
|
|
157
157
|
var cache = function() {
|
|
@@ -226,11 +226,11 @@ var require_XDG = __commonJS({
|
|
|
226
226
|
XDG.state = extension.state;
|
|
227
227
|
XDG.configDirs = function configDirs() {
|
|
228
228
|
var pathList = env.get("XDG_CONFIG_DIRS");
|
|
229
|
-
return __spreadArray([extension.config()], pathList ? pathList.split(
|
|
229
|
+
return __spreadArray([extension.config()], pathList ? pathList.split(path7.delimiter) : []);
|
|
230
230
|
};
|
|
231
231
|
XDG.dataDirs = function dataDirs() {
|
|
232
232
|
var pathList = env.get("XDG_DATA_DIRS");
|
|
233
|
-
return __spreadArray([extension.data()], pathList ? pathList.split(
|
|
233
|
+
return __spreadArray([extension.data()], pathList ? pathList.split(path7.delimiter) : []);
|
|
234
234
|
};
|
|
235
235
|
return XDG;
|
|
236
236
|
}
|
|
@@ -257,13 +257,13 @@ var require_OSPaths = __commonJS({
|
|
|
257
257
|
return !s;
|
|
258
258
|
}
|
|
259
259
|
function Adapt(adapter_) {
|
|
260
|
-
var env = adapter_.env, os2 = adapter_.os,
|
|
260
|
+
var env = adapter_.env, os2 = adapter_.os, path7 = adapter_.path;
|
|
261
261
|
var isWinOS = /^win/i.test(adapter_.process.platform);
|
|
262
262
|
function normalizePath(path_) {
|
|
263
263
|
return path_ ? adapter_.path.normalize(adapter_.path.join(path_, ".")) : void 0;
|
|
264
264
|
}
|
|
265
265
|
function home() {
|
|
266
|
-
var
|
|
266
|
+
var posix2 = function() {
|
|
267
267
|
return normalizePath((typeof os2.homedir === "function" ? os2.homedir() : void 0) || env.get("HOME"));
|
|
268
268
|
};
|
|
269
269
|
var windows = function() {
|
|
@@ -271,19 +271,19 @@ var require_OSPaths = __commonJS({
|
|
|
271
271
|
typeof os2.homedir === "function" ? os2.homedir() : void 0,
|
|
272
272
|
env.get("USERPROFILE"),
|
|
273
273
|
env.get("HOME"),
|
|
274
|
-
env.get("HOMEDRIVE") || env.get("HOMEPATH") ?
|
|
274
|
+
env.get("HOMEDRIVE") || env.get("HOMEPATH") ? path7.join(env.get("HOMEDRIVE") || "", env.get("HOMEPATH") || "") : void 0
|
|
275
275
|
];
|
|
276
276
|
return normalizePath(priorityList.find(function(v) {
|
|
277
277
|
return !isEmpty(v);
|
|
278
278
|
}));
|
|
279
279
|
};
|
|
280
|
-
return isWinOS ? windows() :
|
|
280
|
+
return isWinOS ? windows() : posix2();
|
|
281
281
|
}
|
|
282
282
|
function temp() {
|
|
283
283
|
function joinPathToBase(base, segments) {
|
|
284
|
-
return base ?
|
|
284
|
+
return base ? path7.join.apply(path7, __spreadArray([base], segments)) : void 0;
|
|
285
285
|
}
|
|
286
|
-
function
|
|
286
|
+
function posix2() {
|
|
287
287
|
var fallback = "/tmp";
|
|
288
288
|
var priorityList = [
|
|
289
289
|
typeof os2.tmpdir === "function" ? os2.tmpdir() : void 0,
|
|
@@ -331,7 +331,7 @@ var require_OSPaths = __commonJS({
|
|
|
331
331
|
});
|
|
332
332
|
return v && normalizePath(v()) || fallback;
|
|
333
333
|
}
|
|
334
|
-
return isWinOS ? windows() :
|
|
334
|
+
return isWinOS ? windows() : posix2();
|
|
335
335
|
}
|
|
336
336
|
var OSPaths_ = /* @__PURE__ */ function() {
|
|
337
337
|
function OSPaths_2() {
|
|
@@ -385,7 +385,7 @@ var require_node = __commonJS({
|
|
|
385
385
|
exports.__esModule = true;
|
|
386
386
|
exports.adapter = void 0;
|
|
387
387
|
var os2 = __importStar(__require("os"));
|
|
388
|
-
var
|
|
388
|
+
var path7 = __importStar(__require("path"));
|
|
389
389
|
exports.adapter = {
|
|
390
390
|
atImportPermissions: { env: true },
|
|
391
391
|
env: {
|
|
@@ -394,7 +394,7 @@ var require_node = __commonJS({
|
|
|
394
394
|
}
|
|
395
395
|
},
|
|
396
396
|
os: os2,
|
|
397
|
-
path:
|
|
397
|
+
path: path7,
|
|
398
398
|
process
|
|
399
399
|
};
|
|
400
400
|
}
|
|
@@ -447,7 +447,7 @@ var require_node2 = __commonJS({
|
|
|
447
447
|
};
|
|
448
448
|
exports.__esModule = true;
|
|
449
449
|
exports.adapter = void 0;
|
|
450
|
-
var
|
|
450
|
+
var path7 = __importStar(__require("path"));
|
|
451
451
|
var os_paths_1 = __importDefault(require_mod_cjs());
|
|
452
452
|
exports.adapter = {
|
|
453
453
|
atImportPermissions: { env: true },
|
|
@@ -457,7 +457,7 @@ var require_node2 = __commonJS({
|
|
|
457
457
|
}
|
|
458
458
|
},
|
|
459
459
|
osPaths: os_paths_1["default"],
|
|
460
|
-
path:
|
|
460
|
+
path: path7,
|
|
461
461
|
process
|
|
462
462
|
};
|
|
463
463
|
}
|
|
@@ -510,7 +510,7 @@ var require_node3 = __commonJS({
|
|
|
510
510
|
};
|
|
511
511
|
exports.__esModule = true;
|
|
512
512
|
exports.adapter = void 0;
|
|
513
|
-
var
|
|
513
|
+
var path7 = __importStar(__require("path"));
|
|
514
514
|
var xdg_portable_1 = __importDefault(require_mod_cjs2());
|
|
515
515
|
exports.adapter = {
|
|
516
516
|
atImportPermissions: { env: true, read: true },
|
|
@@ -525,7 +525,7 @@ var require_node3 = __commonJS({
|
|
|
525
525
|
return process.pkg ? process.execPath : void 0;
|
|
526
526
|
}
|
|
527
527
|
},
|
|
528
|
-
path:
|
|
528
|
+
path: path7,
|
|
529
529
|
process,
|
|
530
530
|
xdg: xdg_portable_1["default"]
|
|
531
531
|
};
|
|
@@ -777,31 +777,31 @@ var require_retry2 = __commonJS({
|
|
|
777
777
|
import { Command as Command2 } from "commander";
|
|
778
778
|
|
|
779
779
|
// src/commands/deploy.ts
|
|
780
|
-
import { intro as
|
|
780
|
+
import { intro as intro4, log as log5, outro as outro5 } from "@clack/prompts";
|
|
781
781
|
import { depot } from "@depot/cli";
|
|
782
782
|
import { context, trace as trace2 } from "@opentelemetry/api";
|
|
783
783
|
import {
|
|
784
|
+
TaskMetadataFailedToParseData,
|
|
784
785
|
detectDependencyVersion,
|
|
785
|
-
flattenAttributes as flattenAttributes2
|
|
786
|
-
recordSpanException as recordSpanException4
|
|
786
|
+
flattenAttributes as flattenAttributes2
|
|
787
787
|
} from "@trigger.dev/core/v3";
|
|
788
|
-
import
|
|
788
|
+
import { recordSpanException as recordSpanException4 } from "@trigger.dev/core/v3/workers";
|
|
789
789
|
import { Option as CommandOption } from "commander";
|
|
790
790
|
import { build as build2 } from "esbuild";
|
|
791
791
|
import { execa as execa2 } from "execa";
|
|
792
|
-
import { resolve as importResolve } from "import-meta-resolve";
|
|
793
792
|
import { createHash } from "node:crypto";
|
|
794
793
|
import { readFileSync as readFileSync2 } from "node:fs";
|
|
795
794
|
import { copyFile, mkdir, readFile as readFile2, writeFile as writeFile2 } from "node:fs/promises";
|
|
796
|
-
import { dirname, join as
|
|
795
|
+
import { dirname, join as join6, posix, relative as relative3 } from "node:path";
|
|
797
796
|
import { setTimeout as setTimeout2 } from "node:timers/promises";
|
|
798
|
-
import
|
|
797
|
+
import terminalLink2 from "terminal-link";
|
|
799
798
|
import invariant from "tiny-invariant";
|
|
800
799
|
import { z as z4 } from "zod";
|
|
801
800
|
|
|
802
801
|
// package.json
|
|
803
|
-
var version = "3.0.0-beta.
|
|
802
|
+
var version = "3.0.0-beta.21";
|
|
804
803
|
var dependencies = {
|
|
804
|
+
"@anatine/esbuild-decorators": "^0.2.19",
|
|
805
805
|
"@clack/prompts": "^0.7.0",
|
|
806
806
|
"@depot/cli": "0.0.1-cli.2.55.0",
|
|
807
807
|
"@opentelemetry/api": "^1.8.0",
|
|
@@ -816,7 +816,7 @@ var dependencies = {
|
|
|
816
816
|
"@opentelemetry/sdk-trace-base": "^1.22.0",
|
|
817
817
|
"@opentelemetry/sdk-trace-node": "^1.22.0",
|
|
818
818
|
"@opentelemetry/semantic-conventions": "^1.22.0",
|
|
819
|
-
"@trigger.dev/core": "workspace
|
|
819
|
+
"@trigger.dev/core": "workspace:3.0.0-beta.21",
|
|
820
820
|
"@types/degit": "^2.8.3",
|
|
821
821
|
chalk: "^5.2.0",
|
|
822
822
|
chokidar: "^3.5.3",
|
|
@@ -833,12 +833,10 @@ var dependencies = {
|
|
|
833
833
|
"import-meta-resolve": "^4.0.0",
|
|
834
834
|
ink: "^4.4.1",
|
|
835
835
|
"jsonc-parser": "^3.2.1",
|
|
836
|
-
jsonlines: "^0.1.1",
|
|
837
836
|
liquidjs: "^10.9.2",
|
|
838
837
|
"mock-fs": "^5.2.0",
|
|
839
838
|
nanoid: "^4.0.2",
|
|
840
839
|
"node-fetch": "^3.3.0",
|
|
841
|
-
"npm-check-updates": "^16.12.2",
|
|
842
840
|
"object-hash": "^3.0.0",
|
|
843
841
|
"p-debounce": "^4.0.0",
|
|
844
842
|
"p-throttle": "^6.1.0",
|
|
@@ -850,7 +848,6 @@ var dependencies = {
|
|
|
850
848
|
"simple-git": "^3.19.0",
|
|
851
849
|
"socket.io-client": "^4.7.4",
|
|
852
850
|
"source-map-support": "^0.5.21",
|
|
853
|
-
"supports-color": "^9.4.0",
|
|
854
851
|
"terminal-link": "^3.0.0",
|
|
855
852
|
"tiny-invariant": "^1.2.0",
|
|
856
853
|
"tsconfig-paths": "^4.2.0",
|
|
@@ -894,13 +891,12 @@ var package_default = {
|
|
|
894
891
|
type: "module",
|
|
895
892
|
exports: "./dist/index.js",
|
|
896
893
|
bin: {
|
|
897
|
-
|
|
894
|
+
triggerdev: "./dist/index.js"
|
|
898
895
|
},
|
|
899
896
|
devDependencies: {
|
|
900
897
|
"@trigger.dev/core-apps": "workspace:*",
|
|
901
898
|
"@trigger.dev/tsconfig": "workspace:*",
|
|
902
899
|
"@types/gradient-string": "^1.1.2",
|
|
903
|
-
"@types/jsonlines": "^0.1.5",
|
|
904
900
|
"@types/mock-fs": "^4.13.1",
|
|
905
901
|
"@types/node": "18",
|
|
906
902
|
"@types/object-hash": "^3.0.6",
|
|
@@ -908,8 +904,8 @@ var package_default = {
|
|
|
908
904
|
"@types/semver": "^7.3.13",
|
|
909
905
|
"@types/ws": "^8.5.3",
|
|
910
906
|
"cpy-cli": "^5.0.0",
|
|
907
|
+
nodemon: "^3.0.1",
|
|
911
908
|
"npm-run-all": "^4.1.5",
|
|
912
|
-
"npm-watch": "^0.11.0",
|
|
913
909
|
open: "^10.0.3",
|
|
914
910
|
"p-retry": "^6.1.0",
|
|
915
911
|
rimraf: "^3.0.2",
|
|
@@ -919,9 +915,6 @@ var package_default = {
|
|
|
919
915
|
vitest: "^0.34.4",
|
|
920
916
|
"xdg-app-paths": "^8.3.0"
|
|
921
917
|
},
|
|
922
|
-
watch: {
|
|
923
|
-
"build:prod-containerfile": "src/Containerfile.prod"
|
|
924
|
-
},
|
|
925
918
|
scripts: {
|
|
926
919
|
typecheck: "tsc -p tsconfig.check.json",
|
|
927
920
|
build: "npm run clean && run-p build:**",
|
|
@@ -931,7 +924,7 @@ var package_default = {
|
|
|
931
924
|
dev: "npm run clean && run-p dev:**",
|
|
932
925
|
"dev:main": "tsup --watch",
|
|
933
926
|
"dev:workers": "tsup --config tsup.workers.config.ts --watch",
|
|
934
|
-
"dev:
|
|
927
|
+
"dev:test": "nodemon -w src/Containerfile.prod -x npm run build:prod-containerfile",
|
|
935
928
|
clean: "rimraf dist",
|
|
936
929
|
start: "node dist/index.js",
|
|
937
930
|
test: "vitest"
|
|
@@ -1147,7 +1140,8 @@ async function zodfetch(schema, url, requestInit) {
|
|
|
1147
1140
|
}
|
|
1148
1141
|
|
|
1149
1142
|
// src/cli/common.ts
|
|
1150
|
-
import { flattenAttributes
|
|
1143
|
+
import { flattenAttributes } from "@trigger.dev/core/v3";
|
|
1144
|
+
import { recordSpanException } from "@trigger.dev/core/v3/workers";
|
|
1151
1145
|
import { z } from "zod";
|
|
1152
1146
|
|
|
1153
1147
|
// src/telemetry/tracing.ts
|
|
@@ -1157,6 +1151,10 @@ import { Resource, detectResourcesSync, processDetectorSync } from "@opentelemet
|
|
|
1157
1151
|
import { NodeTracerProvider, SimpleSpanProcessor } from "@opentelemetry/sdk-trace-node";
|
|
1158
1152
|
import { FetchInstrumentation } from "@opentelemetry/instrumentation-fetch";
|
|
1159
1153
|
import { DiagConsoleLogger, DiagLogLevel, diag, trace } from "@opentelemetry/api";
|
|
1154
|
+
import {
|
|
1155
|
+
SEMRESATTRS_SERVICE_NAME,
|
|
1156
|
+
SEMRESATTRS_SERVICE_VERSION
|
|
1157
|
+
} from "@opentelemetry/semantic-conventions";
|
|
1160
1158
|
function initializeTracing() {
|
|
1161
1159
|
if (process.argv.includes("--skip-telemetry") || process.env.TRIGGER_DEV_SKIP_TELEMETRY) {
|
|
1162
1160
|
return;
|
|
@@ -1168,7 +1166,8 @@ function initializeTracing() {
|
|
|
1168
1166
|
detectors: [processDetectorSync]
|
|
1169
1167
|
}).merge(
|
|
1170
1168
|
new Resource({
|
|
1171
|
-
|
|
1169
|
+
[SEMRESATTRS_SERVICE_NAME]: "trigger.dev cli v3",
|
|
1170
|
+
[SEMRESATTRS_SERVICE_VERSION]: version
|
|
1172
1171
|
})
|
|
1173
1172
|
);
|
|
1174
1173
|
const traceProvider = new NodeTracerProvider({
|
|
@@ -1185,10 +1184,9 @@ function initializeTracing() {
|
|
|
1185
1184
|
});
|
|
1186
1185
|
const spanExporter = new OTLPTraceExporter({
|
|
1187
1186
|
url: "https://otel.baselime.io/v1",
|
|
1188
|
-
timeoutMillis:
|
|
1187
|
+
timeoutMillis: 5e3,
|
|
1189
1188
|
headers: {
|
|
1190
|
-
"x-api-key": "
|
|
1191
|
-
// this is a joke
|
|
1189
|
+
"x-api-key": "b6e0fbbaf8dc2524773d2152ae2e9eb5c7fbaa52"
|
|
1192
1190
|
}
|
|
1193
1191
|
});
|
|
1194
1192
|
const spanProcessor = new SimpleSpanProcessor(spanExporter);
|
|
@@ -1201,7 +1199,7 @@ function initializeTracing() {
|
|
|
1201
1199
|
}
|
|
1202
1200
|
var provider = initializeTracing();
|
|
1203
1201
|
function getTracer() {
|
|
1204
|
-
return trace.getTracer("trigger.dev cli", version);
|
|
1202
|
+
return trace.getTracer("trigger.dev cli v3", version);
|
|
1205
1203
|
}
|
|
1206
1204
|
|
|
1207
1205
|
// src/cli/common.ts
|
|
@@ -1307,7 +1305,7 @@ var Logger = class {
|
|
|
1307
1305
|
const kind = LOGGER_LEVEL_FORMAT_TYPE_MAP[level];
|
|
1308
1306
|
if (kind) {
|
|
1309
1307
|
const [firstLine, ...otherLines] = message.split("\n");
|
|
1310
|
-
const notes = otherLines.length > 0 ? otherLines.map((
|
|
1308
|
+
const notes = otherLines.length > 0 ? otherLines.map((text3) => ({ text: text3 })) : void 0;
|
|
1311
1309
|
return formatMessagesSync([{ text: firstLine, notes }], {
|
|
1312
1310
|
color: true,
|
|
1313
1311
|
kind,
|
|
@@ -1322,6 +1320,83 @@ var logger = new Logger();
|
|
|
1322
1320
|
|
|
1323
1321
|
// src/cli/common.ts
|
|
1324
1322
|
import { outro } from "@clack/prompts";
|
|
1323
|
+
|
|
1324
|
+
// src/utilities/cliOutput.ts
|
|
1325
|
+
import { log } from "@clack/prompts";
|
|
1326
|
+
import chalk2 from "chalk";
|
|
1327
|
+
var green = "#4FFF54";
|
|
1328
|
+
var purple = "#735BF3";
|
|
1329
|
+
function chalkGreen(text3) {
|
|
1330
|
+
return chalk2.hex(green)(text3);
|
|
1331
|
+
}
|
|
1332
|
+
function chalkPurple(text3) {
|
|
1333
|
+
return chalk2.hex(purple)(text3);
|
|
1334
|
+
}
|
|
1335
|
+
function chalkGrey(text3) {
|
|
1336
|
+
return chalk2.hex("#878C99")(text3);
|
|
1337
|
+
}
|
|
1338
|
+
function chalkError(text3) {
|
|
1339
|
+
return chalk2.hex("#E11D48")(text3);
|
|
1340
|
+
}
|
|
1341
|
+
function chalkWarning(text3) {
|
|
1342
|
+
return chalk2.yellow(text3);
|
|
1343
|
+
}
|
|
1344
|
+
function chalkSuccess(text3) {
|
|
1345
|
+
return chalk2.hex("#28BF5C")(text3);
|
|
1346
|
+
}
|
|
1347
|
+
function chalkLink(text3) {
|
|
1348
|
+
return chalk2.underline.hex("#D7D9DD")(text3);
|
|
1349
|
+
}
|
|
1350
|
+
function chalkWorker(text3) {
|
|
1351
|
+
return chalk2.hex("#FFFF89")(text3);
|
|
1352
|
+
}
|
|
1353
|
+
function chalkTask(text3) {
|
|
1354
|
+
return chalk2.hex("#60A5FA")(text3);
|
|
1355
|
+
}
|
|
1356
|
+
function chalkRun(text3) {
|
|
1357
|
+
return chalk2.hex("#A78BFA")(text3);
|
|
1358
|
+
}
|
|
1359
|
+
function logo() {
|
|
1360
|
+
return `${chalk2.hex(green).bold("Trigger")}${chalk2.hex(purple).bold(".dev")}`;
|
|
1361
|
+
}
|
|
1362
|
+
function prettyPrintDate(date = /* @__PURE__ */ new Date()) {
|
|
1363
|
+
let formattedDate = new Intl.DateTimeFormat("en-US", {
|
|
1364
|
+
month: "short",
|
|
1365
|
+
day: "2-digit",
|
|
1366
|
+
hour: "2-digit",
|
|
1367
|
+
minute: "2-digit",
|
|
1368
|
+
second: "2-digit",
|
|
1369
|
+
hour12: false
|
|
1370
|
+
}).format(date);
|
|
1371
|
+
formattedDate += "." + ("00" + date.getMilliseconds()).slice(-3);
|
|
1372
|
+
return formattedDate;
|
|
1373
|
+
}
|
|
1374
|
+
function prettyError(header, body, footer) {
|
|
1375
|
+
const prefix = "Error: ";
|
|
1376
|
+
const indent = Array(prefix.length).fill(" ").join("");
|
|
1377
|
+
const spacing = "\n\n";
|
|
1378
|
+
const prettyPrefix = chalkError(prefix);
|
|
1379
|
+
const withIndents = (text3) => text3?.split("\n").map((line) => `${indent}${line}`).join("\n");
|
|
1380
|
+
const prettyBody = withIndents(body);
|
|
1381
|
+
const prettyFooter = withIndents(footer);
|
|
1382
|
+
log.error(
|
|
1383
|
+
`${prettyPrefix}${header}${prettyBody ? `${spacing}${prettyBody}` : ""}${prettyFooter ? `${spacing}${prettyFooter}` : ""}`
|
|
1384
|
+
);
|
|
1385
|
+
}
|
|
1386
|
+
function prettyWarning(header, body, footer) {
|
|
1387
|
+
const prefix = "Warning: ";
|
|
1388
|
+
const indent = Array(prefix.length).fill(" ").join("");
|
|
1389
|
+
const spacing = "\n\n";
|
|
1390
|
+
const prettyPrefix = chalkWarning(prefix);
|
|
1391
|
+
const withIndents = (text3) => text3?.split("\n").map((line) => `${indent}${line}`).join("\n");
|
|
1392
|
+
const prettyBody = withIndents(body);
|
|
1393
|
+
const prettyFooter = withIndents(footer);
|
|
1394
|
+
log.warn(
|
|
1395
|
+
`${prettyPrefix}${header}${prettyBody ? `${spacing}${prettyBody}` : ""}${prettyFooter ? `${spacing}${prettyFooter}` : ""}`
|
|
1396
|
+
);
|
|
1397
|
+
}
|
|
1398
|
+
|
|
1399
|
+
// src/cli/common.ts
|
|
1325
1400
|
var CommonCommandOptions = z.object({
|
|
1326
1401
|
apiUrl: z.string().optional(),
|
|
1327
1402
|
logLevel: z.enum(["debug", "info", "log", "warn", "error", "none"]).default("log"),
|
|
@@ -1331,7 +1406,7 @@ var CommonCommandOptions = z.object({
|
|
|
1331
1406
|
function commonOptions(command) {
|
|
1332
1407
|
return command.option("--profile <profile>", "The login profile to use", "default").option("-a, --api-url <value>", "Override the API URL", "https://api.trigger.dev").option(
|
|
1333
1408
|
"-l, --log-level <level>",
|
|
1334
|
-
"The log level to use (debug, info, log, warn, error, none)",
|
|
1409
|
+
"The CLI log level to use (debug, info, log, warn, error, none). This does not effect the log level of your trigger.dev tasks.",
|
|
1335
1410
|
"log"
|
|
1336
1411
|
).option("--skip-telemetry", "Opt-out of sending telemetry");
|
|
1337
1412
|
}
|
|
@@ -1377,7 +1452,7 @@ async function wrapCommandAction(name, schema, options, action) {
|
|
|
1377
1452
|
} else if (e instanceof SkipCommandError) {
|
|
1378
1453
|
} else {
|
|
1379
1454
|
recordSpanException(span, e);
|
|
1380
|
-
logger.
|
|
1455
|
+
logger.log(`${chalkError("X Error:")} ${e instanceof Error ? e.message : String(e)}`);
|
|
1381
1456
|
}
|
|
1382
1457
|
span.end();
|
|
1383
1458
|
throw e;
|
|
@@ -1420,31 +1495,34 @@ import fsModule, { writeFile } from "fs/promises";
|
|
|
1420
1495
|
import fs from "node:fs";
|
|
1421
1496
|
import { tmpdir } from "node:os";
|
|
1422
1497
|
import pathModule from "node:path";
|
|
1423
|
-
async function createFile(
|
|
1424
|
-
await fsModule.mkdir(pathModule.dirname(
|
|
1425
|
-
await fsModule.writeFile(
|
|
1426
|
-
return
|
|
1498
|
+
async function createFile(path7, contents) {
|
|
1499
|
+
await fsModule.mkdir(pathModule.dirname(path7), { recursive: true });
|
|
1500
|
+
await fsModule.writeFile(path7, contents);
|
|
1501
|
+
return path7;
|
|
1502
|
+
}
|
|
1503
|
+
async function pathExists(path7) {
|
|
1504
|
+
return fsSync.existsSync(path7);
|
|
1427
1505
|
}
|
|
1428
|
-
async function
|
|
1429
|
-
|
|
1506
|
+
async function removeFile(path7) {
|
|
1507
|
+
await fsModule.unlink(path7);
|
|
1430
1508
|
}
|
|
1431
|
-
async function readFile(
|
|
1432
|
-
return await fsModule.readFile(
|
|
1509
|
+
async function readFile(path7) {
|
|
1510
|
+
return await fsModule.readFile(path7, "utf8");
|
|
1433
1511
|
}
|
|
1434
|
-
async function readJSONFile(
|
|
1435
|
-
const fileContents = await fsModule.readFile(
|
|
1512
|
+
async function readJSONFile(path7) {
|
|
1513
|
+
const fileContents = await fsModule.readFile(path7, "utf8");
|
|
1436
1514
|
return JSON.parse(fileContents);
|
|
1437
1515
|
}
|
|
1438
|
-
async function writeJSONFile(
|
|
1439
|
-
await writeFile(
|
|
1516
|
+
async function writeJSONFile(path7, json, pretty = false) {
|
|
1517
|
+
await writeFile(path7, JSON.stringify(json, void 0, pretty ? 2 : void 0), "utf8");
|
|
1440
1518
|
}
|
|
1441
|
-
function readJSONFileSync(
|
|
1442
|
-
const fileContents = fsSync.readFileSync(
|
|
1519
|
+
function readJSONFileSync(path7) {
|
|
1520
|
+
const fileContents = fsSync.readFileSync(path7, "utf8");
|
|
1443
1521
|
return JSON.parse(fileContents);
|
|
1444
1522
|
}
|
|
1445
|
-
function safeDeleteFileSync(
|
|
1523
|
+
function safeDeleteFileSync(path7) {
|
|
1446
1524
|
try {
|
|
1447
|
-
fs.unlinkSync(
|
|
1525
|
+
fs.unlinkSync(path7);
|
|
1448
1526
|
} catch (error) {
|
|
1449
1527
|
}
|
|
1450
1528
|
}
|
|
@@ -1467,16 +1545,26 @@ function createTaskFileImports(taskFiles) {
|
|
|
1467
1545
|
async function gatherTaskFiles(config) {
|
|
1468
1546
|
const taskFiles = [];
|
|
1469
1547
|
for (const triggerDir of config.triggerDirectories) {
|
|
1470
|
-
const files = await
|
|
1471
|
-
|
|
1472
|
-
|
|
1473
|
-
|
|
1474
|
-
|
|
1548
|
+
const files = await gatherTaskFilesFromDir(triggerDir, triggerDir, config);
|
|
1549
|
+
taskFiles.push(...files);
|
|
1550
|
+
}
|
|
1551
|
+
return taskFiles;
|
|
1552
|
+
}
|
|
1553
|
+
async function gatherTaskFilesFromDir(dirPath, triggerDir, config) {
|
|
1554
|
+
const taskFiles = [];
|
|
1555
|
+
const files = await fs2.promises.readdir(dirPath, { withFileTypes: true });
|
|
1556
|
+
for (const file of files) {
|
|
1557
|
+
if (!file.isFile()) {
|
|
1558
|
+
const fullPath = join(dirPath, file.name);
|
|
1559
|
+
taskFiles.push(...await gatherTaskFilesFromDir(fullPath, triggerDir, config));
|
|
1560
|
+
} else {
|
|
1561
|
+
if (!file.name.endsWith(".js") && !file.name.endsWith(".ts") && !file.name.endsWith(".jsx") && !file.name.endsWith(".tsx")) {
|
|
1475
1562
|
continue;
|
|
1476
|
-
|
|
1563
|
+
}
|
|
1564
|
+
const fullPath = join(dirPath, file.name);
|
|
1477
1565
|
const filePath = relative(config.projectDir, fullPath);
|
|
1478
|
-
const
|
|
1479
|
-
const
|
|
1566
|
+
const importName = filePath.replace(/\..+$/, "").replace(/[^a-zA-Z0-9_$]/g, "_");
|
|
1567
|
+
const importPath = filePath.replace(/\\/g, "/");
|
|
1480
1568
|
taskFiles.push({ triggerDir, importPath, importName, filePath });
|
|
1481
1569
|
}
|
|
1482
1570
|
}
|
|
@@ -1493,9 +1581,12 @@ async function getTriggerDirectories(dirPath) {
|
|
|
1493
1581
|
const entries = await fs2.promises.readdir(dirPath, { withFileTypes: true });
|
|
1494
1582
|
const triggerDirectories = [];
|
|
1495
1583
|
for (const entry of entries) {
|
|
1496
|
-
if (!entry.isDirectory() || IGNORED_DIRS.includes(entry.name))
|
|
1584
|
+
if (!entry.isDirectory() || IGNORED_DIRS.includes(entry.name) || entry.name.startsWith("."))
|
|
1497
1585
|
continue;
|
|
1498
1586
|
const fullPath = join(dirPath, entry.name);
|
|
1587
|
+
if (fullPath.endsWith("app/api/trigger")) {
|
|
1588
|
+
continue;
|
|
1589
|
+
}
|
|
1499
1590
|
if (entry.name === "trigger") {
|
|
1500
1591
|
triggerDirectories.push(fullPath);
|
|
1501
1592
|
}
|
|
@@ -1506,6 +1597,7 @@ async function getTriggerDirectories(dirPath) {
|
|
|
1506
1597
|
|
|
1507
1598
|
// src/utilities/configFiles.ts
|
|
1508
1599
|
import { build } from "esbuild";
|
|
1600
|
+
import { esbuildDecorators } from "@anatine/esbuild-decorators";
|
|
1509
1601
|
function getGlobalConfigFolderPath() {
|
|
1510
1602
|
const configDir = mod_esm_default("trigger").config();
|
|
1511
1603
|
return configDir;
|
|
@@ -1535,6 +1627,11 @@ function readAuthConfigProfile(profile = "default") {
|
|
|
1535
1627
|
return void 0;
|
|
1536
1628
|
}
|
|
1537
1629
|
}
|
|
1630
|
+
function deleteAuthConfigProfile(profile = "default") {
|
|
1631
|
+
const existingConfig = readAuthConfigFile() || {};
|
|
1632
|
+
delete existingConfig[profile];
|
|
1633
|
+
writeAuthConfigFile(existingConfig);
|
|
1634
|
+
}
|
|
1538
1635
|
function readAuthConfigFile() {
|
|
1539
1636
|
try {
|
|
1540
1637
|
const authConfigFilePath = getAuthConfigFilePath();
|
|
@@ -1564,6 +1661,15 @@ async function getConfigPath(dir, fileName) {
|
|
|
1564
1661
|
});
|
|
1565
1662
|
return await findUp(fileName ? [fileName] : CONFIG_FILES, { cwd: dir });
|
|
1566
1663
|
}
|
|
1664
|
+
async function findFilePath(dir, fileName) {
|
|
1665
|
+
const result = await findUp([fileName], { cwd: dir });
|
|
1666
|
+
logger.debug("Searched for the file", {
|
|
1667
|
+
dir,
|
|
1668
|
+
fileName,
|
|
1669
|
+
result
|
|
1670
|
+
});
|
|
1671
|
+
return result;
|
|
1672
|
+
}
|
|
1567
1673
|
async function readConfig(dir, options) {
|
|
1568
1674
|
const absoluteDir = path2.resolve(process.cwd(), dir);
|
|
1569
1675
|
const configPath = await getConfigPath(dir, options?.configFile);
|
|
@@ -1597,11 +1703,19 @@ async function readConfig(dir, options) {
|
|
|
1597
1703
|
platform: "node",
|
|
1598
1704
|
target: ["es2018", "node18"],
|
|
1599
1705
|
outfile: builtConfigFilePath,
|
|
1600
|
-
logLevel: "silent"
|
|
1706
|
+
logLevel: "silent",
|
|
1707
|
+
plugins: [
|
|
1708
|
+
esbuildDecorators({
|
|
1709
|
+
cwd: absoluteDir,
|
|
1710
|
+
tsx: false,
|
|
1711
|
+
force: false
|
|
1712
|
+
})
|
|
1713
|
+
]
|
|
1601
1714
|
});
|
|
1602
1715
|
const userConfigModule = await import(builtConfigFileHref);
|
|
1603
1716
|
const rawConfig = await normalizeConfig(
|
|
1604
|
-
userConfigModule
|
|
1717
|
+
userConfigModule?.config,
|
|
1718
|
+
options?.projectRef ? { project: options?.projectRef } : void 0
|
|
1605
1719
|
);
|
|
1606
1720
|
const config = Config.parse(rawConfig);
|
|
1607
1721
|
return {
|
|
@@ -1610,82 +1724,36 @@ async function readConfig(dir, options) {
|
|
|
1610
1724
|
path: configPath
|
|
1611
1725
|
};
|
|
1612
1726
|
}
|
|
1613
|
-
async function resolveConfig(
|
|
1727
|
+
async function resolveConfig(path7, config) {
|
|
1614
1728
|
if (!config.triggerDirectories) {
|
|
1615
|
-
config.triggerDirectories = await findTriggerDirectories(
|
|
1729
|
+
config.triggerDirectories = await findTriggerDirectories(path7);
|
|
1616
1730
|
}
|
|
1617
1731
|
config.triggerDirectories = resolveTriggerDirectories(config.triggerDirectories);
|
|
1732
|
+
logger.debug("Resolved trigger directories", { triggerDirectories: config.triggerDirectories });
|
|
1618
1733
|
if (!config.triggerUrl) {
|
|
1619
1734
|
config.triggerUrl = CLOUD_API_URL;
|
|
1620
1735
|
}
|
|
1621
1736
|
if (!config.projectDir) {
|
|
1622
|
-
config.projectDir =
|
|
1737
|
+
config.projectDir = path7;
|
|
1623
1738
|
}
|
|
1624
1739
|
if (!config.tsconfigPath) {
|
|
1625
|
-
config.tsconfigPath = await
|
|
1740
|
+
config.tsconfigPath = await findFilePath(path7, "tsconfig.json");
|
|
1626
1741
|
}
|
|
1627
1742
|
return config;
|
|
1628
1743
|
}
|
|
1629
|
-
async function normalizeConfig(config) {
|
|
1744
|
+
async function normalizeConfig(config, overrides) {
|
|
1745
|
+
let normalized = config;
|
|
1630
1746
|
if (typeof config === "function") {
|
|
1631
|
-
|
|
1747
|
+
normalized = await config();
|
|
1632
1748
|
}
|
|
1633
|
-
|
|
1749
|
+
normalized = { ...normalized, ...overrides };
|
|
1750
|
+
return normalized;
|
|
1634
1751
|
}
|
|
1635
1752
|
|
|
1636
1753
|
// src/utilities/initialBanner.ts
|
|
1637
|
-
import { spinner } from "@clack/prompts";
|
|
1638
1754
|
import chalk3 from "chalk";
|
|
1639
|
-
import supportsColor from "supports-color";
|
|
1640
1755
|
import checkForUpdate from "update-check";
|
|
1641
1756
|
|
|
1642
|
-
// src/utilities/cliOutput.ts
|
|
1643
|
-
import chalk2 from "chalk";
|
|
1644
|
-
var green = "#4FFF54";
|
|
1645
|
-
var purple = "#735BF3";
|
|
1646
|
-
function chalkPurple(text2) {
|
|
1647
|
-
return chalk2.hex(purple)(text2);
|
|
1648
|
-
}
|
|
1649
|
-
function chalkGrey(text2) {
|
|
1650
|
-
return chalk2.hex("#878C99")(text2);
|
|
1651
|
-
}
|
|
1652
|
-
function chalkError(text2) {
|
|
1653
|
-
return chalk2.hex("#E11D48")(text2);
|
|
1654
|
-
}
|
|
1655
|
-
function chalkWarning(text2) {
|
|
1656
|
-
return chalk2.yellow(text2);
|
|
1657
|
-
}
|
|
1658
|
-
function chalkSuccess(text2) {
|
|
1659
|
-
return chalk2.hex("#28BF5C")(text2);
|
|
1660
|
-
}
|
|
1661
|
-
function chalkLink(text2) {
|
|
1662
|
-
return chalk2.underline.hex("#D7D9DD")(text2);
|
|
1663
|
-
}
|
|
1664
|
-
function chalkWorker(text2) {
|
|
1665
|
-
return chalk2.hex("#FFFF89")(text2);
|
|
1666
|
-
}
|
|
1667
|
-
function chalkTask(text2) {
|
|
1668
|
-
return chalk2.hex("#60A5FA")(text2);
|
|
1669
|
-
}
|
|
1670
|
-
function chalkRun(text2) {
|
|
1671
|
-
return chalk2.hex("#A78BFA")(text2);
|
|
1672
|
-
}
|
|
1673
|
-
function logo() {
|
|
1674
|
-
return `${chalk2.hex(green).bold("Trigger")}${chalk2.hex(purple).bold(".dev")}`;
|
|
1675
|
-
}
|
|
1676
|
-
function prettyPrintDate(date = /* @__PURE__ */ new Date()) {
|
|
1677
|
-
let formattedDate = new Intl.DateTimeFormat("en-US", {
|
|
1678
|
-
month: "short",
|
|
1679
|
-
day: "2-digit",
|
|
1680
|
-
hour: "2-digit",
|
|
1681
|
-
minute: "2-digit",
|
|
1682
|
-
second: "2-digit",
|
|
1683
|
-
hour12: false
|
|
1684
|
-
}).format(date);
|
|
1685
|
-
formattedDate += "." + ("00" + date.getMilliseconds()).slice(-3);
|
|
1686
|
-
return formattedDate;
|
|
1687
|
-
}
|
|
1688
|
-
|
|
1689
1757
|
// src/utilities/getVersion.ts
|
|
1690
1758
|
import path3 from "path";
|
|
1691
1759
|
function getVersion() {
|
|
@@ -1694,13 +1762,32 @@ function getVersion() {
|
|
|
1694
1762
|
return packageJsonContent.version ?? "1.0.0";
|
|
1695
1763
|
}
|
|
1696
1764
|
|
|
1765
|
+
// src/utilities/windows.ts
|
|
1766
|
+
import { log as log2, spinner as clackSpinner } from "@clack/prompts";
|
|
1767
|
+
var isWindows = process.platform === "win32";
|
|
1768
|
+
function escapeImportPath(path7) {
|
|
1769
|
+
return isWindows ? path7.replaceAll("\\", "\\\\") : path7;
|
|
1770
|
+
}
|
|
1771
|
+
var ballmerSpinner = () => ({
|
|
1772
|
+
start: (msg) => {
|
|
1773
|
+
log2.step(msg ?? "");
|
|
1774
|
+
},
|
|
1775
|
+
stop: (msg, code) => {
|
|
1776
|
+
log2.message(msg ?? "");
|
|
1777
|
+
},
|
|
1778
|
+
message: (msg) => {
|
|
1779
|
+
log2.message(msg ?? "");
|
|
1780
|
+
}
|
|
1781
|
+
});
|
|
1782
|
+
var spinner = () => isWindows ? ballmerSpinner() : clackSpinner();
|
|
1783
|
+
|
|
1697
1784
|
// src/utilities/initialBanner.ts
|
|
1698
1785
|
async function printInitialBanner(performUpdateCheck = true) {
|
|
1699
|
-
const
|
|
1700
|
-
const
|
|
1701
|
-
${logo()} ${chalkGrey(`(${
|
|
1786
|
+
const cliVersion = getVersion();
|
|
1787
|
+
const text3 = `
|
|
1788
|
+
${logo()} ${chalkGrey(`(${cliVersion})`)}
|
|
1702
1789
|
`;
|
|
1703
|
-
logger.info(
|
|
1790
|
+
logger.info(text3);
|
|
1704
1791
|
let maybeNewVersion;
|
|
1705
1792
|
if (performUpdateCheck) {
|
|
1706
1793
|
const loadingSpinner = spinner();
|
|
@@ -1708,7 +1795,7 @@ ${logo()} ${chalkGrey(`(${packageVersion})`)}
|
|
|
1708
1795
|
maybeNewVersion = await updateCheck();
|
|
1709
1796
|
if (maybeNewVersion !== void 0) {
|
|
1710
1797
|
loadingSpinner.stop(`Update available ${chalk3.green(maybeNewVersion)}`);
|
|
1711
|
-
const currentMajor = parseInt(
|
|
1798
|
+
const currentMajor = parseInt(cliVersion.split(".")[0]);
|
|
1712
1799
|
const newMajor = parseInt(maybeNewVersion.split(".")[0]);
|
|
1713
1800
|
if (newMajor > currentMajor) {
|
|
1714
1801
|
logger.warn(
|
|
@@ -1723,18 +1810,26 @@ After installation, run Trigger.dev with \`npx trigger.dev\`.`
|
|
|
1723
1810
|
}
|
|
1724
1811
|
}
|
|
1725
1812
|
async function printStandloneInitialBanner(performUpdateCheck = true) {
|
|
1726
|
-
const
|
|
1727
|
-
let text2 = `
|
|
1728
|
-
${logo()} ${chalkGrey("(v3 Developer Preview)")}`;
|
|
1813
|
+
const cliVersion = getVersion();
|
|
1729
1814
|
if (performUpdateCheck) {
|
|
1730
1815
|
const maybeNewVersion = await updateCheck();
|
|
1731
1816
|
if (maybeNewVersion !== void 0) {
|
|
1732
|
-
|
|
1817
|
+
logger.log(`
|
|
1818
|
+
${logo()} ${chalkGrey(`(${cliVersion} -> ${chalk3.green(maybeNewVersion)})`)}`);
|
|
1819
|
+
} else {
|
|
1820
|
+
logger.log(`
|
|
1821
|
+
${logo()} ${chalkGrey(`(${cliVersion})`)}`);
|
|
1733
1822
|
}
|
|
1823
|
+
} else {
|
|
1824
|
+
logger.log(`
|
|
1825
|
+
${logo()} ${chalkGrey(`(${cliVersion})`)}`);
|
|
1734
1826
|
}
|
|
1735
|
-
logger.log(
|
|
1827
|
+
logger.log(`${chalkGrey("-".repeat(54))}`);
|
|
1736
1828
|
}
|
|
1737
|
-
function printDevBanner() {
|
|
1829
|
+
function printDevBanner(printTopBorder = true) {
|
|
1830
|
+
if (printTopBorder) {
|
|
1831
|
+
logger.log(chalkGrey("-".repeat(54)));
|
|
1832
|
+
}
|
|
1738
1833
|
logger.log(
|
|
1739
1834
|
`${chalkGrey("Key:")} ${chalkWorker("Version")} ${chalkGrey("|")} ${chalkTask(
|
|
1740
1835
|
"Task"
|
|
@@ -1746,7 +1841,7 @@ async function doUpdateCheck() {
|
|
|
1746
1841
|
let update = null;
|
|
1747
1842
|
try {
|
|
1748
1843
|
update = await checkForUpdate(package_default, {
|
|
1749
|
-
distTag: package_default.version.startsWith("
|
|
1844
|
+
distTag: package_default.version.startsWith("3.0.0-beta") ? "beta" : "latest"
|
|
1750
1845
|
});
|
|
1751
1846
|
} catch (err) {
|
|
1752
1847
|
}
|
|
@@ -1764,26 +1859,20 @@ async function installPackages(packages, options) {
|
|
|
1764
1859
|
const cwd = options?.cwd ?? process.cwd();
|
|
1765
1860
|
logger.debug("Installing packages", { packages });
|
|
1766
1861
|
await setPackageJsonDeps(join3(cwd, "package.json"), packages);
|
|
1767
|
-
|
|
1862
|
+
await execa(
|
|
1768
1863
|
"npm",
|
|
1769
1864
|
["install", "--install-strategy", "nested", "--ignore-scripts", "--no-audit", "--no-fund"],
|
|
1770
1865
|
{
|
|
1771
1866
|
cwd,
|
|
1772
|
-
stderr: "
|
|
1867
|
+
stderr: "pipe"
|
|
1773
1868
|
}
|
|
1774
1869
|
);
|
|
1775
|
-
await new Promise((res, rej) => {
|
|
1776
|
-
childProcess2.on("error", (e) => rej(e));
|
|
1777
|
-
childProcess2.on("close", () => res());
|
|
1778
|
-
});
|
|
1779
|
-
await childProcess2;
|
|
1780
|
-
return;
|
|
1781
1870
|
}
|
|
1782
|
-
function detectPackageNameFromImportPath(
|
|
1783
|
-
if (
|
|
1784
|
-
return
|
|
1871
|
+
function detectPackageNameFromImportPath(path7) {
|
|
1872
|
+
if (path7.startsWith("@")) {
|
|
1873
|
+
return path7.split("/").slice(0, 2).join("/");
|
|
1785
1874
|
} else {
|
|
1786
|
-
return
|
|
1875
|
+
return path7.split("/")[0];
|
|
1787
1876
|
}
|
|
1788
1877
|
}
|
|
1789
1878
|
function stripWorkspaceFromVersion(version2) {
|
|
@@ -1799,16 +1888,16 @@ function parsePackageName(packageSpecifier) {
|
|
|
1799
1888
|
}
|
|
1800
1889
|
return { name: packageSpecifier };
|
|
1801
1890
|
}
|
|
1802
|
-
async function setPackageJsonDeps(
|
|
1891
|
+
async function setPackageJsonDeps(path7, deps) {
|
|
1803
1892
|
try {
|
|
1804
|
-
const existingPackageJson = await readJSONFile(
|
|
1893
|
+
const existingPackageJson = await readJSONFile(path7);
|
|
1805
1894
|
const newPackageJson = {
|
|
1806
1895
|
...existingPackageJson,
|
|
1807
1896
|
dependencies: {
|
|
1808
1897
|
...deps
|
|
1809
1898
|
}
|
|
1810
1899
|
};
|
|
1811
|
-
await writeJSONFile(
|
|
1900
|
+
await writeJSONFile(path7, newPackageJson);
|
|
1812
1901
|
} catch (error) {
|
|
1813
1902
|
const defaultPackageJson = {
|
|
1814
1903
|
name: "temp",
|
|
@@ -1816,13 +1905,13 @@ async function setPackageJsonDeps(path6, deps) {
|
|
|
1816
1905
|
description: "",
|
|
1817
1906
|
dependencies: deps
|
|
1818
1907
|
};
|
|
1819
|
-
await writeJSONFile(
|
|
1908
|
+
await writeJSONFile(path7, defaultPackageJson);
|
|
1820
1909
|
}
|
|
1821
1910
|
}
|
|
1822
1911
|
|
|
1823
1912
|
// src/commands/login.ts
|
|
1824
|
-
import { intro as intro2, log, outro as
|
|
1825
|
-
import { recordSpanException as recordSpanException3 } from "@trigger.dev/core/v3";
|
|
1913
|
+
import { intro as intro2, log as log3, outro as outro3, select } from "@clack/prompts";
|
|
1914
|
+
import { recordSpanException as recordSpanException3 } from "@trigger.dev/core/v3/workers";
|
|
1826
1915
|
|
|
1827
1916
|
// ../../node_modules/.pnpm/open@10.0.3/node_modules/open/index.js
|
|
1828
1917
|
import process6 from "node:process";
|
|
@@ -2192,14 +2281,14 @@ var baseOpen = async (options) => {
|
|
|
2192
2281
|
}
|
|
2193
2282
|
const subprocess = childProcess.spawn(command, cliArguments, childProcessOptions);
|
|
2194
2283
|
if (options.wait) {
|
|
2195
|
-
return new Promise((
|
|
2284
|
+
return new Promise((resolve5, reject) => {
|
|
2196
2285
|
subprocess.once("error", reject);
|
|
2197
2286
|
subprocess.once("close", (exitCode) => {
|
|
2198
2287
|
if (!options.allowNonzeroExitCode && exitCode > 0) {
|
|
2199
2288
|
reject(new Error(`Exited with code ${exitCode}`));
|
|
2200
2289
|
return;
|
|
2201
2290
|
}
|
|
2202
|
-
|
|
2291
|
+
resolve5(subprocess);
|
|
2203
2292
|
});
|
|
2204
2293
|
});
|
|
2205
2294
|
}
|
|
@@ -2316,7 +2405,7 @@ var decorateErrorWithCounts = (error, attemptNumber, options) => {
|
|
|
2316
2405
|
return error;
|
|
2317
2406
|
};
|
|
2318
2407
|
async function pRetry(input, options) {
|
|
2319
|
-
return new Promise((
|
|
2408
|
+
return new Promise((resolve5, reject) => {
|
|
2320
2409
|
options = {
|
|
2321
2410
|
onFailedAttempt() {
|
|
2322
2411
|
},
|
|
@@ -2339,7 +2428,7 @@ async function pRetry(input, options) {
|
|
|
2339
2428
|
try {
|
|
2340
2429
|
const result = await input(attemptNumber);
|
|
2341
2430
|
cleanUp();
|
|
2342
|
-
|
|
2431
|
+
resolve5(result);
|
|
2343
2432
|
} catch (error) {
|
|
2344
2433
|
try {
|
|
2345
2434
|
if (!(error instanceof Error)) {
|
|
@@ -2369,10 +2458,10 @@ async function pRetry(input, options) {
|
|
|
2369
2458
|
import { z as z3 } from "zod";
|
|
2370
2459
|
|
|
2371
2460
|
// src/commands/whoami.ts
|
|
2372
|
-
import { intro, note,
|
|
2461
|
+
import { intro, note, outro as outro2 } from "@clack/prompts";
|
|
2373
2462
|
|
|
2374
2463
|
// src/utilities/session.ts
|
|
2375
|
-
import { recordSpanException as recordSpanException2 } from "@trigger.dev/core/v3";
|
|
2464
|
+
import { recordSpanException as recordSpanException2 } from "@trigger.dev/core/v3/workers";
|
|
2376
2465
|
var tracer2 = getTracer();
|
|
2377
2466
|
async function isLoggedIn(profile = "default") {
|
|
2378
2467
|
return await tracer2.startActiveSpan("isLoggedIn", async (span) => {
|
|
@@ -2447,16 +2536,23 @@ async function whoAmI(options, embedded = false) {
|
|
|
2447
2536
|
if (!embedded) {
|
|
2448
2537
|
intro(`Displaying your account details [${options?.profile ?? "default"}]`);
|
|
2449
2538
|
}
|
|
2450
|
-
const loadingSpinner =
|
|
2539
|
+
const loadingSpinner = spinner();
|
|
2451
2540
|
loadingSpinner.start("Checking your account details");
|
|
2452
2541
|
const authentication = await isLoggedIn(options?.profile);
|
|
2453
2542
|
if (!authentication.ok) {
|
|
2454
2543
|
if (authentication.error === "fetch failed") {
|
|
2455
2544
|
loadingSpinner.stop("Fetch failed. Platform down?");
|
|
2456
2545
|
} else {
|
|
2457
|
-
|
|
2458
|
-
|
|
2459
|
-
|
|
2546
|
+
if (embedded) {
|
|
2547
|
+
loadingSpinner.stop(
|
|
2548
|
+
`Failed to check account details. You may want to run \`trigger.dev logout --profile ${options?.profile ?? "default"}\` and try again.`
|
|
2549
|
+
);
|
|
2550
|
+
} else {
|
|
2551
|
+
loadingSpinner.stop(
|
|
2552
|
+
`You must login first. Use \`trigger.dev login --profile ${options?.profile ?? "default"}\` to login.`
|
|
2553
|
+
);
|
|
2554
|
+
outro2("Whoami failed");
|
|
2555
|
+
}
|
|
2460
2556
|
}
|
|
2461
2557
|
return {
|
|
2462
2558
|
success: false,
|
|
@@ -2521,6 +2617,29 @@ async function login(options) {
|
|
|
2521
2617
|
if (!opts.embedded) {
|
|
2522
2618
|
intro2("Logging in to Trigger.dev");
|
|
2523
2619
|
}
|
|
2620
|
+
const accessTokenFromEnv = process.env.TRIGGER_ACCESS_TOKEN;
|
|
2621
|
+
if (accessTokenFromEnv) {
|
|
2622
|
+
const auth = {
|
|
2623
|
+
accessToken: accessTokenFromEnv,
|
|
2624
|
+
apiUrl: process.env.TRIGGER_API_URL ?? "https://api.trigger.dev"
|
|
2625
|
+
};
|
|
2626
|
+
const apiClient3 = new CliApiClient(auth.apiUrl, auth.accessToken);
|
|
2627
|
+
const userData = await apiClient3.whoAmI();
|
|
2628
|
+
if (!userData.success) {
|
|
2629
|
+
throw new Error(userData.error);
|
|
2630
|
+
}
|
|
2631
|
+
return {
|
|
2632
|
+
ok: true,
|
|
2633
|
+
profile: options?.profile ?? "default",
|
|
2634
|
+
userId: userData.data.userId,
|
|
2635
|
+
email: userData.data.email,
|
|
2636
|
+
dashboardUrl: userData.data.dashboardUrl,
|
|
2637
|
+
auth: {
|
|
2638
|
+
accessToken: auth.accessToken,
|
|
2639
|
+
apiUrl: auth.apiUrl
|
|
2640
|
+
}
|
|
2641
|
+
};
|
|
2642
|
+
}
|
|
2524
2643
|
const authConfig = readAuthConfigProfile(options?.profile);
|
|
2525
2644
|
if (authConfig && authConfig.accessToken) {
|
|
2526
2645
|
const whoAmIResult = await whoAmI(
|
|
@@ -2529,10 +2648,18 @@ async function login(options) {
|
|
|
2529
2648
|
skipTelemetry: !span.isRecording(),
|
|
2530
2649
|
logLevel: logger.loggerLevel
|
|
2531
2650
|
},
|
|
2532
|
-
|
|
2651
|
+
true
|
|
2533
2652
|
);
|
|
2534
2653
|
if (!whoAmIResult.success) {
|
|
2535
|
-
|
|
2654
|
+
prettyError("Unable to validate existing personal access token", whoAmIResult.error);
|
|
2655
|
+
if (!opts.embedded) {
|
|
2656
|
+
outro3(
|
|
2657
|
+
`Login failed using stored token. To fix, first logout using \`trigger.dev logout${options?.profile ? ` --profile ${options.profile}` : ""}\` and then try again.`
|
|
2658
|
+
);
|
|
2659
|
+
throw new SkipLoggingError(whoAmIResult.error);
|
|
2660
|
+
} else {
|
|
2661
|
+
throw new Error(whoAmIResult.error);
|
|
2662
|
+
}
|
|
2536
2663
|
} else {
|
|
2537
2664
|
if (!opts.embedded) {
|
|
2538
2665
|
const continueOption = await select({
|
|
@@ -2550,7 +2677,7 @@ async function login(options) {
|
|
|
2550
2677
|
initialValue: false
|
|
2551
2678
|
});
|
|
2552
2679
|
if (continueOption !== true) {
|
|
2553
|
-
|
|
2680
|
+
outro3("Already logged in");
|
|
2554
2681
|
span.setAttributes({
|
|
2555
2682
|
"cli.userId": whoAmIResult.data.userId,
|
|
2556
2683
|
"cli.email": whoAmIResult.data.email,
|
|
@@ -2591,16 +2718,16 @@ async function login(options) {
|
|
|
2591
2718
|
}
|
|
2592
2719
|
}
|
|
2593
2720
|
if (opts.embedded) {
|
|
2594
|
-
|
|
2721
|
+
log3.step("You must login to continue.");
|
|
2595
2722
|
}
|
|
2596
2723
|
const apiClient2 = new CliApiClient(authConfig?.apiUrl ?? opts.defaultApiUrl);
|
|
2597
2724
|
const authorizationCodeResult = await createAuthorizationCode(apiClient2);
|
|
2598
|
-
|
|
2725
|
+
log3.step(
|
|
2599
2726
|
`Please visit the following URL to login:
|
|
2600
2727
|
${chalkLink(authorizationCodeResult.url)}`
|
|
2601
2728
|
);
|
|
2602
2729
|
await open_default(authorizationCodeResult.url);
|
|
2603
|
-
const getPersonalAccessTokenSpinner =
|
|
2730
|
+
const getPersonalAccessTokenSpinner = spinner();
|
|
2604
2731
|
getPersonalAccessTokenSpinner.start("Waiting for you to login");
|
|
2605
2732
|
try {
|
|
2606
2733
|
const indexResult = await pRetry(
|
|
@@ -2629,9 +2756,9 @@ ${chalkLink(authorizationCodeResult.url)}`
|
|
|
2629
2756
|
throw new Error(whoAmIResult.error);
|
|
2630
2757
|
}
|
|
2631
2758
|
if (opts.embedded) {
|
|
2632
|
-
|
|
2759
|
+
log3.step("Logged in successfully");
|
|
2633
2760
|
} else {
|
|
2634
|
-
|
|
2761
|
+
outro3("Logged in successfully");
|
|
2635
2762
|
}
|
|
2636
2763
|
span.end();
|
|
2637
2764
|
return {
|
|
@@ -2648,7 +2775,7 @@ ${chalkLink(authorizationCodeResult.url)}`
|
|
|
2648
2775
|
} catch (e) {
|
|
2649
2776
|
getPersonalAccessTokenSpinner.stop(`Failed to get access token`);
|
|
2650
2777
|
if (e instanceof AbortError) {
|
|
2651
|
-
|
|
2778
|
+
log3.error(e.message);
|
|
2652
2779
|
}
|
|
2653
2780
|
recordSpanException3(span, e);
|
|
2654
2781
|
span.end();
|
|
@@ -2697,7 +2824,7 @@ async function getPersonalAccessToken(apiClient2, authorizationCode) {
|
|
|
2697
2824
|
async function createAuthorizationCode(apiClient2) {
|
|
2698
2825
|
return await tracer.startActiveSpan("createAuthorizationCode", async (span) => {
|
|
2699
2826
|
try {
|
|
2700
|
-
const createAuthCodeSpinner =
|
|
2827
|
+
const createAuthCodeSpinner = spinner();
|
|
2701
2828
|
createAuthCodeSpinner.start("Creating authorition code");
|
|
2702
2829
|
const authorizationCodeResult = await apiClient2.createAuthorizationCode();
|
|
2703
2830
|
if (!authorizationCodeResult.success) {
|
|
@@ -2721,10 +2848,43 @@ ${authorizationCodeResult.error}`
|
|
|
2721
2848
|
});
|
|
2722
2849
|
}
|
|
2723
2850
|
|
|
2851
|
+
// src/commands/deploy.ts
|
|
2852
|
+
import { esbuildDecorators as esbuildDecorators2 } from "@anatine/esbuild-decorators";
|
|
2853
|
+
import { Glob } from "glob";
|
|
2854
|
+
|
|
2724
2855
|
// src/utilities/build.ts
|
|
2725
2856
|
import { readFileSync } from "node:fs";
|
|
2726
2857
|
import { extname, isAbsolute } from "node:path";
|
|
2727
2858
|
import tsConfigPaths from "tsconfig-paths";
|
|
2859
|
+
function bundleTriggerDevCore(buildIdentifier, tsconfigPath) {
|
|
2860
|
+
return {
|
|
2861
|
+
name: "trigger-bundle-core",
|
|
2862
|
+
setup(build3) {
|
|
2863
|
+
build3.onResolve({ filter: /.*/ }, (args) => {
|
|
2864
|
+
if (!args.path.startsWith("@trigger.dev/core/v3")) {
|
|
2865
|
+
return void 0;
|
|
2866
|
+
}
|
|
2867
|
+
const triggerSdkPath = __require.resolve("@trigger.dev/sdk/v3", { paths: [process.cwd()] });
|
|
2868
|
+
logger.debug(`[${buildIdentifier}][trigger-bundle-core] Resolved @trigger.dev/sdk/v3`, {
|
|
2869
|
+
...args,
|
|
2870
|
+
triggerSdkPath
|
|
2871
|
+
});
|
|
2872
|
+
const resolvedPath = __require.resolve(args.path, {
|
|
2873
|
+
paths: [triggerSdkPath]
|
|
2874
|
+
});
|
|
2875
|
+
logger.debug(`[${buildIdentifier}][trigger-bundle-core] Externalizing ${args.path}`, {
|
|
2876
|
+
...args,
|
|
2877
|
+
triggerSdkPath,
|
|
2878
|
+
resolvedPath
|
|
2879
|
+
});
|
|
2880
|
+
return {
|
|
2881
|
+
path: resolvedPath,
|
|
2882
|
+
external: false
|
|
2883
|
+
};
|
|
2884
|
+
});
|
|
2885
|
+
}
|
|
2886
|
+
};
|
|
2887
|
+
}
|
|
2728
2888
|
function workerSetupImportConfigPlugin(configPath) {
|
|
2729
2889
|
return {
|
|
2730
2890
|
name: "trigger-worker-setup",
|
|
@@ -2736,7 +2896,9 @@ function workerSetupImportConfigPlugin(configPath) {
|
|
|
2736
2896
|
let workerSetupContents = readFileSync(args.path, "utf-8");
|
|
2737
2897
|
workerSetupContents = workerSetupContents.replace(
|
|
2738
2898
|
"__SETUP_IMPORTED_PROJECT_CONFIG__",
|
|
2739
|
-
`import * as setupImportedConfigExports from "${
|
|
2899
|
+
`import * as setupImportedConfigExports from "${escapeImportPath(
|
|
2900
|
+
configPath
|
|
2901
|
+
)}"; const setupImportedConfig = setupImportedConfigExports.config;`
|
|
2740
2902
|
);
|
|
2741
2903
|
logger.debug("Loading worker setup", {
|
|
2742
2904
|
args,
|
|
@@ -2871,12 +3033,644 @@ function getLoaderForFile(file) {
|
|
|
2871
3033
|
throw new Error(`Cannot get loader for file ${file}`);
|
|
2872
3034
|
}
|
|
2873
3035
|
|
|
3036
|
+
// src/utilities/deployErrors.ts
|
|
3037
|
+
import chalk4 from "chalk";
|
|
3038
|
+
import { relative as relative2 } from "node:path";
|
|
3039
|
+
import { groupTaskMetadataIssuesByTask } from "@trigger.dev/core/v3";
|
|
3040
|
+
import terminalLink from "terminal-link";
|
|
3041
|
+
|
|
3042
|
+
// src/utilities/links.ts
|
|
3043
|
+
var docs = {
|
|
3044
|
+
config: {
|
|
3045
|
+
home: "https://trigger.dev/docs/v3/trigger-config",
|
|
3046
|
+
esm: "https://trigger.dev/docs/v3/trigger-config#esm-only-packages",
|
|
3047
|
+
prisma: "https://trigger.dev/docs/v3/trigger-config#prisma-and-other-generators",
|
|
3048
|
+
additionalPackages: "https://trigger.dev/docs/v3/trigger-config#prisma-and-other-generators"
|
|
3049
|
+
}
|
|
3050
|
+
};
|
|
3051
|
+
var getInTouch = "https://trigger.dev/contact";
|
|
3052
|
+
|
|
3053
|
+
// src/utilities/deployErrors.ts
|
|
3054
|
+
function errorIsErrorLike(error) {
|
|
3055
|
+
return error instanceof Error || typeof error === "object" && error !== null && "message" in error;
|
|
3056
|
+
}
|
|
3057
|
+
function parseBuildErrorStack(error) {
|
|
3058
|
+
if (typeof error === "string") {
|
|
3059
|
+
return error;
|
|
3060
|
+
}
|
|
3061
|
+
if (errorIsErrorLike(error)) {
|
|
3062
|
+
if (typeof error.stack === "string") {
|
|
3063
|
+
const isErrRequireEsm = error.stack.includes("ERR_REQUIRE_ESM");
|
|
3064
|
+
let moduleName = null;
|
|
3065
|
+
if (isErrRequireEsm) {
|
|
3066
|
+
const moduleRegex = /node_modules\/(@[^\/]+\/[^\/]+|[^\/]+)\/[^\/]+\s/;
|
|
3067
|
+
const match = moduleRegex.exec(error.stack);
|
|
3068
|
+
if (match) {
|
|
3069
|
+
moduleName = match[1];
|
|
3070
|
+
return {
|
|
3071
|
+
type: "esm-require-error",
|
|
3072
|
+
moduleName
|
|
3073
|
+
};
|
|
3074
|
+
}
|
|
3075
|
+
}
|
|
3076
|
+
} else {
|
|
3077
|
+
return error.message;
|
|
3078
|
+
}
|
|
3079
|
+
}
|
|
3080
|
+
}
|
|
3081
|
+
function logESMRequireError(parsedError, resolvedConfig) {
|
|
3082
|
+
logger.log(
|
|
3083
|
+
`
|
|
3084
|
+
${chalkError("X Error:")} The ${chalkPurple(
|
|
3085
|
+
parsedError.moduleName
|
|
3086
|
+
)} module is being required even though it's ESM only, and builds only support CommonJS. There are two ${chalk4.underline(
|
|
3087
|
+
"possible"
|
|
3088
|
+
)} ways to fix this:`
|
|
3089
|
+
);
|
|
3090
|
+
logger.log(
|
|
3091
|
+
`
|
|
3092
|
+
${chalkGrey("\u25CB")} Dynamically import the module in your code: ${chalkGrey(
|
|
3093
|
+
`const myModule = await import("${parsedError.moduleName}");`
|
|
3094
|
+
)}`
|
|
3095
|
+
);
|
|
3096
|
+
if (resolvedConfig.status === "file") {
|
|
3097
|
+
const relativePath = relative2(resolvedConfig.config.projectDir, resolvedConfig.path).replace(
|
|
3098
|
+
/\\/g,
|
|
3099
|
+
"/"
|
|
3100
|
+
);
|
|
3101
|
+
logger.log(
|
|
3102
|
+
`${chalkGrey("\u25CB")} ${chalk4.underline("Or")} add ${chalkPurple(
|
|
3103
|
+
parsedError.moduleName
|
|
3104
|
+
)} to the ${chalkGreen("dependenciesToBundle")} array in your config file ${chalkGrey(
|
|
3105
|
+
`(${relativePath})`
|
|
3106
|
+
)}. This will bundle the module with your code.
|
|
3107
|
+
`
|
|
3108
|
+
);
|
|
3109
|
+
} else {
|
|
3110
|
+
logger.log(
|
|
3111
|
+
`${chalkGrey("\u25CB")} ${chalk4.underline("Or")} add ${chalkPurple(
|
|
3112
|
+
parsedError.moduleName
|
|
3113
|
+
)} to the ${chalkGreen("dependenciesToBundle")} array in your config file ${chalkGrey(
|
|
3114
|
+
"(you'll need to create one)"
|
|
3115
|
+
)}. This will bundle the module with your code.
|
|
3116
|
+
`
|
|
3117
|
+
);
|
|
3118
|
+
}
|
|
3119
|
+
logger.log(
|
|
3120
|
+
`${chalkGrey("\u25CB")} For more info see the ${terminalLink("relevant docs", docs.config.esm)}.
|
|
3121
|
+
`
|
|
3122
|
+
);
|
|
3123
|
+
}
|
|
3124
|
+
function parseNpmInstallError(error) {
|
|
3125
|
+
if (typeof error === "string") {
|
|
3126
|
+
return error;
|
|
3127
|
+
}
|
|
3128
|
+
if (error instanceof Error) {
|
|
3129
|
+
if (typeof error.stack === "string") {
|
|
3130
|
+
const isPackageNotFoundError = error.stack.includes("ERR! 404 Not Found") && error.stack.includes("is not in this registry");
|
|
3131
|
+
let packageName = null;
|
|
3132
|
+
if (isPackageNotFoundError) {
|
|
3133
|
+
const packageNameRegex = /'([^']+)' is not in this registry/;
|
|
3134
|
+
const match = packageNameRegex.exec(error.stack);
|
|
3135
|
+
if (match) {
|
|
3136
|
+
packageName = match[1];
|
|
3137
|
+
}
|
|
3138
|
+
}
|
|
3139
|
+
if (packageName) {
|
|
3140
|
+
return {
|
|
3141
|
+
type: "package-not-found-error",
|
|
3142
|
+
packageName
|
|
3143
|
+
};
|
|
3144
|
+
}
|
|
3145
|
+
const noMatchingVersionRegex = /No matching version found for ([^\s]+)\s/;
|
|
3146
|
+
const noMatchingVersionMatch = noMatchingVersionRegex.exec(error.stack);
|
|
3147
|
+
if (noMatchingVersionMatch) {
|
|
3148
|
+
return {
|
|
3149
|
+
type: "no-matching-version-error",
|
|
3150
|
+
packageName: noMatchingVersionMatch[1].replace(/.$/, "")
|
|
3151
|
+
};
|
|
3152
|
+
}
|
|
3153
|
+
return error.message;
|
|
3154
|
+
} else {
|
|
3155
|
+
return error.message;
|
|
3156
|
+
}
|
|
3157
|
+
}
|
|
3158
|
+
return "Unknown error";
|
|
3159
|
+
}
|
|
3160
|
+
function logTaskMetadataParseError(zodIssues, tasks) {
|
|
3161
|
+
logger.log(
|
|
3162
|
+
`
|
|
3163
|
+
${chalkError("X Error:")} Failed to start. The following ${zodIssues.length === 1 ? "task issue was" : "task issues were"} found:`
|
|
3164
|
+
);
|
|
3165
|
+
const groupedIssues = groupTaskMetadataIssuesByTask(tasks, zodIssues);
|
|
3166
|
+
for (const key in groupedIssues) {
|
|
3167
|
+
const taskWithIssues = groupedIssues[key];
|
|
3168
|
+
if (!taskWithIssues) {
|
|
3169
|
+
continue;
|
|
3170
|
+
}
|
|
3171
|
+
logger.log(
|
|
3172
|
+
`
|
|
3173
|
+
${chalkWarning("\u276F")} ${taskWithIssues.exportName} ${chalkGrey("in")} ${taskWithIssues.filePath}`
|
|
3174
|
+
);
|
|
3175
|
+
for (const issue of taskWithIssues.issues) {
|
|
3176
|
+
if (issue.path) {
|
|
3177
|
+
logger.log(` ${chalkError("x")} ${issue.path} ${chalkGrey(issue.message)}`);
|
|
3178
|
+
} else {
|
|
3179
|
+
logger.log(` ${chalkError("x")} ${chalkGrey(issue.message)}`);
|
|
3180
|
+
}
|
|
3181
|
+
}
|
|
3182
|
+
}
|
|
3183
|
+
}
|
|
3184
|
+
|
|
3185
|
+
// src/utilities/javascriptProject.ts
|
|
3186
|
+
import { $ } from "execa";
|
|
3187
|
+
import { join as join4 } from "node:path";
|
|
3188
|
+
|
|
3189
|
+
// src/utilities/getUserPackageManager.ts
|
|
3190
|
+
import { findUp as findUp2 } from "find-up";
|
|
3191
|
+
import { basename } from "path";
|
|
3192
|
+
async function getUserPackageManager(path7) {
|
|
3193
|
+
const packageManager = await detectPackageManager(path7);
|
|
3194
|
+
logger.debug("Detected package manager", { packageManager });
|
|
3195
|
+
return packageManager;
|
|
3196
|
+
}
|
|
3197
|
+
async function detectPackageManager(path7) {
|
|
3198
|
+
try {
|
|
3199
|
+
return await detectPackageManagerFromArtifacts(path7);
|
|
3200
|
+
} catch (error) {
|
|
3201
|
+
return detectPackageManagerFromCurrentCommand();
|
|
3202
|
+
}
|
|
3203
|
+
}
|
|
3204
|
+
function detectPackageManagerFromCurrentCommand() {
|
|
3205
|
+
const userAgent = process.env.npm_config_user_agent;
|
|
3206
|
+
if (userAgent) {
|
|
3207
|
+
if (userAgent.startsWith("yarn")) {
|
|
3208
|
+
return "yarn";
|
|
3209
|
+
} else if (userAgent.startsWith("pnpm")) {
|
|
3210
|
+
return "pnpm";
|
|
3211
|
+
} else {
|
|
3212
|
+
return "npm";
|
|
3213
|
+
}
|
|
3214
|
+
} else {
|
|
3215
|
+
return "npm";
|
|
3216
|
+
}
|
|
3217
|
+
}
|
|
3218
|
+
async function detectPackageManagerFromArtifacts(path7) {
|
|
3219
|
+
const artifacts = {
|
|
3220
|
+
yarn: "yarn.lock",
|
|
3221
|
+
pnpm: "pnpm-lock.yaml",
|
|
3222
|
+
npm: "package-lock.json",
|
|
3223
|
+
npmShrinkwrap: "npm-shrinkwrap.json"
|
|
3224
|
+
};
|
|
3225
|
+
const foundPath = await findUp2(Object.values(artifacts), { cwd: path7 });
|
|
3226
|
+
if (!foundPath) {
|
|
3227
|
+
throw new Error("Could not detect package manager from artifacts");
|
|
3228
|
+
}
|
|
3229
|
+
logger.debug("Found path from package manager artifacts", { foundPath });
|
|
3230
|
+
switch (basename(foundPath)) {
|
|
3231
|
+
case artifacts.yarn:
|
|
3232
|
+
return "yarn";
|
|
3233
|
+
case artifacts.pnpm:
|
|
3234
|
+
return "pnpm";
|
|
3235
|
+
case artifacts.npm:
|
|
3236
|
+
case artifacts.npmShrinkwrap:
|
|
3237
|
+
return "npm";
|
|
3238
|
+
default:
|
|
3239
|
+
throw new Error(`Unhandled package manager detection path: ${foundPath}`);
|
|
3240
|
+
}
|
|
3241
|
+
}
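
The detection above walks up from the project directory with find-up and maps whichever lockfile it finds first to a package manager, falling back to npm_config_user_agent when no lockfile exists. A condensed sketch of just the lockfile-to-manager mapping (illustrative helper, not an export of the package):

  // Illustrative only: lockfile name -> package manager, mirroring the artifact
  // table in detectPackageManagerFromArtifacts above.
  import { basename } from "node:path";

  function managerFromLockfile(lockfilePath: string): "yarn" | "pnpm" | "npm" {
    switch (basename(lockfilePath)) {
      case "yarn.lock": return "yarn";
      case "pnpm-lock.yaml": return "pnpm";
      case "package-lock.json":
      case "npm-shrinkwrap.json": return "npm";
      default: throw new Error(`Unhandled lockfile: ${lockfilePath}`);
    }
  }
  // managerFromLockfile("/repo/pnpm-lock.yaml") === "pnpm"
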
|
|
3242
|
+
|
|
3243
|
+
// src/utilities/assertExhaustive.ts
|
|
3244
|
+
function assertExhaustive(x) {
|
|
3245
|
+
throw new Error("Unexpected object: " + x);
|
|
3246
|
+
}
|
|
3247
|
+
|
|
3248
|
+
// src/utilities/javascriptProject.ts
|
|
3249
|
+
var BuiltInModules = /* @__PURE__ */ new Set([
|
|
3250
|
+
"assert",
|
|
3251
|
+
"async_hooks",
|
|
3252
|
+
"buffer",
|
|
3253
|
+
"child_process",
|
|
3254
|
+
"cluster",
|
|
3255
|
+
"console",
|
|
3256
|
+
"constants",
|
|
3257
|
+
"crypto",
|
|
3258
|
+
"dgram",
|
|
3259
|
+
"dns",
|
|
3260
|
+
"domain",
|
|
3261
|
+
"events",
|
|
3262
|
+
"fs",
|
|
3263
|
+
"http",
|
|
3264
|
+
"http2",
|
|
3265
|
+
"https",
|
|
3266
|
+
"inspector",
|
|
3267
|
+
"module",
|
|
3268
|
+
"net",
|
|
3269
|
+
"os",
|
|
3270
|
+
"path",
|
|
3271
|
+
"perf_hooks",
|
|
3272
|
+
"process",
|
|
3273
|
+
"punycode",
|
|
3274
|
+
"querystring",
|
|
3275
|
+
"readline",
|
|
3276
|
+
"repl",
|
|
3277
|
+
"stream",
|
|
3278
|
+
"string_decoder",
|
|
3279
|
+
"timers",
|
|
3280
|
+
"tls",
|
|
3281
|
+
"trace_events",
|
|
3282
|
+
"tty",
|
|
3283
|
+
"url",
|
|
3284
|
+
"util",
|
|
3285
|
+
"v8",
|
|
3286
|
+
"vm",
|
|
3287
|
+
"worker_threads",
|
|
3288
|
+
"zlib"
|
|
3289
|
+
]);
|
|
3290
|
+
var JavascriptProject = class {
|
|
3291
|
+
constructor(projectPath) {
|
|
3292
|
+
this.projectPath = projectPath;
|
|
3293
|
+
}
|
|
3294
|
+
_packageJson;
|
|
3295
|
+
_packageManager;
|
|
3296
|
+
get packageJson() {
|
|
3297
|
+
if (!this._packageJson) {
|
|
3298
|
+
this._packageJson = readJSONFileSync(join4(this.projectPath, "package.json"));
|
|
3299
|
+
}
|
|
3300
|
+
return this._packageJson;
|
|
3301
|
+
}
|
|
3302
|
+
get scripts() {
|
|
3303
|
+
return {
|
|
3304
|
+
postinstall: this.packageJson.scripts?.postinstall ?? ""
|
|
3305
|
+
};
|
|
3306
|
+
}
|
|
3307
|
+
async install() {
|
|
3308
|
+
const command = await this.#getCommand();
|
|
3309
|
+
try {
|
|
3310
|
+
await command.installDependencies({
|
|
3311
|
+
cwd: this.projectPath
|
|
3312
|
+
});
|
|
3313
|
+
} catch (error) {
|
|
3314
|
+
logger.debug(`Failed to install dependencies using ${command.name}`, {
|
|
3315
|
+
error
|
|
3316
|
+
});
|
|
3317
|
+
}
|
|
3318
|
+
}
|
|
3319
|
+
async resolve(packageName, options) {
|
|
3320
|
+
if (BuiltInModules.has(packageName)) {
|
|
3321
|
+
return void 0;
|
|
3322
|
+
}
|
|
3323
|
+
const opts = { allowDev: false, ...options };
|
|
3324
|
+
const packageJsonVersion = this.packageJson.dependencies?.[packageName];
|
|
3325
|
+
if (typeof packageJsonVersion === "string") {
|
|
3326
|
+
return packageJsonVersion;
|
|
3327
|
+
}
|
|
3328
|
+
if (opts.allowDev) {
|
|
3329
|
+
const devPackageJsonVersion = this.packageJson.devDependencies?.[packageName];
|
|
3330
|
+
if (typeof devPackageJsonVersion === "string") {
|
|
3331
|
+
return devPackageJsonVersion;
|
|
3332
|
+
}
|
|
3333
|
+
}
|
|
3334
|
+
const command = await this.#getCommand();
|
|
3335
|
+
try {
|
|
3336
|
+
const version2 = await command.resolveDependencyVersion(packageName, {
|
|
3337
|
+
cwd: this.projectPath
|
|
3338
|
+
});
|
|
3339
|
+
if (version2) {
|
|
3340
|
+
return version2;
|
|
3341
|
+
}
|
|
3342
|
+
} catch (error) {
|
|
3343
|
+
logger.debug(`Failed to resolve dependency version using ${command.name}`, {
|
|
3344
|
+
packageName,
|
|
3345
|
+
error
|
|
3346
|
+
});
|
|
3347
|
+
}
|
|
3348
|
+
}
|
|
3349
|
+
async #getCommand() {
|
|
3350
|
+
const packageManager = await this.getPackageManager();
|
|
3351
|
+
switch (packageManager) {
|
|
3352
|
+
case "npm":
|
|
3353
|
+
return new NPMCommands();
|
|
3354
|
+
case "pnpm":
|
|
3355
|
+
return new PNPMCommands();
|
|
3356
|
+
case "yarn":
|
|
3357
|
+
return new YarnCommands();
|
|
3358
|
+
default:
|
|
3359
|
+
assertExhaustive(packageManager);
|
|
3360
|
+
}
|
|
3361
|
+
}
|
|
3362
|
+
async getPackageManager() {
|
|
3363
|
+
if (!this._packageManager) {
|
|
3364
|
+
this._packageManager = await getUserPackageManager(this.projectPath);
|
|
3365
|
+
}
|
|
3366
|
+
return this._packageManager;
|
|
3367
|
+
}
|
|
3368
|
+
};
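
JavascriptProject.resolve short-circuits Node built-ins, then checks package.json dependencies, then devDependencies when allowDev is set, and only then shells out to the detected package manager. A condensed usage sketch, treating the class purely as it appears above (bundled CLI internals, not a public API; the path and result are hypothetical):

  // Illustrative only: the lookup order implemented by JavascriptProject.resolve.
  const project = new JavascriptProject("/path/to/project"); // hypothetical path
  const version = await project.resolve("zod", { allowDev: true });
  // 1. "dependencies" in package.json
  // 2. "devDependencies" (only because allowDev is true)
  // 3. npm/pnpm/yarn lookup via the command classes below
  // -> e.g. "3.22.4" if found, or undefined for built-ins such as "path"
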
|
|
3369
|
+
var PNPMCommands = class {
|
|
3370
|
+
get name() {
|
|
3371
|
+
return "pnpm";
|
|
3372
|
+
}
|
|
3373
|
+
get cmd() {
|
|
3374
|
+
return process.platform === "win32" ? "pnpm.cmd" : "pnpm";
|
|
3375
|
+
}
|
|
3376
|
+
async installDependencies(options) {
|
|
3377
|
+
const { stdout, stderr } = await $({ cwd: options.cwd })`${this.cmd} install`;
|
|
3378
|
+
logger.debug(`Installing dependencies using ${this.name}`, { stdout, stderr });
|
|
3379
|
+
}
|
|
3380
|
+
async resolveDependencyVersion(packageName, options) {
|
|
3381
|
+
const { stdout } = await $({ cwd: options.cwd })`${this.cmd} list ${packageName} -r --json`;
|
|
3382
|
+
const result = JSON.parse(stdout);
|
|
3383
|
+
logger.debug(`Resolving ${packageName} version using ${this.name}`, { result });
|
|
3384
|
+
for (const dep of result) {
|
|
3385
|
+
const dependency = dep.dependencies?.[packageName];
|
|
3386
|
+
if (dependency) {
|
|
3387
|
+
return dependency.version;
|
|
3388
|
+
}
|
|
3389
|
+
}
|
|
3390
|
+
}
|
|
3391
|
+
};
|
|
3392
|
+
var NPMCommands = class {
|
|
3393
|
+
get name() {
|
|
3394
|
+
return "npm";
|
|
3395
|
+
}
|
|
3396
|
+
get cmd() {
|
|
3397
|
+
return process.platform === "win32" ? "npm.cmd" : "npm";
|
|
3398
|
+
}
|
|
3399
|
+
async installDependencies(options) {
|
|
3400
|
+
const { stdout, stderr } = await $({ cwd: options.cwd })`${this.cmd} install`;
|
|
3401
|
+
logger.debug(`Installing dependencies using ${this.name}`, { stdout, stderr });
|
|
3402
|
+
}
|
|
3403
|
+
async resolveDependencyVersion(packageName, options) {
|
|
3404
|
+
const { stdout } = await $({ cwd: options.cwd })`${this.cmd} list ${packageName} --json`;
|
|
3405
|
+
const output = JSON.parse(stdout);
|
|
3406
|
+
logger.debug(`Resolving ${packageName} version using ${this.name}`, { output });
|
|
3407
|
+
return this.#recursivelySearchDependencies(output.dependencies, packageName);
|
|
3408
|
+
}
|
|
3409
|
+
#recursivelySearchDependencies(dependencies2, packageName) {
|
|
3410
|
+
for (const [name, dependency] of Object.entries(dependencies2)) {
|
|
3411
|
+
if (name === packageName) {
|
|
3412
|
+
return dependency.version;
|
|
3413
|
+
}
|
|
3414
|
+
if (dependency.dependencies) {
|
|
3415
|
+
const result = this.#recursivelySearchDependencies(dependency.dependencies, packageName);
|
|
3416
|
+
if (result) {
|
|
3417
|
+
return result;
|
|
3418
|
+
}
|
|
3419
|
+
}
|
|
3420
|
+
}
|
|
3421
|
+
}
|
|
3422
|
+
};
|
|
3423
|
+
var YarnCommands = class {
|
|
3424
|
+
get name() {
|
|
3425
|
+
return "yarn";
|
|
3426
|
+
}
|
|
3427
|
+
get cmd() {
|
|
3428
|
+
return process.platform === "win32" ? "yarn.cmd" : "yarn";
|
|
3429
|
+
}
|
|
3430
|
+
async installDependencies(options) {
|
|
3431
|
+
const { stdout, stderr } = await $({ cwd: options.cwd })`${this.cmd} install`;
|
|
3432
|
+
logger.debug(`Installing dependencies using ${this.name}`, { stdout, stderr });
|
|
3433
|
+
}
|
|
3434
|
+
async resolveDependencyVersion(packageName, options) {
|
|
3435
|
+
const { stdout } = await $({ cwd: options.cwd })`${this.cmd} info ${packageName} --json`;
|
|
3436
|
+
const lines = stdout.split("\n");
|
|
3437
|
+
logger.debug(`Resolving ${packageName} version using ${this.name}`, { lines });
|
|
3438
|
+
for (const line of lines) {
|
|
3439
|
+
const json = JSON.parse(line);
|
|
3440
|
+
if (json.value === packageName) {
|
|
3441
|
+
return json.children.Version;
|
|
3442
|
+
}
|
|
3443
|
+
}
|
|
3444
|
+
}
|
|
3445
|
+
};
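
All three command classes shell out via execa's $ tagged template and parse JSON from stdout. The shapes the parsers above look for are roughly the following (inferred from the property accesses in the code, not captured tool output):

  // Illustrative only: the JSON shapes the resolvers above read.
  //
  // pnpm list <pkg> -r --json -> [ { dependencies: { "<pkg>": { version: "1.2.3" } } }, ... ]
  // npm list <pkg> --json     -> { dependencies: { "<pkg>": { version: "1.2.3",
  //                                                           dependencies: { ... } } } }
  // yarn info <pkg> --json    -> one JSON object per line, e.g.
  //                              { "value": "<pkg>", "children": { "Version": "1.2.3" } }
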
|
|
3446
|
+
|
|
3447
|
+
// src/utilities/resolveInternalFilePath.ts
|
|
3448
|
+
import path5 from "path";
|
|
3449
|
+
import { fileURLToPath as fileURLToPath3 } from "url";
|
|
3450
|
+
function cliRootPath() {
|
|
3451
|
+
const __filename2 = fileURLToPath3(import.meta.url);
|
|
3452
|
+
const __dirname2 = path5.dirname(__filename2);
|
|
3453
|
+
return __dirname2;
|
|
3454
|
+
}
|
|
3455
|
+
|
|
3456
|
+
// src/utilities/safeJsonParse.ts
|
|
3457
|
+
function safeJsonParse(json) {
|
|
3458
|
+
if (!json) {
|
|
3459
|
+
return void 0;
|
|
3460
|
+
}
|
|
3461
|
+
try {
|
|
3462
|
+
return JSON.parse(json);
|
|
3463
|
+
} catch {
|
|
3464
|
+
return void 0;
|
|
3465
|
+
}
|
|
3466
|
+
}
|
|
3467
|
+
|
|
3468
|
+
// src/commands/update.ts
|
|
3469
|
+
import { confirm, intro as intro3, isCancel, log as log4, outro as outro4 } from "@clack/prompts";
|
|
3470
|
+
import { join as join5, resolve as resolve2 } from "path";
|
|
3471
|
+
var UpdateCommandOptions = CommonCommandOptions.pick({
|
|
3472
|
+
logLevel: true,
|
|
3473
|
+
skipTelemetry: true
|
|
3474
|
+
});
|
|
3475
|
+
function configureUpdateCommand(program2) {
|
|
3476
|
+
return program2.command("update").description("Updates all @trigger.dev/* packages to match the CLI version").argument("[path]", "The path to the directory that contains the package.json file", ".").option(
|
|
3477
|
+
"-l, --log-level <level>",
|
|
3478
|
+
"The CLI log level to use (debug, info, log, warn, error, none). This does not effect the log level of your trigger.dev tasks.",
|
|
3479
|
+
"log"
|
|
3480
|
+
).option("--skip-telemetry", "Opt-out of sending telemetry").action(async (path7, options) => {
|
|
3481
|
+
wrapCommandAction("dev", UpdateCommandOptions, options, async (opts) => {
|
|
3482
|
+
await printStandloneInitialBanner(true);
|
|
3483
|
+
await updateCommand(path7, opts);
|
|
3484
|
+
});
|
|
3485
|
+
});
|
|
3486
|
+
}
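
Given the registration above, the new command is invoked as npx trigger.dev@beta update [path], and it keeps only -l/--log-level and --skip-telemetry from CommonCommandOptions (for example: npx trigger.dev@beta update . -l debug).
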
|
|
3487
|
+
var triggerPackageFilter = /^@trigger\.dev/;
|
|
3488
|
+
async function updateCommand(dir, options) {
|
|
3489
|
+
await updateTriggerPackages(dir, options);
|
|
3490
|
+
}
|
|
3491
|
+
async function updateTriggerPackages(dir, options, embedded, requireUpdate) {
|
|
3492
|
+
let hasOutput = false;
|
|
3493
|
+
if (!embedded) {
|
|
3494
|
+
intro3("Updating packages");
|
|
3495
|
+
}
|
|
3496
|
+
const projectPath = resolve2(process.cwd(), dir);
|
|
3497
|
+
const { packageJson, readonlyPackageJson, packageJsonPath } = await getPackageJson(projectPath);
|
|
3498
|
+
if (!packageJson) {
|
|
3499
|
+
log4.error("Failed to load package.json. Try to re-run with `-l debug` to see what's going on.");
|
|
3500
|
+
return false;
|
|
3501
|
+
}
|
|
3502
|
+
const cliVersion = getVersion();
|
|
3503
|
+
const newCliVersion = await updateCheck();
|
|
3504
|
+
if (newCliVersion) {
|
|
3505
|
+
prettyWarning(
|
|
3506
|
+
"You're not running the latest CLI version, please consider updating ASAP",
|
|
3507
|
+
`Current: ${cliVersion}
|
|
3508
|
+
Latest: ${newCliVersion}`,
|
|
3509
|
+
"Run latest: npx trigger.dev@beta"
|
|
3510
|
+
);
|
|
3511
|
+
hasOutput = true;
|
|
3512
|
+
}
|
|
3513
|
+
const triggerDependencies = getTriggerDependencies(packageJson);
|
|
3514
|
+
function getVersionMismatches(deps, targetVersion) {
|
|
3515
|
+
const mismatches = [];
|
|
3516
|
+
for (const dep of deps) {
|
|
3517
|
+
if (dep.version === targetVersion) {
|
|
3518
|
+
continue;
|
|
3519
|
+
}
|
|
3520
|
+
mismatches.push(dep);
|
|
3521
|
+
}
|
|
3522
|
+
return mismatches;
|
|
3523
|
+
}
|
|
3524
|
+
const versionMismatches = getVersionMismatches(triggerDependencies, cliVersion);
|
|
3525
|
+
if (versionMismatches.length === 0) {
|
|
3526
|
+
if (!embedded) {
|
|
3527
|
+
outro4(`Nothing to do${newCliVersion ? " ..but you should really update your CLI!" : ""}`);
|
|
3528
|
+
return hasOutput;
|
|
3529
|
+
}
|
|
3530
|
+
return hasOutput;
|
|
3531
|
+
}
|
|
3532
|
+
prettyWarning(
|
|
3533
|
+
"Mismatch between your CLI version and installed packages",
|
|
3534
|
+
"We recommend pinned versions for guaranteed compatibility"
|
|
3535
|
+
);
|
|
3536
|
+
if (!process.stdout.isTTY) {
|
|
3537
|
+
outro4("Deploy failed");
|
|
3538
|
+
console.log(
|
|
3539
|
+
`ERROR: Version mismatch detected while running in CI. This won't end well. Aborting.
|
|
3540
|
+
|
|
3541
|
+
Please run the dev command locally and check that your CLI version matches the one printed below. Additionally, all \`@trigger.dev/*\` packages also need to match this version.
|
|
3542
|
+
|
|
3543
|
+
If your local CLI version doesn't match the one below, you may want to pin the CLI version in this CI step. To do that, just replace \`trigger.dev@beta\` with \`trigger.dev@<FULL_VERSION>\`, for example: \`npx trigger.dev@3.0.0-beta.17 deploy\`
|
|
3544
|
+
|
|
3545
|
+
CLI version: ${cliVersion}
|
|
3546
|
+
|
|
3547
|
+
Current package versions that don't match the CLI:
|
|
3548
|
+
${versionMismatches.map((dep) => `- ${dep.name}@${dep.version}`).join("\n")}
|
|
3549
|
+
`
|
|
3550
|
+
);
|
|
3551
|
+
process.exit(1);
|
|
3552
|
+
}
|
|
3553
|
+
log4.message("");
|
|
3554
|
+
const userWantsToUpdate = await updateConfirmation(versionMismatches, cliVersion);
|
|
3555
|
+
if (isCancel(userWantsToUpdate)) {
|
|
3556
|
+
throw new OutroCommandError();
|
|
3557
|
+
}
|
|
3558
|
+
if (!userWantsToUpdate) {
|
|
3559
|
+
if (requireUpdate) {
|
|
3560
|
+
outro4("You shall not pass!");
|
|
3561
|
+
logger.log(
|
|
3562
|
+
`${chalkError(
|
|
3563
|
+
"X Error:"
|
|
3564
|
+
)} Update required: Version mismatches are a common source of bugs and errors. Please update or use \`--skip-update-check\` at your own risk.
|
|
3565
|
+
`
|
|
3566
|
+
);
|
|
3567
|
+
process.exit(1);
|
|
3568
|
+
}
|
|
3569
|
+
if (!embedded) {
|
|
3570
|
+
outro4("You've been warned!");
|
|
3571
|
+
}
|
|
3572
|
+
return hasOutput;
|
|
3573
|
+
}
|
|
3574
|
+
const installSpinner = spinner();
|
|
3575
|
+
installSpinner.start("Writing new package.json file");
|
|
3576
|
+
const packageJsonBackupPath = `${packageJsonPath}.bak`;
|
|
3577
|
+
await writeJSONFile(packageJsonBackupPath, readonlyPackageJson, true);
|
|
3578
|
+
const exitHandler = async (sig) => {
|
|
3579
|
+
log4.warn(
|
|
3580
|
+
`You may have to manually roll back any package.json changes. Backup written to ${packageJsonBackupPath}`
|
|
3581
|
+
);
|
|
3582
|
+
};
|
|
3583
|
+
process.prependOnceListener("exit", exitHandler);
|
|
3584
|
+
mutatePackageJsonWithUpdatedPackages(packageJson, versionMismatches, cliVersion);
|
|
3585
|
+
await writeJSONFile(packageJsonPath, packageJson, true);
|
|
3586
|
+
async function revertPackageJsonChanges() {
|
|
3587
|
+
await writeJSONFile(packageJsonPath, readonlyPackageJson, true);
|
|
3588
|
+
await removeFile(packageJsonBackupPath);
|
|
3589
|
+
}
|
|
3590
|
+
installSpinner.message("Installing new package versions");
|
|
3591
|
+
const jsProject = new JavascriptProject(projectPath);
|
|
3592
|
+
let packageManager;
|
|
3593
|
+
try {
|
|
3594
|
+
packageManager = await jsProject.getPackageManager();
|
|
3595
|
+
installSpinner.message(`Installing new package versions with ${packageManager}`);
|
|
3596
|
+
await jsProject.install();
|
|
3597
|
+
} catch (error) {
|
|
3598
|
+
installSpinner.stop(
|
|
3599
|
+
`Failed to install new package versions${packageManager ? ` with ${packageManager}` : ""}`
|
|
3600
|
+
);
|
|
3601
|
+
process.removeListener("exit", exitHandler);
|
|
3602
|
+
await revertPackageJsonChanges();
|
|
3603
|
+
throw error;
|
|
3604
|
+
}
|
|
3605
|
+
installSpinner.stop("Installed new package versions");
|
|
3606
|
+
process.removeListener("exit", exitHandler);
|
|
3607
|
+
await removeFile(packageJsonBackupPath);
|
|
3608
|
+
if (!embedded) {
|
|
3609
|
+
outro4(
|
|
3610
|
+
`Packages updated${newCliVersion ? " ..but you should really update your CLI too!" : ""}`
|
|
3611
|
+
);
|
|
3612
|
+
}
|
|
3613
|
+
return hasOutput;
|
|
3614
|
+
}
|
|
3615
|
+
function getTriggerDependencies(packageJson) {
|
|
3616
|
+
const deps = [];
|
|
3617
|
+
for (const type of ["dependencies", "devDependencies"]) {
|
|
3618
|
+
for (const [name, version2] of Object.entries(packageJson[type] ?? {})) {
|
|
3619
|
+
if (!version2) {
|
|
3620
|
+
continue;
|
|
3621
|
+
}
|
|
3622
|
+
if (version2.startsWith("workspace")) {
|
|
3623
|
+
continue;
|
|
3624
|
+
}
|
|
3625
|
+
if (!triggerPackageFilter.test(name)) {
|
|
3626
|
+
continue;
|
|
3627
|
+
}
|
|
3628
|
+
const ignoredPackages = ["@trigger.dev/companyicons"];
|
|
3629
|
+
if (ignoredPackages.includes(name)) {
|
|
3630
|
+
continue;
|
|
3631
|
+
}
|
|
3632
|
+
deps.push({ type, name, version: version2 });
|
|
3633
|
+
}
|
|
3634
|
+
}
|
|
3635
|
+
return deps;
|
|
3636
|
+
}
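
getTriggerDependencies only picks up @trigger.dev-scoped entries, skips workspace: ranges, and ignores @trigger.dev/companyicons. A worked illustration with a hypothetical package.json:

  // Illustrative only: what the filter above keeps and drops.
  const pkg = {
    dependencies: { "@trigger.dev/sdk": "3.0.0-beta.20", zod: "3.22.4" },
    devDependencies: {
      "@trigger.dev/core": "workspace:*",
      "@trigger.dev/companyicons": "1.0.0",
    },
  };
  // Kept:    @trigger.dev/sdk          (matches /^@trigger\.dev/)
  // Dropped: zod                       (fails the name filter)
  //          @trigger.dev/core         ("workspace:" versions are skipped)
  //          @trigger.dev/companyicons (explicitly ignored)
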
|
|
3637
|
+
function mutatePackageJsonWithUpdatedPackages(packageJson, depsToUpdate, targetVersion) {
|
|
3638
|
+
for (const { type, name, version: version2 } of depsToUpdate) {
|
|
3639
|
+
if (!packageJson[type]) {
|
|
3640
|
+
throw new Error(
|
|
3641
|
+
`No ${type} entry found in package.json. Please try to upgrade manually instead.`
|
|
3642
|
+
);
|
|
3643
|
+
}
|
|
3644
|
+
packageJson[type][name] = targetVersion;
|
|
3645
|
+
}
|
|
3646
|
+
}
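
mutatePackageJsonWithUpdatedPackages edits the parsed package.json object in place; the caller above is what writes the .bak backup first and reverts on failure. A tiny illustration with hypothetical versions:

  // Illustrative only: the in-place pin applied by the helper above.
  const pkgJson = { dependencies: { "@trigger.dev/sdk": "3.0.0-beta.20" } };
  mutatePackageJsonWithUpdatedPackages(
    pkgJson,
    [{ type: "dependencies", name: "@trigger.dev/sdk", version: "3.0.0-beta.20" }],
    "3.0.0-beta.21"
  );
  // pkgJson.dependencies["@trigger.dev/sdk"] is now "3.0.0-beta.21"
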
|
|
3647
|
+
function printUpdateTable(depsToUpdate, targetVersion) {
|
|
3648
|
+
log4.message("Suggested updates");
|
|
3649
|
+
const tableData = depsToUpdate.map((dep) => ({
|
|
3650
|
+
package: dep.name,
|
|
3651
|
+
old: dep.version,
|
|
3652
|
+
new: targetVersion
|
|
3653
|
+
}));
|
|
3654
|
+
logger.table(tableData);
|
|
3655
|
+
}
|
|
3656
|
+
async function updateConfirmation(depsToUpdate, targetVersion) {
|
|
3657
|
+
printUpdateTable(depsToUpdate, targetVersion);
|
|
3658
|
+
let confirmMessage = "Would you like to apply those updates?";
|
|
3659
|
+
return await confirm({
|
|
3660
|
+
message: confirmMessage
|
|
3661
|
+
});
|
|
3662
|
+
}
|
|
3663
|
+
async function getPackageJson(absoluteProjectPath) {
|
|
3664
|
+
const packageJsonPath = join5(absoluteProjectPath, "package.json");
|
|
3665
|
+
const readonlyPackageJson = Object.freeze(await readJSONFile(packageJsonPath));
|
|
3666
|
+
const packageJson = structuredClone(readonlyPackageJson);
|
|
3667
|
+
return { packageJson, readonlyPackageJson, packageJsonPath };
|
|
3668
|
+
}
|
|
3669
|
+
|
|
2874
3670
|
// src/commands/deploy.ts
|
|
2875
|
-
import { Glob } from "glob";
|
|
2876
3671
|
var DeployCommandOptions = CommonCommandOptions.extend({
|
|
2877
3672
|
skipTypecheck: z4.boolean().default(false),
|
|
2878
3673
|
skipDeploy: z4.boolean().default(false),
|
|
2879
|
-
ignoreEnvVarCheck: z4.boolean().default(false),
|
|
2880
3674
|
env: z4.enum(["prod", "staging"]),
|
|
2881
3675
|
loadImage: z4.boolean().default(false),
|
|
2882
3676
|
buildPlatform: z4.enum(["linux/amd64", "linux/arm64"]).default("linux/amd64"),
|
|
@@ -2886,7 +3680,9 @@ var DeployCommandOptions = CommonCommandOptions.extend({
|
|
|
2886
3680
|
config: z4.string().optional(),
|
|
2887
3681
|
projectRef: z4.string().optional(),
|
|
2888
3682
|
outputMetafile: z4.string().optional(),
|
|
2889
|
-
apiUrl: z4.string().optional()
|
|
3683
|
+
apiUrl: z4.string().optional(),
|
|
3684
|
+
saveLogs: z4.boolean().default(false),
|
|
3685
|
+
skipUpdateCheck: z4.boolean().default(false)
|
|
2890
3686
|
});
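
The two new schema fields correspond to the flags registered further down: --skip-update-check opts a deploy out of the package-version gate added in this release, and the hidden --save-logs keeps build logs even when the build succeeds. In CI that might look like npx trigger.dev@beta deploy --skip-update-check --save-logs, or with the CLI pinned to a full version (e.g. trigger.dev@3.0.0-beta.17), as the mismatch error message below itself recommends.
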
|
|
2891
3687
|
function configureDeployCommand(program2) {
|
|
2892
3688
|
return commonOptions(
|
|
@@ -2894,12 +3690,9 @@ function configureDeployCommand(program2) {
|
|
|
2894
3690
|
"-e, --env <env>",
|
|
2895
3691
|
"Deploy to a specific environment (currently only prod and staging are supported)",
|
|
2896
3692
|
"prod"
|
|
2897
|
-
).option("--skip-typecheck", "Whether to skip the pre-build typecheck").option(
|
|
2898
|
-
"--ignore-env-var-check",
|
|
2899
|
-
"Detected missing environment variables won't block deployment"
|
|
2900
|
-
).option("-c, --config <config file>", "The name of the config file, found at [path]").option(
|
|
3693
|
+
).option("--skip-typecheck", "Whether to skip the pre-build typecheck").option("--skip-update-check", "Skip checking for @trigger.dev package updates").option("-c, --config <config file>", "The name of the config file, found at [path]").option(
|
|
2901
3694
|
"-p, --project-ref <project ref>",
|
|
2902
|
-
"The project ref. Required if there is no config file."
|
|
3695
|
+
"The project ref. Required if there is no config file. This will override the project specified in the config file."
|
|
2903
3696
|
)
|
|
2904
3697
|
).addOption(
|
|
2905
3698
|
new CommandOption(
|
|
@@ -2921,6 +3714,11 @@ function configureDeployCommand(program2) {
|
|
|
2921
3714
|
"--tag <tag>",
|
|
2922
3715
|
"(Coming soon) Specify the tag to use when pushing the image to the registry"
|
|
2923
3716
|
).hideHelp()
|
|
3717
|
+
).addOption(
|
|
3718
|
+
new CommandOption(
|
|
3719
|
+
"--ignore-env-var-check",
|
|
3720
|
+
"(deprecated) Detected missing environment variables won't block deployment"
|
|
3721
|
+
).hideHelp()
|
|
2924
3722
|
).addOption(new CommandOption("-D, --skip-deploy", "Skip deploying the image").hideHelp()).addOption(
|
|
2925
3723
|
new CommandOption("--load-image", "Load the built image into your local docker").hideHelp()
|
|
2926
3724
|
).addOption(
|
|
@@ -2933,10 +3731,15 @@ function configureDeployCommand(program2) {
|
|
|
2933
3731
|
"--output-metafile <path>",
|
|
2934
3732
|
"If provided, will save the esbuild metafile for the build to the specified path"
|
|
2935
3733
|
).hideHelp()
|
|
2936
|
-
).
|
|
3734
|
+
).addOption(
|
|
3735
|
+
new CommandOption(
|
|
3736
|
+
"--save-logs",
|
|
3737
|
+
"If provided, will save logs even for successful builds"
|
|
3738
|
+
).hideHelp()
|
|
3739
|
+
).action(async (path7, options) => {
|
|
2937
3740
|
await handleTelemetry(async () => {
|
|
2938
3741
|
await printStandloneInitialBanner(true);
|
|
2939
|
-
await deployCommand(
|
|
3742
|
+
await deployCommand(path7, options);
|
|
2940
3743
|
});
|
|
2941
3744
|
});
|
|
2942
3745
|
}
|
|
@@ -2947,7 +3750,10 @@ async function deployCommand(dir, options) {
|
|
|
2947
3750
|
}
|
|
2948
3751
|
async function _deployCommand(dir, options) {
|
|
2949
3752
|
const span = trace2.getSpan(context.active());
|
|
2950
|
-
|
|
3753
|
+
intro4("Deploying project");
|
|
3754
|
+
if (!options.skipUpdateCheck) {
|
|
3755
|
+
await updateTriggerPackages(dir, { ...options }, true, true);
|
|
3756
|
+
}
|
|
2951
3757
|
const authorization = await login({
|
|
2952
3758
|
embedded: true,
|
|
2953
3759
|
defaultApiUrl: options.apiUrl,
|
|
@@ -2990,7 +3796,7 @@ async function _deployCommand(dir, options) {
|
|
|
2990
3796
|
throw new Error(deploymentEnv.error);
|
|
2991
3797
|
}
|
|
2992
3798
|
const environmentClient = new CliApiClient(authorization.auth.apiUrl, deploymentEnv.data.apiKey);
|
|
2993
|
-
|
|
3799
|
+
log5.step(
|
|
2994
3800
|
`Preparing to deploy "${deploymentEnv.data.name}" (${resolvedConfig.config.project}) to ${options.env}`
|
|
2995
3801
|
);
|
|
2996
3802
|
const compilation = await compileProject(
|
|
@@ -2999,15 +3805,6 @@ async function _deployCommand(dir, options) {
|
|
|
2999
3805
|
resolvedConfig.status === "file" ? resolvedConfig.path : void 0
|
|
3000
3806
|
);
|
|
3001
3807
|
logger.debug("Compilation result", { compilation });
|
|
3002
|
-
if (compilation.envVars.length > 0) {
|
|
3003
|
-
await checkEnvVars(
|
|
3004
|
-
compilation.envVars ?? [],
|
|
3005
|
-
resolvedConfig.config,
|
|
3006
|
-
options,
|
|
3007
|
-
environmentClient,
|
|
3008
|
-
authorization.dashboardUrl
|
|
3009
|
-
);
|
|
3010
|
-
}
|
|
3011
3808
|
const deploymentResponse = await environmentClient.initializeDeployment({
|
|
3012
3809
|
contentHash: compilation.contentHash,
|
|
3013
3810
|
userId: authorization.userId
|
|
@@ -3021,7 +3818,7 @@ async function _deployCommand(dir, options) {
|
|
|
3021
3818
|
);
|
|
3022
3819
|
}
|
|
3023
3820
|
const version2 = deploymentResponse.data.version;
|
|
3024
|
-
const deploymentSpinner =
|
|
3821
|
+
const deploymentSpinner = spinner();
|
|
3025
3822
|
deploymentSpinner.start(`Deploying version ${version2}`);
|
|
3026
3823
|
const selfHostedRegistryHost = deploymentResponse.data.registryHost ?? options.registry;
|
|
3027
3824
|
const registryHost = selfHostedRegistryHost ?? "registry.trigger.dev";
|
|
@@ -3046,28 +3843,56 @@ async function _deployCommand(dir, options) {
|
|
|
3046
3843
|
"Failed to initialize deployment. The deployment does not have any external build data. To deploy this project, you must use the --self-hosted flag to build and push the image yourself."
|
|
3047
3844
|
);
|
|
3048
3845
|
}
|
|
3049
|
-
return buildAndPushImage(
|
|
3050
|
-
|
|
3051
|
-
|
|
3052
|
-
|
|
3053
|
-
|
|
3054
|
-
|
|
3055
|
-
|
|
3056
|
-
|
|
3057
|
-
|
|
3058
|
-
|
|
3059
|
-
|
|
3060
|
-
|
|
3061
|
-
|
|
3062
|
-
|
|
3063
|
-
|
|
3064
|
-
|
|
3846
|
+
return buildAndPushImage(
|
|
3847
|
+
{
|
|
3848
|
+
registryHost,
|
|
3849
|
+
auth: authorization.auth.accessToken,
|
|
3850
|
+
imageTag: deploymentResponse.data.imageTag,
|
|
3851
|
+
buildId: deploymentResponse.data.externalBuildData.buildId,
|
|
3852
|
+
buildToken: deploymentResponse.data.externalBuildData.buildToken,
|
|
3853
|
+
buildProjectId: deploymentResponse.data.externalBuildData.projectId,
|
|
3854
|
+
cwd: compilation.path,
|
|
3855
|
+
projectId: resolvedConfig.config.project,
|
|
3856
|
+
deploymentId: deploymentResponse.data.id,
|
|
3857
|
+
deploymentVersion: deploymentResponse.data.version,
|
|
3858
|
+
contentHash: deploymentResponse.data.contentHash,
|
|
3859
|
+
projectRef: resolvedConfig.config.project,
|
|
3860
|
+
loadImage: options.loadImage,
|
|
3861
|
+
buildPlatform: options.buildPlatform
|
|
3862
|
+
},
|
|
3863
|
+
deploymentSpinner
|
|
3864
|
+
);
|
|
3065
3865
|
};
|
|
3066
3866
|
const image = await buildImage();
|
|
3867
|
+
const warnings = checkLogsForWarnings(image.logs);
|
|
3868
|
+
if (!warnings.ok) {
|
|
3869
|
+
await failDeploy(
|
|
3870
|
+
deploymentResponse.data.shortCode,
|
|
3871
|
+
warnings.summary,
|
|
3872
|
+
image.logs,
|
|
3873
|
+
deploymentSpinner,
|
|
3874
|
+
warnings.warnings,
|
|
3875
|
+
warnings.errors
|
|
3876
|
+
);
|
|
3877
|
+
throw new SkipLoggingError(`Failed to build project image: ${warnings.summary}`);
|
|
3878
|
+
}
|
|
3067
3879
|
if (!image.ok) {
|
|
3068
|
-
|
|
3880
|
+
await failDeploy(
|
|
3881
|
+
deploymentResponse.data.shortCode,
|
|
3882
|
+
image.error,
|
|
3883
|
+
image.logs,
|
|
3884
|
+
deploymentSpinner,
|
|
3885
|
+
warnings.warnings
|
|
3886
|
+
);
|
|
3069
3887
|
throw new SkipLoggingError(`Failed to build project image: ${image.error}`);
|
|
3070
3888
|
}
|
|
3889
|
+
const preExitTasks = async () => {
|
|
3890
|
+
printWarnings(warnings.warnings);
|
|
3891
|
+
if (options.saveLogs) {
|
|
3892
|
+
const logPath = await saveLogs(deploymentResponse.data.shortCode, image.logs);
|
|
3893
|
+
log5.info(`Build logs have been saved to ${logPath}`);
|
|
3894
|
+
}
|
|
3895
|
+
};
|
|
3071
3896
|
const imageReference = options.selfHosted ? `${selfHostedRegistryHost ? `${selfHostedRegistryHost}/` : ""}${image.image}${image.digest ? `@${image.digest}` : ""}` : `${registryHost}/${image.image}${image.digest ? `@${image.digest}` : ""}`;
|
|
3072
3897
|
span?.setAttributes({
|
|
3073
3898
|
"image.reference": imageReference
|
|
@@ -3076,6 +3901,7 @@ async function _deployCommand(dir, options) {
|
|
|
3076
3901
|
deploymentSpinner.stop(
|
|
3077
3902
|
`Project image built: ${imageReference}. Skipping deployment as requested`
|
|
3078
3903
|
);
|
|
3904
|
+
await preExitTasks();
|
|
3079
3905
|
throw new SkipCommandError("Skipping deployment as requested");
|
|
3080
3906
|
}
|
|
3081
3907
|
deploymentSpinner.message(
|
|
@@ -3090,6 +3916,7 @@ async function _deployCommand(dir, options) {
|
|
|
3090
3916
|
);
|
|
3091
3917
|
if (!startIndexingResponse.success) {
|
|
3092
3918
|
deploymentSpinner.stop(`Failed to start indexing: ${startIndexingResponse.error}`);
|
|
3919
|
+
await preExitTasks();
|
|
3093
3920
|
throw new SkipLoggingError(`Failed to start indexing: ${startIndexingResponse.error}`);
|
|
3094
3921
|
}
|
|
3095
3922
|
const finishedDeployment = await waitForDeploymentToFinish(
|
|
@@ -3098,26 +3925,33 @@ async function _deployCommand(dir, options) {
|
|
|
3098
3925
|
);
|
|
3099
3926
|
if (!finishedDeployment) {
|
|
3100
3927
|
deploymentSpinner.stop(`Deployment failed to complete`);
|
|
3928
|
+
await preExitTasks();
|
|
3101
3929
|
throw new SkipLoggingError("Deployment failed to complete: unknown issue");
|
|
3102
3930
|
}
|
|
3103
3931
|
if (typeof finishedDeployment === "string") {
|
|
3104
3932
|
deploymentSpinner.stop(`Deployment failed to complete: ${finishedDeployment}`);
|
|
3933
|
+
await preExitTasks();
|
|
3105
3934
|
throw new SkipLoggingError(`Deployment failed to complete: ${finishedDeployment}`);
|
|
3106
3935
|
}
|
|
3107
|
-
const deploymentLink =
|
|
3936
|
+
const deploymentLink = terminalLink2(
|
|
3108
3937
|
"View deployment",
|
|
3109
3938
|
`${authorization.dashboardUrl}/projects/v3/${resolvedConfig.config.project}/deployments/${finishedDeployment.shortCode}`
|
|
3110
3939
|
);
|
|
3111
3940
|
switch (finishedDeployment.status) {
|
|
3112
3941
|
case "DEPLOYED": {
|
|
3113
|
-
|
|
3942
|
+
if (warnings.warnings.length > 0) {
|
|
3943
|
+
deploymentSpinner.stop("Deployment completed with warnings");
|
|
3944
|
+
} else {
|
|
3945
|
+
deploymentSpinner.stop("Deployment completed");
|
|
3946
|
+
}
|
|
3947
|
+
await preExitTasks();
|
|
3114
3948
|
const taskCount = finishedDeployment.worker?.tasks.length ?? 0;
|
|
3115
3949
|
if (taskCount === 0) {
|
|
3116
|
-
|
|
3950
|
+
outro5(
|
|
3117
3951
|
`Version ${version2} deployed with no detected tasks. Please make sure you are exporting tasks in your project. ${deploymentLink}`
|
|
3118
3952
|
);
|
|
3119
3953
|
} else {
|
|
3120
|
-
|
|
3954
|
+
outro5(
|
|
3121
3955
|
`Version ${version2} deployed with ${taskCount} detected task${taskCount === 1 ? "" : "s"} ${deploymentLink}`
|
|
3122
3956
|
);
|
|
3123
3957
|
}
|
|
@@ -3125,10 +3959,29 @@ async function _deployCommand(dir, options) {
|
|
|
3125
3959
|
}
|
|
3126
3960
|
case "FAILED": {
|
|
3127
3961
|
if (finishedDeployment.errorData) {
|
|
3128
|
-
|
|
3129
|
-
|
|
3130
|
-
|
|
3131
|
-
|
|
3962
|
+
if (finishedDeployment.errorData.name === "TaskMetadataParseError") {
|
|
3963
|
+
const errorJson = safeJsonParse(finishedDeployment.errorData.stack);
|
|
3964
|
+
if (errorJson) {
|
|
3965
|
+
const parsedError2 = TaskMetadataFailedToParseData.safeParse(errorJson);
|
|
3966
|
+
if (parsedError2.success) {
|
|
3967
|
+
deploymentSpinner.stop(`Deployment encountered an error. ${deploymentLink}`);
|
|
3968
|
+
logTaskMetadataParseError(parsedError2.data.zodIssues, parsedError2.data.tasks);
|
|
3969
|
+
await preExitTasks();
|
|
3970
|
+
throw new SkipLoggingError(
|
|
3971
|
+
`Deployment encountered an error: ${finishedDeployment.errorData.name}`
|
|
3972
|
+
);
|
|
3973
|
+
}
|
|
3974
|
+
}
|
|
3975
|
+
}
|
|
3976
|
+
const parsedError = finishedDeployment.errorData.stack ? parseBuildErrorStack(finishedDeployment.errorData) ?? finishedDeployment.errorData.message : finishedDeployment.errorData.message;
|
|
3977
|
+
if (typeof parsedError === "string") {
|
|
3978
|
+
deploymentSpinner.stop(`Deployment encountered an error. ${deploymentLink}`);
|
|
3979
|
+
logger.log(`${chalkError("X Error:")} ${parsedError}`);
|
|
3980
|
+
} else {
|
|
3981
|
+
deploymentSpinner.stop(`Deployment encountered an error. ${deploymentLink}`);
|
|
3982
|
+
logESMRequireError(parsedError, resolvedConfig);
|
|
3983
|
+
}
|
|
3984
|
+
await preExitTasks();
|
|
3132
3985
|
throw new SkipLoggingError(
|
|
3133
3986
|
`Deployment encountered an error: ${finishedDeployment.errorData.name}`
|
|
3134
3987
|
);
|
|
@@ -3136,62 +3989,132 @@ async function _deployCommand(dir, options) {
|
|
|
3136
3989
|
deploymentSpinner.stop(
|
|
3137
3990
|
`Deployment failed with an unknown error. Please contact eric@trigger.dev for help. ${deploymentLink}`
|
|
3138
3991
|
);
|
|
3992
|
+
await preExitTasks();
|
|
3139
3993
|
throw new SkipLoggingError("Deployment failed with an unknown error");
|
|
3140
3994
|
}
|
|
3141
3995
|
}
|
|
3142
3996
|
case "CANCELED": {
|
|
3143
3997
|
deploymentSpinner.stop(`Deployment was canceled. ${deploymentLink}`);
|
|
3998
|
+
await preExitTasks();
|
|
3144
3999
|
throw new SkipLoggingError("Deployment was canceled");
|
|
3145
4000
|
}
|
|
3146
4001
|
case "TIMED_OUT": {
|
|
3147
4002
|
deploymentSpinner.stop(`Deployment timed out. ${deploymentLink}`);
|
|
4003
|
+
await preExitTasks();
|
|
3148
4004
|
throw new SkipLoggingError("Deployment timed out");
|
|
3149
4005
|
}
|
|
3150
4006
|
}
|
|
3151
4007
|
}
|
|
3152
|
-
|
|
3153
|
-
|
|
3154
|
-
|
|
3155
|
-
|
|
3156
|
-
|
|
3157
|
-
|
|
3158
|
-
|
|
3159
|
-
|
|
3160
|
-
|
|
3161
|
-
|
|
3162
|
-
|
|
3163
|
-
|
|
3164
|
-
|
|
3165
|
-
|
|
3166
|
-
|
|
3167
|
-
|
|
3168
|
-
|
|
3169
|
-
|
|
3170
|
-
|
|
3171
|
-
|
|
3172
|
-
`${apiUrl}/projects/v3/${config.project}/environment-variables`
|
|
3173
|
-
)
|
|
3174
|
-
)}`
|
|
3175
|
-
);
|
|
3176
|
-
span.setAttributes({
|
|
3177
|
-
"envVars.missing": missingEnvironmentVariables
|
|
3178
|
-
});
|
|
3179
|
-
if (!options.ignoreEnvVarCheck) {
|
|
3180
|
-
throw new SkipLoggingError("Found missing environment variables");
|
|
3181
|
-
} else {
|
|
3182
|
-
span.end();
|
|
3183
|
-
return;
|
|
3184
|
-
}
|
|
3185
|
-
}
|
|
3186
|
-
environmentVariablesSpinner.stop(`Environment variable check passed`);
|
|
3187
|
-
}
|
|
3188
|
-
span.end();
|
|
3189
|
-
} catch (e) {
|
|
3190
|
-
recordSpanException4(span, e);
|
|
3191
|
-
span.end();
|
|
3192
|
-
throw e;
|
|
4008
|
+
function printErrors(errors) {
|
|
4009
|
+
for (const error of errors ?? []) {
|
|
4010
|
+
log5.error(`${chalkError("Error:")} ${error}`);
|
|
4011
|
+
}
|
|
4012
|
+
}
|
|
4013
|
+
function printWarnings(warnings) {
|
|
4014
|
+
for (const warning of warnings ?? []) {
|
|
4015
|
+
log5.warn(`${chalkWarning("Warning:")} ${warning}`);
|
|
4016
|
+
}
|
|
4017
|
+
}
|
|
4018
|
+
function checkLogsForWarnings(logs) {
|
|
4019
|
+
const warnings = [
|
|
4020
|
+
{
|
|
4021
|
+
regex: /prisma:warn We could not find your Prisma schema/,
|
|
4022
|
+
message: `Prisma generate failed to find the default schema. Did you include it in config.additionalFiles? ${terminalLink2(
|
|
4023
|
+
"Config docs",
|
|
4024
|
+
docs.config.prisma
|
|
4025
|
+
)}
|
|
4026
|
+
Custom schema paths require a postinstall script like this: \`prisma generate --schema=./custom/path/to/schema.prisma\``,
|
|
4027
|
+
shouldFail: true
|
|
3193
4028
|
}
|
|
3194
|
-
|
|
4029
|
+
];
|
|
4030
|
+
const errorMessages2 = [];
|
|
4031
|
+
const warningMessages = [];
|
|
4032
|
+
let shouldFail = false;
|
|
4033
|
+
for (const warning of warnings) {
|
|
4034
|
+
const matches = logs.match(warning.regex);
|
|
4035
|
+
if (!matches) {
|
|
4036
|
+
continue;
|
|
4037
|
+
}
|
|
4038
|
+
const message = getMessageFromTemplate(warning.message, matches.groups);
|
|
4039
|
+
if (warning.shouldFail) {
|
|
4040
|
+
shouldFail = true;
|
|
4041
|
+
errorMessages2.push(message);
|
|
4042
|
+
} else {
|
|
4043
|
+
warningMessages.push(message);
|
|
4044
|
+
}
|
|
4045
|
+
}
|
|
4046
|
+
if (shouldFail) {
|
|
4047
|
+
return {
|
|
4048
|
+
ok: false,
|
|
4049
|
+
summary: "Build succeeded with critical warnings. Will not proceed",
|
|
4050
|
+
warnings: warningMessages,
|
|
4051
|
+
errors: errorMessages2
|
|
4052
|
+
};
|
|
4053
|
+
}
|
|
4054
|
+
return {
|
|
4055
|
+
ok: true,
|
|
4056
|
+
warnings: warningMessages
|
|
4057
|
+
};
|
|
4058
|
+
}
|
|
4059
|
+
function checkLogsForErrors(logs) {
|
|
4060
|
+
const errors = [
|
|
4061
|
+
{
|
|
4062
|
+
regex: /Error: Provided --schema at (?<schema>.*) doesn't exist/,
|
|
4063
|
+
message: `Prisma generate failed to find the specified schema at "$schema".
|
|
4064
|
+
Did you include it in config.additionalFiles? ${terminalLink2(
|
|
4065
|
+
"Config docs",
|
|
4066
|
+
docs.config.prisma
|
|
4067
|
+
)}`
|
|
4068
|
+
},
|
|
4069
|
+
{
|
|
4070
|
+
regex: /sh: 1: (?<packageOrBinary>.*): not found/,
|
|
4071
|
+
message: `$packageOrBinary not found
|
|
4072
|
+
|
|
4073
|
+
If it's a package: Include it in ${terminalLink2(
|
|
4074
|
+
"config.additionalPackages",
|
|
4075
|
+
docs.config.prisma
|
|
4076
|
+
)}
|
|
4077
|
+
If it's a binary: Please ${terminalLink2(
|
|
4078
|
+
"get in touch",
|
|
4079
|
+
getInTouch
|
|
4080
|
+
)} and we'll see what we can do!`
|
|
4081
|
+
}
|
|
4082
|
+
];
|
|
4083
|
+
for (const error of errors) {
|
|
4084
|
+
const matches = logs.match(error.regex);
|
|
4085
|
+
if (!matches) {
|
|
4086
|
+
continue;
|
|
4087
|
+
}
|
|
4088
|
+
const message = getMessageFromTemplate(error.message, matches.groups);
|
|
4089
|
+
log5.error(`${chalkError("Error:")} ${message}`);
|
|
4090
|
+
break;
|
|
4091
|
+
}
|
|
4092
|
+
}
|
|
4093
|
+
function getMessageFromTemplate(template, replacer) {
|
|
4094
|
+
let message = template;
|
|
4095
|
+
if (replacer) {
|
|
4096
|
+
for (const [key, value] of Object.entries(replacer)) {
|
|
4097
|
+
message = message.replaceAll(`$${key}`, value);
|
|
4098
|
+
}
|
|
4099
|
+
}
|
|
4100
|
+
return message;
|
|
4101
|
+
}
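
The warning and error tables above embed $name placeholders that getMessageFromTemplate fills from the regex's named capture groups. A tiny illustration using the --schema pattern from checkLogsForErrors (the log line is made up):

  // Illustrative only: named capture groups feeding the $key templates above.
  const logs = "Error: Provided --schema at ./prisma/schema.prisma doesn't exist";
  const match = logs.match(/Error: Provided --schema at (?<schema>.*) doesn't exist/);
  let message = 'Prisma generate failed to find the specified schema at "$schema".';
  for (const [key, value] of Object.entries(match?.groups ?? {})) {
    message = message.replaceAll(`$${key}`, value);
  }
  // message now names ./prisma/schema.prisma instead of the placeholder
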
|
|
4102
|
+
async function saveLogs(shortCode, logs) {
|
|
4103
|
+
const logPath = join6(await createTempDir(), `build-${shortCode}.log`);
|
|
4104
|
+
await writeFile2(logPath, logs);
|
|
4105
|
+
return logPath;
|
|
4106
|
+
}
|
|
4107
|
+
async function failDeploy(shortCode, errorSummary, logs, deploymentSpinner, warnings, errors) {
|
|
4108
|
+
deploymentSpinner.stop(`Failed to deploy project`);
|
|
4109
|
+
if (logs.trim() !== "") {
|
|
4110
|
+
const logPath = await saveLogs(shortCode, logs);
|
|
4111
|
+
printWarnings(warnings);
|
|
4112
|
+
printErrors(errors);
|
|
4113
|
+
checkLogsForErrors(logs);
|
|
4114
|
+
outro5(`${chalkError("Error:")} ${errorSummary}. Full build logs have been saved to ${logPath}`);
|
|
4115
|
+
} else {
|
|
4116
|
+
outro5(`${chalkError("Error:")} ${errorSummary}.`);
|
|
4117
|
+
}
|
|
3195
4118
|
}
|
|
3196
4119
|
async function waitForDeploymentToFinish(deploymentId, client, timeoutInSeconds = 60) {
|
|
3197
4120
|
return tracer.startActiveSpan("waitForDeploymentToFinish", async (span) => {
|
|
@@ -3227,7 +4150,7 @@ async function waitForDeploymentToFinish(deploymentId, client, timeoutInSeconds
|
|
|
3227
4150
|
}
|
|
3228
4151
|
});
|
|
3229
4152
|
}
|
|
3230
|
-
async function buildAndPushImage(options) {
|
|
4153
|
+
async function buildAndPushImage(options, updater) {
|
|
3231
4154
|
return tracer.startActiveSpan("buildAndPushImage", async (span) => {
|
|
3232
4155
|
span.setAttributes({
|
|
3233
4156
|
"options.registryHost": options.registryHost,
|
|
@@ -3283,15 +4206,24 @@ async function buildAndPushImage(options) {
|
|
|
3283
4206
|
});
|
|
3284
4207
|
const errors = [];
|
|
3285
4208
|
try {
|
|
3286
|
-
await new Promise((res, rej) => {
|
|
4209
|
+
const processCode = await new Promise((res, rej) => {
|
|
3287
4210
|
childProcess2.stderr?.on("data", (data) => {
|
|
3288
|
-
const
|
|
3289
|
-
|
|
3290
|
-
|
|
4211
|
+
const text3 = data.toString();
|
|
4212
|
+
const lines = text3.split("\n").filter(Boolean);
|
|
4213
|
+
errors.push(...lines);
|
|
4214
|
+
logger.debug(text3);
|
|
3291
4215
|
});
|
|
3292
4216
|
childProcess2.on("error", (e) => rej(e));
|
|
3293
|
-
childProcess2.on("close", () => res());
|
|
4217
|
+
childProcess2.on("close", (code) => res(code));
|
|
3294
4218
|
});
|
|
4219
|
+
const logs = extractLogs(errors);
|
|
4220
|
+
if (processCode !== 0) {
|
|
4221
|
+
return {
|
|
4222
|
+
ok: false,
|
|
4223
|
+
error: `Error building image`,
|
|
4224
|
+
logs
|
|
4225
|
+
};
|
|
4226
|
+
}
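
The build wrapper above now resolves its Promise with the child process exit code and turns a non-zero code into a structured { ok: false, error, logs } result instead of continuing silently. A stripped-down sketch of that pattern using node:child_process directly (illustrative, not the CLI's own helper):

  // Illustrative only: surface the exit code and collected stderr together.
  import { spawn } from "node:child_process";

  function runAndCollect(cmd: string, args: string[]) {
    const child = spawn(cmd, args);
    const stderrLines: string[] = [];
    child.stderr.on("data", (d) =>
      stderrLines.push(...d.toString().split("\n").filter(Boolean))
    );
    return new Promise<{ code: number | null; logs: string }>((resolve, reject) => {
      child.on("error", reject);
      child.on("close", (code) => resolve({ code, logs: stderrLines.join("\n") }));
    });
  }
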
|
|
3295
4227
|
const digest = extractImageDigest(errors);
|
|
3296
4228
|
span.setAttributes({
|
|
3297
4229
|
"image.digest": digest
|
|
@@ -3300,6 +4232,7 @@ async function buildAndPushImage(options) {
|
|
|
3300
4232
|
return {
|
|
3301
4233
|
ok: true,
|
|
3302
4234
|
image: options.imageTag,
|
|
4235
|
+
logs,
|
|
3303
4236
|
digest
|
|
3304
4237
|
};
|
|
3305
4238
|
} catch (e) {
|
|
@@ -3307,7 +4240,8 @@ async function buildAndPushImage(options) {
|
|
|
3307
4240
|
span.end();
|
|
3308
4241
|
return {
|
|
3309
4242
|
ok: false,
|
|
3310
|
-
error: e instanceof Error ? e.message : JSON.stringify(e)
|
|
4243
|
+
error: e instanceof Error ? e.message : JSON.stringify(e),
|
|
4244
|
+
logs: extractLogs(errors)
|
|
3311
4245
|
};
|
|
3312
4246
|
}
|
|
3313
4247
|
});
|
|
@@ -3353,15 +4287,22 @@ async function buildAndPushSelfHostedImage(options) {
|
|
|
3353
4287
|
const errors = [];
|
|
3354
4288
|
let digest;
|
|
3355
4289
|
try {
|
|
3356
|
-
await new Promise((res, rej) => {
|
|
4290
|
+
const processCode = await new Promise((res, rej) => {
|
|
3357
4291
|
buildProcess.stderr?.on("data", (data) => {
|
|
3358
|
-
const
|
|
3359
|
-
errors.push(
|
|
3360
|
-
logger.debug(
|
|
4292
|
+
const text3 = data.toString();
|
|
4293
|
+
errors.push(text3);
|
|
4294
|
+
logger.debug(text3);
|
|
3361
4295
|
});
|
|
3362
4296
|
buildProcess.on("error", (e) => rej(e));
|
|
3363
|
-
buildProcess.on("close", () => res());
|
|
4297
|
+
buildProcess.on("close", (code) => res(code));
|
|
3364
4298
|
});
|
|
4299
|
+
if (processCode !== 0) {
|
|
4300
|
+
return {
|
|
4301
|
+
ok: false,
|
|
4302
|
+
error: "Error building image",
|
|
4303
|
+
logs: extractLogs(errors)
|
|
4304
|
+
};
|
|
4305
|
+
}
|
|
3365
4306
|
digest = extractImageDigest(errors);
|
|
3366
4307
|
span.setAttributes({
|
|
3367
4308
|
"image.digest": digest
|
|
@@ -3371,7 +4312,8 @@ async function buildAndPushSelfHostedImage(options) {
|
|
|
3371
4312
|
span.end();
|
|
3372
4313
|
return {
|
|
3373
4314
|
ok: false,
|
|
3374
|
-
error: e instanceof Error ? e.message : JSON.stringify(e)
|
|
4315
|
+
error: e instanceof Error ? e.message : JSON.stringify(e),
|
|
4316
|
+
logs: extractLogs(errors)
|
|
3375
4317
|
};
|
|
3376
4318
|
}
|
|
3377
4319
|
const pushArgs = ["push", imageRef].filter(Boolean);
|
|
@@ -3382,25 +4324,33 @@ async function buildAndPushSelfHostedImage(options) {
|
|
|
3382
4324
|
cwd: options.cwd
|
|
3383
4325
|
});
|
|
3384
4326
|
try {
|
|
3385
|
-
await new Promise((res, rej) => {
|
|
4327
|
+
const processCode = await new Promise((res, rej) => {
|
|
3386
4328
|
pushProcess.stdout?.on("data", (data) => {
|
|
3387
|
-
const
|
|
3388
|
-
logger.debug(
|
|
4329
|
+
const text3 = data.toString();
|
|
4330
|
+
logger.debug(text3);
|
|
3389
4331
|
});
|
|
3390
4332
|
pushProcess.stderr?.on("data", (data) => {
|
|
3391
|
-
const
|
|
3392
|
-
logger.debug(
|
|
4333
|
+
const text3 = data.toString();
|
|
4334
|
+
logger.debug(text3);
|
|
3393
4335
|
});
|
|
3394
4336
|
pushProcess.on("error", (e) => rej(e));
|
|
3395
|
-
pushProcess.on("close", () => res());
|
|
4337
|
+
pushProcess.on("close", (code) => res(code));
|
|
3396
4338
|
});
|
|
4339
|
+
if (processCode !== 0) {
|
|
4340
|
+
return {
|
|
4341
|
+
ok: false,
|
|
4342
|
+
error: "Error pushing image",
|
|
4343
|
+
logs: extractLogs(errors)
|
|
4344
|
+
};
|
|
4345
|
+
}
|
|
3397
4346
|
span.end();
|
|
3398
4347
|
} catch (e) {
|
|
3399
4348
|
recordSpanException4(span, e);
|
|
3400
4349
|
span.end();
|
|
3401
4350
|
return {
|
|
3402
4351
|
ok: false,
|
|
3403
|
-
error: e instanceof Error ? e.message : JSON.stringify(e)
|
|
4352
|
+
error: e instanceof Error ? e.message : JSON.stringify(e),
|
|
4353
|
+
logs: extractLogs(errors)
|
|
3404
4354
|
};
|
|
3405
4355
|
}
|
|
3406
4356
|
}
|
|
@@ -3408,21 +4358,25 @@ async function buildAndPushSelfHostedImage(options) {
|
|
|
3408
4358
|
return {
|
|
3409
4359
|
ok: true,
|
|
3410
4360
|
image: options.imageTag,
|
|
3411
|
-
digest
|
|
4361
|
+
digest,
|
|
4362
|
+
logs: extractLogs(errors)
|
|
3412
4363
|
};
|
|
3413
4364
|
});
|
|
3414
4365
|
}
|
|
3415
4366
|
function extractImageDigest(outputs) {
|
|
3416
|
-
const imageDigestRegex = /sha256:[a-f0-9]{64}/;
|
|
4367
|
+
const imageDigestRegex = /pushing manifest for .+(?<digest>sha256:[a-f0-9]{64})/;
|
|
3417
4368
|
for (const line of outputs) {
|
|
3418
|
-
|
|
3419
|
-
|
|
3420
|
-
|
|
3421
|
-
|
|
3422
|
-
}
|
|
4369
|
+
const imageDigestMatch = line.match(imageDigestRegex);
|
|
4370
|
+
const digest = imageDigestMatch?.groups?.digest;
|
|
4371
|
+
if (digest) {
|
|
4372
|
+
return digest;
|
|
3423
4373
|
}
|
|
3424
4374
|
}
|
|
3425
4375
|
}
|
|
4376
|
+
function extractLogs(outputs) {
|
|
4377
|
+
const cleanedOutputs = outputs.map((line) => line.trim()).filter((line) => line !== "");
|
|
4378
|
+
return cleanedOutputs.map((line) => line.trim()).join("\n");
|
|
4379
|
+
}
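
extractImageDigest now anchors on the "pushing manifest for" line and pulls the digest from a named capture group, rather than grabbing the first sha256-looking string anywhere in the output. A quick check against a line of the shape the regex expects (the line itself is made up):

  // Illustrative only: the named-group digest extraction used above.
  const line =
    "#18 pushing manifest for registry.example.com/proj:v1@sha256:" + "ab".repeat(32);
  const match = line.match(/pushing manifest for .+(?<digest>sha256:[a-f0-9]{64})/);
  console.log(match?.groups?.digest); // "sha256:abab..." (64 hex characters)
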
|
|
3426
4380
|
async function compileProject(config, options, configPath) {
|
|
3427
4381
|
return await tracer.startActiveSpan("compileProject", async (span) => {
|
|
3428
4382
|
try {
|
|
@@ -3432,25 +4386,25 @@ async function compileProject(config, options, configPath) {
|
|
|
3432
4386
|
throw new Error("Typecheck failed, aborting deployment");
|
|
3433
4387
|
}
|
|
3434
4388
|
}
|
|
3435
|
-
const compileSpinner =
|
|
4389
|
+
const compileSpinner = spinner();
|
|
3436
4390
|
compileSpinner.start(`Building project in ${config.projectDir}`);
|
|
3437
4391
|
const taskFiles = await gatherTaskFiles(config);
|
|
3438
4392
|
const workerFacade = readFileSync2(
|
|
3439
|
-
|
|
3440
|
-
"file://",
|
|
3441
|
-
""
|
|
3442
|
-
),
|
|
4393
|
+
join6(cliRootPath(), "workers", "prod", "worker-facade.js"),
|
|
3443
4394
|
"utf-8"
|
|
3444
4395
|
);
|
|
3445
|
-
const workerSetupPath =
|
|
3446
|
-
|
|
3447
|
-
|
|
3448
|
-
|
|
4396
|
+
const workerSetupPath = join6(cliRootPath(), "workers", "prod", "worker-setup.js");
|
|
4397
|
+
let workerContents = workerFacade.replace("__TASKS__", createTaskFileImports(taskFiles)).replace(
|
|
4398
|
+
"__WORKER_SETUP__",
|
|
4399
|
+
`import { tracingSDK } from "${escapeImportPath(workerSetupPath)}";`
|
|
4400
|
+
);
|
|
3449
4401
|
if (configPath) {
|
|
3450
4402
|
logger.debug("Importing project config from", { configPath });
|
|
3451
4403
|
workerContents = workerContents.replace(
|
|
3452
4404
|
"__IMPORTED_PROJECT_CONFIG__",
|
|
3453
|
-
`import * as importedConfigExports from "${
|
|
4405
|
+
`import * as importedConfigExports from "${escapeImportPath(
|
|
4406
|
+
configPath
|
|
4407
|
+
)}"; const importedConfig = importedConfigExports.config; const handleError = importedConfigExports.handleError;`
|
|
3454
4408
|
);
|
|
3455
4409
|
} else {
|
|
3456
4410
|
workerContents = workerContents.replace(
|
|
@@ -3486,7 +4440,12 @@ async function compileProject(config, options, configPath) {
|
|
|
3486
4440
|
config.dependenciesToBundle,
|
|
3487
4441
|
config.tsconfigPath
|
|
3488
4442
|
),
|
|
3489
|
-
workerSetupImportConfigPlugin(configPath)
|
|
4443
|
+
workerSetupImportConfigPlugin(configPath),
|
|
4444
|
+
esbuildDecorators2({
|
|
4445
|
+
tsconfig: config.tsconfigPath,
|
|
4446
|
+
tsx: true,
|
|
4447
|
+
force: false
|
|
4448
|
+
})
|
|
3490
4449
|
]
|
|
3491
4450
|
});
|
|
3492
4451
|
if (result.errors.length > 0) {
|
|
@@ -3499,13 +4458,10 @@ async function compileProject(config, options, configPath) {
|
|
|
3499
4458
|
throw new Error("Build failed, aborting deployment");
|
|
3500
4459
|
}
|
|
3501
4460
|
if (options.outputMetafile) {
|
|
3502
|
-
await writeJSONFile(
|
|
4461
|
+
await writeJSONFile(join6(options.outputMetafile, "worker.json"), result.metafile);
|
|
3503
4462
|
}
|
|
3504
4463
|
const entryPointContents = readFileSync2(
|
|
3505
|
-
|
|
3506
|
-
"file://",
|
|
3507
|
-
""
|
|
3508
|
-
),
|
|
4464
|
+
join6(cliRootPath(), "workers", "prod", "entry-point.js"),
|
|
3509
4465
|
"utf-8"
|
|
3510
4466
|
);
|
|
3511
4467
|
const entryPointResult = await build2({
|
|
@@ -3548,58 +4504,68 @@ async function compileProject(config, options, configPath) {
|
|
|
3548
4504
|
}
|
|
3549
4505
|
if (options.outputMetafile) {
|
|
3550
4506
|
await writeJSONFile(
|
|
3551
|
-
|
|
4507
|
+
join6(options.outputMetafile, "entry-point.json"),
|
|
3552
4508
|
entryPointResult.metafile
|
|
3553
4509
|
);
|
|
3554
4510
|
}
|
|
3555
4511
|
const tempDir = await createTempDir();
|
|
3556
4512
|
logger.debug(`Writing compiled files to ${tempDir}`);
|
|
3557
|
-
const metaOutput = result.metafile.outputs[
|
|
4513
|
+
const metaOutput = result.metafile.outputs[posix.join("out", "stdin.js")];
|
|
3558
4514
|
invariant(metaOutput, "Meta output for the result build is missing");
|
|
3559
|
-
const entryPointMetaOutput = entryPointResult.metafile.outputs[
|
|
4515
|
+
const entryPointMetaOutput = entryPointResult.metafile.outputs[posix.join("out", "stdin.js")];
|
|
3560
4516
|
invariant(entryPointMetaOutput, "Meta output for the entryPoint build is missing");
|
|
3561
4517
|
const workerOutputFile = result.outputFiles.find(
|
|
3562
|
-
(file) => file.path ===
|
|
4518
|
+
(file) => file.path === join6(config.projectDir, "out", "stdin.js")
|
|
3563
4519
|
);
|
|
3564
4520
|
invariant(workerOutputFile, "Output file for the result build is missing");
|
|
3565
4521
|
const workerSourcemapFile = result.outputFiles.find(
|
|
3566
|
-
(file) => file.path ===
|
|
4522
|
+
(file) => file.path === join6(config.projectDir, "out", "stdin.js.map")
|
|
3567
4523
|
);
|
|
3568
4524
|
invariant(workerSourcemapFile, "Sourcemap file for the result build is missing");
|
|
3569
4525
|
const entryPointOutputFile = entryPointResult.outputFiles.find(
|
|
3570
|
-
(file) => file.path ===
|
|
4526
|
+
(file) => file.path === join6(config.projectDir, "out", "stdin.js")
|
|
3571
4527
|
);
|
|
3572
4528
|
invariant(entryPointOutputFile, "Output file for the entryPoint build is missing");
|
|
3573
4529
|
await writeFile2(
|
|
3574
|
-
|
|
4530
|
+
join6(tempDir, "worker.js"),
|
|
3575
4531
|
`${workerOutputFile.text}
|
|
3576
4532
|
//# sourceMappingURL=worker.js.map`
|
|
3577
4533
|
);
|
|
3578
|
-
await writeFile2(
|
|
3579
|
-
await writeFile2(
|
|
4534
|
+
await writeFile2(join6(tempDir, "worker.js.map"), workerSourcemapFile.text);
|
|
4535
|
+
await writeFile2(join6(tempDir, "index.js"), entryPointOutputFile.text);
|
|
3580
4536
|
logger.debug("Getting the imports for the worker and entryPoint builds", {
|
|
3581
4537
|
workerImports: metaOutput.imports,
|
|
3582
4538
|
entryPointImports: entryPointMetaOutput.imports
|
|
3583
4539
|
});
|
|
3584
4540
|
const allImports = [...metaOutput.imports, ...entryPointMetaOutput.imports];
|
|
3585
|
-
const
|
|
3586
|
-
const dependencies2 = await gatherRequiredDependencies(
|
|
3587
|
-
allImports,
|
|
3588
|
-
config,
|
|
3589
|
-
externalPackageJson
|
|
3590
|
-
);
|
|
4541
|
+
const javascriptProject = new JavascriptProject(config.projectDir);
|
|
4542
|
+
const dependencies2 = await gatherRequiredDependencies(allImports, config, javascriptProject);
|
|
3591
4543
|
const packageJsonContents = {
|
|
3592
4544
|
name: "trigger-worker",
|
|
3593
4545
|
version: "0.0.0",
|
|
3594
4546
|
description: "",
|
|
3595
4547
|
dependencies: dependencies2,
|
|
3596
4548
|
scripts: {
|
|
3597
|
-
|
|
4549
|
+
...javascriptProject.scripts
|
|
3598
4550
|
}
|
|
3599
4551
|
};
|
|
3600
|
-
await writeJSONFile(
|
|
3601
|
-
await copyAdditionalFiles(config, tempDir);
|
|
3602
|
-
|
|
4552
|
+
await writeJSONFile(join6(tempDir, "package.json"), packageJsonContents);
|
|
4553
|
+
const copyResult = await copyAdditionalFiles(config, tempDir);
|
|
4554
|
+
if (!copyResult.ok) {
|
|
4555
|
+
compileSpinner.stop("Project built with warnings");
|
|
4556
|
+
log5.warn(
|
|
4557
|
+
`No additionalFiles matches for:
|
|
4558
|
+
|
|
4559
|
+
${copyResult.noMatches.map((glob) => `- "${glob}"`).join("\n")}
|
|
4560
|
+
|
|
4561
|
+
If this is unexpected you should check your ${terminalLink2(
|
|
4562
|
+
"glob patterns",
|
|
4563
|
+
"https://github.com/isaacs/node-glob?tab=readme-ov-file#glob-primer"
|
|
4564
|
+
)} are valid.`
|
|
4565
|
+
);
|
|
4566
|
+
} else {
|
|
4567
|
+
compileSpinner.stop("Project built successfully");
|
|
4568
|
+
}
|
|
3603
4569
|
const resolvingDependenciesResult = await resolveDependencies(
|
|
3604
4570
|
tempDir,
|
|
3605
4571
|
packageJsonContents,
|
|
@@ -3607,29 +4573,20 @@ async function compileProject(config, options, configPath) {
|
|
|
3607
4573
|
options
|
|
3608
4574
|
);
|
|
3609
4575
|
if (!resolvingDependenciesResult) {
|
|
3610
|
-
throw new
|
|
4576
|
+
throw new SkipLoggingError("Failed to resolve dependencies");
|
|
3611
4577
|
}
|
|
3612
|
-
const containerFilePath =
|
|
3613
|
-
|
|
3614
|
-
).href.replace("file://", "");
|
|
3615
|
-
await copyFile(containerFilePath, join4(tempDir, "Containerfile"));
|
|
4578
|
+
const containerFilePath = join6(cliRootPath(), "Containerfile.prod");
|
|
4579
|
+
await copyFile(containerFilePath, join6(tempDir, "Containerfile"));
|
|
3616
4580
|
const contentHasher = createHash("sha256");
|
|
3617
4581
|
contentHasher.update(Buffer.from(entryPointOutputFile.text));
|
|
3618
4582
|
contentHasher.update(Buffer.from(workerOutputFile.text));
|
|
3619
4583
|
contentHasher.update(Buffer.from(JSON.stringify(dependencies2)));
|
|
3620
4584
|
const contentHash = contentHasher.digest("hex");
|
|
3621
|
-
const workerSetupEnvVars = await findAllEnvironmentVariableReferencesInFile(workerSetupPath);
|
|
3622
|
-
const workerFacadeEnvVars = findAllEnvironmentVariableReferences(workerContents);
|
|
3623
|
-
const envVars = findAllEnvironmentVariableReferences(workerOutputFile.text);
|
|
3624
|
-
const finalEnvVars = envVars.filter(
|
|
3625
|
-
(envVar) => !workerFacadeEnvVars.includes(envVar) && !workerSetupEnvVars.includes(envVar)
|
|
3626
|
-
);
|
|
3627
4585
|
span.setAttributes({
|
|
3628
|
-
contentHash
|
|
3629
|
-
envVars: finalEnvVars
|
|
4586
|
+
contentHash
|
|
3630
4587
|
});
|
|
3631
4588
|
span.end();
|
|
3632
|
-
return { path: tempDir, contentHash
|
|
4589
|
+
return { path: tempDir, contentHash };
|
|
3633
4590
|
} catch (e) {
|
|
3634
4591
|
recordSpanException4(span, e);
|
|
3635
4592
|
span.end();
|
|
@@ -3639,13 +4596,13 @@ async function compileProject(config, options, configPath) {
|
|
|
3639
4596
|
}
|
|
3640
4597
|
async function resolveDependencies(projectDir, packageJsonContents, config, options) {
|
|
3641
4598
|
return await tracer.startActiveSpan("resolveDependencies", async (span) => {
|
|
3642
|
-
const resolvingDepsSpinner =
|
|
4599
|
+
const resolvingDepsSpinner = spinner();
|
|
3643
4600
|
resolvingDepsSpinner.start("Resolving dependencies");
|
|
3644
4601
|
const hasher = createHash("sha256");
|
|
3645
4602
|
hasher.update(JSON.stringify(packageJsonContents));
|
|
3646
4603
|
const digest = hasher.digest("hex").slice(0, 16);
|
|
3647
|
-
const cacheDir =
|
|
3648
|
-
const cachePath =
|
|
4604
|
+
const cacheDir = join6(config.projectDir, ".trigger", "cache");
|
|
4605
|
+
const cachePath = join6(cacheDir, `${digest}.json`);
|
|
3649
4606
|
span.setAttributes({
|
|
3650
4607
|
"packageJson.digest": digest,
|
|
3651
4608
|
"cache.path": cachePath,
|
|
@@ -3654,7 +4611,7 @@ async function resolveDependencies(projectDir, packageJsonContents, config, opti
|
|
|
3654
4611
|
try {
|
|
3655
4612
|
const cachedPackageLock = await readFile2(cachePath, "utf-8");
|
|
3656
4613
|
logger.debug(`Using cached package-lock.json for ${digest}`);
|
|
3657
|
-
await writeFile2(
|
|
4614
|
+
await writeFile2(join6(projectDir, "package-lock.json"), cachedPackageLock);
|
|
3658
4615
|
span.setAttributes({
|
|
3659
4616
|
"cache.hit": true
|
|
3660
4617
|
});
|
|
@@ -3677,21 +4634,44 @@ async function resolveDependencies(projectDir, packageJsonContents, config, opti
|
|
|
3677
4634
|
cwd: projectDir,
|
|
3678
4635
|
stdio: logger.loggerLevel === "debug" ? "inherit" : "pipe"
|
|
3679
4636
|
});
|
|
3680
|
-
const packageLockContents = await readFile2(
|
|
4637
|
+
const packageLockContents = await readFile2(join6(projectDir, "package-lock.json"), "utf-8");
|
|
3681
4638
|
logger.debug(`Writing package-lock.json to cache for ${digest}`);
|
|
3682
4639
|
await mkdir(cacheDir, { recursive: true });
|
|
3683
4640
|
await writeFile2(cachePath, packageLockContents);
|
|
3684
|
-
await writeFile2(
|
|
4641
|
+
await writeFile2(join6(projectDir, "package-lock.json"), packageLockContents);
|
|
3685
4642
|
span.end();
|
|
3686
4643
|
resolvingDepsSpinner.stop("Dependencies resolved");
|
|
3687
4644
|
return true;
|
|
3688
4645
|
} catch (installError) {
|
|
3689
|
-
logger.debug(`Failed to resolve dependencies: ${JSON.stringify(installError)}`);
|
|
3690
4646
|
recordSpanException4(span, installError);
|
|
3691
4647
|
span.end();
|
|
3692
|
-
|
|
3693
|
-
|
|
3694
|
-
|
|
4648
|
+
const parsedError = parseNpmInstallError(installError);
|
|
4649
|
+
if (typeof parsedError === "string") {
|
|
4650
|
+
resolvingDepsSpinner.stop(`Failed to resolve dependencies: ${parsedError}`);
|
|
4651
|
+
} else {
|
|
4652
|
+
switch (parsedError.type) {
|
|
4653
|
+
case "package-not-found-error": {
|
|
4654
|
+
resolvingDepsSpinner.stop(`Failed to resolve dependencies`);
|
|
4655
|
+
logger.log(
|
|
4656
|
+
`
|
|
4657
|
+
${chalkError("X Error:")} The package ${chalkPurple(
|
|
4658
|
+
parsedError.packageName
|
|
4659
|
+
)} could not be found in the npm registry.`
|
|
4660
|
+
);
|
|
4661
|
+
break;
|
|
4662
|
+
}
|
|
4663
|
+
case "no-matching-version-error": {
|
|
4664
|
+
resolvingDepsSpinner.stop(`Failed to resolve dependencies`);
|
|
4665
|
+
logger.log(
|
|
4666
|
+
`
|
|
4667
|
+
${chalkError("X Error:")} The package ${chalkPurple(
|
|
4668
|
+
parsedError.packageName
|
|
4669
|
+
)} could not resolve because the version doesn't exist`
|
|
4670
|
+
);
|
|
4671
|
+
break;
|
|
4672
|
+
}
|
|
4673
|
+
}
|
|
4674
|
+
}
|
|
3695
4675
|
return false;
|
|
3696
4676
|
}
|
|
3697
4677
|
}
|
|
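The error branch added above calls `parseNpmInstallError`, whose return shape is only implied by the way it is consumed in this diff; the following type is an inferred sketch, not the package's actual declaration:

// Inferred from the `typeof parsedError === "string"` check and the switch on `parsedError.type`
// in the hunk above: a plain string for generic failures, or a tagged object for the two
// npm registry cases that get dedicated error messages.
type ParsedNpmInstallError =
  | string
  | { type: "package-not-found-error"; packageName: string }
  | { type: "no-matching-version-error"; packageName: string };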
@@ -3700,7 +4680,7 @@ async function resolveDependencies(projectDir, packageJsonContents, config, opti
|
|
|
3700
4680
|
async function typecheckProject(config, options) {
|
|
3701
4681
|
return await tracer.startActiveSpan("typecheckProject", async (span) => {
|
|
3702
4682
|
try {
|
|
3703
|
-
const typecheckSpinner =
|
|
4683
|
+
const typecheckSpinner = spinner();
|
|
3704
4684
|
typecheckSpinner.start("Typechecking project");
|
|
3705
4685
|
const tscTypecheck = execa2("npm", ["exec", "tsc", "--", "--noEmit"], {
|
|
3706
4686
|
cwd: config.projectDir
|
|
@@ -3710,8 +4690,8 @@ async function typecheckProject(config, options) {
|
|
|
3710
4690
|
tscTypecheck.stdout?.on("data", (chunk) => stdouts.push(chunk.toString()));
|
|
3711
4691
|
tscTypecheck.stderr?.on("data", (chunk) => stderrs.push(chunk.toString()));
|
|
3712
4692
|
try {
|
|
3713
|
-
await new Promise((
|
|
3714
|
-
tscTypecheck.addListener("exit", (code) => code === 0 ?
|
|
4693
|
+
await new Promise((resolve5, reject) => {
|
|
4694
|
+
tscTypecheck.addListener("exit", (code) => code === 0 ? resolve5(code) : reject(code));
|
|
3715
4695
|
});
|
|
3716
4696
|
} catch (error) {
|
|
3717
4697
|
typecheckSpinner.stop(
|
|
@@ -3735,7 +4715,7 @@ async function typecheckProject(config, options) {
|
|
|
3735
4715
|
}
|
|
3736
4716
|
});
|
|
3737
4717
|
}
|
|
3738
|
-
async function gatherRequiredDependencies(imports, config,
|
|
4718
|
+
async function gatherRequiredDependencies(imports, config, project) {
|
|
3739
4719
|
const dependencies2 = {};
|
|
3740
4720
|
for (const file of imports) {
|
|
3741
4721
|
if (file.kind !== "require-call" && file.kind !== "dynamic-import" || !file.external) {
|
|
@@ -3745,7 +4725,7 @@ async function gatherRequiredDependencies(imports, config, projectPackageJson) {
|
|
|
3745
4725
|
if (dependencies2[packageName]) {
|
|
3746
4726
|
continue;
|
|
3747
4727
|
}
|
|
3748
|
-
const externalDependencyVersion = (
|
|
4728
|
+
const externalDependencyVersion = await project.resolve(packageName);
|
|
3749
4729
|
if (externalDependencyVersion) {
|
|
3750
4730
|
dependencies2[packageName] = stripWorkspaceFromVersion(externalDependencyVersion);
|
|
3751
4731
|
continue;
|
|
@@ -3765,16 +4745,17 @@ async function gatherRequiredDependencies(imports, config, projectPackageJson) {
|
|
|
3765
4745
|
dependencies2[packageParts.name] = packageParts.version;
|
|
3766
4746
|
continue;
|
|
3767
4747
|
} else {
|
|
3768
|
-
const externalDependencyVersion = {
|
|
3769
|
-
|
|
3770
|
-
|
|
3771
|
-
}[packageName];
|
|
4748
|
+
const externalDependencyVersion = await project.resolve(packageParts.name, {
|
|
4749
|
+
allowDev: true
|
|
4750
|
+
});
|
|
3772
4751
|
if (externalDependencyVersion) {
|
|
3773
4752
|
dependencies2[packageParts.name] = externalDependencyVersion;
|
|
3774
4753
|
continue;
|
|
3775
4754
|
} else {
|
|
3776
|
-
logger.
|
|
3777
|
-
|
|
4755
|
+
logger.log(
|
|
4756
|
+
`${chalkWarning("X Warning:")} Could not find version for package ${chalkPurple(
|
|
4757
|
+
packageName
|
|
4758
|
+
)}, add a version specifier to the package name (e.g. ${packageParts.name}@latest) or add it to your project's package.json`
|
|
3778
4759
|
);
|
|
3779
4760
|
}
|
|
3780
4761
|
}
|
|
@@ -3784,8 +4765,9 @@ async function gatherRequiredDependencies(imports, config, projectPackageJson) {
|
|
|
3784
4765
|
}
|
|
3785
4766
|
async function copyAdditionalFiles(config, tempDir) {
|
|
3786
4767
|
const additionalFiles = config.additionalFiles ?? [];
|
|
4768
|
+
const noMatches = [];
|
|
3787
4769
|
if (additionalFiles.length === 0) {
|
|
3788
|
-
return;
|
|
4770
|
+
return { ok: true };
|
|
3789
4771
|
}
|
|
3790
4772
|
return await tracer.startActiveSpan(
|
|
3791
4773
|
"copyAdditionalFiles",
|
|
@@ -3799,22 +4781,55 @@ async function copyAdditionalFiles(config, tempDir) {
|
|
|
3799
4781
|
logger.debug(`Copying files to ${tempDir}`, {
|
|
3800
4782
|
additionalFiles
|
|
3801
4783
|
});
|
|
3802
|
-
const
|
|
4784
|
+
const globOptions = {
|
|
3803
4785
|
withFileTypes: true,
|
|
3804
4786
|
ignore: ["node_modules"],
|
|
3805
4787
|
cwd: config.projectDir,
|
|
3806
4788
|
nodir: true
|
|
3807
|
-
}
|
|
3808
|
-
|
|
3809
|
-
|
|
3810
|
-
|
|
3811
|
-
|
|
3812
|
-
)
|
|
3813
|
-
|
|
3814
|
-
|
|
3815
|
-
|
|
4789
|
+
};
|
|
4790
|
+
const globs = [];
|
|
4791
|
+
let i = 0;
|
|
4792
|
+
for (const additionalFile of additionalFiles) {
|
|
4793
|
+
let glob;
|
|
4794
|
+
if (i === 0) {
|
|
4795
|
+
glob = new Glob(additionalFile, globOptions);
|
|
4796
|
+
} else {
|
|
4797
|
+
const previousGlob = globs[i - 1];
|
|
4798
|
+
if (!previousGlob) {
|
|
4799
|
+
logger.error("No previous glob, this shouldn't happen", { i, additionalFiles });
|
|
4800
|
+
continue;
|
|
4801
|
+
}
|
|
4802
|
+
glob = new Glob(additionalFile, previousGlob);
|
|
4803
|
+
}
|
|
4804
|
+
if (!(Symbol.asyncIterator in glob)) {
|
|
4805
|
+
logger.error("Glob should be an async iterator", { glob });
|
|
4806
|
+
throw new Error("Unrecoverable error while copying additional files");
|
|
4807
|
+
}
|
|
4808
|
+
let matches = 0;
|
|
4809
|
+
for await (const file of glob) {
|
|
4810
|
+
matches++;
|
|
4811
|
+
const pathInsideTempDir = relative3(config.projectDir, file.fullpath()).split(posix.sep).filter((p) => p !== "..").join(posix.sep);
|
|
4812
|
+
const relativeDestinationPath = join6(tempDir, pathInsideTempDir);
|
|
4813
|
+
logger.debug(`Copying file ${file.fullpath()} to ${relativeDestinationPath}`);
|
|
4814
|
+
await mkdir(dirname(relativeDestinationPath), { recursive: true });
|
|
4815
|
+
await copyFile(file.fullpath(), relativeDestinationPath);
|
|
4816
|
+
}
|
|
4817
|
+
if (matches === 0) {
|
|
4818
|
+
noMatches.push(additionalFile);
|
|
4819
|
+
}
|
|
4820
|
+
globs[i] = glob;
|
|
4821
|
+
i++;
|
|
3816
4822
|
}
|
|
3817
4823
|
span.end();
|
|
4824
|
+
if (noMatches.length > 0) {
|
|
4825
|
+
return {
|
|
4826
|
+
ok: false,
|
|
4827
|
+
noMatches
|
|
4828
|
+
};
|
|
4829
|
+
}
|
|
4830
|
+
return {
|
|
4831
|
+
ok: true
|
|
4832
|
+
};
|
|
3818
4833
|
} catch (error) {
|
|
3819
4834
|
recordSpanException4(span, error);
|
|
3820
4835
|
span.end();
|
|
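The glob loop above copies every match into the build's temp directory; a small sketch of the destination mapping it performs, using the same `relative`/`posix`/`join` calls that appear in the hunk (the helper name and parameters are illustrative):

import { join, posix, relative } from "node:path";

// Strip any ".." segments so matches outside projectDir cannot escape the temp directory,
// then re-join the remaining segments under tempDir to get the copy destination.
function destinationFor(projectDir: string, tempDir: string, matchedFullPath: string): string {
  const pathInsideTempDir = relative(projectDir, matchedFullPath)
    .split(posix.sep)
    .filter((segment) => segment !== "..")
    .join(posix.sep);
  return join(tempDir, pathInsideTempDir);
}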
@@ -3825,7 +4840,7 @@ async function copyAdditionalFiles(config, tempDir) {
|
|
|
3825
4840
|
}
|
|
3826
4841
|
async function ensureLoggedIntoDockerRegistry(registryHost, auth) {
|
|
3827
4842
|
const tmpDir = await createTempDir();
|
|
3828
|
-
const dockerConfigPath =
|
|
4843
|
+
const dockerConfigPath = join6(tmpDir, "config.json");
|
|
3829
4844
|
await writeJSONFile(dockerConfigPath, {
|
|
3830
4845
|
auths: {
|
|
3831
4846
|
[registryHost]: {
|
|
@@ -3836,43 +4851,20 @@ async function ensureLoggedIntoDockerRegistry(registryHost, auth) {
|
|
|
3836
4851
|
logger.debug(`Writing docker config to ${dockerConfigPath}`);
|
|
3837
4852
|
return tmpDir;
|
|
3838
4853
|
}
|
|
3839
|
-
async function findAllEnvironmentVariableReferencesInFile(filePath) {
|
|
3840
|
-
const fileContents = await readFile2(filePath, "utf-8");
|
|
3841
|
-
return findAllEnvironmentVariableReferences(fileContents);
|
|
3842
|
-
}
|
|
3843
|
-
var IGNORED_ENV_VARS = ["NODE_ENV", "SHELL", "HOME", "PWD", "LOGNAME", "USER", "PATH", "DEBUG"];
|
|
3844
|
-
function findAllEnvironmentVariableReferences(code) {
|
|
3845
|
-
const regex = /\bprocess\.env\.([a-zA-Z_][a-zA-Z0-9_]*)\b/g;
|
|
3846
|
-
const matches = code.matchAll(regex);
|
|
3847
|
-
const matchesArray = Array.from(matches, (match) => match[1]).filter(Boolean);
|
|
3848
|
-
const filteredMatches = matchesArray.filter((match) => !IGNORED_ENV_VARS.includes(match));
|
|
3849
|
-
return Array.from(new Set(filteredMatches));
|
|
3850
|
-
}
|
|
3851
|
-
function arrayToSentence(items) {
|
|
3852
|
-
if (items.length === 1 && typeof items[0] === "string") {
|
|
3853
|
-
return items[0];
|
|
3854
|
-
}
|
|
3855
|
-
if (items.length === 2) {
|
|
3856
|
-
return `${items[0]} and ${items[1]}`;
|
|
3857
|
-
}
|
|
3858
|
-
return `${items.slice(0, -1).join(", ")}, and ${items[items.length - 1]}`;
|
|
3859
|
-
}
|
|
3860
4854
|
|
|
3861
4855
|
// src/commands/dev.tsx
|
|
3862
4856
|
import {
|
|
3863
|
-
ZodMessageHandler as ZodMessageHandler2,
|
|
3864
|
-
ZodMessageSender as ZodMessageSender2,
|
|
3865
4857
|
clientWebsocketMessages,
|
|
3866
4858
|
detectDependencyVersion as detectDependencyVersion2,
|
|
3867
4859
|
serverWebsocketMessages
|
|
3868
4860
|
} from "@trigger.dev/core/v3";
|
|
4861
|
+
import { ZodMessageHandler as ZodMessageHandler2, ZodMessageSender as ZodMessageSender2 } from "@trigger.dev/core/v3/zodMessageHandler";
|
|
3869
4862
|
import { watch } from "chokidar";
|
|
3870
4863
|
import { context as context2 } from "esbuild";
|
|
3871
|
-
import { resolve as importResolve2 } from "import-meta-resolve";
|
|
3872
4864
|
import { render, useInput } from "ink";
|
|
3873
4865
|
import { createHash as createHash2 } from "node:crypto";
|
|
3874
4866
|
import fs7, { readFileSync as readFileSync3 } from "node:fs";
|
|
3875
|
-
import { basename, dirname as dirname3, join as
|
|
4867
|
+
import { basename as basename2, dirname as dirname3, join as join7, normalize } from "node:path";
|
|
3876
4868
|
import pDebounce from "p-debounce";
|
|
3877
4869
|
import { WebSocket } from "partysocket";
|
|
3878
4870
|
import React, { Suspense, useEffect } from "react";
|
|
@@ -3888,23 +4880,30 @@ var UncaughtExceptionError = class extends Error {
|
|
|
3888
4880
|
this.name = "UncaughtExceptionError";
|
|
3889
4881
|
}
|
|
3890
4882
|
};
|
|
4883
|
+
var TaskMetadataParseError = class extends Error {
|
|
4884
|
+
constructor(zodIssues, tasks) {
|
|
4885
|
+
super(`Failed to parse task metadata`);
|
|
4886
|
+
this.zodIssues = zodIssues;
|
|
4887
|
+
this.tasks = tasks;
|
|
4888
|
+
this.name = "TaskMetadataParseError";
|
|
4889
|
+
}
|
|
4890
|
+
};
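`TaskMetadataParseError` is thrown when the forked worker reports a `TASKS_FAILED_TO_PARSE` IPC message (handled later in this file); a minimal sketch of catching it, mirroring the `instanceof` check added to the dev command further down in this diff (both function parameters here are stand-ins, not package APIs):

async function buildWorkerSafely(
  initializeWorker: () => Promise<void>,
  logTaskMetadataParseError: (zodIssues: unknown[], tasks: unknown[]) => void
): Promise<void> {
  try {
    await initializeWorker();
  } catch (e) {
    if (e instanceof TaskMetadataParseError) {
      // zodIssues and tasks come straight from the TASKS_FAILED_TO_PARSE payload.
      logTaskMetadataParseError(e.zodIssues, e.tasks);
      return;
    }
    throw e;
  }
}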
|
|
3891
4891
|
|
|
3892
4892
|
// src/workers/dev/backgroundWorker.ts
|
|
3893
4893
|
import {
|
|
3894
4894
|
SemanticInternalAttributes,
|
|
3895
4895
|
TaskRunErrorCodes,
|
|
3896
|
-
ZodMessageHandler,
|
|
3897
|
-
ZodMessageSender,
|
|
3898
4896
|
childToWorkerMessages,
|
|
3899
4897
|
correctErrorStackTrace,
|
|
3900
4898
|
formatDurationMilliseconds,
|
|
3901
4899
|
workerToChildMessages
|
|
3902
4900
|
} from "@trigger.dev/core/v3";
|
|
4901
|
+
import { ZodMessageHandler, ZodMessageSender } from "@trigger.dev/core/v3/zodMessageHandler";
|
|
3903
4902
|
import dotenv from "dotenv";
|
|
3904
4903
|
import { Evt } from "evt";
|
|
3905
4904
|
import { fork } from "node:child_process";
|
|
3906
|
-
import { dirname as dirname2, resolve as
|
|
3907
|
-
import
|
|
4905
|
+
import { dirname as dirname2, resolve as resolve3 } from "node:path";
|
|
4906
|
+
import terminalLink3 from "terminal-link";
|
|
3908
4907
|
var BackgroundWorkerCoordinator = class {
|
|
3909
4908
|
constructor(baseURL) {
|
|
3910
4909
|
this.baseURL = baseURL;
|
|
@@ -3989,7 +4988,7 @@ var BackgroundWorkerCoordinator = class {
|
|
|
3989
4988
|
const logsUrl = `${this.baseURL}/runs/${execution.run.id}`;
|
|
3990
4989
|
const pipe = chalkGrey("|");
|
|
3991
4990
|
const bullet = chalkGrey("\u25CB");
|
|
3992
|
-
const link = chalkLink(
|
|
4991
|
+
const link = chalkLink(terminalLink3("View logs", logsUrl));
|
|
3993
4992
|
let timestampPrefix = chalkGrey(prettyPrintDate(payload.execution.attempt.startedAt));
|
|
3994
4993
|
const workerPrefix = chalkWorker(record.version);
|
|
3995
4994
|
const taskPrefix = chalkTask(execution.task.id);
|
|
@@ -4062,8 +5061,8 @@ var CancelledProcessError = class extends Error {
|
|
|
4062
5061
|
}
|
|
4063
5062
|
};
|
|
4064
5063
|
var BackgroundWorker = class {
|
|
4065
|
-
constructor(
|
|
4066
|
-
this.path =
|
|
5064
|
+
constructor(path7, params) {
|
|
5065
|
+
this.path = path7;
|
|
4067
5066
|
this.params = params;
|
|
4068
5067
|
}
|
|
4069
5068
|
_initialized = false;
|
|
@@ -4097,7 +5096,13 @@ var BackgroundWorker = class {
|
|
|
4097
5096
|
await installPackages(this.params.dependencies, { cwd: dirname2(this.path) });
|
|
4098
5097
|
}
|
|
4099
5098
|
let resolved = false;
|
|
4100
|
-
|
|
5099
|
+
const cwd = dirname2(this.path);
|
|
5100
|
+
const fullEnv = {
|
|
5101
|
+
...this.params.env,
|
|
5102
|
+
...this.#readEnvVars()
|
|
5103
|
+
};
|
|
5104
|
+
logger.debug("Initializing worker", { path: this.path, cwd, fullEnv });
|
|
5105
|
+
this.tasks = await new Promise((resolve5, reject) => {
|
|
4101
5106
|
const child = fork(this.path, {
|
|
4102
5107
|
stdio: [
|
|
4103
5108
|
/*stdin*/
|
|
@@ -4108,10 +5113,8 @@ var BackgroundWorker = class {
|
|
|
4108
5113
|
"pipe",
|
|
4109
5114
|
"ipc"
|
|
4110
5115
|
],
|
|
4111
|
-
|
|
4112
|
-
|
|
4113
|
-
...this.#readEnvVars()
|
|
4114
|
-
}
|
|
5116
|
+
cwd,
|
|
5117
|
+
env: fullEnv
|
|
4115
5118
|
});
|
|
4116
5119
|
const timeout = setTimeout(() => {
|
|
4117
5120
|
if (resolved) {
|
|
@@ -4126,13 +5129,18 @@ var BackgroundWorker = class {
|
|
|
4126
5129
|
if (message.type === "TASKS_READY" && !resolved) {
|
|
4127
5130
|
clearTimeout(timeout);
|
|
4128
5131
|
resolved = true;
|
|
4129
|
-
|
|
5132
|
+
resolve5(message.payload.tasks);
|
|
4130
5133
|
child.kill();
|
|
4131
5134
|
} else if (message.type === "UNCAUGHT_EXCEPTION") {
|
|
4132
5135
|
clearTimeout(timeout);
|
|
4133
5136
|
resolved = true;
|
|
4134
5137
|
reject(new UncaughtExceptionError(message.payload.error, message.payload.origin));
|
|
4135
5138
|
child.kill();
|
|
5139
|
+
} else if (message.type === "TASKS_FAILED_TO_PARSE") {
|
|
5140
|
+
clearTimeout(timeout);
|
|
5141
|
+
resolved = true;
|
|
5142
|
+
reject(new TaskMetadataParseError(message.payload.zodIssues, message.payload.tasks));
|
|
5143
|
+
child.kill();
|
|
4136
5144
|
}
|
|
4137
5145
|
});
|
|
4138
5146
|
child.on("exit", (code) => {
|
|
@@ -4142,6 +5150,9 @@ var BackgroundWorker = class {
|
|
|
4142
5150
|
reject(new Error(`Worker exited with code ${code}`));
|
|
4143
5151
|
}
|
|
4144
5152
|
});
|
|
5153
|
+
child.stdout?.on("data", (data) => {
|
|
5154
|
+
logger.log(data.toString());
|
|
5155
|
+
});
|
|
4145
5156
|
});
|
|
4146
5157
|
this._initialized = true;
|
|
4147
5158
|
}
|
|
@@ -4158,7 +5169,7 @@ var BackgroundWorker = class {
|
|
|
4158
5169
|
}
|
|
4159
5170
|
if (!this._taskRunProcesses.has(payload.execution.run.id)) {
|
|
4160
5171
|
const taskRunProcess = new TaskRunProcess(
|
|
4161
|
-
payload.execution
|
|
5172
|
+
payload.execution,
|
|
4162
5173
|
this.path,
|
|
4163
5174
|
{
|
|
4164
5175
|
...this.params.env,
|
|
@@ -4253,7 +5264,7 @@ var BackgroundWorker = class {
|
|
|
4253
5264
|
const result = {};
|
|
4254
5265
|
dotenv.config({
|
|
4255
5266
|
processEnv: result,
|
|
4256
|
-
path: [".env", ".env.local", ".env.development.local"].map((p) =>
|
|
5267
|
+
path: [".env", ".env.local", ".env.development.local"].map((p) => resolve3(process.cwd(), p))
|
|
4257
5268
|
});
|
|
4258
5269
|
process.env.TRIGGER_API_URL && (result.TRIGGER_API_URL = process.env.TRIGGER_API_URL);
|
|
4259
5270
|
delete result.TRIGGER_API_URL;
|
|
@@ -4268,9 +5279,9 @@ var BackgroundWorker = class {
|
|
|
4268
5279
|
}
|
|
4269
5280
|
};
|
|
4270
5281
|
var TaskRunProcess = class {
|
|
4271
|
-
constructor(
|
|
4272
|
-
this.
|
|
4273
|
-
this.path =
|
|
5282
|
+
constructor(execution, path7, env, metadata, worker) {
|
|
5283
|
+
this.execution = execution;
|
|
5284
|
+
this.path = path7;
|
|
4274
5285
|
this.env = env;
|
|
4275
5286
|
this.metadata = metadata;
|
|
4276
5287
|
this.worker = worker;
|
|
@@ -4300,9 +5311,20 @@ var TaskRunProcess = class {
|
|
|
4300
5311
|
await this.cleanup(true);
|
|
4301
5312
|
}
|
|
4302
5313
|
async initialize() {
|
|
4303
|
-
|
|
4304
|
-
|
|
4305
|
-
|
|
5314
|
+
const fullEnv = {
|
|
5315
|
+
...this.execution.run.isTest ? { TRIGGER_LOG_LEVEL: "debug" } : {},
|
|
5316
|
+
...this.env,
|
|
5317
|
+
OTEL_RESOURCE_ATTRIBUTES: JSON.stringify({
|
|
5318
|
+
[SemanticInternalAttributes.PROJECT_DIR]: this.worker.projectConfig.projectDir
|
|
5319
|
+
}),
|
|
5320
|
+
OTEL_EXPORTER_OTLP_COMPRESSION: "none",
|
|
5321
|
+
...this.worker.debugOtel ? { OTEL_LOG_LEVEL: "debug" } : {}
|
|
5322
|
+
};
|
|
5323
|
+
const cwd = dirname2(this.path);
|
|
5324
|
+
logger.debug(`[${this.execution.run.id}] initializing task run process`, {
|
|
5325
|
+
env: fullEnv,
|
|
5326
|
+
path: this.path,
|
|
5327
|
+
cwd
|
|
4306
5328
|
});
|
|
4307
5329
|
this._child = fork(this.path, {
|
|
4308
5330
|
stdio: [
|
|
@@ -4314,15 +5336,8 @@ var TaskRunProcess = class {
|
|
|
4314
5336
|
"pipe",
|
|
4315
5337
|
"ipc"
|
|
4316
5338
|
],
|
|
4317
|
-
cwd
|
|
4318
|
-
env:
|
|
4319
|
-
...this.env,
|
|
4320
|
-
OTEL_RESOURCE_ATTRIBUTES: JSON.stringify({
|
|
4321
|
-
[SemanticInternalAttributes.PROJECT_DIR]: this.worker.projectConfig.projectDir
|
|
4322
|
-
}),
|
|
4323
|
-
OTEL_EXPORTER_OTLP_COMPRESSION: "none",
|
|
4324
|
-
...this.worker.debugOtel ? { OTEL_LOG_LEVEL: "debug" } : {}
|
|
4325
|
-
},
|
|
5339
|
+
cwd,
|
|
5340
|
+
env: fullEnv,
|
|
4326
5341
|
execArgv: this.worker.debuggerOn ? ["--inspect-brk", "--trace-uncaught", "--no-warnings=ExperimentalWarning"] : ["--trace-uncaught", "--no-warnings=ExperimentalWarning"]
|
|
4327
5342
|
});
|
|
4328
5343
|
this._child.on("message", this.#handleMessage.bind(this));
|
|
@@ -4334,18 +5349,24 @@ var TaskRunProcess = class {
|
|
|
4334
5349
|
if (kill && this._isBeingKilled) {
|
|
4335
5350
|
return;
|
|
4336
5351
|
}
|
|
4337
|
-
logger.debug(`[${this.
|
|
5352
|
+
logger.debug(`[${this.execution.run.id}] cleaning up task run process`, { kill });
|
|
4338
5353
|
await this._sender.send("CLEANUP", {
|
|
4339
5354
|
flush: true,
|
|
4340
5355
|
kill
|
|
4341
5356
|
});
|
|
4342
5357
|
this._isBeingKilled = kill;
|
|
5358
|
+
setTimeout(() => {
|
|
5359
|
+
if (this._child && !this._child.killed) {
|
|
5360
|
+
logger.debug(`[${this.execution.run.id}] killing task run process after timeout`);
|
|
5361
|
+
this._child.kill();
|
|
5362
|
+
}
|
|
5363
|
+
}, 5e3);
|
|
4343
5364
|
}
|
|
4344
5365
|
async executeTaskRun(payload) {
|
|
4345
5366
|
let resolver;
|
|
4346
5367
|
let rejecter;
|
|
4347
|
-
const promise = new Promise((
|
|
4348
|
-
resolver =
|
|
5368
|
+
const promise = new Promise((resolve5, reject) => {
|
|
5369
|
+
resolver = resolve5;
|
|
4349
5370
|
rejecter = reject;
|
|
4350
5371
|
});
|
|
4351
5372
|
this._attemptStatuses.set(payload.execution.attempt.id, "PENDING");
|
|
@@ -4365,10 +5386,13 @@ var TaskRunProcess = class {
|
|
|
4365
5386
|
if (!completion.ok && typeof completion.retry !== "undefined") {
|
|
4366
5387
|
return;
|
|
4367
5388
|
}
|
|
4368
|
-
if (execution.run.id === this.
|
|
5389
|
+
if (execution.run.id === this.execution.run.id) {
|
|
4369
5390
|
return;
|
|
4370
5391
|
}
|
|
4371
|
-
logger.debug(`[${this.
|
|
5392
|
+
logger.debug(`[${this.execution.run.id}] task run completed notification`, {
|
|
5393
|
+
completion,
|
|
5394
|
+
execution
|
|
5395
|
+
});
|
|
4372
5396
|
this._sender.send("TASK_RUN_COMPLETED_NOTIFICATION", {
|
|
4373
5397
|
completion,
|
|
4374
5398
|
execution
|
|
@@ -4393,6 +5417,7 @@ var TaskRunProcess = class {
|
|
|
4393
5417
|
break;
|
|
4394
5418
|
}
|
|
4395
5419
|
case "READY_TO_DISPOSE": {
|
|
5420
|
+
logger.debug(`[${this.execution.run.id}] task run process is ready to dispose`);
|
|
4396
5421
|
this.#kill();
|
|
4397
5422
|
break;
|
|
4398
5423
|
}
|
|
@@ -4406,7 +5431,7 @@ var TaskRunProcess = class {
|
|
|
4406
5431
|
}
|
|
4407
5432
|
}
|
|
4408
5433
|
async #handleExit(code) {
|
|
4409
|
-
logger.debug(`[${this.
|
|
5434
|
+
logger.debug(`[${this.execution.run.id}] task run process exiting`, { code });
|
|
4410
5435
|
for (const [id, status] of this._attemptStatuses.entries()) {
|
|
4411
5436
|
if (status === "PENDING") {
|
|
4412
5437
|
this._attemptStatuses.set(id, "REJECTED");
|
|
@@ -4428,10 +5453,14 @@ var TaskRunProcess = class {
|
|
|
4428
5453
|
}
|
|
4429
5454
|
#handleLog(data) {
|
|
4430
5455
|
if (!this._currentExecution) {
|
|
5456
|
+
logger.log(`${chalkGrey("\u25CB")} ${chalkGrey(prettyPrintDate(/* @__PURE__ */ new Date()))} ${data.toString()}`);
|
|
4431
5457
|
return;
|
|
4432
5458
|
}
|
|
5459
|
+
const runId = chalkRun(
|
|
5460
|
+
`${this._currentExecution.run.id}.${this._currentExecution.attempt.number}`
|
|
5461
|
+
);
|
|
4433
5462
|
logger.log(
|
|
4434
|
-
|
|
5463
|
+
`${chalkGrey("\u25CB")} ${chalkGrey(prettyPrintDate(/* @__PURE__ */ new Date()))} ${runId} ${data.toString()}`
|
|
4435
5464
|
);
|
|
4436
5465
|
}
|
|
4437
5466
|
#handleStdErr(data) {
|
|
@@ -4439,63 +5468,106 @@ var TaskRunProcess = class {
|
|
|
4439
5468
|
return;
|
|
4440
5469
|
}
|
|
4441
5470
|
if (!this._currentExecution) {
|
|
4442
|
-
logger.
|
|
5471
|
+
logger.log(`${chalkError("\u25CB")} ${chalkGrey(prettyPrintDate(/* @__PURE__ */ new Date()))} ${data.toString()}`);
|
|
4443
5472
|
return;
|
|
4444
5473
|
}
|
|
4445
|
-
|
|
4446
|
-
|
|
5474
|
+
const runId = chalkRun(
|
|
5475
|
+
`${this._currentExecution.run.id}.${this._currentExecution.attempt.number}`
|
|
5476
|
+
);
|
|
5477
|
+
logger.log(
|
|
5478
|
+
`${chalkError("\u25CB")} ${chalkGrey(prettyPrintDate(/* @__PURE__ */ new Date()))} ${runId} ${data.toString()}`
|
|
4447
5479
|
);
|
|
4448
5480
|
}
|
|
4449
5481
|
#kill() {
|
|
4450
5482
|
if (this._child && !this._child.killed) {
|
|
5483
|
+
logger.debug(`[${this.execution.run.id}] killing task run process`);
|
|
4451
5484
|
this._child?.kill();
|
|
4452
5485
|
}
|
|
4453
5486
|
}
|
|
4454
5487
|
};
|
|
4455
5488
|
|
|
5489
|
+
// src/utilities/runtimeCheck.ts
|
|
5490
|
+
function runtimeCheck(minimumMajor, minimumMinor) {
|
|
5491
|
+
if (typeof process === "undefined") {
|
|
5492
|
+
throw "The dev CLI can only be run in a Node.js compatible environment";
|
|
5493
|
+
}
|
|
5494
|
+
const [major = 0, minor = 0] = process.versions.node.split(".").map(Number);
|
|
5495
|
+
const isBun = typeof process.versions.bun === "string";
|
|
5496
|
+
if (major < minimumMajor || major === minimumMajor && minor < minimumMinor) {
|
|
5497
|
+
if (isBun) {
|
|
5498
|
+
throw `The dev CLI requires at least Node.js ${minimumMajor}.${minimumMinor}. You are running Bun ${process.versions.bun}, which is compatible with Node.js ${process.versions.node}`;
|
|
5499
|
+
} else {
|
|
5500
|
+
throw `The dev CLI requires at least Node.js ${minimumMajor}.${minimumMinor}. You are running Node.js ${process.versions.node}`;
|
|
5501
|
+
}
|
|
5502
|
+
}
|
|
5503
|
+
logger.debug(
|
|
5504
|
+
`Node.js version: ${process.versions.node}${isBun ? ` (Bun ${process.versions.bun})` : ""}`
|
|
5505
|
+
);
|
|
5506
|
+
}
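The `devCommand` changes later in this diff wrap this helper in a try/catch and turn the thrown string into a CLI error; a condensed sketch of that flow (the 18.16 minimum matches the `MINIMUM_NODE_MAJOR`/`MINIMUM_NODE_MINOR` constants added below, and `console.error` stands in for the CLI's chalk-formatted logger):

try {
  // Throws a plain string when the runtime is older than Node.js 18.16 or is not Node-compatible.
  runtimeCheck(18, 16);
} catch (message) {
  console.error(`X Error: ${message}`);
  process.exitCode = 1;
}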
|
|
5507
|
+
|
|
4456
5508
|
// src/commands/dev.tsx
|
|
5509
|
+
import { findUp as findUp3, pathExists as pathExists2 } from "find-up";
|
|
5510
|
+
import { esbuildDecorators as esbuildDecorators3 } from "@anatine/esbuild-decorators";
|
|
4457
5511
|
var apiClient;
|
|
4458
5512
|
var DevCommandOptions = CommonCommandOptions.extend({
|
|
4459
5513
|
debugger: z5.boolean().default(false),
|
|
4460
5514
|
debugOtel: z5.boolean().default(false),
|
|
4461
5515
|
config: z5.string().optional(),
|
|
4462
|
-
projectRef: z5.string().optional()
|
|
5516
|
+
projectRef: z5.string().optional(),
|
|
5517
|
+
skipUpdateCheck: z5.boolean().default(false)
|
|
4463
5518
|
});
|
|
4464
5519
|
function configureDevCommand(program2) {
|
|
4465
5520
|
return commonOptions(
|
|
4466
|
-
program2.command("dev").description("Run your Trigger.dev tasks locally").argument("[path]", "The path to the project", ".").option("-c, --config <config file>", "The name of the config file, found at [path]").option(
|
|
5521
|
+
program2.command("dev").description("Run your Trigger.dev tasks locally").argument("[path]", "The path to the project", ".").option("-c, --config <config file>", "The name of the config file, found at [path].").option(
|
|
4467
5522
|
"-p, --project-ref <project ref>",
|
|
4468
5523
|
"The project ref. Required if there is no config file."
|
|
4469
|
-
).option("--debugger", "Enable the debugger").option("--debug-otel", "Enable OpenTelemetry debugging")
|
|
4470
|
-
).action(async (
|
|
5524
|
+
).option("--debugger", "Enable the debugger").option("--debug-otel", "Enable OpenTelemetry debugging").option("--skip-update-check", "Skip checking for @trigger.dev package updates")
|
|
5525
|
+
).action(async (path7, options) => {
|
|
4471
5526
|
wrapCommandAction("dev", DevCommandOptions, options, async (opts) => {
|
|
4472
|
-
await devCommand(
|
|
5527
|
+
await devCommand(path7, opts);
|
|
4473
5528
|
});
|
|
4474
5529
|
});
|
|
4475
5530
|
}
|
|
5531
|
+
var MINIMUM_NODE_MAJOR = 18;
|
|
5532
|
+
var MINIMUM_NODE_MINOR = 16;
|
|
4476
5533
|
async function devCommand(dir, options) {
|
|
5534
|
+
try {
|
|
5535
|
+
runtimeCheck(MINIMUM_NODE_MAJOR, MINIMUM_NODE_MINOR);
|
|
5536
|
+
} catch (e) {
|
|
5537
|
+
logger.log(`${chalkError("X Error:")} ${e}`);
|
|
5538
|
+
process.exitCode = 1;
|
|
5539
|
+
return;
|
|
5540
|
+
}
|
|
4477
5541
|
const authorization = await isLoggedIn(options.profile);
|
|
4478
5542
|
if (!authorization.ok) {
|
|
4479
5543
|
if (authorization.error === "fetch failed") {
|
|
4480
|
-
logger.
|
|
5544
|
+
logger.log(
|
|
5545
|
+
`${chalkError(
|
|
5546
|
+
"X Error:"
|
|
5547
|
+
)} Connecting to the server failed. Please check your internet connection or contact eric@trigger.dev for help.`
|
|
5548
|
+
);
|
|
4481
5549
|
} else {
|
|
4482
|
-
logger.
|
|
5550
|
+
logger.log(`${chalkError("X Error:")} You must login first. Use the \`login\` CLI command.`);
|
|
4483
5551
|
}
|
|
4484
5552
|
process.exitCode = 1;
|
|
4485
5553
|
return;
|
|
4486
5554
|
}
|
|
4487
|
-
const devInstance = await startDev(dir, options, authorization.auth);
|
|
5555
|
+
const devInstance = await startDev(dir, options, authorization.auth, authorization.dashboardUrl);
|
|
4488
5556
|
const { waitUntilExit } = devInstance.devReactElement;
|
|
4489
5557
|
await waitUntilExit();
|
|
4490
5558
|
}
|
|
4491
|
-
async function startDev(dir, options, authorization) {
|
|
5559
|
+
async function startDev(dir, options, authorization, dashboardUrl) {
|
|
4492
5560
|
let rerender;
|
|
4493
5561
|
try {
|
|
4494
5562
|
if (options.logLevel) {
|
|
4495
5563
|
logger.loggerLevel = options.logLevel;
|
|
4496
5564
|
}
|
|
4497
5565
|
await printStandloneInitialBanner(true);
|
|
4498
|
-
|
|
5566
|
+
let displayedUpdateMessage = false;
|
|
5567
|
+
if (!options.skipUpdateCheck) {
|
|
5568
|
+
displayedUpdateMessage = await updateTriggerPackages(dir, { ...options }, true, true);
|
|
5569
|
+
}
|
|
5570
|
+
printDevBanner(displayedUpdateMessage);
|
|
4499
5571
|
logger.debug("Starting dev session", { dir, options, authorization });
|
|
4500
5572
|
let config = await readConfig(dir, {
|
|
4501
5573
|
projectRef: options.projectRef,
|
|
@@ -4526,6 +5598,7 @@ async function startDev(dir, options, authorization) {
|
|
|
4526
5598
|
return /* @__PURE__ */ React.createElement(
|
|
4527
5599
|
DevUI,
|
|
4528
5600
|
{
|
|
5601
|
+
dashboardUrl,
|
|
4529
5602
|
config: configParam,
|
|
4530
5603
|
apiUrl,
|
|
4531
5604
|
apiKey: devEnv.data.apiKey,
|
|
@@ -4557,6 +5630,7 @@ async function startDev(dir, options, authorization) {
|
|
|
4557
5630
|
}
|
|
4558
5631
|
function useDev({
|
|
4559
5632
|
config,
|
|
5633
|
+
dashboardUrl,
|
|
4560
5634
|
apiUrl,
|
|
4561
5635
|
apiKey,
|
|
4562
5636
|
environmentClient,
|
|
@@ -4586,7 +5660,7 @@ function useDev({
|
|
|
4586
5660
|
}
|
|
4587
5661
|
});
|
|
4588
5662
|
const backgroundWorkerCoordinator = new BackgroundWorkerCoordinator(
|
|
4589
|
-
`${
|
|
5663
|
+
`${dashboardUrl}/projects/v3/${config.project}`
|
|
4590
5664
|
);
|
|
4591
5665
|
websocket.addEventListener("open", async (event) => {
|
|
4592
5666
|
});
|
|
@@ -4658,22 +5732,21 @@ function useDev({
|
|
|
4658
5732
|
}
|
|
4659
5733
|
let latestWorkerContentHash;
|
|
4660
5734
|
const taskFiles = await gatherTaskFiles(config);
|
|
4661
|
-
const
|
|
4662
|
-
|
|
4663
|
-
|
|
4664
|
-
|
|
4665
|
-
|
|
4666
|
-
"
|
|
5735
|
+
const workerFacadePath = join7(cliRootPath(), "workers", "dev", "worker-facade.js");
|
|
5736
|
+
const workerFacade = readFileSync3(workerFacadePath, "utf-8");
|
|
5737
|
+
const workerSetupPath = join7(cliRootPath(), "workers", "dev", "worker-setup.js");
|
|
5738
|
+
let entryPointContents = workerFacade.replace("__TASKS__", createTaskFileImports(taskFiles)).replace(
|
|
5739
|
+
"__WORKER_SETUP__",
|
|
5740
|
+
`import { tracingSDK, sender } from "${escapeImportPath(workerSetupPath)}";`
|
|
4667
5741
|
);
|
|
4668
|
-
const workerSetupPath = new URL(
|
|
4669
|
-
importResolve2("./workers/dev/worker-setup.js", import.meta.url)
|
|
4670
|
-
).href.replace("file://", "");
|
|
4671
|
-
let entryPointContents = workerFacade.replace("__TASKS__", createTaskFileImports(taskFiles)).replace("__WORKER_SETUP__", `import { tracingSDK, sender } from "${workerSetupPath}";`);
|
|
4672
5742
|
if (configPath) {
|
|
5743
|
+
configPath = normalize(configPath);
|
|
4673
5744
|
logger.debug("Importing project config from", { configPath });
|
|
4674
5745
|
entryPointContents = entryPointContents.replace(
|
|
4675
5746
|
"__IMPORTED_PROJECT_CONFIG__",
|
|
4676
|
-
`import * as importedConfigExports from "${
|
|
5747
|
+
`import * as importedConfigExports from "${escapeImportPath(
|
|
5748
|
+
configPath
|
|
5749
|
+
)}"; const importedConfig = importedConfigExports.config; const handleError = importedConfigExports.handleError;`
|
|
4677
5750
|
);
|
|
4678
5751
|
} else {
|
|
4679
5752
|
entryPointContents = entryPointContents.replace(
|
|
@@ -4706,12 +5779,18 @@ function useDev({
|
|
|
4706
5779
|
__PROJECT_CONFIG__: JSON.stringify(config)
|
|
4707
5780
|
},
|
|
4708
5781
|
plugins: [
|
|
5782
|
+
bundleTriggerDevCore("workerFacade", config.tsconfigPath),
|
|
4709
5783
|
bundleDependenciesPlugin(
|
|
4710
5784
|
"workerFacade",
|
|
4711
5785
|
(config.dependenciesToBundle ?? []).concat([/^@trigger.dev/]),
|
|
4712
5786
|
config.tsconfigPath
|
|
4713
5787
|
),
|
|
4714
5788
|
workerSetupImportConfigPlugin(configPath),
|
|
5789
|
+
esbuildDecorators3({
|
|
5790
|
+
tsconfig: config.tsconfigPath,
|
|
5791
|
+
tsx: true,
|
|
5792
|
+
force: false
|
|
5793
|
+
}),
|
|
4715
5794
|
{
|
|
4716
5795
|
name: "trigger.dev v3",
|
|
4717
5796
|
setup(build3) {
|
|
@@ -4725,19 +5804,19 @@ function useDev({
|
|
|
4725
5804
|
if (!firstBuild) {
|
|
4726
5805
|
logger.log(chalkGrey("\u25CB Building background worker\u2026"));
|
|
4727
5806
|
}
|
|
4728
|
-
const metaOutputKey =
|
|
5807
|
+
const metaOutputKey = join7("out", `stdin.js`).replace(/\\/g, "/");
|
|
4729
5808
|
const metaOutput = result.metafile.outputs[metaOutputKey];
|
|
4730
5809
|
if (!metaOutput) {
|
|
4731
5810
|
throw new Error(`Could not find metafile`);
|
|
4732
5811
|
}
|
|
4733
|
-
const outputFileKey =
|
|
5812
|
+
const outputFileKey = join7(config.projectDir, metaOutputKey);
|
|
4734
5813
|
const outputFile = result.outputFiles.find((file) => file.path === outputFileKey);
|
|
4735
5814
|
if (!outputFile) {
|
|
4736
5815
|
throw new Error(
|
|
4737
5816
|
`Could not find output file for entry point ${metaOutput.entryPoint}`
|
|
4738
5817
|
);
|
|
4739
5818
|
}
|
|
4740
|
-
const sourceMapFileKey =
|
|
5819
|
+
const sourceMapFileKey = join7(config.projectDir, `${metaOutputKey}.map`);
|
|
4741
5820
|
const sourceMapFile = result.outputFiles.find(
|
|
4742
5821
|
(file) => file.path === sourceMapFileKey
|
|
4743
5822
|
);
|
|
@@ -4748,10 +5827,10 @@ function useDev({
|
|
|
4748
5827
|
logger.log(chalkGrey("\u25CB No changes detected, skipping build\u2026"));
|
|
4749
5828
|
return;
|
|
4750
5829
|
}
|
|
4751
|
-
const fullPath =
|
|
5830
|
+
const fullPath = join7(config.projectDir, ".trigger", `${contentHash}.js`);
|
|
4752
5831
|
const sourceMapPath = `${fullPath}.map`;
|
|
4753
5832
|
const outputFileWithSourceMap = `${outputFile.text}
|
|
4754
|
-
//# sourceMappingURL=${
|
|
5833
|
+
//# sourceMappingURL=${basename2(sourceMapPath)}`;
|
|
4755
5834
|
await fs7.promises.mkdir(dirname3(fullPath), { recursive: true });
|
|
4756
5835
|
await fs7.promises.writeFile(fullPath, outputFileWithSourceMap);
|
|
4757
5836
|
logger.debug(`Wrote background worker to ${fullPath}`);
|
|
@@ -4761,7 +5840,7 @@ function useDev({
|
|
|
4761
5840
|
await fs7.promises.writeFile(sourceMapPath2, sourceMapFile.text);
|
|
4762
5841
|
}
|
|
4763
5842
|
const environmentVariablesResponse = await environmentClient.getEnvironmentVariables(config.project);
|
|
4764
|
-
const processEnv = gatherProcessEnv();
|
|
5843
|
+
const processEnv = await gatherProcessEnv();
|
|
4765
5844
|
const backgroundWorker = new BackgroundWorker(fullPath, {
|
|
4766
5845
|
projectConfig: config,
|
|
4767
5846
|
dependencies: dependencies2,
|
|
@@ -4779,8 +5858,15 @@ function useDev({
|
|
|
4779
5858
|
latestWorkerContentHash = contentHash;
|
|
4780
5859
|
let packageVersion;
|
|
4781
5860
|
const taskResources = [];
|
|
4782
|
-
if (!backgroundWorker.tasks) {
|
|
4783
|
-
|
|
5861
|
+
if (!backgroundWorker.tasks || backgroundWorker.tasks.length === 0) {
|
|
5862
|
+
logger.log(
|
|
5863
|
+
`${chalkError(
|
|
5864
|
+
"X Error:"
|
|
5865
|
+
)} Worker failed to build: no tasks found. Searched in ${config.triggerDirectories.join(
|
|
5866
|
+
", "
|
|
5867
|
+
)}`
|
|
5868
|
+
);
|
|
5869
|
+
return;
|
|
4784
5870
|
}
|
|
4785
5871
|
for (const task of backgroundWorker.tasks) {
|
|
4786
5872
|
taskResources.push(task);
|
|
@@ -4799,6 +5885,9 @@ function useDev({
|
|
|
4799
5885
|
);
|
|
4800
5886
|
return;
|
|
4801
5887
|
}
|
|
5888
|
+
logger.debug("Creating background worker with tasks", {
|
|
5889
|
+
tasks: taskResources
|
|
5890
|
+
});
|
|
4802
5891
|
const backgroundWorkerBody = {
|
|
4803
5892
|
localOnly: true,
|
|
4804
5893
|
metadata: {
|
|
@@ -4829,17 +5918,52 @@ function useDev({
|
|
|
4829
5918
|
backgroundWorker
|
|
4830
5919
|
);
|
|
4831
5920
|
} catch (e) {
|
|
4832
|
-
if (e instanceof
|
|
5921
|
+
if (e instanceof TaskMetadataParseError) {
|
|
5922
|
+
logTaskMetadataParseError(e.zodIssues, e.tasks);
|
|
5923
|
+
return;
|
|
5924
|
+
} else if (e instanceof UncaughtExceptionError) {
|
|
5925
|
+
const parsedBuildError = parseBuildErrorStack(e.originalError);
|
|
5926
|
+
if (parsedBuildError && typeof parsedBuildError !== "string") {
|
|
5927
|
+
logESMRequireError(
|
|
5928
|
+
parsedBuildError,
|
|
5929
|
+
configPath ? { status: "file", path: configPath, config } : { status: "in-memory", config }
|
|
5930
|
+
);
|
|
5931
|
+
return;
|
|
5932
|
+
} else {
|
|
5933
|
+
}
|
|
4833
5934
|
if (e.originalError.stack) {
|
|
4834
|
-
logger.
|
|
5935
|
+
logger.log(
|
|
5936
|
+
`${chalkError("X Error:")} Worker failed to start`,
|
|
5937
|
+
e.originalError.stack
|
|
5938
|
+
);
|
|
4835
5939
|
}
|
|
4836
5940
|
return;
|
|
4837
5941
|
}
|
|
4838
|
-
|
|
4839
|
-
|
|
4840
|
-
|
|
5942
|
+
const parsedError = parseNpmInstallError(e);
|
|
5943
|
+
if (typeof parsedError === "string") {
|
|
5944
|
+
logger.log(`${chalkError("X Error:")} ${parsedError}`);
|
|
5945
|
+
} else {
|
|
5946
|
+
switch (parsedError.type) {
|
|
5947
|
+
case "package-not-found-error": {
|
|
5948
|
+
logger.log(
|
|
5949
|
+
`
|
|
5950
|
+
${chalkError("X Error:")} The package ${chalkPurple(
|
|
5951
|
+
parsedError.packageName
|
|
5952
|
+
)} could not be found in the npm registry.`
|
|
5953
|
+
);
|
|
5954
|
+
break;
|
|
5955
|
+
}
|
|
5956
|
+
case "no-matching-version-error": {
|
|
5957
|
+
logger.log(
|
|
5958
|
+
`
|
|
5959
|
+
${chalkError("X Error:")} The package ${chalkPurple(
|
|
5960
|
+
parsedError.packageName
|
|
5961
|
+
)} could not resolve because the version doesn't exist`
|
|
5962
|
+
);
|
|
5963
|
+
break;
|
|
5964
|
+
}
|
|
5965
|
+
}
|
|
4841
5966
|
}
|
|
4842
|
-
logger.error(`Background worker failed to start: ${e}`);
|
|
4843
5967
|
}
|
|
4844
5968
|
});
|
|
4845
5969
|
}
|
|
@@ -4850,17 +5974,17 @@ function useDev({
|
|
|
4850
5974
|
}
|
|
4851
5975
|
const throttledRebuild = pDebounce(runBuild, 250, { before: true });
|
|
4852
5976
|
const taskFileWatcher = watch(
|
|
4853
|
-
config.triggerDirectories.map((triggerDir) => `${triggerDir}
|
|
5977
|
+
config.triggerDirectories.map((triggerDir) => `${triggerDir}/**/*.ts`),
|
|
4854
5978
|
{
|
|
4855
5979
|
ignoreInitial: true
|
|
4856
5980
|
}
|
|
4857
5981
|
);
|
|
4858
|
-
taskFileWatcher.on("add", async (
|
|
5982
|
+
taskFileWatcher.on("add", async (path7) => {
|
|
4859
5983
|
throttledRebuild().catch((error) => {
|
|
4860
5984
|
logger.error(error);
|
|
4861
5985
|
});
|
|
4862
5986
|
});
|
|
4863
|
-
taskFileWatcher.on("unlink", async (
|
|
5987
|
+
taskFileWatcher.on("unlink", async (path7) => {
|
|
4864
5988
|
throttledRebuild().catch((error) => {
|
|
4865
5989
|
logger.error(error);
|
|
4866
5990
|
});
|
|
@@ -4920,7 +6044,7 @@ async function gatherRequiredDependencies2(outputMeta, config) {
|
|
|
4920
6044
|
}
|
|
4921
6045
|
}
|
|
4922
6046
|
if (config.additionalPackages) {
|
|
4923
|
-
const projectPackageJson = await readJSONFile(
|
|
6047
|
+
const projectPackageJson = await readJSONFile(join7(config.projectDir, "package.json"));
|
|
4924
6048
|
for (const packageName of config.additionalPackages) {
|
|
4925
6049
|
if (dependencies2[packageName]) {
|
|
4926
6050
|
continue;
|
|
@@ -4950,16 +6074,14 @@ async function gatherRequiredDependencies2(outputMeta, config) {
|
|
|
4950
6074
|
function createDuplicateTaskIdOutputErrorMessage(duplicateTaskIds, taskResources) {
|
|
4951
6075
|
const duplicateTable = duplicateTaskIds.map((id) => {
|
|
4952
6076
|
const tasks = taskResources.filter((task) => task.id === id);
|
|
4953
|
-
return `
|
|
4954
|
-
${tasks.map((task) => `${task.filePath} -> ${task.exportName}`).join("\n")}`;
|
|
4955
|
-
}).join("\n\n");
|
|
4956
|
-
return `Duplicate task ids detected:
|
|
4957
|
-
|
|
4958
|
-
${duplicateTable}
|
|
6077
|
+
return `
|
|
4959
6078
|
|
|
4960
|
-
|
|
6079
|
+
${chalkTask(id)} was found in:${tasks.map((task) => `
|
|
6080
|
+
${task.filePath} -> ${task.exportName}`).join("")}`;
|
|
6081
|
+
}).join("");
|
|
6082
|
+
return `Duplicate ${chalkTask("task id")} detected:${duplicateTable}`;
|
|
4961
6083
|
}
|
|
4962
|
-
function gatherProcessEnv() {
|
|
6084
|
+
async function gatherProcessEnv() {
|
|
4963
6085
|
const env = {
|
|
4964
6086
|
NODE_ENV: process.env.NODE_ENV ?? "development",
|
|
4965
6087
|
PATH: process.env.PATH,
|
|
@@ -4970,31 +6092,54 @@ function gatherProcessEnv() {
|
|
|
4970
6092
|
NVM_BIN: process.env.NVM_BIN,
|
|
4971
6093
|
LANG: process.env.LANG,
|
|
4972
6094
|
TERM: process.env.TERM,
|
|
4973
|
-
NODE_PATH: process.env.NODE_PATH,
|
|
6095
|
+
NODE_PATH: await amendNodePathWithPnpmNodeModules(process.env.NODE_PATH),
|
|
4974
6096
|
HOME: process.env.HOME,
|
|
4975
6097
|
BUN_INSTALL: process.env.BUN_INSTALL
|
|
4976
6098
|
};
|
|
4977
6099
|
return Object.fromEntries(Object.entries(env).filter(([key, value]) => value !== void 0));
|
|
4978
6100
|
}
|
|
6101
|
+
async function amendNodePathWithPnpmNodeModules(nodePath) {
|
|
6102
|
+
const pnpmModulesPath = await findPnpmNodeModulesPath();
|
|
6103
|
+
if (!pnpmModulesPath) {
|
|
6104
|
+
return nodePath;
|
|
6105
|
+
}
|
|
6106
|
+
if (nodePath) {
|
|
6107
|
+
if (nodePath.includes(pnpmModulesPath)) {
|
|
6108
|
+
return nodePath;
|
|
6109
|
+
}
|
|
6110
|
+
return `${nodePath}:${pnpmModulesPath}`;
|
|
6111
|
+
}
|
|
6112
|
+
return pnpmModulesPath;
|
|
6113
|
+
}
|
|
6114
|
+
async function findPnpmNodeModulesPath() {
|
|
6115
|
+
return await findUp3(
|
|
6116
|
+
async (directory) => {
|
|
6117
|
+
const pnpmModules = join7(directory, "node_modules", ".pnpm", "node_modules");
|
|
6118
|
+
const hasPnpmNodeModules = await pathExists2(pnpmModules);
|
|
6119
|
+
if (hasPnpmNodeModules) {
|
|
6120
|
+
return pnpmModules;
|
|
6121
|
+
}
|
|
6122
|
+
},
|
|
6123
|
+
{ type: "directory" }
|
|
6124
|
+
);
|
|
6125
|
+
}
|
|
4979
6126
|
|
|
4980
6127
|
// src/commands/init.ts
|
|
4981
|
-
import { intro as
|
|
6128
|
+
import { intro as intro5, isCancel as isCancel2, log as log6, outro as outro6, select as select2, text } from "@clack/prompts";
|
|
4982
6129
|
import { context as context3, trace as trace3 } from "@opentelemetry/api";
|
|
4983
|
-
import {
|
|
4984
|
-
|
|
4985
|
-
recordSpanException as recordSpanException5
|
|
4986
|
-
} from "@trigger.dev/core/v3";
|
|
6130
|
+
import { flattenAttributes as flattenAttributes3 } from "@trigger.dev/core/v3";
|
|
6131
|
+
import { recordSpanException as recordSpanException5 } from "@trigger.dev/core/v3/workers";
|
|
4987
6132
|
import chalk5 from "chalk";
|
|
4988
6133
|
import { execa as execa3 } from "execa";
|
|
4989
6134
|
import { applyEdits, modify } from "jsonc-parser";
|
|
4990
6135
|
import { writeFile as writeFile3 } from "node:fs/promises";
|
|
4991
|
-
import { join as
|
|
4992
|
-
import
|
|
6136
|
+
import { join as join8, relative as relative4, resolve as resolve4 } from "node:path";
|
|
6137
|
+
import terminalLink4 from "terminal-link";
|
|
4993
6138
|
import { z as z6 } from "zod";
|
|
4994
6139
|
|
|
4995
6140
|
// src/utilities/createFileFromTemplate.ts
|
|
4996
6141
|
import fs8 from "fs/promises";
|
|
4997
|
-
import
|
|
6142
|
+
import path6 from "path";
|
|
4998
6143
|
async function createFileFromTemplate(params) {
|
|
4999
6144
|
let template = await readFile(params.templatePath);
|
|
5000
6145
|
if (await pathExists(params.outputPath) && !params.override) {
|
|
@@ -5005,7 +6150,7 @@ async function createFileFromTemplate(params) {
|
|
|
5005
6150
|
}
|
|
5006
6151
|
try {
|
|
5007
6152
|
const output = replaceAll(template, params.replacements);
|
|
5008
|
-
const directoryName =
|
|
6153
|
+
const directoryName = path6.dirname(params.outputPath);
|
|
5009
6154
|
await fs8.mkdir(directoryName, { recursive: true });
|
|
5010
6155
|
await fs8.writeFile(params.outputPath, output);
|
|
5011
6156
|
return {
|
|
@@ -5033,51 +6178,6 @@ function replaceAll(input, replacements) {
|
|
|
5033
6178
|
return output;
|
|
5034
6179
|
}
|
|
5035
6180
|
|
|
5036
|
-
// src/utilities/getUserPackageManager.ts
|
|
5037
|
-
import pathModule2 from "path";
|
|
5038
|
-
async function getUserPackageManager(path6) {
|
|
5039
|
-
try {
|
|
5040
|
-
return await detectPackageManagerFromArtifacts(path6);
|
|
5041
|
-
} catch (error) {
|
|
5042
|
-
return detectPackageManagerFromCurrentCommand();
|
|
5043
|
-
}
|
|
5044
|
-
}
|
|
5045
|
-
function detectPackageManagerFromCurrentCommand() {
|
|
5046
|
-
const userAgent = process.env.npm_config_user_agent;
|
|
5047
|
-
if (userAgent) {
|
|
5048
|
-
if (userAgent.startsWith("yarn")) {
|
|
5049
|
-
return "yarn";
|
|
5050
|
-
} else if (userAgent.startsWith("pnpm")) {
|
|
5051
|
-
return "pnpm";
|
|
5052
|
-
} else {
|
|
5053
|
-
return "npm";
|
|
5054
|
-
}
|
|
5055
|
-
} else {
|
|
5056
|
-
return "npm";
|
|
5057
|
-
}
|
|
5058
|
-
}
|
|
5059
|
-
async function detectPackageManagerFromArtifacts(path6) {
|
|
5060
|
-
const packageFiles = [
|
|
5061
|
-
{ name: "yarn.lock", pm: "yarn" },
|
|
5062
|
-
{ name: "pnpm-lock.yaml", pm: "pnpm" },
|
|
5063
|
-
{ name: "package-lock.json", pm: "npm" },
|
|
5064
|
-
{ name: "npm-shrinkwrap.json", pm: "npm" }
|
|
5065
|
-
];
|
|
5066
|
-
for (const { name, pm } of packageFiles) {
|
|
5067
|
-
const exists = await pathExists(pathModule2.join(path6, name));
|
|
5068
|
-
if (exists) {
|
|
5069
|
-
return pm;
|
|
5070
|
-
}
|
|
5071
|
-
}
|
|
5072
|
-
throw new Error("Could not detect package manager from artifacts");
|
|
5073
|
-
}
|
|
5074
|
-
|
|
5075
|
-
// src/utilities/resolveInternalFilePath.ts
|
|
5076
|
-
import { resolve as importResolve3 } from "import-meta-resolve";
|
|
5077
|
-
function resolveInternalFilePath(filePath) {
|
|
5078
|
-
return new URL(importResolve3(filePath, import.meta.url)).href.replace("file://", "");
|
|
5079
|
-
}
|
|
5080
|
-
|
|
5081
6181
|
// src/commands/init.ts
|
|
5082
6182
|
var InitCommandOptions = CommonCommandOptions.extend({
|
|
5083
6183
|
projectRef: z6.string().optional(),
|
|
@@ -5095,10 +6195,10 @@ function configureInitCommand(program2) {
|
|
|
5095
6195
|
"The version of the @trigger.dev/sdk package to install",
|
|
5096
6196
|
"beta"
|
|
5097
6197
|
).option("--skip-package-install", "Skip installing the @trigger.dev/sdk package").option("--override-config", "Override the existing config file if it exists")
|
|
5098
|
-
).action(async (
|
|
6198
|
+
).action(async (path7, options) => {
|
|
5099
6199
|
await handleTelemetry(async () => {
|
|
5100
6200
|
await printStandloneInitialBanner(true);
|
|
5101
|
-
await initCommand(
|
|
6201
|
+
await initCommand(path7, options);
|
|
5102
6202
|
});
|
|
5103
6203
|
});
|
|
5104
6204
|
}
|
|
@@ -5109,7 +6209,7 @@ async function initCommand(dir, options) {
|
|
|
5109
6209
|
}
|
|
5110
6210
|
async function _initCommand(dir, options) {
|
|
5111
6211
|
const span = trace3.getSpan(context3.active());
|
|
5112
|
-
|
|
6212
|
+
intro5("Initializing project");
|
|
5113
6213
|
const authorization = await login({
|
|
5114
6214
|
embedded: true,
|
|
5115
6215
|
defaultApiUrl: options.apiUrl,
|
|
@@ -5133,7 +6233,7 @@ async function _initCommand(dir, options) {
|
|
|
5133
6233
|
if (!options.overrideConfig) {
|
|
5134
6234
|
try {
|
|
5135
6235
|
const result = await readConfig(dir);
|
|
5136
|
-
|
|
6236
|
+
outro6(
|
|
5137
6237
|
result.status === "file" ? `Project already initialized: Found config file at ${result.path}. Pass --override-config to override` : "Project already initialized"
|
|
5138
6238
|
);
|
|
5139
6239
|
return;
|
|
@@ -5150,55 +6250,56 @@ async function _initCommand(dir, options) {
|
|
|
5150
6250
|
...flattenAttributes3(selectedProject, "cli.project")
|
|
5151
6251
|
});
|
|
5152
6252
|
logger.debug("Selected project", selectedProject);
|
|
5153
|
-
|
|
6253
|
+
log6.step(`Configuring project "${selectedProject.name}" (${selectedProject.externalRef})`);
|
|
5154
6254
|
if (!options.skipPackageInstall) {
|
|
5155
6255
|
await installPackages2(dir, options);
|
|
5156
6256
|
} else {
|
|
5157
|
-
|
|
6257
|
+
log6.info("Skipping package installation");
|
|
5158
6258
|
}
|
|
5159
6259
|
const triggerDir = await createTriggerDir(dir, options);
|
|
5160
6260
|
await writeConfigFile(dir, selectedProject, options, triggerDir);
|
|
5161
6261
|
await addConfigFileToTsConfig(dir, options);
|
|
5162
6262
|
await gitIgnoreDotTriggerDir(dir, options);
|
|
5163
|
-
const projectDashboard =
|
|
6263
|
+
const projectDashboard = terminalLink4(
|
|
5164
6264
|
"project dashboard",
|
|
5165
6265
|
`${authorization.dashboardUrl}/projects/v3/${selectedProject.externalRef}`
|
|
5166
6266
|
);
|
|
5167
|
-
|
|
5168
|
-
|
|
5169
|
-
|
|
6267
|
+
log6.success("Successfully initialized project for Trigger.dev v3 \u{1FAE1}");
|
|
6268
|
+
log6.info("Next steps:");
|
|
6269
|
+
log6.info(
|
|
5170
6270
|
` 1. To start developing, run ${chalk5.green(
|
|
5171
6271
|
`npx trigger.dev@${options.tag} dev`
|
|
5172
6272
|
)} in your project directory`
|
|
5173
6273
|
);
|
|
5174
|
-
|
|
5175
|
-
|
|
5176
|
-
` 3. Head over to our ${
|
|
6274
|
+
log6.info(` 2. Visit your ${projectDashboard} to view your newly created tasks.`);
|
|
6275
|
+
log6.info(
|
|
6276
|
+
` 3. Head over to our ${terminalLink4(
|
|
5177
6277
|
"v3 docs",
|
|
5178
6278
|
"https://trigger.dev/docs/v3"
|
|
5179
6279
|
)} to learn more.`
|
|
5180
6280
|
);
|
|
5181
|
-
|
|
5182
|
-
` 4. Need help? Join our ${
|
|
6281
|
+
log6.info(
|
|
6282
|
+
` 4. Need help? Join our ${terminalLink4(
|
|
5183
6283
|
"Discord community",
|
|
5184
6284
|
"https://trigger.dev/discord"
|
|
5185
6285
|
)} or email us at ${chalk5.cyan("help@trigger.dev")}`
|
|
5186
6286
|
);
|
|
5187
|
-
|
|
6287
|
+
outro6(`Project initialized successfully. Happy coding!`);
|
|
5188
6288
|
}
|
|
5189
6289
|
async function createTriggerDir(dir, options) {
|
|
5190
6290
|
return await tracer.startActiveSpan("createTriggerDir", async (span) => {
|
|
5191
6291
|
try {
|
|
5192
|
-
const defaultValue =
|
|
6292
|
+
const defaultValue = join8(dir, "src", "trigger");
|
|
5193
6293
|
const location = await text({
|
|
5194
6294
|
message: "Where would you like to create the Trigger.dev directory?",
|
|
5195
6295
|
defaultValue,
|
|
5196
6296
|
placeholder: defaultValue
|
|
5197
6297
|
});
|
|
5198
|
-
if (
|
|
6298
|
+
if (isCancel2(location)) {
|
|
5199
6299
|
throw new OutroCommandError();
|
|
5200
6300
|
}
|
|
5201
|
-
const triggerDir =
|
|
6301
|
+
const triggerDir = resolve4(process.cwd(), location);
|
|
6302
|
+
logger.debug({ triggerDir });
|
|
5202
6303
|
span.setAttributes({
|
|
5203
6304
|
"cli.triggerDir": triggerDir
|
|
5204
6305
|
});
|
|
@@ -5216,7 +6317,7 @@ async function createTriggerDir(dir, options) {
|
|
|
5216
6317
|
}
|
|
5217
6318
|
]
|
|
5218
6319
|
});
|
|
5219
|
-
if (
|
|
6320
|
+
if (isCancel2(exampleSelection)) {
|
|
5220
6321
|
throw new OutroCommandError();
|
|
5221
6322
|
}
|
|
5222
6323
|
const example = exampleSelection;
|
|
@@ -5224,20 +6325,20 @@ async function createTriggerDir(dir, options) {
|
|
|
5224
6325
|
"cli.example": example
|
|
5225
6326
|
});
|
|
5226
6327
|
if (example === "none") {
|
|
5227
|
-
await createFile(
|
|
5228
|
-
|
|
6328
|
+
await createFile(join8(triggerDir, ".gitkeep"), "");
|
|
6329
|
+
log6.step(`Created directory at ${location}`);
|
|
5229
6330
|
span.end();
|
|
5230
6331
|
return { location, isCustomValue: location !== defaultValue };
|
|
5231
6332
|
}
|
|
5232
|
-
const
|
|
5233
|
-
const outputPath =
|
|
6333
|
+
const templatePath = join8(cliRootPath(), "templates", "examples", `${example}.ts.template`);
|
|
6334
|
+
const outputPath = join8(triggerDir, "example.ts");
|
|
5234
6335
|
await createFileFromTemplate({
|
|
5235
|
-
templatePath
|
|
6336
|
+
templatePath,
|
|
5236
6337
|
outputPath,
|
|
5237
6338
|
replacements: {}
|
|
5238
6339
|
});
|
|
5239
|
-
const relativeOutputPath =
|
|
5240
|
-
|
|
6340
|
+
const relativeOutputPath = relative4(process.cwd(), outputPath);
|
|
6341
|
+
log6.step(`Created example file at ${relativeOutputPath}`);
|
|
5241
6342
|
span.end();
|
|
5242
6343
|
return { location, isCustomValue: location !== defaultValue };
|
|
5243
6344
|
} catch (e) {
|
|
@@ -5252,15 +6353,15 @@ async function createTriggerDir(dir, options) {
|
|
|
5252
6353
|
async function gitIgnoreDotTriggerDir(dir, options) {
|
|
5253
6354
|
return await tracer.startActiveSpan("gitIgnoreDotTriggerDir", async (span) => {
|
|
5254
6355
|
try {
|
|
5255
|
-
const projectDir =
|
|
5256
|
-
const gitIgnorePath =
|
|
6356
|
+
const projectDir = resolve4(process.cwd(), dir);
|
|
6357
|
+
const gitIgnorePath = join8(projectDir, ".gitignore");
|
|
5257
6358
|
span.setAttributes({
|
|
5258
6359
|
"cli.projectDir": projectDir,
|
|
5259
6360
|
"cli.gitIgnorePath": gitIgnorePath
|
|
5260
6361
|
});
|
|
5261
6362
|
if (!await pathExists(gitIgnorePath)) {
|
|
5262
6363
|
await createFile(gitIgnorePath, ".trigger");
|
|
5263
|
-
|
|
6364
|
+
log6.step(`Added .trigger to .gitignore`);
|
|
5264
6365
|
span.end();
|
|
5265
6366
|
return;
|
|
5266
6367
|
}
|
|
@@ -5272,7 +6373,7 @@ async function gitIgnoreDotTriggerDir(dir, options) {
|
|
|
5272
6373
|
const newGitIgnoreContent = `${gitIgnoreContent}
|
|
5273
6374
|
.trigger`;
|
|
5274
6375
|
await writeFile3(gitIgnorePath, newGitIgnoreContent, "utf-8");
|
|
5275
|
-
|
|
6376
|
+
log6.step(`Added .trigger to .gitignore`);
|
|
5276
6377
|
span.end();
|
|
5277
6378
|
} catch (e) {
|
|
5278
6379
|
if (!(e instanceof SkipCommandError)) {
|
|
@@ -5286,8 +6387,8 @@ async function gitIgnoreDotTriggerDir(dir, options) {
|
|
|
5286
6387
|
async function addConfigFileToTsConfig(dir, options) {
|
|
5287
6388
|
return await tracer.startActiveSpan("createTriggerDir", async (span) => {
|
|
5288
6389
|
try {
|
|
5289
|
-
const projectDir =
|
|
5290
|
-
const tsconfigPath =
|
|
6390
|
+
const projectDir = resolve4(process.cwd(), dir);
|
|
6391
|
+
const tsconfigPath = join8(projectDir, "tsconfig.json");
|
|
5291
6392
|
span.setAttributes({
|
|
5292
6393
|
"cli.projectDir": projectDir,
|
|
5293
6394
|
"cli.tsconfigPath": tsconfigPath
|
|
@@ -5305,7 +6406,7 @@ async function addConfigFileToTsConfig(dir, options) {
|
|
|
5305
6406
|
const newTsconfigContent = applyEdits(tsconfigContent, edits);
|
|
5306
6407
|
logger.debug("new tsconfig.json content", { newTsconfigContent });
|
|
5307
6408
|
await writeFile3(tsconfigPath, newTsconfigContent, "utf-8");
|
|
5308
|
-
|
|
6409
|
+
log6.step(`Added trigger.config.ts to tsconfig.json`);
|
|
5309
6410
|
span.end();
|
|
5310
6411
|
} catch (e) {
|
|
5311
6412
|
if (!(e instanceof SkipCommandError)) {
|
|
@@ -5318,9 +6419,9 @@ async function addConfigFileToTsConfig(dir, options) {
|
|
|
5318
6419
|
}
|
|
5319
6420
|
async function installPackages2(dir, options) {
|
|
5320
6421
|
return await tracer.startActiveSpan("installPackages", async (span) => {
|
|
5321
|
-
const installSpinner =
|
|
6422
|
+
const installSpinner = spinner();
|
|
5322
6423
|
try {
|
|
5323
|
-
const projectDir =
|
|
6424
|
+
const projectDir = resolve4(process.cwd(), dir);
|
|
5324
6425
|
const pkgManager = await getUserPackageManager(projectDir);
|
|
5325
6426
|
span.setAttributes({
|
|
5326
6427
|
"cli.projectDir": projectDir,
|
|
@@ -5370,11 +6471,11 @@ async function installPackages2(dir, options) {
|
|
|
5370
6471
|
async function writeConfigFile(dir, project, options, triggerDir) {
|
|
5371
6472
|
return await tracer.startActiveSpan("writeConfigFile", async (span) => {
|
|
5372
6473
|
try {
|
|
5373
|
-
const spnnr =
|
|
6474
|
+
const spnnr = spinner();
|
|
5374
6475
|
spnnr.start("Creating config file");
|
|
5375
|
-
const projectDir =
|
|
5376
|
-
const templatePath =
|
|
5377
|
-
const outputPath =
|
|
6476
|
+
const projectDir = resolve4(process.cwd(), dir);
|
|
6477
|
+
const templatePath = join8(cliRootPath(), "templates", "trigger.config.ts.template");
|
|
6478
|
+
const outputPath = join8(projectDir, "trigger.config.ts");
|
|
5378
6479
|
span.setAttributes({
|
|
5379
6480
|
"cli.projectDir": projectDir,
|
|
5380
6481
|
"cli.templatePath": templatePath,
|
|
@@ -5390,7 +6491,7 @@ async function writeConfigFile(dir, project, options, triggerDir) {
|
|
|
5390
6491
|
outputPath,
|
|
5391
6492
|
override: options.overrideConfig
|
|
5392
6493
|
});
|
|
5393
|
-
const relativePathToOutput =
|
|
6494
|
+
const relativePathToOutput = relative4(process.cwd(), outputPath);
|
|
5394
6495
|
spnnr.stop(
|
|
5395
6496
|
result.success ? `Config file created at ${relativePathToOutput}` : `Failed to create config file: ${result.error}`
|
|
5396
6497
|
);
|
|
@@ -5414,7 +6515,7 @@ async function selectProject(apiClient2, dashboardUrl, projectRef) {
|
|
|
5414
6515
|
if (projectRef) {
|
|
5415
6516
|
const projectResponse = await apiClient2.getProject(projectRef);
|
|
5416
6517
|
if (!projectResponse.success) {
|
|
5417
|
-
|
|
6518
|
+
log6.error(
|
|
5418
6519
|
`--project-ref ${projectRef} is not a valid project ref. Request to fetch data resulted in: ${projectResponse.error}`
|
|
5419
6520
|
);
|
|
5420
6521
|
throw new SkipCommandError(projectResponse.error);
|
|
@@ -5430,11 +6531,11 @@ async function selectProject(apiClient2, dashboardUrl, projectRef) {
       throw new Error(`Failed to get projects: ${projectsResponse.error}`);
     }
     if (projectsResponse.data.length === 0) {
-      const newProjectLink =
+      const newProjectLink = terminalLink4(
         "Create new project",
         `${dashboardUrl}/projects/new?version=v3`
       );
-
+      outro6(`You don't have any projects yet. ${newProjectLink}`);
       throw new SkipCommandError();
     }
     const selectedProject = await select2({
@@ -5445,7 +6546,7 @@ async function selectProject(apiClient2, dashboardUrl, projectRef) {
         hint: project.organization.title
       }))
     });
-    if (
+    if (isCancel2(selectedProject)) {
       throw new OutroCommandError();
     }
     const projectData = projectsResponse.data.find(
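
The isCancel2 guard added above is the standard @clack/prompts cancellation pattern: select() resolves to a cancel symbol when the user aborts, and isCancel() checks for that case before the value is used. A minimal TypeScript sketch of that pattern follows; the Project fields externalRef and name are assumed for illustration, only organization.title appears in the diff itself.

// Illustrative sketch only, not the bundle's selectProject implementation.
import { select, isCancel, outro } from "@clack/prompts";

type Project = { externalRef: string; name: string; organization: { title: string } };

async function pickProject(projects: Project[]): Promise<Project | undefined> {
  // select() returns the chosen option's value, or a cancel symbol on Ctrl+C/Esc.
  const selectedRef = await select({
    message: "Select an existing Trigger.dev project",
    options: projects.map((project) => ({
      value: project.externalRef,
      label: project.name,
      hint: project.organization.title,
    })),
  });
  if (isCancel(selectedRef)) {
    outro("Cancelled");
    return undefined;
  }
  return projects.find((project) => project.externalRef === selectedRef);
}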
@@ -5472,12 +6573,14 @@ async function selectProject(apiClient2, dashboardUrl, projectRef) {
 // src/commands/logout.ts
 var LogoutCommandOptions = CommonCommandOptions;
 function configureLogoutCommand(program2) {
-  return commonOptions(program2.command("logout").description("Logout of Trigger.dev")).action(
-
-    await
-
-
-
+  return commonOptions(program2.command("logout").description("Logout of Trigger.dev")).action(
+    async (options) => {
+      await handleTelemetry(async () => {
+        await printInitialBanner(false);
+        await logoutCommand(options);
+      });
+    }
+  );
 }
 async function logoutCommand(options) {
   return await wrapCommandAction("logoutCommand", LogoutCommandOptions, options, async (opts) => {
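
The rewritten configureLogoutCommand shows the wiring pattern the bundle's commands share: a commander action whose body is wrapped in handleTelemetry and preceded by the banner. The sketch below reproduces only that shape; handleTelemetry and printInitialBanner are stub placeholders here, not the package's helpers.

// Illustrative sketch only, assuming the commander package.
import { Command } from "commander";

async function handleTelemetry(fn: () => Promise<void>): Promise<void> {
  // Placeholder wrapper: the real helper would also flush telemetry and map command errors.
  await fn();
}

async function printInitialBanner(performVersionCheck: boolean): Promise<void> {
  console.log(`trigger.dev CLI${performVersionCheck ? " (checking for updates...)" : ""}`);
}

function configureExampleCommand(program: Command): Command {
  return program
    .command("example")
    .description("An example command wired up like logout/list-profiles")
    .action(async (options) => {
      await handleTelemetry(async () => {
        await printInitialBanner(false);
        console.log("command body runs here", options);
      });
    });
}

const program = new Command();
configureExampleCommand(program);
program.parseAsync(process.argv);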
@@ -5487,11 +6590,46 @@ async function logoutCommand(options) {
 async function logout(options) {
   const config = readAuthConfigProfile(options.profile);
   if (!config?.accessToken) {
-    logger.info(`You are already logged out [${options.profile
+    logger.info(`You are already logged out [${options.profile}]`);
+    return;
+  }
+  deleteAuthConfigProfile(options.profile);
+  logger.info(`Logged out of Trigger.dev [${options.profile}]`);
+}
+
+// src/commands/list-profiles.ts
+import { log as log7, outro as outro7 } from "@clack/prompts";
+var ListProfilesOptions = CommonCommandOptions;
+function configureListProfilesCommand(program2) {
+  return program2.command("list-profiles").description("List all of your CLI profiles").option(
+    "-l, --log-level <level>",
+    "The CLI log level to use (debug, info, log, warn, error, none). This does not effect the log level of your trigger.dev tasks.",
+    "log"
+  ).option("--skip-telemetry", "Opt-out of sending telemetry").action(async (options) => {
+    await handleTelemetry(async () => {
+      await printInitialBanner(true);
+      await listProfilesCommand(options);
+    });
+  });
+}
+async function listProfilesCommand(options) {
+  return await wrapCommandAction("listProfiles", ListProfilesOptions, options, async (opts) => {
+    return await listProfiles(opts);
+  });
+}
+async function listProfiles(options) {
+  const authConfig = readAuthConfigFile();
+  if (!authConfig) {
+    logger.info("No profiles found");
     return;
   }
-
-
+  const profiles = Object.keys(authConfig);
+  log7.message("Profiles:");
+  for (const profile of profiles) {
+    const profileConfig = authConfig[profile];
+    log7.info(`${profile}${profileConfig?.apiUrl ? ` - ${chalkGrey(profileConfig.apiUrl)}` : ""}`);
+  }
+  outro7("Retrieve account info by running whoami --profile <profile>");
 }
 
 // src/cli/index.ts
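
The new list-profiles command iterates the profiles stored in the CLI's auth config file. Below is a minimal standalone TypeScript sketch of that logic, assuming the config is a JSON object keyed by profile name with optional accessToken/apiUrl fields (the shape implied by the logout and listProfiles code above); readAuthConfigFile here is a simplified stand-in for the CLI's own helper, which resolves the file via xdg-app-paths.

// Illustrative sketch only, not part of the published bundle.
import { readFileSync } from "node:fs";

type AuthProfile = { accessToken?: string; apiUrl?: string };
type AuthConfigFile = Record<string, AuthProfile>;

// Hypothetical loader: reads an explicit path instead of the CLI's XDG config location.
function readAuthConfigFile(path: string): AuthConfigFile | undefined {
  try {
    return JSON.parse(readFileSync(path, "utf-8")) as AuthConfigFile;
  } catch {
    return undefined;
  }
}

// Mirrors the control flow of the new listProfiles(): bail out when no config exists,
// otherwise print each profile name with its apiUrl when one is set.
function listProfiles(configPath: string): void {
  const authConfig = readAuthConfigFile(configPath);
  if (!authConfig) {
    console.info("No profiles found");
    return;
  }
  console.info("Profiles:");
  for (const [profile, profileConfig] of Object.entries(authConfig)) {
    console.info(`${profile}${profileConfig?.apiUrl ? ` - ${profileConfig.apiUrl}` : ""}`);
  }
}

listProfiles("./auth.json");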
@@ -5503,6 +6641,8 @@ configureDevCommand(program);
 configureDeployCommand(program);
 configureWhoamiCommand(program);
 configureLogoutCommand(program);
+configureListProfilesCommand(program);
+configureUpdateCommand(program);
 
 // src/index.ts
 var main = async () => {