@tailor-platform/sdk 1.26.0 → 1.27.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61) hide show
  1. package/CHANGELOG.md +8 -0
  2. package/dist/application-CBJFUKrU.mjs +4701 -0
  3. package/dist/application-CBJFUKrU.mjs.map +1 -0
  4. package/dist/application-WyZetOky.mjs +11 -0
  5. package/dist/cli/index.mjs +140 -31
  6. package/dist/cli/index.mjs.map +1 -1
  7. package/dist/cli/lib.d.mts +6 -6
  8. package/dist/cli/lib.mjs +10 -8
  9. package/dist/cli/lib.mjs.map +1 -1
  10. package/dist/client-C2_wgujH.mjs +6 -0
  11. package/dist/{application-D9xahQRQ.mjs → client-bTbnbQbB.mjs} +8 -4877
  12. package/dist/{application-D9xahQRQ.mjs.map → client-bTbnbQbB.mjs.map} +1 -1
  13. package/dist/configure/index.d.mts +4 -4
  14. package/dist/crash-report-Cot_9Esm.mjs +6 -0
  15. package/dist/crash-report-Ju8cQF-l.mjs +414 -0
  16. package/dist/crash-report-Ju8cQF-l.mjs.map +1 -0
  17. package/dist/{enum-constants-6uK0VI_s.mjs → enum-constants-D1nfn0qD.mjs} +1 -1
  18. package/dist/{enum-constants-6uK0VI_s.mjs.map → enum-constants-D1nfn0qD.mjs.map} +1 -1
  19. package/dist/{env-CSsVESbH.d.mts → env-BuMbIknz.d.mts} +2 -2
  20. package/dist/{file-utils-2T9w20FP.mjs → file-utils-Bctuzn3x.mjs} +1 -1
  21. package/dist/{file-utils-2T9w20FP.mjs.map → file-utils-Bctuzn3x.mjs.map} +1 -1
  22. package/dist/{index-DgRShBpu.d.mts → index-B0Lrzywd.d.mts} +2 -2
  23. package/dist/{index-BtYPY8ya.d.mts → index-CbnLNm14.d.mts} +2 -2
  24. package/dist/{index-DkJbItB-.d.mts → index-CyapgSFI.d.mts} +2 -2
  25. package/dist/{index-BKy-OC5C.d.mts → index-D1AM_02Y.d.mts} +2 -2
  26. package/dist/{index-BJg0DTbR.d.mts → index-cD9sQLTh.d.mts} +15 -15
  27. package/dist/{interceptor-BPiIBTk_.mjs → interceptor-B0d_GrI5.mjs} +1 -1
  28. package/dist/{interceptor-BPiIBTk_.mjs.map → interceptor-B0d_GrI5.mjs.map} +1 -1
  29. package/dist/{kysely-type-cMNbsQ6k.mjs → kysely-type-B_IecdK9.mjs} +1 -1
  30. package/dist/{kysely-type-cMNbsQ6k.mjs.map → kysely-type-B_IecdK9.mjs.map} +1 -1
  31. package/dist/logger-CqezTedh.mjs +181 -0
  32. package/dist/logger-CqezTedh.mjs.map +1 -0
  33. package/dist/{package-json-CVUv8Y9T.mjs → package-json-D3x2nBPB.mjs} +1 -1
  34. package/dist/{package-json-CVUv8Y9T.mjs.map → package-json-D3x2nBPB.mjs.map} +1 -1
  35. package/dist/package-json-DHfTiUCS.mjs +4 -0
  36. package/dist/plugin/builtin/enum-constants/index.d.mts +1 -1
  37. package/dist/plugin/builtin/enum-constants/index.mjs +1 -1
  38. package/dist/plugin/builtin/file-utils/index.d.mts +1 -1
  39. package/dist/plugin/builtin/file-utils/index.mjs +1 -1
  40. package/dist/plugin/builtin/kysely-type/index.d.mts +1 -1
  41. package/dist/plugin/builtin/kysely-type/index.mjs +1 -1
  42. package/dist/plugin/builtin/seed/index.d.mts +1 -1
  43. package/dist/plugin/builtin/seed/index.mjs +1 -1
  44. package/dist/plugin/index.d.mts +2 -2
  45. package/dist/{plugin-B1hNwcCC.d.mts → plugin-D3a0-qe0.d.mts} +6 -2
  46. package/dist/{query-B8ml6ClT.mjs → query-CgGbAmUg.mjs} +9 -6
  47. package/dist/query-CgGbAmUg.mjs.map +1 -0
  48. package/dist/{seed-CCVRLibh.mjs → seed-CWkIDWMb.mjs} +1 -1
  49. package/dist/{seed-CCVRLibh.mjs.map → seed-CWkIDWMb.mjs.map} +1 -1
  50. package/dist/{telemetry-DDQZRqHK.mjs → telemetry-BevrwWwF.mjs} +1 -1
  51. package/dist/{telemetry-0w8OupuQ.mjs → telemetry-VvNfsyEE.mjs} +2 -2
  52. package/dist/{telemetry-0w8OupuQ.mjs.map → telemetry-VvNfsyEE.mjs.map} +1 -1
  53. package/dist/utils/test/index.d.mts +2 -2
  54. package/dist/{workflow.generated-Bm4b8hEk.d.mts → workflow.generated-BsgIlrH-.d.mts} +2 -2
  55. package/docs/cli/crash-report.md +107 -0
  56. package/docs/cli-reference.md +11 -0
  57. package/docs/services/resolver.md +32 -0
  58. package/package.json +1 -1
  59. package/dist/application-CxH6Yp54.mjs +0 -9
  60. package/dist/package-json-Bj76LPsV.mjs +0 -4
  61. package/dist/query-B8ml6ClT.mjs.map +0 -1
@@ -1,212 +1,12 @@
1
- import { n as isSdkBranded } from "./brand-GZnI4eYb.mjs";
2
- import { t as readPackageJson } from "./package-json-CVUv8Y9T.mjs";
3
- import { n as seedPlugin, r as isPluginGeneratedType, t as SeedGeneratorID } from "./seed-CCVRLibh.mjs";
4
- import { n as enumConstantsPlugin, t as EnumConstantsGeneratorID } from "./enum-constants-6uK0VI_s.mjs";
5
- import { n as fileUtilsPlugin, t as FileUtilsGeneratorID } from "./file-utils-2T9w20FP.mjs";
6
- import { n as kyselyTypePlugin, t as KyselyGeneratorID } from "./kysely-type-cMNbsQ6k.mjs";
7
- import { createRequire } from "node:module";
1
+ import { n as logger } from "./logger-CqezTedh.mjs";
2
+ import { t as readPackageJson } from "./package-json-D3x2nBPB.mjs";
8
3
  import { z } from "zod";
9
- import * as fs$1 from "node:fs";
10
- import { mkdirSync, readFileSync, rmSync, writeFileSync } from "node:fs";
11
- import { formatWithOptions } from "node:util";
12
- import * as path from "pathe";
13
- import { join, resolve } from "pathe";
14
- import chalk from "chalk";
15
- import { formatDistanceToNowStrict } from "date-fns";
16
- import { getBorderCharacters, table } from "table";
17
4
  import { OAuth2Client } from "@badgateway/oauth2-client";
18
5
  import { MethodOptions_IdempotencyLevel, file_google_protobuf_descriptor, file_google_protobuf_duration, file_google_protobuf_field_mask, file_google_protobuf_struct, file_google_protobuf_timestamp } from "@bufbuild/protobuf/wkt";
19
6
  import { Code, ConnectError, createClient } from "@connectrpc/connect";
20
7
  import { createConnectTransport } from "@connectrpc/connect-node";
21
8
  import { enumDesc, fileDesc, messageDesc, serviceDesc, tsEnum } from "@bufbuild/protobuf/codegenv2";
22
- import { resolveTSConfig } from "pkg-types";
23
- import * as os from "node:os";
24
- import { parseTOML, parseYAML, stringifyYAML } from "confbox";
25
- import { findUpSync } from "find-up-simple";
26
- import ml from "multiline-ts";
27
- import { xdgConfig } from "xdg-basedir";
28
- import * as crypto from "node:crypto";
29
- import * as rolldown from "rolldown";
30
- import * as fs from "node:fs/promises";
31
- import { parseSync } from "oxc-parser";
32
- import { pathToFileURL } from "node:url";
33
- import * as inflection from "inflection";
34
- import * as globals from "globals";
35
9
 
36
- //#region src/cli/shared/logger.ts
37
- /**
38
- * Error thrown when a prompt is attempted in a CI environment
39
- */
40
- var CIPromptError = class extends Error {
41
- constructor(message) {
42
- super(message ?? "Interactive prompts are not available in CI environments. Use --yes flag to skip confirmation prompts.");
43
- this.name = "CIPromptError";
44
- }
45
- };
46
- /**
47
- * Semantic style functions for inline text styling
48
- */
49
- const styles = {
50
- success: chalk.green,
51
- error: chalk.red,
52
- warning: chalk.yellow,
53
- info: chalk.cyan,
54
- create: chalk.green,
55
- update: chalk.yellow,
56
- delete: chalk.red,
57
- unchanged: chalk.gray,
58
- bold: chalk.bold,
59
- dim: chalk.gray,
60
- highlight: chalk.cyanBright,
61
- successBright: chalk.greenBright,
62
- errorBright: chalk.redBright,
63
- resourceType: chalk.bold,
64
- resourceName: chalk.cyan,
65
- path: chalk.cyan,
66
- value: chalk.white,
67
- placeholder: chalk.gray.italic
68
- };
69
- /**
70
- * Standardized symbols for CLI output
71
- */
72
- const symbols = {
73
- success: chalk.green("✓"),
74
- error: chalk.red("✖"),
75
- warning: chalk.yellow("⚠"),
76
- info: chalk.cyan("i"),
77
- create: chalk.green("+"),
78
- update: chalk.yellow("~"),
79
- delete: chalk.red("-"),
80
- replace: chalk.magenta("±"),
81
- bullet: chalk.gray("•"),
82
- arrow: chalk.gray("→")
83
- };
84
- let _jsonMode = false;
85
- const TYPE_ICONS = {
86
- info: "ℹ",
87
- success: "✔",
88
- warn: "⚠",
89
- error: "✖",
90
- debug: "⚙",
91
- trace: "→",
92
- log: ""
93
- };
94
- const TYPE_COLORS = {
95
- info: chalk.cyan,
96
- success: chalk.green,
97
- warn: chalk.yellow,
98
- error: chalk.red,
99
- debug: chalk.gray,
100
- trace: chalk.gray,
101
- log: (text) => text
102
- };
103
- /**
104
- * Formats a log line with the appropriate prefix and indentation
105
- * @param opts - Formatting options
106
- * @returns Formatted log line
107
- */
108
- function formatLogLine(opts) {
109
- const { mode, indent, type, message, timestamp } = opts;
110
- const indentPrefix = indent > 0 ? " ".repeat(indent) : "";
111
- const colorFn = TYPE_COLORS[type] || ((text) => text);
112
- if (mode === "plain") return `${indentPrefix}${colorFn(message)}\n`;
113
- const icon = TYPE_ICONS[type] || "";
114
- const coloredOutput = colorFn(`${icon ? `${icon} ` : ""}${message}`);
115
- return `${indentPrefix}${timestamp ?? ""}${coloredOutput}\n`;
116
- }
117
- /**
118
- * Writes a formatted log line to stderr.
119
- * @param type - Log type (info, success, warn, error, log)
120
- * @param message - Log message
121
- * @param opts - Log options (mode and indent)
122
- */
123
- function writeLog(type, message, opts) {
124
- const mode = opts?.mode ?? "default";
125
- const output = formatLogLine({
126
- mode,
127
- indent: opts?.indent ?? 0,
128
- type,
129
- message: formatWithOptions({ breakLength: process.stdout.columns || 80 }, message),
130
- timestamp: mode === "stream" ? `${(/* @__PURE__ */ new Date()).toLocaleTimeString()} ` : ""
131
- });
132
- process.stderr.write(output);
133
- }
134
- const logger = {
135
- get jsonMode() {
136
- return _jsonMode;
137
- },
138
- set jsonMode(value) {
139
- _jsonMode = value;
140
- },
141
- info(message, opts) {
142
- writeLog("info", message, opts);
143
- },
144
- success(message, opts) {
145
- writeLog("success", message, opts);
146
- },
147
- warn(message, opts) {
148
- writeLog("warn", message, opts);
149
- },
150
- error(message, opts) {
151
- writeLog("error", message, opts);
152
- },
153
- log(message) {
154
- writeLog("log", message, { mode: "plain" });
155
- },
156
- newline() {
157
- process.stderr.write("\n");
158
- },
159
- debug(message) {
160
- if (process.env.DEBUG === "true") writeLog("log", styles.dim(message), { mode: "plain" });
161
- },
162
- out(data, options) {
163
- if (typeof data === "string") {
164
- process.stdout.write(data.endsWith("\n") ? data : data + "\n");
165
- return;
166
- }
167
- if (this.jsonMode) {
168
- console.log(JSON.stringify(data));
169
- return;
170
- }
171
- const display = options?.display;
172
- const formatValue = (value, pretty = false) => {
173
- if (options?.showNull && value === null) return "NULL";
174
- if (value === null || value === void 0) return "N/A";
175
- if (value instanceof Date) return formatDistanceToNowStrict(value, { addSuffix: true });
176
- if (typeof value === "object") return pretty ? JSON.stringify(value, null, 2) : JSON.stringify(value);
177
- return String(value);
178
- };
179
- const isExcluded = (key) => {
180
- return display !== void 0 && key in display && display[key] === null;
181
- };
182
- const transformValue = (key, value, item, pretty = false) => {
183
- if (display && key in display) {
184
- const transformer = display[key];
185
- if (transformer) return transformer(value, item);
186
- }
187
- return formatValue(value, pretty);
188
- };
189
- if (!Array.isArray(data)) {
190
- const t = table(Object.entries(data).filter(([key]) => !isExcluded(key)).map(([key, value]) => [key, transformValue(key, value, data, true)]), {
191
- singleLine: false,
192
- border: getBorderCharacters("norc")
193
- });
194
- process.stdout.write(t);
195
- return;
196
- }
197
- if (data.length === 0) return;
198
- const headers = Array.from(new Set(data.flatMap((item) => Object.keys(item)))).filter((h) => !isExcluded(h));
199
- const t = table([headers, ...data.map((item) => headers.map((header) => transformValue(header, item[header], item)))], {
200
- border: getBorderCharacters("norc"),
201
- drawHorizontalLine: (lineIndex, rowCount) => {
202
- return lineIndex === 0 || lineIndex === 1 || lineIndex === rowCount;
203
- }
204
- });
205
- process.stdout.write(t);
206
- }
207
- };
208
-
209
- //#endregion
210
10
  //#region ../tailor-proto/src/buf/validate/validate_pb.js
211
11
  /**
212
12
  * Describes the file buf/validate/validate.proto.
@@ -362,7 +162,7 @@ const TenantProviderConfig_TenantProviderType = /* @__PURE__ */ tsEnum(TenantPro
362
162
  * Describes the message tailor.v1.AuthInvoker.
363
163
  * Use `create(AuthInvokerSchema)` to create a new message.
364
164
  */
365
- const AuthInvokerSchema$1 = /* @__PURE__ */ messageDesc(file_tailor_v1_auth_resource, 4);
165
+ const AuthInvokerSchema = /* @__PURE__ */ messageDesc(file_tailor_v1_auth_resource, 4);
366
166
  /**
367
167
  * Describes the enum tailor.v1.AuthSCIMConfig.AuthorizationType.
368
168
  */
@@ -571,13 +371,13 @@ const file_tailor_v1_idp_resource = /* @__PURE__ */ fileDesc("Chx0YWlsb3IvdjEvaW
571
371
  /**
572
372
  * Describes the enum tailor.v1.IdPLang.
573
373
  */
574
- const IdPLangSchema$1 = /* @__PURE__ */ enumDesc(file_tailor_v1_idp_resource, 0);
374
+ const IdPLangSchema = /* @__PURE__ */ enumDesc(file_tailor_v1_idp_resource, 0);
575
375
  /**
576
376
  * IdPLang represents the IETF BCP 47 language subtag.
577
377
  *
578
378
  * @generated from enum tailor.v1.IdPLang
579
379
  */
580
- const IdPLang = /* @__PURE__ */ tsEnum(IdPLangSchema$1);
380
+ const IdPLang = /* @__PURE__ */ tsEnum(IdPLangSchema);
581
381
 
582
382
  //#endregion
583
383
  //#region ../tailor-proto/src/tailor/v1/idp_pb.js
@@ -909,7 +709,7 @@ function initOAuth2Client() {
909
709
  * @returns Configured Operator client
910
710
  */
911
711
  async function initOperatorClient(accessToken) {
912
- const { createTracingInterceptor } = await import("./interceptor-BPiIBTk_.mjs");
712
+ const { createTracingInterceptor } = await import("./interceptor-B0d_GrI5.mjs");
913
713
  return createClient(OperatorService, createConnectTransport({
914
714
  httpVersion: "2",
915
715
  baseUrl: platformBaseUrl,
@@ -1153,4674 +953,5 @@ async function fetchMachineUserToken(url, clientId, clientSecret) {
1153
953
  }
1154
954
 
1155
955
  //#endregion
1156
- //#region src/cli/shared/context.ts
1157
- const pfConfigSchema = z.object({
1158
- version: z.literal(1),
1159
- users: z.partialRecord(z.string(), z.object({
1160
- access_token: z.string(),
1161
- refresh_token: z.string(),
1162
- token_expires_at: z.string()
1163
- })),
1164
- profiles: z.partialRecord(z.string(), z.object({
1165
- user: z.string(),
1166
- workspace_id: z.string()
1167
- })),
1168
- current_user: z.string().nullable()
1169
- });
1170
- function platformConfigPath() {
1171
- if (!xdgConfig) throw new Error("User home directory not found");
1172
- return path.join(xdgConfig, "tailor-platform", "config.yaml");
1173
- }
1174
- /**
1175
- * Read Tailor Platform CLI configuration, migrating from tailorctl if necessary.
1176
- * @returns Parsed platform configuration
1177
- */
1178
- function readPlatformConfig() {
1179
- const configPath = platformConfigPath();
1180
- if (!fs$1.existsSync(configPath)) {
1181
- logger.warn(`Config not found at ${configPath}, migrating from tailorctl config...`);
1182
- const tcConfig = readTailorctlConfig();
1183
- const pfConfig = tcConfig ? fromTailorctlConfig(tcConfig) : {
1184
- version: 1,
1185
- users: {},
1186
- profiles: {},
1187
- current_user: null
1188
- };
1189
- writePlatformConfig(pfConfig);
1190
- return pfConfig;
1191
- }
1192
- const rawConfig = parseYAML(fs$1.readFileSync(configPath, "utf-8"));
1193
- return pfConfigSchema.parse(rawConfig);
1194
- }
1195
- /**
1196
- * Write Tailor Platform CLI configuration to disk.
1197
- * @param config - Platform configuration to write
1198
- */
1199
- function writePlatformConfig(config) {
1200
- const configPath = platformConfigPath();
1201
- fs$1.mkdirSync(path.dirname(configPath), { recursive: true });
1202
- fs$1.writeFileSync(configPath, stringifyYAML(config));
1203
- }
1204
- const tcContextConfigSchema = z.object({
1205
- username: z.string().optional(),
1206
- controlplaneaccesstoken: z.string().optional(),
1207
- controlplanerefreshtoken: z.string().optional(),
1208
- controlplanetokenexpiresat: z.string().optional(),
1209
- workspaceid: z.string().optional()
1210
- });
1211
- const tcConfigSchema = z.object({ global: z.object({ context: z.string().optional() }).optional() }).catchall(tcContextConfigSchema.optional());
1212
- function readTailorctlConfig() {
1213
- const configPath = path.join(os.homedir(), ".tailorctl", "config");
1214
- if (!fs$1.existsSync(configPath)) return;
1215
- const rawConfig = parseTOML(fs$1.readFileSync(configPath, "utf-8"));
1216
- return tcConfigSchema.parse(rawConfig);
1217
- }
1218
- function fromTailorctlConfig(config) {
1219
- const users = {};
1220
- const profiles = {};
1221
- let currentUser = null;
1222
- const currentContext = config.global?.context || "default";
1223
- for (const [key, val] of Object.entries(config)) {
1224
- if (key === "global") continue;
1225
- const context = val;
1226
- if (!context.username || !context.controlplaneaccesstoken || !context.controlplanerefreshtoken || !context.controlplanetokenexpiresat || !context.workspaceid) continue;
1227
- if (key === currentContext) currentUser = context.username;
1228
- profiles[key] = {
1229
- user: context.username,
1230
- workspace_id: context.workspaceid
1231
- };
1232
- const user = users[context.username];
1233
- if (!user || new Date(user.token_expires_at) < new Date(context.controlplanetokenexpiresat)) users[context.username] = {
1234
- access_token: context.controlplaneaccesstoken,
1235
- refresh_token: context.controlplanerefreshtoken,
1236
- token_expires_at: context.controlplanetokenexpiresat
1237
- };
1238
- }
1239
- return {
1240
- version: 1,
1241
- users,
1242
- profiles,
1243
- current_user: currentUser
1244
- };
1245
- }
1246
- function validateUUID(value, source) {
1247
- const result = z.uuid().safeParse(value);
1248
- if (!result.success) throw new Error(`Invalid value from ${source}: must be a valid UUID`);
1249
- return result.data;
1250
- }
1251
- /**
1252
- * Load workspace ID from command options, environment variables, or platform config.
1253
- * In CLI context, env fallback is also handled by politty's arg env option.
1254
- * Priority: opts/workspaceId > env/workspaceId > opts/profile > error
1255
- * @param opts - Workspace and profile options
1256
- * @returns Resolved workspace ID
1257
- */
1258
- function loadWorkspaceId(opts) {
1259
- if (opts?.workspaceId) return validateUUID(opts.workspaceId, "--workspace-id option");
1260
- if (process.env.TAILOR_PLATFORM_WORKSPACE_ID) return validateUUID(process.env.TAILOR_PLATFORM_WORKSPACE_ID, "TAILOR_PLATFORM_WORKSPACE_ID environment variable");
1261
- const profile = opts?.profile || process.env.TAILOR_PLATFORM_PROFILE;
1262
- if (profile) {
1263
- const wsId = readPlatformConfig().profiles[profile]?.workspace_id;
1264
- if (!wsId) throw new Error(`Profile "${profile}" not found`);
1265
- return validateUUID(wsId, `profile "${profile}"`);
1266
- }
1267
- throw new Error(ml`
1268
- Workspace ID not found.
1269
- Please specify workspace ID via --workspace-id option or TAILOR_PLATFORM_WORKSPACE_ID environment variable.
1270
- `);
1271
- }
1272
- /**
1273
- * Load access token from environment variables, command options, or platform config.
1274
- * In CLI context, profile env fallback is also handled by politty's arg env option.
1275
- * Priority: env/TAILOR_PLATFORM_TOKEN > env/TAILOR_TOKEN (deprecated) > opts/profile > env/profile > config/currentUser > error
1276
- * @param opts - Profile options
1277
- * @returns Resolved access token
1278
- */
1279
- async function loadAccessToken(opts) {
1280
- if (process.env.TAILOR_PLATFORM_TOKEN) return process.env.TAILOR_PLATFORM_TOKEN;
1281
- if (process.env.TAILOR_TOKEN) {
1282
- logger.warn("TAILOR_TOKEN is deprecated. Please use TAILOR_PLATFORM_TOKEN instead.");
1283
- return process.env.TAILOR_TOKEN;
1284
- }
1285
- const pfConfig = readPlatformConfig();
1286
- let user;
1287
- const profile = opts?.useProfile ? opts.profile || process.env.TAILOR_PLATFORM_PROFILE : void 0;
1288
- if (profile) {
1289
- const u = pfConfig.profiles[profile]?.user;
1290
- if (!u) throw new Error(`Profile "${profile}" not found`);
1291
- user = u;
1292
- } else {
1293
- const u = pfConfig.current_user;
1294
- if (!u) throw new Error(ml`
1295
- Tailor Platform token not found.
1296
- Please specify token via TAILOR_PLATFORM_TOKEN environment variable or login using 'tailor-sdk login' command.
1297
- `);
1298
- user = u;
1299
- }
1300
- return await fetchLatestToken(pfConfig, user);
1301
- }
1302
- /**
1303
- * Fetch the latest access token, refreshing if necessary.
1304
- * @param config - Platform config
1305
- * @param user - User name
1306
- * @returns Latest access token
1307
- */
1308
- async function fetchLatestToken(config, user) {
1309
- const tokens = config.users[user];
1310
- if (!tokens) throw new Error(ml`
1311
- User "${user}" not found.
1312
- Please verify your user name and login using 'tailor-sdk login' command.
1313
- `);
1314
- if (new Date(tokens.token_expires_at) > /* @__PURE__ */ new Date()) return tokens.access_token;
1315
- const client = initOAuth2Client();
1316
- let resp;
1317
- try {
1318
- resp = await client.refreshToken({
1319
- accessToken: tokens.access_token,
1320
- refreshToken: tokens.refresh_token,
1321
- expiresAt: Date.parse(tokens.token_expires_at)
1322
- });
1323
- } catch {
1324
- throw new Error(ml`
1325
- Failed to refresh token. Your session may have expired.
1326
- Please run 'tailor-sdk login' and try again.
1327
- `);
1328
- }
1329
- config.users[user] = {
1330
- access_token: resp.accessToken,
1331
- refresh_token: resp.refreshToken,
1332
- token_expires_at: new Date(resp.expiresAt).toISOString()
1333
- };
1334
- writePlatformConfig(config);
1335
- return resp.accessToken;
1336
- }
1337
- const DEFAULT_CONFIG_FILENAME = "tailor.config.ts";
1338
- /**
1339
- * Load config path from command options, environment variables, or search parent directories.
1340
- * In CLI context, env fallback is also handled by politty's arg env option.
1341
- * Priority: opts/config > env/config > search parent directories
1342
- * @param configPath - Optional explicit config path
1343
- * @returns Resolved config path or undefined
1344
- */
1345
- function loadConfigPath(configPath) {
1346
- if (configPath) return configPath;
1347
- if (process.env.TAILOR_PLATFORM_SDK_CONFIG_PATH) return process.env.TAILOR_PLATFORM_SDK_CONFIG_PATH;
1348
- return findUpSync(DEFAULT_CONFIG_FILENAME);
1349
- }
1350
-
1351
- //#endregion
1352
- //#region src/cli/cache/hasher.ts
1353
- /**
1354
- * Compute the SHA-256 hex digest of an arbitrary string.
1355
- * @param content - The string content to hash
1356
- * @returns Hex-encoded SHA-256 hash
1357
- */
1358
- function hashContent(content) {
1359
- return crypto.createHash("sha256").update(content, "utf-8").digest("hex");
1360
- }
1361
- /**
1362
- * Read a file and return its SHA-256 hex digest.
1363
- * @param filePath - Absolute path to the file
1364
- * @returns Hex-encoded SHA-256 hash of the file content
1365
- */
1366
- function hashFile(filePath) {
1367
- const content = fs$1.readFileSync(filePath);
1368
- return crypto.createHash("sha256").update(content).digest("hex");
1369
- }
1370
- /**
1371
- * Compute a deterministic SHA-256 hash for multiple files.
1372
- *
1373
- * Paths are sorted alphabetically before hashing so that the result
1374
- * is independent of the order the paths are supplied (e.g. glob ordering).
1375
- * Each file's individual hash is concatenated and then hashed again.
1376
- * @param filePaths - Array of absolute file paths
1377
- * @returns Hex-encoded SHA-256 hash representing all files
1378
- */
1379
- function hashFiles(filePaths) {
1380
- return hashContent([...filePaths].sort().map((fp) => hashFile(fp)).join(""));
1381
- }
1382
-
1383
- //#endregion
1384
- //#region src/cli/shared/dist-dir.ts
1385
- let distPath = null;
1386
- const getDistDir = () => {
1387
- const configured = process.env.TAILOR_SDK_OUTPUT_DIR;
1388
- if (configured && configured !== distPath) distPath = configured;
1389
- else if (distPath === null) distPath = configured || ".tailor-sdk";
1390
- return distPath;
1391
- };
1392
-
1393
- //#endregion
1394
- //#region src/cli/cache/dep-collector-plugin.ts
1395
- /**
1396
- * Create a rolldown plugin that collects all resolved module paths during a build.
1397
- * The plugin is purely observational and does not modify any code or behavior.
1398
- * Collected paths exclude node_modules and generated entry files.
1399
- * node_modules changes (package upgrades) are not tracked per-bundle;
1400
- * lockfile hash and SDK version changes invalidate the entire cache.
1401
- * @returns An object containing the plugin and a getResult function that returns sorted, deduplicated paths
1402
- */
1403
- function createDepCollectorPlugin() {
1404
- const collectedPaths = /* @__PURE__ */ new Set();
1405
- const plugin = {
1406
- name: "cache-dep-collector",
1407
- load: {
1408
- filter: { id: { include: [/\.[^/]+$/] } },
1409
- handler(id) {
1410
- if (!id.includes("node_modules") && !id.endsWith(".entry.js")) collectedPaths.add(id);
1411
- return null;
1412
- }
1413
- }
1414
- };
1415
- function getResult() {
1416
- return Array.from(collectedPaths).sort();
1417
- }
1418
- return {
1419
- plugin,
1420
- getResult
1421
- };
1422
- }
1423
-
1424
- //#endregion
1425
- //#region src/cli/cache/bundle-cache.ts
1426
- function buildCacheKey(kind, name) {
1427
- return `${kind}:${name}`;
1428
- }
1429
- function combineHash(fileHash, contextHash) {
1430
- if (!contextHash) return fileHash;
1431
- return hashContent(fileHash + contextHash);
1432
- }
1433
- /**
1434
- * Compute a context hash for cache invalidation across bundlers.
1435
- *
1436
- * Combines the source file path, serialized trigger context, tsconfig hash,
1437
- * sourcemap mode, and an optional prefix (e.g., serialized env variables)
1438
- * into a single SHA-256 hash.
1439
- * @param params - Context hash computation parameters
1440
- * @returns SHA-256 hex digest of the combined context
1441
- */
1442
- function computeBundlerContextHash(params) {
1443
- const { sourceFile, serializedTriggerContext, tsconfig, inlineSourcemap, prefix } = params;
1444
- return hashContent((prefix ?? "") + path.resolve(sourceFile) + serializedTriggerContext + (tsconfig ? hashFile(tsconfig) : "") + String(inlineSourcemap ?? false));
1445
- }
1446
- /**
1447
- * Run a build with optional cache restore/save around it.
1448
- * When caching is active, attempts to restore from cache first,
1449
- * and saves the build result (with collected dependencies) on a cache miss.
1450
- * @param params - Cache and build parameters
1451
- */
1452
- async function withCache(params) {
1453
- const { cache, kind, name, sourceFile, outputPath, contextHash, build } = params;
1454
- if (!cache) {
1455
- await build([]);
1456
- return;
1457
- }
1458
- if (cache.tryRestore({
1459
- kind,
1460
- name,
1461
- outputPath,
1462
- contextHash
1463
- })) {
1464
- logger.debug(` ${styles.dim("cached")}: ${name}`);
1465
- return;
1466
- }
1467
- const { plugin, getResult } = createDepCollectorPlugin();
1468
- await build([plugin]);
1469
- cache.save({
1470
- kind,
1471
- name,
1472
- sourceFile,
1473
- outputPath,
1474
- dependencyPaths: getResult(),
1475
- contextHash
1476
- });
1477
- }
1478
- /**
1479
- * Create a bundle cache backed by the given store.
1480
- * @param store - The cache store for persistence
1481
- * @returns A BundleCache instance
1482
- */
1483
- function createBundleCache(store) {
1484
- function tryRestore(params) {
1485
- const cacheKey = buildCacheKey(params.kind, params.name);
1486
- const entry = store.getEntry(cacheKey);
1487
- if (!entry) return false;
1488
- let currentHash;
1489
- try {
1490
- currentHash = combineHash(hashFiles(entry.dependencyPaths), params.contextHash);
1491
- } catch {
1492
- return false;
1493
- }
1494
- if (currentHash !== entry.inputHash) return false;
1495
- return store.restoreBundleOutput(cacheKey, params.outputPath);
1496
- }
1497
- function save(params) {
1498
- const { kind, name, sourceFile, outputPath, dependencyPaths, contextHash } = params;
1499
- const cacheKey = buildCacheKey(kind, name);
1500
- const allDeps = dependencyPaths.includes(sourceFile) ? dependencyPaths : [sourceFile, ...dependencyPaths];
1501
- const inputHash = combineHash(hashFiles(allDeps), contextHash);
1502
- const contentHash = hashFile(outputPath);
1503
- store.storeBundleOutput(cacheKey, outputPath);
1504
- const outputFiles = [{
1505
- outputPath,
1506
- contentHash
1507
- }];
1508
- const mapPath = `${outputPath}.map`;
1509
- if (fs$1.existsSync(mapPath)) outputFiles.push({
1510
- outputPath: mapPath,
1511
- contentHash: hashFile(mapPath)
1512
- });
1513
- store.setEntry(cacheKey, {
1514
- kind: "bundle",
1515
- inputHash,
1516
- dependencyPaths: allDeps,
1517
- outputFiles,
1518
- createdAt: (/* @__PURE__ */ new Date()).toISOString()
1519
- });
1520
- }
1521
- return {
1522
- tryRestore,
1523
- save
1524
- };
1525
- }
1526
-
1527
- //#endregion
1528
- //#region src/cli/shared/plugin-import.ts
1529
- /**
1530
- * Collect base directories for resolving plugin import paths.
1531
- * @param configPath - Path to tailor.config.ts
1532
- * @returns Ordered list of base directories
1533
- */
1534
- function getPluginImportBaseDirs(configPath) {
1535
- if (configPath) return [path.dirname(configPath)];
1536
- return [process.cwd()];
1537
- }
1538
- /**
1539
- * Resolve a relative plugin import path against candidate base directories.
1540
- * @param pluginImportPath - Relative plugin import path
1541
- * @param baseDirs - Candidate base directories
1542
- * @returns Absolute path if found, otherwise null
1543
- */
1544
- function resolveRelativePluginImportPath(pluginImportPath, baseDirs) {
1545
- if (!pluginImportPath.startsWith(".")) return null;
1546
- for (const baseDir of baseDirs) {
1547
- const absolutePath = path.resolve(baseDir, pluginImportPath);
1548
- if (fs$1.existsSync(absolutePath)) return absolutePath;
1549
- }
1550
- return null;
1551
- }
1552
-
1553
- //#endregion
1554
- //#region src/types/plugin.ts
1555
- /**
1556
- * Checks if a plugin executor uses file-based resolution.
1557
- * @param executor - The plugin executor to check.
1558
- * @returns True if the executor uses file-based resolution.
1559
- */
1560
- function isPluginExecutorWithFile(executor) {
1561
- return "resolve" in executor && "context" in executor;
1562
- }
1563
-
1564
- //#endregion
1565
- //#region src/cli/commands/generate/plugin-executor-generator.ts
1566
- /**
1567
- * Plugin Executor Generator
1568
- *
1569
- * Generates TypeScript files for plugin-generated executors.
1570
- * Supports both legacy format (inline trigger/operation) and new format (executorFile/context).
1571
- */
1572
- /**
1573
- * Generate TypeScript files for plugin-generated executors.
1574
- * These files will be processed by the standard executor bundler.
1575
- * @param executors - Array of plugin executor information
1576
- * @param outputDir - Base output directory (e.g., .tailor-sdk)
1577
- * @param typeGenerationResult - Result from plugin type generation (for import resolution)
1578
- * @param sourceTypeInfoMap - Map of source type names to their source info
1579
- * @param configPath - Path to tailor.config.ts (used for resolving plugin import paths)
1580
- * @returns Array of generated file paths
1581
- */
1582
- function generatePluginExecutorFiles(executors, outputDir, typeGenerationResult, sourceTypeInfoMap, configPath) {
1583
- if (executors.length === 0) return [];
1584
- const generatedFiles = [];
1585
- const baseDirs = getPluginImportBaseDirs(configPath);
1586
- for (const info of executors) {
1587
- const filePath = generateSingleExecutorFile(info, outputDir, typeGenerationResult, sourceTypeInfoMap, baseDirs);
1588
- generatedFiles.push(filePath);
1589
- const relativePath = path.relative(process.cwd(), filePath);
1590
- logger.log(` Plugin Executor File: ${styles.success(relativePath)} from plugin ${styles.info(info.pluginId)}`);
1591
- }
1592
- return generatedFiles;
1593
- }
1594
- /**
1595
- * Generate a single executor file.
1596
- * @param info - Plugin executor metadata and definition
1597
- * @param outputDir - Base output directory (e.g., .tailor-sdk)
1598
- * @param typeGenerationResult - Result from plugin type generation
1599
- * @param sourceTypeInfoMap - Map of source type names to their source info
1600
- * @param baseDirs - Base directories for resolving plugin import paths
1601
- * @returns Absolute path to the generated file
1602
- */
1603
- function generateSingleExecutorFile(info, outputDir, typeGenerationResult, sourceTypeInfoMap, baseDirs = []) {
1604
- const pluginDir = sanitizePluginId$1(info.pluginId);
1605
- const executorOutputDir = path.join(outputDir, pluginDir, "executors");
1606
- fs$1.mkdirSync(executorOutputDir, { recursive: true });
1607
- const fileName = sanitizeExecutorFileName(info.executor.name);
1608
- const filePath = path.join(executorOutputDir, `${fileName}.ts`);
1609
- let content;
1610
- if (isPluginExecutorWithFile(info.executor)) content = generateExecutorFileContentNew(info, info.executor, outputDir, typeGenerationResult, sourceTypeInfoMap, baseDirs);
1611
- else content = generateExecutorFileContentLegacy(info.executor);
1612
- fs$1.writeFileSync(filePath, content);
1613
- return filePath;
1614
- }
1615
- /**
1616
- * Generate TypeScript file content for new format executor (dynamic import).
1617
- * Uses the executor's resolve function to dynamically import the module.
1618
- * @param info - Plugin executor information
1619
- * @param executor - Executor definition with resolve
1620
- * @param outputDir - Base output directory
1621
- * @param typeGenerationResult - Result from plugin type generation
1622
- * @param sourceTypeInfoMap - Map of source type names to their source info
1623
- * @param baseDirs - Base directories for resolving plugin import paths
1624
- * @returns TypeScript source code for executor file
1625
- */
1626
- function generateExecutorFileContentNew(info, executor, outputDir, typeGenerationResult, sourceTypeInfoMap, baseDirs = []) {
1627
- const { resolve, context } = executor;
1628
- const pluginDir = sanitizePluginId$1(info.pluginId);
1629
- const executorOutputDir = path.join(outputDir, pluginDir, "executors");
1630
- const executorImportPath = resolveExecutorImportPath(resolve, info.pluginImportPath, executorOutputDir, baseDirs);
1631
- const typeImports = collectTypeImports(context, outputDir, info.pluginId, typeGenerationResult, sourceTypeInfoMap);
1632
- const imports = [];
1633
- for (const [, importInfo] of typeImports) imports.push(`import { ${importInfo.variableName} } from "${importInfo.importPath}";`);
1634
- const contextCode = generateContextCode(context, typeImports);
1635
- return ml`
1636
- /**
1637
- * Auto-generated executor by plugin: ${info.pluginId}
1638
- * DO NOT EDIT - This file is generated by @tailor-platform/sdk
1639
- */
1640
- ${imports.join("\n")}
1641
-
1642
- const { default: executorFactory } = await import(${JSON.stringify(executorImportPath)});
1643
- if (typeof executorFactory !== "function") {
1644
- throw new Error(
1645
- "Plugin executor module must export a default function created by withPluginContext().",
1646
- );
1647
- }
1648
- export default executorFactory(${contextCode});
1649
- `;
1650
- }
1651
- /**
1652
- * Collect type imports needed for context.
1653
- * @param context - Executor context values from plugin
1654
- * @param outputDir - Base output directory for generated files
1655
- * @param pluginId - Plugin identifier used for output paths
1656
- * @param typeGenerationResult - Result from plugin type generation
1657
- * @param sourceTypeInfoMap - Map of source type names to their source info
1658
- * @returns Map of context keys to their import information
1659
- */
1660
- function collectTypeImports(context, outputDir, pluginId, typeGenerationResult, sourceTypeInfoMap) {
1661
- const typeImports = /* @__PURE__ */ new Map();
1662
- const pluginDir = sanitizePluginId$1(pluginId);
1663
- const executorDir = path.join(outputDir, pluginDir, "executors");
1664
- for (const [key, value] of Object.entries(context)) if (isTypeObject(value)) {
1665
- const typeName = value.name;
1666
- const sourceInfo = sourceTypeInfoMap?.get(typeName);
1667
- const variableName = sourceInfo?.exportName ?? toCamelCase$1(typeName);
1668
- let importPath;
1669
- let isGeneratedType = false;
1670
- if (typeGenerationResult?.typeFilePaths.has(typeName)) {
1671
- const typeFilePath = typeGenerationResult.typeFilePaths.get(typeName);
1672
- const absoluteTypePath = path.join(outputDir, typeFilePath);
1673
- importPath = path.relative(executorDir, absoluteTypePath).replace(/\.ts$/, "");
1674
- if (!importPath.startsWith(".")) importPath = `./${importPath}`;
1675
- isGeneratedType = true;
1676
- } else if (sourceInfo) {
1677
- const sourceFilePath = sourceInfo.filePath;
1678
- importPath = path.relative(executorDir, sourceFilePath).replace(/\.ts$/, "");
1679
- if (!importPath.startsWith(".")) importPath = `./${importPath}`;
1680
- } else importPath = `../../../../tailordb/${toKebabCase$1(typeName)}`;
1681
- typeImports.set(key, {
1682
- variableName,
1683
- importPath,
1684
- isGeneratedType
1685
- });
1686
- }
1687
- return typeImports;
1688
- }
1689
- /**
1690
- * Generate TypeScript code for context object.
1691
- * @param context - Executor context values from plugin
1692
- * @param typeImports - Resolved type import information for context keys
1693
- * @returns TypeScript object literal code
1694
- */
1695
- function generateContextCode(context, typeImports) {
1696
- const entries = [];
1697
- for (const [key, value] of Object.entries(context)) if (isTypeObject(value)) {
1698
- const importInfo = typeImports.get(key);
1699
- if (importInfo) entries.push(` ${key}: ${importInfo.variableName}`);
1700
- } else if (value !== void 0) entries.push(` ${key}: ${JSON.stringify(value)}`);
1701
- return `{\n${entries.join(",\n")},\n}`;
1702
- }
1703
- /**
1704
- * Check if a value is a TailorDB type object.
1705
- * @param value - Value to inspect
1706
- * @returns True if value is a type object with name and fields
1707
- */
1708
- function isTypeObject(value) {
1709
- return typeof value === "object" && value !== null && "name" in value && "fields" in value && typeof value.name === "string";
1710
- }
1711
- /**
1712
- * Generate TypeScript file content for legacy format executor (trigger/operation).
1713
- * @param executor - Legacy executor definition
1714
- * @returns TypeScript source code for executor file
1715
- */
1716
- function generateExecutorFileContentLegacy(executor) {
1717
- const triggerCode = generateTriggerCode(executor.trigger);
1718
- const operationCode = generateOperationCode(executor.operation);
1719
- const injectDeclarations = generateInjectDeclarations(executor.operation.kind === "function" ? executor.operation.inject : void 0);
1720
- const descriptionLine = executor.description ? `\n description: ${JSON.stringify(executor.description)},` : "";
1721
- return ml`
1722
- /**
1723
- * Auto-generated executor by plugin
1724
- * DO NOT EDIT - This file is generated by @tailor-platform/sdk
1725
- */
1726
- import { createExecutor } from "@tailor-platform/sdk";
1727
- ${injectDeclarations}
1728
- export default createExecutor({
1729
- name: ${JSON.stringify(executor.name)},${descriptionLine}
1730
- trigger: ${triggerCode},
1731
- operation: ${operationCode},
1732
- });
1733
- `;
1734
- }
1735
- /**
1736
- * Generate const declarations for injected variables.
1737
- * @param inject - Map of injected values keyed by variable name
1738
- * @returns TypeScript const declarations or empty string
1739
- */
1740
- function generateInjectDeclarations(inject) {
1741
- if (!inject || Object.keys(inject).length === 0) return "";
1742
- return `\n// Injected variables from plugin\n${Object.entries(inject).map(([name, value]) => `const ${name} = ${JSON.stringify(value)};`).join("\n")}\n`;
1743
- }
1744
- /**
1745
- * Generate TypeScript code for trigger configuration.
1746
- * @param trigger - Trigger configuration for executor
1747
- * @returns TypeScript code for trigger object
1748
- */
1749
- function generateTriggerCode(trigger) {
1750
- switch (trigger.kind) {
1751
- case "recordCreated":
1752
- case "recordUpdated":
1753
- case "recordDeleted": return `{
1754
- kind: ${JSON.stringify(trigger.kind)},
1755
- typeName: ${JSON.stringify(trigger.typeName)},
1756
- }`;
1757
- case "schedule": return `{
1758
- kind: "schedule",
1759
- cron: ${JSON.stringify(trigger.cron)},
1760
- timezone: ${JSON.stringify(trigger.timezone ?? "UTC")},
1761
- }`;
1762
- case "incomingWebhook": return `{
1763
- kind: "incomingWebhook",
1764
- }`;
1765
- default: throw new Error(`Unknown trigger kind: ${trigger.kind}`);
1766
- }
1767
- }
1768
- /**
1769
- * Generate TypeScript code for operation configuration.
1770
- * @param operation - Operation configuration for executor
1771
- * @returns TypeScript code for operation object
1772
- */
1773
- function generateOperationCode(operation) {
1774
- switch (operation.kind) {
1775
- case "graphql": {
1776
- const appNameLine = operation.appName ? `\n appName: ${JSON.stringify(operation.appName)},` : "";
1777
- const variablesLine = operation.variables ? `\n variables: ${operation.variables},` : "";
1778
- return `{
1779
- kind: "graphql",
1780
- query: \`${escapeTemplateLiteral(operation.query)}\`,${appNameLine}${variablesLine}
1781
- }`;
1782
- }
1783
- case "function": return `{
1784
- kind: "function",
1785
- body: ${operation.body},
1786
- }`;
1787
- case "webhook": return `{
1788
- kind: "webhook",
1789
- url: () => ${JSON.stringify(operation.url)},
1790
- }`;
1791
- case "workflow": return `{
1792
- kind: "workflow",
1793
- workflowName: ${JSON.stringify(operation.workflowName)},
1794
- }`;
1795
- default: throw new Error(`Unknown operation kind: ${operation.kind}`);
1796
- }
1797
- }
1798
- /**
1799
- * Escape special characters in template literal content.
1800
- * @param str - Raw template literal content
1801
- * @returns Escaped string safe for template literals
1802
- */
1803
- function escapeTemplateLiteral(str) {
1804
- return str.replace(/\\/g, "\\\\").replace(/`/g, "\\`").replace(/\$\{/g, "\\${");
1805
- }
1806
- const require = createRequire(import.meta.url);
1807
- /**
1808
- * Resolve the import path for a plugin executor module.
1809
- * @param resolve - Executor resolve function
1810
- * @param pluginImportPath - Plugin's import path
1811
- * @param executorOutputDir - Directory where the generated executor will be written
1812
- * @param baseDirs - Base directories for resolving plugin import paths
1813
- * @returns Import path string for the executor module
1814
- */
1815
- function resolveExecutorImportPath(resolve, pluginImportPath, executorOutputDir, baseDirs) {
1816
- const specifier = extractDynamicImportSpecifier(resolve);
1817
- if (!specifier.startsWith(".")) return specifier;
1818
- const pluginBaseDir = resolvePluginBaseDir(pluginImportPath, baseDirs);
1819
- if (!pluginBaseDir) throw new Error(`Unable to resolve plugin import base for "${pluginImportPath}". Tried base dirs: ${baseDirs.join(", ") || "(none)"}. Use an absolute import specifier in resolve(), or ensure the plugin path is resolvable.`);
1820
- const absolutePath = path.resolve(pluginBaseDir, specifier);
1821
- let relativePath = path.relative(executorOutputDir, absolutePath).replace(/\\/g, "/");
1822
- relativePath = stripSourceExtension(relativePath);
1823
- if (!relativePath.startsWith(".")) relativePath = `./${relativePath}`;
1824
- return relativePath;
1825
- }
1826
- /**
1827
- * Extract the dynamic import specifier from a resolve function.
1828
- * @param resolve - Executor resolve function
1829
- * @returns The module specifier string
1830
- */
1831
- function extractDynamicImportSpecifier(resolve) {
1832
- const match = resolve.toString().match(/import\s*\(\s*["']([^"']+)["']\s*\)/);
1833
- if (!match) throw new Error(`resolve() must return a dynamic import, e.g. \`async () => await import("./executors/on-create")\`.`);
1834
- return match[1];
1835
- }
1836
- /**
1837
- * Resolve plugin base directory for relative imports.
1838
- * @param pluginImportPath - Plugin import path
1839
- * @param baseDirs - Base directories for resolving plugin import paths
1840
- * @returns Directory path or null if not resolvable
1841
- */
1842
- function resolvePluginBaseDir(pluginImportPath, baseDirs) {
1843
- if (pluginImportPath.startsWith(".")) {
1844
- const resolvedPath = resolveRelativePluginImportPath(pluginImportPath, baseDirs) ?? path.resolve(baseDirs[0] ?? process.cwd(), pluginImportPath);
1845
- if (fs$1.existsSync(resolvedPath)) return fs$1.statSync(resolvedPath).isDirectory() ? resolvedPath : path.dirname(resolvedPath);
1846
- return path.extname(resolvedPath) ? path.dirname(resolvedPath) : resolvedPath;
1847
- }
1848
- for (const baseDir of baseDirs) try {
1849
- const resolved = require.resolve(pluginImportPath, { paths: [baseDir] });
1850
- return path.dirname(resolved);
1851
- } catch {
1852
- continue;
1853
- }
1854
- return null;
1855
- }
1856
- /**
1857
- * Strip TypeScript source extensions from import paths.
1858
- * @param importPath - Path to normalize
1859
- * @returns Path without .ts/.tsx extension
1860
- */
1861
- function stripSourceExtension(importPath) {
1862
- return importPath.replace(/\.(ts|tsx)$/, "");
1863
- }
1864
- /**
1865
- * Convert plugin ID to safe directory name.
1866
- * @param pluginId - Plugin identifier (e.g., "@scope/name")
1867
- * @returns Safe directory name
1868
- */
1869
- function sanitizePluginId$1(pluginId) {
1870
- return pluginId.replace(/^@/, "").replace(/\//g, "-");
1871
- }
1872
- /**
1873
- * Convert executor name to safe filename.
1874
- * @param executorName - Executor name
1875
- * @returns Safe filename without extension
1876
- */
1877
- function sanitizeExecutorFileName(executorName) {
1878
- const sanitized = path.basename(executorName).replace(/\.[^/.]+$/, "").replace(/[^a-zA-Z0-9_-]/g, "-");
1879
- if (!sanitized) throw new Error(`Invalid executor name: "${executorName}"`);
1880
- return sanitized;
1881
- }
1882
- /**
1883
- * Convert string to camelCase.
1884
- * @param str - Input string to convert
1885
- * @returns camelCase string
1886
- */
1887
- function toCamelCase$1(str) {
1888
- const result = str.replace(/[-_\s]+(.)?/g, (_, c) => c ? c.toUpperCase() : "");
1889
- return result.charAt(0).toLowerCase() + result.slice(1);
1890
- }
1891
- /**
1892
- * Convert string to kebab-case.
1893
- * @param str - Input string to convert
1894
- * @returns kebab-case string
1895
- */
1896
- function toKebabCase$1(str) {
1897
- return str.replace(/([a-z])([A-Z])/g, "$1-$2").replace(/[\s_]+/g, "-").toLowerCase();
1898
- }
1899
-
1900
- //#endregion
1901
- //#region src/cli/commands/generate/plugin-type-generator.ts
1902
- /**
1903
- * Plugin Type Generator
1904
- *
1905
- * Generates TypeScript files for plugin-generated types (TailorDB types).
1906
- * These files can be imported by plugin executors to reference generated types.
1907
- */
1908
- function isFieldDefinition(value) {
1909
- return typeof value === "object" && value !== null;
1910
- }
1911
- /**
1912
- * Generate TypeScript files for plugin-generated types.
1913
- * These files export the type definition and can be imported by executor files.
1914
- * @param types - Array of plugin type information
1915
- * @param outputDir - Base output directory (e.g., .tailor-sdk)
1916
- * @returns Generation result with file paths
1917
- */
1918
- function generatePluginTypeFiles(types, outputDir) {
1919
- const typeFilePaths = /* @__PURE__ */ new Map();
1920
- const generatedFiles = [];
1921
- if (types.length === 0) return {
1922
- typeFilePaths,
1923
- generatedFiles
1924
- };
1925
- const seenTypeNames = /* @__PURE__ */ new Map();
1926
- for (const info of types) {
1927
- const existing = seenTypeNames.get(info.type.name);
1928
- if (existing) throw new Error(`Duplicate plugin-generated type name "${info.type.name}" detected. First: plugin "${existing.pluginId}" (kind: "${existing.kind}", source type: "${existing.sourceTypeName}"), Second: plugin "${info.pluginId}" (kind: "${info.kind}", source type: "${info.sourceTypeName}"). Plugin-generated type names must be unique.`);
1929
- seenTypeNames.set(info.type.name, info);
1930
- const pluginDir = sanitizePluginId(info.pluginId);
1931
- const typeOutputDir = path.join(outputDir, pluginDir, "types");
1932
- fs$1.mkdirSync(typeOutputDir, { recursive: true });
1933
- const fileName = `${toKebabCase(info.type.name)}.ts`;
1934
- const filePath = path.join(typeOutputDir, fileName);
1935
- const content = generateTypeFileContent(info);
1936
- fs$1.writeFileSync(filePath, content);
1937
- generatedFiles.push(filePath);
1938
- const relativePath = path.relative(outputDir, filePath);
1939
- typeFilePaths.set(info.type.name, relativePath);
1940
- const displayPath = path.relative(process.cwd(), filePath);
1941
- logger.log(` Plugin Type File: ${styles.success(displayPath)} (${styles.dim(info.kind)}) from plugin ${styles.info(info.pluginId)}`);
1942
- }
1943
- return {
1944
- typeFilePaths,
1945
- generatedFiles
1946
- };
1947
- }
1948
- /**
1949
- * Generate TypeScript file content for a single type.
1950
- * @param info - Plugin type information
1951
- * @returns TypeScript source code
1952
- */
1953
- function generateTypeFileContent(info) {
1954
- const { type, pluginId, sourceTypeName, kind } = info;
1955
- const variableName = toCamelCase(type.name);
1956
- const fieldsCode = generateFieldsCode(type);
1957
- return ml`
1958
- /**
1959
- * Auto-generated type by plugin: ${pluginId}
1960
- * Source type: ${sourceTypeName}
1961
- * Kind: ${kind}
1962
- *
1963
- * DO NOT EDIT - This file is generated by @tailor-platform/sdk
1964
- */
1965
- import { db } from "@tailor-platform/sdk";
1966
-
1967
- export const ${variableName} = db.type(${JSON.stringify(type.name)}, ${fieldsCode});
1968
-
1969
- export type ${type.name} = typeof ${variableName};
1970
- `;
1971
- }
1972
- /**
1973
- * Generate TypeScript code for field definitions.
1974
- * This creates a simplified version of the type's fields.
1975
- * @param type - TailorDB type
1976
- * @returns TypeScript code for fields object
1977
- */
1978
- function generateFieldsCode(type) {
1979
- const fieldEntries = [];
1980
- for (const [fieldName, field] of Object.entries(type.fields)) {
1981
- if (!isFieldDefinition(field)) continue;
1982
- const fieldCode = generateSingleFieldCode(field);
1983
- if (fieldCode) fieldEntries.push(` ${fieldName}: ${fieldCode}`);
1984
- }
1985
- return `{\n${fieldEntries.join(",\n")},\n}`;
1986
- }
1987
- /**
1988
- * Map from TailorDB type to SDK method name.
1989
- */
1990
- const typeToMethodMap = {
1991
- string: "string",
1992
- integer: "int",
1993
- float: "float",
1994
- boolean: "bool",
1995
- uuid: "uuid",
1996
- datetime: "datetime",
1997
- date: "date",
1998
- time: "time",
1999
- enum: "enum",
2000
- nested: "nested"
2001
- };
2002
- /**
2003
- * Generate TypeScript code for a single field definition.
2004
- * @param field - Field definition object
2005
- * @returns TypeScript code for the field
2006
- */
2007
- function generateSingleFieldCode(field) {
2008
- const fieldType = field.type;
2009
- if (!fieldType) return null;
2010
- const method = typeToMethodMap[fieldType] ?? fieldType;
2011
- const metadata = field._metadata ?? field.metadata ?? {};
2012
- const optionParts = [];
2013
- if (metadata.required === false) optionParts.push("optional: true");
2014
- const optionsArg = optionParts.length > 0 ? `{ ${optionParts.join(", ")} }` : "";
2015
- if (fieldType === "enum") {
2016
- const allowedValues = metadata.allowedValues;
2017
- let enumValues = [];
2018
- if (Array.isArray(allowedValues)) enumValues = allowedValues.map((v) => v.value);
2019
- let code = `db.enum(${JSON.stringify(enumValues)}${optionsArg ? `, ${optionsArg}` : ""})`;
2020
- if (metadata.index) code += ".index()";
2021
- if (metadata.unique) code += ".unique()";
2022
- if (metadata.description) code += `.description(${JSON.stringify(metadata.description)})`;
2023
- return code;
2024
- }
2025
- let code = `db.${method}(${optionsArg})`;
2026
- if (metadata.index) code += ".index()";
2027
- if (metadata.unique) code += ".unique()";
2028
- if (metadata.description) code += `.description(${JSON.stringify(metadata.description)})`;
2029
- return code;
2030
- }
2031
- /**
2032
- * Convert plugin ID to safe directory name.
2033
- * @param pluginId - Plugin identifier (e.g., "@tailor-platform/change-history")
2034
- * @returns Safe directory name (e.g., "tailor-platform-change-history")
2035
- */
2036
- function sanitizePluginId(pluginId) {
2037
- return pluginId.replace(/^@/, "").replace(/\//g, "-");
2038
- }
2039
- /**
2040
- * Convert string to kebab-case.
2041
- * @param str - Input string
2042
- * @returns kebab-case string
2043
- */
2044
- function toKebabCase(str) {
2045
- return str.replace(/([a-z])([A-Z])/g, "$1-$2").replace(/[\s_]+/g, "-").toLowerCase();
2046
- }
2047
- /**
2048
- * Convert string to camelCase.
2049
- * @param str - Input string
2050
- * @returns camelCase string
2051
- */
2052
- function toCamelCase(str) {
2053
- const result = str.replace(/[-_\s]+(.)?/g, (_, c) => c ? c.toUpperCase() : "");
2054
- return result.charAt(0).toLowerCase() + result.slice(1);
2055
- }
2056
-
2057
- //#endregion
2058
- //#region src/cli/services/stale-cleanup.ts
2059
- /**
2060
- * Remove stale `.entry.js` files from the output directory.
2061
- *
2062
- * Must be called before parallel bundling; concurrent builds
2063
- * sharing the same output directory would otherwise conflict.
2064
- * @param outputDir - Directory to clean
2065
- */
2066
- async function removeStaleEntryFiles(outputDir) {
2067
- const files = await fs.readdir(outputDir);
2068
- await Promise.all(files.filter((file) => file.endsWith(".entry.js")).map((file) => fs.rm(path.join(outputDir, file), { force: true })));
2069
- }
2070
-
2071
- //#endregion
2072
- //#region src/cli/services/file-loader.ts
2073
- const DEFAULT_IGNORE_PATTERNS = ["**/*.test.ts", "**/*.spec.ts"];
2074
- /**
2075
- * Load files matching the given patterns, excluding files that match ignore patterns.
2076
- * By default, test files (*.test.ts, *.spec.ts) are excluded unless ignores is explicitly specified.
2077
- * @param config - Configuration with files patterns and optional ignores patterns
2078
- * @returns Array of absolute file paths
2079
- */
2080
- function loadFilesWithIgnores(config) {
2081
- const ignorePatterns = config.ignores ?? DEFAULT_IGNORE_PATTERNS;
2082
- const ignoreFiles = /* @__PURE__ */ new Set();
2083
- for (const ignorePattern of ignorePatterns) {
2084
- const absoluteIgnorePattern = path.resolve(process.cwd(), ignorePattern);
2085
- try {
2086
- fs$1.globSync(absoluteIgnorePattern).forEach((file) => ignoreFiles.add(file));
2087
- } catch (error) {
2088
- logger.warn(`Failed to glob ignore pattern "${ignorePattern}": ${String(error)}`);
2089
- }
2090
- }
2091
- const files = [];
2092
- for (const pattern of config.files) {
2093
- const absolutePattern = path.resolve(process.cwd(), pattern);
2094
- try {
2095
- const filteredFiles = fs$1.globSync(absolutePattern).filter((file) => !ignoreFiles.has(file));
2096
- files.push(...filteredFiles);
2097
- } catch (error) {
2098
- logger.warn(`Failed to glob pattern "${pattern}": ${String(error)}`);
2099
- }
2100
- }
2101
- return files;
2102
- }
2103
-
2104
- //#endregion
2105
- //#region src/cli/services/workflow/ast-utils.ts
2106
- /**
2107
- * Check if a module source is from the Tailor SDK package (including subpaths)
2108
- * @param source - Module source string
2109
- * @returns True if the source is from the Tailor SDK package
2110
- */
2111
- function isTailorSdkSource(source) {
2112
- return /^@tailor-platform\/sdk(\/|$)/.test(source);
2113
- }
2114
- /**
2115
- * Get the source string from a dynamic import or require call
2116
- * @param node - AST node to inspect
2117
- * @returns Resolved import/require source string or null
2118
- */
2119
- function getImportSource(node) {
2120
- if (!node) return null;
2121
- if (node.type === "ImportExpression") {
2122
- const source = node.source;
2123
- if (source.type === "Literal" && typeof source.value === "string") return source.value;
2124
- }
2125
- if (node.type === "CallExpression") {
2126
- const callExpr = node;
2127
- if (callExpr.callee.type === "Identifier" && callExpr.callee.name === "require") {
2128
- const arg = callExpr.arguments[0];
2129
- if (arg && "type" in arg && arg.type === "Literal" && "value" in arg && typeof arg.value === "string") return arg.value;
2130
- }
2131
- }
2132
- return null;
2133
- }
2134
- /**
2135
- * Unwrap AwaitExpression to get the inner expression
2136
- * @param node - AST expression node
2137
- * @returns Inner expression if node is an AwaitExpression
2138
- */
2139
- function unwrapAwait(node) {
2140
- if (node?.type === "AwaitExpression") return node.argument;
2141
- return node;
2142
- }
2143
- /**
2144
- * Check if a node is a string literal
2145
- * @param node - AST expression node
2146
- * @returns True if node is a string literal
2147
- */
2148
- function isStringLiteral(node) {
2149
- return node?.type === "Literal" && typeof node.value === "string";
2150
- }
2151
- /**
2152
- * Check if a node is a function expression (arrow or regular)
2153
- * @param node - AST expression node
2154
- * @returns True if node is a function expression
2155
- */
2156
- function isFunctionExpression(node) {
2157
- return node?.type === "ArrowFunctionExpression" || node?.type === "FunctionExpression";
2158
- }
2159
- /**
2160
- * Find a property in an object expression
2161
- * @param properties - Object properties to search
2162
- * @param name - Property name to find
2163
- * @returns Found property info or null
2164
- */
2165
- function findProperty(properties, name) {
2166
- for (const prop of properties) if (prop.type === "Property") {
2167
- const objProp = prop;
2168
- if ((objProp.key.type === "Identifier" ? objProp.key.name : objProp.key.type === "Literal" ? objProp.key.value : null) === name) return {
2169
- key: objProp.key,
2170
- value: objProp.value,
2171
- start: objProp.start,
2172
- end: objProp.end
2173
- };
2174
- }
2175
- return null;
2176
- }
2177
- /**
2178
- * Apply string replacements to source code
2179
- * Replacements are applied from end to start to maintain positions
2180
- * @param source - Original source code
2181
- * @param replacements - Replacements to apply
2182
- * @returns Transformed source code
2183
- */
2184
- function applyReplacements(source, replacements) {
2185
- const sorted = [...replacements].sort((a, b) => b.start - a.start);
2186
- let result = source;
2187
- for (const r of sorted) result = result.slice(0, r.start) + r.text + result.slice(r.end);
2188
- return result;
2189
- }
2190
- /**
2191
- * Find the end of a statement including any trailing newline
2192
- * @param source - Source code
2193
- * @param position - Start position of the statement
2194
- * @returns Index of the end of the statement including trailing newline
2195
- */
2196
- function findStatementEnd(source, position) {
2197
- let i = position;
2198
- while (i < source.length && (source[i] === ";" || source[i] === " " || source[i] === " ")) i++;
2199
- if (i < source.length && source[i] === "\n") i++;
2200
- return i;
2201
- }
2202
- /**
2203
- * Resolve a relative path from a base directory
2204
- * Simple implementation that handles ./ and ../ prefixes
2205
- * @param baseDir - Base directory
2206
- * @param relativePath - Relative path to resolve
2207
- * @returns Resolved absolute path
2208
- */
2209
- function resolvePath(baseDir, relativePath) {
2210
- const parts = relativePath.replace(/\\/g, "/").split("/");
2211
- const baseParts = baseDir.replace(/\\/g, "/").split("/");
2212
- for (const part of parts) if (part === ".") {} else if (part === "..") baseParts.pop();
2213
- else baseParts.push(part);
2214
- return baseParts.join("/");
2215
- }
2216
-
2217
- //#endregion
2218
- //#region src/cli/services/workflow/sdk-binding-collector.ts
2219
- /**
2220
- * Collect all import bindings for a specific function from the Tailor SDK package
2221
- * Returns a Set of local names that refer to the function
2222
- * @param program - Parsed TypeScript program
2223
- * @param functionName - Function name to collect bindings for
2224
- * @returns Set of local names bound to the SDK function
2225
- */
2226
- function collectSdkBindings(program, functionName) {
2227
- const bindings = /* @__PURE__ */ new Set();
2228
- function walk(node) {
2229
- if (!node || typeof node !== "object") return;
2230
- const nodeType = node.type;
2231
- if (nodeType === "ImportDeclaration") {
2232
- const importDecl = node;
2233
- const source = importDecl.source?.value;
2234
- if (typeof source === "string" && isTailorSdkSource(source)) {
2235
- for (const specifier of importDecl.specifiers || []) if (specifier.type === "ImportSpecifier") {
2236
- const importSpec = specifier;
2237
- const imported = importSpec.imported.type === "Identifier" ? importSpec.imported.name : importSpec.imported.value;
2238
- if (imported === functionName) bindings.add(importSpec.local?.name || imported);
2239
- } else if (specifier.type === "ImportDefaultSpecifier" || specifier.type === "ImportNamespaceSpecifier") {
2240
- const spec = specifier;
2241
- bindings.add(`__namespace__:${spec.local?.name}`);
2242
- }
2243
- }
2244
- }
2245
- if (nodeType === "VariableDeclaration") {
2246
- const varDecl = node;
2247
- for (const decl of varDecl.declarations || []) {
2248
- const source = getImportSource(unwrapAwait(decl.init));
2249
- if (source && isTailorSdkSource(source)) {
2250
- const id = decl.id;
2251
- if (id?.type === "Identifier") bindings.add(`__namespace__:${id.name}`);
2252
- else if (id?.type === "ObjectPattern") {
2253
- const objPattern = id;
2254
- for (const prop of objPattern.properties || []) if (prop.type === "Property") {
2255
- const bindingProp = prop;
2256
- const keyName = bindingProp.key.type === "Identifier" ? bindingProp.key.name : bindingProp.key.value;
2257
- if (keyName === functionName) {
2258
- const localName = bindingProp.value.type === "Identifier" ? bindingProp.value.name : keyName;
2259
- bindings.add(localName ?? "");
2260
- }
2261
- }
2262
- }
2263
- }
2264
- }
2265
- }
2266
- for (const key of Object.keys(node)) {
2267
- const child = node[key];
2268
- if (Array.isArray(child)) child.forEach((c) => walk(c));
2269
- else if (child && typeof child === "object") walk(child);
2270
- }
2271
- }
2272
- walk(program);
2273
- return bindings;
2274
- }
2275
- /**
2276
- * Check if a CallExpression is a call to a specific SDK function
2277
- * @param node - AST node to inspect
2278
- * @param bindings - Collected SDK bindings
2279
- * @param functionName - SDK function name
2280
- * @returns True if node is a call to the SDK function
2281
- */
2282
- function isSdkFunctionCall(node, bindings, functionName) {
2283
- if (node.type !== "CallExpression") return false;
2284
- const callee = node.callee;
2285
- if (callee.type === "Identifier") {
2286
- const identifier = callee;
2287
- return bindings.has(identifier.name);
2288
- }
2289
- if (callee.type === "MemberExpression") {
2290
- const memberExpr = callee;
2291
- if (!memberExpr.computed) {
2292
- const object = memberExpr.object;
2293
- const property = memberExpr.property;
2294
- if (object.type === "Identifier" && bindings.has(`__namespace__:${object.name}`) && property.name === functionName) return true;
2295
- }
2296
- }
2297
- return false;
2298
- }
2299
-
2300
- //#endregion
2301
- //#region src/cli/services/workflow/job-detector.ts
2302
- /**
2303
- * Find all workflow jobs by detecting createWorkflowJob calls from `@tailor-platform/sdk`
2304
- * @param program - Parsed TypeScript program
2305
- * @param _sourceText - Source code text (currently unused)
2306
- * @returns Detected job locations
2307
- */
2308
- function findAllJobs(program, _sourceText) {
2309
- const jobs = [];
2310
- const bindings = collectSdkBindings(program, "createWorkflowJob");
2311
- function walk(node, parents = []) {
2312
- if (!node || typeof node !== "object") return;
2313
- if (isSdkFunctionCall(node, bindings, "createWorkflowJob")) {
2314
- const args = node.arguments;
2315
- if (args?.length >= 1 && args[0]?.type === "ObjectExpression") {
2316
- const configObj = args[0];
2317
- const nameProp = findProperty(configObj.properties, "name");
2318
- const bodyProp = findProperty(configObj.properties, "body");
2319
- if (nameProp && isStringLiteral(nameProp.value) && bodyProp && isFunctionExpression(bodyProp.value)) {
2320
- let statementRange;
2321
- let exportName;
2322
- for (let i = parents.length - 1; i >= 0; i--) {
2323
- const parent = parents[i];
2324
- if (parent.type === "VariableDeclarator") {
2325
- const declarator = parent;
2326
- if (declarator.id?.type === "Identifier") exportName = declarator.id.name;
2327
- }
2328
- if (parent.type === "ExportNamedDeclaration" || parent.type === "VariableDeclaration") statementRange = {
2329
- start: parent.start,
2330
- end: parent.end
2331
- };
2332
- }
2333
- jobs.push({
2334
- name: nameProp.value.value,
2335
- exportName,
2336
- nameRange: {
2337
- start: nameProp.start,
2338
- end: nameProp.end
2339
- },
2340
- bodyValueRange: {
2341
- start: bodyProp.value.start,
2342
- end: bodyProp.value.end
2343
- },
2344
- statementRange
2345
- });
2346
- }
2347
- }
2348
- }
2349
- const newParents = [...parents, node];
2350
- for (const key of Object.keys(node)) {
2351
- const child = node[key];
2352
- if (Array.isArray(child)) child.forEach((c) => walk(c, newParents));
2353
- else if (child && typeof child === "object") walk(child, newParents);
2354
- }
2355
- }
2356
- walk(program);
2357
- return jobs;
2358
- }
2359
- /**
2360
- * Build a map from export name to job name from detected jobs
2361
- * @param jobs - Detected job locations
2362
- * @returns Map from export name to job name
2363
- */
2364
- function buildJobNameMap(jobs) {
2365
- const map = /* @__PURE__ */ new Map();
2366
- for (const job of jobs) if (job.exportName) map.set(job.exportName, job.name);
2367
- return map;
2368
- }
2369
- /**
2370
- * Detect all .trigger() calls in the source code
2371
- * Returns information about each trigger call for transformation
2372
- * @param program - Parsed TypeScript program
2373
- * @param sourceText - Source code text
2374
- * @returns Detected trigger calls
2375
- */
2376
- function detectTriggerCalls(program, sourceText) {
2377
- const calls = [];
2378
- function walk(node, parent = null) {
2379
- if (!node || typeof node !== "object") return;
2380
- if (node.type === "CallExpression") {
2381
- const callExpr = node;
2382
- const callee = callExpr.callee;
2383
- if (callee.type === "MemberExpression") {
2384
- const memberExpr = callee;
2385
- if (!memberExpr.computed && memberExpr.object.type === "Identifier" && memberExpr.property.name === "trigger") {
2386
- const identifierName = memberExpr.object.name;
2387
- let argsText = "";
2388
- if (callExpr.arguments.length > 0) {
2389
- const firstArg = callExpr.arguments[0];
2390
- const lastArg = callExpr.arguments[callExpr.arguments.length - 1];
2391
- if (firstArg && lastArg && "start" in firstArg && "end" in lastArg) argsText = sourceText.slice(firstArg.start, lastArg.end);
2392
- }
2393
- const hasAwait = parent?.type === "AwaitExpression";
2394
- const awaitExpr = hasAwait ? parent : null;
2395
- const callRange = {
2396
- start: callExpr.start,
2397
- end: callExpr.end
2398
- };
2399
- const fullRange = awaitExpr ? {
2400
- start: awaitExpr.start,
2401
- end: awaitExpr.end
2402
- } : callRange;
2403
- calls.push({
2404
- identifierName,
2405
- callRange,
2406
- argsText,
2407
- hasAwait,
2408
- fullRange
2409
- });
2410
- }
2411
- }
2412
- }
2413
- for (const key of Object.keys(node)) {
2414
- const child = node[key];
2415
- if (Array.isArray(child)) child.forEach((c) => walk(c, node));
2416
- else if (child && typeof child === "object") walk(child, node);
2417
- }
2418
- }
2419
- walk(program);
2420
- return calls;
2421
- }
2422
-
2423
- //#endregion
2424
- //#region src/cli/services/workflow/workflow-detector.ts
2425
- /**
2426
- * Find all workflows by detecting createWorkflow calls from `@tailor-platform/sdk`
2427
- * @param program - Parsed TypeScript program
2428
- * @param _sourceText - Source code text (currently unused)
2429
- * @returns Detected workflows
2430
- */
2431
- function findAllWorkflows(program, _sourceText) {
2432
- const workflows = [];
2433
- const bindings = collectSdkBindings(program, "createWorkflow");
2434
- function walk(node, parents = []) {
2435
- if (!node || typeof node !== "object") return;
2436
- if (isSdkFunctionCall(node, bindings, "createWorkflow")) {
2437
- const args = node.arguments;
2438
- if (args?.length >= 1 && args[0]?.type === "ObjectExpression") {
2439
- const configObj = args[0];
2440
- const nameProp = findProperty(configObj.properties, "name");
2441
- if (nameProp && isStringLiteral(nameProp.value)) {
2442
- let exportName;
2443
- let isDefaultExport = false;
2444
- for (let i = parents.length - 1; i >= 0; i--) {
2445
- const parent = parents[i];
2446
- if (parent.type === "VariableDeclarator") {
2447
- const declarator = parent;
2448
- if (declarator.id?.type === "Identifier") {
2449
- exportName = declarator.id.name;
2450
- break;
2451
- }
2452
- }
2453
- if (parent.type === "ExportDefaultDeclaration") isDefaultExport = true;
2454
- }
2455
- workflows.push({
2456
- name: nameProp.value.value,
2457
- exportName,
2458
- isDefaultExport
2459
- });
2460
- }
2461
- }
2462
- }
2463
- const newParents = [...parents, node];
2464
- for (const key of Object.keys(node)) {
2465
- const child = node[key];
2466
- if (Array.isArray(child)) child.forEach((c) => walk(c, newParents));
2467
- else if (child && typeof child === "object") walk(child, newParents);
2468
- }
2469
- }
2470
- walk(program);
2471
- return workflows;
2472
- }
2473
- /**
2474
- * Build a map from export name to workflow name from detected workflows
2475
- * @param workflows - Detected workflows
2476
- * @returns Map from export name to workflow name
2477
- */
2478
- function buildWorkflowNameMap(workflows) {
2479
- const map = /* @__PURE__ */ new Map();
2480
- for (const workflow of workflows) if (workflow.exportName) map.set(workflow.exportName, workflow.name);
2481
- return map;
2482
- }
2483
- /**
2484
- * Detect default imports in a source file and return a map from local name to import source
2485
- * @param program - Parsed TypeScript program
2486
- * @returns Map from local name to import source
2487
- */
2488
- function detectDefaultImports(program) {
2489
- const imports = /* @__PURE__ */ new Map();
2490
- function walk(node) {
2491
- if (!node || typeof node !== "object") return;
2492
- if (node.type === "ImportDeclaration") {
2493
- const importDecl = node;
2494
- const source = importDecl.source?.value;
2495
- if (typeof source === "string") {
2496
- for (const specifier of importDecl.specifiers || []) if (specifier.type === "ImportDefaultSpecifier") {
2497
- const spec = specifier;
2498
- if (spec.local?.name) imports.set(spec.local.name, source);
2499
- }
2500
- }
2501
- }
2502
- for (const key of Object.keys(node)) {
2503
- const child = node[key];
2504
- if (Array.isArray(child)) child.forEach((c) => walk(c));
2505
- else if (child && typeof child === "object") walk(child);
2506
- }
2507
- }
2508
- walk(program);
2509
- return imports;
2510
- }
2511
-
2512
- //#endregion
2513
- //#region src/cli/services/workflow/trigger-transformer.ts
2514
- /**
2515
- * Extract authInvoker info from a config object expression
2516
- * Returns the authInvoker value text and whether it's a shorthand property
2517
- * @param configArg - Config argument node
2518
- * @param sourceText - Source code text
2519
- * @returns Extracted authInvoker info, if any
2520
- */
2521
- function extractAuthInvokerInfo(configArg, sourceText) {
2522
- if (!configArg || typeof configArg !== "object") return void 0;
2523
- if (configArg.type !== "ObjectExpression") return void 0;
2524
- const objExpr = configArg;
2525
- for (const prop of objExpr.properties) {
2526
- if (prop.type !== "Property") continue;
2527
- const objProp = prop;
2528
- if ((objProp.key.type === "Identifier" ? objProp.key.name : objProp.key.type === "Literal" ? objProp.key.value : null) === "authInvoker") {
2529
- if (objProp.shorthand) return {
2530
- isShorthand: true,
2531
- valueText: "authInvoker"
2532
- };
2533
- return {
2534
- isShorthand: false,
2535
- valueText: sourceText.slice(objProp.value.start, objProp.value.end)
2536
- };
2537
- }
2538
- }
2539
- }
2540
- /**
2541
- * Detect .trigger() calls for known workflows and jobs
2542
- * Only detects calls where the identifier is in workflowNames or jobNames
2543
- * @param program - The parsed AST program
2544
- * @param sourceText - The source code text
2545
- * @param workflowNames - Set of known workflow identifier names
2546
- * @param jobNames - Set of known job identifier names
2547
- * @returns Detected trigger call metadata
2548
- */
2549
- function detectExtendedTriggerCalls(program, sourceText, workflowNames, jobNames) {
2550
- const calls = [];
2551
- function walk(node, parent = null) {
2552
- if (!node || typeof node !== "object") return;
2553
- if (node.type === "CallExpression") {
2554
- const callExpr = node;
2555
- const callee = callExpr.callee;
2556
- if (callee.type === "MemberExpression") {
2557
- const memberExpr = callee;
2558
- if (!memberExpr.computed && memberExpr.object.type === "Identifier" && memberExpr.property.name === "trigger") {
2559
- const identifierName = memberExpr.object.name;
2560
- const isWorkflow = workflowNames.has(identifierName);
2561
- const isJob = jobNames.has(identifierName);
2562
- if (!isWorkflow && !isJob) return;
2563
- const argCount = callExpr.arguments.length;
2564
- let argsText = "";
2565
- if (argCount > 0) {
2566
- const firstArg = callExpr.arguments[0];
2567
- if (firstArg && "start" in firstArg && "end" in firstArg) argsText = sourceText.slice(firstArg.start, firstArg.end);
2568
- }
2569
- const hasAwait = parent?.type === "AwaitExpression";
2570
- const awaitExpr = hasAwait ? parent : null;
2571
- if (isWorkflow && argCount >= 2) {
2572
- const secondArg = callExpr.arguments[1];
2573
- const authInvoker = extractAuthInvokerInfo(secondArg, sourceText);
2574
- if (authInvoker) calls.push({
2575
- kind: "workflow",
2576
- identifierName,
2577
- callRange: {
2578
- start: callExpr.start,
2579
- end: callExpr.end
2580
- },
2581
- argsText,
2582
- authInvoker,
2583
- hasAwait: false
2584
- });
2585
- } else if (isJob) calls.push({
2586
- kind: "job",
2587
- identifierName,
2588
- callRange: {
2589
- start: callExpr.start,
2590
- end: callExpr.end
2591
- },
2592
- argsText,
2593
- hasAwait,
2594
- fullRange: awaitExpr ? {
2595
- start: awaitExpr.start,
2596
- end: awaitExpr.end
2597
- } : void 0
2598
- });
2599
- }
2600
- }
2601
- }
2602
- for (const key of Object.keys(node)) {
2603
- const child = node[key];
2604
- if (Array.isArray(child)) child.forEach((c) => walk(c, node));
2605
- else if (child && typeof child === "object") walk(child, node);
2606
- }
2607
- }
2608
- walk(program);
2609
- return calls;
2610
- }
2611
- /**
2612
- * Transform trigger calls for resolver/executor/workflow functions
2613
- * Handles both job.trigger() and workflow.trigger() calls
2614
- * @param source - The source code to transform
2615
- * @param workflowNameMap - Map from variable name to workflow name
2616
- * @param jobNameMap - Map from variable name to job name
2617
- * @param workflowFileMap - Map from file path (without extension) to workflow name for default exports
2618
- * @param currentFilePath - Path of the current file being transformed (for resolving relative imports)
2619
- * @returns Transformed source code with trigger calls rewritten
2620
- */
2621
- function transformFunctionTriggers(source, workflowNameMap, jobNameMap, workflowFileMap, currentFilePath) {
2622
- const { program } = parseSync("input.ts", source);
2623
- const localWorkflowNameMap = new Map(workflowNameMap);
2624
- if (workflowFileMap && currentFilePath) {
2625
- const defaultImports = detectDefaultImports(program);
2626
- const currentDir = currentFilePath.replace(/[/\\][^/\\]+$/, "");
2627
- for (const [localName, importSource] of defaultImports) {
2628
- if (!importSource.startsWith(".")) continue;
2629
- const resolvedPath = resolvePath(currentDir, importSource);
2630
- const workflowName = workflowFileMap.get(resolvedPath);
2631
- if (workflowName) localWorkflowNameMap.set(localName, workflowName);
2632
- }
2633
- }
2634
- const triggerCalls = detectExtendedTriggerCalls(program, source, new Set(localWorkflowNameMap.keys()), new Set(jobNameMap.keys()));
2635
- const replacements = [];
2636
- for (const call of triggerCalls) if (call.kind === "workflow" && call.authInvoker) {
2637
- const workflowName = localWorkflowNameMap.get(call.identifierName);
2638
- if (workflowName) {
2639
- const authInvokerExpr = call.authInvoker.isShorthand ? "authInvoker" : call.authInvoker.valueText;
2640
- const transformedCall = `tailor.workflow.triggerWorkflow("${workflowName}", ${call.argsText || "undefined"}, { authInvoker: ${authInvokerExpr} })`;
2641
- replacements.push({
2642
- start: call.callRange.start,
2643
- end: call.callRange.end,
2644
- text: transformedCall
2645
- });
2646
- }
2647
- } else if (call.kind === "job") {
2648
- const jobName = jobNameMap.get(call.identifierName);
2649
- if (jobName) {
2650
- const transformedCall = `tailor.workflow.triggerJobFunction("${jobName}", ${call.argsText || "undefined"})`;
2651
- const range = call.hasAwait && call.fullRange ? call.fullRange : call.callRange;
2652
- replacements.push({
2653
- start: range.start,
2654
- end: range.end,
2655
- text: transformedCall
2656
- });
2657
- }
2658
- }
2659
- return applyReplacements(source, replacements);
2660
- }
2661
-
2662
- //#endregion
2663
- //#region src/cli/shared/trigger-context.ts
2664
- /**
2665
- * Normalize a file path by removing extension and resolving to absolute path
2666
- * @param filePath - File path to normalize
2667
- * @returns Normalized absolute path without extension
2668
- */
2669
- function normalizeFilePath(filePath) {
2670
- const absolutePath = path.resolve(filePath);
2671
- const ext = path.extname(absolutePath);
2672
- return absolutePath.slice(0, -ext.length);
2673
- }
2674
- /**
2675
- * Build trigger context from workflow configuration
2676
- * Scans workflow files to collect workflow and job mappings
2677
- * @param workflowConfig - Workflow file loading configuration
2678
- * @returns Trigger context built from workflow sources
2679
- */
2680
- async function buildTriggerContext(workflowConfig) {
2681
- const workflowNameMap = /* @__PURE__ */ new Map();
2682
- const jobNameMap = /* @__PURE__ */ new Map();
2683
- const workflowFileMap = /* @__PURE__ */ new Map();
2684
- if (!workflowConfig) return {
2685
- workflowNameMap,
2686
- jobNameMap,
2687
- workflowFileMap
2688
- };
2689
- const workflowFiles = loadFilesWithIgnores(workflowConfig);
2690
- for (const file of workflowFiles) try {
2691
- const source = await fs$1.promises.readFile(file, "utf-8");
2692
- const { program } = parseSync("input.ts", source);
2693
- const workflows = findAllWorkflows(program, source);
2694
- const workflowMap = buildWorkflowNameMap(workflows);
2695
- for (const [exportName, workflowName] of workflowMap) workflowNameMap.set(exportName, workflowName);
2696
- for (const workflow of workflows) if (workflow.isDefaultExport) {
2697
- const normalizedPath = normalizeFilePath(file);
2698
- workflowFileMap.set(normalizedPath, workflow.name);
2699
- }
2700
- const jobMap = buildJobNameMap(findAllJobs(program, source));
2701
- for (const [exportName, jobName] of jobMap) jobNameMap.set(exportName, jobName);
2702
- } catch (error) {
2703
- const errorMessage = error instanceof Error ? error.message : String(error);
2704
- logger.warn(`Failed to process workflow file ${file}: ${errorMessage}`, { mode: "stream" });
2705
- continue;
2706
- }
2707
- return {
2708
- workflowNameMap,
2709
- jobNameMap,
2710
- workflowFileMap
2711
- };
2712
- }
2713
- function sortedMapToJson(m) {
2714
- return JSON.stringify([...m.entries()].sort(([a], [b]) => a.localeCompare(b)));
2715
- }
2716
- /**
2717
- * Serialize trigger context to a deterministic string for cache hashing.
2718
- * Returns an empty string if no context is provided.
2719
- * @param ctx - Trigger context to serialize
2720
- * @returns Deterministic string representation
2721
- */
2722
- function serializeTriggerContext(ctx) {
2723
- if (!ctx) return "";
2724
- return sortedMapToJson(ctx.workflowNameMap) + sortedMapToJson(ctx.jobNameMap) + sortedMapToJson(ctx.workflowFileMap);
2725
- }
2726
- /**
2727
- * Create a rolldown plugin for transforming trigger calls
2728
- * Returns undefined if no trigger context is provided
2729
- * @param triggerContext - Trigger context to use for transformations
2730
- * @returns Rolldown plugin or undefined when no context
2731
- */
2732
- function createTriggerTransformPlugin(triggerContext) {
2733
- if (!triggerContext) return;
2734
- return {
2735
- name: "trigger-transform",
2736
- transform: {
2737
- filter: { id: { include: [/\.ts$/, /\.js$/] } },
2738
- handler(code, id) {
2739
- if (!code.includes(".trigger(")) return null;
2740
- return { code: transformFunctionTriggers(code, triggerContext.workflowNameMap, triggerContext.jobNameMap, triggerContext.workflowFileMap, id) };
2741
- }
2742
- }
2743
- };
2744
- }
2745
-
2746
- //#endregion
2747
- //#region src/cli/services/auth/bundler.ts
2748
- /**
2749
- * Bundle a single auth hook handler into dist/auth-hooks/.
2750
- *
2751
- * Follows the same pattern as the executor bundler:
2752
- * 1. Generate an entry file that re-exports the handler as `main`
2753
- * 2. Bundle with rolldown + tree-shaking
2754
- * @param options - Bundle options
2755
- */
2756
- async function bundleAuthHooks(options) {
2757
- const { configPath, authName, handlerAccessPath, triggerContext, cache, inlineSourcemap } = options;
2758
- logger.newline();
2759
- logger.log(`Bundling auth hook for ${styles.info(`"${authName}"`)}`);
2760
- const outputDir = path.resolve(getDistDir(), "auth-hooks");
2761
- fs$1.mkdirSync(outputDir, { recursive: true });
2762
- await removeStaleEntryFiles(outputDir);
2763
- let tsconfig;
2764
- try {
2765
- tsconfig = await resolveTSConfig();
2766
- } catch {
2767
- tsconfig = void 0;
2768
- }
2769
- const functionName = `auth-hook--${authName}--before-login`;
2770
- const outputPath = path.join(outputDir, `${functionName}.js`);
2771
- const absoluteConfigPath = path.resolve(configPath);
2772
- await withCache({
2773
- cache,
2774
- kind: "auth-hook",
2775
- name: functionName,
2776
- sourceFile: absoluteConfigPath,
2777
- outputPath,
2778
- contextHash: computeBundlerContextHash({
2779
- sourceFile: absoluteConfigPath,
2780
- serializedTriggerContext: serializeTriggerContext(triggerContext),
2781
- tsconfig,
2782
- inlineSourcemap
2783
- }),
2784
- async build(cachePlugins) {
2785
- const entryPath = path.join(outputDir, `${functionName}.entry.js`);
2786
- const entryContent = ml`
2787
- import _config from "${absoluteConfigPath}";
2788
- const __auth_hook_function = _config.${handlerAccessPath};
2789
- export { __auth_hook_function as main };
2790
- `;
2791
- fs$1.writeFileSync(entryPath, entryContent);
2792
- const triggerPlugin = createTriggerTransformPlugin(triggerContext);
2793
- const plugins = triggerPlugin ? [triggerPlugin] : [];
2794
- plugins.push(...cachePlugins);
2795
- await rolldown.build(rolldown.defineConfig({
2796
- input: entryPath,
2797
- output: {
2798
- file: outputPath,
2799
- format: "esm",
2800
- sourcemap: inlineSourcemap ? "inline" : true,
2801
- minify: inlineSourcemap ? { mangle: { keepNames: true } } : true,
2802
- codeSplitting: false
2803
- },
2804
- tsconfig,
2805
- plugins,
2806
- treeshake: {
2807
- moduleSideEffects: false,
2808
- annotations: true,
2809
- unknownGlobalSideEffects: false
2810
- },
2811
- logLevel: "silent"
2812
- }));
2813
- }
2814
- });
2815
- logger.log(`${styles.success("Bundled")} auth hook for ${styles.info(`"${authName}"`)}`);
2816
- }
2817
-
2818
- //#endregion
2819
- //#region src/parser/service/tailordb/hooks-validate-precompiled-expr.ts
2820
- const PRECOMPILED_EXPR_KEY = "__precompiledScriptExpr";
2821
- /**
2822
- * Attach a precompiled script expression to a function object.
2823
- * @param fn - Function metadata object.
2824
- * @param expr - Precompiled script expression.
2825
- */
2826
- function setPrecompiledScriptExpr(fn, expr) {
2827
- fn[PRECOMPILED_EXPR_KEY] = expr;
2828
- }
2829
- /**
2830
- * Read a precompiled script expression from a function object.
2831
- * @param fn - Function metadata object.
2832
- * @returns Precompiled script expression if attached.
2833
- */
2834
- function getPrecompiledScriptExpr(fn) {
2835
- const value = fn[PRECOMPILED_EXPR_KEY];
2836
- return typeof value === "string" ? value : void 0;
2837
- }
2838
-
2839
- //#endregion
2840
- //#region src/parser/service/tailordb/field.ts
2841
- const tailorUserMap = `{ id: user.id, type: user.type, workspaceId: user.workspace_id, attributes: user.attribute_map, attributeList: user.attributes }`;
2842
- /**
2843
- * Convert a function to a string representation.
2844
- * Handles method shorthand syntax (e.g., `create() { ... }`) by converting it to
2845
- * a function expression (e.g., `function create() { ... }`).
2846
- * @param fn - Function to stringify
2847
- * @returns Stringified function source
2848
- */
2849
- const stringifyFunction = (fn) => {
2850
- const src = fn.toString().trim();
2851
- if (/^[a-zA-Z_$][a-zA-Z0-9_$]*\s*\(/.test(src) && !src.startsWith("function") && !src.startsWith("(") && !src.includes("=>")) return `function ${src}`;
2852
- return src;
2853
- };
2854
- /**
2855
- * Convert a hook function to a script expression.
2856
- * @param fn - Hook function
2857
- * @returns JavaScript expression calling the hook
2858
- */
2859
- const convertHookToExpr = (fn) => {
2860
- const precompiledExpr = getPrecompiledScriptExpr(fn);
2861
- if (precompiledExpr) return precompiledExpr;
2862
- return `(${stringifyFunction(fn)})({ value: _value, data: _data, user: ${tailorUserMap} })`;
2863
- };
2864
- /**
2865
- * Parse TailorDBField into OperatorFieldConfig.
2866
- * This transforms user-defined functions into script expressions.
2867
- * @param field - TailorDB field definition
2868
- * @returns Parsed operator field configuration
2869
- */
2870
- function parseFieldConfig(field) {
2871
- const metadata = field.metadata;
2872
- const fieldType = field.type;
2873
- const rawRelation = field.rawRelation;
2874
- const nestedFields = field.fields;
2875
- return {
2876
- type: fieldType,
2877
- ...metadata,
2878
- rawRelation,
2879
- ...fieldType === "nested" && nestedFields && Object.keys(nestedFields).length > 0 ? { fields: Object.entries(nestedFields).reduce((acc, [key, nestedField]) => {
2880
- acc[key] = parseFieldConfig(nestedField);
2881
- return acc;
2882
- }, {}) } : {},
2883
- validate: metadata.validate?.map((v) => {
2884
- const { fn, message } = typeof v === "function" ? {
2885
- fn: v,
2886
- message: `failed by \`${v.toString().trim()}\``
2887
- } : {
2888
- fn: v[0],
2889
- message: v[1]
2890
- };
2891
- return {
2892
- script: { expr: getPrecompiledScriptExpr(fn) ?? `(${fn.toString().trim()})({ value: _value, data: _data, user: ${tailorUserMap} })` },
2893
- errorMessage: message
2894
- };
2895
- }),
2896
- hooks: metadata.hooks ? {
2897
- create: metadata.hooks.create ? { expr: convertHookToExpr(metadata.hooks.create) } : void 0,
2898
- update: metadata.hooks.update ? { expr: convertHookToExpr(metadata.hooks.update) } : void 0
2899
- } : void 0,
2900
- serial: metadata.serial ? {
2901
- start: metadata.serial.start,
2902
- maxValue: metadata.serial.maxValue,
2903
- format: "format" in metadata.serial ? metadata.serial.format : void 0
2904
- } : void 0
2905
- };
2906
- }
2907
-
2908
- //#endregion
2909
- //#region src/parser/service/tailordb/permission.ts
2910
- const operatorMap = {
2911
- "=": "eq",
2912
- "!=": "ne",
2913
- in: "in",
2914
- "not in": "nin",
2915
- hasAny: "hasAny",
2916
- "not hasAny": "nhasAny"
2917
- };
2918
- function normalizeOperand(operand) {
2919
- if (typeof operand === "object" && "user" in operand) return { user: operand.user === "id" ? "_id" : operand.user };
2920
- return operand;
2921
- }
2922
- function normalizeConditions(conditions) {
2923
- return conditions.map((cond) => {
2924
- const [left, operator, right] = cond;
2925
- return [
2926
- normalizeOperand(left),
2927
- operatorMap[operator],
2928
- normalizeOperand(right)
2929
- ];
2930
- });
2931
- }
2932
- function isObjectFormat(p) {
2933
- return typeof p === "object" && p !== null && "conditions" in p;
2934
- }
2935
- function isSingleArrayConditionFormat(cond) {
2936
- return cond.length >= 2 && typeof cond[1] === "string";
2937
- }
2938
- /**
2939
- * Normalize record-level permissions into a standard structure.
2940
- * @param permission - Tailor type permission
2941
- * @returns Normalized record permissions
2942
- */
2943
- function normalizePermission(permission) {
2944
- return Object.keys(permission).reduce((acc, action) => {
2945
- acc[action] = permission[action].map((p) => normalizeActionPermission(p));
2946
- return acc;
2947
- }, {});
2948
- }
2949
- /**
2950
- * Normalize GraphQL permissions into a standard structure.
2951
- * @param permission - Tailor GQL permission
2952
- * @returns Normalized GQL permissions
2953
- */
2954
- function normalizeGqlPermission(permission) {
2955
- return permission.map((policy) => normalizeGqlPolicy(policy));
2956
- }
2957
- function normalizeGqlPolicy(policy) {
2958
- return {
2959
- conditions: policy.conditions ? normalizeConditions(policy.conditions) : [],
2960
- actions: policy.actions === "all" ? ["all"] : policy.actions,
2961
- permit: policy.permit ? "allow" : "deny",
2962
- description: policy.description
2963
- };
2964
- }
2965
- /**
2966
- * Parse raw permissions into normalized permissions.
2967
- * This is the main entry point for permission parsing in the parser layer.
2968
- * @param rawPermissions - Raw permissions definition
2969
- * @returns Normalized permissions
2970
- */
2971
- function parsePermissions(rawPermissions) {
2972
- return {
2973
- ...rawPermissions.record && { record: normalizePermission(rawPermissions.record) },
2974
- ...rawPermissions.gql && { gql: normalizeGqlPermission(rawPermissions.gql) }
2975
- };
2976
- }
2977
- /**
2978
- * Normalize a single action permission into the standard format.
2979
- * @param permission - Raw permission definition
2980
- * @returns Normalized action permission
2981
- */
2982
- function normalizeActionPermission(permission) {
2983
- if (isObjectFormat(permission)) {
2984
- const conditions = permission.conditions;
2985
- return {
2986
- conditions: normalizeConditions(isSingleArrayConditionFormat(conditions) ? [conditions] : conditions),
2987
- permit: permission.permit ? "allow" : "deny",
2988
- description: permission.description
2989
- };
2990
- }
2991
- if (!Array.isArray(permission)) throw new Error("Invalid permission format");
2992
- if (isSingleArrayConditionFormat(permission)) {
2993
- const [op1, operator, op2, permit] = [...permission, true];
2994
- return {
2995
- conditions: normalizeConditions([[
2996
- op1,
2997
- operator,
2998
- op2
2999
- ]]),
3000
- permit: permit ? "allow" : "deny"
3001
- };
3002
- }
3003
- const conditions = [];
3004
- const conditionArray = permission;
3005
- let conditionArrayPermit = true;
3006
- for (const item of conditionArray) {
3007
- if (typeof item === "boolean") {
3008
- conditionArrayPermit = item;
3009
- continue;
3010
- }
3011
- conditions.push(item);
3012
- }
3013
- return {
3014
- conditions: normalizeConditions(conditions),
3015
- permit: conditionArrayPermit ? "allow" : "deny"
3016
- };
3017
- }
3018
-
3019
- //#endregion
3020
- //#region src/parser/service/tailordb/relation.ts
3021
- const relationTypes = {
3022
- "1-1": "1-1",
3023
- oneToOne: "1-1",
3024
- "n-1": "n-1",
3025
- manyToOne: "n-1",
3026
- "N-1": "n-1",
3027
- keyOnly: "keyOnly"
3028
- };
3029
- const relationTypesKeys = Object.keys(relationTypes);
3030
- function fieldRef(context) {
3031
- return `Field "${context.fieldName}" on type "${context.typeName}"`;
3032
- }
3033
- /**
3034
- * Validate relation configuration.
3035
- * @param rawRelation - Raw relation configuration from TailorDB type definition
3036
- * @param context - Context information for the relation (type name, field name, all type names)
3037
- */
3038
- function validateRelationConfig(rawRelation, context) {
3039
- if (!rawRelation.type) throw new Error(`${fieldRef(context)} has a relation but is missing the required 'type' property. Valid values: ${relationTypesKeys.join(", ")}.`);
3040
- if (!(rawRelation.type in relationTypes)) throw new Error(`${fieldRef(context)} has invalid relation type '${rawRelation.type}'. Valid values: ${relationTypesKeys.join(", ")}.`);
3041
- if (rawRelation.toward.type !== "self" && !context.allTypeNames.has(rawRelation.toward.type)) throw new Error(`${fieldRef(context)} references unknown type "${rawRelation.toward.type}".`);
3042
- }
3043
- /**
3044
- * Process raw relation config and compute derived metadata values.
3045
- * @param rawRelation - Raw relation configuration
3046
- * @param context - Context information for the relation
3047
- * @param isArrayField - Whether the target field is an array field
3048
- * @returns Computed relation metadata to apply to field config
3049
- */
3050
- function processRelationMetadata(rawRelation, context, isArrayField = false) {
3051
- const isUnique = relationTypes[rawRelation.type] === "1-1";
3052
- const key = rawRelation.toward.key ?? "id";
3053
- const targetTypeName = rawRelation.toward.type === "self" ? context.typeName : rawRelation.toward.type;
3054
- const shouldSetIndex = !isArrayField;
3055
- const shouldSetUnique = !isArrayField && isUnique;
3056
- return {
3057
- index: shouldSetIndex,
3058
- foreignKey: true,
3059
- relationType: rawRelation.type,
3060
- unique: shouldSetUnique,
3061
- foreignKeyType: targetTypeName,
3062
- foreignKeyField: key
3063
- };
3064
- }
3065
/**
 * Build relation info for creating forward/backward relationships.
 * keyOnly relations carry no GraphQL relation fields, so they yield undefined.
 * @param rawRelation - Raw relation configuration
 * @param context - Context information for the relation (typeName, fieldName)
 * @returns Relation information, or undefined for keyOnly relations
 */
function buildRelationInfo(rawRelation, context) {
	if (rawRelation.type === "keyOnly") return undefined;
	const { toward } = rawRelation;
	// "self" relations target the type currently being parsed.
	const targetType = toward.type === "self" ? context.typeName : toward.type;
	// Forward name: explicit "as" wins; self-relations strip a trailing ID suffix
	// from the field name; otherwise camelize the target type name.
	let forwardName = toward.as;
	if (!forwardName) {
		forwardName = toward.type === "self"
			? context.fieldName.replace(/(ID|Id|id)$/u, "")
			: inflection.camelize(targetType, true);
	}
	return {
		targetType,
		forwardName,
		backwardName: rawRelation.backward ?? "",
		key: toward.key ?? "id",
		unique: relationTypes[rawRelation.type] === "1-1"
	};
}
3088
/**
 * Apply processed relation metadata to an operator field configuration.
 * Note: metadata.relationType is intentionally NOT copied onto the field config;
 * it is consumed elsewhere.
 * @param fieldConfig - Original operator field configuration
 * @param metadata - Processed relation metadata to apply
 * @returns New field config with relation metadata applied (input is not mutated)
 */
function applyRelationMetadataToFieldConfig(fieldConfig, metadata) {
	const { index, foreignKey, unique, foreignKeyType, foreignKeyField } = metadata;
	return {
		...fieldConfig,
		index,
		foreignKey,
		unique,
		foreignKeyType,
		foreignKeyField
	};
}
3104
-
3105
- //#endregion
3106
- //#region src/parser/service/tailordb/type-parser.ts
3107
/**
 * Parse multiple TailorDB types, build relationships, and validate uniqueness.
 * This is the main entry point for parsing TailorDB types.
 * @param rawTypes - Raw TailorDB types keyed by name
 * @param namespace - TailorDB namespace name (used in error messages)
 * @param typeSourceInfo - Optional type source information for error locations
 * @returns Parsed types keyed by type name
 */
function parseTypes(rawTypes, namespace, typeSourceInfo) {
	// The full name set is needed up front so self/cross references validate.
	const allTypeNames = new Set(Object.keys(rawTypes));
	const types = {};
	for (const [name, rawType] of Object.entries(rawTypes)) {
		types[name] = parseTailorDBType(rawType, allTypeNames, rawTypes);
	}
	// Backward relations and query-name checks need every type parsed first.
	buildBackwardRelationships(types, namespace, typeSourceInfo);
	validatePluralFormUniqueness(types, namespace, typeSourceInfo);
	return types;
}
3123
/**
 * Parse a TailorDBTypeSchemaOutput into a TailorDBType.
 * Resolves field configs, validates relation constraints, and records
 * forward relationships keyed by their GraphQL field name.
 * @param type - TailorDB type to parse
 * @param allTypeNames - Set of all TailorDB type names
 * @param rawTypes - All raw TailorDB types keyed by name
 * @returns Parsed TailorDB type
 * @throws If unique is set on an n-1 relation, or index/unique on an array field
 */
function parseTailorDBType(type, allTypeNames, rawTypes) {
	const metadata = type.metadata;
	// Default the plural form via inflection when not explicitly configured.
	const pluralForm = metadata.settings?.pluralForm || inflection.pluralize(type.name);
	const fields = {};
	const forwardRelationships = {};
	for (const [fieldName, fieldDef] of Object.entries(type.fields)) {
		let fieldConfig = parseFieldConfig(fieldDef);
		const rawRelation = fieldConfig.rawRelation;
		const context = {
			typeName: type.name,
			fieldName,
			allTypeNames
		};
		if (rawRelation) {
			validateRelationConfig(rawRelation, context);
			// unique + many-to-one is contradictory: a unique FK implies one-to-one.
			if ([
				"n-1",
				"manyToOne",
				"N-1"
			].includes(rawRelation.type) && fieldConfig.unique) throw new Error(`Field "${fieldName}" on type "${type.name}": cannot set unique on n-1 (manyToOne) relation. Use 1-1 (oneToOne) relation instead, or remove the unique constraint.`);
			// Relation metadata (FK flags, index, unique) overrides field config values.
			const relationMetadata = processRelationMetadata(rawRelation, context, fieldConfig.array);
			fieldConfig = applyRelationMetadataToFieldConfig(fieldConfig, relationMetadata);
		}
		// Array fields can never carry index/unique, relation-derived or user-set.
		if (fieldConfig.array && fieldConfig.index) throw new Error(`Field "${fieldName}" on type "${type.name}": index cannot be set on array fields`);
		if (fieldConfig.array && fieldConfig.unique) throw new Error(`Field "${fieldName}" on type "${type.name}": unique cannot be set on array fields`);
		const parsedField = {
			name: fieldName,
			config: fieldConfig
		};
		// buildRelationInfo returns undefined for keyOnly relations (no GraphQL edge).
		const relationInfo = rawRelation ? buildRelationInfo(rawRelation, context) : void 0;
		if (relationInfo) {
			parsedField.relation = { ...relationInfo };
			// May be undefined for self-relations resolved during parsing; description
			// falls back to "" below in that case.
			const targetType = rawTypes[relationInfo.targetType];
			forwardRelationships[relationInfo.forwardName] = {
				name: relationInfo.forwardName,
				targetType: relationInfo.targetType,
				targetField: fieldName,
				sourceField: relationInfo.key,
				isArray: false,
				description: targetType?.metadata?.description || ""
			};
		}
		fields[fieldName] = parsedField;
	}
	return {
		name: type.name,
		pluralForm,
		description: metadata.description,
		fields,
		forwardRelationships,
		// Backward relationships are filled in later by buildBackwardRelationships.
		backwardRelationships: {},
		settings: metadata.settings || {},
		permissions: parsePermissions(metadata.permissions || {}),
		indexes: metadata.indexes,
		files: metadata.files
	};
}
3187
/**
 * Build backward relationships between parsed types.
 * For every forward relation A.field -> B, adds a reverse edge on B. The
 * backward name defaults to the camelized source type name, singularized for
 * 1-1 relations and pluralized otherwise.
 * Also validates that backward relation names are unique within each type and
 * do not collide with existing fields or files fields.
 * @param types - Parsed types (mutated in place: backwardRelationships filled in)
 * @param namespace - TailorDB namespace name (used in error messages)
 * @param typeSourceInfo - Optional type source information for error locations
 * @throws Aggregated Error listing every conflict found
 */
function buildBackwardRelationships(types, namespace, typeSourceInfo) {
	// Track every (sourceType, field) that produced each backward name per target
	// type, so duplicate names can be reported with all their origins.
	const backwardNameSources = {};
	for (const typeName of Object.keys(types)) backwardNameSources[typeName] = {};
	// For each target type, scan every field of every type for relations pointing at it.
	for (const [typeName, type] of Object.entries(types)) for (const [otherTypeName, otherType] of Object.entries(types)) for (const [fieldName, field] of Object.entries(otherType.fields)) if (field.relation && field.relation.targetType === typeName) {
		let backwardName = field.relation.backwardName;
		if (!backwardName) {
			const lowerName = inflection.camelize(otherTypeName, true);
			// 1-1 relations yield a singular backward field; others a plural collection.
			backwardName = field.relation.unique ? inflection.singularize(lowerName) : inflection.pluralize(lowerName);
		}
		if (!backwardNameSources[typeName][backwardName]) backwardNameSources[typeName][backwardName] = [];
		backwardNameSources[typeName][backwardName].push({
			sourceType: otherTypeName,
			fieldName
		});
		// Note: duplicates overwrite here, but the validation pass below rejects
		// them before the result is ever used.
		type.backwardRelationships[backwardName] = {
			name: backwardName,
			targetType: otherTypeName,
			targetField: fieldName,
			sourceField: field.relation.key,
			isArray: !field.relation.unique,
			description: otherType.description || ""
		};
	}
	// Validation pass: collect every conflict before throwing a single error.
	const errors = [];
	for (const [targetTypeName, backwardNames] of Object.entries(backwardNameSources)) {
		const targetType = types[targetTypeName];
		const targetTypeSourceInfo = typeSourceInfo?.[targetTypeName];
		// Location suffix: "(plugin: id)" for plugin-generated types, "(path)" otherwise.
		const targetLocation = targetTypeSourceInfo ? isPluginGeneratedType(targetTypeSourceInfo) ? ` (plugin: ${targetTypeSourceInfo.pluginId})` : ` (${targetTypeSourceInfo.filePath})` : "";
		for (const [backwardName, sources] of Object.entries(backwardNames)) {
			// Conflict 1: two or more relations produced the same backward name.
			if (sources.length > 1) {
				const sourceList = sources.map((s) => {
					const sourceInfo = typeSourceInfo?.[s.sourceType];
					const location = sourceInfo ? isPluginGeneratedType(sourceInfo) ? ` (plugin: ${sourceInfo.pluginId})` : ` (${sourceInfo.filePath})` : "";
					return `${s.sourceType}.${s.fieldName}${location}`;
				}).join(", ");
				errors.push(`Backward relation name "${backwardName}" on type "${targetTypeName}" is duplicated from: ${sourceList}. Use the "backward" option in .relation() to specify unique names.`);
			}
			// Conflict 2: backward name shadows an existing field on the target type.
			if (backwardName in targetType.fields) {
				const source = sources[0];
				const sourceInfo = typeSourceInfo?.[source.sourceType];
				const sourceLocation = sourceInfo ? isPluginGeneratedType(sourceInfo) ? ` (plugin: ${sourceInfo.pluginId})` : ` (${sourceInfo.filePath})` : "";
				errors.push(`Backward relation name "${backwardName}" from ${source.sourceType}.${source.fieldName}${sourceLocation} conflicts with existing field "${backwardName}" on type "${targetTypeName}"${targetLocation}. Use the "backward" option in .relation() to specify a different name.`);
			}
			// Conflict 3: backward name shadows a files field on the target type.
			if (targetType.files && backwardName in targetType.files) {
				const source = sources[0];
				const sourceInfo = typeSourceInfo?.[source.sourceType];
				const sourceLocation = sourceInfo ? isPluginGeneratedType(sourceInfo) ? ` (plugin: ${sourceInfo.pluginId})` : ` (${sourceInfo.filePath})` : "";
				errors.push(`Backward relation name "${backwardName}" from ${source.sourceType}.${source.fieldName}${sourceLocation} conflicts with files field "${backwardName}" on type "${targetTypeName}"${targetLocation}. Use the "backward" option in .relation() to specify a different name.`);
			}
		}
	}
	if (errors.length > 0) throw new Error(`Backward relation name conflicts detected in TailorDB service "${namespace}".\n${errors.map((e) => `  - ${e}`).join("\n")}`);
}
3247
/**
 * Validate GraphQL query field name uniqueness.
 * Checks for:
 * 1. Each type's singular query name != plural query name
 * 2. No duplicate query names across all types
 * @param types - Parsed types
 * @param namespace - TailorDB namespace name (used in error messages)
 * @param typeSourceInfo - Optional type source information for error locations
 * @throws Aggregated Error listing every conflict found
 */
function validatePluralFormUniqueness(types, namespace, typeSourceInfo) {
	const errors = [];
	// Check 1: a type whose plural form camelizes to the same name as its
	// singular form would generate two identical GraphQL query fields.
	for (const [, parsedType] of Object.entries(types)) {
		const singularQuery = inflection.camelize(parsedType.name, true);
		if (singularQuery === inflection.camelize(parsedType.pluralForm, true)) {
			const sourceInfo = typeSourceInfo?.[parsedType.name];
			const location = sourceInfo ? isPluginGeneratedType(sourceInfo) ? ` (plugin: ${sourceInfo.pluginId})` : ` (${sourceInfo.filePath})` : "";
			errors.push(`Type "${parsedType.name}"${location} has identical singular and plural query names "${singularQuery}". Use db.type(["${parsedType.name}", "UniquePluralForm"], {...}) to set a unique pluralForm.`);
		}
	}
	// Check 2: build a map of every generated query name to its origins, then
	// flag any name claimed by more than one (type, singular/plural) pair.
	const queryNameToSource = {};
	for (const parsedType of Object.values(types)) {
		const singularQuery = inflection.camelize(parsedType.name, true);
		const pluralQuery = inflection.camelize(parsedType.pluralForm, true);
		if (!queryNameToSource[singularQuery]) queryNameToSource[singularQuery] = [];
		queryNameToSource[singularQuery].push({
			typeName: parsedType.name,
			kind: "singular"
		});
		// Skip the plural entry when it equals the singular — that case was
		// already reported by check 1 and would double-count here.
		if (singularQuery !== pluralQuery) {
			if (!queryNameToSource[pluralQuery]) queryNameToSource[pluralQuery] = [];
			queryNameToSource[pluralQuery].push({
				typeName: parsedType.name,
				kind: "plural"
			});
		}
	}
	const duplicates = Object.entries(queryNameToSource).filter(([, sources]) => sources.length > 1);
	for (const [queryName, sources] of duplicates) {
		const sourceList = sources.map((s) => {
			const sourceInfo = typeSourceInfo?.[s.typeName];
			const location = sourceInfo ? isPluginGeneratedType(sourceInfo) ? ` (plugin: ${sourceInfo.pluginId})` : ` (${sourceInfo.filePath})` : "";
			return `"${s.typeName}"${location} (${s.kind})`;
		}).join(", ");
		errors.push(`GraphQL query field "${queryName}" conflicts between: ${sourceList}`);
	}
	if (errors.length > 0) throw new Error(`GraphQL field name conflicts detected in TailorDB service "${namespace}".\n${errors.map((e) => `  - ${e}`).join("\n")}`);
}
3294
-
3295
- //#endregion
3296
- //#region src/parser/service/common.ts
3297
// Zod schema accepting any function value; used for hook/validator fields
// whose signatures cannot be expressed structurally in Zod.
const functionSchema = z.custom((val) => typeof val === "function");
3298
-
3299
- //#endregion
3300
- //#region src/parser/service/tailordb/schema.ts
3301
/**
 * Normalize GqlOperationsConfig (alias or object) to a GqlOperations object.
 * The "query" alias expands to read-only mode:
 * { create: false, update: false, delete: false, read: true }.
 * Any other value is passed through unchanged.
 * @param config - The config to normalize
 * @returns The normalized GqlOperations object
 */
function normalizeGqlOperations(config) {
	// Non-alias configs are already in object form.
	if (config !== "query") return config;
	return {
		create: false,
		update: false,
		delete: false,
		read: true
	};
}
3316
/**
 * Zod schema for GqlOperations configuration with normalization transform.
 * Accepts "query" alias or detailed object, normalizes to GqlOperations object
 * via normalizeGqlOperations.
 */
const GqlOperationsSchema = z.union([z.literal("query"), z.object({
	create: z.boolean().optional().describe("Enable create mutation (default: true)"),
	update: z.boolean().optional().describe("Enable update mutation (default: true)"),
	delete: z.boolean().optional().describe("Enable delete mutation (default: true)"),
	read: z.boolean().optional().describe("Enable read queries - get, list, aggregation (default: true)")
})]).describe("Configuration for GraphQL operations on a TailorDB type.\nAll operations are enabled by default (undefined or true = enabled, false = disabled).").transform((val) => normalizeGqlOperations(val));
3326
// Primitive and structural field types supported by TailorDB columns.
const TailorFieldTypeSchema$1 = z.enum([
	"uuid",
	"string",
	"boolean",
	"integer",
	"float",
	"decimal",
	"enum",
	"date",
	"datetime",
	"time",
	"nested"
]);
// A single allowed value for an enum field, with optional description.
const AllowedValueSchema$1 = z.object({
	value: z.string(),
	description: z.string().optional()
});
// Per-field metadata: constraints, relation FK flags, lifecycle hooks,
// validators, serial config, and decimal scale.
const DBFieldMetadataSchema = z.object({
	required: z.boolean().optional().describe("Whether the field is required"),
	array: z.boolean().optional().describe("Whether the field is an array"),
	description: z.string().optional().describe("Field description"),
	typeName: z.string().optional().describe("Type name for nested or enum fields"),
	allowedValues: z.array(AllowedValueSchema$1).optional().describe("Allowed values for enum fields"),
	index: z.boolean().optional().describe("Whether the field is indexed for faster queries"),
	unique: z.boolean().optional().describe("Whether the field value must be unique"),
	vector: z.boolean().optional().describe("Whether the field is a vector field for similarity search"),
	foreignKey: z.boolean().optional().describe("Whether the field is a foreign key"),
	foreignKeyType: z.string().optional().describe("Target type name for foreign key relations"),
	foreignKeyField: z.string().optional().describe("Target field name for foreign key relations"),
	hooks: z.object({
		create: functionSchema.optional().describe("Hook function called on record creation"),
		update: functionSchema.optional().describe("Hook function called on record update")
	}).optional().describe("Lifecycle hooks for the field"),
	validate: z.array(z.union([functionSchema, z.tuple([functionSchema, z.string()])])).optional().describe("Validation functions for the field"),
	serial: z.object({
		start: z.number().describe("Starting value for the serial sequence"),
		maxValue: z.number().optional().describe("Maximum value for the serial sequence"),
		format: z.string().optional().describe("Format string for serial value (string type only)")
	}).optional().describe("Serial (auto-increment) configuration"),
	scale: z.number().int().min(0).max(12).optional().describe("Decimal scale (number of digits after decimal point, 0-12)")
});
// Relation cardinality keys (e.g. "1-1", "n-1", "keyOnly"), declared elsewhere.
const RelationTypeSchema = z.enum(relationTypesKeys);
// Raw relation config as written by users in .relation().
const RawRelationConfigSchema = z.object({
	type: RelationTypeSchema.describe("Relation cardinality type"),
	toward: z.object({
		type: z.string().describe("Target type name, or 'self' for self-relations"),
		as: z.string().optional().describe("Custom forward relation name"),
		key: z.string().optional().describe("Target field to join on (default: 'id')")
	}),
	backward: z.string().optional().describe("Backward relation name on the target type")
});
// Recursive field schema: nested fields reference this schema, so z.lazy is
// required to break the definition cycle.
const TailorDBFieldSchema = z.lazy(() => z.object({
	type: TailorFieldTypeSchema$1,
	fields: z.record(z.string(), TailorDBFieldSchema).optional(),
	metadata: DBFieldMetadataSchema,
	rawRelation: RawRelationConfigSchema.optional()
}));
3383
/**
 * Schema for TailorDB type settings.
 * Normalizes gqlOperations from alias ("query") to object format via
 * GqlOperationsSchema's transform.
 */
const TailorDBTypeSettingsSchema = z.object({
	pluralForm: z.string().optional().describe("Custom plural form of the type name for GraphQL"),
	aggregation: z.boolean().optional().describe("Enable aggregation queries for this type"),
	bulkUpsert: z.boolean().optional().describe("Enable bulk upsert mutation for this type"),
	gqlOperations: GqlOperationsSchema.optional().describe("Configure GraphQL operations for this type. Use \"query\" for read-only mode, or an object for granular control."),
	publishEvents: z.boolean().optional().describe("Enable publishing events for this type.\nWhen enabled, record creation/update/deletion events are published.\nIf not specified, this is automatically set to true when an executor uses this type\nwith recordCreated/recordUpdated/recordDeleted triggers. If explicitly set to false\nwhile an executor uses this type, an error will be thrown during apply.")
});
3394
// Error-message suffix for record-level operands used in gql-level permissions.
const GQL_PERMISSION_INVALID_OPERAND_MESSAGE = "operand is not supported in gqlPermission. Use permission() for record-level conditions.";
// Operands allowed in gql-level permission conditions: a {user} reference,
// or literal string/boolean values (scalar or array). Record-level operands
// ({record}/{oldRecord}/{newRecord}) are rejected with a targeted message.
const GqlPermissionOperandSchema = z.union([
	z.object({ user: z.string() }).strict(),
	z.string(),
	z.boolean(),
	z.array(z.string()),
	z.array(z.boolean())
], { error: (issue) => {
	// Single-key objects are likely a record-level operand — name the key in the error.
	if (typeof issue.input === "object" && issue.input !== null) {
		const keys = Object.keys(issue.input);
		if (keys.length === 1) return `"${keys[0]}" ${GQL_PERMISSION_INVALID_OPERAND_MESSAGE}`;
		return "Operand object must have exactly 1 key";
	}
	return "Invalid operand in gqlPermission";
} });
// Record-level operands additionally allow record/oldRecord/newRecord references.
const RecordPermissionOperandSchema = z.union([
	GqlPermissionOperandSchema,
	z.object({ record: z.string() }),
	z.object({ oldRecord: z.string() }),
	z.object({ newRecord: z.string() })
]);
// Comparison operators usable in permission conditions.
const PermissionOperatorSchema = z.enum([
	"=",
	"!=",
	"in",
	"not in",
	"hasAny",
	"not hasAny"
]);
// A single [operand, operator, operand] condition tuple (record-level).
const RecordPermissionConditionSchema = z.tuple([
	RecordPermissionOperandSchema,
	PermissionOperatorSchema,
	RecordPermissionOperandSchema
]).readonly();
// A single [operand, operator, operand] condition tuple (gql-level only).
const GqlPermissionConditionSchema = z.tuple([
	GqlPermissionOperandSchema,
	PermissionOperatorSchema,
	GqlPermissionOperandSchema
]).readonly();
// Per-action record permission. Accepts four shapes: an object with
// conditions/description/permit, a bare condition tuple, a condition tuple
// with trailing permit boolean, or an array of conditions optionally ending
// in a permit boolean.
const ActionPermissionSchema = z.union([
	z.object({
		conditions: z.union([RecordPermissionConditionSchema, z.array(RecordPermissionConditionSchema).readonly()]),
		description: z.string().optional(),
		permit: z.boolean().optional()
	}),
	z.tuple([
		RecordPermissionOperandSchema,
		PermissionOperatorSchema,
		RecordPermissionOperandSchema
	]).readonly(),
	z.tuple([
		RecordPermissionOperandSchema,
		PermissionOperatorSchema,
		RecordPermissionOperandSchema,
		z.boolean()
	]).readonly(),
	z.array(z.union([RecordPermissionConditionSchema, z.boolean()])).refine((arr) => {
		// A boolean permit flag may only be the final element of the array form.
		const boolIndex = arr.findIndex((item) => typeof item === "boolean");
		return boolIndex === -1 || boolIndex === arr.length - 1;
	}, { message: "Boolean permit flag must only appear at the end" }).readonly()
]);
// GraphQL operations a gql-level policy may govern.
const GqlPermissionActionSchema = z.enum([
	"read",
	"create",
	"update",
	"delete",
	"aggregate",
	"bulkUpsert"
]);
// A single gql-level policy: conditions plus the actions it applies to
// ("all" or an explicit list).
const GqlPermissionPolicySchema = z.object({
	conditions: z.array(GqlPermissionConditionSchema).readonly(),
	actions: z.union([z.literal("all"), z.array(GqlPermissionActionSchema).readonly()]),
	permit: z.boolean().optional(),
	description: z.string().optional()
});
// Top-level permissions: optional record-level CRUD rules plus optional
// gql-level policies.
const RawPermissionsSchema = z.object({
	record: z.object({
		create: z.array(ActionPermissionSchema).readonly(),
		read: z.array(ActionPermissionSchema).readonly(),
		update: z.array(ActionPermissionSchema).readonly(),
		delete: z.array(ActionPermissionSchema).readonly()
	}).optional(),
	gql: z.array(GqlPermissionPolicySchema).readonly().optional()
});
3478
// Schema for a complete TailorDB type definition: name, recursive field map,
// and metadata (settings, permissions, files, composite indexes).
const TailorDBTypeSchema = z.object({
	name: z.string(),
	fields: z.record(z.string(), TailorDBFieldSchema),
	metadata: z.object({
		name: z.string(),
		description: z.string().optional(),
		settings: TailorDBTypeSettingsSchema.optional(),
		permissions: RawPermissionsSchema,
		files: z.record(z.string(), z.string()),
		indexes: z.record(z.string(), z.object({
			fields: z.array(z.string()),
			unique: z.boolean().optional()
		})).optional()
	})
});
// Schema for migration settings of a TailorDB service.
const TailorDBMigrationConfigSchema = z.object({
	directory: z.string().describe("Directory containing migration files"),
	machineUser: z.string().optional().describe("Machine user name for migration execution")
});
/**
 * Schema for TailorDB service configuration.
 * Normalizes gqlOperations from alias ("query") to object format via
 * GqlOperationsSchema's transform.
 */
const TailorDBServiceConfigSchema = z.object({
	files: z.array(z.string()).describe("Glob patterns for TailorDB type definition files"),
	ignores: z.array(z.string()).optional().describe("Glob patterns to exclude from type discovery"),
	erdSite: z.string().optional().describe("URL for the ERD (Entity Relationship Diagram) site"),
	migration: TailorDBMigrationConfigSchema.optional().describe("Migration configuration"),
	gqlOperations: GqlOperationsSchema.optional().describe("Default GraphQL operations for all types in this service")
});
3508
-
3509
- //#endregion
3510
- //#region src/cli/services/tailordb/es-builtins.ts
3511
// Handle both ESM default export and CJS namespace shapes of the `globals` package.
const globalsMap = globals.default ?? globals;
/**
 * Runtime globals available in the PF execution environment.
 * Identifiers in this set are excluded from free variable detection
 * since they are always available in the runtime environment.
 *
 * Combines globals.builtin (ECMAScript language builtins) and
 * globals['shared-node-browser'] (shared runtime globals like
 * console, fetch, setTimeout, etc.) from the `globals` npm package.
 */
const ES_BUILTINS = new Set([...Object.keys(globalsMap.builtin ?? {}), ...Object.keys(globalsMap["shared-node-browser"] ?? {})]);
3522
-
3523
- //#endregion
3524
- //#region src/cli/services/tailordb/hooks-validate-bundler.ts
3525
/**
 * Recursively extract binding names from a destructuring pattern node.
 * Handles Identifier, ObjectPattern, ArrayPattern (with holes and rest
 * elements), and AssignmentPattern (defaults); other node types are ignored.
 * @param pattern - The binding pattern AST node.
 * @param bindings - Set to collect binding names into (mutated).
 */
function collectBindingsFromPattern(pattern, bindings) {
	if (pattern.type === "Identifier") {
		bindings.add(pattern.name);
	} else if (pattern.type === "ObjectPattern") {
		// Properties bind via their value; rest elements via their argument.
		for (const prop of pattern.properties) {
			const inner = prop.type === "RestElement" ? prop.argument : prop.value;
			collectBindingsFromPattern(inner, bindings);
		}
	} else if (pattern.type === "ArrayPattern") {
		for (const element of pattern.elements) {
			// Holes in array patterns ([, x]) appear as null entries.
			if (!element) continue;
			const inner = element.type === "RestElement" ? element.argument : element;
			collectBindingsFromPattern(inner, bindings);
		}
	} else if (pattern.type === "AssignmentPattern") {
		// Only the left side binds; the default expression does not.
		collectBindingsFromPattern(pattern.left, bindings);
	}
}
3548
/** Fields that contain TypeScript type annotations (not runtime references).
 * AST walkers skip these keys so type-only identifiers are not counted as
 * runtime variable references. */
const TS_TYPE_FIELDS = new Set([
	"typeAnnotation",
	"typeParameters",
	"returnType",
	"superTypeArguments",
	"typeArguments"
]);
3556
/**
 * True when a function parameter node is a plain binding pattern, i.e. not a
 * TypeScript parameter property (constructor(public x) {...}).
 * @param param - Function parameter AST node.
 * @returns Whether the parameter can be walked as a binding pattern.
 */
function isBindingPattern(param) {
	const isTsParameterProperty = param.type === "TSParameterProperty";
	return !isTsParameterProperty;
}
3559
/**
 * Narrow an arbitrary value to a function, or undefined when it is not one.
 * @param value - Candidate hook/validator value.
 * @returns The function itself, or undefined.
 */
function toScriptFunction(value) {
	return typeof value === "function" ? value : void 0;
}
3563
/**
 * Collect every script target (create/update hooks and validators) declared on
 * a type's fields, recursing into nested field definitions.
 * @param type - TailorDB type whose fields are scanned.
 * @returns Array of { fn, kind } targets, kind being "hooks" or "validate".
 */
function collectScriptTargets(type) {
	const targets = [];
	// Push a target only when the candidate is actually a function.
	const pushTarget = (candidate, kind) => {
		const fn = toScriptFunction(candidate);
		if (fn) targets.push({ fn, kind });
	};
	const visitField = (field) => {
		const meta = field.metadata;
		pushTarget(meta.hooks?.create, "hooks");
		pushTarget(meta.hooks?.update, "hooks");
		// Validators are either a bare function or a [function, message] tuple.
		for (const entry of meta.validate ?? []) {
			pushTarget(typeof entry === "function" ? entry : entry[0], "validate");
		}
		// Nested object fields may carry their own hooks/validators.
		if (field.type === "nested" && field.fields) {
			for (const nested of Object.values(field.fields)) visitField(nested);
		}
	};
	for (const field of Object.values(type.fields)) visitField(field);
	return targets;
}
3595
/**
 * Parse a code string with oxc-parser and return identifiers that are referenced
 * but never bound anywhere in the snippet (free variables), excluding ES builtins.
 *
 * NOTE: bindings and references are each collected into a single flat set, so
 * scoping is approximate — a name bound in any scope suppresses references to
 * it everywhere in the snippet. This is a deliberate over-approximation for
 * detecting genuinely undefined names.
 * @param code - Valid JavaScript code to analyze.
 * @returns Set of undefined variable names.
 */
function findUndefinedReferences(code) {
	const { program } = parseSync("_.js", code);
	const references = /* @__PURE__ */ new Set();
	const bindings = /* @__PURE__ */ new Set();
	const walk = (node) => {
		if (!node) return;
		switch (node.type) {
			case "VariableDeclarator":
				// Declared names are bindings; only the initializer can reference.
				collectBindingsFromPattern(node.id, bindings);
				walk(node.init);
				return;
			case "FunctionDeclaration":
			case "FunctionExpression":
				if (node.id) bindings.add(node.id.name);
				// Parameters bind, but default expressions inside them can reference.
				for (const param of node.params) if (isBindingPattern(param)) {
					collectBindingsFromPattern(param, bindings);
					walk(param);
				}
				walk(node.body);
				return;
			case "ArrowFunctionExpression":
				for (const param of node.params) if (isBindingPattern(param)) {
					collectBindingsFromPattern(param, bindings);
					walk(param);
				}
				walk(node.body);
				return;
			case "ClassDeclaration":
			case "ClassExpression":
				if (node.id) bindings.add(node.id.name);
				walk(node.superClass);
				walk(node.body);
				return;
			case "CatchClause":
				if (node.param) collectBindingsFromPattern(node.param, bindings);
				walk(node.body);
				return;
			case "MemberExpression":
				// obj.prop: only the object (and a computed property) reference names.
				walk(node.object);
				if (node.computed) walk(node.property);
				return;
			case "Property":
				// { key: value }: a non-computed key is not a reference.
				if (node.computed) walk(node.key);
				walk(node.value);
				return;
			case "LabeledStatement":
				// Labels are not variable references; only walk the labeled body.
				walk(node.body);
				return;
			case "Identifier":
				references.add(node.name);
				return;
		}
		// Generic fallback: walk every child node/array, skipping the type tag
		// and TypeScript type-annotation fields.
		const rec = node;
		for (const [key, value] of Object.entries(rec)) {
			if (key === "type" || TS_TYPE_FIELDS.has(key)) continue;
			if (Array.isArray(value)) for (const item of value) walk(item);
			else if (value && typeof value === "object" && "type" in value) walk(value);
		}
	};
	walk(program);
	// Free variables: referenced, never bound, and not a runtime builtin.
	const freeVars = /* @__PURE__ */ new Set();
	for (const ref of references) if (!bindings.has(ref) && !ES_BUILTINS.has(ref)) freeVars.add(ref);
	return freeVars;
}
3665
/**
 * Collect all Identifier names from a TypeScript/JavaScript code string using oxc-parser.
 * Non-computed member-expression properties (obj.prop) and non-computed object
 * keys ({ key: v }) are excluded, as are TypeScript type-annotation subtrees.
 * @param code - Code string to analyze.
 * @returns Set of identifier names found in the code.
 */
function collectIdentifierNames(code) {
	// Parsed as .ts so TypeScript syntax in declarations is accepted.
	const { program } = parseSync("_.ts", code);
	const names = /* @__PURE__ */ new Set();
	const walk = (node) => {
		if (!node || typeof node !== "object") return;
		const record = node;
		if (record.type === "Identifier" && typeof record.name === "string") names.add(record.name);
		for (const [key, value] of Object.entries(record)) {
			// Skip names that are property accesses / object keys, not references.
			if (key === "property" && record.type === "MemberExpression" && !record.computed) continue;
			if (key === "key" && record.type === "Property" && !record.computed) continue;
			if (TS_TYPE_FIELDS.has(key)) continue;
			if (Array.isArray(value)) for (const item of value) walk(item);
			else if (value && typeof value === "object" && "type" in value) walk(value);
		}
	};
	walk(program);
	return names;
}
3688
/**
 * Collect top-level bindings (imports and declarations) from a TypeScript source file.
 * Handles import declarations, top-level variable and function declarations,
 * and the same declarations wrapped in `export` (ExportNamedDeclaration).
 * Each binding records the full source text of its declaring statement so it
 * can later be copied into a minimal bundle entry.
 * @param sourceFilePath - Absolute path to the source file.
 * @returns Map of binding name to SourceBinding ({ name, sourceText, kind }).
 */
function collectSourceBindings(sourceFilePath) {
	const source = readFileSync(sourceFilePath, "utf-8");
	const { program } = parseSync(sourceFilePath, source);
	const bindings = /* @__PURE__ */ new Map();
	// Register every Identifier-bound declarator of a variable declaration.
	// (Destructured top-level declarations are intentionally not indexed.)
	const addVariableBindings = (varDecl) => {
		const text = source.slice(varDecl.start, varDecl.end);
		for (const decl of varDecl.declarations) if (decl.id.type === "Identifier") bindings.set(decl.id.name, {
			name: decl.id.name,
			sourceText: text,
			kind: "declaration"
		});
	};
	// Register a named function declaration (anonymous default exports have no id).
	const addFunctionBinding = (funcDecl) => {
		if (!funcDecl.id) return;
		bindings.set(funcDecl.id.name, {
			name: funcDecl.id.name,
			sourceText: source.slice(funcDecl.start, funcDecl.end),
			kind: "declaration"
		});
	};
	for (const stmt of program.body) {
		if (stmt.type === "ImportDeclaration") {
			const text = source.slice(stmt.start, stmt.end);
			// A bare `import "mod"` has no specifiers and binds nothing.
			for (const spec of stmt.specifiers ?? []) bindings.set(spec.local.name, {
				name: spec.local.name,
				sourceText: text,
				kind: "import"
			});
		} else if (stmt.type === "VariableDeclaration") {
			addVariableBindings(stmt);
		} else if (stmt.type === "FunctionDeclaration") {
			addFunctionBinding(stmt);
		} else if (stmt.type === "ExportNamedDeclaration" && stmt.declaration) {
			// `export const x = ...` / `export function f() {...}` bind the same
			// way as their unexported forms.
			if (stmt.declaration.type === "VariableDeclaration") addVariableBindings(stmt.declaration);
			else if (stmt.declaration.type === "FunctionDeclaration") addFunctionBinding(stmt.declaration);
		}
	}
	return bindings;
}
3748
/**
 * Resolve all bindings needed by a function, recursively including
 * dependencies of top-level declarations.
 * @param freeVars - Set of free variable names extracted from the function.
 * @param sourceBindings - Available bindings from the source file.
 * @returns Object with needed import statements, declaration texts, and any
 *          names that could not be resolved from the source file.
 */
function resolveNeededBindings(freeVars, sourceBindings) {
	const neededImports = /* @__PURE__ */ new Set();
	const neededDeclarations = /* @__PURE__ */ new Set();
	const unresolved = [];
	// Guards against re-processing and against cycles between declarations.
	const resolved = /* @__PURE__ */ new Set();
	const resolveVars = (vars) => {
		for (const varName of vars) {
			if (resolved.has(varName)) continue;
			resolved.add(varName);
			const binding = sourceBindings.get(varName);
			if (!binding) {
				unresolved.push(varName);
				continue;
			}
			if (binding.kind === "import") neededImports.add(binding.sourceText);
			else {
				// A declaration may itself reference other top-level bindings;
				// resolve those first so dependencies precede dependents.
				const identifiers = collectIdentifierNames(binding.sourceText);
				const referencedVars = /* @__PURE__ */ new Set();
				// Exclude the binding's own name to avoid trivial self-recursion.
				for (const id of identifiers) if (id !== varName && sourceBindings.has(id)) referencedVars.add(id);
				resolveVars(referencedVars);
				neededDeclarations.add(binding.sourceText);
			}
		}
	};
	resolveVars(freeVars);
	return {
		imports: [...neededImports],
		declarations: [...neededDeclarations],
		unresolved
	};
}
3786
- function buildPrecompiledExpr(bundleCode) {
3787
- return `(() => {
3788
- const module = { exports: {} };
3789
- const exports = module.exports;
3790
- ${bundleCode}\n return module.exports.main({ value: _value, data: _data, user: ${tailorUserMap} });\n})()`;
3791
- }
3792
- /**
3793
- * Build entry file content from already-resolved imports and declarations.
3794
- * @param imports - Import statement texts.
3795
- * @param declarations - Declaration statement texts.
3796
- * @param fnSource - The function source code.
3797
- * @param sourceFilePath - Path to the source file for resolving relative imports.
3798
- * @returns Entry file content string.
3799
- */
3800
- function buildMinimalEntryFromResolved(imports, declarations, fnSource, sourceFilePath) {
3801
- const sourceDir = resolve(sourceFilePath, "..").replace(/\\/g, "/");
3802
- return [
3803
- ...imports.map((imp) => imp.replace(/from\s+["'](\.[^"']+)["']/g, (_match, relPath) => `from "${resolve(sourceDir, relPath).replace(/\\/g, "/")}"`)),
3804
- ...declarations,
3805
- `export function main(input) { return (${fnSource})(input); }`
3806
- ].join("\n");
3807
- }
3808
- async function bundleScriptTarget(args) {
3809
- const { fn, kind, sourceFilePath, sourceBindings, tempDir, targetIndex, tsconfig } = args;
3810
- const fnSource = stringifyFunction(fn);
3811
- const inlineExpr = `(${fnSource})({ value: _value, data: _data, user: ${tailorUserMap} })`;
3812
- const freeVars = findUndefinedReferences(`const __fn = ${fnSource};`);
3813
- if (freeVars.size === 0) return inlineExpr;
3814
- const { imports, declarations, unresolved } = resolveNeededBindings(freeVars, sourceBindings);
3815
- if (unresolved.length > 0) throw new Error(`${kind} in ${sourceFilePath} captures unresolvable variables (${unresolved.join(", ")}). Hooks and validators must not reference variables that cannot be resolved from the source file.
3816
- ${kind}: ${fnSource}`);
3817
- const entryContent = buildMinimalEntryFromResolved(imports, declarations, fnSource, sourceFilePath);
3818
- const entryPath = join(tempDir, `tailordb-script-${targetIndex}.entry.ts`);
3819
- const outputPath = join(tempDir, `tailordb-script-${targetIndex}.bundle.cjs`);
3820
- writeFileSync(entryPath, entryContent);
3821
- await rolldown.build(rolldown.defineConfig({
3822
- input: entryPath,
3823
- output: {
3824
- file: outputPath,
3825
- format: "cjs",
3826
- sourcemap: false,
3827
- minify: true,
3828
- codeSplitting: false
3829
- },
3830
- tsconfig,
3831
- treeshake: {
3832
- moduleSideEffects: false,
3833
- annotations: true,
3834
- unknownGlobalSideEffects: false
3835
- },
3836
- logLevel: "silent"
3837
- }));
3838
- return buildPrecompiledExpr(readFileSync(outputPath, "utf-8"));
3839
- }
3840
- /**
3841
- * Precompile TailorDB hooks/validators into self-contained script expressions using rolldown.
3842
- * Uses oxc-parser AST walking to extract free variables from functions, then builds
3843
- * minimal entry points containing only the needed imports and declarations.
3844
- * @param type - TailorDB type schema output.
3845
- * @param sourceFilePath - Source file where the type is defined.
3846
- * @param tsconfig - Resolved tsconfig path, or undefined if not found.
3847
- */
3848
- async function precompileTailorDBTypeScripts(type, sourceFilePath, tsconfig) {
3849
- const targets = collectScriptTargets(type);
3850
- if (targets.length === 0) return;
3851
- const sourceBindings = collectSourceBindings(sourceFilePath);
3852
- const tempDir = resolve(getDistDir(), "hooks-validate-scripts", type.name);
3853
- mkdirSync(tempDir, { recursive: true });
3854
- try {
3855
- const results = await Promise.allSettled(targets.map((target, index) => bundleScriptTarget({
3856
- fn: target.fn,
3857
- kind: target.kind,
3858
- sourceFilePath,
3859
- sourceBindings,
3860
- tempDir,
3861
- targetIndex: index,
3862
- tsconfig
3863
- })));
3864
- const firstError = results.find((r) => r.status === "rejected");
3865
- if (firstError && firstError.status === "rejected") throw firstError.reason;
3866
- for (const [index, result] of results.entries()) if (result.status === "fulfilled") setPrecompiledScriptExpr(targets[index].fn, result.value);
3867
- } finally {
3868
- rmSync(tempDir, {
3869
- recursive: true,
3870
- force: true
3871
- });
3872
- }
3873
- }
3874
-
3875
- //#endregion
3876
- //#region src/cli/services/tailordb/service.ts
3877
- /**
3878
- * Creates a new TailorDBService instance.
3879
- * @param params - Parameters for creating the service
3880
- * @returns A new TailorDBService instance
3881
- */
3882
- function createTailorDBService(params) {
3883
- const { namespace, config, pluginManager } = params;
3884
- const rawTypes = {};
3885
- let types = {};
3886
- const typeSourceInfo = {};
3887
- const pluginAttachments = /* @__PURE__ */ new Map();
3888
- let loadPromise;
3889
- const doParseTypes = () => {
3890
- const allTypes = {};
3891
- for (const fileTypes of Object.values(rawTypes)) for (const [typeName, type] of Object.entries(fileTypes)) allTypes[typeName] = type;
3892
- types = parseTypes(allTypes, namespace, typeSourceInfo);
3893
- };
3894
- /**
3895
- * Process plugins for a type and add generated types to rawTypes
3896
- * @param rawType - The raw TailorDB type being processed
3897
- * @param attachments - Plugin attachments for this type
3898
- * @param sourceFilePath - The file path where the type was loaded from
3899
- */
3900
- const processPluginsForType = async (rawType, attachments, sourceFilePath) => {
3901
- if (!pluginManager) return;
3902
- let currentType = rawType;
3903
- for (const attachment of attachments) {
3904
- const result = await pluginManager.processAttachment({
3905
- type: currentType,
3906
- typeConfig: attachment.config,
3907
- namespace,
3908
- pluginId: attachment.pluginId
3909
- });
3910
- if (!result.success) {
3911
- logger.error(result.error);
3912
- throw new Error(result.error);
3913
- }
3914
- const output = result.output;
3915
- const extendFields = output.extends?.fields;
3916
- if (extendFields && Object.keys(extendFields).length > 0) {
3917
- const extendedType = pluginManager.extendType({
3918
- originalType: currentType,
3919
- extendFields,
3920
- pluginId: attachment.pluginId
3921
- });
3922
- rawTypes[sourceFilePath][currentType.name] = extendedType;
3923
- currentType = extendedType;
3924
- logger.log(` Extended: ${styles.success(currentType.name)} with ${styles.highlight(Object.keys(extendFields).length.toString())} fields by plugin ${styles.info(attachment.pluginId)}`);
3925
- }
3926
- const plugin = pluginManager.getPlugin(attachment.pluginId);
3927
- for (const [kind, generatedType] of Object.entries(output.types ?? {})) {
3928
- rawTypes[sourceFilePath][generatedType.name] = generatedType;
3929
- typeSourceInfo[generatedType.name] = {
3930
- exportName: generatedType.name,
3931
- pluginId: attachment.pluginId,
3932
- pluginImportPath: pluginManager.getPluginImportPath(attachment.pluginId) ?? "",
3933
- originalFilePath: sourceFilePath,
3934
- originalExportName: typeSourceInfo[rawType.name]?.exportName || rawType.name,
3935
- generatedTypeKind: kind,
3936
- pluginConfig: plugin?.pluginConfig,
3937
- namespace
3938
- };
3939
- logger.log(` Generated: ${styles.success(generatedType.name)} by plugin ${styles.info(attachment.pluginId)}`);
3940
- }
3941
- }
3942
- };
3943
- const loadTypeFile = async (typeFile, tsconfig) => {
3944
- rawTypes[typeFile] = {};
3945
- const loadedTypes = {};
3946
- try {
3947
- const module = await import(pathToFileURL(typeFile).href);
3948
- for (const exportName of Object.keys(module)) {
3949
- const exportedValue = module[exportName];
3950
- const result = TailorDBTypeSchema.safeParse(exportedValue);
3951
- if (!result.success) {
3952
- if (isSdkBranded(exportedValue, "tailordb-type")) throw result.error;
3953
- continue;
3954
- }
3955
- const relativePath = path.relative(process.cwd(), typeFile);
3956
- logger.log(`Type: ${styles.successBright(`"${result.data.name}"`)} loaded from ${styles.path(relativePath)}`);
3957
- await precompileTailorDBTypeScripts(result.data, typeFile, tsconfig);
3958
- rawTypes[typeFile][result.data.name] = result.data;
3959
- loadedTypes[result.data.name] = result.data;
3960
- typeSourceInfo[result.data.name] = {
3961
- filePath: typeFile,
3962
- exportName
3963
- };
3964
- if (exportedValue.plugins && Array.isArray(exportedValue.plugins) && exportedValue.plugins.length > 0) {
3965
- pluginAttachments.set(exportedValue.name, [...exportedValue.plugins]);
3966
- logger.log(` Plugin attachments: ${styles.info(exportedValue.plugins.map((p) => p.pluginId).join(", "))}`);
3967
- await processPluginsForType(exportedValue, exportedValue.plugins, typeFile);
3968
- }
3969
- }
3970
- } catch (error) {
3971
- const relativePath = path.relative(process.cwd(), typeFile);
3972
- logger.error(`Failed to load type from ${styles.bold(relativePath)}`);
3973
- logger.error(String(error));
3974
- throw error;
3975
- }
3976
- return loadedTypes;
3977
- };
3978
- return {
3979
- namespace,
3980
- config,
3981
- get types() {
3982
- return types;
3983
- },
3984
- get typeSourceInfo() {
3985
- return typeSourceInfo;
3986
- },
3987
- get pluginAttachments() {
3988
- return pluginAttachments;
3989
- },
3990
- loadTypes: async () => {
3991
- if (!loadPromise) loadPromise = (async () => {
3992
- if (!config.files || config.files.length === 0) return;
3993
- const typeFiles = loadFilesWithIgnores(config);
3994
- let tsconfig;
3995
- try {
3996
- tsconfig = await resolveTSConfig();
3997
- } catch {
3998
- tsconfig = void 0;
3999
- }
4000
- logger.newline();
4001
- logger.log(`Found ${styles.highlight(typeFiles.length.toString())} type files for TailorDB service ${styles.highlight(`"${namespace}"`)}`);
4002
- if (pluginManager) for (const typeFile of typeFiles) await loadTypeFile(typeFile, tsconfig);
4003
- else await Promise.all(typeFiles.map((typeFile) => loadTypeFile(typeFile, tsconfig)));
4004
- doParseTypes();
4005
- return types;
4006
- })();
4007
- return loadPromise;
4008
- },
4009
- processNamespacePlugins: async () => {
4010
- if (!pluginManager) return;
4011
- const results = await pluginManager.processNamespacePlugins(namespace);
4012
- const pluginGeneratedKey = "__plugin_generated__";
4013
- if (!rawTypes[pluginGeneratedKey]) rawTypes[pluginGeneratedKey] = {};
4014
- let hasGeneratedTypes = false;
4015
- for (const { pluginId, config, result } of results) {
4016
- if (!result.success) {
4017
- logger.error(result.error);
4018
- throw new Error(result.error);
4019
- }
4020
- const output = result.output;
4021
- for (const [kind, generatedType] of Object.entries(output.types ?? {})) {
4022
- rawTypes[pluginGeneratedKey][generatedType.name] = generatedType;
4023
- hasGeneratedTypes = true;
4024
- typeSourceInfo[generatedType.name] = {
4025
- exportName: generatedType.name,
4026
- pluginId,
4027
- pluginImportPath: pluginManager.getPluginImportPath(pluginId) ?? "",
4028
- originalFilePath: "",
4029
- originalExportName: "",
4030
- generatedTypeKind: kind,
4031
- pluginConfig: config,
4032
- namespace
4033
- };
4034
- logger.log(` Generated: ${styles.success(generatedType.name)} by namespace plugin ${styles.info(pluginId)}`);
4035
- }
4036
- }
4037
- if (hasGeneratedTypes) doParseTypes();
4038
- }
4039
- };
4040
- }
4041
-
4042
- //#endregion
4043
- //#region src/parser/service/resolver/schema.ts
4044
- const TailorFieldTypeSchema = z.enum([
4045
- "uuid",
4046
- "string",
4047
- "boolean",
4048
- "integer",
4049
- "float",
4050
- "decimal",
4051
- "enum",
4052
- "date",
4053
- "datetime",
4054
- "time",
4055
- "nested"
4056
- ]);
4057
- const QueryTypeSchema = z.union([z.literal("query"), z.literal("mutation")]).describe("GraphQL operation type");
4058
- const AllowedValueSchema = z.object({
4059
- value: z.string().describe("The allowed value"),
4060
- description: z.string().optional().describe("Description of the allowed value")
4061
- });
4062
- const FieldMetadataSchema = z.object({
4063
- required: z.boolean().optional().describe("Whether the field is required"),
4064
- array: z.boolean().optional().describe("Whether the field is an array"),
4065
- description: z.string().optional().describe("Field description"),
4066
- allowedValues: z.array(AllowedValueSchema).optional().describe("Allowed values for enum fields"),
4067
- hooks: z.object({
4068
- create: functionSchema.optional().describe("Hook function called on creation"),
4069
- update: functionSchema.optional().describe("Hook function called on update")
4070
- }).optional().describe("Lifecycle hooks"),
4071
- typeName: z.string().optional().describe("Type name for nested or enum fields")
4072
- });
4073
- const TailorFieldSchema = z.object({
4074
- type: TailorFieldTypeSchema.describe("Field data type"),
4075
- metadata: FieldMetadataSchema.describe("Field metadata configuration"),
4076
- get fields() {
4077
- return z.record(z.string(), TailorFieldSchema);
4078
- }
4079
- });
4080
- const ResolverSchema = z.object({
4081
- operation: QueryTypeSchema.describe("GraphQL operation type (query or mutation)"),
4082
- name: z.string().describe("Resolver name"),
4083
- description: z.string().optional().describe("Resolver description"),
4084
- input: z.record(z.string(), TailorFieldSchema).optional().describe("Input field definitions"),
4085
- body: functionSchema.describe("Resolver implementation function"),
4086
- output: TailorFieldSchema.describe("Output field definition"),
4087
- publishEvents: z.boolean().optional().describe("Enable publishing events from this resolver")
4088
- });
4089
-
4090
- //#endregion
4091
- //#region src/parser/service/auth/schema.ts
4092
- const AuthInvokerSchema = z.object({
4093
- namespace: z.string().describe("Auth namespace"),
4094
- machineUserName: z.string().describe("Machine user name for authentication")
4095
- });
4096
- const secretValueSchema = z.object({
4097
- vaultName: z.string().describe("Vault name containing the secret"),
4098
- secretKey: z.string().describe("Key of the secret in the vault")
4099
- });
4100
- const OIDCSchema = z.object({
4101
- name: z.string().describe("Identity provider name"),
4102
- kind: z.literal("OIDC"),
4103
- clientID: z.string().describe("OAuth2 client ID"),
4104
- clientSecret: secretValueSchema.describe("OAuth2 client secret"),
4105
- providerURL: z.string().describe("OIDC provider URL"),
4106
- issuerURL: z.string().optional().describe("OIDC issuer URL (defaults to providerURL)"),
4107
- usernameClaim: z.string().optional().describe("JWT claim to use as username")
4108
- });
4109
- const SAMLSchema = z.object({
4110
- name: z.string().describe("Identity provider name"),
4111
- kind: z.literal("SAML"),
4112
- enableSignRequest: z.boolean().default(false).describe("Enable signing of SAML requests"),
4113
- metadataURL: z.string().optional().describe("URL to fetch SAML metadata (mutually exclusive with rawMetadata)"),
4114
- rawMetadata: z.string().optional().describe("Raw SAML metadata XML (mutually exclusive with metadataURL)")
4115
- }).refine((value) => {
4116
- return value.metadataURL !== void 0 !== (value.rawMetadata !== void 0);
4117
- }, "Provide either metadataURL or rawMetadata");
4118
- const IDTokenSchema = z.object({
4119
- name: z.string().describe("Identity provider name"),
4120
- kind: z.literal("IDToken"),
4121
- providerURL: z.string().describe("ID token provider URL"),
4122
- issuerURL: z.string().optional().describe("ID token issuer URL"),
4123
- clientID: z.string().describe("Client ID for ID token validation"),
4124
- usernameClaim: z.string().optional().describe("JWT claim to use as username")
4125
- });
4126
- const BuiltinIdPSchema = z.object({
4127
- name: z.string().describe("Identity provider name"),
4128
- kind: z.literal("BuiltInIdP"),
4129
- namespace: z.string().describe("IdP namespace"),
4130
- clientName: z.string().describe("OAuth2 client name in the IdP")
4131
- });
4132
- const IdProviderSchema = z.discriminatedUnion("kind", [
4133
- OIDCSchema,
4134
- SAMLSchema,
4135
- IDTokenSchema,
4136
- BuiltinIdPSchema
4137
- ]);
4138
- const OAuth2ClientGrantTypeSchema = z.union([z.literal("authorization_code"), z.literal("refresh_token")]).describe("OAuth2 grant type");
4139
- const OAuth2ClientSchema = z.object({
4140
- description: z.string().optional().describe("Client description"),
4141
- grantTypes: z.array(OAuth2ClientGrantTypeSchema).default(["authorization_code", "refresh_token"]).describe("Allowed OAuth2 grant types"),
4142
- redirectURIs: z.array(z.union([
4143
- z.templateLiteral(["https://", z.string()]),
4144
- z.templateLiteral(["http://", z.string()]),
4145
- z.templateLiteral([z.string(), ":url"]),
4146
- z.templateLiteral([
4147
- z.string(),
4148
- ":url/",
4149
- z.string()
4150
- ])
4151
- ])).describe("Allowed redirect URIs"),
4152
- clientType: z.union([
4153
- z.literal("confidential"),
4154
- z.literal("public"),
4155
- z.literal("browser")
4156
- ]).optional().describe("OAuth2 client type"),
4157
- accessTokenLifetimeSeconds: z.number().int().min(60, "Minimum access token lifetime is 60 seconds").max(86400, "Maximum access token lifetime is 1 day (86400 seconds)").optional().describe("Access token lifetime in seconds (60-86400)").transform((val) => val ? {
4158
- seconds: BigInt(val),
4159
- nanos: 0
4160
- } : void 0),
4161
- refreshTokenLifetimeSeconds: z.number().int().min(60, "Minimum refresh token lifetime is 60 seconds").max(604800, "Maximum refresh token lifetime is 7 days (604800 seconds)").optional().describe("Refresh token lifetime in seconds (60-604800)").transform((val) => val ? {
4162
- seconds: BigInt(val),
4163
- nanos: 0
4164
- } : void 0),
4165
- requireDpop: z.boolean().optional().describe("Require DPoP (Demonstrating Proof-of-Possession) for token requests")
4166
- }).refine((data) => !(data.clientType === "browser" && data.requireDpop === true), {
4167
- message: "requireDpop cannot be set to true for browser clients as they don't support DPoP",
4168
- path: ["requireDpop"]
4169
- });
4170
- const SCIMAuthorizationSchema = z.object({
4171
- type: z.union([z.literal("oauth2"), z.literal("bearer")]).describe("SCIM authorization type"),
4172
- bearerSecret: secretValueSchema.optional().describe("Bearer token secret (required for bearer type)")
4173
- });
4174
- const SCIMAttributeTypeSchema = z.union([
4175
- z.literal("string"),
4176
- z.literal("number"),
4177
- z.literal("boolean"),
4178
- z.literal("datetime"),
4179
- z.literal("complex")
4180
- ]).describe("SCIM attribute data type");
4181
- const SCIMAttributeSchema = z.object({
4182
- type: SCIMAttributeTypeSchema.describe("Attribute data type"),
4183
- name: z.string().describe("Attribute name"),
4184
- description: z.string().optional().describe("Attribute description"),
4185
- mutability: z.union([
4186
- z.literal("readOnly"),
4187
- z.literal("readWrite"),
4188
- z.literal("writeOnly")
4189
- ]).optional().describe("Attribute mutability"),
4190
- required: z.boolean().optional().describe("Whether the attribute is required"),
4191
- multiValued: z.boolean().optional().describe("Whether the attribute can have multiple values"),
4192
- uniqueness: z.union([
4193
- z.literal("none"),
4194
- z.literal("server"),
4195
- z.literal("global")
4196
- ]).optional().describe("Uniqueness constraint"),
4197
- canonicalValues: z.array(z.string()).nullable().optional().describe("List of canonical values"),
4198
- get subAttributes() {
4199
- return z.array(SCIMAttributeSchema).nullable().optional();
4200
- }
4201
- });
4202
- const SCIMSchemaSchema = z.object({
4203
- name: z.string().describe("SCIM schema name"),
4204
- attributes: z.array(SCIMAttributeSchema).describe("Schema attributes")
4205
- });
4206
- const SCIMAttributeMappingSchema = z.object({
4207
- tailorDBField: z.string().describe("TailorDB field name to map to"),
4208
- scimPath: z.string().describe("SCIM attribute path")
4209
- });
4210
- const SCIMResourceSchema = z.object({
4211
- name: z.string().describe("SCIM resource name"),
4212
- tailorDBNamespace: z.string().describe("TailorDB namespace for the resource"),
4213
- tailorDBType: z.string().describe("TailorDB type name for the resource"),
4214
- coreSchema: SCIMSchemaSchema.describe("Core SCIM schema definition"),
4215
- attributeMapping: z.array(SCIMAttributeMappingSchema).describe("Attribute mapping configuration")
4216
- });
4217
- const SCIMSchema = z.object({
4218
- machineUserName: z.string().describe("Machine user name for SCIM operations"),
4219
- authorization: SCIMAuthorizationSchema.describe("SCIM authorization configuration"),
4220
- resources: z.array(SCIMResourceSchema).describe("SCIM resource definitions")
4221
- });
4222
- const TenantProviderSchema = z.object({
4223
- namespace: z.string().describe("TailorDB namespace for the tenant type"),
4224
- type: z.string().describe("TailorDB type name for tenants"),
4225
- signatureField: z.string().describe("Field used as the tenant signature")
4226
- });
4227
- const UserProfileSchema = z.object({
4228
- type: z.object({
4229
- name: z.string(),
4230
- fields: z.any(),
4231
- metadata: z.any(),
4232
- hooks: z.any(),
4233
- validate: z.any(),
4234
- features: z.any(),
4235
- indexes: z.any(),
4236
- files: z.any(),
4237
- permission: z.any(),
4238
- gqlPermission: z.any(),
4239
- _output: z.any()
4240
- }),
4241
- usernameField: z.string(),
4242
- attributes: z.record(z.string(), z.literal(true)).optional(),
4243
- attributeList: z.array(z.string()).optional()
4244
- });
4245
- const ValueOperandSchema = z.union([
4246
- z.string(),
4247
- z.boolean(),
4248
- z.array(z.string()),
4249
- z.array(z.boolean())
4250
- ]);
4251
- const MachineUserSchema = z.object({
4252
- attributes: z.record(z.string(), ValueOperandSchema).optional(),
4253
- attributeList: z.array(z.uuid()).optional()
4254
- });
4255
- const BeforeLoginHookSchema = z.object({
4256
- handler: z.function(),
4257
- invoker: z.string()
4258
- });
4259
- const AuthConfigBaseSchema = z.object({
4260
- name: z.string().describe("Auth service name"),
4261
- hooks: z.object({ beforeLogin: BeforeLoginHookSchema.optional().describe("Before login auth hook") }).optional().describe("Auth hooks"),
4262
- machineUsers: z.record(z.string(), MachineUserSchema).optional().describe("Machine user definitions"),
4263
- oauth2Clients: z.record(z.string(), OAuth2ClientSchema).optional().describe("OAuth2 client definitions"),
4264
- idProvider: IdProviderSchema.optional().describe("Identity provider configuration"),
4265
- scim: SCIMSchema.optional().describe("SCIM provisioning configuration"),
4266
- tenantProvider: TenantProviderSchema.optional().describe("Multi-tenant provider configuration"),
4267
- publishSessionEvents: z.boolean().optional().describe("Enable publishing session events")
4268
- });
4269
- const AuthConfigSchema = z.union([AuthConfigBaseSchema.extend({
4270
- userProfile: z.undefined().optional(),
4271
- machineUserAttributes: z.undefined().optional()
4272
- }), z.xor([AuthConfigBaseSchema.extend({
4273
- userProfile: UserProfileSchema,
4274
- machineUserAttributes: z.undefined().optional()
4275
- }), AuthConfigBaseSchema.extend({
4276
- userProfile: z.undefined().optional(),
4277
- machineUserAttributes: z.record(z.string(), TailorFieldSchema)
4278
- })])]).brand("AuthConfig");
4279
-
4280
- //#endregion
4281
- //#region src/cli/services/auth/service.ts
4282
- /**
4283
- * Creates a new AuthService instance.
4284
- * @param config - The auth configuration
4285
- * @param tailorDBServices - The TailorDB services
4286
- * @param externalTailorDBNamespaces - External TailorDB namespaces
4287
- * @returns A new AuthService instance
4288
- */
4289
- function createAuthService(config, tailorDBServices, externalTailorDBNamespaces) {
4290
- const parsedConfig = {
4291
- ...config,
4292
- idProvider: IdProviderSchema.optional().parse(config.idProvider)
4293
- };
4294
- let userProfile;
4295
- return {
4296
- config,
4297
- tailorDBServices,
4298
- externalTailorDBNamespaces,
4299
- parsedConfig,
4300
- get userProfile() {
4301
- return userProfile;
4302
- },
4303
- resolveNamespaces: async () => {
4304
- if (!config.userProfile) return;
4305
- if (config.userProfile.namespace) {
4306
- userProfile = {
4307
- ...config.userProfile,
4308
- namespace: config.userProfile.namespace
4309
- };
4310
- return;
4311
- }
4312
- const totalNamespaceCount = tailorDBServices.length + externalTailorDBNamespaces.length;
4313
- let userProfileNamespace;
4314
- if (totalNamespaceCount === 1) userProfileNamespace = tailorDBServices[0]?.namespace ?? externalTailorDBNamespaces[0];
4315
- else {
4316
- await Promise.all(tailorDBServices.map((tailordb) => tailordb.loadTypes()));
4317
- const userProfileTypeName = typeof config.userProfile.type === "object" && "name" in config.userProfile.type ? config.userProfile.type.name : void 0;
4318
- if (userProfileTypeName) for (const service of tailorDBServices) {
4319
- const types = service.types;
4320
- if (Object.prototype.hasOwnProperty.call(types, userProfileTypeName)) {
4321
- userProfileNamespace = service.namespace;
4322
- break;
4323
- }
4324
- }
4325
- if (!userProfileNamespace) throw new Error(`userProfile type "${config.userProfile.type.name}" not found in any TailorDB namespace`);
4326
- }
4327
- userProfile = {
4328
- ...config.userProfile,
4329
- namespace: userProfileNamespace
4330
- };
4331
- }
4332
- };
4333
- }
4334
-
4335
- //#endregion
4336
- //#region src/parser/service/executor/schema.ts
4337
- const RecordTriggerSchema = z.object({
4338
- kind: z.enum([
4339
- "recordCreated",
4340
- "recordUpdated",
4341
- "recordDeleted"
4342
- ]).describe("Record event type"),
4343
- typeName: z.string().describe("TailorDB type name to watch for events"),
4344
- condition: functionSchema.optional().describe("Condition function to filter events")
4345
- });
4346
- const ResolverExecutedTriggerSchema = z.object({
4347
- kind: z.literal("resolverExecuted"),
4348
- resolverName: z.string().describe("Name of the resolver to trigger on"),
4349
- condition: functionSchema.optional().describe("Condition function to filter events")
4350
- });
4351
- const ScheduleTriggerSchema = z.object({
4352
- kind: z.literal("schedule"),
4353
- cron: z.string().describe("CRON expression for the schedule"),
4354
- timezone: z.string().optional().default("UTC").describe("Timezone for the CRON schedule (default: UTC)")
4355
- });
4356
- const IncomingWebhookTriggerSchema = z.object({ kind: z.literal("incomingWebhook") });
4357
- const IdpUserTriggerSchema = z.object({ kind: z.enum([
4358
- "idpUserCreated",
4359
- "idpUserUpdated",
4360
- "idpUserDeleted"
4361
- ]).describe("IdP user event type") });
4362
- const AuthAccessTokenTriggerSchema = z.object({ kind: z.enum([
4363
- "authAccessTokenIssued",
4364
- "authAccessTokenRefreshed",
4365
- "authAccessTokenRevoked"
4366
- ]).describe("Auth access token event type") });
4367
- const TriggerSchema = z.discriminatedUnion("kind", [
4368
- RecordTriggerSchema,
4369
- ResolverExecutedTriggerSchema,
4370
- ScheduleTriggerSchema,
4371
- IncomingWebhookTriggerSchema,
4372
- IdpUserTriggerSchema,
4373
- AuthAccessTokenTriggerSchema
4374
- ]);
4375
- const FunctionOperationSchema = z.object({
4376
- kind: z.enum(["function", "jobFunction"]),
4377
- body: functionSchema.describe("Function implementation"),
4378
- authInvoker: AuthInvokerSchema.optional().describe("Auth invoker for the function execution")
4379
- });
4380
- const GqlOperationSchema = z.object({
4381
- kind: z.literal("graphql"),
4382
- appName: z.string().optional().describe("Target application name for the GraphQL query"),
4383
- query: z.preprocess((val) => String(val), z.string().describe("GraphQL query string")),
4384
- variables: functionSchema.optional().describe("Function to compute GraphQL variables"),
4385
- authInvoker: AuthInvokerSchema.optional().describe("Auth invoker for the GraphQL execution")
4386
- });
4387
- const WebhookOperationSchema = z.object({
4388
- kind: z.literal("webhook"),
4389
- url: functionSchema.describe("Function returning the webhook URL"),
4390
- requestBody: functionSchema.optional().describe("Function to compute the request body"),
4391
- headers: z.record(z.string(), z.union([z.string(), z.object({
4392
- vault: z.string(),
4393
- key: z.string()
4394
- })])).optional().describe("HTTP headers for the webhook request")
4395
- });
4396
- const WorkflowOperationSchema = z.preprocess((val) => {
4397
- if (val == null || typeof val !== "object" || !("workflow" in val) || typeof val.workflow !== "object" || val.workflow === null) return val;
4398
- const { workflow, ...rest } = val;
4399
- return {
4400
- ...rest,
4401
- workflowName: workflow.name
4402
- };
4403
- }, z.object({
4404
- kind: z.literal("workflow"),
4405
- workflowName: z.string().describe("Name of the workflow to execute"),
4406
- args: z.union([z.record(z.string(), z.unknown()), functionSchema]).optional().describe("Arguments to pass to the workflow"),
4407
- authInvoker: AuthInvokerSchema.optional().describe("Auth invoker for the workflow execution")
4408
- }));
4409
- const OperationSchema = z.union([
4410
- FunctionOperationSchema,
4411
- GqlOperationSchema,
4412
- WebhookOperationSchema,
4413
- WorkflowOperationSchema
4414
- ]);
4415
- const ExecutorSchema = z.object({
4416
- name: z.string().describe("Executor name"),
4417
- description: z.string().optional().describe("Executor description"),
4418
- disabled: z.boolean().optional().default(false).describe("Whether the executor is disabled"),
4419
- trigger: TriggerSchema.describe("Event trigger configuration"),
4420
- operation: OperationSchema.describe("Operation to execute when triggered")
4421
- });
4422
-
4423
- //#endregion
4424
- //#region src/cli/services/executor/loader.ts
4425
- /**
4426
- * Load and validate an executor definition from a file.
4427
- * @param executorFilePath - Path to the executor file
4428
- * @returns Parsed executor or null if invalid
4429
- */
4430
- async function loadExecutor(executorFilePath) {
4431
- const executor = (await import(pathToFileURL(executorFilePath).href)).default;
4432
- const parseResult = ExecutorSchema.safeParse(executor);
4433
- if (!parseResult.success) return null;
4434
- return parseResult.data;
4435
- }
4436
-
4437
- //#endregion
4438
- //#region src/cli/services/executor/bundler.ts
4439
- /**
4440
- * Bundle executors from the specified configuration
4441
- *
4442
- * This function:
4443
- * 1. Creates entry file that extracts operation.body
4444
- * 2. Bundles in a single step with tree-shaking
4445
- * @param options - Bundle executor options
4446
- * @returns Promise that resolves when bundling completes
4447
- */
4448
- async function bundleExecutors(options) {
4449
- const { config, triggerContext, additionalFiles = [], cache, inlineSourcemap } = options;
4450
- const files = [...loadFilesWithIgnores(config), ...additionalFiles];
4451
- if (files.length === 0) {
4452
- logger.warn(`No executor files found for patterns: ${config.files?.join(", ") ?? "(none)"}`);
4453
- return;
4454
- }
4455
- logger.newline();
4456
- logger.log(`Bundling ${styles.highlight(files.length.toString())} files for ${styles.info("\"executor\"")}`);
4457
- const executors = [];
4458
- for (const file of files) {
4459
- const executor = await loadExecutor(file);
4460
- if (!executor) {
4461
- logger.debug(` Skipping: ${file} (could not be loaded)`);
4462
- continue;
4463
- }
4464
- if (!["function", "jobFunction"].includes(executor.operation.kind)) {
4465
- logger.debug(` Skipping: ${executor.name} (not a function executor)`);
4466
- continue;
4467
- }
4468
- executors.push({
4469
- name: executor.name,
4470
- sourceFile: file
4471
- });
4472
- }
4473
- if (executors.length === 0) {
4474
- logger.debug(" No function executors to bundle");
4475
- return;
4476
- }
4477
- const outputDir = path.resolve(getDistDir(), "executors");
4478
- fs$1.mkdirSync(outputDir, { recursive: true });
4479
- await removeStaleEntryFiles(outputDir);
4480
- let tsconfig;
4481
- try {
4482
- tsconfig = await resolveTSConfig();
4483
- } catch {
4484
- tsconfig = void 0;
4485
- }
4486
- await Promise.all(executors.map((executor) => bundleSingleExecutor(executor, outputDir, tsconfig, triggerContext, cache, inlineSourcemap)));
4487
- logger.log(`${styles.success("Bundled")} ${styles.info("\"executor\"")}`);
4488
- }
4489
- async function bundleSingleExecutor(executor, outputDir, tsconfig, triggerContext, cache, inlineSourcemap) {
4490
- const outputPath = path.join(outputDir, `${executor.name}.js`);
4491
- const serializedTriggerContext = serializeTriggerContext(triggerContext);
4492
- const contextHash = computeBundlerContextHash({
4493
- sourceFile: executor.sourceFile,
4494
- serializedTriggerContext,
4495
- tsconfig,
4496
- inlineSourcemap
4497
- });
4498
- await withCache({
4499
- cache,
4500
- kind: "executor",
4501
- name: executor.name,
4502
- sourceFile: executor.sourceFile,
4503
- outputPath,
4504
- contextHash,
4505
- async build(cachePlugins) {
4506
- const entryPath = path.join(outputDir, `${executor.name}.entry.js`);
4507
- const entryContent = ml`
4508
- import _internalExecutor from "${path.resolve(executor.sourceFile)}";
4509
-
4510
- const __executor_function = _internalExecutor.operation.body;
4511
-
4512
- export { __executor_function as main };
4513
- `;
4514
- fs$1.writeFileSync(entryPath, entryContent);
4515
- const triggerPlugin = createTriggerTransformPlugin(triggerContext);
4516
- const plugins = triggerPlugin ? [triggerPlugin] : [];
4517
- plugins.push(...cachePlugins);
4518
- await rolldown.build(rolldown.defineConfig({
4519
- input: entryPath,
4520
- output: {
4521
- file: outputPath,
4522
- format: "esm",
4523
- sourcemap: inlineSourcemap ? "inline" : true,
4524
- minify: inlineSourcemap ? { mangle: { keepNames: true } } : true,
4525
- codeSplitting: false
4526
- },
4527
- tsconfig,
4528
- plugins,
4529
- treeshake: {
4530
- moduleSideEffects: false,
4531
- annotations: true,
4532
- unknownGlobalSideEffects: false
4533
- },
4534
- logLevel: "silent"
4535
- }));
4536
- }
4537
- });
4538
- }
4539
-
4540
- //#endregion
4541
- //#region src/cli/services/executor/service.ts
4542
- /**
4543
- * Creates a new ExecutorService instance.
4544
- * @param params - Parameters for creating the service
4545
- * @returns A new ExecutorService instance
4546
- */
4547
- function createExecutorService(params) {
4548
- const { config } = params;
4549
- const executors = {};
4550
- const pluginExecutors = [];
4551
- let loadPromise;
4552
- const loadExecutorForFile = async (executorFile) => {
4553
- try {
4554
- const executorModule = await import(pathToFileURL(executorFile).href);
4555
- const result = ExecutorSchema.safeParse(executorModule.default);
4556
- if (result.success) {
4557
- const relativePath = path.relative(process.cwd(), executorFile);
4558
- logger.log(`Executor: ${styles.successBright(`"${result.data.name}"`)} loaded from ${styles.path(relativePath)}`);
4559
- executors[executorFile] = result.data;
4560
- return result.data;
4561
- }
4562
- if (isSdkBranded(executorModule.default, "executor")) throw result.error;
4563
- } catch (error) {
4564
- const relativePath = path.relative(process.cwd(), executorFile);
4565
- logger.error(`Failed to load executor from ${styles.bold(relativePath)}`);
4566
- logger.error(String(error));
4567
- throw error;
4568
- }
4569
- };
4570
- return {
4571
- config,
4572
- get executors() {
4573
- return executors;
4574
- },
4575
- get pluginExecutors() {
4576
- return pluginExecutors;
4577
- },
4578
- loadExecutors: async () => {
4579
- if (!loadPromise) loadPromise = (async () => {
4580
- if (!config.files || config.files.length === 0) return;
4581
- const executorFiles = loadFilesWithIgnores(config);
4582
- logger.newline();
4583
- logger.log(`Found ${styles.highlight(executorFiles.length.toString())} executor files`);
4584
- await Promise.all(executorFiles.map((executorFile) => loadExecutorForFile(executorFile)));
4585
- return executors;
4586
- })();
4587
- return loadPromise;
4588
- },
4589
- loadPluginExecutorFiles: async (filePaths) => {
4590
- if (filePaths.length === 0) return;
4591
- logger.newline();
4592
- logger.log(`Loading ${styles.highlight(filePaths.length.toString())} plugin-generated executor files`);
4593
- for (const filePath of filePaths) {
4594
- const executor = await loadExecutorForFile(filePath);
4595
- if (executor) pluginExecutors.push({
4596
- executor,
4597
- pluginId: "plugin-generated",
4598
- sourceTypeName: void 0
4599
- });
4600
- }
4601
- }
4602
- };
4603
- }
4604
-
4605
- //#endregion
4606
- //#region src/cli/services/resolver/loader.ts
4607
- /**
4608
- * Load and validate a resolver definition from a file.
4609
- * @param resolverFilePath - Path to the resolver file
4610
- * @returns Parsed resolver or null if invalid
4611
- */
4612
- async function loadResolver(resolverFilePath) {
4613
- const resolver = (await import(pathToFileURL(resolverFilePath).href)).default;
4614
- const parseResult = ResolverSchema.safeParse(resolver);
4615
- if (!parseResult.success) return null;
4616
- return parseResult.data;
4617
- }
4618
-
4619
- //#endregion
4620
- //#region src/cli/services/resolver/bundler.ts
4621
- /**
4622
- * Bundle resolvers for the specified namespace
4623
- *
4624
- * This function:
4625
- * 1. Uses a transform plugin to add validation wrapper during bundling
4626
- * 2. Creates entry file
4627
- * 3. Bundles in a single step with tree-shaking
4628
- * @param namespace - Resolver namespace name
4629
- * @param config - Resolver file loading configuration
4630
- * @param triggerContext - Trigger context for workflow/job transformations
4631
- * @param cache - Optional bundle cache for skipping unchanged builds
4632
- * @param inlineSourcemap - Whether to enable inline sourcemaps
4633
- * @returns Promise that resolves when bundling completes
4634
- */
4635
- async function bundleResolvers(namespace, config, triggerContext, cache, inlineSourcemap) {
4636
- const files = loadFilesWithIgnores(config);
4637
- if (files.length === 0) {
4638
- logger.warn(`No resolver files found for patterns: ${config.files?.join(", ") ?? "(none)"}`);
4639
- return;
4640
- }
4641
- logger.newline();
4642
- logger.log(`Bundling ${styles.highlight(files.length.toString())} files for ${styles.info(`"${namespace}"`)}`);
4643
- const resolvers = [];
4644
- for (const file of files) {
4645
- const resolver = await loadResolver(file);
4646
- if (!resolver) {
4647
- logger.debug(` Skipping: ${file} (could not be loaded)`);
4648
- continue;
4649
- }
4650
- resolvers.push({
4651
- name: resolver.name,
4652
- sourceFile: file
4653
- });
4654
- }
4655
- const outputDir = path.resolve(getDistDir(), "resolvers");
4656
- fs$1.mkdirSync(outputDir, { recursive: true });
4657
- await removeStaleEntryFiles(outputDir);
4658
- let tsconfig;
4659
- try {
4660
- tsconfig = await resolveTSConfig();
4661
- } catch {
4662
- tsconfig = void 0;
4663
- }
4664
- await Promise.all(resolvers.map((resolver) => bundleSingleResolver(resolver, outputDir, tsconfig, triggerContext, cache, inlineSourcemap)));
4665
- logger.log(`${styles.success("Bundled")} ${styles.info(`"${namespace}"`)}`);
4666
- }
4667
- async function bundleSingleResolver(resolver, outputDir, tsconfig, triggerContext, cache, inlineSourcemap) {
4668
- const outputPath = path.join(outputDir, `${resolver.name}.js`);
4669
- const serializedTriggerContext = serializeTriggerContext(triggerContext);
4670
- const contextHash = computeBundlerContextHash({
4671
- sourceFile: resolver.sourceFile,
4672
- serializedTriggerContext,
4673
- tsconfig,
4674
- inlineSourcemap
4675
- });
4676
- await withCache({
4677
- cache,
4678
- kind: "resolver",
4679
- name: resolver.name,
4680
- sourceFile: resolver.sourceFile,
4681
- outputPath,
4682
- contextHash,
4683
- async build(cachePlugins) {
4684
- const entryPath = path.join(outputDir, `${resolver.name}.entry.js`);
4685
- const entryContent = ml`
4686
- import _internalResolver from "${path.resolve(resolver.sourceFile)}";
4687
- import { t } from "@tailor-platform/sdk";
4688
-
4689
- const $tailor_resolver_body = async (context) => {
4690
- if (_internalResolver.input) {
4691
- const result = t.object(_internalResolver.input).parse({
4692
- value: context.input,
4693
- data: context.input,
4694
- user: context.user,
4695
- });
4696
-
4697
- if (result.issues) {
4698
- const errorMessages = result.issues
4699
- .map(issue => {
4700
- const path = issue.path ? issue.path.join('.') : '';
4701
- return path ? \` \${path}: \${issue.message}\` : issue.message;
4702
- })
4703
- .join('\\n');
4704
- throw new Error(\`Failed to input validation:\\n\${errorMessages}\`);
4705
- }
4706
- }
4707
-
4708
- return _internalResolver.body(context);
4709
- };
4710
-
4711
- export { $tailor_resolver_body as main };
4712
- `;
4713
- fs$1.writeFileSync(entryPath, entryContent);
4714
- const triggerPlugin = createTriggerTransformPlugin(triggerContext);
4715
- const plugins = triggerPlugin ? [triggerPlugin] : [];
4716
- plugins.push(...cachePlugins);
4717
- await rolldown.build(rolldown.defineConfig({
4718
- input: entryPath,
4719
- output: {
4720
- file: outputPath,
4721
- format: "esm",
4722
- sourcemap: inlineSourcemap ? "inline" : true,
4723
- minify: inlineSourcemap ? { mangle: { keepNames: true } } : true,
4724
- codeSplitting: false
4725
- },
4726
- tsconfig,
4727
- plugins,
4728
- treeshake: {
4729
- moduleSideEffects: false,
4730
- annotations: true,
4731
- unknownGlobalSideEffects: false
4732
- },
4733
- logLevel: "silent"
4734
- }));
4735
- }
4736
- });
4737
- }
4738
-
4739
- //#endregion
4740
- //#region src/cli/services/resolver/service.ts
4741
- /**
4742
- * Creates a new ResolverService instance.
4743
- * @param namespace - The namespace for this resolver service
4744
- * @param config - The resolver service configuration
4745
- * @returns A new ResolverService instance
4746
- */
4747
- function createResolverService(namespace, config) {
4748
- const resolvers = {};
4749
- const loadResolverForFile = async (resolverFile) => {
4750
- try {
4751
- const resolverModule = await import(pathToFileURL(resolverFile).href);
4752
- const result = ResolverSchema.safeParse(resolverModule.default);
4753
- if (result.success) {
4754
- const relativePath = path.relative(process.cwd(), resolverFile);
4755
- logger.log(`Resolver: ${styles.successBright(`"${result.data.name}"`)} loaded from ${styles.path(relativePath)}`);
4756
- resolvers[resolverFile] = result.data;
4757
- return result.data;
4758
- }
4759
- if (isSdkBranded(resolverModule.default, "resolver")) throw result.error;
4760
- } catch (error) {
4761
- const relativePath = path.relative(process.cwd(), resolverFile);
4762
- logger.error(`Failed to load resolver from ${styles.bold(relativePath)}`);
4763
- logger.error(String(error));
4764
- throw error;
4765
- }
4766
- };
4767
- return {
4768
- namespace,
4769
- config,
4770
- get resolvers() {
4771
- return resolvers;
4772
- },
4773
- loadResolvers: async () => {
4774
- if (Object.keys(resolvers).length > 0) return;
4775
- if (!config.files || config.files.length === 0) return;
4776
- const resolverFiles = loadFilesWithIgnores(config);
4777
- logger.log(`Found ${styles.highlight(resolverFiles.length.toString())} resolver files for service ${styles.highlight(`"${namespace}"`)}`);
4778
- await Promise.all(resolverFiles.map((resolverFile) => loadResolverForFile(resolverFile)));
4779
- }
4780
- };
4781
- }
4782
-
4783
- //#endregion
4784
- //#region src/cli/services/workflow/source-transformer.ts
4785
- /**
4786
- * Find variable declarations by export names
4787
- * Returns a map of export name to statement range
4788
- * @param program - Parsed TypeScript program
4789
- * @returns Map of export name to statement range
4790
- */
4791
- function findVariableDeclarationsByName(program) {
4792
- const declarations = /* @__PURE__ */ new Map();
4793
- function walk(node) {
4794
- if (!node || typeof node !== "object") return;
4795
- const nodeType = node.type;
4796
- if (nodeType === "VariableDeclaration") {
4797
- const varDecl = node;
4798
- for (const decl of varDecl.declarations || []) if (decl.id?.type === "Identifier" && decl.id.name) {
4799
- if (!declarations.has(decl.id.name)) declarations.set(decl.id.name, {
4800
- start: varDecl.start,
4801
- end: varDecl.end
4802
- });
4803
- }
4804
- }
4805
- if (nodeType === "ExportNamedDeclaration") {
4806
- const exportDecl = node;
4807
- const declaration = exportDecl.declaration;
4808
- if (declaration?.type === "VariableDeclaration") {
4809
- const varDecl = declaration;
4810
- for (const decl of varDecl.declarations || []) if (decl.id?.type === "Identifier" && decl.id.name) declarations.set(decl.id.name, {
4811
- start: exportDecl.start,
4812
- end: exportDecl.end
4813
- });
4814
- }
4815
- }
4816
- for (const key of Object.keys(node)) {
4817
- const child = node[key];
4818
- if (Array.isArray(child)) child.forEach((c) => walk(c));
4819
- else if (child && typeof child === "object") walk(child);
4820
- }
4821
- }
4822
- walk(program);
4823
- return declarations;
4824
- }
4825
- /**
4826
- * Find createWorkflow default export declarations
4827
- * Returns the range of the export statement to remove
4828
- * @param program - Parsed TypeScript program
4829
- * @returns Range of the default export statement or null
4830
- */
4831
- function findWorkflowDefaultExport(program) {
4832
- const bindings = collectSdkBindings(program, "createWorkflow");
4833
- for (const statement of program.body) if (statement.type === "ExportDefaultDeclaration") {
4834
- const exportDecl = statement;
4835
- const declaration = exportDecl.declaration;
4836
- if (isSdkFunctionCall(declaration, bindings, "createWorkflow")) return {
4837
- start: exportDecl.start,
4838
- end: exportDecl.end
4839
- };
4840
- if (declaration.type === "Identifier") return {
4841
- start: exportDecl.start,
4842
- end: exportDecl.end
4843
- };
4844
- }
4845
- return null;
4846
- }
4847
- /**
4848
- * Transform workflow source code
4849
- * - Transform .trigger() calls to tailor.workflow.triggerJobFunction()
4850
- * - Other jobs: remove entire variable declaration
4851
- * @param source - The source code to transform
4852
- * @param targetJobName - The name of the target job (from job config)
4853
- * @param targetJobExportName - The export name of the target job (optional, for enhanced detection)
4854
- * @param otherJobExportNames - Export names of other jobs to remove (optional, for enhanced detection)
4855
- * @param allJobsMap - Map from export name to job name for trigger transformation (optional)
4856
- * @returns Transformed workflow source code
4857
- */
4858
- function transformWorkflowSource(source, targetJobName, targetJobExportName, otherJobExportNames, allJobsMap) {
4859
- const { program } = parseSync("input.ts", source);
4860
- const detectedJobs = findAllJobs(program, source);
4861
- const jobNameMap = allJobsMap ?? buildJobNameMap(detectedJobs);
4862
- const allDeclarations = findVariableDeclarationsByName(program);
4863
- const triggerCalls = detectTriggerCalls(program, source);
4864
- const replacements = [];
4865
- const removedRanges = [];
4866
- const isInsideRemovedRange = (pos) => {
4867
- return removedRanges.some((r) => pos >= r.start && pos < r.end);
4868
- };
4869
- const isAlreadyMarkedForRemoval = (start) => {
4870
- return removedRanges.some((r) => r.start === start);
4871
- };
4872
- for (const job of detectedJobs) {
4873
- if (job.name === targetJobName) continue;
4874
- if (job.statementRange && !isAlreadyMarkedForRemoval(job.statementRange.start)) {
4875
- const endPos = findStatementEnd(source, job.statementRange.end);
4876
- removedRanges.push({
4877
- start: job.statementRange.start,
4878
- end: endPos
4879
- });
4880
- replacements.push({
4881
- start: job.statementRange.start,
4882
- end: endPos,
4883
- text: ""
4884
- });
4885
- } else if (!job.statementRange) replacements.push({
4886
- start: job.bodyValueRange.start,
4887
- end: job.bodyValueRange.end,
4888
- text: "() => {}"
4889
- });
4890
- }
4891
- if (otherJobExportNames) for (const exportName of otherJobExportNames) {
4892
- if (exportName === targetJobExportName) continue;
4893
- const declRange = allDeclarations.get(exportName);
4894
- if (declRange && !isAlreadyMarkedForRemoval(declRange.start)) {
4895
- const endPos = findStatementEnd(source, declRange.end);
4896
- removedRanges.push({
4897
- start: declRange.start,
4898
- end: endPos
4899
- });
4900
- replacements.push({
4901
- start: declRange.start,
4902
- end: endPos,
4903
- text: ""
4904
- });
4905
- }
4906
- }
4907
- const workflowExport = findWorkflowDefaultExport(program);
4908
- if (workflowExport && !isAlreadyMarkedForRemoval(workflowExport.start)) {
4909
- const endPos = findStatementEnd(source, workflowExport.end);
4910
- removedRanges.push({
4911
- start: workflowExport.start,
4912
- end: endPos
4913
- });
4914
- replacements.push({
4915
- start: workflowExport.start,
4916
- end: endPos,
4917
- text: ""
4918
- });
4919
- }
4920
- for (const call of triggerCalls) {
4921
- if (isInsideRemovedRange(call.callRange.start)) continue;
4922
- const jobName = jobNameMap.get(call.identifierName);
4923
- if (jobName) {
4924
- const transformedCall = `tailor.workflow.triggerJobFunction("${jobName}", ${call.argsText || "undefined"})`;
4925
- replacements.push({
4926
- start: call.fullRange.start,
4927
- end: call.fullRange.end,
4928
- text: transformedCall
4929
- });
4930
- }
4931
- }
4932
- return applyReplacements(source, replacements);
4933
- }
4934
-
4935
- //#endregion
4936
- //#region src/cli/services/workflow/bundler.ts
4937
- /**
4938
- * Bundle workflow jobs
4939
- *
4940
- * This function:
4941
- * 1. Detects which jobs are actually used (mainJobs + their dependencies)
4942
- * 2. Uses a transform plugin to transform trigger calls during bundling
4943
- * 3. Creates entry file and bundles with tree-shaking
4944
- *
4945
- * Returns metadata about which jobs each workflow uses.
4946
- * @param allJobs - All available job infos
4947
- * @param mainJobNames - Names of main jobs
4948
- * @param env - Environment variables to inject
4949
- * @param triggerContext - Trigger context for transformations
4950
- * @param cache - Optional bundle cache for skipping unchanged builds
4951
- * @param inlineSourcemap - Whether to enable inline sourcemaps
4952
- * @returns Workflow job bundling result
4953
- */
4954
- async function bundleWorkflowJobs(allJobs, mainJobNames, env = {}, triggerContext, cache, inlineSourcemap) {
4955
- if (allJobs.length === 0) {
4956
- logger.warn("No workflow jobs to bundle");
4957
- return { mainJobDeps: {} };
4958
- }
4959
- const { usedJobs, mainJobDeps } = await filterUsedJobs(allJobs, mainJobNames);
4960
- logger.newline();
4961
- logger.log(`Bundling ${styles.highlight(usedJobs.length.toString())} files for ${styles.info("\"workflow-job\"")}`);
4962
- const outputDir = path.resolve(getDistDir(), "workflow-jobs");
4963
- fs$1.mkdirSync(outputDir, { recursive: true });
4964
- const currentJobNames = new Set(usedJobs.map((j) => j.name));
4965
- const existingFiles = fs$1.readdirSync(outputDir);
4966
- for (const file of existingFiles) if (file.endsWith(".js") && !currentJobNames.has(path.basename(file, ".js"))) fs$1.rmSync(path.join(outputDir, file), { force: true });
4967
- else if (file.endsWith(".js.map") && !currentJobNames.has(path.basename(file, ".js.map"))) fs$1.rmSync(path.join(outputDir, file), { force: true });
4968
- let tsconfig;
4969
- try {
4970
- tsconfig = await resolveTSConfig();
4971
- } catch {
4972
- tsconfig = void 0;
4973
- }
4974
- await Promise.all(usedJobs.map((job) => bundleSingleJob(job, usedJobs, outputDir, tsconfig, env, triggerContext, cache, inlineSourcemap)));
4975
- logger.log(`${styles.success("Bundled")} ${styles.info("\"workflow-job\"")}`);
4976
- return { mainJobDeps };
4977
- }
4978
- /**
4979
- * Filter jobs to only include those that are actually used.
4980
- * A job is "used" if:
4981
- * - It's a mainJob of a workflow
4982
- * - It's called via .trigger() from another used job (transitively)
4983
- *
4984
- * Also returns a map of mainJob -> all jobs it depends on (for metadata).
4985
- * @param allJobs - All available job infos
4986
- * @param mainJobNames - Names of main jobs
4987
- * @returns Used jobs and main job dependency map
4988
- */
4989
- async function filterUsedJobs(allJobs, mainJobNames) {
4990
- if (allJobs.length === 0 || mainJobNames.length === 0) return {
4991
- usedJobs: [],
4992
- mainJobDeps: {}
4993
- };
4994
- const jobsBySourceFile = /* @__PURE__ */ new Map();
4995
- for (const job of allJobs) {
4996
- const existing = jobsBySourceFile.get(job.sourceFile) || [];
4997
- existing.push(job);
4998
- jobsBySourceFile.set(job.sourceFile, existing);
4999
- }
5000
- const exportNameToJobName = /* @__PURE__ */ new Map();
5001
- for (const job of allJobs) exportNameToJobName.set(job.exportName, job.name);
5002
- const dependencies = /* @__PURE__ */ new Map();
5003
- const fileResults = await Promise.all(Array.from(jobsBySourceFile.entries()).map(async ([sourceFile, jobs]) => {
5004
- try {
5005
- const source = await fs$1.promises.readFile(sourceFile, "utf-8");
5006
- const { program } = parseSync("input.ts", source);
5007
- const detectedJobs = findAllJobs(program, source);
5008
- const localExportNameToJobName = /* @__PURE__ */ new Map();
5009
- for (const detected of detectedJobs) if (detected.exportName) localExportNameToJobName.set(detected.exportName, detected.name);
5010
- const triggerCalls = detectTriggerCalls(program, source);
5011
- const jobDependencies = [];
5012
- for (const job of jobs) {
5013
- const detectedJob = detectedJobs.find((d) => d.name === job.name);
5014
- if (!detectedJob) continue;
5015
- const jobDeps = /* @__PURE__ */ new Set();
5016
- for (const call of triggerCalls) if (detectedJob.bodyValueRange && call.callRange.start >= detectedJob.bodyValueRange.start && call.callRange.end <= detectedJob.bodyValueRange.end) {
5017
- const triggeredJobName = localExportNameToJobName.get(call.identifierName) || exportNameToJobName.get(call.identifierName);
5018
- if (triggeredJobName) jobDeps.add(triggeredJobName);
5019
- }
5020
- if (jobDeps.size > 0) jobDependencies.push({
5021
- jobName: job.name,
5022
- deps: jobDeps
5023
- });
5024
- }
5025
- return jobDependencies;
5026
- } catch {
5027
- return [];
5028
- }
5029
- }));
5030
- for (const jobDependencies of fileResults) for (const { jobName, deps } of jobDependencies) dependencies.set(jobName, deps);
5031
- const usedJobNames = /* @__PURE__ */ new Set();
5032
- const mainJobDeps = {};
5033
- function collectDeps(jobName, collected) {
5034
- if (collected.has(jobName)) return;
5035
- collected.add(jobName);
5036
- const deps = dependencies.get(jobName);
5037
- if (deps) for (const dep of deps) collectDeps(dep, collected);
5038
- }
5039
- for (const mainJobName of mainJobNames) {
5040
- const depsForMainJob = /* @__PURE__ */ new Set();
5041
- collectDeps(mainJobName, depsForMainJob);
5042
- mainJobDeps[mainJobName] = Array.from(depsForMainJob);
5043
- for (const dep of depsForMainJob) usedJobNames.add(dep);
5044
- }
5045
- return {
5046
- usedJobs: allJobs.filter((job) => usedJobNames.has(job.name)),
5047
- mainJobDeps
5048
- };
5049
- }
5050
- async function bundleSingleJob(job, allJobs, outputDir, tsconfig, env, triggerContext, cache, inlineSourcemap) {
5051
- const outputPath = path.join(outputDir, `${job.name}.js`);
5052
- const serializedTriggerContext = serializeTriggerContext(triggerContext);
5053
- const sortedEnvPrefix = JSON.stringify(Object.fromEntries(Object.entries(env).sort(([a], [b]) => a.localeCompare(b))));
5054
- const contextHash = computeBundlerContextHash({
5055
- sourceFile: job.sourceFile,
5056
- serializedTriggerContext,
5057
- tsconfig,
5058
- inlineSourcemap,
5059
- prefix: sortedEnvPrefix
5060
- });
5061
- await withCache({
5062
- cache,
5063
- kind: "workflow-job",
5064
- name: job.name,
5065
- sourceFile: job.sourceFile,
5066
- outputPath,
5067
- contextHash,
5068
- async build(cachePlugins) {
5069
- const entryPath = path.join(outputDir, `${job.name}.entry.js`);
5070
- const absoluteSourcePath = path.resolve(job.sourceFile);
5071
- const entryContent = ml`
5072
- import { ${job.exportName} } from "${absoluteSourcePath}";
5073
-
5074
- export async function main(input) {
5075
- const env = ${JSON.stringify(env)};
5076
- return await ${job.exportName}.body(input, { env });
5077
- }
5078
- `;
5079
- fs$1.writeFileSync(entryPath, entryContent);
5080
- const otherJobExportNames = allJobs.filter((j) => j.name !== job.name).map((j) => j.exportName);
5081
- const allJobsMap = /* @__PURE__ */ new Map();
5082
- for (const j of allJobs) allJobsMap.set(j.exportName, j.name);
5083
- const plugins = [{
5084
- name: "workflow-transform",
5085
- transform: {
5086
- filter: { id: { include: [/\.ts$/, /\.js$/] } },
5087
- handler(code, id) {
5088
- if (!code.includes("createWorkflowJob") && !code.includes("createWorkflow") && !code.includes(".trigger(")) return null;
5089
- let transformed = transformWorkflowSource(code, job.name, job.exportName, otherJobExportNames, allJobsMap);
5090
- if (triggerContext && transformed.includes(".trigger(")) transformed = transformFunctionTriggers(transformed, triggerContext.workflowNameMap, triggerContext.jobNameMap, triggerContext.workflowFileMap, id);
5091
- return { code: transformed };
5092
- }
5093
- }
5094
- }, ...cachePlugins];
5095
- await rolldown.build(rolldown.defineConfig({
5096
- input: entryPath,
5097
- output: {
5098
- file: outputPath,
5099
- format: "esm",
5100
- sourcemap: inlineSourcemap ? "inline" : true,
5101
- minify: inlineSourcemap ? { mangle: { keepNames: true } } : true,
5102
- codeSplitting: false
5103
- },
5104
- tsconfig,
5105
- plugins,
5106
- treeshake: {
5107
- moduleSideEffects: false,
5108
- annotations: true,
5109
- unknownGlobalSideEffects: false
5110
- },
5111
- logLevel: "silent"
5112
- }));
5113
- }
5114
- });
5115
- }
5116
-
5117
- //#endregion
5118
- //#region src/parser/service/workflow/schema.ts
5119
- const WorkflowJobSchema = z.object({
5120
- name: z.string().describe("Job name (must be unique across the project)"),
5121
- trigger: functionSchema.describe("Trigger function that initiates the job"),
5122
- body: functionSchema.describe("Job implementation function")
5123
- });
5124
- const durationUnits = [
5125
- "ms",
5126
- "s",
5127
- "m"
5128
- ];
5129
- const unitToSeconds = {
5130
- ms: 1 / 1e3,
5131
- s: 1,
5132
- m: 60
5133
- };
5134
- function durationToSeconds(duration) {
5135
- const match = duration.match(/^(\d+)(ms|s|m)$/);
5136
- if (!match) return 0;
5137
- return parseInt(match[1], 10) * unitToSeconds[match[2]];
5138
- }
5139
- const baseDurationSchema = z.templateLiteral([z.number().int().positive(), z.enum(durationUnits)]);
5140
- const durationSchema = (maxSeconds) => baseDurationSchema.refine((val) => durationToSeconds(val) <= maxSeconds, { message: `Duration must be at most ${maxSeconds} seconds` });
5141
- const RetryPolicySchema = z.object({
5142
- maxRetries: z.number().int().min(1).max(10).describe("Maximum number of retries (1-10)"),
5143
- initialBackoff: durationSchema(3600).describe("Initial backoff duration (e.g., '1s', '500ms', '1m', max 1h)"),
5144
- maxBackoff: durationSchema(86400).describe("Maximum backoff duration (e.g., '30s', '5m', max 24h)"),
5145
- backoffMultiplier: z.number().min(1).describe("Backoff multiplier (>= 1)")
5146
- }).refine((data) => durationToSeconds(data.initialBackoff) <= durationToSeconds(data.maxBackoff), {
5147
- message: "initialBackoff must be less than or equal to maxBackoff",
5148
- path: ["initialBackoff"]
5149
- }).refine((data) => durationToSeconds(data.initialBackoff) > 0, {
5150
- message: "initialBackoff must be greater than 0",
5151
- path: ["initialBackoff"]
5152
- });
5153
- const WorkflowSchema = z.object({
5154
- name: z.string().describe("Workflow name"),
5155
- mainJob: WorkflowJobSchema.describe("Main job that starts the workflow"),
5156
- retryPolicy: RetryPolicySchema.optional().describe("Retry policy for the workflow")
5157
- });
5158
-
5159
- //#endregion
5160
- //#region src/cli/services/workflow/service.ts
5161
- /**
5162
- * Creates a new WorkflowService instance.
5163
- * @param params - Parameters for creating the service
5164
- * @returns A new WorkflowService instance
5165
- */
5166
- function createWorkflowService(params) {
5167
- const { config } = params;
5168
- let workflows = {};
5169
- let workflowSources = [];
5170
- let jobs = [];
5171
- let fileCount = 0;
5172
- let loaded = false;
5173
- return {
5174
- config,
5175
- get workflows() {
5176
- return workflows;
5177
- },
5178
- get workflowSources() {
5179
- return workflowSources;
5180
- },
5181
- get jobs() {
5182
- return jobs;
5183
- },
5184
- get fileCount() {
5185
- return fileCount;
5186
- },
5187
- loadWorkflows: async () => {
5188
- if (loaded) return;
5189
- const result = await loadAndCollectJobs(config);
5190
- workflows = result.workflows;
5191
- workflowSources = result.workflowSources;
5192
- jobs = result.jobs;
5193
- fileCount = result.fileCount;
5194
- loaded = true;
5195
- },
5196
- printLoadedWorkflows: () => {
5197
- if (fileCount === 0) return;
5198
- logger.newline();
5199
- logger.log(`Found ${styles.highlight(fileCount.toString())} workflow files`);
5200
- for (const { workflow, sourceFile } of workflowSources) {
5201
- const relativePath = path.relative(process.cwd(), sourceFile);
5202
- logger.log(`Workflow: ${styles.successBright(`"${workflow.name}"`)} loaded from ${styles.path(relativePath)}`);
5203
- }
5204
- }
5205
- };
5206
- }
5207
- /**
5208
- * Load workflow files and collect all jobs in a single pass.
5209
- * Dependencies are detected at bundle time via AST analysis.
5210
- * @param config - Workflow service configuration
5211
- * @returns Loaded workflows and collected jobs
5212
- */
5213
- async function loadAndCollectJobs(config) {
5214
- const workflows = {};
5215
- const workflowSources = [];
5216
- const collectedJobs = [];
5217
- if (!config.files || config.files.length === 0) return {
5218
- workflows,
5219
- workflowSources,
5220
- jobs: collectedJobs,
5221
- fileCount: 0
5222
- };
5223
- const workflowFiles = loadFilesWithIgnores(config);
5224
- const fileCount = workflowFiles.length;
5225
- const allJobsMap = /* @__PURE__ */ new Map();
5226
- const loadResults = await Promise.all(workflowFiles.map(async (workflowFile) => {
5227
- const { jobs, workflow } = await loadFileContent(workflowFile);
5228
- return {
5229
- workflowFile,
5230
- jobs,
5231
- workflow
5232
- };
5233
- }));
5234
- for (const { workflowFile, jobs, workflow } of loadResults) {
5235
- if (workflow) {
5236
- workflowSources.push({
5237
- workflow,
5238
- sourceFile: workflowFile
5239
- });
5240
- workflows[workflowFile] = workflow;
5241
- }
5242
- for (const job of jobs) {
5243
- const existing = allJobsMap.get(job.name);
5244
- if (existing) throw new Error(`Duplicate job name "${job.name}" found:\n - ${existing.sourceFile} (export: ${existing.exportName})\n - ${job.sourceFile} (export: ${job.exportName})\nEach job must have a unique name.`);
5245
- allJobsMap.set(job.name, job);
5246
- collectedJobs.push(job);
5247
- }
5248
- }
5249
- return {
5250
- workflows,
5251
- workflowSources,
5252
- jobs: collectedJobs,
5253
- fileCount
5254
- };
5255
- }
5256
- /**
5257
- * Load a single file and extract jobs and workflow
5258
- * @param filePath - Path to the workflow file
5259
- * @returns Extracted jobs and workflow
5260
- */
5261
- async function loadFileContent(filePath) {
5262
- const jobs = [];
5263
- let workflow = null;
5264
- try {
5265
- const module = await import(pathToFileURL(filePath).href);
5266
- for (const [exportName, exportValue] of Object.entries(module)) {
5267
- if (exportName === "default") {
5268
- const workflowResult = WorkflowSchema.safeParse(exportValue);
5269
- if (workflowResult.success) workflow = workflowResult.data;
5270
- else if (isSdkBranded(exportValue, ["workflow", "workflow-job"])) throw workflowResult.error;
5271
- continue;
5272
- }
5273
- const jobResult = WorkflowJobSchema.safeParse(exportValue);
5274
- if (jobResult.success) jobs.push({
5275
- name: jobResult.data.name,
5276
- exportName,
5277
- sourceFile: filePath
5278
- });
5279
- else if (isSdkBranded(exportValue, ["workflow", "workflow-job"])) throw jobResult.error;
5280
- }
5281
- } catch (error) {
5282
- const relativePath = path.relative(process.cwd(), filePath);
5283
- logger.error(`${styles.error("Failed to load workflow from")} ${styles.errorBright(relativePath)}`);
5284
- logger.error(String(error));
5285
- throw error;
5286
- }
5287
- return {
5288
- jobs,
5289
- workflow
5290
- };
5291
- }
5292
-
5293
- //#endregion
5294
- //#region src/parser/generator-config/index.ts
5295
- const DependencyKindSchema = z.enum([
5296
- "tailordb",
5297
- "resolver",
5298
- "executor"
5299
- ]);
5300
- const KyselyTypeConfigSchema = z.tuple([z.literal("@tailor-platform/kysely-type"), z.object({ distPath: z.string() })]);
5301
- const SeedConfigSchema = z.tuple([z.literal("@tailor-platform/seed"), z.object({
5302
- distPath: z.string(),
5303
- machineUserName: z.string().optional()
5304
- })]);
5305
- const EnumConstantsConfigSchema = z.tuple([z.literal("@tailor-platform/enum-constants"), z.object({ distPath: z.string() })]);
5306
- const FileUtilsConfigSchema = z.tuple([z.literal("@tailor-platform/file-utils"), z.object({ distPath: z.string() })]);
5307
- const CodeGeneratorSchema = z.object({
5308
- id: z.string(),
5309
- description: z.string(),
5310
- dependencies: z.array(DependencyKindSchema),
5311
- processType: z.function().optional(),
5312
- processResolver: z.function().optional(),
5313
- processExecutor: z.function().optional(),
5314
- processTailorDBNamespace: z.function().optional(),
5315
- processResolverNamespace: z.function().optional(),
5316
- aggregate: z.function({ output: z.any() })
5317
- });
5318
- const BaseGeneratorConfigSchema = z.union([
5319
- KyselyTypeConfigSchema,
5320
- SeedConfigSchema,
5321
- EnumConstantsConfigSchema,
5322
- FileUtilsConfigSchema,
5323
- CodeGeneratorSchema
5324
- ]);
5325
-
5326
- //#endregion
5327
- //#region src/parser/plugin-config/schema.ts
5328
- const PluginConfigSchema = z.object({
5329
- id: z.string(),
5330
- description: z.string(),
5331
- importPath: z.string().optional(),
5332
- pluginConfig: z.unknown().optional(),
5333
- typeConfigRequired: z.union([z.boolean(), functionSchema]).optional(),
5334
- onTypeLoaded: functionSchema.optional(),
5335
- onNamespaceLoaded: functionSchema.optional(),
5336
- onTailorDBReady: functionSchema.optional(),
5337
- onResolverReady: functionSchema.optional(),
5338
- onExecutorReady: functionSchema.optional()
5339
- }).passthrough().refine((p) => {
5340
- return !(p.onTypeLoaded || p.onNamespaceLoaded) || !!p.importPath;
5341
- }, { message: "importPath is required when plugin has definition-time hooks (onTypeLoaded/onNamespaceLoaded)" }).transform((plugin) => plugin);
5342
-
5343
- //#endregion
5344
- //#region src/plugin/builtin/registry.ts
5345
- const builtinPlugins = new Map([
5346
- [KyselyGeneratorID, (options) => kyselyTypePlugin(options)],
5347
- [SeedGeneratorID, (options) => seedPlugin(options)],
5348
- [EnumConstantsGeneratorID, (options) => enumConstantsPlugin(options)],
5349
- [FileUtilsGeneratorID, (options) => fileUtilsPlugin(options)]
5350
- ]);
5351
-
5352
- //#endregion
5353
- //#region src/cli/shared/mock.ts
5354
- globalThis.tailordb = { Client: class {
5355
- constructor(_config) {}
5356
- async connect() {}
5357
- async end() {}
5358
- async queryObject() {
5359
- return {};
5360
- }
5361
- } };
5362
-
5363
- //#endregion
5364
- //#region src/cli/shared/config-loader.ts
5365
- const GeneratorConfigSchema = CodeGeneratorSchema.brand("CodeGenerator");
5366
- /**
5367
- * Load Tailor configuration file and associated generators and plugins.
5368
- * @param configPath - Optional explicit config path
5369
- * @returns Loaded config, generators, plugins, and config path
5370
- */
5371
- async function loadConfig(configPath) {
5372
- const foundPath = loadConfigPath(configPath);
5373
- if (!foundPath) throw new Error("Configuration file not found: tailor.config.ts not found in current or parent directories");
5374
- const resolvedPath = path.resolve(process.cwd(), foundPath);
5375
- if (!fs$1.existsSync(resolvedPath)) throw new Error(`Configuration file not found: ${configPath}`);
5376
- const configModule = await import(pathToFileURL(resolvedPath).href);
5377
- if (!configModule || !configModule.default) throw new Error("Invalid Tailor config module: default export not found");
5378
- const allGenerators = [];
5379
- const allPlugins = [];
5380
- for (const value of Object.values(configModule)) if (Array.isArray(value)) {
5381
- const generatorParsed = value.reduce((acc, item) => {
5382
- if (!acc.success) return acc;
5383
- const baseResult = BaseGeneratorConfigSchema.safeParse(item);
5384
- if (baseResult.success && Array.isArray(baseResult.data)) {
5385
- const [id, options] = baseResult.data;
5386
- const pluginFactory = builtinPlugins.get(id);
5387
- if (pluginFactory) {
5388
- acc.convertedPlugins.push(pluginFactory(options));
5389
- return acc;
5390
- }
5391
- }
5392
- const result = GeneratorConfigSchema.safeParse(item);
5393
- if (result.success) acc.items.push(result.data);
5394
- else acc.success = false;
5395
- return acc;
5396
- }, {
5397
- success: true,
5398
- items: [],
5399
- convertedPlugins: []
5400
- });
5401
- if (generatorParsed.success && (generatorParsed.items.length > 0 || generatorParsed.convertedPlugins.length > 0)) {
5402
- allGenerators.push(...generatorParsed.items);
5403
- allPlugins.push(...generatorParsed.convertedPlugins);
5404
- continue;
5405
- }
5406
- const pluginParsed = value.reduce((acc, item) => {
5407
- if (!acc.success) return acc;
5408
- const result = PluginConfigSchema.safeParse(item);
5409
- if (result.success) acc.items.push(result.data);
5410
- else acc.success = false;
5411
- return acc;
5412
- }, {
5413
- success: true,
5414
- items: []
5415
- });
5416
- if (pluginParsed.success && pluginParsed.items.length > 0) allPlugins.push(...pluginParsed.items);
5417
- }
5418
- return {
5419
- config: {
5420
- ...configModule.default,
5421
- path: resolvedPath
5422
- },
5423
- generators: allGenerators,
5424
- plugins: allPlugins
5425
- };
5426
- }
5427
-
5428
- //#endregion
5429
- //#region src/cli/shared/inline-sourcemap.ts
5430
- /**
5431
- * Resolve whether inline sourcemaps should be enabled.
5432
- *
5433
- * Resolution order:
5434
- * 1. Config value (`inlineSourcemap` in defineConfig) — if explicitly set
5435
- * 2. Environment variable `TAILOR_ENABLE_INLINE_SOURCEMAP` — if explicitly set
5436
- * 3. Default: `true`
5437
- * @param configValue - The `inlineSourcemap` value from AppConfig
5438
- * @returns Whether inline sourcemaps should be enabled
5439
- */
5440
- function resolveInlineSourcemap(configValue) {
5441
- if (configValue !== void 0) return configValue;
5442
- if (process.env.TAILOR_ENABLE_INLINE_SOURCEMAP !== void 0) return process.env.TAILOR_ENABLE_INLINE_SOURCEMAP === "true";
5443
- return true;
5444
- }
5445
-
5446
- //#endregion
5447
- //#region src/parser/service/idp/schema.ts
5448
- /**
5449
- * Normalize IdPGqlOperationsConfig (alias or object) to IdPGqlOperations object.
5450
- * "query" alias expands to read-only mode: { create: false, update: false, delete: false, read: true, sendPasswordResetEmail: false }
5451
- * @param config - The config to normalize
5452
- * @returns The normalized IdPGqlOperations object
5453
- */
5454
- function normalizeIdPGqlOperations(config) {
5455
- if (config === "query") return {
5456
- create: false,
5457
- update: false,
5458
- delete: false,
5459
- read: true,
5460
- sendPasswordResetEmail: false
5461
- };
5462
- return config;
5463
- }
5464
- /**
5465
- * Zod schema for IdPGqlOperations configuration with normalization transform.
5466
- * Accepts "query" alias or detailed object, normalizes to IdPGqlOperations object.
5467
- */
5468
- const IdPGqlOperationsSchema = z.union([z.literal("query"), z.object({
5469
- create: z.boolean().optional().describe("Enable _createUser mutation (default: true)"),
5470
- update: z.boolean().optional().describe("Enable _updateUser mutation (default: true)"),
5471
- delete: z.boolean().optional().describe("Enable _deleteUser mutation (default: true)"),
5472
- read: z.boolean().optional().describe("Enable _users and _user queries (default: true)"),
5473
- sendPasswordResetEmail: z.boolean().optional().describe("Enable _sendPasswordResetEmail mutation (default: true)")
5474
- })]).describe("Configuration for GraphQL operations on IdP users.\nAll operations are enabled by default (undefined or true = enabled, false = disabled).").transform((val) => normalizeIdPGqlOperations(val));
5475
- const IdPLangSchema = z.enum(["en", "ja"]).describe("IdP UI language");
5476
- const IdPUserAuthPolicySchema = z.object({
5477
- useNonEmailIdentifier: z.boolean().optional().describe("Use non-email identifier for usernames"),
5478
- allowSelfPasswordReset: z.boolean().optional().describe("Allow users to reset their own passwords"),
5479
- passwordRequireUppercase: z.boolean().optional().describe("Require uppercase letters in passwords"),
5480
- passwordRequireLowercase: z.boolean().optional().describe("Require lowercase letters in passwords"),
5481
- passwordRequireNonAlphanumeric: z.boolean().optional().describe("Require non-alphanumeric characters in passwords"),
5482
- passwordRequireNumeric: z.boolean().optional().describe("Require numeric characters in passwords"),
5483
- passwordMinLength: z.number().int().refine((val) => val >= 6 && val <= 30, { message: "passwordMinLength must be between 6 and 30" }).optional().describe("Minimum password length (6-30)"),
5484
- passwordMaxLength: z.number().int().refine((val) => val >= 6 && val <= 4096, { message: "passwordMaxLength must be between 6 and 4096" }).optional().describe("Maximum password length (6-4096)"),
5485
- allowedEmailDomains: z.array(z.string()).optional().describe("Restrict registration to these email domains"),
5486
- allowGoogleOauth: z.boolean().optional().describe("Enable Google OAuth login"),
5487
- allowMicrosoftOauth: z.boolean().optional().describe("Enable Microsoft OAuth login"),
5488
- disablePasswordAuth: z.boolean().optional().describe("Disable password-based authentication")
5489
- }).refine((data) => data.passwordMinLength === void 0 || data.passwordMaxLength === void 0 || data.passwordMinLength <= data.passwordMaxLength, {
5490
- message: "passwordMinLength must be less than or equal to passwordMaxLength",
5491
- path: ["passwordMinLength"]
5492
- }).refine((data) => !data.allowedEmailDomains || data.allowedEmailDomains.length === 0 || !data.useNonEmailIdentifier, {
5493
- message: "allowedEmailDomains cannot be set when useNonEmailIdentifier is true",
5494
- path: ["allowedEmailDomains"]
5495
- }).refine((data) => data.allowGoogleOauth === void 0 || data.allowGoogleOauth === false || !data.useNonEmailIdentifier, {
5496
- message: "allowGoogleOauth cannot be set when useNonEmailIdentifier is true",
5497
- path: ["allowGoogleOauth"]
5498
- }).refine((data) => !data.allowGoogleOauth || data.allowedEmailDomains && data.allowedEmailDomains.length > 0, {
5499
- message: "allowGoogleOauth requires allowedEmailDomains to be set",
5500
- path: ["allowGoogleOauth"]
5501
- }).refine((data) => !data.allowMicrosoftOauth || !data.useNonEmailIdentifier, {
5502
- message: "allowMicrosoftOauth cannot be set when useNonEmailIdentifier is true",
5503
- path: ["allowMicrosoftOauth"]
5504
- }).refine((data) => !data.allowMicrosoftOauth || data.allowedEmailDomains && data.allowedEmailDomains.length > 0, {
5505
- message: "allowMicrosoftOauth requires allowedEmailDomains to be set",
5506
- path: ["allowMicrosoftOauth"]
5507
- }).refine((data) => !data.allowMicrosoftOauth || data.disablePasswordAuth === true, {
5508
- message: "allowMicrosoftOauth requires disablePasswordAuth to be enabled",
5509
- path: ["allowMicrosoftOauth"]
5510
- }).refine((data) => !data.disablePasswordAuth || data.allowGoogleOauth === true || data.allowMicrosoftOauth === true, {
5511
- message: "disablePasswordAuth requires allowGoogleOauth or allowMicrosoftOauth to be enabled",
5512
- path: ["disablePasswordAuth"]
5513
- }).refine((data) => !data.disablePasswordAuth || !data.allowSelfPasswordReset, {
5514
- message: "disablePasswordAuth cannot be used with allowSelfPasswordReset",
5515
- path: ["disablePasswordAuth"]
5516
- });
5517
- const IdPSchema = z.object({
5518
- name: z.string().describe("IdP service name"),
5519
- authorization: z.union([
5520
- z.literal("insecure"),
5521
- z.literal("loggedIn"),
5522
- z.object({ cel: z.string() })
5523
- ]).describe("Authorization mode for IdP API access"),
5524
- clients: z.array(z.string()).describe("OAuth2 client names that can use this IdP"),
5525
- lang: IdPLangSchema.optional().describe("UI language for IdP pages"),
5526
- userAuthPolicy: IdPUserAuthPolicySchema.transform((input) => IdPUserAuthPolicySchema.parse(input ?? {})).optional().describe("User authentication policy configuration"),
5527
- publishUserEvents: z.boolean().optional().describe("Enable publishing user lifecycle events"),
5528
- gqlOperations: IdPGqlOperationsSchema.optional().describe("Configure which GraphQL operations are enabled")
5529
- }).brand("IdPConfig");
5530
-
5531
- //#endregion
5532
- //#region src/parser/service/secrets/schema.ts
5533
- const nameSchema = z.string().regex(/^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$/);
5534
- const secretsVaultSchema = z.record(nameSchema, z.string());
5535
- const SecretsSchema = z.record(nameSchema, secretsVaultSchema);
5536
-
5537
- //#endregion
5538
- //#region src/parser/service/staticwebsite/schema.ts
5539
- const StaticWebsiteSchema = z.object({
5540
- name: z.string().describe("Static website name"),
5541
- description: z.string().optional().describe("Static website description"),
5542
- allowedIpAddresses: z.array(z.string()).optional().describe("IP addresses allowed to access the website")
5543
- }).brand("StaticWebsiteConfig");
5544
-
5545
- //#endregion
5546
- //#region src/cli/services/application.ts
5547
- function defineTailorDB(config, pluginManager) {
5548
- const tailorDBServices = [];
5549
- const externalTailorDBNamespaces = [];
5550
- const subgraphs = [];
5551
- if (!config) return {
5552
- tailorDBServices,
5553
- externalTailorDBNamespaces,
5554
- subgraphs
5555
- };
5556
- for (const [namespace, serviceConfig] of Object.entries(config)) {
5557
- if ("external" in serviceConfig) externalTailorDBNamespaces.push(namespace);
5558
- else {
5559
- const tailorDB = createTailorDBService({
5560
- namespace,
5561
- config: TailorDBServiceConfigSchema.parse(serviceConfig),
5562
- pluginManager
5563
- });
5564
- tailorDBServices.push(tailorDB);
5565
- }
5566
- subgraphs.push({
5567
- Type: "tailordb",
5568
- Name: namespace
5569
- });
5570
- }
5571
- return {
5572
- tailorDBServices,
5573
- externalTailorDBNamespaces,
5574
- subgraphs
5575
- };
5576
- }
5577
- function defineResolver(config) {
5578
- const resolverServices = [];
5579
- const subgraphs = [];
5580
- if (!config) return {
5581
- resolverServices,
5582
- subgraphs
5583
- };
5584
- for (const [namespace, serviceConfig] of Object.entries(config)) {
5585
- if (!("external" in serviceConfig)) {
5586
- const resolverService = createResolverService(namespace, serviceConfig);
5587
- resolverServices.push(resolverService);
5588
- }
5589
- subgraphs.push({
5590
- Type: "pipeline",
5591
- Name: namespace
5592
- });
5593
- }
5594
- return {
5595
- resolverServices,
5596
- subgraphs
5597
- };
5598
- }
5599
- function defineIdp(config) {
5600
- const idpServices = [];
5601
- const subgraphs = [];
5602
- if (!config) return {
5603
- idpServices,
5604
- subgraphs
5605
- };
5606
- const idpNames = /* @__PURE__ */ new Set();
5607
- config.forEach((idpConfig) => {
5608
- const name = idpConfig.name;
5609
- if (idpNames.has(name)) throw new Error(`IdP with name "${name}" already defined.`);
5610
- idpNames.add(name);
5611
- if (!("external" in idpConfig)) {
5612
- const idp = IdPSchema.parse(idpConfig);
5613
- idpServices.push(idp);
5614
- }
5615
- subgraphs.push({
5616
- Type: "idp",
5617
- Name: name
5618
- });
5619
- });
5620
- return {
5621
- idpServices,
5622
- subgraphs
5623
- };
5624
- }
5625
- function defineAuth(config, tailorDBServices, externalTailorDBNamespaces) {
5626
- const subgraphs = [];
5627
- if (!config) return {
5628
- authService: void 0,
5629
- subgraphs
5630
- };
5631
- let authService;
5632
- if (!("external" in config)) authService = createAuthService(config, tailorDBServices, externalTailorDBNamespaces);
5633
- subgraphs.push({
5634
- Type: "auth",
5635
- Name: config.name
5636
- });
5637
- return {
5638
- authService,
5639
- subgraphs
5640
- };
5641
- }
5642
- function defineExecutor(config, hasPluginExecutors) {
5643
- if (!config && !hasPluginExecutors) return;
5644
- return createExecutorService({ config: config ?? { files: [] } });
5645
- }
5646
- function defineWorkflow(config) {
5647
- if (!config) return;
5648
- return createWorkflowService({ config });
5649
- }
5650
- function defineStaticWebsites(websites) {
5651
- const staticWebsiteServices = [];
5652
- const websiteNames = /* @__PURE__ */ new Set();
5653
- (websites ?? []).forEach((config) => {
5654
- const website = StaticWebsiteSchema.parse(config);
5655
- if (websiteNames.has(website.name)) throw new Error(`Static website with name "${website.name}" already defined.`);
5656
- websiteNames.add(website.name);
5657
- staticWebsiteServices.push(website);
5658
- });
5659
- return staticWebsiteServices;
5660
- }
5661
- function defineSecretManager(config) {
5662
- if (!config) return [];
5663
- const data = Object.fromEntries(Object.entries(config));
5664
- const parsed = SecretsSchema.parse(data);
5665
- return Object.entries(parsed).map(([vaultName, vaultSecrets]) => ({
5666
- vaultName,
5667
- secrets: Object.entries(vaultSecrets).map(([name, value]) => ({
5668
- name,
5669
- value
5670
- }))
5671
- }));
5672
- }
5673
- function defineServices(config, pluginManager) {
5674
- const tailordbResult = defineTailorDB(config.db, pluginManager);
5675
- return {
5676
- tailordbResult,
5677
- resolverResult: defineResolver(config.resolver),
5678
- idpResult: defineIdp(config.idp),
5679
- authResult: defineAuth(config.auth, tailordbResult.tailorDBServices, tailordbResult.externalTailorDBNamespaces),
5680
- staticWebsiteServices: defineStaticWebsites(config.staticWebsites),
5681
- secrets: defineSecretManager(config.secrets)
5682
- };
5683
- }
5684
- function buildApplication(params) {
5685
- const application = {
5686
- name: params.config.name,
5687
- config: params.config,
5688
- subgraphs: [
5689
- ...params.tailordbResult.subgraphs,
5690
- ...params.resolverResult.subgraphs,
5691
- ...params.idpResult.subgraphs,
5692
- ...params.authResult.subgraphs
5693
- ],
5694
- tailorDBServices: params.tailordbResult.tailorDBServices,
5695
- externalTailorDBNamespaces: params.tailordbResult.externalTailorDBNamespaces,
5696
- resolverServices: params.resolverResult.resolverServices,
5697
- idpServices: params.idpResult.idpServices,
5698
- authService: params.authResult.authService,
5699
- executorService: params.executorService,
5700
- workflowService: params.workflowService,
5701
- staticWebsiteServices: params.staticWebsiteServices,
5702
- secrets: params.secrets,
5703
- env: params.env,
5704
- get applications() {
5705
- return [application];
5706
- }
5707
- };
5708
- return application;
5709
- }
5710
- /**
5711
- * Define a Tailor application from the given configuration.
5712
- * This is a lightweight, synchronous function that creates the application
5713
- * structure without loading types or bundling files.
5714
- * @param params - Parameters for defining the application
5715
- * @returns Configured application instance
5716
- */
5717
- function defineApplication(params) {
5718
- const { config, pluginManager } = params;
5719
- const services = defineServices(config, pluginManager);
5720
- const executorService = defineExecutor(config.executor, false);
5721
- const workflowService = defineWorkflow(config.workflow);
5722
- return buildApplication({
5723
- config,
5724
- ...services,
5725
- executorService,
5726
- workflowService,
5727
- env: config.env ?? {}
5728
- });
5729
- }
5730
- /**
5731
- * Generate plugin type and executor files if a plugin manager is provided.
5732
- * Collects source type info from TailorDB services and delegates to PluginManager.
5733
- * @param pluginManager - Plugin manager instance (skips if undefined)
5734
- * @param tailorDBServices - TailorDB services to collect type source info from
5735
- * @param configPath - Path to tailor.config.ts for resolving plugin imports
5736
- * @returns Generated executor file paths
5737
- */
5738
- function generatePluginFilesIfNeeded(pluginManager, tailorDBServices, configPath) {
5739
- if (!pluginManager) return [];
5740
- const sourceTypeInfoMap = /* @__PURE__ */ new Map();
5741
- for (const db of tailorDBServices) {
5742
- const typeSourceInfo = db.typeSourceInfo;
5743
- for (const [typeName, sourceInfo] of Object.entries(typeSourceInfo)) if (sourceInfo.filePath) sourceTypeInfoMap.set(typeName, {
5744
- filePath: sourceInfo.filePath,
5745
- exportName: sourceInfo.exportName
5746
- });
5747
- }
5748
- return pluginManager.generatePluginFiles({
5749
- outputDir: path.join(getDistDir(), "plugin"),
5750
- sourceTypeInfoMap,
5751
- configPath,
5752
- typeGenerator: generatePluginTypeFiles,
5753
- executorGenerator: generatePluginExecutorFiles
5754
- });
5755
- }
5756
- /**
5757
- * Load and fully initialize a Tailor application.
5758
- * This performs all I/O-heavy operations: loading types, processing plugins,
5759
- * generating plugin files, bundling, and loading definitions for validation.
5760
- * @param params - Parameters for defining and loading the application
5761
- * @returns Fully initialized application with workflow results
5762
- */
5763
- async function loadApplication(params) {
5764
- const { config, pluginManager, bundleCache } = params;
5765
- const { tailordbResult, resolverResult, idpResult, authResult, staticWebsiteServices, secrets } = defineServices(config, pluginManager);
5766
- for (const tailordb of tailordbResult.tailorDBServices) {
5767
- await tailordb.loadTypes();
5768
- await tailordb.processNamespacePlugins();
5769
- }
5770
- const pluginExecutorFiles = generatePluginFilesIfNeeded(pluginManager, tailordbResult.tailorDBServices, config.path);
5771
- const executorService = defineExecutor(config.executor, pluginExecutorFiles.length > 0);
5772
- const workflowService = defineWorkflow(config.workflow);
5773
- if (workflowService) await workflowService.loadWorkflows();
5774
- const triggerContext = await buildTriggerContext(config.workflow);
5775
- const inlineSourcemap = resolveInlineSourcemap(config.inlineSourcemap);
5776
- for (const pipeline of resolverResult.resolverServices) await bundleResolvers(pipeline.namespace, pipeline.config, triggerContext, bundleCache, inlineSourcemap);
5777
- if (executorService) await bundleExecutors({
5778
- config: executorService.config,
5779
- triggerContext,
5780
- additionalFiles: [...pluginExecutorFiles],
5781
- cache: bundleCache,
5782
- inlineSourcemap
5783
- });
5784
- let workflowBuildResult;
5785
- if (workflowService && workflowService.jobs.length > 0) {
5786
- const mainJobNames = workflowService.workflowSources.map((ws) => ws.workflow.mainJob.name);
5787
- workflowBuildResult = await bundleWorkflowJobs(workflowService.jobs, mainJobNames, config.env ?? {}, triggerContext, bundleCache, inlineSourcemap);
5788
- }
5789
- if (authResult.authService?.config.hooks?.beforeLogin) {
5790
- const authName = authResult.authService.config.name;
5791
- await bundleAuthHooks({
5792
- configPath: config.path,
5793
- authName,
5794
- handlerAccessPath: `auth.hooks.beforeLogin.handler`,
5795
- triggerContext,
5796
- cache: bundleCache,
5797
- inlineSourcemap
5798
- });
5799
- }
5800
- for (const pipeline of resolverResult.resolverServices) await pipeline.loadResolvers();
5801
- if (executorService) {
5802
- await executorService.loadExecutors();
5803
- if (pluginExecutorFiles.length > 0) await executorService.loadPluginExecutorFiles([...pluginExecutorFiles]);
5804
- }
5805
- if (workflowService) workflowService.printLoadedWorkflows();
5806
- logger.newline();
5807
- return {
5808
- application: buildApplication({
5809
- config,
5810
- tailordbResult,
5811
- resolverResult,
5812
- idpResult,
5813
- authResult,
5814
- executorService,
5815
- workflowService,
5816
- staticWebsiteServices,
5817
- secrets,
5818
- env: config.env ?? {}
5819
- }),
5820
- workflowBuildResult
5821
- };
5822
- }
5823
-
5824
- //#endregion
5825
- export { AuthSCIMAttribute_Type as $, userAgent as A, PipelineResolver_OperationType as B, fetchAll as C, initOperatorClient as D, initOAuth2Client as E, TailorDBGQLPermission_Operator as F, ExecutorTargetType as G, FunctionExecution_Status as H, TailorDBGQLPermission_Permit as I, AuthIDPConfig_AuthType as J, ExecutorTriggerType as K, TailorDBType_Permission_Operator as L, WorkflowExecution_Status as M, WorkflowJobExecution_Status as N, platformBaseUrl as O, TailorDBGQLPermission_Action as P, AuthSCIMAttribute_Mutability as Q, TailorDBType_Permission_Permit as R, writePlatformConfig as S, fetchUserInfo as T, FunctionExecution_Type as U, IdPLang as V, ExecutorJobStatus as W, AuthOAuth2Client_ClientType as X, AuthInvokerSchema$1 as Y, AuthOAuth2Client_GrantType as Z, hashFile as _, loadConfig as a, GetApplicationSchemaHealthResponse_ApplicationSchemaHealthStatus as at, loadWorkspaceId as b, ExecutorSchema as c, FilterSchema as ct, TailorDBTypeSchema as d, Subgraph_ServiceType as dt, AuthSCIMAttribute_Uniqueness as et, stringifyFunction as f, CIPromptError as ft, getDistDir as g, createBundleCache as h, symbols as ht, resolveInlineSourcemap as i, UserProfileProviderConfig_UserProfileProviderType as it, WorkspacePlatformUserRole as j, resolveStaticWebsiteUrls as k, OAuth2ClientSchema as l, PageDirection as lt, loadFilesWithIgnores as m, styles as mt, generatePluginFilesIfNeeded as n, PATScope as nt, WorkflowJobSchema as o, ConditionSchema as ot, tailorUserMap as p, logger as pt, AuthHookPoint as q, loadApplication as r, TenantProviderConfig_TenantProviderType as rt, createExecutorService as s, Condition_Operator as st, defineApplication as t, AuthSCIMConfig_AuthorizationType as tt, ResolverSchema as u, ApplicationSchemaUpdateAttemptStatus as ut, fetchLatestToken as v, fetchMachineUserToken as w, readPlatformConfig as x, loadAccessToken as y, TailorDBType_PermitAction as z };
5826
- //# sourceMappingURL=application-D9xahQRQ.mjs.map
956
+ export { AuthInvokerSchema as A, GetApplicationSchemaHealthResponse_ApplicationSchemaHealthStatus as B, FunctionExecution_Status as C, ExecutorTriggerType as D, ExecutorTargetType as E, AuthSCIMAttribute_Uniqueness as F, ApplicationSchemaUpdateAttemptStatus as G, Condition_Operator as H, AuthSCIMConfig_AuthorizationType as I, Subgraph_ServiceType as K, PATScope as L, AuthOAuth2Client_GrantType as M, AuthSCIMAttribute_Mutability as N, AuthHookPoint as O, AuthSCIMAttribute_Type as P, TenantProviderConfig_TenantProviderType as R, IdPLang as S, ExecutorJobStatus as T, FilterSchema as U, ConditionSchema as V, PageDirection as W, TailorDBGQLPermission_Permit as _, formatRequestParams as a, TailorDBType_PermitAction as b, parseMethodName as c, userAgent as d, WorkspacePlatformUserRole as f, TailorDBGQLPermission_Operator as g, TailorDBGQLPermission_Action as h, fetchUserInfo as i, AuthOAuth2Client_ClientType as j, AuthIDPConfig_AuthType as k, platformBaseUrl as l, WorkflowJobExecution_Status as m, fetchAll as n, initOAuth2Client as o, WorkflowExecution_Status as p, fetchMachineUserToken as r, initOperatorClient as s, MAX_PAGE_SIZE as t, resolveStaticWebsiteUrls as u, TailorDBType_Permission_Operator as v, FunctionExecution_Type as w, PipelineResolver_OperationType as x, TailorDBType_Permission_Permit as y, UserProfileProviderConfig_UserProfileProviderType as z };
957
+ //# sourceMappingURL=client-bTbnbQbB.mjs.map