@tailor-platform/sdk 1.25.4 → 1.27.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. package/CHANGELOG.md +28 -0
  2. package/dist/application-CBJFUKrU.mjs +4701 -0
  3. package/dist/application-CBJFUKrU.mjs.map +1 -0
  4. package/dist/application-WyZetOky.mjs +11 -0
  5. package/dist/cli/index.mjs +290 -32
  6. package/dist/cli/index.mjs.map +1 -1
  7. package/dist/cli/lib.d.mts +350 -8
  8. package/dist/cli/lib.mjs +10 -8
  9. package/dist/cli/lib.mjs.map +1 -1
  10. package/dist/client-C2_wgujH.mjs +6 -0
  11. package/dist/{application-91Th6tm6.mjs → client-bTbnbQbB.mjs} +24 -4795
  12. package/dist/client-bTbnbQbB.mjs.map +1 -0
  13. package/dist/configure/index.d.mts +5 -5
  14. package/dist/configure/index.mjs.map +1 -1
  15. package/dist/crash-report-Cot_9Esm.mjs +6 -0
  16. package/dist/crash-report-Ju8cQF-l.mjs +414 -0
  17. package/dist/crash-report-Ju8cQF-l.mjs.map +1 -0
  18. package/dist/{enum-constants-6uK0VI_s.mjs → enum-constants-D1nfn0qD.mjs} +1 -1
  19. package/dist/{enum-constants-6uK0VI_s.mjs.map → enum-constants-D1nfn0qD.mjs.map} +1 -1
  20. package/dist/{env-uBeVwE9B.d.mts → env-BuMbIknz.d.mts} +2 -2
  21. package/dist/{file-utils-2T9w20FP.mjs → file-utils-Bctuzn3x.mjs} +1 -1
  22. package/dist/{file-utils-2T9w20FP.mjs.map → file-utils-Bctuzn3x.mjs.map} +1 -1
  23. package/dist/{index-BD-K97-C.d.mts → index-B0Lrzywd.d.mts} +2 -2
  24. package/dist/{index-cZilKprY.d.mts → index-CbnLNm14.d.mts} +2 -2
  25. package/dist/{index-D1J5SfyK.d.mts → index-CyapgSFI.d.mts} +2 -2
  26. package/dist/{index-CT53egux.d.mts → index-D1AM_02Y.d.mts} +2 -2
  27. package/dist/{index-Bu12qy3m.d.mts → index-cD9sQLTh.d.mts} +15 -15
  28. package/dist/{interceptor-BPiIBTk_.mjs → interceptor-B0d_GrI5.mjs} +1 -1
  29. package/dist/{interceptor-BPiIBTk_.mjs.map → interceptor-B0d_GrI5.mjs.map} +1 -1
  30. package/dist/{kysely-type-cMNbsQ6k.mjs → kysely-type-B_IecdK9.mjs} +1 -1
  31. package/dist/{kysely-type-cMNbsQ6k.mjs.map → kysely-type-B_IecdK9.mjs.map} +1 -1
  32. package/dist/logger-CqezTedh.mjs +181 -0
  33. package/dist/logger-CqezTedh.mjs.map +1 -0
  34. package/dist/{package-json-CVUv8Y9T.mjs → package-json-D3x2nBPB.mjs} +1 -1
  35. package/dist/{package-json-CVUv8Y9T.mjs.map → package-json-D3x2nBPB.mjs.map} +1 -1
  36. package/dist/package-json-DHfTiUCS.mjs +4 -0
  37. package/dist/plugin/builtin/enum-constants/index.d.mts +1 -1
  38. package/dist/plugin/builtin/enum-constants/index.mjs +1 -1
  39. package/dist/plugin/builtin/file-utils/index.d.mts +1 -1
  40. package/dist/plugin/builtin/file-utils/index.mjs +1 -1
  41. package/dist/plugin/builtin/kysely-type/index.d.mts +1 -1
  42. package/dist/plugin/builtin/kysely-type/index.mjs +1 -1
  43. package/dist/plugin/builtin/seed/index.d.mts +1 -1
  44. package/dist/plugin/builtin/seed/index.mjs +1 -1
  45. package/dist/plugin/index.d.mts +2 -2
  46. package/dist/{plugin-zY5wvV82.d.mts → plugin-D3a0-qe0.d.mts} +20 -4
  47. package/dist/{query-kb_4EQp4.mjs → query-CgGbAmUg.mjs} +460 -361
  48. package/dist/query-CgGbAmUg.mjs.map +1 -0
  49. package/dist/{seed-CCVRLibh.mjs → seed-CWkIDWMb.mjs} +1 -1
  50. package/dist/{seed-CCVRLibh.mjs.map → seed-CWkIDWMb.mjs.map} +1 -1
  51. package/dist/{telemetry-DDQZRqHK.mjs → telemetry-BevrwWwF.mjs} +1 -1
  52. package/dist/{telemetry-0w8OupuQ.mjs → telemetry-VvNfsyEE.mjs} +2 -2
  53. package/dist/{telemetry-0w8OupuQ.mjs.map → telemetry-VvNfsyEE.mjs.map} +1 -1
  54. package/dist/utils/test/index.d.mts +2 -2
  55. package/dist/{workflow.generated-v1LXRuB6.d.mts → workflow.generated-BsgIlrH-.d.mts} +2 -2
  56. package/docs/cli/crash-report.md +107 -0
  57. package/docs/cli/setup.md +82 -0
  58. package/docs/cli-reference.md +19 -0
  59. package/docs/services/auth.md +33 -0
  60. package/docs/services/resolver.md +32 -0
  61. package/package.json +4 -4
  62. package/dist/application-91Th6tm6.mjs.map +0 -1
  63. package/dist/application-DegTCDd8.mjs +0 -9
  64. package/dist/package-json-Bj76LPsV.mjs +0 -4
  65. package/dist/query-kb_4EQp4.mjs.map +0 -1
@@ -0,0 +1,4701 @@
1
+ import { n as isSdkBranded } from "./brand-GZnI4eYb.mjs";
2
+ import { n as logger, r as styles } from "./logger-CqezTedh.mjs";
3
+ import { o as initOAuth2Client } from "./client-bTbnbQbB.mjs";
4
+ import { n as seedPlugin, r as isPluginGeneratedType, t as SeedGeneratorID } from "./seed-CWkIDWMb.mjs";
5
+ import { n as enumConstantsPlugin, t as EnumConstantsGeneratorID } from "./enum-constants-D1nfn0qD.mjs";
6
+ import { n as fileUtilsPlugin, t as FileUtilsGeneratorID } from "./file-utils-Bctuzn3x.mjs";
7
+ import { n as kyselyTypePlugin, t as KyselyGeneratorID } from "./kysely-type-B_IecdK9.mjs";
8
+ import { createRequire } from "node:module";
9
+ import { z } from "zod";
10
+ import * as fs$1 from "node:fs";
11
+ import { mkdirSync, readFileSync, rmSync, writeFileSync } from "node:fs";
12
+ import * as path from "pathe";
13
+ import { join, resolve } from "pathe";
14
+ import { resolveTSConfig } from "pkg-types";
15
+ import * as os from "node:os";
16
+ import { parseTOML, parseYAML, stringifyYAML } from "confbox";
17
+ import { findUpSync } from "find-up-simple";
18
+ import ml from "multiline-ts";
19
+ import { xdgConfig } from "xdg-basedir";
20
+ import * as crypto from "node:crypto";
21
+ import * as rolldown from "rolldown";
22
+ import * as fs from "node:fs/promises";
23
+ import { parseSync } from "oxc-parser";
24
+ import { pathToFileURL } from "node:url";
25
+ import * as inflection from "inflection";
26
+ import * as globals from "globals";
27
+
28
//#region src/cli/shared/context.ts
// Schema for the Tailor Platform CLI config file (stored as YAML under the
// XDG config directory). `users` maps a user name to its OAuth token set,
// `profiles` maps a profile name to a user/workspace pair, and
// `current_user` names the default login (null when nobody is logged in).
const pfConfigSchema = z.object({
	version: z.literal(1),
	users: z.partialRecord(z.string(), z.object({
		access_token: z.string(),
		refresh_token: z.string(),
		token_expires_at: z.string()
	})),
	profiles: z.partialRecord(z.string(), z.object({
		user: z.string(),
		workspace_id: z.string()
	})),
	current_user: z.string().nullable()
});
42
/**
 * Compute the absolute path of the platform CLI config file.
 * @returns Path to config.yaml under the XDG config directory
 * @throws When the XDG config base directory cannot be determined
 */
function platformConfigPath() {
	const baseDir = xdgConfig;
	if (!baseDir) throw new Error("User home directory not found");
	return path.join(baseDir, "tailor-platform", "config.yaml");
}
46
/**
 * Read Tailor Platform CLI configuration, migrating from tailorctl if necessary.
 * @returns Parsed platform configuration
 */
function readPlatformConfig() {
	const configPath = platformConfigPath();
	if (!fs$1.existsSync(configPath)) {
		// First run (or config deleted): seed from a legacy tailorctl config
		// when one exists, otherwise start with an empty v1 config.
		logger.warn(`Config not found at ${configPath}, migrating from tailorctl config...`);
		const tcConfig = readTailorctlConfig();
		const pfConfig = tcConfig ? fromTailorctlConfig(tcConfig) : {
			version: 1,
			users: {},
			profiles: {},
			current_user: null
		};
		// Persist immediately so subsequent reads take the fast path.
		writePlatformConfig(pfConfig);
		return pfConfig;
	}
	const rawConfig = parseYAML(fs$1.readFileSync(configPath, "utf-8"));
	// Validate on every read; a malformed config file fails loudly here.
	return pfConfigSchema.parse(rawConfig);
}
67
/**
 * Write Tailor Platform CLI configuration to disk.
 * Creates the parent directory if it does not exist yet.
 * @param config - Platform configuration to write
 */
function writePlatformConfig(config) {
	const configPath = platformConfigPath();
	const parentDir = path.dirname(configPath);
	fs$1.mkdirSync(parentDir, { recursive: true });
	const serialized = stringifyYAML(config);
	fs$1.writeFileSync(configPath, serialized);
}
76
// Schema for a single tailorctl context section; every field is optional
// because partially written TOML contexts are tolerated (and skipped later).
const tcContextConfigSchema = z.object({
	username: z.string().optional(),
	controlplaneaccesstoken: z.string().optional(),
	controlplanerefreshtoken: z.string().optional(),
	controlplanetokenexpiresat: z.string().optional(),
	workspaceid: z.string().optional()
});
// Legacy tailorctl config shape: a special `global` table (selecting the
// active context) plus one arbitrarily named table per context (catchall).
const tcConfigSchema = z.object({ global: z.object({ context: z.string().optional() }).optional() }).catchall(tcContextConfigSchema.optional());
84
// Read the legacy tailorctl TOML config from ~/.tailorctl/config.
// Returns undefined when the file does not exist (nothing to migrate).
function readTailorctlConfig() {
	const configPath = path.join(os.homedir(), ".tailorctl", "config");
	if (!fs$1.existsSync(configPath)) return;
	const rawConfig = parseTOML(fs$1.readFileSync(configPath, "utf-8"));
	return tcConfigSchema.parse(rawConfig);
}
90
/**
 * Convert a legacy tailorctl config into the platform config shape.
 *
 * Contexts missing any credential field are skipped. When the same user
 * appears in several contexts, the token set with the latest expiry wins.
 * The context selected by `global.context` (default "default") determines
 * the resulting `current_user`.
 * @param config - Parsed tailorctl config
 * @returns Equivalent platform configuration (version 1)
 */
function fromTailorctlConfig(config) {
	const users = {};
	const profiles = {};
	let currentUser = null;
	const activeContext = config.global?.context || "default";
	for (const [contextName, context] of Object.entries(config)) {
		if (contextName === "global") continue;
		const isComplete = context.username && context.controlplaneaccesstoken && context.controlplanerefreshtoken && context.controlplanetokenexpiresat && context.workspaceid;
		if (!isComplete) continue;
		if (contextName === activeContext) currentUser = context.username;
		profiles[contextName] = {
			user: context.username,
			workspace_id: context.workspaceid
		};
		const existing = users[context.username];
		const isNewer = !existing || new Date(existing.token_expires_at) < new Date(context.controlplanetokenexpiresat);
		if (isNewer) {
			users[context.username] = {
				access_token: context.controlplaneaccesstoken,
				refresh_token: context.controlplanerefreshtoken,
				token_expires_at: context.controlplanetokenexpiresat
			};
		}
	}
	return {
		version: 1,
		users,
		profiles,
		current_user: currentUser
	};
}
118
/**
 * Validate that a value is a UUID, tagging failures with their origin.
 * @param value - Candidate identifier
 * @param source - Human-readable description of where the value came from
 * @returns The validated UUID string
 * @throws When the value is not a valid UUID
 */
function validateUUID(value, source) {
	const parsed = z.uuid().safeParse(value);
	if (parsed.success) return parsed.data;
	throw new Error(`Invalid value from ${source}: must be a valid UUID`);
}
123
/**
 * Load workspace ID from command options, environment variables, or platform config.
 * In CLI context, env fallback is also handled by politty's arg env option.
 * Priority: opts/workspaceId > env/workspaceId > opts/profile > error
 * @param opts - Workspace and profile options
 * @returns Resolved workspace ID
 */
function loadWorkspaceId(opts) {
	// Explicit option wins over everything else.
	if (opts?.workspaceId) return validateUUID(opts.workspaceId, "--workspace-id option");
	if (process.env.TAILOR_PLATFORM_WORKSPACE_ID) return validateUUID(process.env.TAILOR_PLATFORM_WORKSPACE_ID, "TAILOR_PLATFORM_WORKSPACE_ID environment variable");
	// NOTE(review): TAILOR_PLATFORM_PROFILE env is also consulted here, so the
	// effective priority includes env/profile even though the doc above lists
	// only opts/profile — confirm the documented ordering is intended.
	const profile = opts?.profile || process.env.TAILOR_PLATFORM_PROFILE;
	if (profile) {
		const wsId = readPlatformConfig().profiles[profile]?.workspace_id;
		if (!wsId) throw new Error(`Profile "${profile}" not found`);
		return validateUUID(wsId, `profile "${profile}"`);
	}
	throw new Error(ml`
		Workspace ID not found.
		Please specify workspace ID via --workspace-id option or TAILOR_PLATFORM_WORKSPACE_ID environment variable.
	`);
}
144
/**
 * Load access token from environment variables, command options, or platform config.
 * In CLI context, profile env fallback is also handled by politty's arg env option.
 * Priority: env/TAILOR_PLATFORM_TOKEN > env/TAILOR_TOKEN (deprecated) > opts/profile > env/profile > config/currentUser > error
 * @param opts - Profile options
 * @returns Resolved access token
 */
async function loadAccessToken(opts) {
	// Environment-provided tokens bypass the config file entirely
	// (no refresh handling is attempted for them).
	if (process.env.TAILOR_PLATFORM_TOKEN) return process.env.TAILOR_PLATFORM_TOKEN;
	if (process.env.TAILOR_TOKEN) {
		logger.warn("TAILOR_TOKEN is deprecated. Please use TAILOR_PLATFORM_TOKEN instead.");
		return process.env.TAILOR_TOKEN;
	}
	const pfConfig = readPlatformConfig();
	let user;
	// Profiles are only honored when the caller opts in via useProfile.
	const profile = opts?.useProfile ? opts.profile || process.env.TAILOR_PLATFORM_PROFILE : void 0;
	if (profile) {
		const u = pfConfig.profiles[profile]?.user;
		if (!u) throw new Error(`Profile "${profile}" not found`);
		user = u;
	} else {
		// Fall back to the user most recently logged in via the CLI.
		const u = pfConfig.current_user;
		if (!u) throw new Error(ml`
			Tailor Platform token not found.
			Please specify token via TAILOR_PLATFORM_TOKEN environment variable or login using 'tailor-sdk login' command.
		`);
		user = u;
	}
	// Refreshes (and persists) the token when it has expired.
	return await fetchLatestToken(pfConfig, user);
}
174
/**
 * Fetch the latest access token, refreshing if necessary.
 * @param config - Platform config
 * @param user - User name
 * @returns Latest access token
 */
async function fetchLatestToken(config, user) {
	const tokens = config.users[user];
	if (!tokens) throw new Error(ml`
		User "${user}" not found.
		Please verify your user name and login using 'tailor-sdk login' command.
	`);
	// Token still valid: return the cached access token as-is.
	if (new Date(tokens.token_expires_at) > /* @__PURE__ */ new Date()) return tokens.access_token;
	const client = initOAuth2Client();
	let resp;
	try {
		resp = await client.refreshToken({
			accessToken: tokens.access_token,
			refreshToken: tokens.refresh_token,
			expiresAt: Date.parse(tokens.token_expires_at)
		});
	} catch {
		// Any refresh failure is normalized into an actionable login prompt.
		throw new Error(ml`
			Failed to refresh token. Your session may have expired.
			Please run 'tailor-sdk login' and try again.
		`);
	}
	// Persist the rotated token pair so future invocations reuse it.
	config.users[user] = {
		access_token: resp.accessToken,
		refresh_token: resp.refreshToken,
		token_expires_at: new Date(resp.expiresAt).toISOString()
	};
	writePlatformConfig(config);
	return resp.accessToken;
}
209
const DEFAULT_CONFIG_FILENAME = "tailor.config.ts";
/**
 * Load config path from command options, environment variables, or search parent directories.
 * In CLI context, env fallback is also handled by politty's arg env option.
 * Priority: opts/config > env/config > search parent directories
 * @param configPath - Optional explicit config path
 * @returns Resolved config path or undefined
 */
function loadConfigPath(configPath) {
	const explicit = configPath || process.env.TAILOR_PLATFORM_SDK_CONFIG_PATH;
	if (explicit) return explicit;
	// Walk up from the current directory looking for tailor.config.ts.
	return findUpSync(DEFAULT_CONFIG_FILENAME);
}
222
+
223
+ //#endregion
224
//#region src/cli/cache/hasher.ts
/**
 * Compute the SHA-256 hex digest of an arbitrary string.
 * @param content - The string content to hash
 * @returns Hex-encoded SHA-256 hash
 */
function hashContent(content) {
	const hasher = crypto.createHash("sha256");
	hasher.update(content, "utf-8");
	return hasher.digest("hex");
}
233
/**
 * Read a file and return its SHA-256 hex digest.
 * @param filePath - Absolute path to the file
 * @returns Hex-encoded SHA-256 hash of the file content
 */
function hashFile(filePath) {
	const hasher = crypto.createHash("sha256");
	hasher.update(fs$1.readFileSync(filePath));
	return hasher.digest("hex");
}
242
/**
 * Compute a deterministic SHA-256 hash for multiple files.
 *
 * Paths are sorted alphabetically before hashing so that the result
 * is independent of the order the paths are supplied (e.g. glob ordering).
 * Each file's individual hash is concatenated and then hashed again.
 * @param filePaths - Array of absolute file paths
 * @returns Hex-encoded SHA-256 hash representing all files
 */
function hashFiles(filePaths) {
	const sortedPaths = [...filePaths].sort();
	const perFileHashes = sortedPaths.map((fp) => hashFile(fp));
	return hashContent(perFileHashes.join(""));
}
254
+
255
+ //#endregion
256
//#region src/cli/shared/dist-dir.ts
// Cached output directory; null until first resolution.
let distPath = null;
/**
 * Resolve the SDK output directory.
 * TAILOR_SDK_OUTPUT_DIR always takes effect (even if it changes between
 * calls); otherwise the first resolution is cached, defaulting to ".tailor-sdk".
 * @returns The resolved output directory
 */
const getDistDir = () => {
	const fromEnv = process.env.TAILOR_SDK_OUTPUT_DIR;
	if (fromEnv) {
		if (fromEnv !== distPath) distPath = fromEnv;
	} else if (distPath === null) {
		distPath = ".tailor-sdk";
	}
	return distPath;
};
264
+
265
+ //#endregion
266
//#region src/cli/cache/dep-collector-plugin.ts
/**
 * Create a rolldown plugin that collects all resolved module paths during a build.
 * The plugin is purely observational and does not modify any code or behavior.
 * Collected paths exclude node_modules and generated entry files.
 * node_modules changes (package upgrades) are not tracked per-bundle;
 * lockfile hash and SDK version changes invalidate the entire cache.
 * @returns An object containing the plugin and a getResult function that returns sorted, deduplicated paths
 */
function createDepCollectorPlugin() {
	const seen = /* @__PURE__ */ new Set();
	// Generated entry files and third-party modules are not cache inputs.
	const shouldCollect = (id) => !id.includes("node_modules") && !id.endsWith(".entry.js");
	const plugin = {
		name: "cache-dep-collector",
		load: {
			// Only observe ids that look like files (have an extension).
			filter: { id: { include: [/\.[^/]+$/] } },
			handler(id) {
				if (shouldCollect(id)) seen.add(id);
				// Returning null lets rolldown load the module normally.
				return null;
			}
		}
	};
	return {
		plugin,
		getResult: () => [...seen].sort()
	};
}
295
+
296
+ //#endregion
297
//#region src/cli/cache/bundle-cache.ts
// Build a namespaced cache key, e.g. "bundle:my-executor".
function buildCacheKey(kind, name) {
	return [kind, name].join(":");
}
301
// Fold an optional context hash into a file hash; with no context hash the
// file hash alone identifies the input.
function combineHash(fileHash, contextHash) {
	return contextHash ? hashContent(fileHash + contextHash) : fileHash;
}
305
/**
 * Compute a context hash for cache invalidation across bundlers.
 *
 * Combines the source file path, serialized trigger context, tsconfig hash,
 * sourcemap mode, and an optional prefix (e.g., serialized env variables)
 * into a single SHA-256 hash.
 * @param params - Context hash computation parameters
 * @returns SHA-256 hex digest of the combined context
 */
function computeBundlerContextHash(params) {
	const { sourceFile, serializedTriggerContext, tsconfig, inlineSourcemap, prefix } = params;
	const parts = [
		prefix ?? "",
		path.resolve(sourceFile),
		serializedTriggerContext,
		tsconfig ? hashFile(tsconfig) : "",
		String(inlineSourcemap ?? false)
	];
	return hashContent(parts.join(""));
}
318
/**
 * Run a build with optional cache restore/save around it.
 * When caching is active, attempts to restore from cache first,
 * and saves the build result (with collected dependencies) on a cache miss.
 * @param params - Cache and build parameters
 */
async function withCache(params) {
	const { cache, kind, name, sourceFile, outputPath, contextHash, build } = params;
	if (!cache) {
		// Caching disabled: run the build with no extra plugins.
		await build([]);
		return;
	}
	if (cache.tryRestore({
		kind,
		name,
		outputPath,
		contextHash
	})) {
		logger.debug(` ${styles.dim("cached")}: ${name}`);
		return;
	}
	// Cache miss: observe module resolution during the build so the
	// dependency set can be recorded alongside the output.
	const { plugin, getResult } = createDepCollectorPlugin();
	await build([plugin]);
	cache.save({
		kind,
		name,
		sourceFile,
		outputPath,
		dependencyPaths: getResult(),
		contextHash
	});
}
350
/**
 * Create a bundle cache backed by the given store.
 * @param store - The cache store for persistence
 * @returns A BundleCache instance
 */
function createBundleCache(store) {
	// Attempt to restore a previously stored bundle output for the given
	// kind/name. Returns false on any miss: no entry, an unreadable
	// dependency, a hash mismatch, or a failed restore in the store.
	function tryRestore(params) {
		const cacheKey = buildCacheKey(params.kind, params.name);
		const entry = store.getEntry(cacheKey);
		if (!entry) return false;
		let currentHash;
		try {
			currentHash = combineHash(hashFiles(entry.dependencyPaths), params.contextHash);
		} catch {
			// A dependency file may have been deleted or become unreadable;
			// treat that as a plain cache miss rather than an error.
			return false;
		}
		if (currentHash !== entry.inputHash) return false;
		return store.restoreBundleOutput(cacheKey, params.outputPath);
	}
	// Record a freshly built bundle: store the output file(s) and write an
	// entry whose inputHash covers the source file plus all dependencies.
	function save(params) {
		const { kind, name, sourceFile, outputPath, dependencyPaths, contextHash } = params;
		const cacheKey = buildCacheKey(kind, name);
		// Ensure the source file itself is always part of the dependency set.
		const allDeps = dependencyPaths.includes(sourceFile) ? dependencyPaths : [sourceFile, ...dependencyPaths];
		const inputHash = combineHash(hashFiles(allDeps), contextHash);
		const contentHash = hashFile(outputPath);
		store.storeBundleOutput(cacheKey, outputPath);
		const outputFiles = [{
			outputPath,
			contentHash
		}];
		// Track the sourcemap (when emitted) so it is restored alongside
		// the bundle.
		const mapPath = `${outputPath}.map`;
		if (fs$1.existsSync(mapPath)) outputFiles.push({
			outputPath: mapPath,
			contentHash: hashFile(mapPath)
		});
		store.setEntry(cacheKey, {
			kind: "bundle",
			inputHash,
			dependencyPaths: allDeps,
			outputFiles,
			createdAt: (/* @__PURE__ */ new Date()).toISOString()
		});
	}
	return {
		tryRestore,
		save
	};
}
398
+
399
+ //#endregion
400
//#region src/cli/shared/plugin-import.ts
/**
 * Collect base directories for resolving plugin import paths.
 * @param configPath - Path to tailor.config.ts
 * @returns Ordered list of base directories
 */
function getPluginImportBaseDirs(configPath) {
	// Resolve relative to the config file when one is known; otherwise fall
	// back to the current working directory.
	return configPath ? [path.dirname(configPath)] : [process.cwd()];
}
410
/**
 * Resolve a relative plugin import path against candidate base directories.
 * @param pluginImportPath - Relative plugin import path
 * @param baseDirs - Candidate base directories
 * @returns Absolute path if found, otherwise null
 */
function resolveRelativePluginImportPath(pluginImportPath, baseDirs) {
	// Bare specifiers (packages) are not resolved here.
	if (!pluginImportPath.startsWith(".")) return null;
	const match = baseDirs
		.map((baseDir) => path.resolve(baseDir, pluginImportPath))
		.find((candidate) => fs$1.existsSync(candidate));
	return match ?? null;
}
424
+
425
+ //#endregion
426
//#region src/types/plugin.ts
/**
 * Checks if a plugin executor uses file-based resolution.
 * @param executor - The plugin executor to check.
 * @returns True if the executor uses file-based resolution.
 */
function isPluginExecutorWithFile(executor) {
	const hasResolve = "resolve" in executor;
	const hasContext = "context" in executor;
	return hasResolve && hasContext;
}
435
+
436
+ //#endregion
437
//#region src/cli/commands/generate/plugin-executor-generator.ts
/**
 * Plugin Executor Generator
 *
 * Generates TypeScript files for plugin-generated executors.
 * Supports both legacy format (inline trigger/operation) and new format (executorFile/context).
 */
/**
 * Generate TypeScript files for plugin-generated executors.
 * These files will be processed by the standard executor bundler.
 * @param executors - Array of plugin executor information
 * @param outputDir - Base output directory (e.g., .tailor-sdk)
 * @param typeGenerationResult - Result from plugin type generation (for import resolution)
 * @param sourceTypeInfoMap - Map of source type names to their source info
 * @param configPath - Path to tailor.config.ts (used for resolving plugin import paths)
 * @returns Array of generated file paths
 */
function generatePluginExecutorFiles(executors, outputDir, typeGenerationResult, sourceTypeInfoMap, configPath) {
	if (executors.length === 0) return [];
	const generatedFiles = [];
	const baseDirs = getPluginImportBaseDirs(configPath);
	for (const info of executors) {
		const filePath = generateSingleExecutorFile(info, outputDir, typeGenerationResult, sourceTypeInfoMap, baseDirs);
		generatedFiles.push(filePath);
		// Log a cwd-relative path for readability.
		const relativePath = path.relative(process.cwd(), filePath);
		logger.log(` Plugin Executor File: ${styles.success(relativePath)} from plugin ${styles.info(info.pluginId)}`);
	}
	return generatedFiles;
}
466
/**
 * Generate a single executor file.
 * @param info - Plugin executor metadata and definition
 * @param outputDir - Base output directory (e.g., .tailor-sdk)
 * @param typeGenerationResult - Result from plugin type generation
 * @param sourceTypeInfoMap - Map of source type names to their source info
 * @param baseDirs - Base directories for resolving plugin import paths
 * @returns Absolute path to the generated file
 */
function generateSingleExecutorFile(info, outputDir, typeGenerationResult, sourceTypeInfoMap, baseDirs = []) {
	// Output goes under <outputDir>/<sanitized-plugin-id>/executors/.
	const pluginDir = sanitizePluginId$1(info.pluginId);
	const executorOutputDir = path.join(outputDir, pluginDir, "executors");
	fs$1.mkdirSync(executorOutputDir, { recursive: true });
	const fileName = sanitizeExecutorFileName(info.executor.name);
	const filePath = path.join(executorOutputDir, `${fileName}.ts`);
	let content;
	// New-format executors (resolve/context) become dynamic-import wrappers;
	// legacy executors become inline createExecutor() calls.
	if (isPluginExecutorWithFile(info.executor)) content = generateExecutorFileContentNew(info, info.executor, outputDir, typeGenerationResult, sourceTypeInfoMap, baseDirs);
	else content = generateExecutorFileContentLegacy(info.executor);
	fs$1.writeFileSync(filePath, content);
	return filePath;
}
487
/**
 * Generate TypeScript file content for new format executor (dynamic import).
 * Uses the executor's resolve function to dynamically import the module.
 * @param info - Plugin executor information
 * @param executor - Executor definition with resolve
 * @param outputDir - Base output directory
 * @param typeGenerationResult - Result from plugin type generation
 * @param sourceTypeInfoMap - Map of source type names to their source info
 * @param baseDirs - Base directories for resolving plugin import paths
 * @returns TypeScript source code for executor file
 */
function generateExecutorFileContentNew(info, executor, outputDir, typeGenerationResult, sourceTypeInfoMap, baseDirs = []) {
	const { resolve, context } = executor;
	const pluginDir = sanitizePluginId$1(info.pluginId);
	const executorOutputDir = path.join(outputDir, pluginDir, "executors");
	// Path the generated file should dynamically import the executor from.
	const executorImportPath = resolveExecutorImportPath(resolve, info.pluginImportPath, executorOutputDir, baseDirs);
	// Imports for any TailorDB type objects referenced by the context.
	const typeImports = collectTypeImports(context, outputDir, info.pluginId, typeGenerationResult, sourceTypeInfoMap);
	const imports = [];
	for (const [, importInfo] of typeImports) imports.push(`import { ${importInfo.variableName} } from "${importInfo.importPath}";`);
	const contextCode = generateContextCode(context, typeImports);
	return ml`
	/**
	 * Auto-generated executor by plugin: ${info.pluginId}
	 * DO NOT EDIT - This file is generated by @tailor-platform/sdk
	 */
	${imports.join("\n")}

	const { default: executorFactory } = await import(${JSON.stringify(executorImportPath)});
	if (typeof executorFactory !== "function") {
		throw new Error(
			"Plugin executor module must export a default function created by withPluginContext().",
		);
	}
	export default executorFactory(${contextCode});
	`;
}
523
/**
 * Collect type imports needed for context.
 * @param context - Executor context values from plugin
 * @param outputDir - Base output directory for generated files
 * @param pluginId - Plugin identifier used for output paths
 * @param typeGenerationResult - Result from plugin type generation
 * @param sourceTypeInfoMap - Map of source type names to their source info
 * @returns Map of context keys to their import information
 */
function collectTypeImports(context, outputDir, pluginId, typeGenerationResult, sourceTypeInfoMap) {
	const typeImports = /* @__PURE__ */ new Map();
	const pluginDir = sanitizePluginId$1(pluginId);
	const executorDir = path.join(outputDir, pluginDir, "executors");
	for (const [key, value] of Object.entries(context)) if (isTypeObject(value)) {
		const typeName = value.name;
		const sourceInfo = sourceTypeInfoMap?.get(typeName);
		const variableName = sourceInfo?.exportName ?? toCamelCase$1(typeName);
		let importPath;
		let isGeneratedType = false;
		if (typeGenerationResult?.typeFilePaths.has(typeName)) {
			// Preferred: a file emitted by the plugin type generator.
			const typeFilePath = typeGenerationResult.typeFilePaths.get(typeName);
			const absoluteTypePath = path.join(outputDir, typeFilePath);
			importPath = path.relative(executorDir, absoluteTypePath).replace(/\.ts$/, "");
			if (!importPath.startsWith(".")) importPath = `./${importPath}`;
			isGeneratedType = true;
		} else if (sourceInfo) {
			// Next: the user's original source file declaring the type.
			const sourceFilePath = sourceInfo.filePath;
			importPath = path.relative(executorDir, sourceFilePath).replace(/\.ts$/, "");
			if (!importPath.startsWith(".")) importPath = `./${importPath}`;
		} else importPath = `../../../../tailordb/${toKebabCase$1(typeName)}`;
		// Last resort above assumes the conventional tailordb/ layout relative
		// to the generated executors directory — TODO(review): confirm depth.
		typeImports.set(key, {
			variableName,
			importPath,
			isGeneratedType
		});
	}
	return typeImports;
}
561
/**
 * Generate TypeScript code for context object.
 * @param context - Executor context values from plugin
 * @param typeImports - Resolved type import information for context keys
 * @returns TypeScript object literal code
 */
function generateContextCode(context, typeImports) {
	const entries = [];
	for (const [key, value] of Object.entries(context)) {
		if (isTypeObject(value)) {
			// Type objects are referenced by their imported variable name.
			const importInfo = typeImports.get(key);
			if (importInfo) entries.push(` ${key}: ${importInfo.variableName}`);
		} else if (value !== void 0) {
			// Plain values are inlined as JSON literals; undefined is dropped.
			entries.push(` ${key}: ${JSON.stringify(value)}`);
		}
	}
	// Fix: joining zero entries previously produced "{\n,\n}", which is not a
	// valid JavaScript object literal. Emit "{}" for an empty context instead.
	if (entries.length === 0) return "{}";
	return `{\n${entries.join(",\n")},\n}`;
}
575
/**
 * Check if a value is a TailorDB type object.
 * @param value - Value to inspect
 * @returns True if value is a type object with name and fields
 */
function isTypeObject(value) {
	if (typeof value !== "object" || value === null) return false;
	return "name" in value && "fields" in value && typeof value.name === "string";
}
583
/**
 * Generate TypeScript file content for legacy format executor (trigger/operation).
 * @param executor - Legacy executor definition
 * @returns TypeScript source code for executor file
 */
function generateExecutorFileContentLegacy(executor) {
	const triggerCode = generateTriggerCode(executor.trigger);
	const operationCode = generateOperationCode(executor.operation);
	// Only function operations can carry injected variables.
	const injectDeclarations = generateInjectDeclarations(executor.operation.kind === "function" ? executor.operation.inject : void 0);
	const descriptionLine = executor.description ? `\n description: ${JSON.stringify(executor.description)},` : "";
	return ml`
	/**
	 * Auto-generated executor by plugin
	 * DO NOT EDIT - This file is generated by @tailor-platform/sdk
	 */
	import { createExecutor } from "@tailor-platform/sdk";
	${injectDeclarations}
	export default createExecutor({
	name: ${JSON.stringify(executor.name)},${descriptionLine}
	trigger: ${triggerCode},
	operation: ${operationCode},
	});
	`;
}
607
/**
 * Generate const declarations for injected variables.
 * @param inject - Map of injected values keyed by variable name
 * @returns TypeScript const declarations or empty string
 */
function generateInjectDeclarations(inject) {
	const entries = Object.entries(inject ?? {});
	if (entries.length === 0) return "";
	const declarations = entries
		.map(([name, value]) => `const ${name} = ${JSON.stringify(value)};`)
		.join("\n");
	return `\n// Injected variables from plugin\n${declarations}\n`;
}
616
/**
 * Generate TypeScript code for trigger configuration.
 * @param trigger - Trigger configuration for executor
 * @returns TypeScript code for trigger object
 * @throws When the trigger kind is not recognized
 */
function generateTriggerCode(trigger) {
	const recordKinds = ["recordCreated", "recordUpdated", "recordDeleted"];
	if (recordKinds.includes(trigger.kind)) {
		return `{
kind: ${JSON.stringify(trigger.kind)},
typeName: ${JSON.stringify(trigger.typeName)},
}`;
	}
	if (trigger.kind === "schedule") {
		// Timezone defaults to UTC when the plugin omitted it.
		return `{
kind: "schedule",
cron: ${JSON.stringify(trigger.cron)},
timezone: ${JSON.stringify(trigger.timezone ?? "UTC")},
}`;
	}
	if (trigger.kind === "incomingWebhook") {
		return `{
kind: "incomingWebhook",
}`;
	}
	throw new Error(`Unknown trigger kind: ${trigger.kind}`);
}
640
/**
 * Generate TypeScript code for operation configuration.
 * Emits an object-literal source fragment for the generated executor file.
 * @param operation - Operation configuration for executor
 * @returns TypeScript code for operation object
 * @throws Error when the operation kind is not recognized
 */
function generateOperationCode(operation) {
	switch (operation.kind) {
		case "graphql": {
			// Optional lines are prefixed with "\n" so they only appear when set.
			const appNameLine = operation.appName ? `\n  appName: ${JSON.stringify(operation.appName)},` : "";
			// NOTE(review): operation.variables is interpolated verbatim — presumably
			// already a source-code expression, not a data value; confirm at call site.
			const variablesLine = operation.variables ? `\n  variables: ${operation.variables},` : "";
			// The query is embedded in a backtick literal, so its backticks,
			// backslashes and ${ sequences must be escaped first.
			return `{
    kind: "graphql",
    query: \`${escapeTemplateLiteral(operation.query)}\`,${appNameLine}${variablesLine}
  }`;
		}
		// operation.body is interpolated verbatim as source text (a function expression).
		case "function": return `{
    kind: "function",
    body: ${operation.body},
  }`;
		// The URL is wrapped in a thunk so the generated config exposes url as a function.
		case "webhook": return `{
    kind: "webhook",
    url: () => ${JSON.stringify(operation.url)},
  }`;
		case "workflow": return `{
    kind: "workflow",
    workflowName: ${JSON.stringify(operation.workflowName)},
  }`;
		default: throw new Error(`Unknown operation kind: ${operation.kind}`);
	}
}
670
/**
 * Escape special characters in template literal content.
 * Backslashes are escaped first so the escapes added for backticks and
 * `${` are not themselves re-escaped.
 * @param str - Raw template literal content
 * @returns Escaped string safe for template literals
 */
function escapeTemplateLiteral(str) {
	let escaped = str.replace(/\\/g, "\\\\");
	escaped = escaped.replace(/`/g, "\\`");
	escaped = escaped.replace(/\$\{/g, "\\${");
	return escaped;
}
678
// CommonJS-style `require` scoped to this module (via createRequire, imported
// elsewhere in this bundle); used below to resolve bare plugin specifiers
// through Node's module resolution algorithm.
const require = createRequire(import.meta.url);
679
/**
 * Resolve the import path for a plugin executor module.
 * @param resolve - Executor resolve function
 * @param pluginImportPath - Plugin's import path
 * @param executorOutputDir - Directory where the generated executor will be written
 * @param baseDirs - Base directories for resolving plugin import paths
 * @returns Import path string for the executor module
 * @throws Error when a relative specifier cannot be anchored to a plugin base dir
 */
function resolveExecutorImportPath(resolve, pluginImportPath, executorOutputDir, baseDirs) {
	// Extract the static specifier from the resolve() function's source text.
	const specifier = extractDynamicImportSpecifier(resolve);
	// Bare or absolute specifiers pass through untouched; only "./" / "../"
	// specifiers need rebasing onto the generated executor's directory.
	if (!specifier.startsWith(".")) return specifier;
	const pluginBaseDir = resolvePluginBaseDir(pluginImportPath, baseDirs);
	if (!pluginBaseDir) throw new Error(`Unable to resolve plugin import base for "${pluginImportPath}". Tried base dirs: ${baseDirs.join(", ") || "(none)"}. Use an absolute import specifier in resolve(), or ensure the plugin path is resolvable.`);
	const absolutePath = path.resolve(pluginBaseDir, specifier);
	// Normalize Windows backslashes so the emitted import works cross-platform.
	let relativePath = path.relative(executorOutputDir, absolutePath).replace(/\\/g, "/");
	relativePath = stripSourceExtension(relativePath);
	// path.relative() drops the leading "./" for non-parent paths; ESM relative
	// imports require it.
	if (!relativePath.startsWith(".")) relativePath = `./${relativePath}`;
	return relativePath;
}
698
/**
 * Extract the dynamic import specifier from a resolve function.
 * Works by pattern-matching the function's source text for a literal
 * `import("...")` call; non-literal specifiers are rejected.
 * @param resolve - Executor resolve function
 * @returns The module specifier string
 * @throws Error when no literal dynamic import is found
 */
function extractDynamicImportSpecifier(resolve) {
	const functionSource = resolve.toString();
	const match = /import\s*\(\s*["']([^"']+)["']\s*\)/.exec(functionSource);
	if (match) return match[1];
	throw new Error(`resolve() must return a dynamic import, e.g. \`async () => await import("./executors/on-create")\`.`);
}
708
/**
 * Resolve plugin base directory for relative imports.
 * @param pluginImportPath - Plugin import path
 * @param baseDirs - Base directories for resolving plugin import paths
 * @returns Directory path or null if not resolvable
 */
function resolvePluginBaseDir(pluginImportPath, baseDirs) {
	if (pluginImportPath.startsWith(".")) {
		// Try the project-aware resolver first; fall back to resolving against
		// the first base dir (or cwd) even if the file may not exist.
		const resolvedPath = resolveRelativePluginImportPath(pluginImportPath, baseDirs) ?? path.resolve(baseDirs[0] ?? process.cwd(), pluginImportPath);
		// Existing path: use it directly (or its parent when it is a file).
		if (fs$1.existsSync(resolvedPath)) return fs$1.statSync(resolvedPath).isDirectory() ? resolvedPath : path.dirname(resolvedPath);
		// Non-existent path: guess by extension — paths with an extension are
		// treated as files, extensionless ones as directories.
		return path.extname(resolvedPath) ? path.dirname(resolvedPath) : resolvedPath;
	}
	// Bare specifier: resolve through Node's algorithm from each base dir in
	// order, returning the directory of the first successful resolution.
	for (const baseDir of baseDirs) try {
		const resolved = require.resolve(pluginImportPath, { paths: [baseDir] });
		return path.dirname(resolved);
	} catch {
		continue;
	}
	return null;
}
728
/**
 * Strip TypeScript source extensions from import paths.
 * Only a trailing ".ts" or ".tsx" is removed; other extensions are kept.
 * @param importPath - Path to normalize
 * @returns Path without .ts/.tsx extension
 */
function stripSourceExtension(importPath) {
	const match = /\.(?:ts|tsx)$/.exec(importPath);
	if (match === null) return importPath;
	return importPath.slice(0, importPath.length - match[0].length);
}
736
/**
 * Convert plugin ID to safe directory name.
 * Drops a leading "@" scope marker and turns path separators into dashes.
 * @param pluginId - Plugin identifier (e.g., "@scope/name")
 * @returns Safe directory name
 */
function sanitizePluginId$1(pluginId) {
	const withoutScope = pluginId.startsWith("@") ? pluginId.slice(1) : pluginId;
	return withoutScope.split("/").join("-");
}
744
/**
 * Convert executor name to safe filename.
 * Takes the basename, drops a trailing extension, and replaces any character
 * outside [a-zA-Z0-9_-] with a dash.
 * @param executorName - Executor name
 * @returns Safe filename without extension
 * @throws Error when nothing remains after sanitization
 */
function sanitizeExecutorFileName(executorName) {
	const baseName = path.basename(executorName);
	const withoutExtension = baseName.replace(/\.[^/.]+$/, "");
	const sanitized = withoutExtension.replace(/[^a-zA-Z0-9_-]/g, "-");
	if (sanitized === "") throw new Error(`Invalid executor name: "${executorName}"`);
	return sanitized;
}
754
/**
 * Convert string to camelCase.
 * Separators (dash, underscore, whitespace) are removed and the character
 * following each run of separators is uppercased; the first character of the
 * result is lowercased.
 * @param str - Input string to convert
 * @returns camelCase string
 */
function toCamelCase$1(str) {
	let joined = "";
	let upperNext = false;
	for (const ch of str) {
		if (ch === "-" || ch === "_" || /\s/.test(ch)) {
			upperNext = true;
			continue;
		}
		joined += upperNext ? ch.toUpperCase() : ch;
		upperNext = false;
	}
	return joined.charAt(0).toLowerCase() + joined.slice(1);
}
763
/**
 * Convert string to kebab-case.
 * Inserts a dash at each lower-to-upper camelCase boundary, collapses runs of
 * whitespace/underscores into a dash, then lowercases everything.
 * @param str - Input string to convert
 * @returns kebab-case string
 */
function toKebabCase$1(str) {
	const withBoundaries = str.replace(/([a-z])([A-Z])/g, (_, lower, upper) => `${lower}-${upper}`);
	const dashed = withBoundaries.replace(/[\s_]+/g, "-");
	return dashed.toLowerCase();
}
771
+
772
+ //#endregion
773
+ //#region src/cli/commands/generate/plugin-type-generator.ts
774
+ /**
775
+ * Plugin Type Generator
776
+ *
777
+ * Generates TypeScript files for plugin-generated types (TailorDB types).
778
+ * These files can be imported by plugin executors to reference generated types.
779
+ */
780
/**
 * Check whether a candidate field value is an object-shaped field definition.
 * Note: any non-null object passes, including arrays.
 * @param value - Candidate field value
 * @returns True when value is a non-null object
 */
function isFieldDefinition(value) {
	if (value === null) return false;
	return typeof value === "object";
}
783
/**
 * Generate TypeScript files for plugin-generated types.
 * These files export the type definition and can be imported by executor files.
 * @param types - Array of plugin type information
 * @param outputDir - Base output directory (e.g., .tailor-sdk)
 * @returns Generation result with file paths
 * @throws Error when two plugins generate the same type name
 */
function generatePluginTypeFiles(types, outputDir) {
	// type name -> generated file path relative to outputDir
	const typeFilePaths = /* @__PURE__ */ new Map();
	const generatedFiles = [];
	if (types.length === 0) return {
		typeFilePaths,
		generatedFiles
	};
	// Remember the first occurrence of each type name so duplicates across
	// plugins can be reported with both origins.
	const seenTypeNames = /* @__PURE__ */ new Map();
	for (const info of types) {
		const existing = seenTypeNames.get(info.type.name);
		if (existing) throw new Error(`Duplicate plugin-generated type name "${info.type.name}" detected. First: plugin "${existing.pluginId}" (kind: "${existing.kind}", source type: "${existing.sourceTypeName}"), Second: plugin "${info.pluginId}" (kind: "${info.kind}", source type: "${info.sourceTypeName}"). Plugin-generated type names must be unique.`);
		seenTypeNames.set(info.type.name, info);
		// Layout: <outputDir>/<sanitized-plugin-id>/types/<kebab-type-name>.ts
		const pluginDir = sanitizePluginId(info.pluginId);
		const typeOutputDir = path.join(outputDir, pluginDir, "types");
		fs$1.mkdirSync(typeOutputDir, { recursive: true });
		const fileName = `${toKebabCase(info.type.name)}.ts`;
		const filePath = path.join(typeOutputDir, fileName);
		const content = generateTypeFileContent(info);
		fs$1.writeFileSync(filePath, content);
		generatedFiles.push(filePath);
		// The returned map stores paths relative to outputDir so callers can
		// build imports from the generated tree.
		const relativePath = path.relative(outputDir, filePath);
		typeFilePaths.set(info.type.name, relativePath);
		// Log a cwd-relative path for readable CLI output.
		const displayPath = path.relative(process.cwd(), filePath);
		logger.log(`  Plugin Type File: ${styles.success(displayPath)} (${styles.dim(info.kind)}) from plugin ${styles.info(info.pluginId)}`);
	}
	return {
		typeFilePaths,
		generatedFiles
	};
}
820
/**
 * Generate TypeScript file content for a single type.
 * Emits a module that re-declares the plugin-generated type via `db.type`
 * and exports both the value and a `typeof` alias.
 * @param info - Plugin type information
 * @returns TypeScript source code
 */
function generateTypeFileContent(info) {
	const { type, pluginId, sourceTypeName, kind } = info;
	const variableName = toCamelCase(type.name);
	const fieldsCode = generateFieldsCode(type);
	// `ml` is presumably a dedenting multi-line template tag — confirm its
	// whitespace handling before editing the literal below.
	return ml`
	/**
	* Auto-generated type by plugin: ${pluginId}
	* Source type: ${sourceTypeName}
	* Kind: ${kind}
	*
	* DO NOT EDIT - This file is generated by @tailor-platform/sdk
	*/
	import { db } from "@tailor-platform/sdk";

	export const ${variableName} = db.type(${JSON.stringify(type.name)}, ${fieldsCode});

	export type ${type.name} = typeof ${variableName};
	`;
}
844
/**
 * Generate TypeScript code for field definitions.
 * This creates a simplified version of the type's fields.
 * @param type - TailorDB type
 * @returns TypeScript code for fields object ("{}" when no field is emitted)
 */
function generateFieldsCode(type) {
	const fieldEntries = [];
	for (const [fieldName, field] of Object.entries(type.fields)) {
		// Skip non-object field values (e.g. metadata keys mixed into `fields`).
		if (!isFieldDefinition(field)) continue;
		const fieldCode = generateSingleFieldCode(field);
		if (fieldCode) fieldEntries.push(`  ${fieldName}: ${fieldCode}`);
	}
	// Fix: with zero emitted fields the previous join produced "{\n,\n}",
	// which is a syntax error in the generated TypeScript file.
	if (fieldEntries.length === 0) return "{}";
	return `{\n${fieldEntries.join(",\n")},\n}`;
}
859
/**
 * Map from TailorDB type to SDK method name.
 * Types missing from this map fall back to their own name at the lookup
 * site (see generateSingleFieldCode's `?? fieldType` fallback).
 */
const typeToMethodMap = {
	string: "string",
	integer: "int",
	float: "float",
	boolean: "bool",
	uuid: "uuid",
	datetime: "datetime",
	date: "date",
	time: "time",
	enum: "enum",
	nested: "nested"
};
874
/**
 * Generate TypeScript code for a single field definition.
 * @param field - Field definition object
 * @returns TypeScript code for the field, or null when the field has no type
 */
function generateSingleFieldCode(field) {
	const fieldType = field.type;
	if (!fieldType) return null;
	// Metadata may live under either key depending on how the type was built.
	const metadata = field._metadata ?? field.metadata ?? {};
	const optionParts = [];
	if (metadata.required === false) optionParts.push("optional: true");
	const optionsArg = optionParts.length > 0 ? `{ ${optionParts.join(", ")} }` : "";
	let code;
	if (fieldType === "enum") {
		// Enum fields take their allowed values as the first argument.
		const allowedValues = metadata.allowedValues;
		const enumValues = Array.isArray(allowedValues) ? allowedValues.map((v) => v.value) : [];
		code = `db.enum(${JSON.stringify(enumValues)}${optionsArg ? `, ${optionsArg}` : ""})`;
	} else {
		const method = typeToMethodMap[fieldType] ?? fieldType;
		code = `db.${method}(${optionsArg})`;
	}
	// The modifier chain used to be duplicated verbatim in both branches.
	return appendFieldModifiers(code, metadata);
}
/**
 * Append the .index()/.unique()/.description() modifier chain shared by all
 * field kinds.
 * @param code - Base field expression
 * @param metadata - Field metadata
 * @returns Field expression with modifiers appended
 */
function appendFieldModifiers(code, metadata) {
	if (metadata.index) code += ".index()";
	if (metadata.unique) code += ".unique()";
	if (metadata.description) code += `.description(${JSON.stringify(metadata.description)})`;
	return code;
}
903
/**
 * Convert plugin ID to safe directory name.
 * @param pluginId - Plugin identifier (e.g., "@tailor-platform/change-history")
 * @returns Safe directory name (e.g., "tailor-platform-change-history")
 */
function sanitizePluginId(pluginId) {
	let id = pluginId;
	if (id.startsWith("@")) id = id.slice(1);
	return id.replaceAll("/", "-");
}
911
/**
 * Convert string to kebab-case.
 * @param str - Input string
 * @returns kebab-case string
 */
function toKebabCase(str) {
	return str
		.replace(/([a-z])([A-Z])/g, "$1-$2")
		.replace(/[\s_]+/g, "-")
		.toLowerCase();
}
919
/**
 * Convert string to camelCase.
 * Splits on runs of dash/underscore/whitespace, capitalizes the first letter
 * of every segment after the first, then lowercases the leading character.
 * @param str - Input string
 * @returns camelCase string
 */
function toCamelCase(str) {
	const segments = str.split(/[-_\s]+/);
	let joined = segments[0] ?? "";
	for (const segment of segments.slice(1)) {
		joined += segment.charAt(0).toUpperCase() + segment.slice(1);
	}
	return joined.charAt(0).toLowerCase() + joined.slice(1);
}
928
+
929
+ //#endregion
930
+ //#region src/cli/services/stale-cleanup.ts
931
/**
 * Remove stale `.entry.js` files from the output directory.
 *
 * Must be called before parallel bundling; concurrent builds
 * sharing the same output directory would otherwise conflict.
 * @param outputDir - Directory to clean
 */
async function removeStaleEntryFiles(outputDir) {
	const entries = await fs.readdir(outputDir);
	const staleEntries = entries.filter((name) => name.endsWith(".entry.js"));
	const deletions = staleEntries.map((name) => fs.rm(path.join(outputDir, name), { force: true }));
	await Promise.all(deletions);
}
942
+
943
+ //#endregion
944
+ //#region src/cli/services/file-loader.ts
945
// Default ignore globs applied when config.ignores is not provided.
const DEFAULT_IGNORE_PATTERNS = ["**/*.test.ts", "**/*.spec.ts"];
/**
 * Load files matching the given patterns, excluding files that match ignore patterns.
 * By default, test files (*.test.ts, *.spec.ts) are excluded unless ignores is explicitly specified.
 * Glob failures are logged as warnings and the pattern is skipped (best effort).
 * @param config - Configuration with files patterns and optional ignores patterns
 * @returns Array of absolute file paths
 */
function loadFilesWithIgnores(config) {
	const ignorePatterns = config.ignores ?? DEFAULT_IGNORE_PATTERNS;
	// Resolve the ignore set first so matches can be filtered in one pass.
	const ignoreFiles = /* @__PURE__ */ new Set();
	for (const ignorePattern of ignorePatterns) {
		// Patterns are resolved against cwd so relative globs behave as expected.
		const absoluteIgnorePattern = path.resolve(process.cwd(), ignorePattern);
		try {
			fs$1.globSync(absoluteIgnorePattern).forEach((file) => ignoreFiles.add(file));
		} catch (error) {
			logger.warn(`Failed to glob ignore pattern "${ignorePattern}": ${String(error)}`);
		}
	}
	const files = [];
	for (const pattern of config.files) {
		const absolutePattern = path.resolve(process.cwd(), pattern);
		try {
			const filteredFiles = fs$1.globSync(absolutePattern).filter((file) => !ignoreFiles.has(file));
			files.push(...filteredFiles);
		} catch (error) {
			logger.warn(`Failed to glob pattern "${pattern}": ${String(error)}`);
		}
	}
	return files;
}
975
+
976
+ //#endregion
977
+ //#region src/cli/services/workflow/ast-utils.ts
978
/**
 * Check if a module source is from the Tailor SDK package (including subpaths)
 * @param source - Module source string
 * @returns True if the source is from the Tailor SDK package
 */
function isTailorSdkSource(source) {
	if (source === "@tailor-platform/sdk") return true;
	return source.startsWith("@tailor-platform/sdk/");
}
986
/**
 * Get the source string from a dynamic import or require call
 * @param node - AST node to inspect
 * @returns Resolved import/require source string or null
 */
function getImportSource(node) {
	if (!node) return null;
	switch (node.type) {
		case "ImportExpression": {
			// import("...") with a literal string specifier
			const { source } = node;
			if (source.type === "Literal" && typeof source.value === "string") return source.value;
			break;
		}
		case "CallExpression": {
			// require("...") with a literal string argument
			const callee = node.callee;
			if (callee.type === "Identifier" && callee.name === "require") {
				const firstArg = node.arguments[0];
				if (firstArg && "type" in firstArg && firstArg.type === "Literal" && "value" in firstArg && typeof firstArg.value === "string") return firstArg.value;
			}
			break;
		}
	}
	return null;
}
1006
/**
 * Unwrap AwaitExpression to get the inner expression
 * @param node - AST expression node
 * @returns Inner expression if node is an AwaitExpression, otherwise the node itself
 */
function unwrapAwait(node) {
	const isAwait = node != null && node.type === "AwaitExpression";
	return isAwait ? node.argument : node;
}
1015
/**
 * Check if a node is a string literal
 * @param node - AST expression node
 * @returns True if node is a string literal
 */
function isStringLiteral(node) {
	if (node == null) return false;
	return node.type === "Literal" && typeof node.value === "string";
}
1023
/**
 * Check if a node is a function expression (arrow or regular)
 * @param node - AST expression node
 * @returns True if node is a function expression
 */
function isFunctionExpression(node) {
	const nodeType = node?.type;
	return nodeType === "ArrowFunctionExpression" || nodeType === "FunctionExpression";
}
1031
/**
 * Find a property in an object expression
 * Matches identifier keys by name and literal keys by value.
 * @param properties - Object properties to search
 * @param name - Property name to find
 * @returns Found property info or null
 */
function findProperty(properties, name) {
	for (const prop of properties) {
		if (prop.type !== "Property") continue;
		const key = prop.key;
		let keyName = null;
		if (key.type === "Identifier") keyName = key.name;
		else if (key.type === "Literal") keyName = key.value;
		if (keyName !== name) continue;
		return {
			key,
			value: prop.value,
			start: prop.start,
			end: prop.end
		};
	}
	return null;
}
1049
/**
 * Apply string replacements to source code
 * Replacements are applied from end to start to maintain positions
 * (the input array is copied before sorting, not mutated).
 * @param source - Original source code
 * @param replacements - Replacements to apply
 * @returns Transformed source code
 */
function applyReplacements(source, replacements) {
	const byDescendingStart = [...replacements].sort((a, b) => b.start - a.start);
	let result = source;
	for (const { start, end, text } of byDescendingStart) {
		result = `${result.slice(0, start)}${text}${result.slice(end)}`;
	}
	return result;
}
1062
/**
 * Find the end of a statement including any trailing newline.
 * Skips trailing semicolons and horizontal whitespace (spaces and tabs),
 * then consumes at most one newline.
 * @param source - Source code
 * @param position - Start position of the statement
 * @returns Index of the end of the statement including trailing newline
 */
function findStatementEnd(source, position) {
	let i = position;
	// Fix: the condition previously tested " " twice; the second check was
	// meant to cover tabs, so tab-padded statement tails were not consumed.
	while (i < source.length && (source[i] === ";" || source[i] === " " || source[i] === "\t")) i++;
	if (i < source.length && source[i] === "\n") i++;
	return i;
}
1074
/**
 * Resolve a relative path from a base directory
 * Simple implementation that handles ./ and ../ prefixes
 * (no normalization beyond popping one segment per "..").
 * @param baseDir - Base directory
 * @param relativePath - Relative path to resolve
 * @returns Resolved absolute path
 */
function resolvePath(baseDir, relativePath) {
	const stack = baseDir.replace(/\\/g, "/").split("/");
	const segments = relativePath.replace(/\\/g, "/").split("/");
	for (const segment of segments) {
		if (segment === ".") continue;
		if (segment === "..") {
			stack.pop();
		} else {
			stack.push(segment);
		}
	}
	return stack.join("/");
}
1088
+
1089
+ //#endregion
1090
+ //#region src/cli/services/workflow/sdk-binding-collector.ts
1091
/**
 * Collect all import bindings for a specific function from the Tailor SDK package
 * Returns a Set of local names that refer to the function
 * Namespace-style bindings (default/namespace imports, `const x = await import(...)`)
 * are recorded with a `__namespace__:` prefix and matched later as `x.fn` calls.
 * @param program - Parsed TypeScript program
 * @param functionName - Function name to collect bindings for
 * @returns Set of local names bound to the SDK function
 */
function collectSdkBindings(program, functionName) {
	const bindings = /* @__PURE__ */ new Set();
	function walk(node) {
		if (!node || typeof node !== "object") return;
		const nodeType = node.type;
		// Case 1: static `import ... from "@tailor-platform/sdk..."`.
		if (nodeType === "ImportDeclaration") {
			const importDecl = node;
			const source = importDecl.source?.value;
			if (typeof source === "string" && isTailorSdkSource(source)) {
				for (const specifier of importDecl.specifiers || []) if (specifier.type === "ImportSpecifier") {
					// Named import: honor aliasing (`import { fn as local }`).
					const importSpec = specifier;
					const imported = importSpec.imported.type === "Identifier" ? importSpec.imported.name : importSpec.imported.value;
					if (imported === functionName) bindings.add(importSpec.local?.name || imported);
				} else if (specifier.type === "ImportDefaultSpecifier" || specifier.type === "ImportNamespaceSpecifier") {
					// Default/namespace import: the function is reachable as <local>.<fn>.
					const spec = specifier;
					bindings.add(`__namespace__:${spec.local?.name}`);
				}
			}
		}
		// Case 2: `const x = await import("...")` / `const x = require("...")`,
		// including object-pattern destructuring of the module namespace.
		if (nodeType === "VariableDeclaration") {
			const varDecl = node;
			for (const decl of varDecl.declarations || []) {
				const source = getImportSource(unwrapAwait(decl.init));
				if (source && isTailorSdkSource(source)) {
					const id = decl.id;
					if (id?.type === "Identifier") bindings.add(`__namespace__:${id.name}`);
					else if (id?.type === "ObjectPattern") {
						const objPattern = id;
						for (const prop of objPattern.properties || []) if (prop.type === "Property") {
							const bindingProp = prop;
							const keyName = bindingProp.key.type === "Identifier" ? bindingProp.key.name : bindingProp.key.value;
							if (keyName === functionName) {
								// `{ fn: local }` binds under the local name; `{ fn }` under the key.
								const localName = bindingProp.value.type === "Identifier" ? bindingProp.value.name : keyName;
								bindings.add(localName ?? "");
							}
						}
					}
				}
			}
		}
		// Generic recursive descent over all child nodes/arrays.
		for (const key of Object.keys(node)) {
			const child = node[key];
			if (Array.isArray(child)) child.forEach((c) => walk(c));
			else if (child && typeof child === "object") walk(child);
		}
	}
	walk(program);
	return bindings;
}
1147
/**
 * Check if a CallExpression is a call to a specific SDK function
 * Matches either a direct identifier call bound to the function, or a
 * non-computed member call on a namespace binding (`ns.fn(...)`).
 * @param node - AST node to inspect
 * @param bindings - Collected SDK bindings
 * @param functionName - SDK function name
 * @returns True if node is a call to the SDK function
 */
function isSdkFunctionCall(node, bindings, functionName) {
	if (node.type !== "CallExpression") return false;
	const callee = node.callee;
	switch (callee.type) {
		case "Identifier":
			return bindings.has(callee.name);
		case "MemberExpression": {
			if (callee.computed) return false;
			const { object, property } = callee;
			if (object.type !== "Identifier") return false;
			return bindings.has(`__namespace__:${object.name}`) && property.name === functionName;
		}
		default:
			return false;
	}
}
1171
+
1172
+ //#endregion
1173
+ //#region src/cli/services/workflow/job-detector.ts
1174
/**
 * Find all workflow jobs by detecting createWorkflowJob calls from `@tailor-platform/sdk`
 * A job is recorded only when its config object has a string-literal `name`
 * and a function-expression `body`.
 * @param program - Parsed TypeScript program
 * @param _sourceText - Source code text (currently unused)
 * @returns Detected job locations
 */
function findAllJobs(program, _sourceText) {
	const jobs = [];
	const bindings = collectSdkBindings(program, "createWorkflowJob");
	// Walk with an explicit ancestor chain so the surrounding declaration /
	// export statement of each call can be located afterwards.
	function walk(node, parents = []) {
		if (!node || typeof node !== "object") return;
		if (isSdkFunctionCall(node, bindings, "createWorkflowJob")) {
			const args = node.arguments;
			if (args?.length >= 1 && args[0]?.type === "ObjectExpression") {
				const configObj = args[0];
				const nameProp = findProperty(configObj.properties, "name");
				const bodyProp = findProperty(configObj.properties, "body");
				if (nameProp && isStringLiteral(nameProp.value) && bodyProp && isFunctionExpression(bodyProp.value)) {
					let statementRange;
					let exportName;
					// Scan ancestors innermost-first: the nearest VariableDeclarator
					// gives the export name; the last matching declaration/export
					// ancestor seen wins for the statement range.
					for (let i = parents.length - 1; i >= 0; i--) {
						const parent = parents[i];
						if (parent.type === "VariableDeclarator") {
							const declarator = parent;
							if (declarator.id?.type === "Identifier") exportName = declarator.id.name;
						}
						if (parent.type === "ExportNamedDeclaration" || parent.type === "VariableDeclaration") statementRange = {
							start: parent.start,
							end: parent.end
						};
					}
					jobs.push({
						name: nameProp.value.value,
						exportName,
						// Range of the whole `name: "..."` property (for rewriting).
						nameRange: {
							start: nameProp.start,
							end: nameProp.end
						},
						// Range of just the body function expression value.
						bodyValueRange: {
							start: bodyProp.value.start,
							end: bodyProp.value.end
						},
						statementRange
					});
				}
			}
		}
		const newParents = [...parents, node];
		// Generic recursive descent over all child nodes/arrays.
		for (const key of Object.keys(node)) {
			const child = node[key];
			if (Array.isArray(child)) child.forEach((c) => walk(c, newParents));
			else if (child && typeof child === "object") walk(child, newParents);
		}
	}
	walk(program);
	return jobs;
}
1231
/**
 * Build a map from export name to job name from detected jobs
 * Jobs without an export name are skipped.
 * @param jobs - Detected job locations
 * @returns Map from export name to job name
 */
function buildJobNameMap(jobs) {
	const nameByExport = new Map();
	for (const { exportName, name } of jobs) {
		if (exportName) nameByExport.set(exportName, name);
	}
	return nameByExport;
}
1241
/**
 * Detect all .trigger() calls in the source code
 * Returns information about each trigger call for transformation
 * Matches non-computed `<identifier>.trigger(...)` member calls only.
 * @param program - Parsed TypeScript program
 * @param sourceText - Source code text
 * @returns Detected trigger calls
 */
function detectTriggerCalls(program, sourceText) {
	const calls = [];
	// Walk with the immediate parent so a wrapping AwaitExpression can be detected.
	function walk(node, parent = null) {
		if (!node || typeof node !== "object") return;
		if (node.type === "CallExpression") {
			const callExpr = node;
			const callee = callExpr.callee;
			if (callee.type === "MemberExpression") {
				const memberExpr = callee;
				if (!memberExpr.computed && memberExpr.object.type === "Identifier" && memberExpr.property.name === "trigger") {
					const identifierName = memberExpr.object.name;
					// Capture the raw argument text (first arg start to last arg end)
					// so it can be spliced into transformed output verbatim.
					let argsText = "";
					if (callExpr.arguments.length > 0) {
						const firstArg = callExpr.arguments[0];
						const lastArg = callExpr.arguments[callExpr.arguments.length - 1];
						if (firstArg && lastArg && "start" in firstArg && "end" in lastArg) argsText = sourceText.slice(firstArg.start, lastArg.end);
					}
					const hasAwait = parent?.type === "AwaitExpression";
					const awaitExpr = hasAwait ? parent : null;
					const callRange = {
						start: callExpr.start,
						end: callExpr.end
					};
					// fullRange covers the surrounding `await` too, so replacements
					// can swallow the whole awaited expression.
					const fullRange = awaitExpr ? {
						start: awaitExpr.start,
						end: awaitExpr.end
					} : callRange;
					calls.push({
						identifierName,
						callRange,
						argsText,
						hasAwait,
						fullRange
					});
				}
			}
		}
		// Generic recursive descent; the current node becomes the child's parent.
		for (const key of Object.keys(node)) {
			const child = node[key];
			if (Array.isArray(child)) child.forEach((c) => walk(c, node));
			else if (child && typeof child === "object") walk(child, node);
		}
	}
	walk(program);
	return calls;
}
1294
+
1295
+ //#endregion
1296
+ //#region src/cli/services/workflow/workflow-detector.ts
1297
/**
 * Find all workflows by detecting createWorkflow calls from `@tailor-platform/sdk`
 * A workflow is recorded only when its config object has a string-literal `name`.
 * @param program - Parsed TypeScript program
 * @param _sourceText - Source code text (currently unused)
 * @returns Detected workflows
 */
function findAllWorkflows(program, _sourceText) {
	const workflows = [];
	const bindings = collectSdkBindings(program, "createWorkflow");
	// Walk with an explicit ancestor chain to recover how each workflow is exported.
	function walk(node, parents = []) {
		if (!node || typeof node !== "object") return;
		if (isSdkFunctionCall(node, bindings, "createWorkflow")) {
			const args = node.arguments;
			if (args?.length >= 1 && args[0]?.type === "ObjectExpression") {
				const configObj = args[0];
				const nameProp = findProperty(configObj.properties, "name");
				if (nameProp && isStringLiteral(nameProp.value)) {
					let exportName;
					let isDefaultExport = false;
					// Innermost-first: stop at the nearest VariableDeclarator with an
					// identifier (that's the local binding); a default-export ancestor
					// encountered before it marks the workflow as default-exported.
					for (let i = parents.length - 1; i >= 0; i--) {
						const parent = parents[i];
						if (parent.type === "VariableDeclarator") {
							const declarator = parent;
							if (declarator.id?.type === "Identifier") {
								exportName = declarator.id.name;
								break;
							}
						}
						if (parent.type === "ExportDefaultDeclaration") isDefaultExport = true;
					}
					workflows.push({
						name: nameProp.value.value,
						exportName,
						isDefaultExport
					});
				}
			}
		}
		const newParents = [...parents, node];
		// Generic recursive descent over all child nodes/arrays.
		for (const key of Object.keys(node)) {
			const child = node[key];
			if (Array.isArray(child)) child.forEach((c) => walk(c, newParents));
			else if (child && typeof child === "object") walk(child, newParents);
		}
	}
	walk(program);
	return workflows;
}
1345
/**
 * Build a map from export name to workflow name from detected workflows
 * Workflows without an export name are skipped.
 * @param workflows - Detected workflows
 * @returns Map from export name to workflow name
 */
function buildWorkflowNameMap(workflows) {
	return workflows
		.filter((workflow) => Boolean(workflow.exportName))
		.reduce((map, workflow) => map.set(workflow.exportName, workflow.name), new Map());
}
1355
/**
 * Detect default imports in a source file and return a map from local name to import source
 * @param program - Parsed TypeScript program
 * @returns Map from local name to import source
 */
function detectDefaultImports(program) {
	const defaultImports = new Map();
	const visit = (node) => {
		if (!node || typeof node !== "object") return;
		if (node.type === "ImportDeclaration" && typeof node.source?.value === "string") {
			for (const specifier of node.specifiers || []) {
				if (specifier.type !== "ImportDefaultSpecifier") continue;
				const localName = specifier.local?.name;
				if (localName) defaultImports.set(localName, node.source.value);
			}
		}
		// Generic recursive descent over all child nodes/arrays.
		for (const key of Object.keys(node)) {
			const child = node[key];
			if (Array.isArray(child)) child.forEach((c) => visit(c));
			else if (child && typeof child === "object") visit(child);
		}
	};
	visit(program);
	return defaultImports;
}
1383
+
1384
+ //#endregion
1385
+ //#region src/cli/services/workflow/trigger-transformer.ts
1386
/**
 * Extract authInvoker info from a config object expression
 * Returns the authInvoker value text and whether it's a shorthand property;
 * returns undefined when the argument is not an object expression or has no
 * authInvoker property.
 * @param configArg - Config argument node
 * @param sourceText - Source code text
 * @returns Extracted authInvoker info, if any
 */
function extractAuthInvokerInfo(configArg, sourceText) {
	if (!configArg || typeof configArg !== "object") return void 0;
	if (configArg.type !== "ObjectExpression") return void 0;
	for (const prop of configArg.properties) {
		if (prop.type !== "Property") continue;
		const key = prop.key;
		const keyName = key.type === "Identifier" ? key.name : key.type === "Literal" ? key.value : null;
		if (keyName !== "authInvoker") continue;
		// Shorthand `{ authInvoker }` — the value text is just the identifier.
		if (prop.shorthand) return {
			isShorthand: true,
			valueText: "authInvoker"
		};
		// Explicit `{ authInvoker: <expr> }` — slice the expression's source text.
		return {
			isShorthand: false,
			valueText: sourceText.slice(prop.value.start, prop.value.end)
		};
	}
}
1412
/**
 * Detect .trigger() calls for known workflows and jobs.
 * Only detects calls where the identifier is in workflowNames or jobNames.
 * Workflow calls are only recorded when they carry a second argument with an
 * authInvoker entry; job calls are always recorded.
 * @param program - The parsed AST program
 * @param sourceText - The source code text
 * @param workflowNames - Set of known workflow identifier names
 * @param jobNames - Set of known job identifier names
 * @returns Detected trigger call metadata
 */
function detectExtendedTriggerCalls(program, sourceText, workflowNames, jobNames) {
  const calls = [];
  // Depth-first walk; `parent` is only consulted to detect an enclosing AwaitExpression.
  function walk(node, parent = null) {
    if (!node || typeof node !== "object") return;
    if (node.type === "CallExpression") {
      const callExpr = node;
      const callee = callExpr.callee;
      if (callee.type === "MemberExpression") {
        const memberExpr = callee;
        // Match only non-computed `<identifier>.trigger(...)` calls.
        if (!memberExpr.computed && memberExpr.object.type === "Identifier" && memberExpr.property.name === "trigger") {
          const identifierName = memberExpr.object.name;
          const isWorkflow = workflowNames.has(identifierName);
          const isJob = jobNames.has(identifierName);
          // NOTE(review): this early return also skips traversal of the call's
          // arguments for unknown identifiers, so nested trigger calls inside
          // them are not detected — confirm this is intentional.
          if (!isWorkflow && !isJob) return;
          const argCount = callExpr.arguments.length;
          let argsText = "";
          if (argCount > 0) {
            const firstArg = callExpr.arguments[0];
            // Only slice when the argument node carries source offsets.
            if (firstArg && "start" in firstArg && "end" in firstArg) argsText = sourceText.slice(firstArg.start, firstArg.end);
          }
          const hasAwait = parent?.type === "AwaitExpression";
          const awaitExpr = hasAwait ? parent : null;
          if (isWorkflow && argCount >= 2) {
            // Workflows require a config argument containing authInvoker to be recorded.
            const secondArg = callExpr.arguments[1];
            const authInvoker = extractAuthInvokerInfo(secondArg, sourceText);
            if (authInvoker) calls.push({
              kind: "workflow",
              identifierName,
              callRange: {
                start: callExpr.start,
                end: callExpr.end
              },
              argsText,
              authInvoker,
              // NOTE(review): hasAwait is hard-coded false for workflows even
              // when the call sits under `await` — confirm intended.
              hasAwait: false
            });
          } else if (isJob) calls.push({
            kind: "job",
            identifierName,
            callRange: {
              start: callExpr.start,
              end: callExpr.end
            },
            argsText,
            hasAwait,
            // For awaited job calls, record the full `await <call>` span so a
            // replacement can cover the await keyword as well.
            fullRange: awaitExpr ? {
              start: awaitExpr.start,
              end: awaitExpr.end
            } : void 0
          });
        }
      }
    }
    // Recurse into every child node / node array.
    for (const key of Object.keys(node)) {
      const child = node[key];
      if (Array.isArray(child)) child.forEach((c) => walk(c, node));
      else if (child && typeof child === "object") walk(child, node);
    }
  }
  walk(program);
  return calls;
}
1483
/**
 * Transform trigger calls for resolver/executor/workflow functions.
 * Handles both job.trigger() and workflow.trigger() calls by rewriting them
 * into tailor.workflow.triggerWorkflow / triggerJobFunction invocations.
 * @param source - The source code to transform
 * @param workflowNameMap - Map from variable name to workflow name
 * @param jobNameMap - Map from variable name to job name
 * @param workflowFileMap - Map from file path (without extension) to workflow name for default exports
 * @param currentFilePath - Path of the current file being transformed (for resolving relative imports)
 * @returns Transformed source code with trigger calls rewritten
 */
function transformFunctionTriggers(source, workflowNameMap, jobNameMap, workflowFileMap, currentFilePath) {
  const { program } = parseSync("input.ts", source);
  // Copy so default-import aliases can be added without mutating the caller's map.
  const localWorkflowNameMap = new Map(workflowNameMap);
  if (workflowFileMap && currentFilePath) {
    const defaultImports = detectDefaultImports(program);
    // Strip the last path segment to get the importing file's directory.
    const currentDir = currentFilePath.replace(/[/\\][^/\\]+$/, "");
    for (const [localName, importSource] of defaultImports) {
      // Only relative imports can point at local workflow files.
      if (!importSource.startsWith(".")) continue;
      const resolvedPath = resolvePath(currentDir, importSource);
      const workflowName = workflowFileMap.get(resolvedPath);
      if (workflowName) localWorkflowNameMap.set(localName, workflowName);
    }
  }
  const triggerCalls = detectExtendedTriggerCalls(program, source, new Set(localWorkflowNameMap.keys()), new Set(jobNameMap.keys()));
  const replacements = [];
  for (const call of triggerCalls) if (call.kind === "workflow" && call.authInvoker) {
    const workflowName = localWorkflowNameMap.get(call.identifierName);
    if (workflowName) {
      // Shorthand `{ authInvoker }` becomes the bare identifier; otherwise reuse the original value text.
      const authInvokerExpr = call.authInvoker.isShorthand ? "authInvoker" : call.authInvoker.valueText;
      const transformedCall = `tailor.workflow.triggerWorkflow("${workflowName}", ${call.argsText || "undefined"}, { authInvoker: ${authInvokerExpr} })`;
      replacements.push({
        start: call.callRange.start,
        end: call.callRange.end,
        text: transformedCall
      });
    }
  } else if (call.kind === "job") {
    const jobName = jobNameMap.get(call.identifierName);
    if (jobName) {
      const transformedCall = `tailor.workflow.triggerJobFunction("${jobName}", ${call.argsText || "undefined"})`;
      // Awaited job calls replace the whole `await <call>` expression span.
      const range = call.hasAwait && call.fullRange ? call.fullRange : call.callRange;
      replacements.push({
        start: range.start,
        end: range.end,
        text: transformedCall
      });
    }
  }
  return applyReplacements(source, replacements);
}
1533
+
1534
+ //#endregion
1535
+ //#region src/cli/shared/trigger-context.ts
1536
/**
 * Normalize a file path by removing extension and resolving to absolute path.
 * @param filePath - File path to normalize
 * @returns Normalized absolute path without extension
 */
function normalizeFilePath(filePath) {
  const absolutePath = path.resolve(filePath);
  const ext = path.extname(absolutePath);
  // Fix: extname() returns "" for extension-less paths, and slice(0, -0) is
  // slice(0, 0), which collapsed the whole path to "". Return the resolved
  // path unchanged when there is no extension to strip.
  return ext ? absolutePath.slice(0, -ext.length) : absolutePath;
}
1546
/**
 * Build trigger context from workflow configuration.
 * Reads and parses every workflow file to collect workflow/job export-name
 * mappings and default-export file mappings. Files that fail to read or parse
 * are skipped with a warning.
 * @param workflowConfig - Workflow file loading configuration
 * @returns Trigger context built from workflow sources
 */
async function buildTriggerContext(workflowConfig) {
  const workflowNameMap = new Map();
  const jobNameMap = new Map();
  const workflowFileMap = new Map();
  const context = {
    workflowNameMap,
    jobNameMap,
    workflowFileMap
  };
  if (!workflowConfig) return context;
  for (const file of loadFilesWithIgnores(workflowConfig)) {
    try {
      const source = await fs$1.promises.readFile(file, "utf-8");
      const { program } = parseSync("input.ts", source);
      const workflows = findAllWorkflows(program, source);
      for (const [exportName, workflowName] of buildWorkflowNameMap(workflows)) {
        workflowNameMap.set(exportName, workflowName);
      }
      // Default-exported workflows are keyed by their normalized file path so
      // default imports in other files can be resolved back to a workflow name.
      for (const workflow of workflows) {
        if (!workflow.isDefaultExport) continue;
        workflowFileMap.set(normalizeFilePath(file), workflow.name);
      }
      for (const [exportName, jobName] of buildJobNameMap(findAllJobs(program, source))) {
        jobNameMap.set(exportName, jobName);
      }
    } catch (error) {
      // Best-effort: a broken workflow file must not abort the whole scan.
      const message = error instanceof Error ? error.message : String(error);
      logger.warn(`Failed to process workflow file ${file}: ${message}`, { mode: "stream" });
    }
  }
  return context;
}
1585
// Serialize a Map's entries as JSON with keys in a stable (locale-compared) order.
function sortedMapToJson(m) {
  const entries = Array.from(m.entries());
  entries.sort((left, right) => left[0].localeCompare(right[0]));
  return JSON.stringify(entries);
}
/**
 * Serialize trigger context to a deterministic string for cache hashing.
 * Returns an empty string if no context is provided.
 * @param ctx - Trigger context to serialize
 * @returns Deterministic string representation
 */
function serializeTriggerContext(ctx) {
  if (!ctx) return "";
  const parts = [ctx.workflowNameMap, ctx.jobNameMap, ctx.workflowFileMap];
  return parts.map(sortedMapToJson).join("");
}
1598
/**
 * Create a rolldown plugin that rewrites trigger calls in .ts/.js modules.
 * Returns undefined when no trigger context is provided.
 * @param triggerContext - Trigger context to use for transformations
 * @returns Rolldown plugin or undefined when no context
 */
function createTriggerTransformPlugin(triggerContext) {
  if (!triggerContext) return undefined;
  const { workflowNameMap, jobNameMap, workflowFileMap } = triggerContext;
  return {
    name: "trigger-transform",
    transform: {
      filter: { id: { include: [/\.ts$/, /\.js$/] } },
      handler(code, id) {
        // Cheap substring check before paying for a full parse/transform.
        if (!code.includes(".trigger(")) return null;
        const transformed = transformFunctionTriggers(code, workflowNameMap, jobNameMap, workflowFileMap, id);
        return { code: transformed };
      }
    }
  };
}
1617
+
1618
+ //#endregion
1619
+ //#region src/cli/services/auth/bundler.ts
1620
/**
 * Bundle a single auth hook handler into dist/auth-hooks/.
 *
 * Follows the same pattern as the executor bundler:
 * 1. Generate an entry file that re-exports the handler as `main`
 * 2. Bundle with rolldown + tree-shaking
 * Results go through withCache, keyed on a context hash of the config source,
 * serialized trigger context, tsconfig, and sourcemap mode.
 * @param options - Bundle options
 */
async function bundleAuthHooks(options) {
  const { configPath, authName, handlerAccessPath, triggerContext, cache, inlineSourcemap } = options;
  logger.newline();
  logger.log(`Bundling auth hook for ${styles.info(`"${authName}"`)}`);
  const outputDir = path.resolve(getDistDir(), "auth-hooks");
  fs$1.mkdirSync(outputDir, { recursive: true });
  // Remove leftover *.entry.js files from previous runs before generating new ones.
  await removeStaleEntryFiles(outputDir);
  let tsconfig;
  try {
    tsconfig = await resolveTSConfig();
  } catch {
    // Best-effort: bundling proceeds without a tsconfig when resolution fails.
    tsconfig = void 0;
  }
  const functionName = `auth-hook--${authName}--before-login`;
  const outputPath = path.join(outputDir, `${functionName}.js`);
  const absoluteConfigPath = path.resolve(configPath);
  await withCache({
    cache,
    kind: "auth-hook",
    name: functionName,
    sourceFile: absoluteConfigPath,
    outputPath,
    // Hash over every input that can change the bundle; a cache hit skips build().
    contextHash: computeBundlerContextHash({
      sourceFile: absoluteConfigPath,
      serializedTriggerContext: serializeTriggerContext(triggerContext),
      tsconfig,
      inlineSourcemap
    }),
    async build(cachePlugins) {
      // Synthesized entry module re-exporting the configured handler as `main`.
      const entryPath = path.join(outputDir, `${functionName}.entry.js`);
      const entryContent = ml`
import _config from "${absoluteConfigPath}";
const __auth_hook_function = _config.${handlerAccessPath};
export { __auth_hook_function as main };
`;
      fs$1.writeFileSync(entryPath, entryContent);
      // Rewrite known workflow/job .trigger() calls during bundling when context exists.
      const triggerPlugin = createTriggerTransformPlugin(triggerContext);
      const plugins = triggerPlugin ? [triggerPlugin] : [];
      plugins.push(...cachePlugins);
      await rolldown.build(rolldown.defineConfig({
        input: entryPath,
        output: {
          file: outputPath,
          format: "esm",
          sourcemap: inlineSourcemap ? "inline" : true,
          // Inline-sourcemap builds keep original names for readable stack traces.
          minify: inlineSourcemap ? { mangle: { keepNames: true } } : true,
          codeSplitting: false
        },
        tsconfig,
        plugins,
        // Aggressive tree-shaking: the entry only needs the single handler export.
        treeshake: {
          moduleSideEffects: false,
          annotations: true,
          unknownGlobalSideEffects: false
        },
        logLevel: "silent"
      }));
    }
  });
  logger.log(`${styles.success("Bundled")} auth hook for ${styles.info(`"${authName}"`)}`);
}
1689
+
1690
+ //#endregion
1691
+ //#region src/parser/service/tailordb/hooks-validate-precompiled-expr.ts
1692
// Property key under which a precompiled script expression is stashed on a
// hook/validate function object.
const PRECOMPILED_EXPR_KEY = "__precompiledScriptExpr";
/**
 * Attach a precompiled script expression to a function object.
 * @param fn - Function metadata object.
 * @param expr - Precompiled script expression.
 */
function setPrecompiledScriptExpr(fn, expr) {
  fn[PRECOMPILED_EXPR_KEY] = expr;
}
/**
 * Read a precompiled script expression back off a function object.
 * Non-string values (including absence) yield undefined.
 * @param fn - Function metadata object.
 * @returns Precompiled script expression if attached.
 */
function getPrecompiledScriptExpr(fn) {
  const stored = fn[PRECOMPILED_EXPR_KEY];
  if (typeof stored === "string") return stored;
  return undefined;
}
1710
+
1711
+ //#endregion
1712
+ //#region src/parser/service/tailordb/field.ts
1713
// Script-runtime expression exposing the authenticated Tailor user to hook and
// validation expressions as `user` (id/type/workspaceId/attribute views).
const tailorUserMap = `{ id: user.id, type: user.type, workspaceId: user.workspace_id, attributes: user.attribute_map, attributeList: user.attributes }`;
1714
/**
 * Convert a function to a string representation.
 * Object-method shorthand (e.g. `create() { ... }`) is not a valid standalone
 * expression, so it is prefixed with `function` to become one
 * (e.g. `function create() { ... }`). All other forms pass through unchanged.
 * @param fn - Function to stringify
 * @returns Stringified function source
 */
const stringifyFunction = (fn) => {
  const src = fn.toString().trim();
  // Shorthand looks like `name(` but is not a `function` declaration, a
  // parenthesized parameter list, or an arrow function.
  const looksLikeMethodShorthand =
    /^[a-zA-Z_$][a-zA-Z0-9_$]*\s*\(/.test(src) &&
    !src.startsWith("function") &&
    !src.startsWith("(") &&
    !src.includes("=>");
  return looksLikeMethodShorthand ? `function ${src}` : src;
};
1726
/**
 * Convert a hook function to a script expression.
 * A precompiled expression attached to the function wins; otherwise the
 * function source is wrapped in an immediate call receiving the standard
 * hook context (value, data, user).
 * @param fn - Hook function
 * @returns JavaScript expression calling the hook
 */
const convertHookToExpr = (fn) => {
  const precompiled = getPrecompiledScriptExpr(fn);
  if (precompiled) return precompiled;
  const fnSource = stringifyFunction(fn);
  return `(${fnSource})({ value: _value, data: _data, user: ${tailorUserMap} })`;
};
1736
/**
 * Parse TailorDBField into OperatorFieldConfig.
 * This transforms user-defined functions (validators and hooks) into script
 * expressions, recursing into nested field definitions.
 * @param field - TailorDB field definition
 * @returns Parsed operator field configuration
 */
function parseFieldConfig(field) {
  const metadata = field.metadata;
  const fieldType = field.type;
  const rawRelation = field.rawRelation;
  const nestedFields = field.fields;
  return {
    type: fieldType,
    ...metadata,
    rawRelation,
    // Nested types recurse into their child field definitions.
    ...fieldType === "nested" && nestedFields && Object.keys(nestedFields).length > 0 ? { fields: Object.entries(nestedFields).reduce((acc, [key, nestedField]) => {
      acc[key] = parseFieldConfig(nestedField);
      return acc;
    }, {}) } : {},
    // Validators accept either a bare function or a [fn, message] tuple; bare
    // functions get an auto-generated error message from their source text.
    validate: metadata.validate?.map((v) => {
      const { fn, message } = typeof v === "function" ? {
        fn: v,
        message: `failed by \`${v.toString().trim()}\``
      } : {
        fn: v[0],
        message: v[1]
      };
      return {
        // A precompiled expression attached upstream takes precedence.
        script: { expr: getPrecompiledScriptExpr(fn) ?? `(${fn.toString().trim()})({ value: _value, data: _data, user: ${tailorUserMap} })` },
        errorMessage: message
      };
    }),
    hooks: metadata.hooks ? {
      create: metadata.hooks.create ? { expr: convertHookToExpr(metadata.hooks.create) } : void 0,
      update: metadata.hooks.update ? { expr: convertHookToExpr(metadata.hooks.update) } : void 0
    } : void 0,
    serial: metadata.serial ? {
      start: metadata.serial.start,
      maxValue: metadata.serial.maxValue,
      // `format` is an optional member of the serial config.
      format: "format" in metadata.serial ? metadata.serial.format : void 0
    } : void 0
  };
}
1779
+
1780
+ //#endregion
1781
+ //#region src/parser/service/tailordb/permission.ts
1782
// Maps user-facing permission condition operators to their internal names.
const operatorMap = {
  "=": "eq",
  "!=": "ne",
  in: "in",
  "not in": "nin",
  hasAny: "hasAny",
  "not hasAny": "nhasAny"
};
1790
// Normalize a condition operand: user references get their "id" field mapped
// to the internal "_id"; every other operand passes through unchanged.
function normalizeOperand(operand) {
  const isUserRef = typeof operand === "object" && "user" in operand;
  if (!isUserRef) return operand;
  const field = operand.user === "id" ? "_id" : operand.user;
  return { user: field };
}
1794
// Normalize a list of [left, operator, right] condition tuples: operands are
// normalized and the operator is translated via operatorMap.
function normalizeConditions(conditions) {
  const normalized = [];
  for (const [left, operator, right] of conditions) {
    normalized.push([
      normalizeOperand(left),
      operatorMap[operator],
      normalizeOperand(right)
    ]);
  }
  return normalized;
}
1804
// True when the permission is given in object form ({ conditions, ... }).
function isObjectFormat(p) {
  if (typeof p !== "object" || p === null) return false;
  return "conditions" in p;
}
1807
// True when the array is a single condition tuple ([operand, "op", operand, ...])
// rather than a list of tuples — detected by a string operator in slot 1.
function isSingleArrayConditionFormat(cond) {
  if (cond.length < 2) return false;
  return typeof cond[1] === "string";
}
1810
/**
 * Normalize record-level permissions into a standard structure.
 * Every action's permission list is normalized entry by entry.
 * @param permission - Tailor type permission
 * @returns Normalized record permissions
 */
function normalizePermission(permission) {
  const normalized = {};
  for (const action of Object.keys(permission)) {
    normalized[action] = permission[action].map((p) => normalizeActionPermission(p));
  }
  return normalized;
}
1821
/**
 * Normalize GraphQL permissions into a standard structure.
 * @param permission - Tailor GQL permission
 * @returns Normalized GQL permissions
 */
function normalizeGqlPermission(permission) {
  const normalized = [];
  for (const policy of permission) {
    normalized.push(normalizeGqlPolicy(policy));
  }
  return normalized;
}
1829
// Normalize a single GQL policy: missing conditions become an empty list, the
// "all" actions alias becomes ["all"], and permit is mapped to allow/deny.
function normalizeGqlPolicy(policy) {
  const { conditions, actions, permit, description } = policy;
  return {
    conditions: conditions ? normalizeConditions(conditions) : [],
    actions: actions === "all" ? ["all"] : actions,
    permit: permit ? "allow" : "deny",
    description
  };
}
1837
/**
 * Parse raw permissions into normalized permissions.
 * This is the main entry point for permission parsing in the parser layer.
 * Absent record/gql sections are simply omitted from the result.
 * @param rawPermissions - Raw permissions definition
 * @returns Normalized permissions
 */
function parsePermissions(rawPermissions) {
  const result = {};
  if (rawPermissions.record) {
    result.record = normalizePermission(rawPermissions.record);
  }
  if (rawPermissions.gql) {
    result.gql = normalizeGqlPermission(rawPermissions.gql);
  }
  return result;
}
1849
/**
 * Normalize a single action permission into the standard format.
 * Accepts three input shapes:
 *  - object form: { conditions, permit, description }
 *  - single condition tuple: [operand, operator, operand, permit?]
 *  - condition list: array of condition tuples, optionally containing boolean
 *    elements that set the permit flag for the whole list
 * @param permission - Raw permission definition
 * @returns Normalized action permission
 */
function normalizeActionPermission(permission) {
  if (isObjectFormat(permission)) {
    const conditions = permission.conditions;
    // A lone tuple is wrapped so normalizeConditions always receives a list.
    return {
      conditions: normalizeConditions(isSingleArrayConditionFormat(conditions) ? [conditions] : conditions),
      permit: permission.permit ? "allow" : "deny",
      description: permission.description
    };
  }
  if (!Array.isArray(permission)) throw new Error("Invalid permission format");
  if (isSingleArrayConditionFormat(permission)) {
    // Appending `true` supplies the default permit when the tuple has no 4th
    // element; an explicit 4th element shadows the appended default.
    const [op1, operator, op2, permit] = [...permission, true];
    return {
      conditions: normalizeConditions([[
        op1,
        operator,
        op2
      ]]),
      permit: permit ? "allow" : "deny"
    };
  }
  // Condition-list form: booleans interleaved in the array set the permit flag
  // (the last boolean wins); everything else is treated as a condition tuple.
  const conditions = [];
  const conditionArray = permission;
  let conditionArrayPermit = true;
  for (const item of conditionArray) {
    if (typeof item === "boolean") {
      conditionArrayPermit = item;
      continue;
    }
    conditions.push(item);
  }
  return {
    conditions: normalizeConditions(conditions),
    permit: conditionArrayPermit ? "allow" : "deny"
  };
}
1890
+
1891
+ //#endregion
1892
+ //#region src/parser/service/tailordb/relation.ts
1893
// Accepted relation type spellings normalized to their canonical form.
// Both shorthand ("1-1", "n-1", "N-1") and verbose ("oneToOne", "manyToOne")
// spellings are supported; "keyOnly" stores the key without a relation.
const relationTypes = {
  "1-1": "1-1",
  oneToOne: "1-1",
  "n-1": "n-1",
  manyToOne: "n-1",
  "N-1": "n-1",
  keyOnly: "keyOnly"
};
// Cached list of valid relation type keys, used in validation error messages.
const relationTypesKeys = Object.keys(relationTypes);
1902
// Human-readable reference to a field, used as the prefix of validation errors.
function fieldRef(context) {
  const { fieldName, typeName } = context;
  return `Field "${fieldName}" on type "${typeName}"`;
}
1905
/**
 * Validate relation configuration.
 * Throws when the relation type is missing or unknown, or when the target
 * type is neither "self" nor a known type name.
 * @param rawRelation - Raw relation configuration from TailorDB type definition
 * @param context - Context information for the relation (type name, field name, all type names)
 */
function validateRelationConfig(rawRelation, context) {
  const validValues = relationTypesKeys.join(", ");
  if (!rawRelation.type) {
    throw new Error(`${fieldRef(context)} has a relation but is missing the required 'type' property. Valid values: ${validValues}.`);
  }
  if (!(rawRelation.type in relationTypes)) {
    throw new Error(`${fieldRef(context)} has invalid relation type '${rawRelation.type}'. Valid values: ${validValues}.`);
  }
  const towardType = rawRelation.toward.type;
  if (towardType !== "self" && !context.allTypeNames.has(towardType)) {
    throw new Error(`${fieldRef(context)} references unknown type "${towardType}".`);
  }
}
1915
/**
 * Process raw relation config and compute derived metadata values.
 * Array fields never get index/unique; 1-1 relations imply a unique key.
 * @param rawRelation - Raw relation configuration
 * @param context - Context information for the relation
 * @param isArrayField - Whether the target field is an array field
 * @returns Computed relation metadata to apply to field config
 */
function processRelationMetadata(rawRelation, context, isArrayField = false) {
  // "self" relations point back at the declaring type.
  const foreignKeyType = rawRelation.toward.type === "self" ? context.typeName : rawRelation.toward.type;
  const foreignKeyField = rawRelation.toward.key ?? "id";
  const isOneToOne = relationTypes[rawRelation.type] === "1-1";
  return {
    index: !isArrayField,
    foreignKey: true,
    relationType: rawRelation.type,
    unique: !isArrayField && isOneToOne,
    foreignKeyType,
    foreignKeyField
  };
}
1937
/**
 * Build relation info for creating forward/backward relationships.
 * Returns undefined for keyOnly relations (they carry no relationship).
 * The forward name defaults to the field name stripped of its ID suffix for
 * self relations, or the camelized target type name otherwise.
 * @param rawRelation - Raw relation configuration
 * @param context - Context information for the relation
 * @returns Relation information or undefined for keyOnly relations
 */
function buildRelationInfo(rawRelation, context) {
  if (rawRelation.type === "keyOnly") return undefined;
  const { toward } = rawRelation;
  const isOneToOne = relationTypes[rawRelation.type] === "1-1";
  const key = toward.key ?? "id";
  const targetType = toward.type === "self" ? context.typeName : toward.type;
  let forwardName = toward.as;
  if (!forwardName) {
    forwardName = toward.type === "self"
      ? context.fieldName.replace(/(ID|Id|id)$/u, "")
      : inflection.camelize(targetType, true);
  }
  return {
    targetType,
    forwardName,
    backwardName: rawRelation.backward ?? "",
    key,
    unique: isOneToOne
  };
}
1960
/**
 * Apply processed relation metadata to field config.
 * Copies index/foreignKey/unique/foreignKeyType/foreignKeyField onto a new
 * config object; the original config is not mutated and relationType is
 * intentionally not carried over.
 * @param fieldConfig - Original operator field configuration
 * @param metadata - Processed relation metadata to apply
 * @returns Field config with relation metadata applied
 */
function applyRelationMetadataToFieldConfig(fieldConfig, metadata) {
  const { index, foreignKey, unique, foreignKeyType, foreignKeyField } = metadata;
  return {
    ...fieldConfig,
    index,
    foreignKey,
    unique,
    foreignKeyType,
    foreignKeyField
  };
}
1976
+
1977
+ //#endregion
1978
+ //#region src/parser/service/tailordb/type-parser.ts
1979
/**
 * Parse multiple TailorDB types, build relationships, and validate uniqueness.
 * This is the main entry point for parsing TailorDB types: each raw type is
 * parsed individually, then backward relationships are wired up and GraphQL
 * query-name uniqueness is checked across the whole set.
 * @param rawTypes - Raw TailorDB types keyed by name
 * @param namespace - TailorDB namespace name
 * @param typeSourceInfo - Optional type source information
 * @returns Parsed types
 */
function parseTypes(rawTypes, namespace, typeSourceInfo) {
  const allTypeNames = new Set(Object.keys(rawTypes));
  const types = {};
  for (const [typeName, rawType] of Object.entries(rawTypes)) {
    types[typeName] = parseTailorDBType(rawType, allTypeNames, rawTypes);
  }
  buildBackwardRelationships(types, namespace, typeSourceInfo);
  validatePluralFormUniqueness(types, namespace, typeSourceInfo);
  return types;
}
1995
/**
 * Parse a TailorDBTypeSchemaOutput into a TailorDBType.
 * Parses every field, validates relation configuration, applies derived
 * relation metadata, and records forward relationships.
 * @param type - TailorDB type to parse
 * @param allTypeNames - Set of all TailorDB type names
 * @param rawTypes - All raw TailorDB types keyed by name
 * @returns Parsed TailorDB type
 */
function parseTailorDBType(type, allTypeNames, rawTypes) {
  const metadata = type.metadata;
  // Explicit pluralForm setting wins; otherwise pluralize the type name.
  const pluralForm = metadata.settings?.pluralForm || inflection.pluralize(type.name);
  const fields = {};
  const forwardRelationships = {};
  for (const [fieldName, fieldDef] of Object.entries(type.fields)) {
    let fieldConfig = parseFieldConfig(fieldDef);
    const rawRelation = fieldConfig.rawRelation;
    const context = {
      typeName: type.name,
      fieldName,
      allTypeNames
    };
    if (rawRelation) {
      validateRelationConfig(rawRelation, context);
      // unique + many-to-one is contradictory; direct users to 1-1 instead.
      if ([
        "n-1",
        "manyToOne",
        "N-1"
      ].includes(rawRelation.type) && fieldConfig.unique) throw new Error(`Field "${fieldName}" on type "${type.name}": cannot set unique on n-1 (manyToOne) relation. Use 1-1 (oneToOne) relation instead, or remove the unique constraint.`);
      const relationMetadata = processRelationMetadata(rawRelation, context, fieldConfig.array);
      fieldConfig = applyRelationMetadataToFieldConfig(fieldConfig, relationMetadata);
    }
    // Array fields never support index/unique, whether relation-derived or user-set.
    if (fieldConfig.array && fieldConfig.index) throw new Error(`Field "${fieldName}" on type "${type.name}": index cannot be set on array fields`);
    if (fieldConfig.array && fieldConfig.unique) throw new Error(`Field "${fieldName}" on type "${type.name}": unique cannot be set on array fields`);
    const parsedField = {
      name: fieldName,
      config: fieldConfig
    };
    // keyOnly relations yield no relation info and thus no forward relationship.
    const relationInfo = rawRelation ? buildRelationInfo(rawRelation, context) : void 0;
    if (relationInfo) {
      parsedField.relation = { ...relationInfo };
      const targetType = rawTypes[relationInfo.targetType];
      forwardRelationships[relationInfo.forwardName] = {
        name: relationInfo.forwardName,
        targetType: relationInfo.targetType,
        targetField: fieldName,
        sourceField: relationInfo.key,
        isArray: false,
        description: targetType?.metadata?.description || ""
      };
    }
    fields[fieldName] = parsedField;
  }
  return {
    name: type.name,
    pluralForm,
    description: metadata.description,
    fields,
    forwardRelationships,
    // Filled in afterwards by buildBackwardRelationships.
    backwardRelationships: {},
    settings: metadata.settings || {},
    permissions: parsePermissions(metadata.permissions || {}),
    indexes: metadata.indexes,
    files: metadata.files
  };
}
2059
/**
 * Build backward relationships between parsed types.
 * For every forward relation A.field -> B, a backward relationship is added on
 * B pointing back at A. Also validates that backward relation names are unique
 * within each type and do not collide with declared fields or files fields.
 * @param types - Parsed types
 * @param namespace - TailorDB namespace name
 * @param typeSourceInfo - Optional type source information
 */
function buildBackwardRelationships(types, namespace, typeSourceInfo) {
  // Per target type: which (sourceType, field) pairs produced each backward name.
  const backwardNameSources = {};
  for (const typeName of Object.keys(types)) backwardNameSources[typeName] = {};
  for (const [typeName, type] of Object.entries(types)) for (const [otherTypeName, otherType] of Object.entries(types)) for (const [fieldName, field] of Object.entries(otherType.fields)) if (field.relation && field.relation.targetType === typeName) {
    let backwardName = field.relation.backwardName;
    if (!backwardName) {
      // Default: camelized source type name — singular for unique (1-1)
      // relations, plural otherwise.
      const lowerName = inflection.camelize(otherTypeName, true);
      backwardName = field.relation.unique ? inflection.singularize(lowerName) : inflection.pluralize(lowerName);
    }
    if (!backwardNameSources[typeName][backwardName]) backwardNameSources[typeName][backwardName] = [];
    backwardNameSources[typeName][backwardName].push({
      sourceType: otherTypeName,
      fieldName
    });
    type.backwardRelationships[backwardName] = {
      name: backwardName,
      targetType: otherTypeName,
      targetField: fieldName,
      sourceField: field.relation.key,
      isArray: !field.relation.unique,
      description: otherType.description || ""
    };
  }
  // Collect all conflicts before failing so the user sees every problem at once.
  const errors = [];
  for (const [targetTypeName, backwardNames] of Object.entries(backwardNameSources)) {
    const targetType = types[targetTypeName];
    const targetTypeSourceInfo = typeSourceInfo?.[targetTypeName];
    const targetLocation = targetTypeSourceInfo ? isPluginGeneratedType(targetTypeSourceInfo) ? ` (plugin: ${targetTypeSourceInfo.pluginId})` : ` (${targetTypeSourceInfo.filePath})` : "";
    for (const [backwardName, sources] of Object.entries(backwardNames)) {
      // Case 1: two or more relations derived the same backward name.
      if (sources.length > 1) {
        const sourceList = sources.map((s) => {
          const sourceInfo = typeSourceInfo?.[s.sourceType];
          const location = sourceInfo ? isPluginGeneratedType(sourceInfo) ? ` (plugin: ${sourceInfo.pluginId})` : ` (${sourceInfo.filePath})` : "";
          return `${s.sourceType}.${s.fieldName}${location}`;
        }).join(", ");
        errors.push(`Backward relation name "${backwardName}" on type "${targetTypeName}" is duplicated from: ${sourceList}. Use the "backward" option in .relation() to specify unique names.`);
      }
      // Case 2: backward name shadows a declared field on the target type.
      if (backwardName in targetType.fields) {
        const source = sources[0];
        const sourceInfo = typeSourceInfo?.[source.sourceType];
        const sourceLocation = sourceInfo ? isPluginGeneratedType(sourceInfo) ? ` (plugin: ${sourceInfo.pluginId})` : ` (${sourceInfo.filePath})` : "";
        errors.push(`Backward relation name "${backwardName}" from ${source.sourceType}.${source.fieldName}${sourceLocation} conflicts with existing field "${backwardName}" on type "${targetTypeName}"${targetLocation}. Use the "backward" option in .relation() to specify a different name.`);
      }
      // Case 3: backward name shadows a files field on the target type.
      if (targetType.files && backwardName in targetType.files) {
        const source = sources[0];
        const sourceInfo = typeSourceInfo?.[source.sourceType];
        const sourceLocation = sourceInfo ? isPluginGeneratedType(sourceInfo) ? ` (plugin: ${sourceInfo.pluginId})` : ` (${sourceInfo.filePath})` : "";
        errors.push(`Backward relation name "${backwardName}" from ${source.sourceType}.${source.fieldName}${sourceLocation} conflicts with files field "${backwardName}" on type "${targetTypeName}"${targetLocation}. Use the "backward" option in .relation() to specify a different name.`);
      }
    }
  }
  if (errors.length > 0) throw new Error(`Backward relation name conflicts detected in TailorDB service "${namespace}".\n${errors.map((e) => ` - ${e}`).join("\n")}`);
}
2119
/**
 * Validate GraphQL query field name uniqueness.
 * Checks for:
 * 1. Each type's singular query name != plural query name
 * 2. No duplicate query names across all types
 * All conflicts are collected and reported in a single error.
 * @param types - Parsed types
 * @param namespace - TailorDB namespace name
 * @param typeSourceInfo - Optional type source information
 */
function validatePluralFormUniqueness(types, namespace, typeSourceInfo) {
  const errors = [];
  // Check 1: a type whose singular and plural query names collide with each other.
  for (const [, parsedType] of Object.entries(types)) {
    const singularQuery = inflection.camelize(parsedType.name, true);
    if (singularQuery === inflection.camelize(parsedType.pluralForm, true)) {
      const sourceInfo = typeSourceInfo?.[parsedType.name];
      const location = sourceInfo ? isPluginGeneratedType(sourceInfo) ? ` (plugin: ${sourceInfo.pluginId})` : ` (${sourceInfo.filePath})` : "";
      errors.push(`Type "${parsedType.name}"${location} has identical singular and plural query names "${singularQuery}". Use db.type(["${parsedType.name}", "UniquePluralForm"], {...}) to set a unique pluralForm.`);
    }
  }
  // Check 2: index every query name to detect cross-type collisions.
  const queryNameToSource = {};
  for (const parsedType of Object.values(types)) {
    const singularQuery = inflection.camelize(parsedType.name, true);
    const pluralQuery = inflection.camelize(parsedType.pluralForm, true);
    if (!queryNameToSource[singularQuery]) queryNameToSource[singularQuery] = [];
    queryNameToSource[singularQuery].push({
      typeName: parsedType.name,
      kind: "singular"
    });
    // Avoid double-registering a type whose singular == plural (already reported above).
    if (singularQuery !== pluralQuery) {
      if (!queryNameToSource[pluralQuery]) queryNameToSource[pluralQuery] = [];
      queryNameToSource[pluralQuery].push({
        typeName: parsedType.name,
        kind: "plural"
      });
    }
  }
  const duplicates = Object.entries(queryNameToSource).filter(([, sources]) => sources.length > 1);
  for (const [queryName, sources] of duplicates) {
    const sourceList = sources.map((s) => {
      const sourceInfo = typeSourceInfo?.[s.typeName];
      const location = sourceInfo ? isPluginGeneratedType(sourceInfo) ? ` (plugin: ${sourceInfo.pluginId})` : ` (${sourceInfo.filePath})` : "";
      return `"${s.typeName}"${location} (${s.kind})`;
    }).join(", ");
    errors.push(`GraphQL query field "${queryName}" conflicts between: ${sourceList}`);
  }
  if (errors.length > 0) throw new Error(`GraphQL field name conflicts detected in TailorDB service "${namespace}".\n${errors.map((e) => ` - ${e}`).join("\n")}`);
}
2166
+
2167
+ //#endregion
2168
+ //#region src/parser/service/common.ts
2169
+ const functionSchema = z.custom((val) => typeof val === "function");
2170
+
2171
+ //#endregion
2172
+ //#region src/parser/service/tailordb/schema.ts
2173
+ /**
2174
+ * Normalize GqlOperationsConfig (alias or object) to GqlOperations object.
2175
+ * "query" alias expands to read-only mode: { create: false, update: false, delete: false, read: true }
2176
+ * @param config - The config to normalize
2177
+ * @returns The normalized GqlOperations object
2178
+ */
2179
+ function normalizeGqlOperations(config) {
2180
+ if (config === "query") return {
2181
+ create: false,
2182
+ update: false,
2183
+ delete: false,
2184
+ read: true
2185
+ };
2186
+ return config;
2187
+ }
2188
+ /**
2189
+ * Zod schema for GqlOperations configuration with normalization transform.
2190
+ * Accepts "query" alias or detailed object, normalizes to GqlOperations object.
2191
+ */
2192
+ const GqlOperationsSchema = z.union([z.literal("query"), z.object({
2193
+ create: z.boolean().optional().describe("Enable create mutation (default: true)"),
2194
+ update: z.boolean().optional().describe("Enable update mutation (default: true)"),
2195
+ delete: z.boolean().optional().describe("Enable delete mutation (default: true)"),
2196
+ read: z.boolean().optional().describe("Enable read queries - get, list, aggregation (default: true)")
2197
+ })]).describe("Configuration for GraphQL operations on a TailorDB type.\nAll operations are enabled by default (undefined or true = enabled, false = disabled).").transform((val) => normalizeGqlOperations(val));
2198
+ const TailorFieldTypeSchema$1 = z.enum([
2199
+ "uuid",
2200
+ "string",
2201
+ "boolean",
2202
+ "integer",
2203
+ "float",
2204
+ "decimal",
2205
+ "enum",
2206
+ "date",
2207
+ "datetime",
2208
+ "time",
2209
+ "nested"
2210
+ ]);
2211
+ const AllowedValueSchema$1 = z.object({
2212
+ value: z.string(),
2213
+ description: z.string().optional()
2214
+ });
2215
+ const DBFieldMetadataSchema = z.object({
2216
+ required: z.boolean().optional().describe("Whether the field is required"),
2217
+ array: z.boolean().optional().describe("Whether the field is an array"),
2218
+ description: z.string().optional().describe("Field description"),
2219
+ typeName: z.string().optional().describe("Type name for nested or enum fields"),
2220
+ allowedValues: z.array(AllowedValueSchema$1).optional().describe("Allowed values for enum fields"),
2221
+ index: z.boolean().optional().describe("Whether the field is indexed for faster queries"),
2222
+ unique: z.boolean().optional().describe("Whether the field value must be unique"),
2223
+ vector: z.boolean().optional().describe("Whether the field is a vector field for similarity search"),
2224
+ foreignKey: z.boolean().optional().describe("Whether the field is a foreign key"),
2225
+ foreignKeyType: z.string().optional().describe("Target type name for foreign key relations"),
2226
+ foreignKeyField: z.string().optional().describe("Target field name for foreign key relations"),
2227
+ hooks: z.object({
2228
+ create: functionSchema.optional().describe("Hook function called on record creation"),
2229
+ update: functionSchema.optional().describe("Hook function called on record update")
2230
+ }).optional().describe("Lifecycle hooks for the field"),
2231
+ validate: z.array(z.union([functionSchema, z.tuple([functionSchema, z.string()])])).optional().describe("Validation functions for the field"),
2232
+ serial: z.object({
2233
+ start: z.number().describe("Starting value for the serial sequence"),
2234
+ maxValue: z.number().optional().describe("Maximum value for the serial sequence"),
2235
+ format: z.string().optional().describe("Format string for serial value (string type only)")
2236
+ }).optional().describe("Serial (auto-increment) configuration"),
2237
+ scale: z.number().int().min(0).max(12).optional().describe("Decimal scale (number of digits after decimal point, 0-12)")
2238
+ });
2239
+ const RelationTypeSchema = z.enum(relationTypesKeys);
2240
+ const RawRelationConfigSchema = z.object({
2241
+ type: RelationTypeSchema.describe("Relation cardinality type"),
2242
+ toward: z.object({
2243
+ type: z.string().describe("Target type name, or 'self' for self-relations"),
2244
+ as: z.string().optional().describe("Custom forward relation name"),
2245
+ key: z.string().optional().describe("Target field to join on (default: 'id')")
2246
+ }),
2247
+ backward: z.string().optional().describe("Backward relation name on the target type")
2248
+ });
2249
+ const TailorDBFieldSchema = z.lazy(() => z.object({
2250
+ type: TailorFieldTypeSchema$1,
2251
+ fields: z.record(z.string(), TailorDBFieldSchema).optional(),
2252
+ metadata: DBFieldMetadataSchema,
2253
+ rawRelation: RawRelationConfigSchema.optional()
2254
+ }));
2255
+ /**
2256
+ * Schema for TailorDB type settings.
2257
+ * Normalizes gqlOperations from alias ("query") to object format.
2258
+ */
2259
+ const TailorDBTypeSettingsSchema = z.object({
2260
+ pluralForm: z.string().optional().describe("Custom plural form of the type name for GraphQL"),
2261
+ aggregation: z.boolean().optional().describe("Enable aggregation queries for this type"),
2262
+ bulkUpsert: z.boolean().optional().describe("Enable bulk upsert mutation for this type"),
2263
+ gqlOperations: GqlOperationsSchema.optional().describe("Configure GraphQL operations for this type. Use \"query\" for read-only mode, or an object for granular control."),
2264
+ publishEvents: z.boolean().optional().describe("Enable publishing events for this type.\nWhen enabled, record creation/update/deletion events are published.\nIf not specified, this is automatically set to true when an executor uses this type\nwith recordCreated/recordUpdated/recordDeleted triggers. If explicitly set to false\nwhile an executor uses this type, an error will be thrown during apply.")
2265
+ });
2266
+ const GQL_PERMISSION_INVALID_OPERAND_MESSAGE = "operand is not supported in gqlPermission. Use permission() for record-level conditions.";
2267
+ const GqlPermissionOperandSchema = z.union([
2268
+ z.object({ user: z.string() }).strict(),
2269
+ z.string(),
2270
+ z.boolean(),
2271
+ z.array(z.string()),
2272
+ z.array(z.boolean())
2273
+ ], { error: (issue) => {
2274
+ if (typeof issue.input === "object" && issue.input !== null) {
2275
+ const keys = Object.keys(issue.input);
2276
+ if (keys.length === 1) return `"${keys[0]}" ${GQL_PERMISSION_INVALID_OPERAND_MESSAGE}`;
2277
+ return "Operand object must have exactly 1 key";
2278
+ }
2279
+ return "Invalid operand in gqlPermission";
2280
+ } });
2281
+ const RecordPermissionOperandSchema = z.union([
2282
+ GqlPermissionOperandSchema,
2283
+ z.object({ record: z.string() }),
2284
+ z.object({ oldRecord: z.string() }),
2285
+ z.object({ newRecord: z.string() })
2286
+ ]);
2287
+ const PermissionOperatorSchema = z.enum([
2288
+ "=",
2289
+ "!=",
2290
+ "in",
2291
+ "not in",
2292
+ "hasAny",
2293
+ "not hasAny"
2294
+ ]);
2295
+ const RecordPermissionConditionSchema = z.tuple([
2296
+ RecordPermissionOperandSchema,
2297
+ PermissionOperatorSchema,
2298
+ RecordPermissionOperandSchema
2299
+ ]).readonly();
2300
+ const GqlPermissionConditionSchema = z.tuple([
2301
+ GqlPermissionOperandSchema,
2302
+ PermissionOperatorSchema,
2303
+ GqlPermissionOperandSchema
2304
+ ]).readonly();
2305
+ const ActionPermissionSchema = z.union([
2306
+ z.object({
2307
+ conditions: z.union([RecordPermissionConditionSchema, z.array(RecordPermissionConditionSchema).readonly()]),
2308
+ description: z.string().optional(),
2309
+ permit: z.boolean().optional()
2310
+ }),
2311
+ z.tuple([
2312
+ RecordPermissionOperandSchema,
2313
+ PermissionOperatorSchema,
2314
+ RecordPermissionOperandSchema
2315
+ ]).readonly(),
2316
+ z.tuple([
2317
+ RecordPermissionOperandSchema,
2318
+ PermissionOperatorSchema,
2319
+ RecordPermissionOperandSchema,
2320
+ z.boolean()
2321
+ ]).readonly(),
2322
+ z.array(z.union([RecordPermissionConditionSchema, z.boolean()])).refine((arr) => {
2323
+ const boolIndex = arr.findIndex((item) => typeof item === "boolean");
2324
+ return boolIndex === -1 || boolIndex === arr.length - 1;
2325
+ }, { message: "Boolean permit flag must only appear at the end" }).readonly()
2326
+ ]);
2327
+ const GqlPermissionActionSchema = z.enum([
2328
+ "read",
2329
+ "create",
2330
+ "update",
2331
+ "delete",
2332
+ "aggregate",
2333
+ "bulkUpsert"
2334
+ ]);
2335
+ const GqlPermissionPolicySchema = z.object({
2336
+ conditions: z.array(GqlPermissionConditionSchema).readonly(),
2337
+ actions: z.union([z.literal("all"), z.array(GqlPermissionActionSchema).readonly()]),
2338
+ permit: z.boolean().optional(),
2339
+ description: z.string().optional()
2340
+ });
2341
+ const RawPermissionsSchema = z.object({
2342
+ record: z.object({
2343
+ create: z.array(ActionPermissionSchema).readonly(),
2344
+ read: z.array(ActionPermissionSchema).readonly(),
2345
+ update: z.array(ActionPermissionSchema).readonly(),
2346
+ delete: z.array(ActionPermissionSchema).readonly()
2347
+ }).optional(),
2348
+ gql: z.array(GqlPermissionPolicySchema).readonly().optional()
2349
+ });
2350
+ const TailorDBTypeSchema = z.object({
2351
+ name: z.string(),
2352
+ fields: z.record(z.string(), TailorDBFieldSchema),
2353
+ metadata: z.object({
2354
+ name: z.string(),
2355
+ description: z.string().optional(),
2356
+ settings: TailorDBTypeSettingsSchema.optional(),
2357
+ permissions: RawPermissionsSchema,
2358
+ files: z.record(z.string(), z.string()),
2359
+ indexes: z.record(z.string(), z.object({
2360
+ fields: z.array(z.string()),
2361
+ unique: z.boolean().optional()
2362
+ })).optional()
2363
+ })
2364
+ });
2365
+ const TailorDBMigrationConfigSchema = z.object({
2366
+ directory: z.string().describe("Directory containing migration files"),
2367
+ machineUser: z.string().optional().describe("Machine user name for migration execution")
2368
+ });
2369
+ /**
2370
+ * Schema for TailorDB service configuration.
2371
+ * Normalizes gqlOperations from alias ("query") to object format.
2372
+ */
2373
+ const TailorDBServiceConfigSchema = z.object({
2374
+ files: z.array(z.string()).describe("Glob patterns for TailorDB type definition files"),
2375
+ ignores: z.array(z.string()).optional().describe("Glob patterns to exclude from type discovery"),
2376
+ erdSite: z.string().optional().describe("URL for the ERD (Entity Relationship Diagram) site"),
2377
+ migration: TailorDBMigrationConfigSchema.optional().describe("Migration configuration"),
2378
+ gqlOperations: GqlOperationsSchema.optional().describe("Default GraphQL operations for all types in this service")
2379
+ });
2380
+
2381
+ //#endregion
2382
+ //#region src/cli/services/tailordb/es-builtins.ts
2383
+ const globalsMap = globals.default ?? globals;
2384
+ /**
2385
+ * Runtime globals available in the PF execution environment.
2386
+ * Identifiers in this set are excluded from free variable detection
2387
+ * since they are always available in the runtime environment.
2388
+ *
2389
+ * Combines globals.builtin (ECMAScript language builtins) and
2390
+ * globals['shared-node-browser'] (shared runtime globals like
2391
+ * console, fetch, setTimeout, etc.) from the `globals` npm package.
2392
+ */
2393
+ const ES_BUILTINS = new Set([...Object.keys(globalsMap.builtin ?? {}), ...Object.keys(globalsMap["shared-node-browser"] ?? {})]);
2394
+
2395
+ //#endregion
2396
+ //#region src/cli/services/tailordb/hooks-validate-bundler.ts
2397
+ /**
2398
+ * Recursively extract binding names from a destructuring pattern node.
2399
+ * @param pattern - The binding pattern AST node.
2400
+ * @param bindings - Set to collect binding names into.
2401
+ */
2402
+ function collectBindingsFromPattern(pattern, bindings) {
2403
+ switch (pattern.type) {
2404
+ case "Identifier":
2405
+ bindings.add(pattern.name);
2406
+ break;
2407
+ case "ObjectPattern":
2408
+ for (const prop of pattern.properties) if (prop.type === "RestElement") collectBindingsFromPattern(prop.argument, bindings);
2409
+ else collectBindingsFromPattern(prop.value, bindings);
2410
+ break;
2411
+ case "ArrayPattern":
2412
+ for (const elem of pattern.elements) if (elem) if (elem.type === "RestElement") collectBindingsFromPattern(elem.argument, bindings);
2413
+ else collectBindingsFromPattern(elem, bindings);
2414
+ break;
2415
+ case "AssignmentPattern":
2416
+ collectBindingsFromPattern(pattern.left, bindings);
2417
+ break;
2418
+ }
2419
+ }
2420
+ /** Fields that contain TypeScript type annotations (not runtime references). */
2421
+ const TS_TYPE_FIELDS = new Set([
2422
+ "typeAnnotation",
2423
+ "typeParameters",
2424
+ "returnType",
2425
+ "superTypeArguments",
2426
+ "typeArguments"
2427
+ ]);
2428
+ function isBindingPattern(param) {
2429
+ return param.type !== "TSParameterProperty";
2430
+ }
2431
+ function toScriptFunction(value) {
2432
+ if (typeof value !== "function") return void 0;
2433
+ return value;
2434
+ }
2435
+ function collectScriptTargets(type) {
2436
+ const targets = [];
2437
+ const collectFieldTargets = (field) => {
2438
+ const metadata = field.metadata;
2439
+ const createHook = toScriptFunction(metadata.hooks?.create);
2440
+ if (createHook) targets.push({
2441
+ fn: createHook,
2442
+ kind: "hooks"
2443
+ });
2444
+ const updateHook = toScriptFunction(metadata.hooks?.update);
2445
+ if (updateHook) targets.push({
2446
+ fn: updateHook,
2447
+ kind: "hooks"
2448
+ });
2449
+ for (const validateInput of metadata.validate ?? []) if (typeof validateInput === "function") {
2450
+ const validateFn = toScriptFunction(validateInput);
2451
+ if (validateFn) targets.push({
2452
+ fn: validateFn,
2453
+ kind: "validate"
2454
+ });
2455
+ } else {
2456
+ const validateFn = toScriptFunction(validateInput[0]);
2457
+ if (validateFn) targets.push({
2458
+ fn: validateFn,
2459
+ kind: "validate"
2460
+ });
2461
+ }
2462
+ if (field.type === "nested" && field.fields) for (const nestedField of Object.values(field.fields)) collectFieldTargets(nestedField);
2463
+ };
2464
+ for (const field of Object.values(type.fields)) collectFieldTargets(field);
2465
+ return targets;
2466
+ }
2467
+ /**
2468
+ * Parse a code string with oxc-parser and return identifiers that are referenced
2469
+ * but never bound anywhere in the snippet (free variables), excluding ES builtins.
2470
+ * @param code - Valid JavaScript code to analyze.
2471
+ * @returns Set of undefined variable names.
2472
+ */
2473
+ function findUndefinedReferences(code) {
2474
+ const { program } = parseSync("_.js", code);
2475
+ const references = /* @__PURE__ */ new Set();
2476
+ const bindings = /* @__PURE__ */ new Set();
2477
+ const walk = (node) => {
2478
+ if (!node) return;
2479
+ switch (node.type) {
2480
+ case "VariableDeclarator":
2481
+ collectBindingsFromPattern(node.id, bindings);
2482
+ walk(node.init);
2483
+ return;
2484
+ case "FunctionDeclaration":
2485
+ case "FunctionExpression":
2486
+ if (node.id) bindings.add(node.id.name);
2487
+ for (const param of node.params) if (isBindingPattern(param)) {
2488
+ collectBindingsFromPattern(param, bindings);
2489
+ walk(param);
2490
+ }
2491
+ walk(node.body);
2492
+ return;
2493
+ case "ArrowFunctionExpression":
2494
+ for (const param of node.params) if (isBindingPattern(param)) {
2495
+ collectBindingsFromPattern(param, bindings);
2496
+ walk(param);
2497
+ }
2498
+ walk(node.body);
2499
+ return;
2500
+ case "ClassDeclaration":
2501
+ case "ClassExpression":
2502
+ if (node.id) bindings.add(node.id.name);
2503
+ walk(node.superClass);
2504
+ walk(node.body);
2505
+ return;
2506
+ case "CatchClause":
2507
+ if (node.param) collectBindingsFromPattern(node.param, bindings);
2508
+ walk(node.body);
2509
+ return;
2510
+ case "MemberExpression":
2511
+ walk(node.object);
2512
+ if (node.computed) walk(node.property);
2513
+ return;
2514
+ case "Property":
2515
+ if (node.computed) walk(node.key);
2516
+ walk(node.value);
2517
+ return;
2518
+ case "LabeledStatement":
2519
+ walk(node.body);
2520
+ return;
2521
+ case "Identifier":
2522
+ references.add(node.name);
2523
+ return;
2524
+ }
2525
+ const rec = node;
2526
+ for (const [key, value] of Object.entries(rec)) {
2527
+ if (key === "type" || TS_TYPE_FIELDS.has(key)) continue;
2528
+ if (Array.isArray(value)) for (const item of value) walk(item);
2529
+ else if (value && typeof value === "object" && "type" in value) walk(value);
2530
+ }
2531
+ };
2532
+ walk(program);
2533
+ const freeVars = /* @__PURE__ */ new Set();
2534
+ for (const ref of references) if (!bindings.has(ref) && !ES_BUILTINS.has(ref)) freeVars.add(ref);
2535
+ return freeVars;
2536
+ }
2537
+ /**
2538
+ * Collect all Identifier names from a TypeScript/JavaScript code string using oxc-parser.
2539
+ * @param code - Code string to analyze.
2540
+ * @returns Set of identifier names found in the code.
2541
+ */
2542
+ function collectIdentifierNames(code) {
2543
+ const { program } = parseSync("_.ts", code);
2544
+ const names = /* @__PURE__ */ new Set();
2545
+ const walk = (node) => {
2546
+ if (!node || typeof node !== "object") return;
2547
+ const record = node;
2548
+ if (record.type === "Identifier" && typeof record.name === "string") names.add(record.name);
2549
+ for (const [key, value] of Object.entries(record)) {
2550
+ if (key === "property" && record.type === "MemberExpression" && !record.computed) continue;
2551
+ if (key === "key" && record.type === "Property" && !record.computed) continue;
2552
+ if (TS_TYPE_FIELDS.has(key)) continue;
2553
+ if (Array.isArray(value)) for (const item of value) walk(item);
2554
+ else if (value && typeof value === "object" && "type" in value) walk(value);
2555
+ }
2556
+ };
2557
+ walk(program);
2558
+ return names;
2559
+ }
2560
+ /**
2561
+ * Collect top-level bindings (imports and declarations) from a TypeScript source file.
2562
+ * @param sourceFilePath - Absolute path to the source file.
2563
+ * @returns Map of binding name to SourceBinding.
2564
+ */
2565
+ function collectSourceBindings(sourceFilePath) {
2566
+ const source = readFileSync(sourceFilePath, "utf-8");
2567
+ const { program } = parseSync(sourceFilePath, source);
2568
+ const bindings = /* @__PURE__ */ new Map();
2569
+ for (const stmt of program.body) if (stmt.type === "ImportDeclaration") {
2570
+ const importDecl = stmt;
2571
+ const text = source.slice(importDecl.start, importDecl.end);
2572
+ if (importDecl.specifiers) for (const spec of importDecl.specifiers) bindings.set(spec.local.name, {
2573
+ name: spec.local.name,
2574
+ sourceText: text,
2575
+ kind: "import"
2576
+ });
2577
+ } else if (stmt.type === "VariableDeclaration") {
2578
+ const varDecl = stmt;
2579
+ const text = source.slice(varDecl.start, varDecl.end);
2580
+ for (const decl of varDecl.declarations) if (decl.id.type === "Identifier") bindings.set(decl.id.name, {
2581
+ name: decl.id.name,
2582
+ sourceText: text,
2583
+ kind: "declaration"
2584
+ });
2585
+ } else if (stmt.type === "FunctionDeclaration") {
2586
+ const funcDecl = stmt;
2587
+ if (funcDecl.id) {
2588
+ const text = source.slice(funcDecl.start, funcDecl.end);
2589
+ bindings.set(funcDecl.id.name, {
2590
+ name: funcDecl.id.name,
2591
+ sourceText: text,
2592
+ kind: "declaration"
2593
+ });
2594
+ }
2595
+ } else if (stmt.type === "ExportNamedDeclaration") {
2596
+ const innerDecl = stmt.declaration;
2597
+ if (!innerDecl) continue;
2598
+ if (innerDecl.type === "VariableDeclaration") {
2599
+ const varDecl = innerDecl;
2600
+ const text = source.slice(varDecl.start, varDecl.end);
2601
+ for (const decl of varDecl.declarations) if (decl.id.type === "Identifier") bindings.set(decl.id.name, {
2602
+ name: decl.id.name,
2603
+ sourceText: text,
2604
+ kind: "declaration"
2605
+ });
2606
+ } else if (innerDecl.type === "FunctionDeclaration") {
2607
+ const funcDecl = innerDecl;
2608
+ if (funcDecl.id) {
2609
+ const text = source.slice(funcDecl.start, funcDecl.end);
2610
+ bindings.set(funcDecl.id.name, {
2611
+ name: funcDecl.id.name,
2612
+ sourceText: text,
2613
+ kind: "declaration"
2614
+ });
2615
+ }
2616
+ }
2617
+ }
2618
+ return bindings;
2619
+ }
2620
+ /**
2621
+ * Resolve all bindings needed by a function, recursively including
2622
+ * dependencies of top-level declarations.
2623
+ * @param freeVars - Set of free variable names extracted from the function.
2624
+ * @param sourceBindings - Available bindings from the source file.
2625
+ * @returns Object with needed import statements and declaration texts.
2626
+ */
2627
+ function resolveNeededBindings(freeVars, sourceBindings) {
2628
+ const neededImports = /* @__PURE__ */ new Set();
2629
+ const neededDeclarations = /* @__PURE__ */ new Set();
2630
+ const unresolved = [];
2631
+ const resolved = /* @__PURE__ */ new Set();
2632
+ const resolveVars = (vars) => {
2633
+ for (const varName of vars) {
2634
+ if (resolved.has(varName)) continue;
2635
+ resolved.add(varName);
2636
+ const binding = sourceBindings.get(varName);
2637
+ if (!binding) {
2638
+ unresolved.push(varName);
2639
+ continue;
2640
+ }
2641
+ if (binding.kind === "import") neededImports.add(binding.sourceText);
2642
+ else {
2643
+ const identifiers = collectIdentifierNames(binding.sourceText);
2644
+ const referencedVars = /* @__PURE__ */ new Set();
2645
+ for (const id of identifiers) if (id !== varName && sourceBindings.has(id)) referencedVars.add(id);
2646
+ resolveVars(referencedVars);
2647
+ neededDeclarations.add(binding.sourceText);
2648
+ }
2649
+ }
2650
+ };
2651
+ resolveVars(freeVars);
2652
+ return {
2653
+ imports: [...neededImports],
2654
+ declarations: [...neededDeclarations],
2655
+ unresolved
2656
+ };
2657
+ }
2658
+ function buildPrecompiledExpr(bundleCode) {
2659
+ return `(() => {
2660
+ const module = { exports: {} };
2661
+ const exports = module.exports;
2662
+ ${bundleCode}\n return module.exports.main({ value: _value, data: _data, user: ${tailorUserMap} });\n})()`;
2663
+ }
2664
+ /**
2665
+ * Build entry file content from already-resolved imports and declarations.
2666
+ * @param imports - Import statement texts.
2667
+ * @param declarations - Declaration statement texts.
2668
+ * @param fnSource - The function source code.
2669
+ * @param sourceFilePath - Path to the source file for resolving relative imports.
2670
+ * @returns Entry file content string.
2671
+ */
2672
+ function buildMinimalEntryFromResolved(imports, declarations, fnSource, sourceFilePath) {
2673
+ const sourceDir = resolve(sourceFilePath, "..").replace(/\\/g, "/");
2674
+ return [
2675
+ ...imports.map((imp) => imp.replace(/from\s+["'](\.[^"']+)["']/g, (_match, relPath) => `from "${resolve(sourceDir, relPath).replace(/\\/g, "/")}"`)),
2676
+ ...declarations,
2677
+ `export function main(input) { return (${fnSource})(input); }`
2678
+ ].join("\n");
2679
+ }
2680
+ async function bundleScriptTarget(args) {
2681
+ const { fn, kind, sourceFilePath, sourceBindings, tempDir, targetIndex, tsconfig } = args;
2682
+ const fnSource = stringifyFunction(fn);
2683
+ const inlineExpr = `(${fnSource})({ value: _value, data: _data, user: ${tailorUserMap} })`;
2684
+ const freeVars = findUndefinedReferences(`const __fn = ${fnSource};`);
2685
+ if (freeVars.size === 0) return inlineExpr;
2686
+ const { imports, declarations, unresolved } = resolveNeededBindings(freeVars, sourceBindings);
2687
+ if (unresolved.length > 0) throw new Error(`${kind} in ${sourceFilePath} captures unresolvable variables (${unresolved.join(", ")}). Hooks and validators must not reference variables that cannot be resolved from the source file.
2688
+ ${kind}: ${fnSource}`);
2689
+ const entryContent = buildMinimalEntryFromResolved(imports, declarations, fnSource, sourceFilePath);
2690
+ const entryPath = join(tempDir, `tailordb-script-${targetIndex}.entry.ts`);
2691
+ const outputPath = join(tempDir, `tailordb-script-${targetIndex}.bundle.cjs`);
2692
+ writeFileSync(entryPath, entryContent);
2693
+ await rolldown.build(rolldown.defineConfig({
2694
+ input: entryPath,
2695
+ output: {
2696
+ file: outputPath,
2697
+ format: "cjs",
2698
+ sourcemap: false,
2699
+ minify: true,
2700
+ codeSplitting: false
2701
+ },
2702
+ tsconfig,
2703
+ treeshake: {
2704
+ moduleSideEffects: false,
2705
+ annotations: true,
2706
+ unknownGlobalSideEffects: false
2707
+ },
2708
+ logLevel: "silent"
2709
+ }));
2710
+ return buildPrecompiledExpr(readFileSync(outputPath, "utf-8"));
2711
+ }
2712
+ /**
2713
+ * Precompile TailorDB hooks/validators into self-contained script expressions using rolldown.
2714
+ * Uses oxc-parser AST walking to extract free variables from functions, then builds
2715
+ * minimal entry points containing only the needed imports and declarations.
2716
+ * @param type - TailorDB type schema output.
2717
+ * @param sourceFilePath - Source file where the type is defined.
2718
+ * @param tsconfig - Resolved tsconfig path, or undefined if not found.
2719
+ */
2720
+ async function precompileTailorDBTypeScripts(type, sourceFilePath, tsconfig) {
2721
+ const targets = collectScriptTargets(type);
2722
+ if (targets.length === 0) return;
2723
+ const sourceBindings = collectSourceBindings(sourceFilePath);
2724
+ const tempDir = resolve(getDistDir(), "hooks-validate-scripts", type.name);
2725
+ mkdirSync(tempDir, { recursive: true });
2726
+ try {
2727
+ const results = await Promise.allSettled(targets.map((target, index) => bundleScriptTarget({
2728
+ fn: target.fn,
2729
+ kind: target.kind,
2730
+ sourceFilePath,
2731
+ sourceBindings,
2732
+ tempDir,
2733
+ targetIndex: index,
2734
+ tsconfig
2735
+ })));
2736
+ const firstError = results.find((r) => r.status === "rejected");
2737
+ if (firstError && firstError.status === "rejected") throw firstError.reason;
2738
+ for (const [index, result] of results.entries()) if (result.status === "fulfilled") setPrecompiledScriptExpr(targets[index].fn, result.value);
2739
+ } finally {
2740
+ rmSync(tempDir, {
2741
+ recursive: true,
2742
+ force: true
2743
+ });
2744
+ }
2745
+ }
2746
+
2747
+ //#endregion
2748
+ //#region src/cli/services/tailordb/service.ts
2749
+ /**
2750
+ * Creates a new TailorDBService instance.
2751
+ * @param params - Parameters for creating the service
2752
+ * @returns A new TailorDBService instance
2753
+ */
2754
+ function createTailorDBService(params) {
2755
+ const { namespace, config, pluginManager } = params;
2756
+ const rawTypes = {};
2757
+ let types = {};
2758
+ const typeSourceInfo = {};
2759
+ const pluginAttachments = /* @__PURE__ */ new Map();
2760
+ let loadPromise;
2761
+ const doParseTypes = () => {
2762
+ const allTypes = {};
2763
+ for (const fileTypes of Object.values(rawTypes)) for (const [typeName, type] of Object.entries(fileTypes)) allTypes[typeName] = type;
2764
+ types = parseTypes(allTypes, namespace, typeSourceInfo);
2765
+ };
2766
+ /**
2767
+ * Process plugins for a type and add generated types to rawTypes
2768
+ * @param rawType - The raw TailorDB type being processed
2769
+ * @param attachments - Plugin attachments for this type
2770
+ * @param sourceFilePath - The file path where the type was loaded from
2771
+ */
2772
+ const processPluginsForType = async (rawType, attachments, sourceFilePath) => {
2773
+ if (!pluginManager) return;
2774
+ let currentType = rawType;
2775
+ for (const attachment of attachments) {
2776
+ const result = await pluginManager.processAttachment({
2777
+ type: currentType,
2778
+ typeConfig: attachment.config,
2779
+ namespace,
2780
+ pluginId: attachment.pluginId
2781
+ });
2782
+ if (!result.success) {
2783
+ logger.error(result.error);
2784
+ throw new Error(result.error);
2785
+ }
2786
+ const output = result.output;
2787
+ const extendFields = output.extends?.fields;
2788
+ if (extendFields && Object.keys(extendFields).length > 0) {
2789
+ const extendedType = pluginManager.extendType({
2790
+ originalType: currentType,
2791
+ extendFields,
2792
+ pluginId: attachment.pluginId
2793
+ });
2794
+ rawTypes[sourceFilePath][currentType.name] = extendedType;
2795
+ currentType = extendedType;
2796
+ logger.log(` Extended: ${styles.success(currentType.name)} with ${styles.highlight(Object.keys(extendFields).length.toString())} fields by plugin ${styles.info(attachment.pluginId)}`);
2797
+ }
2798
+ const plugin = pluginManager.getPlugin(attachment.pluginId);
2799
+ for (const [kind, generatedType] of Object.entries(output.types ?? {})) {
2800
+ rawTypes[sourceFilePath][generatedType.name] = generatedType;
2801
+ typeSourceInfo[generatedType.name] = {
2802
+ exportName: generatedType.name,
2803
+ pluginId: attachment.pluginId,
2804
+ pluginImportPath: pluginManager.getPluginImportPath(attachment.pluginId) ?? "",
2805
+ originalFilePath: sourceFilePath,
2806
+ originalExportName: typeSourceInfo[rawType.name]?.exportName || rawType.name,
2807
+ generatedTypeKind: kind,
2808
+ pluginConfig: plugin?.pluginConfig,
2809
+ namespace
2810
+ };
2811
+ logger.log(` Generated: ${styles.success(generatedType.name)} by plugin ${styles.info(attachment.pluginId)}`);
2812
+ }
2813
+ }
2814
+ };
2815
+ const loadTypeFile = async (typeFile, tsconfig) => {
2816
+ rawTypes[typeFile] = {};
2817
+ const loadedTypes = {};
2818
+ try {
2819
+ const module = await import(pathToFileURL(typeFile).href);
2820
+ for (const exportName of Object.keys(module)) {
2821
+ const exportedValue = module[exportName];
2822
+ const result = TailorDBTypeSchema.safeParse(exportedValue);
2823
+ if (!result.success) {
2824
+ if (isSdkBranded(exportedValue, "tailordb-type")) throw result.error;
2825
+ continue;
2826
+ }
2827
+ const relativePath = path.relative(process.cwd(), typeFile);
2828
+ logger.log(`Type: ${styles.successBright(`"${result.data.name}"`)} loaded from ${styles.path(relativePath)}`);
2829
+ await precompileTailorDBTypeScripts(result.data, typeFile, tsconfig);
2830
+ rawTypes[typeFile][result.data.name] = result.data;
2831
+ loadedTypes[result.data.name] = result.data;
2832
+ typeSourceInfo[result.data.name] = {
2833
+ filePath: typeFile,
2834
+ exportName
2835
+ };
2836
+ if (exportedValue.plugins && Array.isArray(exportedValue.plugins) && exportedValue.plugins.length > 0) {
2837
+ pluginAttachments.set(exportedValue.name, [...exportedValue.plugins]);
2838
+ logger.log(` Plugin attachments: ${styles.info(exportedValue.plugins.map((p) => p.pluginId).join(", "))}`);
2839
+ await processPluginsForType(exportedValue, exportedValue.plugins, typeFile);
2840
+ }
2841
+ }
2842
+ } catch (error) {
2843
+ const relativePath = path.relative(process.cwd(), typeFile);
2844
+ logger.error(`Failed to load type from ${styles.bold(relativePath)}`);
2845
+ logger.error(String(error));
2846
+ throw error;
2847
+ }
2848
+ return loadedTypes;
2849
+ };
2850
+ return {
2851
+ namespace,
2852
+ config,
2853
+ get types() {
2854
+ return types;
2855
+ },
2856
+ get typeSourceInfo() {
2857
+ return typeSourceInfo;
2858
+ },
2859
+ get pluginAttachments() {
2860
+ return pluginAttachments;
2861
+ },
2862
+ loadTypes: async () => {
2863
+ if (!loadPromise) loadPromise = (async () => {
2864
+ if (!config.files || config.files.length === 0) return;
2865
+ const typeFiles = loadFilesWithIgnores(config);
2866
+ let tsconfig;
2867
+ try {
2868
+ tsconfig = await resolveTSConfig();
2869
+ } catch {
2870
+ tsconfig = void 0;
2871
+ }
2872
+ logger.newline();
2873
+ logger.log(`Found ${styles.highlight(typeFiles.length.toString())} type files for TailorDB service ${styles.highlight(`"${namespace}"`)}`);
2874
+ if (pluginManager) for (const typeFile of typeFiles) await loadTypeFile(typeFile, tsconfig);
2875
+ else await Promise.all(typeFiles.map((typeFile) => loadTypeFile(typeFile, tsconfig)));
2876
+ doParseTypes();
2877
+ return types;
2878
+ })();
2879
+ return loadPromise;
2880
+ },
2881
+ processNamespacePlugins: async () => {
2882
+ if (!pluginManager) return;
2883
+ const results = await pluginManager.processNamespacePlugins(namespace);
2884
+ const pluginGeneratedKey = "__plugin_generated__";
2885
+ if (!rawTypes[pluginGeneratedKey]) rawTypes[pluginGeneratedKey] = {};
2886
+ let hasGeneratedTypes = false;
2887
+ for (const { pluginId, config, result } of results) {
2888
+ if (!result.success) {
2889
+ logger.error(result.error);
2890
+ throw new Error(result.error);
2891
+ }
2892
+ const output = result.output;
2893
+ for (const [kind, generatedType] of Object.entries(output.types ?? {})) {
2894
+ rawTypes[pluginGeneratedKey][generatedType.name] = generatedType;
2895
+ hasGeneratedTypes = true;
2896
+ typeSourceInfo[generatedType.name] = {
2897
+ exportName: generatedType.name,
2898
+ pluginId,
2899
+ pluginImportPath: pluginManager.getPluginImportPath(pluginId) ?? "",
2900
+ originalFilePath: "",
2901
+ originalExportName: "",
2902
+ generatedTypeKind: kind,
2903
+ pluginConfig: config,
2904
+ namespace
2905
+ };
2906
+ logger.log(` Generated: ${styles.success(generatedType.name)} by namespace plugin ${styles.info(pluginId)}`);
2907
+ }
2908
+ }
2909
+ if (hasGeneratedTypes) doParseTypes();
2910
+ }
2911
+ };
2912
+ }
2913
+
2914
+ //#endregion
2915
+ //#region src/parser/service/resolver/schema.ts
2916
// Scalar/compound field kinds accepted in resolver input/output definitions.
const TailorFieldTypeSchema = z.enum([
  "uuid",
  "string",
  "boolean",
  "integer",
  "float",
  "decimal",
  "enum",
  "date",
  "datetime",
  "time",
  "nested"
]);
// GraphQL operation type for a resolver (query or mutation).
const QueryTypeSchema = z.union([z.literal("query"), z.literal("mutation")]).describe("GraphQL operation type");
// One allowed value of an enum field, with an optional human-readable description.
const AllowedValueSchema = z.object({
  value: z.string().describe("The allowed value"),
  description: z.string().optional().describe("Description of the allowed value")
});
// Per-field metadata: requiredness, cardinality, enum values, lifecycle hooks.
const FieldMetadataSchema = z.object({
  required: z.boolean().optional().describe("Whether the field is required"),
  array: z.boolean().optional().describe("Whether the field is an array"),
  description: z.string().optional().describe("Field description"),
  allowedValues: z.array(AllowedValueSchema).optional().describe("Allowed values for enum fields"),
  hooks: z.object({
    create: functionSchema.optional().describe("Hook function called on creation"),
    update: functionSchema.optional().describe("Hook function called on update")
  }).optional().describe("Lifecycle hooks"),
  typeName: z.string().optional().describe("Type name for nested or enum fields")
});
// A single field definition. `fields` is a getter so the schema can reference
// itself recursively (nested fields) without a forward declaration.
const TailorFieldSchema = z.object({
  type: TailorFieldTypeSchema.describe("Field data type"),
  metadata: FieldMetadataSchema.describe("Field metadata configuration"),
  get fields() {
    return z.record(z.string(), TailorFieldSchema);
  }
});
// Full resolver definition. `authInvoker` is a getter because
// AuthInvokerSchema is declared later in this bundle; the getter defers
// evaluation past the const's temporal dead zone.
const ResolverSchema = z.object({
  operation: QueryTypeSchema.describe("GraphQL operation type (query or mutation)"),
  name: z.string().describe("Resolver name"),
  description: z.string().optional().describe("Resolver description"),
  input: z.record(z.string(), TailorFieldSchema).optional().describe("Input field definitions"),
  body: functionSchema.describe("Resolver implementation function"),
  output: TailorFieldSchema.describe("Output field definition"),
  publishEvents: z.boolean().optional().describe("Enable publishing events from this resolver"),
  get authInvoker() {
    return AuthInvokerSchema.optional().describe("Machine user to execute this resolver as");
  }
});
2964
+
2965
+ //#endregion
2966
+ //#region src/parser/service/auth/schema.ts
2967
// Reference to an auth machine user used to invoke operations on its behalf.
const AuthInvokerSchema = z.object({
  namespace: z.string().describe("Auth namespace"),
  machineUserName: z.string().describe("Machine user name for authentication")
});
// Pointer to a secret held in a named vault.
const secretValueSchema = z.object({
  vaultName: z.string().describe("Vault name containing the secret"),
  secretKey: z.string().describe("Key of the secret in the vault")
});
// OIDC identity-provider configuration.
const OIDCSchema = z.object({
  name: z.string().describe("Identity provider name"),
  kind: z.literal("OIDC"),
  clientID: z.string().describe("OAuth2 client ID"),
  clientSecret: secretValueSchema.describe("OAuth2 client secret"),
  providerURL: z.string().describe("OIDC provider URL"),
  issuerURL: z.string().optional().describe("OIDC issuer URL (defaults to providerURL)"),
  usernameClaim: z.string().optional().describe("JWT claim to use as username")
});
// SAML identity-provider configuration. The refine is an XOR written with
// chained `!==`: `(metadataURL !== undefined) !== (rawMetadata !== undefined)`
// — exactly one of the two metadata sources must be supplied.
const SAMLSchema = z.object({
  name: z.string().describe("Identity provider name"),
  kind: z.literal("SAML"),
  enableSignRequest: z.boolean().default(false).describe("Enable signing of SAML requests"),
  metadataURL: z.string().optional().describe("URL to fetch SAML metadata (mutually exclusive with rawMetadata)"),
  rawMetadata: z.string().optional().describe("Raw SAML metadata XML (mutually exclusive with metadataURL)")
}).refine((value) => {
  return value.metadataURL !== void 0 !== (value.rawMetadata !== void 0);
}, "Provide either metadataURL or rawMetadata");
// Raw ID-token identity-provider configuration.
const IDTokenSchema = z.object({
  name: z.string().describe("Identity provider name"),
  kind: z.literal("IDToken"),
  providerURL: z.string().describe("ID token provider URL"),
  issuerURL: z.string().optional().describe("ID token issuer URL"),
  clientID: z.string().describe("Client ID for ID token validation"),
  usernameClaim: z.string().optional().describe("JWT claim to use as username")
});
// Built-in IdP configuration: references an IdP namespace and a client in it.
const BuiltinIdPSchema = z.object({
  name: z.string().describe("Identity provider name"),
  kind: z.literal("BuiltInIdP"),
  namespace: z.string().describe("IdP namespace"),
  clientName: z.string().describe("OAuth2 client name in the IdP")
});
// All supported identity providers, discriminated on `kind`.
const IdProviderSchema = z.discriminatedUnion("kind", [
  OIDCSchema,
  SAMLSchema,
  IDTokenSchema,
  BuiltinIdPSchema
]);
const OAuth2ClientGrantTypeSchema = z.union([z.literal("authorization_code"), z.literal("refresh_token")]).describe("OAuth2 grant type");
// OAuth2 client registration. Token lifetimes are validated as plain seconds
// and then transformed into `{ seconds: BigInt, nanos: 0 }` duration objects;
// the truthy guard in each transform is safe because `.min(60)` excludes 0.
// The trailing refine forbids DPoP on browser clients.
const OAuth2ClientSchema = z.object({
  description: z.string().optional().describe("Client description"),
  grantTypes: z.array(OAuth2ClientGrantTypeSchema).default(["authorization_code", "refresh_token"]).describe("Allowed OAuth2 grant types"),
  redirectURIs: z.array(z.union([
    z.templateLiteral(["https://", z.string()]),
    z.templateLiteral(["http://", z.string()]),
    z.templateLiteral([z.string(), ":url"]),
    z.templateLiteral([
      z.string(),
      ":url/",
      z.string()
    ])
  ])).describe("Allowed redirect URIs"),
  clientType: z.union([
    z.literal("confidential"),
    z.literal("public"),
    z.literal("browser")
  ]).optional().describe("OAuth2 client type"),
  accessTokenLifetimeSeconds: z.number().int().min(60, "Minimum access token lifetime is 60 seconds").max(86400, "Maximum access token lifetime is 1 day (86400 seconds)").optional().describe("Access token lifetime in seconds (60-86400)").transform((val) => val ? {
    seconds: BigInt(val),
    nanos: 0
  } : void 0),
  refreshTokenLifetimeSeconds: z.number().int().min(60, "Minimum refresh token lifetime is 60 seconds").max(604800, "Maximum refresh token lifetime is 7 days (604800 seconds)").optional().describe("Refresh token lifetime in seconds (60-604800)").transform((val) => val ? {
    seconds: BigInt(val),
    nanos: 0
  } : void 0),
  requireDpop: z.boolean().optional().describe("Require DPoP (Demonstrating Proof-of-Possession) for token requests")
}).refine((data) => !(data.clientType === "browser" && data.requireDpop === true), {
  message: "requireDpop cannot be set to true for browser clients as they don't support DPoP",
  path: ["requireDpop"]
});
// SCIM provisioning authorization. `bearerSecret` is only marked optional here;
// its requirement for the "bearer" type is documented, not schema-enforced.
const SCIMAuthorizationSchema = z.object({
  type: z.union([z.literal("oauth2"), z.literal("bearer")]).describe("SCIM authorization type"),
  bearerSecret: secretValueSchema.optional().describe("Bearer token secret (required for bearer type)")
});
const SCIMAttributeTypeSchema = z.union([
  z.literal("string"),
  z.literal("number"),
  z.literal("boolean"),
  z.literal("datetime"),
  z.literal("complex")
]).describe("SCIM attribute data type");
// SCIM attribute definition. `subAttributes` is a getter so complex attributes
// can nest recursively without a forward declaration.
const SCIMAttributeSchema = z.object({
  type: SCIMAttributeTypeSchema.describe("Attribute data type"),
  name: z.string().describe("Attribute name"),
  description: z.string().optional().describe("Attribute description"),
  mutability: z.union([
    z.literal("readOnly"),
    z.literal("readWrite"),
    z.literal("writeOnly")
  ]).optional().describe("Attribute mutability"),
  required: z.boolean().optional().describe("Whether the attribute is required"),
  multiValued: z.boolean().optional().describe("Whether the attribute can have multiple values"),
  uniqueness: z.union([
    z.literal("none"),
    z.literal("server"),
    z.literal("global")
  ]).optional().describe("Uniqueness constraint"),
  canonicalValues: z.array(z.string()).nullable().optional().describe("List of canonical values"),
  get subAttributes() {
    return z.array(SCIMAttributeSchema).nullable().optional();
  }
});
const SCIMSchemaSchema = z.object({
  name: z.string().describe("SCIM schema name"),
  attributes: z.array(SCIMAttributeSchema).describe("Schema attributes")
});
// Maps one SCIM attribute path onto one TailorDB field.
const SCIMAttributeMappingSchema = z.object({
  tailorDBField: z.string().describe("TailorDB field name to map to"),
  scimPath: z.string().describe("SCIM attribute path")
});
const SCIMResourceSchema = z.object({
  name: z.string().describe("SCIM resource name"),
  tailorDBNamespace: z.string().describe("TailorDB namespace for the resource"),
  tailorDBType: z.string().describe("TailorDB type name for the resource"),
  coreSchema: SCIMSchemaSchema.describe("Core SCIM schema definition"),
  attributeMapping: z.array(SCIMAttributeMappingSchema).describe("Attribute mapping configuration")
});
const SCIMSchema = z.object({
  machineUserName: z.string().describe("Machine user name for SCIM operations"),
  authorization: SCIMAuthorizationSchema.describe("SCIM authorization configuration"),
  resources: z.array(SCIMResourceSchema).describe("SCIM resource definitions")
});
// Multi-tenant provider: which TailorDB type holds tenants and which field
// carries the tenant signature.
const TenantProviderSchema = z.object({
  namespace: z.string().describe("TailorDB namespace for the tenant type"),
  type: z.string().describe("TailorDB type name for tenants"),
  signatureField: z.string().describe("Field used as the tenant signature")
});
// Loosely-typed (z.any) projection of a TailorDB type definition plus the
// username field and attribute selection used for the auth user profile.
const UserProfileSchema = z.object({
  type: z.object({
    name: z.string(),
    fields: z.any(),
    metadata: z.any(),
    hooks: z.any(),
    validate: z.any(),
    features: z.any(),
    indexes: z.any(),
    files: z.any(),
    permission: z.any(),
    gqlPermission: z.any(),
    _output: z.any()
  }),
  usernameField: z.string(),
  attributes: z.record(z.string(), z.literal(true)).optional(),
  attributeList: z.array(z.string()).optional()
});
// Attribute operand: scalar or homogeneous array of strings/booleans.
const ValueOperandSchema = z.union([
  z.string(),
  z.boolean(),
  z.array(z.string()),
  z.array(z.boolean())
]);
const MachineUserSchema = z.object({
  attributes: z.record(z.string(), ValueOperandSchema).optional(),
  attributeList: z.array(z.uuid()).optional()
});
const BeforeLoginHookSchema = z.object({
  handler: z.function(),
  invoker: z.string()
});
// Common auth-service fields shared by every AuthConfig variant.
const AuthConfigBaseSchema = z.object({
  name: z.string().describe("Auth service name"),
  hooks: z.object({ beforeLogin: BeforeLoginHookSchema.optional().describe("Before login auth hook") }).optional().describe("Auth hooks"),
  machineUsers: z.record(z.string(), MachineUserSchema).optional().describe("Machine user definitions"),
  oauth2Clients: z.record(z.string(), OAuth2ClientSchema).optional().describe("OAuth2 client definitions"),
  idProvider: IdProviderSchema.optional().describe("Identity provider configuration"),
  scim: SCIMSchema.optional().describe("SCIM provisioning configuration"),
  tenantProvider: TenantProviderSchema.optional().describe("Multi-tenant provider configuration"),
  publishSessionEvents: z.boolean().optional().describe("Enable publishing session events")
});
// AuthConfig accepts either neither of `userProfile`/`machineUserAttributes`,
// or exactly one of the two (the inner `z.xor`). NOTE(review): `z.xor` is not
// part of stock Zod — presumably supplied by the SDK's bundled validator;
// confirm against the actual `z` export.
const AuthConfigSchema = z.union([AuthConfigBaseSchema.extend({
  userProfile: z.undefined().optional(),
  machineUserAttributes: z.undefined().optional()
}), z.xor([AuthConfigBaseSchema.extend({
  userProfile: UserProfileSchema,
  machineUserAttributes: z.undefined().optional()
}), AuthConfigBaseSchema.extend({
  userProfile: z.undefined().optional(),
  machineUserAttributes: z.record(z.string(), TailorFieldSchema)
})])]).brand("AuthConfig");
3154
+
3155
+ //#endregion
3156
+ //#region src/cli/services/auth/service.ts
3157
/**
 * Creates a new AuthService instance.
 *
 * The returned service exposes the raw config, a parsed copy with a validated
 * idProvider, and a lazy `resolveNamespaces` step that determines which
 * TailorDB namespace hosts the user-profile type.
 * @param config - The auth configuration
 * @param tailorDBServices - The TailorDB services
 * @param externalTailorDBNamespaces - External TailorDB namespaces
 * @returns A new AuthService instance
 */
function createAuthService(config, tailorDBServices, externalTailorDBNamespaces) {
  const parsedConfig = {
    ...config,
    // Re-validate the idProvider portion through the schema.
    idProvider: IdProviderSchema.optional().parse(config.idProvider)
  };
  let resolvedProfile;
  return {
    config,
    tailorDBServices,
    externalTailorDBNamespaces,
    parsedConfig,
    get userProfile() {
      return resolvedProfile;
    },
    resolveNamespaces: async () => {
      const profile = config.userProfile;
      if (!profile) return;
      // Explicit namespace on the profile wins outright.
      if (profile.namespace) {
        resolvedProfile = {
          ...profile,
          namespace: profile.namespace
        };
        return;
      }
      let namespace;
      const namespaceTotal = tailorDBServices.length + externalTailorDBNamespaces.length;
      if (namespaceTotal === 1) {
        // Only one candidate namespace exists — use it without loading types.
        namespace = tailorDBServices[0]?.namespace ?? externalTailorDBNamespaces[0];
      } else {
        // Load every service's types, then locate the one that declares the
        // user-profile type by name.
        await Promise.all(tailorDBServices.map((service) => service.loadTypes()));
        const typeName = typeof profile.type === "object" && "name" in profile.type ? profile.type.name : void 0;
        if (typeName) {
          const owner = tailorDBServices.find((service) => Object.prototype.hasOwnProperty.call(service.types, typeName));
          namespace = owner?.namespace;
        }
        if (!namespace) throw new Error(`userProfile type "${profile.type.name}" not found in any TailorDB namespace`);
      }
      resolvedProfile = {
        ...profile,
        namespace
      };
    }
  };
}
3209
+
3210
+ //#endregion
3211
+ //#region src/parser/service/executor/schema.ts
3212
// Trigger: fires on TailorDB record create/update/delete events.
const RecordTriggerSchema = z.object({
  kind: z.enum([
    "recordCreated",
    "recordUpdated",
    "recordDeleted"
  ]).describe("Record event type"),
  typeName: z.string().describe("TailorDB type name to watch for events"),
  condition: functionSchema.optional().describe("Condition function to filter events")
});
// Trigger: fires after a named resolver executes.
const ResolverExecutedTriggerSchema = z.object({
  kind: z.literal("resolverExecuted"),
  resolverName: z.string().describe("Name of the resolver to trigger on"),
  condition: functionSchema.optional().describe("Condition function to filter events")
});
// Trigger: fires on a CRON schedule.
const ScheduleTriggerSchema = z.object({
  kind: z.literal("schedule"),
  cron: z.string().describe("CRON expression for the schedule"),
  timezone: z.string().optional().default("UTC").describe("Timezone for the CRON schedule (default: UTC)")
});
// Trigger: fires on an incoming webhook call (no extra configuration).
const IncomingWebhookTriggerSchema = z.object({ kind: z.literal("incomingWebhook") });
// Trigger: fires on IdP user lifecycle events.
const IdpUserTriggerSchema = z.object({ kind: z.enum([
  "idpUserCreated",
  "idpUserUpdated",
  "idpUserDeleted"
]).describe("IdP user event type") });
// Trigger: fires on auth access-token lifecycle events.
const AuthAccessTokenTriggerSchema = z.object({ kind: z.enum([
  "authAccessTokenIssued",
  "authAccessTokenRefreshed",
  "authAccessTokenRevoked"
]).describe("Auth access token event type") });
// All trigger variants, discriminated on `kind`.
const TriggerSchema = z.discriminatedUnion("kind", [
  RecordTriggerSchema,
  ResolverExecutedTriggerSchema,
  ScheduleTriggerSchema,
  IncomingWebhookTriggerSchema,
  IdpUserTriggerSchema,
  AuthAccessTokenTriggerSchema
]);
// Operation: run a user-supplied function (optionally as a job).
const FunctionOperationSchema = z.object({
  kind: z.enum(["function", "jobFunction"]),
  body: functionSchema.describe("Function implementation"),
  authInvoker: AuthInvokerSchema.optional().describe("Auth invoker for the function execution")
});
// Operation: run a GraphQL query/mutation. `query` is coerced to a string via
// preprocess, so template objects and the like are stringified before parsing.
const GqlOperationSchema = z.object({
  kind: z.literal("graphql"),
  appName: z.string().optional().describe("Target application name for the GraphQL query"),
  query: z.preprocess((val) => String(val), z.string().describe("GraphQL query string")),
  variables: functionSchema.optional().describe("Function to compute GraphQL variables"),
  authInvoker: AuthInvokerSchema.optional().describe("Auth invoker for the GraphQL execution")
});
// Operation: call an outbound webhook. Header values may be literal strings or
// vault secret references ({ vault, key }).
const WebhookOperationSchema = z.object({
  kind: z.literal("webhook"),
  url: functionSchema.describe("Function returning the webhook URL"),
  requestBody: functionSchema.optional().describe("Function to compute the request body"),
  headers: z.record(z.string(), z.union([z.string(), z.object({
    vault: z.string(),
    key: z.string()
  })])).optional().describe("HTTP headers for the webhook request")
});
// Operation: run a workflow. The preprocess accepts the authoring shape
// `{ workflow: { name } }` and rewrites it to the stored shape
// `{ workflowName }`; any other input passes through unchanged.
const WorkflowOperationSchema = z.preprocess((val) => {
  if (val == null || typeof val !== "object" || !("workflow" in val) || typeof val.workflow !== "object" || val.workflow === null) return val;
  const { workflow, ...rest } = val;
  return {
    ...rest,
    workflowName: workflow.name
  };
}, z.object({
  kind: z.literal("workflow"),
  workflowName: z.string().describe("Name of the workflow to execute"),
  args: z.union([z.record(z.string(), z.unknown()), functionSchema]).optional().describe("Arguments to pass to the workflow"),
  authInvoker: AuthInvokerSchema.optional().describe("Auth invoker for the workflow execution")
}));
const OperationSchema = z.union([
  FunctionOperationSchema,
  GqlOperationSchema,
  WebhookOperationSchema,
  WorkflowOperationSchema
]);
// Full executor definition: a trigger plus the operation it runs.
const ExecutorSchema = z.object({
  name: z.string().describe("Executor name"),
  description: z.string().optional().describe("Executor description"),
  disabled: z.boolean().optional().default(false).describe("Whether the executor is disabled"),
  trigger: TriggerSchema.describe("Event trigger configuration"),
  operation: OperationSchema.describe("Operation to execute when triggered")
});
3297
+
3298
+ //#endregion
3299
+ //#region src/cli/services/executor/loader.ts
3300
/**
 * Load and validate an executor definition from a file.
 *
 * Dynamically imports the module and validates its default export against
 * ExecutorSchema.
 * @param executorFilePath - Path to the executor file
 * @returns Parsed executor or null if invalid
 */
async function loadExecutor(executorFilePath) {
  const module = await import(pathToFileURL(executorFilePath).href);
  const parsed = ExecutorSchema.safeParse(module.default);
  return parsed.success ? parsed.data : null;
}
3311
+
3312
+ //#endregion
3313
+ //#region src/cli/services/executor/bundler.ts
3314
/**
 * Bundle executors from the specified configuration
 *
 * This function:
 * 1. Creates entry file that extracts operation.body
 * 2. Bundles in a single step with tree-shaking
 *
 * Only executors whose operation kind is "function" or "jobFunction" are
 * bundled; others (graphql/webhook/workflow) have no code to bundle and are
 * skipped with a debug log.
 * @param options - Bundle executor options
 * @returns Promise that resolves when bundling completes
 */
async function bundleExecutors(options) {
  const { config, triggerContext, additionalFiles = [], cache, inlineSourcemap } = options;
  const files = [...loadFilesWithIgnores(config), ...additionalFiles];
  if (files.length === 0) {
    logger.warn(`No executor files found for patterns: ${config.files?.join(", ") ?? "(none)"}`);
    return;
  }
  logger.newline();
  logger.log(`Bundling ${styles.highlight(files.length.toString())} files for ${styles.info("\"executor\"")}`);
  const executors = [];
  // Files are loaded sequentially; a file that fails schema validation is
  // skipped rather than aborting the whole bundle.
  for (const file of files) {
    const executor = await loadExecutor(file);
    if (!executor) {
      logger.debug(` Skipping: ${file} (could not be loaded)`);
      continue;
    }
    if (!["function", "jobFunction"].includes(executor.operation.kind)) {
      logger.debug(` Skipping: ${executor.name} (not a function executor)`);
      continue;
    }
    executors.push({
      name: executor.name,
      sourceFile: file
    });
  }
  // Nothing bundleable: return before touching the output directory.
  if (executors.length === 0) {
    logger.debug(" No function executors to bundle");
    return;
  }
  const outputDir = path.resolve(getDistDir(), "executors");
  fs$1.mkdirSync(outputDir, { recursive: true });
  // Prune leftover *.entry.js files from previous runs.
  await removeStaleEntryFiles(outputDir);
  let tsconfig;
  try {
    tsconfig = await resolveTSConfig();
  } catch {
    // Best-effort: bundle without a tsconfig if resolution fails.
    tsconfig = void 0;
  }
  // Bundle every executor in parallel.
  await Promise.all(executors.map((executor) => bundleSingleExecutor(executor, outputDir, tsconfig, triggerContext, cache, inlineSourcemap)));
  logger.log(`${styles.success("Bundled")} ${styles.info("\"executor\"")}`);
}
3364
/**
 * Bundle one executor into `<outputDir>/<name>.js` via rolldown, going through
 * the bundle cache (`withCache`) so unchanged executors are skipped.
 *
 * A synthetic `<name>.entry.js` is written that imports the executor module
 * and re-exports `operation.body` as `main`; tree-shaking then drops the rest.
 * @param executor - `{ name, sourceFile }` descriptor
 * @param outputDir - Directory receiving the bundle and entry file
 * @param tsconfig - Resolved tsconfig path, or undefined
 * @param triggerContext - Context for the trigger-transform plugin
 * @param cache - Optional bundle cache
 * @param inlineSourcemap - Inline sourcemaps (and keep names when minifying)
 */
async function bundleSingleExecutor(executor, outputDir, tsconfig, triggerContext, cache, inlineSourcemap) {
  const outputPath = path.join(outputDir, `${executor.name}.js`);
  const serializedTriggerContext = serializeTriggerContext(triggerContext);
  // Hash of everything besides the source content that can affect the output;
  // used by withCache to invalidate stale bundles.
  const contextHash = computeBundlerContextHash({
    sourceFile: executor.sourceFile,
    serializedTriggerContext,
    tsconfig,
    inlineSourcemap
  });
  await withCache({
    cache,
    kind: "executor",
    name: executor.name,
    sourceFile: executor.sourceFile,
    outputPath,
    contextHash,
    // Invoked only on cache miss.
    async build(cachePlugins) {
      const entryPath = path.join(outputDir, `${executor.name}.entry.js`);
      const entryContent = ml`
      import _internalExecutor from "${path.resolve(executor.sourceFile)}";

      const __executor_function = _internalExecutor.operation.body;

      export { __executor_function as main };
      `;
      fs$1.writeFileSync(entryPath, entryContent);
      const triggerPlugin = createTriggerTransformPlugin(triggerContext);
      const plugins = triggerPlugin ? [triggerPlugin] : [];
      plugins.push(...cachePlugins);
      await rolldown.build(rolldown.defineConfig({
        input: entryPath,
        output: {
          file: outputPath,
          format: "esm",
          // Inline sourcemaps imply a debug-friendly build: keep names.
          sourcemap: inlineSourcemap ? "inline" : true,
          minify: inlineSourcemap ? { mangle: { keepNames: true } } : true,
          codeSplitting: false
        },
        tsconfig,
        plugins,
        treeshake: {
          moduleSideEffects: false,
          annotations: true,
          unknownGlobalSideEffects: false
        },
        logLevel: "silent"
      }));
    }
  });
}
3414
+
3415
+ //#endregion
3416
+ //#region src/cli/services/executor/service.ts
3417
/**
 * Creates a new ExecutorService instance.
 *
 * Holds executors keyed by source file plus any plugin-generated executors,
 * and memoizes the load step so concurrent callers share one load.
 * @param params - Parameters for creating the service
 * @returns A new ExecutorService instance
 */
function createExecutorService(params) {
  const { config } = params;
  // Loaded executors keyed by absolute source-file path.
  const executors = {};
  // Executors originating from plugin-generated files.
  const pluginExecutors = [];
  // Memoized promise so loadExecutors runs the scan at most once.
  let loadPromise;
  // Import one file and validate its default export. Returns the parsed
  // executor on success; returns undefined (silently skipping the file) when
  // validation fails on a non-SDK-branded export; rethrows after logging when
  // the export IS branded as an executor or the import itself fails.
  const loadExecutorForFile = async (executorFile) => {
    try {
      const executorModule = await import(pathToFileURL(executorFile).href);
      const result = ExecutorSchema.safeParse(executorModule.default);
      if (result.success) {
        const relativePath = path.relative(process.cwd(), executorFile);
        logger.log(`Executor: ${styles.successBright(`"${result.data.name}"`)} loaded from ${styles.path(relativePath)}`);
        executors[executorFile] = result.data;
        return result.data;
      }
      // A branded-but-invalid executor is a user error worth surfacing.
      if (isSdkBranded(executorModule.default, "executor")) throw result.error;
    } catch (error) {
      const relativePath = path.relative(process.cwd(), executorFile);
      logger.error(`Failed to load executor from ${styles.bold(relativePath)}`);
      logger.error(String(error));
      throw error;
    }
  };
  return {
    config,
    get executors() {
      return executors;
    },
    get pluginExecutors() {
      return pluginExecutors;
    },
    // Scan configured globs and load all executor files in parallel (once).
    loadExecutors: async () => {
      if (!loadPromise) loadPromise = (async () => {
        if (!config.files || config.files.length === 0) return;
        const executorFiles = loadFilesWithIgnores(config);
        logger.newline();
        logger.log(`Found ${styles.highlight(executorFiles.length.toString())} executor files`);
        await Promise.all(executorFiles.map((executorFile) => loadExecutorForFile(executorFile)));
        return executors;
      })();
      return loadPromise;
    },
    // Load plugin-generated executor files sequentially and record them with
    // their plugin provenance.
    loadPluginExecutorFiles: async (filePaths) => {
      if (filePaths.length === 0) return;
      logger.newline();
      logger.log(`Loading ${styles.highlight(filePaths.length.toString())} plugin-generated executor files`);
      for (const filePath of filePaths) {
        const executor = await loadExecutorForFile(filePath);
        if (executor) pluginExecutors.push({
          executor,
          pluginId: "plugin-generated",
          sourceTypeName: void 0
        });
      }
    }
  };
}
3479
+
3480
+ //#endregion
3481
+ //#region src/cli/services/resolver/loader.ts
3482
/**
 * Load and validate a resolver definition from a file.
 *
 * Dynamically imports the module and validates its default export against
 * ResolverSchema.
 * @param resolverFilePath - Path to the resolver file
 * @returns Parsed resolver or null if invalid
 */
async function loadResolver(resolverFilePath) {
  const module = await import(pathToFileURL(resolverFilePath).href);
  const parsed = ResolverSchema.safeParse(module.default);
  return parsed.success ? parsed.data : null;
}
3493
+
3494
+ //#endregion
3495
+ //#region src/cli/services/resolver/bundler.ts
3496
/**
 * Bundle resolvers for the specified namespace
 *
 * This function:
 * 1. Uses a transform plugin to add validation wrapper during bundling
 * 2. Creates entry file
 * 3. Bundles in a single step with tree-shaking
 * @param namespace - Resolver namespace name
 * @param config - Resolver file loading configuration
 * @param triggerContext - Trigger context for workflow/job transformations
 * @param cache - Optional bundle cache for skipping unchanged builds
 * @param inlineSourcemap - Whether to enable inline sourcemaps
 * @returns Promise that resolves when bundling completes
 */
async function bundleResolvers(namespace, config, triggerContext, cache, inlineSourcemap) {
  const files = loadFilesWithIgnores(config);
  if (files.length === 0) {
    logger.warn(`No resolver files found for patterns: ${config.files?.join(", ") ?? "(none)"}`);
    return;
  }
  logger.newline();
  logger.log(`Bundling ${styles.highlight(files.length.toString())} files for ${styles.info(`"${namespace}"`)}`);
  const resolvers = [];
  // Files that fail validation are skipped, not fatal.
  for (const file of files) {
    const resolver = await loadResolver(file);
    if (!resolver) {
      logger.debug(` Skipping: ${file} (could not be loaded)`);
      continue;
    }
    resolvers.push({
      name: resolver.name,
      sourceFile: file
    });
  }
  // NOTE(review): unlike bundleExecutors there is no early return when
  // `resolvers` is empty — the output dir is still created and stale entry
  // files pruned. Confirm whether that asymmetry is intentional.
  const outputDir = path.resolve(getDistDir(), "resolvers");
  fs$1.mkdirSync(outputDir, { recursive: true });
  await removeStaleEntryFiles(outputDir);
  let tsconfig;
  try {
    tsconfig = await resolveTSConfig();
  } catch {
    // Best-effort: bundle without a tsconfig if resolution fails.
    tsconfig = void 0;
  }
  // Bundle every resolver in parallel.
  await Promise.all(resolvers.map((resolver) => bundleSingleResolver(resolver, outputDir, tsconfig, triggerContext, cache, inlineSourcemap)));
  logger.log(`${styles.success("Bundled")} ${styles.info(`"${namespace}"`)}`);
}
3542
/**
 * Bundle one resolver into `<outputDir>/<name>.js` via rolldown, going through
 * the bundle cache (`withCache`) so unchanged resolvers are skipped.
 *
 * A synthetic `<name>.entry.js` wraps the resolver body: it validates
 * `context.input` against the declared `input` schema (via the SDK's `t`
 * helper) and throws a combined error message on validation issues before
 * delegating to the original `body`.
 * @param resolver - `{ name, sourceFile }` descriptor
 * @param outputDir - Directory receiving the bundle and entry file
 * @param tsconfig - Resolved tsconfig path, or undefined
 * @param triggerContext - Context for the trigger-transform plugin
 * @param cache - Optional bundle cache
 * @param inlineSourcemap - Inline sourcemaps (and keep names when minifying)
 */
async function bundleSingleResolver(resolver, outputDir, tsconfig, triggerContext, cache, inlineSourcemap) {
  const outputPath = path.join(outputDir, `${resolver.name}.js`);
  const serializedTriggerContext = serializeTriggerContext(triggerContext);
  // Hash of the non-source inputs that affect the bundle; drives cache reuse.
  const contextHash = computeBundlerContextHash({
    sourceFile: resolver.sourceFile,
    serializedTriggerContext,
    tsconfig,
    inlineSourcemap
  });
  await withCache({
    cache,
    kind: "resolver",
    name: resolver.name,
    sourceFile: resolver.sourceFile,
    outputPath,
    contextHash,
    // Invoked only on cache miss.
    async build(cachePlugins) {
      const entryPath = path.join(outputDir, `${resolver.name}.entry.js`);
      // Escaped \` and \${ below are emitted literally into the entry file,
      // so interpolation happens at resolver runtime, not here. `result`
      // carries an `issues` array on failure (SDK `t` validator semantics —
      // not Zod's throwing parse).
      const entryContent = ml`
      import _internalResolver from "${path.resolve(resolver.sourceFile)}";
      import { t } from "@tailor-platform/sdk";

      const $tailor_resolver_body = async (context) => {
        if (_internalResolver.input) {
          const result = t.object(_internalResolver.input).parse({
            value: context.input,
            data: context.input,
            user: context.user,
          });

          if (result.issues) {
            const errorMessages = result.issues
              .map(issue => {
                const path = issue.path ? issue.path.join('.') : '';
                return path ? \` \${path}: \${issue.message}\` : issue.message;
              })
              .join('\\n');
            throw new Error(\`Failed to input validation:\\n\${errorMessages}\`);
          }
        }

        return _internalResolver.body(context);
      };

      export { $tailor_resolver_body as main };
      `;
      fs$1.writeFileSync(entryPath, entryContent);
      const triggerPlugin = createTriggerTransformPlugin(triggerContext);
      const plugins = triggerPlugin ? [triggerPlugin] : [];
      plugins.push(...cachePlugins);
      await rolldown.build(rolldown.defineConfig({
        input: entryPath,
        output: {
          file: outputPath,
          format: "esm",
          // Inline sourcemaps imply a debug-friendly build: keep names.
          sourcemap: inlineSourcemap ? "inline" : true,
          minify: inlineSourcemap ? { mangle: { keepNames: true } } : true,
          codeSplitting: false
        },
        tsconfig,
        plugins,
        treeshake: {
          moduleSideEffects: false,
          annotations: true,
          unknownGlobalSideEffects: false
        },
        logLevel: "silent"
      }));
    }
  });
}
3613
+
3614
+ //#endregion
3615
+ //#region src/cli/services/resolver/service.ts
3616
/**
 * Creates a new ResolverService instance.
 * @param namespace - The namespace for this resolver service
 * @param config - The resolver service configuration
 * @returns A new ResolverService instance with lazily-loaded resolvers
 */
function createResolverService(namespace, config) {
  const loadedResolvers = {};
  async function loadResolverForFile(resolverFile) {
    try {
      const resolverModule = await import(pathToFileURL(resolverFile).href);
      const parsed = ResolverSchema.safeParse(resolverModule.default);
      if (!parsed.success) {
        // A default export carrying the SDK "resolver" brand that fails
        // validation is a real configuration error; anything else is ignored.
        if (isSdkBranded(resolverModule.default, "resolver")) throw parsed.error;
        return;
      }
      const relativePath = path.relative(process.cwd(), resolverFile);
      logger.log(`Resolver: ${styles.successBright(`"${parsed.data.name}"`)} loaded from ${styles.path(relativePath)}`);
      loadedResolvers[resolverFile] = parsed.data;
      return parsed.data;
    } catch (error) {
      const relativePath = path.relative(process.cwd(), resolverFile);
      logger.error(`Failed to load resolver from ${styles.bold(relativePath)}`);
      logger.error(String(error));
      throw error;
    }
  }
  async function loadResolvers() {
    // Idempotent: a non-empty map means loading already happened.
    if (Object.keys(loadedResolvers).length > 0) return;
    if (!config.files || config.files.length === 0) return;
    const resolverFiles = loadFilesWithIgnores(config);
    logger.log(`Found ${styles.highlight(resolverFiles.length.toString())} resolver files for service ${styles.highlight(`"${namespace}"`)}`);
    await Promise.all(resolverFiles.map((resolverFile) => loadResolverForFile(resolverFile)));
  }
  return {
    namespace,
    config,
    get resolvers() {
      return loadedResolvers;
    },
    loadResolvers
  };
}
3657
+
3658
+ //#endregion
3659
+ //#region src/cli/services/workflow/source-transformer.ts
3660
/**
 * Find variable declarations by export names.
 * Walks the whole AST depth-first; exported declarations always win over
 * plain ones (unconditional overwrite), while among plain declarations the
 * first occurrence is kept.
 * @param program - Parsed TypeScript program
 * @returns Map of export name to statement range ({ start, end })
 */
function findVariableDeclarationsByName(program) {
  const declarations = new Map();
  // Record every identifier declared by `varDecl` under the given range.
  // `overwrite` distinguishes exported (last wins) from plain (first wins).
  const record = (varDecl, start, end, overwrite) => {
    for (const declarator of varDecl.declarations || []) {
      const id = declarator.id;
      if (id?.type !== "Identifier" || !id.name) continue;
      if (overwrite || !declarations.has(id.name)) declarations.set(id.name, { start, end });
    }
  };
  const visit = (node) => {
    if (!node || typeof node !== "object") return;
    if (node.type === "VariableDeclaration") {
      record(node, node.start, node.end, false);
    } else if (node.type === "ExportNamedDeclaration" && node.declaration?.type === "VariableDeclaration") {
      // Use the export statement's range, not the inner declaration's.
      record(node.declaration, node.start, node.end, true);
    }
    // Generic child traversal: descend into every array/object property.
    for (const key of Object.keys(node)) {
      const child = node[key];
      if (Array.isArray(child)) {
        for (const element of child) visit(element);
      } else if (child && typeof child === "object") {
        visit(child);
      }
    }
  };
  visit(program);
  return declarations;
}
3700
/**
 * Find createWorkflow default export declarations.
 * Matches either `export default createWorkflow(...)` (via SDK binding
 * detection) or `export default someIdentifier`.
 * @param program - Parsed TypeScript program
 * @returns Range of the default export statement or null when none matches
 */
function findWorkflowDefaultExport(program) {
  const bindings = collectSdkBindings(program, "createWorkflow");
  for (const statement of program.body) {
    if (statement.type !== "ExportDefaultDeclaration") continue;
    const declaration = statement.declaration;
    const matchesWorkflowCall = isSdkFunctionCall(declaration, bindings, "createWorkflow");
    if (matchesWorkflowCall || declaration.type === "Identifier") {
      return { start: statement.start, end: statement.end };
    }
  }
  return null;
}
3722
/**
 * Transform workflow source code
 * - Transform .trigger() calls to tailor.workflow.triggerJobFunction()
 * - Other jobs: remove entire variable declaration
 * @param source - The source code to transform
 * @param targetJobName - The name of the target job (from job config)
 * @param targetJobExportName - The export name of the target job (optional, for enhanced detection)
 * @param otherJobExportNames - Export names of other jobs to remove (optional, for enhanced detection)
 * @param allJobsMap - Map from export name to job name for trigger transformation (optional)
 * @returns Transformed workflow source code
 */
function transformWorkflowSource(source, targetJobName, targetJobExportName, otherJobExportNames, allJobsMap) {
  const { program } = parseSync("input.ts", source);
  const detectedJobs = findAllJobs(program, source);
  // Prefer the caller-provided exportName -> jobName map (covers cross-file
  // jobs); fall back to one built from the jobs detected in this file.
  const jobNameMap = allJobsMap ?? buildJobNameMap(detectedJobs);
  const allDeclarations = findVariableDeclarationsByName(program);
  const triggerCalls = detectTriggerCalls(program, source);
  // Text edits are collected first and applied in one pass at the end;
  // removedRanges tracks deleted spans so later steps skip anything inside.
  const replacements = [];
  const removedRanges = [];
  const isInsideRemovedRange = (pos) => {
    return removedRanges.some((r) => pos >= r.start && pos < r.end);
  };
  const isAlreadyMarkedForRemoval = (start) => {
    return removedRanges.some((r) => r.start === start);
  };
  // 1) Remove every detected job except the target. When a job has no
  //    statement range, neutralize its body with a no-op instead.
  for (const job of detectedJobs) {
    if (job.name === targetJobName) continue;
    if (job.statementRange && !isAlreadyMarkedForRemoval(job.statementRange.start)) {
      const endPos = findStatementEnd(source, job.statementRange.end);
      removedRanges.push({
        start: job.statementRange.start,
        end: endPos
      });
      replacements.push({
        start: job.statementRange.start,
        end: endPos,
        text: ""
      });
    } else if (!job.statementRange) replacements.push({
      start: job.bodyValueRange.start,
      end: job.bodyValueRange.end,
      text: "() => {}"
    });
  }
  // 2) Remove declarations for other jobs known only by export name
  //    (jobs not detected syntactically in step 1).
  if (otherJobExportNames) for (const exportName of otherJobExportNames) {
    if (exportName === targetJobExportName) continue;
    const declRange = allDeclarations.get(exportName);
    if (declRange && !isAlreadyMarkedForRemoval(declRange.start)) {
      const endPos = findStatementEnd(source, declRange.end);
      removedRanges.push({
        start: declRange.start,
        end: endPos
      });
      replacements.push({
        start: declRange.start,
        end: endPos,
        text: ""
      });
    }
  }
  // 3) Strip the `export default createWorkflow(...)` statement, if any.
  const workflowExport = findWorkflowDefaultExport(program);
  if (workflowExport && !isAlreadyMarkedForRemoval(workflowExport.start)) {
    const endPos = findStatementEnd(source, workflowExport.end);
    removedRanges.push({
      start: workflowExport.start,
      end: endPos
    });
    replacements.push({
      start: workflowExport.start,
      end: endPos,
      text: ""
    });
  }
  // 4) Rewrite surviving `.trigger()` calls to runtime triggerJobFunction
  //    calls; calls inside removed spans are skipped.
  for (const call of triggerCalls) {
    if (isInsideRemovedRange(call.callRange.start)) continue;
    const jobName = jobNameMap.get(call.identifierName);
    if (jobName) {
      const transformedCall = `tailor.workflow.triggerJobFunction("${jobName}", ${call.argsText || "undefined"})`;
      replacements.push({
        start: call.fullRange.start,
        end: call.fullRange.end,
        text: transformedCall
      });
    }
  }
  return applyReplacements(source, replacements);
}
3809
+
3810
+ //#endregion
3811
+ //#region src/cli/services/workflow/bundler.ts
3812
/**
 * Bundle workflow jobs
 *
 * This function:
 * 1. Detects which jobs are actually used (mainJobs + their dependencies)
 * 2. Uses a transform plugin to transform trigger calls during bundling
 * 3. Creates entry file and bundles with tree-shaking
 *
 * Returns metadata about which jobs each workflow uses.
 * @param allJobs - All available job infos
 * @param mainJobNames - Names of main jobs
 * @param env - Environment variables to inject
 * @param triggerContext - Trigger context for transformations
 * @param cache - Optional bundle cache for skipping unchanged builds
 * @param inlineSourcemap - Whether to enable inline sourcemaps
 * @returns Workflow job bundling result
 */
async function bundleWorkflowJobs(allJobs, mainJobNames, env = {}, triggerContext, cache, inlineSourcemap) {
  if (allJobs.length === 0) {
    logger.warn("No workflow jobs to bundle");
    return { mainJobDeps: {} };
  }
  // Only bundle jobs reachable (directly or transitively) from a main job.
  const { usedJobs, mainJobDeps } = await filterUsedJobs(allJobs, mainJobNames);
  logger.newline();
  logger.log(`Bundling ${styles.highlight(usedJobs.length.toString())} files for ${styles.info("\"workflow-job\"")}`);
  const outputDir = path.resolve(getDistDir(), "workflow-jobs");
  fs$1.mkdirSync(outputDir, { recursive: true });
  // Remove stale .js/.js.map outputs for jobs that no longer exist. Note this
  // also sweeps leftover "<name>.entry.js" files from earlier runs, since a
  // basename like "<name>.entry" never matches a job name.
  const currentJobNames = new Set(usedJobs.map((j) => j.name));
  const existingFiles = fs$1.readdirSync(outputDir);
  for (const file of existingFiles) if (file.endsWith(".js") && !currentJobNames.has(path.basename(file, ".js"))) fs$1.rmSync(path.join(outputDir, file), { force: true });
  else if (file.endsWith(".js.map") && !currentJobNames.has(path.basename(file, ".js.map"))) fs$1.rmSync(path.join(outputDir, file), { force: true });
  // A missing/unresolvable tsconfig is not fatal; bundle without one.
  let tsconfig;
  try {
    tsconfig = await resolveTSConfig();
  } catch {
    tsconfig = void 0;
  }
  await Promise.all(usedJobs.map((job) => bundleSingleJob(job, usedJobs, outputDir, tsconfig, env, triggerContext, cache, inlineSourcemap)));
  logger.log(`${styles.success("Bundled")} ${styles.info("\"workflow-job\"")}`);
  return { mainJobDeps };
}
3853
/**
 * Filter jobs to only include those that are actually used.
 * A job is "used" if:
 * - It's a mainJob of a workflow
 * - It's called via .trigger() from another used job (transitively)
 *
 * Also returns a map of mainJob -> all jobs it depends on (for metadata).
 * @param allJobs - All available job infos
 * @param mainJobNames - Names of main jobs
 * @returns Used jobs and main job dependency map
 */
async function filterUsedJobs(allJobs, mainJobNames) {
  if (allJobs.length === 0 || mainJobNames.length === 0) return {
    usedJobs: [],
    mainJobDeps: {}
  };
  // Group jobs by source file so each file is read and parsed only once.
  const jobsBySourceFile = /* @__PURE__ */ new Map();
  for (const job of allJobs) {
    const existing = jobsBySourceFile.get(job.sourceFile) || [];
    existing.push(job);
    jobsBySourceFile.set(job.sourceFile, existing);
  }
  // Global exportName -> jobName map, used as a fallback for trigger calls
  // referencing jobs exported from other files.
  const exportNameToJobName = /* @__PURE__ */ new Map();
  for (const job of allJobs) exportNameToJobName.set(job.exportName, job.name);
  const dependencies = /* @__PURE__ */ new Map();
  // Analyze each file's AST to find which jobs each job's body triggers.
  const fileResults = await Promise.all(Array.from(jobsBySourceFile.entries()).map(async ([sourceFile, jobs]) => {
    try {
      const source = await fs$1.promises.readFile(sourceFile, "utf-8");
      const { program } = parseSync("input.ts", source);
      const detectedJobs = findAllJobs(program, source);
      const localExportNameToJobName = /* @__PURE__ */ new Map();
      for (const detected of detectedJobs) if (detected.exportName) localExportNameToJobName.set(detected.exportName, detected.name);
      const triggerCalls = detectTriggerCalls(program, source);
      const jobDependencies = [];
      for (const job of jobs) {
        const detectedJob = detectedJobs.find((d) => d.name === job.name);
        if (!detectedJob) continue;
        const jobDeps = /* @__PURE__ */ new Set();
        // A trigger call belongs to this job when it lies inside the job's
        // body range; local bindings take precedence over cross-file ones.
        for (const call of triggerCalls) if (detectedJob.bodyValueRange && call.callRange.start >= detectedJob.bodyValueRange.start && call.callRange.end <= detectedJob.bodyValueRange.end) {
          const triggeredJobName = localExportNameToJobName.get(call.identifierName) || exportNameToJobName.get(call.identifierName);
          if (triggeredJobName) jobDeps.add(triggeredJobName);
        }
        if (jobDeps.size > 0) jobDependencies.push({
          jobName: job.name,
          deps: jobDeps
        });
      }
      return jobDependencies;
    } catch {
      // Best-effort: an unreadable/unparsable file contributes no edges.
      return [];
    }
  }));
  for (const jobDependencies of fileResults) for (const { jobName, deps } of jobDependencies) dependencies.set(jobName, deps);
  const usedJobNames = /* @__PURE__ */ new Set();
  const mainJobDeps = {};
  // Transitive closure over the dependency graph; `collected` doubles as the
  // visited set, so cycles terminate.
  function collectDeps(jobName, collected) {
    if (collected.has(jobName)) return;
    collected.add(jobName);
    const deps = dependencies.get(jobName);
    if (deps) for (const dep of deps) collectDeps(dep, collected);
  }
  for (const mainJobName of mainJobNames) {
    const depsForMainJob = /* @__PURE__ */ new Set();
    collectDeps(mainJobName, depsForMainJob);
    // Note: the closure includes the main job itself.
    mainJobDeps[mainJobName] = Array.from(depsForMainJob);
    for (const dep of depsForMainJob) usedJobNames.add(dep);
  }
  return {
    usedJobs: allJobs.filter((job) => usedJobNames.has(job.name)),
    mainJobDeps
  };
}
3925
/**
 * Bundle one workflow job into `<outputDir>/<name>.js` with rolldown.
 *
 * Generates an entry module exporting `main(input)` that calls the job's
 * `body` with the injected `env`. During bundling, a transform plugin strips
 * the other jobs and the workflow default export from shared source files and
 * rewrites `.trigger()` calls. Wrapped in `withCache` so unchanged builds are
 * skipped.
 *
 * @param job - The job to bundle ({ name, exportName, sourceFile })
 * @param allJobs - All used jobs (needed to remove siblings and map triggers)
 * @param outputDir - Directory receiving the bundled output
 * @param tsconfig - Resolved tsconfig path, or undefined
 * @param env - Environment variables inlined into the entry file
 * @param triggerContext - Trigger context for function-trigger rewriting (may be undefined)
 * @param cache - Optional bundle cache
 * @param inlineSourcemap - true => inline sourcemap + name-preserving minify; otherwise external map + full minify
 */
async function bundleSingleJob(job, allJobs, outputDir, tsconfig, env, triggerContext, cache, inlineSourcemap) {
  const outputPath = path.join(outputDir, `${job.name}.js`);
  const serializedTriggerContext = serializeTriggerContext(triggerContext);
  // Env entries are sorted before hashing so key order cannot spuriously
  // invalidate the cache.
  const sortedEnvPrefix = JSON.stringify(Object.fromEntries(Object.entries(env).sort(([a], [b]) => a.localeCompare(b))));
  const contextHash = computeBundlerContextHash({
    sourceFile: job.sourceFile,
    serializedTriggerContext,
    tsconfig,
    inlineSourcemap,
    prefix: sortedEnvPrefix
  });
  await withCache({
    cache,
    kind: "workflow-job",
    name: job.name,
    sourceFile: job.sourceFile,
    outputPath,
    contextHash,
    async build(cachePlugins) {
      const entryPath = path.join(outputDir, `${job.name}.entry.js`);
      const absoluteSourcePath = path.resolve(job.sourceFile);
      // Entry module: import the job export and expose a `main` that invokes
      // its body with the env captured at build time.
      const entryContent = ml`
        import { ${job.exportName} } from "${absoluteSourcePath}";

        export async function main(input) {
          const env = ${JSON.stringify(env)};
          return await ${job.exportName}.body(input, { env });
        }
      `;
      fs$1.writeFileSync(entryPath, entryContent);
      const otherJobExportNames = allJobs.filter((j) => j.name !== job.name).map((j) => j.exportName);
      const allJobsMap = /* @__PURE__ */ new Map();
      for (const j of allJobs) allJobsMap.set(j.exportName, j.name);
      const plugins = [{
        name: "workflow-transform",
        transform: {
          filter: { id: { include: [/\.ts$/, /\.js$/] } },
          handler(code, id) {
            // Cheap substring pre-check before paying for a full parse.
            if (!code.includes("createWorkflowJob") && !code.includes("createWorkflow") && !code.includes(".trigger(")) return null;
            let transformed = transformWorkflowSource(code, job.name, job.exportName, otherJobExportNames, allJobsMap);
            if (triggerContext && transformed.includes(".trigger(")) transformed = transformFunctionTriggers(transformed, triggerContext.workflowNameMap, triggerContext.jobNameMap, triggerContext.workflowFileMap, id);
            return { code: transformed };
          }
        }
      }, ...cachePlugins];
      await rolldown.build(rolldown.defineConfig({
        input: entryPath,
        output: {
          file: outputPath,
          format: "esm",
          sourcemap: inlineSourcemap ? "inline" : true,
          minify: inlineSourcemap ? { mangle: { keepNames: true } } : true,
          codeSplitting: false
        },
        tsconfig,
        plugins,
        treeshake: {
          moduleSideEffects: false,
          annotations: true,
          unknownGlobalSideEffects: false
        },
        logLevel: "silent"
      }));
    }
  });
}
3991
+
3992
+ //#endregion
3993
+ //#region src/parser/service/workflow/schema.ts
3994
// Runtime schema for a workflow job (as produced by createWorkflowJob).
// Loader code runs safeParse() against module exports to discover jobs.
const WorkflowJobSchema = z.object({
  name: z.string().describe("Job name (must be unique across the project)"),
  trigger: functionSchema.describe("Trigger function that initiates the job"),
  body: functionSchema.describe("Job implementation function")
});
3999
// Duration units accepted in duration literals ("250ms", "30s", "5m").
const durationUnits = [
  "ms",
  "s",
  "m"
];
// Conversion factors from each unit to seconds.
const unitToSeconds = {
  ms: 1 / 1e3,
  s: 1,
  m: 60
};
/**
 * Convert a duration literal like "500ms", "30s" or "2m" into seconds.
 * Anything not matching `<positive integer><ms|s|m>` yields 0.
 * @param duration - Duration string to convert
 * @returns Duration in seconds (possibly fractional for "ms"), or 0
 */
function durationToSeconds(duration) {
  const parsed = /^(\d+)(ms|s|m)$/.exec(duration);
  if (parsed === null) return 0;
  const [, amount, unit] = parsed;
  return parseInt(amount, 10) * unitToSeconds[unit];
}
4014
// Template-literal schema: a positive integer followed by one of the units above.
const baseDurationSchema = z.templateLiteral([z.number().int().positive(), z.enum(durationUnits)]);
// Duration schema capped at `maxSeconds` (checked after unit conversion).
const durationSchema = (maxSeconds) => baseDurationSchema.refine((val) => durationToSeconds(val) <= maxSeconds, { message: `Duration must be at most ${maxSeconds} seconds` });
4016
// Retry policy: exponential backoff configuration with cross-field checks
// (initialBackoff <= maxBackoff, initialBackoff strictly positive).
const RetryPolicySchema = z.object({
  maxRetries: z.number().int().min(1).max(10).describe("Maximum number of retries (1-10)"),
  initialBackoff: durationSchema(3600).describe("Initial backoff duration (e.g., '1s', '500ms', '1m', max 1h)"),
  maxBackoff: durationSchema(86400).describe("Maximum backoff duration (e.g., '30s', '5m', max 24h)"),
  backoffMultiplier: z.number().min(1).describe("Backoff multiplier (>= 1)")
}).refine((data) => durationToSeconds(data.initialBackoff) <= durationToSeconds(data.maxBackoff), {
  message: "initialBackoff must be less than or equal to maxBackoff",
  path: ["initialBackoff"]
}).refine((data) => durationToSeconds(data.initialBackoff) > 0, {
  message: "initialBackoff must be greater than 0",
  path: ["initialBackoff"]
});
4028
// Top-level workflow definition: a name, the main job that starts the
// workflow, and an optional retry policy.
const WorkflowSchema = z.object({
  name: z.string().describe("Workflow name"),
  mainJob: WorkflowJobSchema.describe("Main job that starts the workflow"),
  retryPolicy: RetryPolicySchema.optional().describe("Retry policy for the workflow")
});
4033
+
4034
+ //#endregion
4035
+ //#region src/cli/services/workflow/service.ts
4036
/**
 * Creates a new WorkflowService instance.
 * @param params - Parameters for creating the service
 * @returns A new WorkflowService instance
 */
function createWorkflowService(params) {
  const { config } = params;
  // All mutable service state lives in one object; getters read through it.
  const state = {
    workflows: {},
    workflowSources: [],
    jobs: [],
    fileCount: 0,
    loaded: false
  };
  return {
    config,
    get workflows() {
      return state.workflows;
    },
    get workflowSources() {
      return state.workflowSources;
    },
    get jobs() {
      return state.jobs;
    },
    get fileCount() {
      return state.fileCount;
    },
    loadWorkflows: async () => {
      // Idempotent: only the first call actually loads files.
      if (state.loaded) return;
      const result = await loadAndCollectJobs(config);
      state.workflows = result.workflows;
      state.workflowSources = result.workflowSources;
      state.jobs = result.jobs;
      state.fileCount = result.fileCount;
      state.loaded = true;
    },
    printLoadedWorkflows: () => {
      if (state.fileCount === 0) return;
      logger.newline();
      logger.log(`Found ${styles.highlight(state.fileCount.toString())} workflow files`);
      for (const { workflow, sourceFile } of state.workflowSources) {
        const relativePath = path.relative(process.cwd(), sourceFile);
        logger.log(`Workflow: ${styles.successBright(`"${workflow.name}"`)} loaded from ${styles.path(relativePath)}`);
      }
    }
  };
}
4082
/**
 * Load workflow files and collect all jobs in a single pass.
 * Dependencies are detected at bundle time via AST analysis.
 * @param config - Workflow service configuration
 * @returns Loaded workflows and collected jobs
 * @throws Error when two jobs share the same name across the loaded files
 */
async function loadAndCollectJobs(config) {
  const workflows = {};
  const workflowSources = [];
  const collectedJobs = [];
  if (!config.files || config.files.length === 0) return {
    workflows,
    workflowSources,
    jobs: collectedJobs,
    fileCount: 0
  };
  const workflowFiles = loadFilesWithIgnores(config);
  const fileCount = workflowFiles.length;
  // Tracks job names seen so far to detect duplicates across files.
  const allJobsMap = /* @__PURE__ */ new Map();
  // Files are imported in parallel; results are merged sequentially below so
  // duplicate detection stays deterministic.
  const loadResults = await Promise.all(workflowFiles.map(async (workflowFile) => {
    const { jobs, workflow } = await loadFileContent(workflowFile);
    return {
      workflowFile,
      jobs,
      workflow
    };
  }));
  for (const { workflowFile, jobs, workflow } of loadResults) {
    if (workflow) {
      workflowSources.push({
        workflow,
        sourceFile: workflowFile
      });
      workflows[workflowFile] = workflow;
    }
    for (const job of jobs) {
      const existing = allJobsMap.get(job.name);
      if (existing) throw new Error(`Duplicate job name "${job.name}" found:\n - ${existing.sourceFile} (export: ${existing.exportName})\n - ${job.sourceFile} (export: ${job.exportName})\nEach job must have a unique name.`);
      allJobsMap.set(job.name, job);
      collectedJobs.push(job);
    }
  }
  return {
    workflows,
    workflowSources,
    jobs: collectedJobs,
    fileCount
  };
}
4131
/**
 * Load a single file and extract jobs and workflow
 * @param filePath - Path to the workflow file
 * @returns Extracted jobs and workflow
 * @throws Rethrows import/validation errors after logging them
 */
async function loadFileContent(filePath) {
  const jobs = [];
  let workflow = null;
  try {
    const module = await import(pathToFileURL(filePath).href);
    for (const [exportName, exportValue] of Object.entries(module)) {
      // Default export is expected to be the workflow definition; named
      // exports are candidate jobs. Exports that carry an SDK brand but fail
      // validation are real errors; other exports are silently ignored.
      if (exportName === "default") {
        const workflowResult = WorkflowSchema.safeParse(exportValue);
        if (workflowResult.success) workflow = workflowResult.data;
        else if (isSdkBranded(exportValue, ["workflow", "workflow-job"])) throw workflowResult.error;
        continue;
      }
      const jobResult = WorkflowJobSchema.safeParse(exportValue);
      if (jobResult.success) jobs.push({
        name: jobResult.data.name,
        exportName,
        sourceFile: filePath
      });
      else if (isSdkBranded(exportValue, ["workflow", "workflow-job"])) throw jobResult.error;
    }
  } catch (error) {
    const relativePath = path.relative(process.cwd(), filePath);
    logger.error(`${styles.error("Failed to load workflow from")} ${styles.errorBright(relativePath)}`);
    logger.error(String(error));
    throw error;
  }
  return {
    jobs,
    workflow
  };
}
4167
+
4168
+ //#endregion
4169
+ //#region src/parser/generator-config/index.ts
4170
// Kinds of project artifacts a code generator may declare as inputs.
const DependencyKindSchema = z.enum([
  "tailordb",
  "resolver",
  "executor"
]);
// Tuple-style configs for the builtin generators: [package id, options].
const KyselyTypeConfigSchema = z.tuple([z.literal("@tailor-platform/kysely-type"), z.object({ distPath: z.string() })]);
const SeedConfigSchema = z.tuple([z.literal("@tailor-platform/seed"), z.object({
  distPath: z.string(),
  machineUserName: z.string().optional()
})]);
const EnumConstantsConfigSchema = z.tuple([z.literal("@tailor-platform/enum-constants"), z.object({ distPath: z.string() })]);
const FileUtilsConfigSchema = z.tuple([z.literal("@tailor-platform/file-utils"), z.object({ distPath: z.string() })]);
// Shape of a custom, user-authored code generator object.
const CodeGeneratorSchema = z.object({
  id: z.string(),
  description: z.string(),
  dependencies: z.array(DependencyKindSchema),
  processType: z.function().optional(),
  processResolver: z.function().optional(),
  processExecutor: z.function().optional(),
  processTailorDBNamespace: z.function().optional(),
  processResolverNamespace: z.function().optional(),
  aggregate: z.function({ output: z.any() })
});
// Any accepted generator config: a builtin tuple or a custom generator.
const BaseGeneratorConfigSchema = z.union([
  KyselyTypeConfigSchema,
  SeedConfigSchema,
  EnumConstantsConfigSchema,
  FileUtilsConfigSchema,
  CodeGeneratorSchema
]);
4200
+
4201
+ //#endregion
4202
+ //#region src/parser/plugin-config/schema.ts
4203
// Schema for plugin configuration objects. `.passthrough()` preserves unknown
// extra keys, and the refine requires an importPath whenever the plugin has
// definition-time hooks (so the plugin module can be re-imported later).
const PluginConfigSchema = z.object({
  id: z.string(),
  description: z.string(),
  importPath: z.string().optional(),
  pluginConfig: z.unknown().optional(),
  typeConfigRequired: z.union([z.boolean(), functionSchema]).optional(),
  onTypeLoaded: functionSchema.optional(),
  onNamespaceLoaded: functionSchema.optional(),
  onTailorDBReady: functionSchema.optional(),
  onResolverReady: functionSchema.optional(),
  onExecutorReady: functionSchema.optional()
}).passthrough().refine((p) => {
  return !(p.onTypeLoaded || p.onNamespaceLoaded) || !!p.importPath;
}, { message: "importPath is required when plugin has definition-time hooks (onTypeLoaded/onNamespaceLoaded)" }).transform((plugin) => plugin);
4217
+
4218
+ //#endregion
4219
+ //#region src/plugin/builtin/registry.ts
4220
// Registry mapping builtin generator IDs to plugin factories. Tuple-style
// generator configs (["@tailor-platform/...", options]) found in the user
// config are converted into plugin instances through this map.
const builtinPlugins = new Map([
  [KyselyGeneratorID, (options) => kyselyTypePlugin(options)],
  [SeedGeneratorID, (options) => seedPlugin(options)],
  [EnumConstantsGeneratorID, (options) => enumConstantsPlugin(options)],
  [FileUtilsGeneratorID, (options) => fileUtilsPlugin(options)]
]);
4226
+
4227
+ //#endregion
4228
+ //#region src/cli/shared/mock.ts
4229
// Mock `tailordb` global so user config modules that construct a
// tailordb.Client can be imported at build time without a live database.
// Every method is an inert no-op; queryObject resolves to an empty object.
const Client = class {
  constructor(_config) {}
  async connect() {}
  async end() {}
  async queryObject() {
    return {};
  }
};
globalThis.tailordb = { Client };
4237
+
4238
+ //#endregion
4239
+ //#region src/cli/shared/config-loader.ts
4240
// Branded variant of the generator schema so parsed items are tagged.
const GeneratorConfigSchema = CodeGeneratorSchema.brand("CodeGenerator");
/**
 * Load Tailor configuration file and associated generators and plugins.
 *
 * Scans every array exported from the config module: an array is first tried
 * as a list of generator configs (builtin tuples are converted to plugin
 * instances via `builtinPlugins`); if that fails, it is tried as a list of
 * plugin configs. Arrays matching neither are ignored.
 *
 * @param configPath - Optional explicit config path
 * @returns Loaded config, generators, plugins, and config path
 * @throws Error when no config file can be found or it has no default export
 */
async function loadConfig(configPath) {
  const foundPath = loadConfigPath(configPath);
  if (!foundPath) throw new Error("Configuration file not found: tailor.config.ts not found in current or parent directories");
  const resolvedPath = path.resolve(process.cwd(), foundPath);
  // Fix: report resolvedPath, not the `configPath` parameter — the parameter
  // is undefined when the config file was auto-discovered by loadConfigPath,
  // which previously produced "Configuration file not found: undefined".
  if (!fs$1.existsSync(resolvedPath)) throw new Error(`Configuration file not found: ${resolvedPath}`);
  const configModule = await import(pathToFileURL(resolvedPath).href);
  if (!configModule || !configModule.default) throw new Error("Invalid Tailor config module: default export not found");
  const allGenerators = [];
  const allPlugins = [];
  for (const value of Object.values(configModule)) if (Array.isArray(value)) {
    // Pass 1: interpret the array as generator configs. Builtin tuples are
    // converted to plugins; anything else must parse as a custom generator.
    const generatorParsed = value.reduce((acc, item) => {
      if (!acc.success) return acc;
      const baseResult = BaseGeneratorConfigSchema.safeParse(item);
      if (baseResult.success && Array.isArray(baseResult.data)) {
        const [id, options] = baseResult.data;
        const pluginFactory = builtinPlugins.get(id);
        if (pluginFactory) {
          acc.convertedPlugins.push(pluginFactory(options));
          return acc;
        }
      }
      const result = GeneratorConfigSchema.safeParse(item);
      if (result.success) acc.items.push(result.data);
      else acc.success = false;
      return acc;
    }, {
      success: true,
      items: [],
      convertedPlugins: []
    });
    if (generatorParsed.success && (generatorParsed.items.length > 0 || generatorParsed.convertedPlugins.length > 0)) {
      allGenerators.push(...generatorParsed.items);
      allPlugins.push(...generatorParsed.convertedPlugins);
      continue;
    }
    // Pass 2: interpret the array as plugin configs; all items must parse.
    const pluginParsed = value.reduce((acc, item) => {
      if (!acc.success) return acc;
      const result = PluginConfigSchema.safeParse(item);
      if (result.success) acc.items.push(result.data);
      else acc.success = false;
      return acc;
    }, {
      success: true,
      items: []
    });
    if (pluginParsed.success && pluginParsed.items.length > 0) allPlugins.push(...pluginParsed.items);
  }
  return {
    config: {
      ...configModule.default,
      path: resolvedPath
    },
    generators: allGenerators,
    plugins: allPlugins
  };
}
4302
+
4303
+ //#endregion
4304
+ //#region src/cli/shared/inline-sourcemap.ts
4305
/**
 * Resolve whether inline sourcemaps should be enabled.
 *
 * Resolution order:
 * 1. Config value (`inlineSourcemap` in defineConfig) — if explicitly set
 * 2. Environment variable `TAILOR_ENABLE_INLINE_SOURCEMAP` — if explicitly set
 *    (only the exact string "true" enables it)
 * 3. Default: `true`
 * @param configValue - The `inlineSourcemap` value from AppConfig
 * @returns Whether inline sourcemaps should be enabled
 */
function resolveInlineSourcemap(configValue) {
  if (configValue !== void 0) return configValue;
  const envValue = process.env.TAILOR_ENABLE_INLINE_SOURCEMAP;
  return envValue === void 0 ? true : envValue === "true";
}
4320
+
4321
+ //#endregion
4322
+ //#region src/parser/service/idp/schema.ts
4323
/**
 * Normalize IdPGqlOperationsConfig (alias or object) to IdPGqlOperations object.
 * The "query" alias expands to read-only mode:
 * { create: false, update: false, delete: false, read: true, sendPasswordResetEmail: false }
 * Any other value is passed through unchanged.
 * @param config - The config to normalize
 * @returns The normalized IdPGqlOperations object
 */
function normalizeIdPGqlOperations(config) {
  if (config !== "query") return config;
  return {
    create: false,
    update: false,
    delete: false,
    read: true,
    sendPasswordResetEmail: false
  };
}
4339
/**
 * Zod schema for IdPGqlOperations configuration with normalization transform.
 * Accepts "query" alias or detailed object, normalizes to IdPGqlOperations object.
 */
const IdPGqlOperationsSchema = z
	.union([
		z.literal("query"),
		z.object({
			create: z.boolean().optional().describe("Enable _createUser mutation (default: true)"),
			update: z.boolean().optional().describe("Enable _updateUser mutation (default: true)"),
			delete: z.boolean().optional().describe("Enable _deleteUser mutation (default: true)"),
			read: z.boolean().optional().describe("Enable _users and _user queries (default: true)"),
			sendPasswordResetEmail: z.boolean().optional().describe("Enable _sendPasswordResetEmail mutation (default: true)")
		})
	])
	.describe("Configuration for GraphQL operations on IdP users.\nAll operations are enabled by default (undefined or true = enabled, false = disabled).")
	.transform((config) => normalizeIdPGqlOperations(config));
4350
// Zod schema for the IdP UI language: only "en" and "ja" are accepted.
const IdPLangSchema = z.enum(["en", "ja"]).describe("IdP UI language");
4351
// Zod schema for the IdP user authentication policy. The field definitions
// describe individual knobs; the chained .refine checks below enforce the
// cross-field constraints between password rules, OAuth providers, and
// email-domain restrictions. Refine order determines which error is reported
// first, so the chain order is intentional.
const IdPUserAuthPolicySchema = z.object({
	useNonEmailIdentifier: z.boolean().optional().describe("Use non-email identifier for usernames"),
	allowSelfPasswordReset: z.boolean().optional().describe("Allow users to reset their own passwords"),
	passwordRequireUppercase: z.boolean().optional().describe("Require uppercase letters in passwords"),
	passwordRequireLowercase: z.boolean().optional().describe("Require lowercase letters in passwords"),
	passwordRequireNonAlphanumeric: z.boolean().optional().describe("Require non-alphanumeric characters in passwords"),
	passwordRequireNumeric: z.boolean().optional().describe("Require numeric characters in passwords"),
	passwordMinLength: z.number().int().refine((val) => val >= 6 && val <= 30, { message: "passwordMinLength must be between 6 and 30" }).optional().describe("Minimum password length (6-30)"),
	passwordMaxLength: z.number().int().refine((val) => val >= 6 && val <= 4096, { message: "passwordMaxLength must be between 6 and 4096" }).optional().describe("Maximum password length (6-4096)"),
	allowedEmailDomains: z.array(z.string()).optional().describe("Restrict registration to these email domains"),
	allowGoogleOauth: z.boolean().optional().describe("Enable Google OAuth login"),
	allowMicrosoftOauth: z.boolean().optional().describe("Enable Microsoft OAuth login"),
	disablePasswordAuth: z.boolean().optional().describe("Disable password-based authentication")
// min/max only compared when both are set.
}).refine((data) => data.passwordMinLength === void 0 || data.passwordMaxLength === void 0 || data.passwordMinLength <= data.passwordMaxLength, {
	message: "passwordMinLength must be less than or equal to passwordMaxLength",
	path: ["passwordMinLength"]
// Domain restrictions assume email-based identifiers.
}).refine((data) => !data.allowedEmailDomains || data.allowedEmailDomains.length === 0 || !data.useNonEmailIdentifier, {
	message: "allowedEmailDomains cannot be set when useNonEmailIdentifier is true",
	path: ["allowedEmailDomains"]
// Google OAuth requires email-based identifiers.
}).refine((data) => data.allowGoogleOauth === void 0 || data.allowGoogleOauth === false || !data.useNonEmailIdentifier, {
	message: "allowGoogleOauth cannot be set when useNonEmailIdentifier is true",
	path: ["allowGoogleOauth"]
// Google OAuth is only allowed alongside a non-empty domain allowlist.
}).refine((data) => !data.allowGoogleOauth || data.allowedEmailDomains && data.allowedEmailDomains.length > 0, {
	message: "allowGoogleOauth requires allowedEmailDomains to be set",
	path: ["allowGoogleOauth"]
// Microsoft OAuth requires email-based identifiers.
}).refine((data) => !data.allowMicrosoftOauth || !data.useNonEmailIdentifier, {
	message: "allowMicrosoftOauth cannot be set when useNonEmailIdentifier is true",
	path: ["allowMicrosoftOauth"]
// Microsoft OAuth is only allowed alongside a non-empty domain allowlist.
}).refine((data) => !data.allowMicrosoftOauth || data.allowedEmailDomains && data.allowedEmailDomains.length > 0, {
	message: "allowMicrosoftOauth requires allowedEmailDomains to be set",
	path: ["allowMicrosoftOauth"]
// Microsoft OAuth additionally demands password auth be disabled.
}).refine((data) => !data.allowMicrosoftOauth || data.disablePasswordAuth === true, {
	message: "allowMicrosoftOauth requires disablePasswordAuth to be enabled",
	path: ["allowMicrosoftOauth"]
// Disabling password auth needs at least one alternative login method.
}).refine((data) => !data.disablePasswordAuth || data.allowGoogleOauth === true || data.allowMicrosoftOauth === true, {
	message: "disablePasswordAuth requires allowGoogleOauth or allowMicrosoftOauth to be enabled",
	path: ["disablePasswordAuth"]
// Self password reset is meaningless without password auth.
}).refine((data) => !data.disablePasswordAuth || !data.allowSelfPasswordReset, {
	message: "disablePasswordAuth cannot be used with allowSelfPasswordReset",
	path: ["disablePasswordAuth"]
});
4392
// Zod schema for an IdP service configuration. Branded as "IdPConfig" so the
// parsed result is nominally distinct from a plain object at the type level.
const IdPSchema = z.object({
	name: z.string().describe("IdP service name"),
	// Authorization accepts two string literals or a CEL expression object.
	authorization: z.union([
		z.literal("insecure"),
		z.literal("loggedIn"),
		z.object({ cel: z.string() })
	]).describe("Authorization mode for IdP API access"),
	clients: z.array(z.string()).describe("OAuth2 client names that can use this IdP"),
	lang: IdPLangSchema.optional().describe("UI language for IdP pages"),
	// NOTE(review): the transform re-parses an already-parsed value and the
	// `input ?? {}` fallback looks unreachable (transform runs only on parse
	// success) — presumably defensive; confirm before simplifying.
	userAuthPolicy: IdPUserAuthPolicySchema.transform((input) => IdPUserAuthPolicySchema.parse(input ?? {})).optional().describe("User authentication policy configuration"),
	publishUserEvents: z.boolean().optional().describe("Enable publishing user lifecycle events"),
	gqlOperations: IdPGqlOperationsSchema.optional().describe("Configure which GraphQL operations are enabled")
}).brand("IdPConfig");
4405
+
4406
+ //#endregion
4407
+ //#region src/parser/service/secrets/schema.ts
4408
// Vault/secret name: lowercase alphanumerics and hyphens, 3-63 characters,
// must start and end with an alphanumeric character.
const nameSchema = z.string().regex(/^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$/);
// A single vault: map of secret name to secret value.
const secretsVaultSchema = z.record(nameSchema, z.string());
// All secrets: map of vault name to that vault's secrets.
const SecretsSchema = z.record(nameSchema, secretsVaultSchema);
4411
+
4412
+ //#endregion
4413
+ //#region src/parser/service/staticwebsite/schema.ts
4414
// Zod schema for a static website configuration, branded "StaticWebsiteConfig".
const StaticWebsiteSchema = z.object({
	name: z.string().describe("Static website name"),
	description: z.string().optional().describe("Static website description"),
	allowedIpAddresses: z.array(z.string()).optional().describe("IP addresses allowed to access the website")
}).brand("StaticWebsiteConfig");
4419
+
4420
+ //#endregion
4421
+ //#region src/cli/services/application.ts
4422
/**
 * Build TailorDB services and federation subgraph entries from the `db` config.
 * Namespaces marked `external` are recorded by name only; every namespace
 * (external or not) contributes a "tailordb" subgraph entry.
 * @param config - Map of namespace to TailorDB service config (may be undefined)
 * @param pluginManager - Plugin manager passed through to each created service
 * @returns Services, external namespace names, and subgraph entries
 */
function defineTailorDB(config, pluginManager) {
	const result = {
		tailorDBServices: [],
		externalTailorDBNamespaces: [],
		subgraphs: []
	};
	for (const [namespace, serviceConfig] of Object.entries(config ?? {})) {
		if ("external" in serviceConfig) {
			result.externalTailorDBNamespaces.push(namespace);
		} else {
			result.tailorDBServices.push(createTailorDBService({
				namespace,
				config: TailorDBServiceConfigSchema.parse(serviceConfig),
				pluginManager
			}));
		}
		result.subgraphs.push({
			Type: "tailordb",
			Name: namespace
		});
	}
	return result;
}
4452
/**
 * Build resolver services and federation subgraph entries from the `resolver`
 * config. External namespaces yield no service but still get a "pipeline"
 * subgraph entry.
 * @param config - Map of namespace to resolver service config (may be undefined)
 * @returns Resolver services and subgraph entries
 */
function defineResolver(config) {
	const resolverServices = [];
	const subgraphs = [];
	for (const [namespace, serviceConfig] of Object.entries(config ?? {})) {
		if (!("external" in serviceConfig)) {
			resolverServices.push(createResolverService(namespace, serviceConfig));
		}
		subgraphs.push({
			Type: "pipeline",
			Name: namespace
		});
	}
	return { resolverServices, subgraphs };
}
4474
/**
 * Build IdP services and federation subgraph entries from the `idp` config.
 * Rejects duplicate IdP names; external entries yield no parsed service but
 * still contribute an "idp" subgraph entry.
 * @param config - Array of IdP configs (may be undefined)
 * @returns IdP services and subgraph entries
 * @throws Error when two IdP configs share a name
 */
function defineIdp(config) {
	const idpServices = [];
	const subgraphs = [];
	if (!config) return { idpServices, subgraphs };
	const seenNames = new Set();
	for (const idpConfig of config) {
		const { name } = idpConfig;
		if (seenNames.has(name)) throw new Error(`IdP with name "${name}" already defined.`);
		seenNames.add(name);
		if (!("external" in idpConfig)) {
			idpServices.push(IdPSchema.parse(idpConfig));
		}
		subgraphs.push({
			Type: "idp",
			Name: name
		});
	}
	return { idpServices, subgraphs };
}
4500
/**
 * Build the auth service (when not external) and its subgraph entry.
 * @param config - Auth config (may be undefined)
 * @param tailorDBServices - TailorDB services the auth service may reference
 * @param externalTailorDBNamespaces - External TailorDB namespace names
 * @returns Auth service (or undefined) and subgraph entries
 */
function defineAuth(config, tailorDBServices, externalTailorDBNamespaces) {
	if (!config) return {
		authService: undefined,
		subgraphs: []
	};
	// External auth configs still register a subgraph but create no service.
	const authService = "external" in config ? undefined : createAuthService(config, tailorDBServices, externalTailorDBNamespaces);
	return {
		authService,
		subgraphs: [{
			Type: "auth",
			Name: config.name
		}]
	};
}
4517
/**
 * Create the executor service when an executor config exists or plugins
 * contribute executors; otherwise return undefined.
 * @param config - Executor config (may be undefined)
 * @param hasPluginExecutors - Whether plugin-generated executors exist
 * @returns Executor service, or undefined when nothing requires one
 */
function defineExecutor(config, hasPluginExecutors) {
	if (!config && !hasPluginExecutors) return undefined;
	// Plugin-only setups get an empty file list as their base config.
	const executorConfig = config ?? { files: [] };
	return createExecutorService({ config: executorConfig });
}
4521
/**
 * Create the workflow service from config, or undefined when no config is set.
 * @param config - Workflow config (may be undefined)
 * @returns Workflow service, or undefined
 */
function defineWorkflow(config) {
	return config ? createWorkflowService({ config }) : undefined;
}
4525
/**
 * Parse and validate static website configs, rejecting duplicate names.
 * @param websites - Array of static website configs (may be undefined)
 * @returns Parsed static website services
 * @throws Error when two websites share a name
 */
function defineStaticWebsites(websites) {
	const staticWebsiteServices = [];
	const seenNames = new Set();
	for (const config of websites ?? []) {
		// Parse first so schema errors surface before the duplicate check.
		const website = StaticWebsiteSchema.parse(config);
		if (seenNames.has(website.name)) throw new Error(`Static website with name "${website.name}" already defined.`);
		seenNames.add(website.name);
		staticWebsiteServices.push(website);
	}
	return staticWebsiteServices;
}
4536
/**
 * Validate the secrets config and flatten it into a list of vault entries.
 * @param config - Map of vault name to { secretName: value } (may be undefined)
 * @returns Array of { vaultName, secrets: [{ name, value }] }
 */
function defineSecretManager(config) {
	if (!config) return [];
	// Copy own enumerable entries before validation.
	const parsed = SecretsSchema.parse(Object.fromEntries(Object.entries(config)));
	const vaults = [];
	for (const [vaultName, vaultSecrets] of Object.entries(parsed)) {
		const secrets = Object.entries(vaultSecrets).map(([name, value]) => ({ name, value }));
		vaults.push({ vaultName, secrets });
	}
	return vaults;
}
4548
/**
 * Build all per-service results from the application config.
 * TailorDB is built first because the auth service needs its output.
 * @param config - Application config
 * @param pluginManager - Plugin manager forwarded to TailorDB creation
 * @returns All service build results keyed by service kind
 */
function defineServices(config, pluginManager) {
	const tailordbResult = defineTailorDB(config.db, pluginManager);
	const resolverResult = defineResolver(config.resolver);
	const idpResult = defineIdp(config.idp);
	const authResult = defineAuth(config.auth, tailordbResult.tailorDBServices, tailordbResult.externalTailorDBNamespaces);
	const staticWebsiteServices = defineStaticWebsites(config.staticWebsites);
	const secrets = defineSecretManager(config.secrets);
	return {
		tailordbResult,
		resolverResult,
		idpResult,
		authResult,
		staticWebsiteServices,
		secrets
	};
}
4559
/**
 * Assemble the final application object from pre-built service results.
 * @param params - Config plus the results produced by defineServices and the
 *   executor/workflow/env values resolved by the caller
 * @returns The application object
 */
function buildApplication(params) {
	const application = {
		name: params.config.name,
		config: params.config,
		// Federation subgraphs from all services, concatenated in a fixed
		// order: tailordb, resolver, idp, auth.
		subgraphs: [
			...params.tailordbResult.subgraphs,
			...params.resolverResult.subgraphs,
			...params.idpResult.subgraphs,
			...params.authResult.subgraphs
		],
		tailorDBServices: params.tailordbResult.tailorDBServices,
		externalTailorDBNamespaces: params.tailordbResult.externalTailorDBNamespaces,
		resolverServices: params.resolverResult.resolverServices,
		idpServices: params.idpResult.idpServices,
		authService: params.authResult.authService,
		executorService: params.executorService,
		workflowService: params.workflowService,
		staticWebsiteServices: params.staticWebsiteServices,
		secrets: params.secrets,
		env: params.env,
		// Self-referential getter: presents this single application as a
		// one-element list. A getter (not an eager property) so it can close
		// over `application` before the literal finishes evaluating.
		get applications() {
			return [application];
		}
	};
	return application;
}
4585
/**
 * Define a Tailor application from the given configuration.
 * This is a lightweight, synchronous function that creates the application
 * structure without loading types or bundling files.
 * @param params - Parameters for defining the application
 * @returns Configured application instance
 */
function defineApplication(params) {
	const { config, pluginManager } = params;
	return buildApplication({
		config,
		...defineServices(config, pluginManager),
		// No plugin executors exist in the synchronous path.
		executorService: defineExecutor(config.executor, false),
		workflowService: defineWorkflow(config.workflow),
		env: config.env ?? {}
	});
}
4605
/**
 * Generate plugin type and executor files if a plugin manager is provided.
 * Collects source type info from TailorDB services and delegates to PluginManager.
 * @param pluginManager - Plugin manager instance (skips if undefined)
 * @param tailorDBServices - TailorDB services to collect type source info from
 * @param configPath - Path to tailor.config.ts for resolving plugin imports
 * @returns Generated executor file paths
 */
function generatePluginFilesIfNeeded(pluginManager, tailorDBServices, configPath) {
	if (!pluginManager) return [];
	// Collect { typeName -> source location } across all TailorDB services,
	// skipping types without a file path.
	const sourceTypeInfoMap = new Map();
	for (const db of tailorDBServices) {
		for (const [typeName, sourceInfo] of Object.entries(db.typeSourceInfo)) {
			if (!sourceInfo.filePath) continue;
			sourceTypeInfoMap.set(typeName, {
				filePath: sourceInfo.filePath,
				exportName: sourceInfo.exportName
			});
		}
	}
	return pluginManager.generatePluginFiles({
		outputDir: path.join(getDistDir(), "plugin"),
		sourceTypeInfoMap,
		configPath,
		typeGenerator: generatePluginTypeFiles,
		executorGenerator: generatePluginExecutorFiles
	});
}
4631
/**
 * Load and fully initialize a Tailor application.
 * This performs all I/O-heavy operations: loading types, processing plugins,
 * generating plugin files, bundling, and loading definitions for validation.
 * @param params - Parameters for defining and loading the application
 * @returns Fully initialized application with workflow results
 */
async function loadApplication(params) {
	const { config, pluginManager, bundleCache } = params;
	const { tailordbResult, resolverResult, idpResult, authResult, staticWebsiteServices, secrets } = defineServices(config, pluginManager);
	// Load types sequentially; plugin processing for a namespace runs after
	// its types are loaded.
	for (const tailordb of tailordbResult.tailorDBServices) {
		await tailordb.loadTypes();
		await tailordb.processNamespacePlugins();
	}
	// Plugin files must exist before deciding whether an executor service is
	// needed (plugin executors alone can require one).
	const pluginExecutorFiles = generatePluginFilesIfNeeded(pluginManager, tailordbResult.tailorDBServices, config.path);
	const executorService = defineExecutor(config.executor, pluginExecutorFiles.length > 0);
	const workflowService = defineWorkflow(config.workflow);
	if (workflowService) await workflowService.loadWorkflows();
	const triggerContext = await buildTriggerContext(config.workflow);
	const inlineSourcemap = resolveInlineSourcemap(config.inlineSourcemap);
	// Bundle phase: resolvers, executors, workflow jobs, auth hooks.
	for (const pipeline of resolverResult.resolverServices) await bundleResolvers(pipeline.namespace, pipeline.config, triggerContext, bundleCache, inlineSourcemap);
	if (executorService) await bundleExecutors({
		config: executorService.config,
		triggerContext,
		additionalFiles: [...pluginExecutorFiles],
		cache: bundleCache,
		inlineSourcemap
	});
	let workflowBuildResult;
	if (workflowService && workflowService.jobs.length > 0) {
		const mainJobNames = workflowService.workflowSources.map((ws) => ws.workflow.mainJob.name);
		workflowBuildResult = await bundleWorkflowJobs(workflowService.jobs, mainJobNames, config.env ?? {}, triggerContext, bundleCache, inlineSourcemap);
	}
	// Auth hooks are only bundled when a beforeLogin hook is configured.
	if (authResult.authService?.config.hooks?.beforeLogin) {
		const authName = authResult.authService.config.name;
		await bundleAuthHooks({
			configPath: config.path,
			authName,
			handlerAccessPath: `auth.hooks.beforeLogin.handler`,
			triggerContext,
			cache: bundleCache,
			inlineSourcemap
		});
	}
	// Load phase: runs after bundling so loaders see the bundled output.
	for (const pipeline of resolverResult.resolverServices) await pipeline.loadResolvers();
	if (executorService) {
		await executorService.loadExecutors();
		if (pluginExecutorFiles.length > 0) await executorService.loadPluginExecutorFiles([...pluginExecutorFiles]);
	}
	if (workflowService) workflowService.printLoadedWorkflows();
	logger.newline();
	return {
		application: buildApplication({
			config,
			tailordbResult,
			resolverResult,
			idpResult,
			authResult,
			executorService,
			workflowService,
			staticWebsiteServices,
			secrets,
			env: config.env ?? {}
		}),
		workflowBuildResult
	};
}
4698
+
4699
+ //#endregion
4700
+ export { writePlatformConfig as S, hashFile as _, loadConfig as a, loadWorkspaceId as b, ExecutorSchema as c, TailorDBTypeSchema as d, stringifyFunction as f, getDistDir as g, createBundleCache as h, resolveInlineSourcemap as i, OAuth2ClientSchema as l, loadFilesWithIgnores as m, generatePluginFilesIfNeeded as n, WorkflowJobSchema as o, tailorUserMap as p, loadApplication as r, createExecutorService as s, defineApplication as t, ResolverSchema as u, fetchLatestToken as v, readPlatformConfig as x, loadAccessToken as y };
4701
+ //# sourceMappingURL=application-CBJFUKrU.mjs.map