@tailor-platform/sdk 1.21.0 → 1.23.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +61 -0
- package/dist/application-CTQe2HSB.mjs +5723 -0
- package/dist/application-CTQe2HSB.mjs.map +1 -0
- package/dist/application-DdSu3baZ.mjs +8 -0
- package/dist/{brand-BZJCv6UY.mjs → brand-DyPrAzpM.mjs} +1 -1
- package/dist/{brand-BZJCv6UY.mjs.map → brand-DyPrAzpM.mjs.map} +1 -1
- package/dist/cli/index.d.mts +0 -1
- package/dist/cli/index.mjs +544 -57
- package/dist/cli/index.mjs.map +1 -1
- package/dist/cli/lib.d.mts +49 -9
- package/dist/cli/lib.mjs +15 -16
- package/dist/cli/lib.mjs.map +1 -1
- package/dist/cli/skills.d.mts +0 -1
- package/dist/configure/index.d.mts +4 -5
- package/dist/configure/index.mjs +15 -4
- package/dist/configure/index.mjs.map +1 -1
- package/dist/{enum-constants-CGVvu3dd.mjs → enum-constants-B5Nl-yzx.mjs} +1 -1
- package/dist/{enum-constants-CGVvu3dd.mjs.map → enum-constants-B5Nl-yzx.mjs.map} +1 -1
- package/dist/{file-utils-GX_tGWl4.mjs → file-utils-sEOwAdJ4.mjs} +1 -1
- package/dist/{file-utils-GX_tGWl4.mjs.map → file-utils-sEOwAdJ4.mjs.map} +1 -1
- package/dist/{index-oZXVKyfX.d.mts → index-BSXclved.d.mts} +2 -3
- package/dist/{index-BWVAwea4.d.mts → index-CO-jsOMb.d.mts} +2 -3
- package/dist/{index-CnHd6BNg.d.mts → index-CU2kZzKa.d.mts} +4 -11
- package/dist/{index-DxlmLUag.d.mts → index-DQlsfhpg.d.mts} +2 -3
- package/dist/{index-Dn61THJK.d.mts → index-lIALNMi_.d.mts} +2 -3
- package/dist/{interceptor-D8MeZOxX.mjs → interceptor-DiARwPfw.mjs} +1 -1
- package/dist/{interceptor-D8MeZOxX.mjs.map → interceptor-DiARwPfw.mjs.map} +1 -1
- package/dist/{job-2Q82qQ6N.mjs → job-CRavYLLk.mjs} +4 -24
- package/dist/job-CRavYLLk.mjs.map +1 -0
- package/dist/kysely/index.d.mts +2 -3
- package/dist/kysely/index.mjs +2 -2
- package/dist/kysely/index.mjs.map +1 -1
- package/dist/{kysely-type-Cpq5TNGY.mjs → kysely-type-CSlcwNFH.mjs} +1 -1
- package/dist/{kysely-type-Cpq5TNGY.mjs.map → kysely-type-CSlcwNFH.mjs.map} +1 -1
- package/dist/package-json-BI0ng3_5.mjs +3 -0
- package/dist/{package-json-3H5gfhA4.mjs → package-json-iVBhE5Ef.mjs} +1 -1
- package/dist/{package-json-3H5gfhA4.mjs.map → package-json-iVBhE5Ef.mjs.map} +1 -1
- package/dist/plugin/builtin/enum-constants/index.d.mts +2 -3
- package/dist/plugin/builtin/enum-constants/index.mjs +1 -1
- package/dist/plugin/builtin/file-utils/index.d.mts +2 -3
- package/dist/plugin/builtin/file-utils/index.mjs +1 -1
- package/dist/plugin/builtin/kysely-type/index.d.mts +2 -3
- package/dist/plugin/builtin/kysely-type/index.mjs +1 -1
- package/dist/plugin/builtin/seed/index.d.mts +2 -3
- package/dist/plugin/builtin/seed/index.mjs +1 -1
- package/dist/plugin/index.d.mts +1 -2
- package/dist/plugin/index.mjs +3 -3
- package/dist/plugin/index.mjs.map +1 -1
- package/dist/{update-9MTRN1UA.mjs → query-BLQBOaAM.mjs} +1199 -198
- package/dist/query-BLQBOaAM.mjs.map +1 -0
- package/dist/{schema-D5Cpd8fQ.mjs → schema-Cjm-OvPF.mjs} +2 -2
- package/dist/schema-Cjm-OvPF.mjs.map +1 -0
- package/dist/{seed-D-rYCN5F.mjs → seed-CXvCW3Xc.mjs} +2 -2
- package/dist/{seed-D-rYCN5F.mjs.map → seed-CXvCW3Xc.mjs.map} +1 -1
- package/dist/telemetry-BAxP8-PR.mjs +3 -0
- package/dist/{telemetry-DuBhnd0X.mjs → telemetry-C46fds1l.mjs} +2 -2
- package/dist/{telemetry-DuBhnd0X.mjs.map → telemetry-C46fds1l.mjs.map} +1 -1
- package/dist/{types-QKq1usl7.d.mts → types--G4ilVmx.d.mts} +8 -9
- package/dist/{types-ClK_HJ0G.mjs → types-CBTSg-LK.mjs} +1 -1
- package/dist/{types-ClK_HJ0G.mjs.map → types-CBTSg-LK.mjs.map} +1 -1
- package/dist/{types-C0o90Cmb.d.mts → types-IR-hw0-y.d.mts} +6 -3
- package/dist/utils/test/index.d.mts +42 -5
- package/dist/utils/test/index.mjs +78 -3
- package/dist/utils/test/index.mjs.map +1 -1
- package/docs/cli/function.md +83 -3
- package/docs/services/auth.md +2 -2
- package/package.json +9 -7
- package/postinstall.mjs +4 -14
- package/dist/application-CEv5c7TU.mjs +0 -102207
- package/dist/application-CEv5c7TU.mjs.map +0 -1
- package/dist/application-DiCzM9b0.mjs +0 -9
- package/dist/chunk-CqAI0b6X.mjs +0 -47
- package/dist/jiti-DfS9jItj.mjs +0 -4482
- package/dist/jiti-DfS9jItj.mjs.map +0 -1
- package/dist/job-2Q82qQ6N.mjs.map +0 -1
- package/dist/package-json-DTDAqRRJ.mjs +0 -3
- package/dist/schema-D5Cpd8fQ.mjs.map +0 -1
- package/dist/src-Bb1UVstT.mjs +0 -1038
- package/dist/src-Bb1UVstT.mjs.map +0 -1
- package/dist/telemetry-Dhzj9Ncm.mjs +0 -3
- package/dist/update-9MTRN1UA.mjs.map +0 -1
- package/dist/user-defined.d.ts +0 -13
|
@@ -1,11 +1,10 @@
|
|
|
1
|
-
import { t as db } from "./schema-
|
|
2
|
-
import { $ as
|
|
3
|
-
import { t as readPackageJson } from "./package-json-
|
|
4
|
-
import { r as withSpan } from "./telemetry-
|
|
5
|
-
import { createRequire } from "node:module";
|
|
1
|
+
import { t as db } from "./schema-Cjm-OvPF.mjs";
|
|
2
|
+
import { $ as AuthSCIMAttribute_Mutability, A as platformBaseUrl, B as TailorDBType_Permission_Permit, C as readPlatformConfig, E as fetchMachineUserToken, F as WorkflowJobExecution_Status, H as PipelineResolver_OperationType, I as TailorDBGQLPermission_Action, J as ExecutorTriggerType, K as ExecutorJobStatus, L as TailorDBGQLPermission_Operator, M as userAgent, N as WorkspacePlatformUserRole, P as WorkflowExecution_Status, Q as AuthOAuth2Client_GrantType, R as TailorDBGQLPermission_Permit, S as loadWorkspaceId, T as fetchAll, U as IdPLang, V as TailorDBType_PermitAction, W as FunctionExecution_Status, X as AuthInvokerSchema, Y as AuthIDPConfig_AuthType, Z as AuthOAuth2Client_ClientType, _ as hashFile, a as loadConfig, at as UserProfileProviderConfig_UserProfileProviderType, b as loadFolderId, ct as Condition_Operator, d as TailorDBTypeSchema, dt as ApplicationSchemaUpdateAttemptStatus, et as AuthSCIMAttribute_Type, f as stringifyFunction, ft as Subgraph_ServiceType, g as getDistDir, h as createBundleCache, ht as symbols, it as TenantProviderConfig_TenantProviderType, j as resolveStaticWebsiteUrls, k as initOperatorClient, l as OAuth2ClientSchema, lt as FilterSchema, m as loadFilesWithIgnores, mt as styles, n as generatePluginFilesIfNeeded, nt as AuthSCIMConfig_AuthorizationType, ot as GetApplicationSchemaHealthResponse_ApplicationSchemaHealthStatus, p as tailorUserMap, pt as logger, q as ExecutorTargetType, r as loadApplication, s as createExecutorService, st as ConditionSchema, t as defineApplication, tt as AuthSCIMAttribute_Uniqueness, ut as PageDirection, w as writePlatformConfig, x as loadOrganizationId, y as loadAccessToken, z as TailorDBType_Permission_Operator } from "./application-CTQe2HSB.mjs";
|
|
3
|
+
import { t as readPackageJson } from "./package-json-iVBhE5Ef.mjs";
|
|
4
|
+
import { r as withSpan } from "./telemetry-C46fds1l.mjs";
|
|
6
5
|
import { arg, defineCommand, runCommand } from "politty";
|
|
7
6
|
import { z } from "zod";
|
|
8
|
-
import * as fs$
|
|
7
|
+
import * as fs$1 from "node:fs";
|
|
9
8
|
import { parseEnv } from "node:util";
|
|
10
9
|
import * as path from "pathe";
|
|
11
10
|
import chalk from "chalk";
|
|
@@ -17,19 +16,50 @@ import { resolveTSConfig } from "pkg-types";
|
|
|
17
16
|
import { findUpSync } from "find-up-simple";
|
|
18
17
|
import ml from "multiline-ts";
|
|
19
18
|
import * as crypto from "node:crypto";
|
|
19
|
+
import { pathToFileURL } from "node:url";
|
|
20
20
|
import * as inflection from "inflection";
|
|
21
|
-
import * as fs$1 from "node:fs/promises";
|
|
22
|
-
import { glob } from "node:fs/promises";
|
|
23
21
|
import * as rolldown from "rolldown";
|
|
22
|
+
import * as fs from "node:fs/promises";
|
|
23
|
+
import { glob } from "node:fs/promises";
|
|
24
24
|
import { create, fromJson, toJson } from "@bufbuild/protobuf";
|
|
25
25
|
import ora from "ora";
|
|
26
26
|
import { setTimeout as setTimeout$1 } from "timers/promises";
|
|
27
27
|
import { spawn } from "node:child_process";
|
|
28
28
|
import { watch } from "chokidar";
|
|
29
29
|
import * as madgeModule from "madge";
|
|
30
|
+
import { createInterface } from "node:readline/promises";
|
|
31
|
+
import { astVisitor, parse, toSql } from "pgsql-ast-parser";
|
|
32
|
+
import { parse as parse$1 } from "@0no-co/graphql.web";
|
|
30
33
|
|
|
31
34
|
//#region src/cli/shared/errors.ts
|
|
32
35
|
/**
|
|
36
|
+
* Format CLI error for output
|
|
37
|
+
* @param error - CLIError instance to format
|
|
38
|
+
* @returns Formatted error message
|
|
39
|
+
*/
|
|
40
|
+
function formatError(error) {
|
|
41
|
+
const parts = [chalk.red(`Error${error.code ? ` [${error.code}]` : ""}: ${error.message}`)];
|
|
42
|
+
if (error.details) parts.push(`\n ${chalk.gray("Details:")} ${error.details}`);
|
|
43
|
+
if (error.suggestion) parts.push(`\n ${chalk.cyan("Suggestion:")} ${error.suggestion}`);
|
|
44
|
+
if (error.command) parts.push(`\n ${chalk.gray("Help:")} Run \`tailor-sdk ${error.command} --help\` for usage information.`);
|
|
45
|
+
return parts.join("");
|
|
46
|
+
}
|
|
47
|
+
/**
|
|
48
|
+
* Create a CLI error with formatted output
|
|
49
|
+
* @param options - Options to construct a CLIError
|
|
50
|
+
* @returns Constructed CLIError instance
|
|
51
|
+
*/
|
|
52
|
+
function createCLIError(options) {
|
|
53
|
+
const error = new Error(options.message);
|
|
54
|
+
error.name = "CLIError";
|
|
55
|
+
error.code = options.code;
|
|
56
|
+
error.details = options.details;
|
|
57
|
+
error.suggestion = options.suggestion;
|
|
58
|
+
error.command = options.command;
|
|
59
|
+
error.format = () => formatError(error);
|
|
60
|
+
return error;
|
|
61
|
+
}
|
|
62
|
+
/**
|
|
33
63
|
* Type guard to check if an error is a CLIError
|
|
34
64
|
* @param error - Error to check
|
|
35
65
|
* @returns True if the error is a CLIError
|
|
@@ -83,11 +113,11 @@ function loadEnvFiles(envFiles, envFilesIfExists) {
|
|
|
83
113
|
const load = (files, required) => {
|
|
84
114
|
for (const file of [files ?? []].flat()) {
|
|
85
115
|
const envPath = path.resolve(process.cwd(), file);
|
|
86
|
-
if (!fs$
|
|
116
|
+
if (!fs$1.existsSync(envPath)) {
|
|
87
117
|
if (required) throw new Error(`Environment file not found: ${envPath}`);
|
|
88
118
|
continue;
|
|
89
119
|
}
|
|
90
|
-
const parsed = parseEnv(fs$
|
|
120
|
+
const parsed = parseEnv(fs$1.readFileSync(envPath, "utf-8"));
|
|
91
121
|
for (const [key, value] of Object.entries(parsed)) {
|
|
92
122
|
if (originalEnvKeys.has(key)) continue;
|
|
93
123
|
process.env[key] = value;
|
|
@@ -107,9 +137,19 @@ function loadEnvFiles(envFiles, envFilesIfExists) {
|
|
|
107
137
|
const commonArgs = {
|
|
108
138
|
"env-file": arg(z.string().optional(), {
|
|
109
139
|
alias: "e",
|
|
110
|
-
description: "Path to the environment file (error if not found)"
|
|
140
|
+
description: "Path to the environment file (error if not found)",
|
|
141
|
+
completion: {
|
|
142
|
+
type: "file",
|
|
143
|
+
matcher: [".env.*", ".env"]
|
|
144
|
+
}
|
|
145
|
+
}),
|
|
146
|
+
"env-file-if-exists": arg(z.string().optional(), {
|
|
147
|
+
description: "Path to the environment file (ignored if not found)",
|
|
148
|
+
completion: {
|
|
149
|
+
type: "file",
|
|
150
|
+
matcher: [".env.*", ".env"]
|
|
151
|
+
}
|
|
111
152
|
}),
|
|
112
|
-
"env-file-if-exists": arg(z.string().optional(), { description: "Path to the environment file (ignored if not found)" }),
|
|
113
153
|
verbose: arg(z.boolean().default(false), { description: "Enable verbose logging" })
|
|
114
154
|
};
|
|
115
155
|
/**
|
|
@@ -118,22 +158,32 @@ const commonArgs = {
|
|
|
118
158
|
const workspaceArgs = {
|
|
119
159
|
"workspace-id": arg(z.string().optional(), {
|
|
120
160
|
alias: "w",
|
|
121
|
-
description: "Workspace ID"
|
|
161
|
+
description: "Workspace ID",
|
|
162
|
+
completion: { type: "none" }
|
|
122
163
|
}),
|
|
123
164
|
profile: arg(z.string().optional(), {
|
|
124
165
|
alias: "p",
|
|
125
|
-
description: "Workspace profile"
|
|
166
|
+
description: "Workspace profile",
|
|
167
|
+
completion: { type: "none" }
|
|
126
168
|
})
|
|
127
169
|
};
|
|
128
170
|
/**
|
|
171
|
+
* Shared config arg for commands that accept a config file path
|
|
172
|
+
*/
|
|
173
|
+
const configArg = { config: arg(z.string().default("tailor.config.ts"), {
|
|
174
|
+
alias: "c",
|
|
175
|
+
description: "Path to SDK config file",
|
|
176
|
+
completion: {
|
|
177
|
+
type: "file",
|
|
178
|
+
extensions: ["ts"]
|
|
179
|
+
}
|
|
180
|
+
}) };
|
|
181
|
+
/**
|
|
129
182
|
* Arguments for commands that interact with deployed resources (includes config)
|
|
130
183
|
*/
|
|
131
184
|
const deploymentArgs = {
|
|
132
185
|
...workspaceArgs,
|
|
133
|
-
|
|
134
|
-
alias: "c",
|
|
135
|
-
description: "Path to SDK config file"
|
|
136
|
-
})
|
|
186
|
+
...configArg
|
|
137
187
|
};
|
|
138
188
|
/**
|
|
139
189
|
* Arguments for commands that require confirmation
|
|
@@ -162,7 +212,7 @@ const withCommonArgs = (handler) => async (args) => {
|
|
|
162
212
|
try {
|
|
163
213
|
if ("json" in args && typeof args.json === "boolean") logger.jsonMode = args.json;
|
|
164
214
|
loadEnvFiles(args["env-file"], args["env-file-if-exists"]);
|
|
165
|
-
const { initTelemetry } = await import("./telemetry-
|
|
215
|
+
const { initTelemetry } = await import("./telemetry-BAxP8-PR.mjs");
|
|
166
216
|
await initTelemetry();
|
|
167
217
|
await handler(args);
|
|
168
218
|
} catch (error) {
|
|
@@ -175,7 +225,7 @@ const withCommonArgs = (handler) => async (args) => {
|
|
|
175
225
|
} else logger.error(`Unknown error: ${error}`);
|
|
176
226
|
process.exit(1);
|
|
177
227
|
} finally {
|
|
178
|
-
const { shutdownTelemetry } = await import("./telemetry-
|
|
228
|
+
const { shutdownTelemetry } = await import("./telemetry-BAxP8-PR.mjs");
|
|
179
229
|
await shutdownTelemetry();
|
|
180
230
|
}
|
|
181
231
|
process.exit(0);
|
|
@@ -229,7 +279,7 @@ const apiCommand = defineCommand({
|
|
|
229
279
|
positional: true,
|
|
230
280
|
description: "API endpoint to call (e.g., 'GetApplication' or 'tailor.v1.OperatorService/GetApplication')"
|
|
231
281
|
})
|
|
232
|
-
}),
|
|
282
|
+
}).strict(),
|
|
233
283
|
run: withCommonArgs(async (args) => {
|
|
234
284
|
const result = await apiCall({
|
|
235
285
|
profile: args.profile,
|
|
@@ -283,7 +333,7 @@ function createCacheStore(config) {
|
|
|
283
333
|
}
|
|
284
334
|
function loadManifest() {
|
|
285
335
|
try {
|
|
286
|
-
const raw = fs$
|
|
336
|
+
const raw = fs$1.readFileSync(manifestPath(), "utf-8");
|
|
287
337
|
const result = cacheManifestSchema.safeParse(JSON.parse(raw));
|
|
288
338
|
if (!result.success) {
|
|
289
339
|
cachedManifest = void 0;
|
|
@@ -310,15 +360,15 @@ function createCacheStore(config) {
|
|
|
310
360
|
return cachedManifest;
|
|
311
361
|
}
|
|
312
362
|
function saveManifest(manifest) {
|
|
313
|
-
fs$
|
|
363
|
+
fs$1.mkdirSync(config.cacheDir, { recursive: true });
|
|
314
364
|
const target = manifestPath();
|
|
315
365
|
const tmpFile = path.join(config.cacheDir, `.manifest.${process.pid}.tmp`);
|
|
316
366
|
try {
|
|
317
|
-
fs$
|
|
318
|
-
fs$
|
|
367
|
+
fs$1.writeFileSync(tmpFile, JSON.stringify(manifest, null, 2), "utf-8");
|
|
368
|
+
fs$1.renameSync(tmpFile, target);
|
|
319
369
|
} catch (e) {
|
|
320
370
|
try {
|
|
321
|
-
fs$
|
|
371
|
+
fs$1.rmSync(tmpFile, { force: true });
|
|
322
372
|
} catch {}
|
|
323
373
|
throw e;
|
|
324
374
|
}
|
|
@@ -337,29 +387,29 @@ function createCacheStore(config) {
|
|
|
337
387
|
}
|
|
338
388
|
function storeBundleOutput(cacheKey, sourcePath) {
|
|
339
389
|
const dir = bundlesDir();
|
|
340
|
-
fs$
|
|
341
|
-
fs$
|
|
390
|
+
fs$1.mkdirSync(dir, { recursive: true });
|
|
391
|
+
fs$1.copyFileSync(sourcePath, bundlePath(cacheKey));
|
|
342
392
|
const mapSource = `${sourcePath}.map`;
|
|
343
393
|
const cachedMapPath = `${bundlePath(cacheKey)}.map`;
|
|
344
|
-
if (fs$
|
|
345
|
-
else fs$
|
|
394
|
+
if (fs$1.existsSync(mapSource)) fs$1.copyFileSync(mapSource, cachedMapPath);
|
|
395
|
+
else fs$1.rmSync(cachedMapPath, { force: true });
|
|
346
396
|
}
|
|
347
397
|
function restoreBundleOutput(cacheKey, targetPath) {
|
|
348
398
|
const cached = bundlePath(cacheKey);
|
|
349
399
|
const targetDir = path.dirname(targetPath);
|
|
350
|
-
fs$
|
|
400
|
+
fs$1.mkdirSync(targetDir, { recursive: true });
|
|
351
401
|
try {
|
|
352
|
-
fs$
|
|
402
|
+
fs$1.copyFileSync(cached, targetPath);
|
|
353
403
|
} catch (e) {
|
|
354
404
|
if (e.code === "ENOENT") return false;
|
|
355
405
|
throw e;
|
|
356
406
|
}
|
|
357
407
|
const cachedMap = `${cached}.map`;
|
|
358
|
-
if (fs$
|
|
408
|
+
if (fs$1.existsSync(cachedMap)) fs$1.copyFileSync(cachedMap, `${targetPath}.map`);
|
|
359
409
|
return true;
|
|
360
410
|
}
|
|
361
411
|
function clean() {
|
|
362
|
-
fs$
|
|
412
|
+
fs$1.rmSync(config.cacheDir, {
|
|
363
413
|
recursive: true,
|
|
364
414
|
force: true
|
|
365
415
|
});
|
|
@@ -470,7 +520,6 @@ export {};
|
|
|
470
520
|
|
|
471
521
|
`;
|
|
472
522
|
}
|
|
473
|
-
const require = createRequire(import.meta.url);
|
|
474
523
|
function collectAttributesFromConfig(config) {
|
|
475
524
|
const auth = config.auth;
|
|
476
525
|
if (!auth || typeof auth !== "object") return {};
|
|
@@ -513,12 +562,7 @@ function collectAttributesFromConfig(config) {
|
|
|
513
562
|
* @returns Absolute path to the type definition file
|
|
514
563
|
*/
|
|
515
564
|
function resolveTypeDefinitionPath(configPath) {
|
|
516
|
-
|
|
517
|
-
if (typePath) return path.resolve(process.cwd(), typePath);
|
|
518
|
-
const configDir = path.dirname(path.resolve(configPath));
|
|
519
|
-
const packageDir = resolvePackageDirectory(configDir);
|
|
520
|
-
if (!packageDir) return path.join(configDir, "node_modules", "@tailor-platform", "sdk", "dist", "user-defined.d.ts");
|
|
521
|
-
return path.join(packageDir, "dist", "user-defined.d.ts");
|
|
565
|
+
return path.join(path.dirname(path.resolve(configPath)), "tailor.d.ts");
|
|
522
566
|
}
|
|
523
567
|
/**
|
|
524
568
|
* Generate user type definitions from the app config and write them to disk.
|
|
@@ -536,8 +580,8 @@ async function generateUserTypes(options) {
|
|
|
536
580
|
if (env) logger.debug(`Extracted Env: ${JSON.stringify(env)}`);
|
|
537
581
|
const typeDefContent = generateTypeDefinition(attributeMap, attributeList, env);
|
|
538
582
|
const outputPath = resolveTypeDefinitionPath(configPath);
|
|
539
|
-
fs$
|
|
540
|
-
fs$
|
|
583
|
+
fs$1.mkdirSync(path.dirname(outputPath), { recursive: true });
|
|
584
|
+
fs$1.writeFileSync(outputPath, typeDefContent);
|
|
541
585
|
const relativePath = path.relative(process.cwd(), outputPath);
|
|
542
586
|
logger.newline();
|
|
543
587
|
logger.success(`Generated type definitions: ${relativePath}`, { mode: "plain" });
|
|
@@ -546,23 +590,6 @@ async function generateUserTypes(options) {
|
|
|
546
590
|
logger.error(String(error));
|
|
547
591
|
}
|
|
548
592
|
}
|
|
549
|
-
function resolvePackageDirectory(startDir) {
|
|
550
|
-
let currentDir = startDir;
|
|
551
|
-
const root = path.parse(currentDir).root;
|
|
552
|
-
while (true) {
|
|
553
|
-
const candidate = path.join(currentDir, "node_modules", "@tailor-platform", "sdk");
|
|
554
|
-
const packageJsonPath = path.join(candidate, "package.json");
|
|
555
|
-
if (fs$2.existsSync(packageJsonPath)) return candidate;
|
|
556
|
-
if (currentDir === root) break;
|
|
557
|
-
currentDir = path.dirname(currentDir);
|
|
558
|
-
}
|
|
559
|
-
try {
|
|
560
|
-
const resolved = require.resolve("@tailor-platform/sdk/package.json", { paths: [startDir] });
|
|
561
|
-
return path.dirname(resolved);
|
|
562
|
-
} catch {
|
|
563
|
-
return null;
|
|
564
|
-
}
|
|
565
|
-
}
|
|
566
593
|
|
|
567
594
|
//#endregion
|
|
568
595
|
//#region src/parser/plugin-config/generation-types.ts
|
|
@@ -999,6 +1026,10 @@ async function planApplication(context) {
|
|
|
999
1026
|
changeSet.print();
|
|
1000
1027
|
return changeSet;
|
|
1001
1028
|
}
|
|
1029
|
+
if (application.subgraphs.length === 0) {
|
|
1030
|
+
changeSet.print();
|
|
1031
|
+
return changeSet;
|
|
1032
|
+
}
|
|
1002
1033
|
let authNamespace;
|
|
1003
1034
|
let authIdpConfigName;
|
|
1004
1035
|
if (application.authService && application.authService.config) {
|
|
@@ -2352,6 +2383,73 @@ async function confirmImportantResourceDeletion(resources, yes) {
|
|
|
2352
2383
|
`);
|
|
2353
2384
|
}
|
|
2354
2385
|
|
|
2386
|
+
//#endregion
|
|
2387
|
+
//#region src/cli/shared/runtime-args.ts
|
|
2388
|
+
/**
|
|
2389
|
+
* Runtime args transformation for all services.
|
|
2390
|
+
*
|
|
2391
|
+
* Each service transforms server-side args/context into SDK-friendly format:
|
|
2392
|
+
* - Executor: server-side expression evaluated by platform before calling function
|
|
2393
|
+
* - Resolver: operationHook expression evaluated by platform before calling function
|
|
2394
|
+
*
|
|
2395
|
+
* The user field mapping (server → SDK) shared across services is defined in
|
|
2396
|
+
* `@/parser/service/tailordb` as `tailorUserMap`.
|
|
2397
|
+
*/
|
|
2398
|
+
/**
|
|
2399
|
+
* Actor field transformation expression.
|
|
2400
|
+
*
|
|
2401
|
+
* Transforms the server's actor object to match the SDK's TailorActor type:
|
|
2402
|
+
* server `attributeMap` → SDK `attributes`
|
|
2403
|
+
* server `attributes` → SDK `attributeList`
|
|
2404
|
+
* other fields → passed through
|
|
2405
|
+
* null/undefined actor → null
|
|
2406
|
+
*/
|
|
2407
|
+
const ACTOR_TRANSFORM_EXPR = "actor: args.actor ? (({ attributeMap, attributes: attrList, ...rest }) => ({ ...rest, attributes: attributeMap, attributeList: attrList }))(args.actor) : null";
|
|
2408
|
+
/**
|
|
2409
|
+
* Build the JavaScript expression that transforms server-format executor event
|
|
2410
|
+
* args into SDK-format args at runtime.
|
|
2411
|
+
*
|
|
2412
|
+
* The Tailor Platform server delivers event args with server-side field names.
|
|
2413
|
+
* The SDK exposes different field names to user code. This function produces a
|
|
2414
|
+
* JavaScript expression string that performs the mapping when evaluated
|
|
2415
|
+
* server-side.
|
|
2416
|
+
* @param triggerKind - The trigger kind discriminant from the parsed executor
|
|
2417
|
+
* @param env - Application env record to embed in the expression
|
|
2418
|
+
* @returns A JavaScript expression string, e.g. `({ ...args, ... })`
|
|
2419
|
+
*/
|
|
2420
|
+
function buildExecutorArgsExpr(triggerKind, env) {
|
|
2421
|
+
const envExpr = `env: ${JSON.stringify(env)}`;
|
|
2422
|
+
switch (triggerKind) {
|
|
2423
|
+
case "schedule":
|
|
2424
|
+
case "recordCreated":
|
|
2425
|
+
case "recordUpdated":
|
|
2426
|
+
case "recordDeleted":
|
|
2427
|
+
case "idpUserCreated":
|
|
2428
|
+
case "idpUserUpdated":
|
|
2429
|
+
case "idpUserDeleted":
|
|
2430
|
+
case "authAccessTokenIssued":
|
|
2431
|
+
case "authAccessTokenRefreshed":
|
|
2432
|
+
case "authAccessTokenRevoked": return `({ ...args, appNamespace: args.namespaceName, ${ACTOR_TRANSFORM_EXPR}, ${envExpr} })`;
|
|
2433
|
+
case "resolverExecuted": return `({ ...args, appNamespace: args.namespaceName, ${ACTOR_TRANSFORM_EXPR}, success: !!args.succeeded, result: args.succeeded?.result.resolver, error: args.failed?.error, ${envExpr} })`;
|
|
2434
|
+
case "incomingWebhook": return `({ ...args, appNamespace: args.namespaceName, rawBody: args.raw_body, ${envExpr} })`;
|
|
2435
|
+
default: throw new Error(`Unknown trigger kind for args expression: ${triggerKind}`);
|
|
2436
|
+
}
|
|
2437
|
+
}
|
|
2438
|
+
/**
|
|
2439
|
+
* Build the operationHook expression for resolver pipelines.
|
|
2440
|
+
*
|
|
2441
|
+
* Transforms server context to SDK resolver context:
|
|
2442
|
+
* context.args → input
|
|
2443
|
+
* context.pipeline → spread into result
|
|
2444
|
+
* user (global var) → TailorUser (via tailorUserMap: workspace_id→workspaceId, attribute_map→attributes, attributes→attributeList)
|
|
2445
|
+
* env → injected as JSON
|
|
2446
|
+
* @param env - Application env record to embed in the expression
|
|
2447
|
+
* @returns A JavaScript expression string for the operationHook
|
|
2448
|
+
*/
|
|
2449
|
+
function buildResolverOperationHookExpr(env) {
|
|
2450
|
+
return `({ ...context.pipeline, input: context.args, user: ${tailorUserMap}, env: ${JSON.stringify(env)} });`;
|
|
2451
|
+
}
|
|
2452
|
+
|
|
2355
2453
|
//#endregion
|
|
2356
2454
|
//#region src/cli/commands/apply/function-registry.ts
|
|
2357
2455
|
const CHUNK_SIZE = 64 * 1024;
|
|
@@ -2403,7 +2501,7 @@ function collectFunctionEntries(application, workflowJobs) {
|
|
|
2403
2501
|
for (const app of application.applications) for (const pipeline of app.resolverServices) for (const resolver of Object.values(pipeline.resolvers)) {
|
|
2404
2502
|
const scriptPath = path.join(distDir, "resolvers", `${resolver.name}.js`);
|
|
2405
2503
|
try {
|
|
2406
|
-
const content = fs$
|
|
2504
|
+
const content = fs$1.readFileSync(scriptPath, "utf-8");
|
|
2407
2505
|
entries.push({
|
|
2408
2506
|
name: resolverFunctionName(pipeline.namespace, resolver.name),
|
|
2409
2507
|
scriptContent: content,
|
|
@@ -2419,7 +2517,7 @@ function collectFunctionEntries(application, workflowJobs) {
|
|
|
2419
2517
|
for (const executor of Object.values(executors)) if (executor.operation.kind === "function" || executor.operation.kind === "jobFunction") {
|
|
2420
2518
|
const scriptPath = path.join(distDir, "executors", `${executor.name}.js`);
|
|
2421
2519
|
try {
|
|
2422
|
-
const content = fs$
|
|
2520
|
+
const content = fs$1.readFileSync(scriptPath, "utf-8");
|
|
2423
2521
|
entries.push({
|
|
2424
2522
|
name: executorFunctionName(executor.name),
|
|
2425
2523
|
scriptContent: content,
|
|
@@ -2434,7 +2532,7 @@ function collectFunctionEntries(application, workflowJobs) {
|
|
|
2434
2532
|
for (const job of workflowJobs) {
|
|
2435
2533
|
const scriptPath = path.join(distDir, "workflow-jobs", `${job.name}.js`);
|
|
2436
2534
|
try {
|
|
2437
|
-
const content = fs$
|
|
2535
|
+
const content = fs$1.readFileSync(scriptPath, "utf-8");
|
|
2438
2536
|
entries.push({
|
|
2439
2537
|
name: workflowJobFunctionName(job.name),
|
|
2440
2538
|
scriptContent: content,
|
|
@@ -3725,7 +3823,7 @@ function createSnapshotFromLocalTypes(types, namespace) {
|
|
|
3725
3823
|
* @returns {SchemaSnapshot} Loaded schema snapshot
|
|
3726
3824
|
*/
|
|
3727
3825
|
function loadSnapshot(filePath) {
|
|
3728
|
-
const content = fs$
|
|
3826
|
+
const content = fs$1.readFileSync(filePath, "utf-8");
|
|
3729
3827
|
return JSON.parse(content);
|
|
3730
3828
|
}
|
|
3731
3829
|
/**
|
|
@@ -3734,7 +3832,7 @@ function loadSnapshot(filePath) {
|
|
|
3734
3832
|
* @returns {MigrationDiff} Loaded migration diff
|
|
3735
3833
|
*/
|
|
3736
3834
|
function loadDiff(filePath) {
|
|
3737
|
-
const content = fs$
|
|
3835
|
+
const content = fs$1.readFileSync(filePath, "utf-8");
|
|
3738
3836
|
return JSON.parse(content);
|
|
3739
3837
|
}
|
|
3740
3838
|
/**
|
|
@@ -3743,8 +3841,8 @@ function loadDiff(filePath) {
|
|
|
3743
3841
|
* @returns {Array<{number: number, type: "schema" | "diff", path: string}>} Migration files sorted by number
|
|
3744
3842
|
*/
|
|
3745
3843
|
function getMigrationFiles(migrationsDir) {
|
|
3746
|
-
if (!fs$
|
|
3747
|
-
const entries = fs$
|
|
3844
|
+
if (!fs$1.existsSync(migrationsDir)) return [];
|
|
3845
|
+
const entries = fs$1.readdirSync(migrationsDir, { withFileTypes: true });
|
|
3748
3846
|
const migrations = [];
|
|
3749
3847
|
for (const entry of entries) {
|
|
3750
3848
|
if (!entry.isDirectory()) continue;
|
|
@@ -3752,13 +3850,13 @@ function getMigrationFiles(migrationsDir) {
|
|
|
3752
3850
|
const num = parseInt(entry.name, 10);
|
|
3753
3851
|
const migrationDir = path.join(migrationsDir, entry.name);
|
|
3754
3852
|
const schemaPath = path.join(migrationDir, SCHEMA_FILE_NAME);
|
|
3755
|
-
if (fs$
|
|
3853
|
+
if (fs$1.existsSync(schemaPath)) migrations.push({
|
|
3756
3854
|
number: num,
|
|
3757
3855
|
type: "schema",
|
|
3758
3856
|
path: schemaPath
|
|
3759
3857
|
});
|
|
3760
3858
|
const diffPath = path.join(migrationDir, DIFF_FILE_NAME);
|
|
3761
|
-
if (fs$
|
|
3859
|
+
if (fs$1.existsSync(diffPath)) migrations.push({
|
|
3762
3860
|
number: num,
|
|
3763
3861
|
type: "diff",
|
|
3764
3862
|
path: diffPath
|
|
@@ -4330,7 +4428,7 @@ function compareLocalTypesWithSnapshot(snapshot, localTypes, namespace) {
|
|
|
4330
4428
|
*/
|
|
4331
4429
|
function validateMigrationFiles(migrationsDir) {
|
|
4332
4430
|
const errors = [];
|
|
4333
|
-
if (!fs$
|
|
4431
|
+
if (!fs$1.existsSync(migrationsDir)) return errors;
|
|
4334
4432
|
const migrationFiles = getMigrationFiles(migrationsDir);
|
|
4335
4433
|
if (migrationFiles.length === 0) return errors;
|
|
4336
4434
|
const schemaFiles = [];
|
|
@@ -4573,7 +4671,7 @@ function formatSchemaDrifts(drifts) {
|
|
|
4573
4671
|
*/
|
|
4574
4672
|
async function bundleMigrationScript(sourceFile, namespace, migrationNumber) {
|
|
4575
4673
|
const outputDir = path.resolve(getDistDir(), "migrations");
|
|
4576
|
-
fs$
|
|
4674
|
+
fs$1.mkdirSync(outputDir, { recursive: true });
|
|
4577
4675
|
const entryPath = path.join(outputDir, `migration_${namespace}_${migrationNumber}.entry.js`);
|
|
4578
4676
|
const outputPath = path.join(outputDir, `migration_${namespace}_${migrationNumber}.js`);
|
|
4579
4677
|
const entryContent = ml`
|
|
@@ -4595,7 +4693,7 @@ async function bundleMigrationScript(sourceFile, namespace, migrationNumber) {
|
|
|
4595
4693
|
return { success: true };
|
|
4596
4694
|
}
|
|
4597
4695
|
`;
|
|
4598
|
-
fs$
|
|
4696
|
+
fs$1.writeFileSync(entryPath, entryContent);
|
|
4599
4697
|
let tsconfig;
|
|
4600
4698
|
try {
|
|
4601
4699
|
tsconfig = await resolveTSConfig();
|
|
@@ -4625,7 +4723,7 @@ async function bundleMigrationScript(sourceFile, namespace, migrationNumber) {
|
|
|
4625
4723
|
return {
|
|
4626
4724
|
namespace,
|
|
4627
4725
|
migrationNumber,
|
|
4628
|
-
bundledCode: fs$
|
|
4726
|
+
bundledCode: fs$1.readFileSync(outputPath, "utf-8")
|
|
4629
4727
|
};
|
|
4630
4728
|
}
|
|
4631
4729
|
|
|
@@ -4687,7 +4785,7 @@ async function waitForExecution$1(client, workspaceId, executionId, pollInterval
|
|
|
4687
4785
|
logs: execution.logs,
|
|
4688
4786
|
result: execution.result
|
|
4689
4787
|
};
|
|
4690
|
-
await new Promise((resolve) => setTimeout(resolve, pollInterval));
|
|
4788
|
+
await new Promise((resolve$1) => setTimeout(resolve$1, pollInterval));
|
|
4691
4789
|
}
|
|
4692
4790
|
}
|
|
4693
4791
|
/**
|
|
@@ -4702,28 +4800,26 @@ async function waitForExecution$1(client, workspaceId, executionId, pollInterval
|
|
|
4702
4800
|
*/
|
|
4703
4801
|
async function executeScript(options) {
|
|
4704
4802
|
const { client, workspaceId, name, code, arg: arg$1, invoker, pollInterval } = options;
|
|
4705
|
-
const
|
|
4803
|
+
const response = await client.testExecScript({
|
|
4706
4804
|
workspaceId,
|
|
4707
4805
|
name,
|
|
4708
4806
|
code,
|
|
4709
4807
|
arg: arg$1 ?? JSON.stringify({}),
|
|
4710
4808
|
invoker
|
|
4711
|
-
})
|
|
4809
|
+
});
|
|
4810
|
+
const executionId = response.executionId;
|
|
4712
4811
|
const result = await waitForExecution$1(client, workspaceId, executionId, pollInterval);
|
|
4713
4812
|
if (result.status === FunctionExecution_Status.SUCCESS) return {
|
|
4714
4813
|
success: true,
|
|
4715
4814
|
logs: result.logs,
|
|
4716
4815
|
result: result.result
|
|
4717
4816
|
};
|
|
4718
|
-
else {
|
|
4719
|
-
|
|
4720
|
-
|
|
4721
|
-
|
|
4722
|
-
|
|
4723
|
-
|
|
4724
|
-
error: errorDetails || "Script execution failed with unknown error"
|
|
4725
|
-
};
|
|
4726
|
-
}
|
|
4817
|
+
else return {
|
|
4818
|
+
success: false,
|
|
4819
|
+
logs: result.logs,
|
|
4820
|
+
result: result.result || response.result,
|
|
4821
|
+
error: result.result || response.result || "Script execution failed with unknown error"
|
|
4822
|
+
};
|
|
4727
4823
|
}
|
|
4728
4824
|
|
|
4729
4825
|
//#endregion
|
|
@@ -4766,10 +4862,10 @@ async function detectPendingMigrations(client, workspaceId, namespacesWithMigrat
|
|
|
4766
4862
|
for (const file of migrationFiles) {
|
|
4767
4863
|
if (file.number <= currentMigration) continue;
|
|
4768
4864
|
const diffPath = getMigrationFilePath(migrationsDir, file.number, "diff");
|
|
4769
|
-
if (!fs$
|
|
4865
|
+
if (!fs$1.existsSync(diffPath)) continue;
|
|
4770
4866
|
const diff = loadDiff(diffPath);
|
|
4771
4867
|
const scriptPath = getMigrationFilePath(migrationsDir, file.number, "migrate");
|
|
4772
|
-
if (diff.requiresMigrationScript && !fs$
|
|
4868
|
+
if (diff.requiresMigrationScript && !fs$1.existsSync(scriptPath)) {
|
|
4773
4869
|
logger.warn(`Migration ${namespace}/${file.number} requires a script but migrate.ts not found`);
|
|
4774
4870
|
continue;
|
|
4775
4871
|
}
|
|
@@ -6228,7 +6324,7 @@ async function apply(options) {
|
|
|
6228
6324
|
const packageJson = await readPackageJson();
|
|
6229
6325
|
const cacheDir = path.resolve(getDistDir(), "cache");
|
|
6230
6326
|
if (options?.cleanCache) {
|
|
6231
|
-
fs$
|
|
6327
|
+
fs$1.rmSync(cacheDir, {
|
|
6232
6328
|
recursive: true,
|
|
6233
6329
|
force: true
|
|
6234
6330
|
});
|
|
@@ -6718,7 +6814,7 @@ const getCommand$3 = defineCommand({
|
|
|
6718
6814
|
...jsonArgs,
|
|
6719
6815
|
...workspaceArgs,
|
|
6720
6816
|
...nameArgs$1
|
|
6721
|
-
}),
|
|
6817
|
+
}).strict(),
|
|
6722
6818
|
run: withCommonArgs(async (args) => {
|
|
6723
6819
|
const executor = await getExecutor({
|
|
6724
6820
|
name: args.name,
|
|
@@ -6921,7 +7017,7 @@ function toWorkflowExecutionInfo(execution) {
|
|
|
6921
7017
|
//#endregion
|
|
6922
7018
|
//#region src/cli/commands/workflow/executions.ts
|
|
6923
7019
|
function sleep$1(ms) {
|
|
6924
|
-
return new Promise((resolve) => setTimeout(resolve, ms));
|
|
7020
|
+
return new Promise((resolve$1) => setTimeout(resolve$1, ms));
|
|
6925
7021
|
}
|
|
6926
7022
|
function formatTime$2(date) {
|
|
6927
7023
|
return date.toLocaleTimeString("en-US", { hour12: false });
|
|
@@ -7136,7 +7232,7 @@ const executionsCommand = defineCommand({
|
|
|
7136
7232
|
}),
|
|
7137
7233
|
...waitArgs,
|
|
7138
7234
|
logs: arg(z.boolean().default(false), { description: "Display job execution logs (detail mode only)" })
|
|
7139
|
-
}),
|
|
7235
|
+
}).strict(),
|
|
7140
7236
|
run: withCommonArgs(async (args) => {
|
|
7141
7237
|
if (args.executionId) {
|
|
7142
7238
|
const interval = parseDuration(args.interval);
|
|
@@ -7205,7 +7301,7 @@ const getCommand$2 = defineCommand({
|
|
|
7205
7301
|
...jsonArgs,
|
|
7206
7302
|
...workspaceArgs,
|
|
7207
7303
|
...nameArgs
|
|
7208
|
-
}),
|
|
7304
|
+
}).strict(),
|
|
7209
7305
|
run: withCommonArgs(async (args) => {
|
|
7210
7306
|
const workflow = await getWorkflow({
|
|
7211
7307
|
name: args.name,
|
|
@@ -7219,7 +7315,7 @@ const getCommand$2 = defineCommand({
|
|
|
7219
7315
|
//#endregion
|
|
7220
7316
|
//#region src/cli/commands/workflow/start.ts
|
|
7221
7317
|
function sleep(ms) {
|
|
7222
|
-
return new Promise((resolve) => setTimeout(resolve, ms));
|
|
7318
|
+
return new Promise((resolve$1) => setTimeout(resolve$1, ms));
|
|
7223
7319
|
}
|
|
7224
7320
|
function formatTime$1(date) {
|
|
7225
7321
|
return date.toLocaleTimeString("en-US", { hour12: false });
|
|
@@ -7391,7 +7487,7 @@ const startCommand = defineCommand({
|
|
|
7391
7487
|
description: "Workflow argument (JSON string)"
|
|
7392
7488
|
}),
|
|
7393
7489
|
...waitArgs
|
|
7394
|
-
}),
|
|
7490
|
+
}).strict(),
|
|
7395
7491
|
run: withCommonArgs(async (args) => {
|
|
7396
7492
|
const { executionId, wait } = await startWorkflowByName({
|
|
7397
7493
|
name: args.name,
|
|
@@ -7725,7 +7821,7 @@ const jobsCommand = defineCommand({
|
|
|
7725
7821
|
description: "Display function execution logs after completion (requires --wait)"
|
|
7726
7822
|
}),
|
|
7727
7823
|
limit: arg(positiveIntArg.optional(), { description: "Maximum number of jobs to list (default: 50, max: 1000) (list mode only)" })
|
|
7728
|
-
}),
|
|
7824
|
+
}).strict(),
|
|
7729
7825
|
run: withCommonArgs(async (args) => {
|
|
7730
7826
|
if (args.jobId) {
|
|
7731
7827
|
if (args.wait) {
|
|
@@ -7829,7 +7925,7 @@ const listCommand$6 = defineCommand({
|
|
|
7829
7925
|
...commonArgs,
|
|
7830
7926
|
...jsonArgs,
|
|
7831
7927
|
...workspaceArgs
|
|
7832
|
-
}),
|
|
7928
|
+
}).strict(),
|
|
7833
7929
|
run: withCommonArgs(async (args) => {
|
|
7834
7930
|
const executors = await listExecutors({
|
|
7835
7931
|
workspaceId: args["workspace-id"],
|
|
@@ -7974,7 +8070,7 @@ The \`--logs\` option displays logs from the downstream execution when available
|
|
|
7974
8070
|
alias: "l",
|
|
7975
8071
|
description: "Display function execution logs after completion (requires --wait)"
|
|
7976
8072
|
})
|
|
7977
|
-
}),
|
|
8073
|
+
}).strict(),
|
|
7978
8074
|
run: withCommonArgs(async (args) => {
|
|
7979
8075
|
const client = await initOperatorClient(await loadAccessToken({
|
|
7980
8076
|
useProfile: true,
|
|
@@ -8104,7 +8200,7 @@ const listWebhookCommand = defineCommand({
|
|
|
8104
8200
|
...commonArgs,
|
|
8105
8201
|
...jsonArgs,
|
|
8106
8202
|
...workspaceArgs
|
|
8107
|
-
}),
|
|
8203
|
+
}).strict(),
|
|
8108
8204
|
run: withCommonArgs(async (args) => {
|
|
8109
8205
|
const executors = await listWebhookExecutors({
|
|
8110
8206
|
workspaceId: args["workspace-id"],
|
|
@@ -8532,7 +8628,7 @@ function createDependencyWatcher(options = {}) {
|
|
|
8532
8628
|
function createGenerationManager(params) {
|
|
8533
8629
|
const { application, config, generators = [], pluginManager } = params;
|
|
8534
8630
|
const baseDir = path.join(getDistDir(), "generated");
|
|
8535
|
-
fs$
|
|
8631
|
+
fs$1.mkdirSync(baseDir, { recursive: true });
|
|
8536
8632
|
const services = {
|
|
8537
8633
|
tailordb: {},
|
|
8538
8634
|
resolver: {},
|
|
@@ -8752,14 +8848,14 @@ function createGenerationManager(params) {
|
|
|
8752
8848
|
*/
|
|
8753
8849
|
async function writeGeneratedFiles(sourceId, result) {
|
|
8754
8850
|
await Promise.all(result.files.map(async (file) => {
|
|
8755
|
-
fs$
|
|
8756
|
-
return new Promise((resolve, reject) => {
|
|
8757
|
-
if (file.skipIfExists && fs$
|
|
8851
|
+
fs$1.mkdirSync(path.dirname(file.path), { recursive: true });
|
|
8852
|
+
return new Promise((resolve$1, reject) => {
|
|
8853
|
+
if (file.skipIfExists && fs$1.existsSync(file.path)) {
|
|
8758
8854
|
const relativePath = path.relative(process.cwd(), file.path);
|
|
8759
8855
|
logger.debug(`${sourceId} | skip existing: ${relativePath}`);
|
|
8760
|
-
return resolve();
|
|
8856
|
+
return resolve$1();
|
|
8761
8857
|
}
|
|
8762
|
-
fs$
|
|
8858
|
+
fs$1.writeFile(file.path, file.content, (err) => {
|
|
8763
8859
|
if (err) {
|
|
8764
8860
|
const relativePath = path.relative(process.cwd(), file.path);
|
|
8765
8861
|
logger.error(`Error writing file ${styles.bold(relativePath)}`);
|
|
@@ -8768,15 +8864,15 @@ function createGenerationManager(params) {
|
|
|
8768
8864
|
} else {
|
|
8769
8865
|
const relativePath = path.relative(process.cwd(), file.path);
|
|
8770
8866
|
logger.log(`${sourceId} | generate: ${styles.success(relativePath)}`);
|
|
8771
|
-
if (file.executable) fs$
|
|
8867
|
+
if (file.executable) fs$1.chmod(file.path, 493, (chmodErr) => {
|
|
8772
8868
|
if (chmodErr) {
|
|
8773
8869
|
const relativePath$1 = path.relative(process.cwd(), file.path);
|
|
8774
8870
|
logger.error(`Error setting executable permission on ${styles.bold(relativePath$1)}`);
|
|
8775
8871
|
logger.error(String(chmodErr));
|
|
8776
8872
|
reject(chmodErr);
|
|
8777
|
-
} else resolve();
|
|
8873
|
+
} else resolve$1();
|
|
8778
8874
|
});
|
|
8779
|
-
else resolve();
|
|
8875
|
+
else resolve$1();
|
|
8780
8876
|
}
|
|
8781
8877
|
});
|
|
8782
8878
|
});
|
|
@@ -9040,7 +9136,7 @@ const listCommand$5 = defineCommand({
|
|
|
9040
9136
|
...commonArgs,
|
|
9041
9137
|
...jsonArgs,
|
|
9042
9138
|
...deploymentArgs
|
|
9043
|
-
}),
|
|
9139
|
+
}).strict(),
|
|
9044
9140
|
run: withCommonArgs(async (args) => {
|
|
9045
9141
|
const machineUsers = await listMachineUsers({
|
|
9046
9142
|
workspaceId: args["workspace-id"],
|
|
@@ -9102,7 +9198,7 @@ const tokenCommand = defineCommand({
|
|
|
9102
9198
|
positional: true,
|
|
9103
9199
|
description: "Machine user name"
|
|
9104
9200
|
})
|
|
9105
|
-
}),
|
|
9201
|
+
}).strict(),
|
|
9106
9202
|
run: withCommonArgs(async (args) => {
|
|
9107
9203
|
const token = await getMachineUserToken({
|
|
9108
9204
|
name: args.name,
|
|
@@ -9205,7 +9301,7 @@ const getCommand$1 = defineCommand({
|
|
|
9205
9301
|
positional: true,
|
|
9206
9302
|
description: "OAuth2 client name"
|
|
9207
9303
|
})
|
|
9208
|
-
}),
|
|
9304
|
+
}).strict(),
|
|
9209
9305
|
run: withCommonArgs(async (args) => {
|
|
9210
9306
|
const credentials = await getOAuth2Client({
|
|
9211
9307
|
name: args.name,
|
|
@@ -9256,7 +9352,7 @@ const listCommand$4 = defineCommand({
|
|
|
9256
9352
|
...commonArgs,
|
|
9257
9353
|
...jsonArgs,
|
|
9258
9354
|
...deploymentArgs
|
|
9259
|
-
}),
|
|
9355
|
+
}).strict(),
|
|
9260
9356
|
run: withCommonArgs(async (args) => {
|
|
9261
9357
|
const oauth2Clients = await listOAuth2Clients({
|
|
9262
9358
|
workspaceId: args["workspace-id"],
|
|
@@ -9269,7 +9365,7 @@ const listCommand$4 = defineCommand({
|
|
|
9269
9365
|
|
|
9270
9366
|
//#endregion
|
|
9271
9367
|
//#region src/cli/commands/remove.ts
|
|
9272
|
-
async function loadOptions$
|
|
9368
|
+
async function loadOptions$10(options) {
|
|
9273
9369
|
const client = await initOperatorClient(await loadAccessToken({
|
|
9274
9370
|
useProfile: true,
|
|
9275
9371
|
profile: options?.profile
|
|
@@ -9325,7 +9421,7 @@ async function execRemove(client, workspaceId, application, config, confirm) {
|
|
|
9325
9421
|
* @returns Promise that resolves when removal completes
|
|
9326
9422
|
*/
|
|
9327
9423
|
async function remove(options) {
|
|
9328
|
-
const { client, workspaceId, application, config } = await loadOptions$
|
|
9424
|
+
const { client, workspaceId, application, config } = await loadOptions$10(options);
|
|
9329
9425
|
await execRemove(client, workspaceId, application, config);
|
|
9330
9426
|
}
|
|
9331
9427
|
const removeCommand$1 = defineCommand({
|
|
@@ -9335,9 +9431,9 @@ const removeCommand$1 = defineCommand({
|
|
|
9335
9431
|
...commonArgs,
|
|
9336
9432
|
...deploymentArgs,
|
|
9337
9433
|
...confirmationArgs
|
|
9338
|
-
}),
|
|
9434
|
+
}).strict(),
|
|
9339
9435
|
run: withCommonArgs(async (args) => {
|
|
9340
|
-
const { client, workspaceId, application, config } = await loadOptions$
|
|
9436
|
+
const { client, workspaceId, application, config } = await loadOptions$10({
|
|
9341
9437
|
workspaceId: args["workspace-id"],
|
|
9342
9438
|
profile: args.profile,
|
|
9343
9439
|
configPath: args.config
|
|
@@ -9409,7 +9505,7 @@ const showCommand = defineCommand({
|
|
|
9409
9505
|
...commonArgs,
|
|
9410
9506
|
...jsonArgs,
|
|
9411
9507
|
...deploymentArgs
|
|
9412
|
-
}),
|
|
9508
|
+
}).strict(),
|
|
9413
9509
|
run: withCommonArgs(async (args) => {
|
|
9414
9510
|
const appInfo$1 = await show({
|
|
9415
9511
|
workspaceId: args["workspace-id"],
|
|
@@ -9670,7 +9766,7 @@ function generateFieldType(config, isOptionalToRequired, enumValueChange) {
|
|
|
9670
9766
|
async function writeDbTypesFile(snapshot, migrationsDir, migrationNumber, diff) {
|
|
9671
9767
|
const content = generateDbTypesFromSnapshot(snapshot, diff);
|
|
9672
9768
|
const filePath = getMigrationFilePath(migrationsDir, migrationNumber, "db");
|
|
9673
|
-
await fs
|
|
9769
|
+
await fs.writeFile(filePath, content);
|
|
9674
9770
|
return filePath;
|
|
9675
9771
|
}
|
|
9676
9772
|
|
|
@@ -9692,7 +9788,7 @@ async function writeDbTypesFile(snapshot, migrationsDir, migrationNumber, diff)
|
|
|
9692
9788
|
*/
|
|
9693
9789
|
async function fileExists(filePath) {
|
|
9694
9790
|
try {
|
|
9695
|
-
await fs
|
|
9791
|
+
await fs.access(filePath);
|
|
9696
9792
|
return true;
|
|
9697
9793
|
} catch {
|
|
9698
9794
|
return false;
|
|
@@ -9715,10 +9811,10 @@ async function ensureFileNotExists(filePath) {
|
|
|
9715
9811
|
*/
|
|
9716
9812
|
async function generateSchemaFile(snapshot, migrationsDir, migrationNumber) {
|
|
9717
9813
|
const migrationDir = getMigrationDirPath(migrationsDir, migrationNumber);
|
|
9718
|
-
await fs
|
|
9814
|
+
await fs.mkdir(migrationDir, { recursive: true });
|
|
9719
9815
|
const filePath = getMigrationFilePath(migrationsDir, migrationNumber, "schema");
|
|
9720
9816
|
await ensureFileNotExists(filePath);
|
|
9721
|
-
await fs
|
|
9817
|
+
await fs.writeFile(filePath, JSON.stringify(snapshot, null, 2));
|
|
9722
9818
|
return {
|
|
9723
9819
|
filePath,
|
|
9724
9820
|
migrationNumber
|
|
@@ -9735,7 +9831,7 @@ async function generateSchemaFile(snapshot, migrationsDir, migrationNumber) {
|
|
|
9735
9831
|
*/
|
|
9736
9832
|
async function generateDiffFiles(diff, migrationsDir, migrationNumber, previousSnapshot, description) {
|
|
9737
9833
|
const migrationDir = getMigrationDirPath(migrationsDir, migrationNumber);
|
|
9738
|
-
await fs
|
|
9834
|
+
await fs.mkdir(migrationDir, { recursive: true });
|
|
9739
9835
|
const diffFilePath = getMigrationFilePath(migrationsDir, migrationNumber, "diff");
|
|
9740
9836
|
const migrateFilePath = getMigrationFilePath(migrationsDir, migrationNumber, "migrate");
|
|
9741
9837
|
const dbTypesFilePath = getMigrationFilePath(migrationsDir, migrationNumber, "db");
|
|
@@ -9748,14 +9844,14 @@ async function generateDiffFiles(diff, migrationsDir, migrationNumber, previousS
|
|
|
9748
9844
|
...diff,
|
|
9749
9845
|
description
|
|
9750
9846
|
};
|
|
9751
|
-
await fs
|
|
9847
|
+
await fs.writeFile(diffFilePath, JSON.stringify(diff, null, 2));
|
|
9752
9848
|
const result = {
|
|
9753
9849
|
diffFilePath,
|
|
9754
9850
|
migrationNumber
|
|
9755
9851
|
};
|
|
9756
9852
|
if (diff.requiresMigrationScript) {
|
|
9757
9853
|
const scriptContent = generateMigrationScript(diff);
|
|
9758
|
-
await fs
|
|
9854
|
+
await fs.writeFile(migrateFilePath, scriptContent);
|
|
9759
9855
|
result.migrateFilePath = migrateFilePath;
|
|
9760
9856
|
await writeDbTypesFile(previousSnapshot, migrationsDir, migrationNumber, diff);
|
|
9761
9857
|
result.dbTypesFilePath = dbTypesFilePath;
|
|
@@ -9890,7 +9986,7 @@ function generateChangeScript(change) {
|
|
|
9890
9986
|
* @returns {Promise<void>}
|
|
9891
9987
|
*/
|
|
9892
9988
|
async function handleInitOption(namespaces, skipConfirmation) {
|
|
9893
|
-
const existingDirs = namespaces.filter(({ migrationsDir }) => fs$
|
|
9989
|
+
const existingDirs = namespaces.filter(({ migrationsDir }) => fs$1.existsSync(migrationsDir));
|
|
9894
9990
|
if (existingDirs.length === 0) {
|
|
9895
9991
|
logger.info("No existing migration directories found.");
|
|
9896
9992
|
return;
|
|
@@ -9910,7 +10006,7 @@ async function handleInitOption(namespaces, skipConfirmation) {
|
|
|
9910
10006
|
logger.newline();
|
|
9911
10007
|
}
|
|
9912
10008
|
for (const { namespace, migrationsDir } of existingDirs) try {
|
|
9913
|
-
await fs
|
|
10009
|
+
await fs.rm(migrationsDir, {
|
|
9914
10010
|
recursive: true,
|
|
9915
10011
|
force: true
|
|
9916
10012
|
});
|
|
@@ -9940,7 +10036,7 @@ async function generate(options) {
|
|
|
9940
10036
|
if (options.init) await handleInitOption(namespacesWithMigrations, options.yes);
|
|
9941
10037
|
let pluginManager;
|
|
9942
10038
|
if (plugins.length > 0) pluginManager = new PluginManager(plugins);
|
|
9943
|
-
const { defineApplication: defineApplication$1 } = await import("./application-
|
|
10039
|
+
const { defineApplication: defineApplication$1 } = await import("./application-DdSu3baZ.mjs");
|
|
9944
10040
|
const application = defineApplication$1({
|
|
9945
10041
|
config,
|
|
9946
10042
|
pluginManager
|
|
@@ -10051,7 +10147,7 @@ async function openInEditor(filePath) {
|
|
|
10051
10147
|
const editor = process.env.EDITOR;
|
|
10052
10148
|
if (!editor) return;
|
|
10053
10149
|
try {
|
|
10054
|
-
await fs
|
|
10150
|
+
await fs.access(filePath);
|
|
10055
10151
|
} catch {
|
|
10056
10152
|
return;
|
|
10057
10153
|
}
|
|
@@ -10062,9 +10158,9 @@ async function openInEditor(filePath) {
|
|
|
10062
10158
|
stdio: "inherit",
|
|
10063
10159
|
detached: false
|
|
10064
10160
|
});
|
|
10065
|
-
await new Promise((resolve) => {
|
|
10066
|
-
child.on("close", () => resolve());
|
|
10067
|
-
child.on("error", () => resolve());
|
|
10161
|
+
await new Promise((resolve$1) => {
|
|
10162
|
+
child.on("close", () => resolve$1());
|
|
10163
|
+
child.on("error", () => resolve$1());
|
|
10068
10164
|
});
|
|
10069
10165
|
}
|
|
10070
10166
|
/**
|
|
@@ -10076,16 +10172,13 @@ const generateCommand = defineCommand({
|
|
|
10076
10172
|
args: z.object({
|
|
10077
10173
|
...commonArgs,
|
|
10078
10174
|
...confirmationArgs,
|
|
10079
|
-
|
|
10080
|
-
alias: "c",
|
|
10081
|
-
description: "Path to SDK config file"
|
|
10082
|
-
}),
|
|
10175
|
+
...configArg,
|
|
10083
10176
|
name: arg(z.string().optional(), {
|
|
10084
10177
|
alias: "n",
|
|
10085
10178
|
description: "Optional description for the migration"
|
|
10086
10179
|
}),
|
|
10087
10180
|
init: arg(z.boolean().default(false), { description: "Delete existing migrations and start fresh" })
|
|
10088
|
-
}),
|
|
10181
|
+
}).strict(),
|
|
10089
10182
|
run: withCommonArgs(async (args) => {
|
|
10090
10183
|
await generate({
|
|
10091
10184
|
configPath: args.config,
|
|
@@ -10096,6 +10189,63 @@ const generateCommand = defineCommand({
|
|
|
10096
10189
|
})
|
|
10097
10190
|
});
|
|
10098
10191
|
|
|
10192
|
+
//#endregion
|
|
10193
|
+
//#region src/cli/shared/config.ts
|
|
10194
|
+
/**
|
|
10195
|
+
* Extracts all configured namespace names from loaded application config.
|
|
10196
|
+
* Currently namespaces are derived from the `db` section.
|
|
10197
|
+
* @param config - Loaded application configuration.
|
|
10198
|
+
* @returns Namespace names in insertion order.
|
|
10199
|
+
*/
|
|
10200
|
+
function extractAllNamespaces(config) {
|
|
10201
|
+
const namespaces = /* @__PURE__ */ new Set();
|
|
10202
|
+
if (config.db) for (const [namespaceName] of Object.entries(config.db)) namespaces.add(namespaceName);
|
|
10203
|
+
return Array.from(namespaces);
|
|
10204
|
+
}
|
|
10205
|
+
|
|
10206
|
+
//#endregion
|
|
10207
|
+
//#region src/cli/shared/tailordb-namespace.ts
|
|
10208
|
+
/**
|
|
10209
|
+
* Resolve TailorDB type names to namespace names.
|
|
10210
|
+
* @param args - Resolution inputs
|
|
10211
|
+
* @returns Type to namespace map for found types
|
|
10212
|
+
*/
|
|
10213
|
+
async function resolveTypeNamespaces(args) {
|
|
10214
|
+
const requestedTypesByLowercase = /* @__PURE__ */ new Map();
|
|
10215
|
+
for (const typeName of args.typeNames) {
|
|
10216
|
+
const key = typeName.toLowerCase();
|
|
10217
|
+
const existing = requestedTypesByLowercase.get(key);
|
|
10218
|
+
if (existing) {
|
|
10219
|
+
existing.push(typeName);
|
|
10220
|
+
continue;
|
|
10221
|
+
}
|
|
10222
|
+
requestedTypesByLowercase.set(key, [typeName]);
|
|
10223
|
+
}
|
|
10224
|
+
const unresolvedTypes = new Set(args.typeNames);
|
|
10225
|
+
const typeNamespaceMap = /* @__PURE__ */ new Map();
|
|
10226
|
+
for (const namespace of args.namespaces) {
|
|
10227
|
+
if (unresolvedTypes.size === 0) break;
|
|
10228
|
+
try {
|
|
10229
|
+
const { tailordbTypes } = await args.client.listTailorDBTypes({
|
|
10230
|
+
workspaceId: args.workspaceId,
|
|
10231
|
+
namespaceName: namespace
|
|
10232
|
+
});
|
|
10233
|
+
for (const type of tailordbTypes) {
|
|
10234
|
+
const matchedRequestedTypes = requestedTypesByLowercase.get(type.name.toLowerCase());
|
|
10235
|
+
if (!matchedRequestedTypes) continue;
|
|
10236
|
+
for (const requestedTypeName of matchedRequestedTypes) {
|
|
10237
|
+
if (typeNamespaceMap.has(requestedTypeName)) continue;
|
|
10238
|
+
typeNamespaceMap.set(requestedTypeName, namespace);
|
|
10239
|
+
unresolvedTypes.delete(requestedTypeName);
|
|
10240
|
+
}
|
|
10241
|
+
}
|
|
10242
|
+
} catch {
|
|
10243
|
+
continue;
|
|
10244
|
+
}
|
|
10245
|
+
}
|
|
10246
|
+
return typeNamespaceMap;
|
|
10247
|
+
}
|
|
10248
|
+
|
|
10099
10249
|
//#endregion
|
|
10100
10250
|
//#region src/cli/commands/tailordb/truncate.ts
|
|
10101
10251
|
async function truncateSingleType(options, client) {
|
|
@@ -10113,25 +10263,6 @@ async function truncateNamespace(workspaceId, namespaceName, client) {
|
|
|
10113
10263
|
});
|
|
10114
10264
|
logger.success(`Truncated all types in namespace "${namespaceName}"`);
|
|
10115
10265
|
}
|
|
10116
|
-
async function getAllNamespaces(configPath) {
|
|
10117
|
-
const { config } = await loadConfig(configPath);
|
|
10118
|
-
const namespaces = /* @__PURE__ */ new Set();
|
|
10119
|
-
if (config.db) for (const [namespaceName] of Object.entries(config.db)) namespaces.add(namespaceName);
|
|
10120
|
-
return Array.from(namespaces);
|
|
10121
|
-
}
|
|
10122
|
-
async function getTypeNamespace(workspaceId, typeName, client, configPath) {
|
|
10123
|
-
const namespaces = await getAllNamespaces(configPath);
|
|
10124
|
-
for (const namespace of namespaces) try {
|
|
10125
|
-
const { tailordbTypes } = await client.listTailorDBTypes({
|
|
10126
|
-
workspaceId,
|
|
10127
|
-
namespaceName: namespace
|
|
10128
|
-
});
|
|
10129
|
-
if (tailordbTypes.some((type) => type.name === typeName)) return namespace;
|
|
10130
|
-
} catch {
|
|
10131
|
-
continue;
|
|
10132
|
-
}
|
|
10133
|
-
return null;
|
|
10134
|
-
}
|
|
10135
10266
|
/**
|
|
10136
10267
|
* Truncate TailorDB data based on the given options.
|
|
10137
10268
|
* @param options - Truncate options (all, namespace, or types)
|
|
@@ -10162,7 +10293,8 @@ async function $truncate(options) {
|
|
|
10162
10293
|
].filter(Boolean).length;
|
|
10163
10294
|
if (optionCount === 0) throw new Error("Please specify one of: --all, --namespace <name>, or type names");
|
|
10164
10295
|
if (optionCount > 1) throw new Error("Options --all, --namespace, and type names are mutually exclusive. Please specify only one.");
|
|
10165
|
-
const
|
|
10296
|
+
const { config } = await loadConfig(options?.configPath);
|
|
10297
|
+
const namespaces = extractAllNamespaces(config);
|
|
10166
10298
|
if (hasAll) {
|
|
10167
10299
|
if (namespaces.length === 0) {
|
|
10168
10300
|
logger.warn("No namespaces found in config file.");
|
|
@@ -10199,13 +10331,13 @@ async function $truncate(options) {
|
|
|
10199
10331
|
}
|
|
10200
10332
|
if (hasTypes && options?.types) {
|
|
10201
10333
|
const typeNames = options.types;
|
|
10202
|
-
const typeNamespaceMap =
|
|
10203
|
-
|
|
10204
|
-
|
|
10205
|
-
|
|
10206
|
-
|
|
10207
|
-
|
|
10208
|
-
|
|
10334
|
+
const typeNamespaceMap = await resolveTypeNamespaces({
|
|
10335
|
+
workspaceId,
|
|
10336
|
+
namespaces,
|
|
10337
|
+
typeNames,
|
|
10338
|
+
client
|
|
10339
|
+
});
|
|
10340
|
+
const notFoundTypes = typeNames.filter((typeName) => !typeNamespaceMap.has(typeName));
|
|
10209
10341
|
if (notFoundTypes.length > 0) throw new Error(`The following types were not found in any namespace: ${notFoundTypes.join(", ")}`);
|
|
10210
10342
|
if (!options.yes) {
|
|
10211
10343
|
const typeList = typeNames.join(", ");
|
|
@@ -10247,7 +10379,7 @@ const truncateCommand = defineCommand({
|
|
|
10247
10379
|
alias: "n",
|
|
10248
10380
|
description: "Truncate all tables in specified namespace"
|
|
10249
10381
|
})
|
|
10250
|
-
}),
|
|
10382
|
+
}).strict(),
|
|
10251
10383
|
run: withCommonArgs(async (args) => {
|
|
10252
10384
|
const types = args.types && args.types.length > 0 ? args.types : void 0;
|
|
10253
10385
|
await $truncate({
|
|
@@ -10294,7 +10426,7 @@ const listCommand$3 = defineCommand({
|
|
|
10294
10426
|
...commonArgs,
|
|
10295
10427
|
...jsonArgs,
|
|
10296
10428
|
...workspaceArgs
|
|
10297
|
-
}),
|
|
10429
|
+
}).strict(),
|
|
10298
10430
|
run: withCommonArgs(async (args) => {
|
|
10299
10431
|
const workflows = await listWorkflows({
|
|
10300
10432
|
workspaceId: args["workspace-id"],
|
|
@@ -10359,7 +10491,7 @@ const resumeCommand = defineCommand({
|
|
|
10359
10491
|
description: "Failed execution ID"
|
|
10360
10492
|
}),
|
|
10361
10493
|
...waitArgs
|
|
10362
|
-
}),
|
|
10494
|
+
}).strict(),
|
|
10363
10495
|
run: withCommonArgs(async (args) => {
|
|
10364
10496
|
const { executionId, wait } = await resumeWorkflow({
|
|
10365
10497
|
executionId: args.executionId,
|
|
@@ -10427,7 +10559,7 @@ const healthOptionsSchema = z.object({
|
|
|
10427
10559
|
profile: z.string().optional(),
|
|
10428
10560
|
name: z.string().min(1, { message: "name is required" })
|
|
10429
10561
|
});
|
|
10430
|
-
async function loadOptions$
|
|
10562
|
+
async function loadOptions$9(options) {
|
|
10431
10563
|
const result = healthOptionsSchema.safeParse(options);
|
|
10432
10564
|
if (!result.success) throw new Error(result.error.issues[0].message);
|
|
10433
10565
|
return {
|
|
@@ -10445,7 +10577,7 @@ async function loadOptions$8(options) {
|
|
|
10445
10577
|
* @returns Application health information
|
|
10446
10578
|
*/
|
|
10447
10579
|
async function getAppHealth(options) {
|
|
10448
|
-
const { client, workspaceId, name } = await loadOptions$
|
|
10580
|
+
const { client, workspaceId, name } = await loadOptions$9(options);
|
|
10449
10581
|
return appHealthInfo(name, await client.getApplicationSchemaHealth({
|
|
10450
10582
|
workspaceId,
|
|
10451
10583
|
applicationName: name
|
|
@@ -10462,7 +10594,7 @@ const healthCommand = defineCommand({
|
|
|
10462
10594
|
description: "Application name",
|
|
10463
10595
|
alias: "n"
|
|
10464
10596
|
})
|
|
10465
|
-
}),
|
|
10597
|
+
}).strict(),
|
|
10466
10598
|
run: withCommonArgs(async (args) => {
|
|
10467
10599
|
const health = await getAppHealth({
|
|
10468
10600
|
workspaceId: args["workspace-id"],
|
|
@@ -10485,7 +10617,7 @@ const listAppsOptionsSchema = z.object({
|
|
|
10485
10617
|
profile: z.string().optional(),
|
|
10486
10618
|
limit: z.coerce.number().int().positive().optional()
|
|
10487
10619
|
});
|
|
10488
|
-
async function loadOptions$
|
|
10620
|
+
async function loadOptions$8(options) {
|
|
10489
10621
|
const result = listAppsOptionsSchema.safeParse(options);
|
|
10490
10622
|
if (!result.success) throw new Error(result.error.issues[0].message);
|
|
10491
10623
|
return {
|
|
@@ -10503,7 +10635,7 @@ async function loadOptions$7(options) {
|
|
|
10503
10635
|
* @returns List of applications
|
|
10504
10636
|
*/
|
|
10505
10637
|
async function listApps(options) {
|
|
10506
|
-
const { client, workspaceId, limit } = await loadOptions$
|
|
10638
|
+
const { client, workspaceId, limit } = await loadOptions$8(options);
|
|
10507
10639
|
const hasLimit = limit !== void 0;
|
|
10508
10640
|
const results = [];
|
|
10509
10641
|
let pageToken = "";
|
|
@@ -10535,7 +10667,7 @@ const listCommand$2 = defineCommand({
|
|
|
10535
10667
|
alias: "l",
|
|
10536
10668
|
description: "Maximum number of applications to list"
|
|
10537
10669
|
})
|
|
10538
|
-
}),
|
|
10670
|
+
}).strict(),
|
|
10539
10671
|
run: withCommonArgs(async (args) => {
|
|
10540
10672
|
const apps = await listApps({
|
|
10541
10673
|
workspaceId: args["workspace-id"],
|
|
@@ -10640,7 +10772,7 @@ const createCommand = defineCommand({
|
|
|
10640
10772
|
description: "Profile name to create"
|
|
10641
10773
|
}),
|
|
10642
10774
|
"profile-user": arg(z.string().optional(), { description: "User email for the profile (defaults to current user)" })
|
|
10643
|
-
}),
|
|
10775
|
+
}).strict(),
|
|
10644
10776
|
run: withCommonArgs(async (args) => {
|
|
10645
10777
|
const workspace = await createWorkspace({
|
|
10646
10778
|
name: args.name,
|
|
@@ -10688,7 +10820,7 @@ const createCommand = defineCommand({
|
|
|
10688
10820
|
//#endregion
|
|
10689
10821
|
//#region src/cli/commands/workspace/delete.ts
|
|
10690
10822
|
const deleteWorkspaceOptionsSchema = z.object({ workspaceId: z.uuid({ message: "workspace-id must be a valid UUID" }) });
|
|
10691
|
-
async function loadOptions$
|
|
10823
|
+
async function loadOptions$7(options) {
|
|
10692
10824
|
const result = deleteWorkspaceOptionsSchema.safeParse(options);
|
|
10693
10825
|
if (!result.success) throw new Error(result.error.issues[0].message);
|
|
10694
10826
|
return {
|
|
@@ -10702,7 +10834,7 @@ async function loadOptions$6(options) {
|
|
|
10702
10834
|
* @returns Promise that resolves when deletion completes
|
|
10703
10835
|
*/
|
|
10704
10836
|
async function deleteWorkspace(options) {
|
|
10705
|
-
const { client, workspaceId } = await loadOptions$
|
|
10837
|
+
const { client, workspaceId } = await loadOptions$7(options);
|
|
10706
10838
|
await client.deleteWorkspace({ workspaceId });
|
|
10707
10839
|
}
|
|
10708
10840
|
const deleteCommand = defineCommand({
|
|
@@ -10715,9 +10847,9 @@ const deleteCommand = defineCommand({
|
|
|
10715
10847
|
description: "Workspace ID"
|
|
10716
10848
|
}),
|
|
10717
10849
|
...confirmationArgs
|
|
10718
|
-
}),
|
|
10850
|
+
}).strict(),
|
|
10719
10851
|
run: withCommonArgs(async (args) => {
|
|
10720
|
-
const { client, workspaceId } = await loadOptions$
|
|
10852
|
+
const { client, workspaceId } = await loadOptions$7({ workspaceId: args["workspace-id"] });
|
|
10721
10853
|
let workspace;
|
|
10722
10854
|
try {
|
|
10723
10855
|
workspace = await client.getWorkspace({ workspaceId });
|
|
@@ -10748,7 +10880,7 @@ const getWorkspaceOptionsSchema = z.object({
|
|
|
10748
10880
|
workspaceId: z.uuid({ message: "workspace-id must be a valid UUID" }).optional(),
|
|
10749
10881
|
profile: z.string().optional()
|
|
10750
10882
|
});
|
|
10751
|
-
async function loadOptions$
|
|
10883
|
+
async function loadOptions$6(options) {
|
|
10752
10884
|
const result = getWorkspaceOptionsSchema.safeParse(options);
|
|
10753
10885
|
if (!result.success) throw new Error(result.error.issues[0].message);
|
|
10754
10886
|
return {
|
|
@@ -10765,7 +10897,7 @@ async function loadOptions$5(options) {
|
|
|
10765
10897
|
* @returns Workspace details
|
|
10766
10898
|
*/
|
|
10767
10899
|
async function getWorkspace(options) {
|
|
10768
|
-
const { client, workspaceId } = await loadOptions$
|
|
10900
|
+
const { client, workspaceId } = await loadOptions$6(options);
|
|
10769
10901
|
const response = await client.getWorkspace({ workspaceId });
|
|
10770
10902
|
if (!response.workspace) throw new Error(`Workspace "${workspaceId}" not found.`);
|
|
10771
10903
|
return workspaceDetails(response.workspace);
|
|
@@ -10777,7 +10909,7 @@ const getCommand = defineCommand({
|
|
|
10777
10909
|
...commonArgs,
|
|
10778
10910
|
...jsonArgs,
|
|
10779
10911
|
...workspaceArgs
|
|
10780
|
-
}),
|
|
10912
|
+
}).strict(),
|
|
10781
10913
|
run: withCommonArgs(async (args) => {
|
|
10782
10914
|
const workspace = await getWorkspace({
|
|
10783
10915
|
workspaceId: args["workspace-id"],
|
|
@@ -10831,7 +10963,7 @@ const listCommand$1 = defineCommand({
|
|
|
10831
10963
|
alias: "l",
|
|
10832
10964
|
description: "Maximum number of workspaces to list"
|
|
10833
10965
|
})
|
|
10834
|
-
}),
|
|
10966
|
+
}).strict(),
|
|
10835
10967
|
run: withCommonArgs(async (args) => {
|
|
10836
10968
|
const workspaces = await listWorkspaces({ limit: args.limit });
|
|
10837
10969
|
logger.out(workspaces, { display: { updatedAt: null } });
|
|
@@ -10841,7 +10973,7 @@ const listCommand$1 = defineCommand({
|
|
|
10841
10973
|
//#endregion
|
|
10842
10974
|
//#region src/cli/commands/workspace/restore.ts
|
|
10843
10975
|
const restoreWorkspaceOptionsSchema = z.object({ workspaceId: z.uuid({ message: "workspace-id must be a valid UUID" }) });
|
|
10844
|
-
async function loadOptions$
|
|
10976
|
+
async function loadOptions$5(options) {
|
|
10845
10977
|
const result = restoreWorkspaceOptionsSchema.safeParse(options);
|
|
10846
10978
|
if (!result.success) throw new Error(result.error.issues[0].message);
|
|
10847
10979
|
return {
|
|
@@ -10855,7 +10987,7 @@ async function loadOptions$4(options) {
|
|
|
10855
10987
|
* @returns Promise that resolves when restoration completes
|
|
10856
10988
|
*/
|
|
10857
10989
|
async function restoreWorkspace(options) {
|
|
10858
|
-
const { client, workspaceId } = await loadOptions$
|
|
10990
|
+
const { client, workspaceId } = await loadOptions$5(options);
|
|
10859
10991
|
await client.restoreWorkspace({ workspaceId });
|
|
10860
10992
|
}
|
|
10861
10993
|
const restoreCommand = defineCommand({
|
|
@@ -10868,9 +11000,9 @@ const restoreCommand = defineCommand({
|
|
|
10868
11000
|
description: "Workspace ID"
|
|
10869
11001
|
}),
|
|
10870
11002
|
...confirmationArgs
|
|
10871
|
-
}),
|
|
11003
|
+
}).strict(),
|
|
10872
11004
|
run: withCommonArgs(async (args) => {
|
|
10873
|
-
const { client, workspaceId } = await loadOptions$
|
|
11005
|
+
const { client, workspaceId } = await loadOptions$5({ workspaceId: args["workspace-id"] });
|
|
10874
11006
|
if (!args.yes) {
|
|
10875
11007
|
if (await logger.prompt(`Are you sure you want to restore workspace "${workspaceId}"? (yes/no):`, { type: "text" }) !== "yes") {
|
|
10876
11008
|
logger.info("Workspace restoration cancelled.");
|
|
@@ -10921,7 +11053,7 @@ const inviteUserOptionsSchema = z.object({
|
|
|
10921
11053
|
email: z.email({ message: "email must be a valid email address" }),
|
|
10922
11054
|
role: z.enum(validRoles, { message: `role must be one of: ${validRoles.join(", ")}` })
|
|
10923
11055
|
});
|
|
10924
|
-
async function loadOptions$
|
|
11056
|
+
async function loadOptions$4(options) {
|
|
10925
11057
|
const result = inviteUserOptionsSchema.safeParse(options);
|
|
10926
11058
|
if (!result.success) throw new Error(result.error.issues[0].message);
|
|
10927
11059
|
return {
|
|
@@ -10940,7 +11072,7 @@ async function loadOptions$3(options) {
|
|
|
10940
11072
|
* @returns Promise that resolves when invitation is sent
|
|
10941
11073
|
*/
|
|
10942
11074
|
async function inviteUser(options) {
|
|
10943
|
-
const { client, workspaceId, email, role } = await loadOptions$
|
|
11075
|
+
const { client, workspaceId, email, role } = await loadOptions$4(options);
|
|
10944
11076
|
await client.inviteWorkspacePlatformUser({
|
|
10945
11077
|
workspaceId,
|
|
10946
11078
|
email,
|
|
@@ -10958,7 +11090,7 @@ const inviteCommand = defineCommand({
|
|
|
10958
11090
|
description: `Role to assign (${validRoles.join(", ")})`,
|
|
10959
11091
|
alias: "r"
|
|
10960
11092
|
})
|
|
10961
|
-
}),
|
|
11093
|
+
}).strict(),
|
|
10962
11094
|
run: withCommonArgs(async (args) => {
|
|
10963
11095
|
await inviteUser({
|
|
10964
11096
|
workspaceId: args["workspace-id"],
|
|
@@ -10977,7 +11109,7 @@ const listUsersOptionsSchema = z.object({
|
|
|
10977
11109
|
profile: z.string().optional(),
|
|
10978
11110
|
limit: z.coerce.number().int().positive().optional()
|
|
10979
11111
|
});
|
|
10980
|
-
async function loadOptions$
|
|
11112
|
+
async function loadOptions$3(options) {
|
|
10981
11113
|
const result = listUsersOptionsSchema.safeParse(options);
|
|
10982
11114
|
if (!result.success) throw new Error(result.error.issues[0].message);
|
|
10983
11115
|
return {
|
|
@@ -10995,7 +11127,7 @@ async function loadOptions$2(options) {
|
|
|
10995
11127
|
* @returns List of workspace users
|
|
10996
11128
|
*/
|
|
10997
11129
|
async function listUsers(options) {
|
|
10998
|
-
const { client, workspaceId, limit } = await loadOptions$
|
|
11130
|
+
const { client, workspaceId, limit } = await loadOptions$3(options);
|
|
10999
11131
|
const hasLimit = limit !== void 0;
|
|
11000
11132
|
const results = [];
|
|
11001
11133
|
let pageToken = "";
|
|
@@ -11027,7 +11159,7 @@ const listCommand = defineCommand({
|
|
|
11027
11159
|
alias: "l",
|
|
11028
11160
|
description: "Maximum number of users to list"
|
|
11029
11161
|
})
|
|
11030
|
-
}),
|
|
11162
|
+
}).strict(),
|
|
11031
11163
|
run: withCommonArgs(async (args) => {
|
|
11032
11164
|
const users = await listUsers({
|
|
11033
11165
|
workspaceId: args["workspace-id"],
|
|
@@ -11045,7 +11177,7 @@ const removeUserOptionsSchema = z.object({
|
|
|
11045
11177
|
profile: z.string().optional(),
|
|
11046
11178
|
email: z.string().email({ message: "email must be a valid email address" })
|
|
11047
11179
|
});
|
|
11048
|
-
async function loadOptions$
|
|
11180
|
+
async function loadOptions$2(options) {
|
|
11049
11181
|
const result = removeUserOptionsSchema.safeParse(options);
|
|
11050
11182
|
if (!result.success) throw new Error(result.error.issues[0].message);
|
|
11051
11183
|
return {
|
|
@@ -11063,7 +11195,7 @@ async function loadOptions$1(options) {
|
|
|
11063
11195
|
* @returns Promise that resolves when removal completes
|
|
11064
11196
|
*/
|
|
11065
11197
|
async function removeUser(options) {
|
|
11066
|
-
const { client, workspaceId, email } = await loadOptions$
|
|
11198
|
+
const { client, workspaceId, email } = await loadOptions$2(options);
|
|
11067
11199
|
await client.removeWorkspacePlatformUser({
|
|
11068
11200
|
workspaceId,
|
|
11069
11201
|
email
|
|
@@ -11077,7 +11209,7 @@ const removeCommand = defineCommand({
|
|
|
11077
11209
|
...workspaceArgs,
|
|
11078
11210
|
email: arg(z.email(), { description: "Email address of the user to remove" }),
|
|
11079
11211
|
...confirmationArgs
|
|
11080
|
-
}),
|
|
11212
|
+
}).strict(),
|
|
11081
11213
|
run: withCommonArgs(async (args) => {
|
|
11082
11214
|
if (!args.yes) {
|
|
11083
11215
|
if (await logger.prompt(`Are you sure you want to remove user "${args.email}" from the workspace? (yes/no):`, { type: "text" }) !== "yes") {
|
|
@@ -11102,7 +11234,7 @@ const updateUserOptionsSchema = z.object({
|
|
|
11102
11234
|
email: z.string().email({ message: "email must be a valid email address" }),
|
|
11103
11235
|
role: z.enum(validRoles, { message: `role must be one of: ${validRoles.join(", ")}` })
|
|
11104
11236
|
});
|
|
11105
|
-
async function loadOptions(options) {
|
|
11237
|
+
async function loadOptions$1(options) {
|
|
11106
11238
|
const result = updateUserOptionsSchema.safeParse(options);
|
|
11107
11239
|
if (!result.success) throw new Error(result.error.issues[0].message);
|
|
11108
11240
|
return {
|
|
@@ -11121,7 +11253,7 @@ async function loadOptions(options) {
|
|
|
11121
11253
|
* @returns Promise that resolves when update completes
|
|
11122
11254
|
*/
|
|
11123
11255
|
async function updateUser(options) {
|
|
11124
|
-
const { client, workspaceId, email, role } = await loadOptions(options);
|
|
11256
|
+
const { client, workspaceId, email, role } = await loadOptions$1(options);
|
|
11125
11257
|
await client.updateWorkspacePlatformUser({
|
|
11126
11258
|
workspaceId,
|
|
11127
11259
|
email,
|
|
@@ -11139,7 +11271,7 @@ const updateCommand = defineCommand({
|
|
|
11139
11271
|
description: `New role to assign (${validRoles.join(", ")})`,
|
|
11140
11272
|
alias: "r"
|
|
11141
11273
|
})
|
|
11142
|
-
}),
|
|
11274
|
+
}).strict(),
|
|
11143
11275
|
run: withCommonArgs(async (args) => {
|
|
11144
11276
|
await updateUser({
|
|
11145
11277
|
workspaceId: args["workspace-id"],
|
|
@@ -11152,5 +11284,874 @@ const updateCommand = defineCommand({
|
|
|
11152
11284
|
});
|
|
11153
11285
|
|
|
11154
11286
|
//#endregion
|
|
11155
|
-
|
|
11156
|
-
|
|
11287
|
+
//#region src/cli/bundler/query/query-bundler.ts
|
|
11288
|
+
function createSqlEntry() {
|
|
11289
|
+
return ml`
|
|
11290
|
+
import { Kysely, sql } from "@tailor-platform/sdk/kysely";
|
|
11291
|
+
import { TailordbDialect } from "@tailor-platform/function-kysely-tailordb";
|
|
11292
|
+
|
|
11293
|
+
type QueryInput = {
|
|
11294
|
+
namespace: string;
|
|
11295
|
+
queries: string[];
|
|
11296
|
+
};
|
|
11297
|
+
|
|
11298
|
+
function getDB(namespace: string) {
|
|
11299
|
+
const client = new tailordb.Client({ namespace });
|
|
11300
|
+
return new Kysely<Record<string, Record<string, unknown>>>({
|
|
11301
|
+
dialect: new TailordbDialect(client),
|
|
11302
|
+
});
|
|
11303
|
+
}
|
|
11304
|
+
|
|
11305
|
+
export async function main(input: QueryInput) {
|
|
11306
|
+
const db = getDB(input.namespace);
|
|
11307
|
+
const results = [];
|
|
11308
|
+
for (const query of input.queries) {
|
|
11309
|
+
const result = await sql.raw(query).execute(db);
|
|
11310
|
+
const rows = result.rows ?? [];
|
|
11311
|
+
results.push({ rows, rowCount: rows.length });
|
|
11312
|
+
}
|
|
11313
|
+
if (results.length === 1) {
|
|
11314
|
+
return results[0];
|
|
11315
|
+
}
|
|
11316
|
+
return results;
|
|
11317
|
+
}
|
|
11318
|
+
`;
|
|
11319
|
+
}
|
|
11320
|
+
function createGqlEntry() {
|
|
11321
|
+
return ml`
|
|
11322
|
+
type QueryInput = {
|
|
11323
|
+
endpoint: string;
|
|
11324
|
+
accessToken: string;
|
|
11325
|
+
query: string;
|
|
11326
|
+
};
|
|
11327
|
+
|
|
11328
|
+
export async function main(input: QueryInput) {
|
|
11329
|
+
const response = await fetch(input.endpoint, {
|
|
11330
|
+
method: "POST",
|
|
11331
|
+
headers: {
|
|
11332
|
+
"Content-Type": "application/json",
|
|
11333
|
+
Authorization: \`Bearer \${input.accessToken}\`,
|
|
11334
|
+
},
|
|
11335
|
+
body: JSON.stringify({
|
|
11336
|
+
query: input.query,
|
|
11337
|
+
}),
|
|
11338
|
+
});
|
|
11339
|
+
if (!response.ok) {
|
|
11340
|
+
let message = \`HTTP \${response.status}\`;
|
|
11341
|
+
try {
|
|
11342
|
+
const errorJson = await response.json();
|
|
11343
|
+
if (errorJson && typeof errorJson === "object" && "message" in errorJson) {
|
|
11344
|
+
message = String(errorJson.message);
|
|
11345
|
+
}
|
|
11346
|
+
} catch {
|
|
11347
|
+
// Keep default HTTP status message when response body is not JSON.
|
|
11348
|
+
}
|
|
11349
|
+
throw new Error(\`GraphQL request failed: \${message}\`);
|
|
11350
|
+
}
|
|
11351
|
+
|
|
11352
|
+
const json = await response.json();
|
|
11353
|
+
return json;
|
|
11354
|
+
}
|
|
11355
|
+
`;
|
|
11356
|
+
}
|
|
11357
|
+
/**
|
|
11358
|
+
* Bundle a query executor script for TestExecScript.
|
|
11359
|
+
* @param engine - Query engine type
|
|
11360
|
+
* @returns Bundled code
|
|
11361
|
+
*/
|
|
11362
|
+
async function bundleQueryScript(engine) {
|
|
11363
|
+
const outputDir = path.resolve(getDistDir(), "query");
|
|
11364
|
+
fs$1.mkdirSync(outputDir, { recursive: true });
|
|
11365
|
+
const entryPath = path.join(outputDir, `query_${engine}.entry.ts`);
|
|
11366
|
+
const outputPath = path.join(outputDir, `query_${engine}.js`);
|
|
11367
|
+
const entryContent = engine === "sql" ? createSqlEntry() : createGqlEntry();
|
|
11368
|
+
fs$1.writeFileSync(entryPath, entryContent);
|
|
11369
|
+
let tsconfig;
|
|
11370
|
+
try {
|
|
11371
|
+
tsconfig = await resolveTSConfig();
|
|
11372
|
+
} catch {
|
|
11373
|
+
tsconfig = void 0;
|
|
11374
|
+
}
|
|
11375
|
+
await rolldown.build(rolldown.defineConfig({
|
|
11376
|
+
input: entryPath,
|
|
11377
|
+
output: {
|
|
11378
|
+
file: outputPath,
|
|
11379
|
+
format: "esm",
|
|
11380
|
+
sourcemap: false,
|
|
11381
|
+
minify: false,
|
|
11382
|
+
inlineDynamicImports: true,
|
|
11383
|
+
globals: { tailordb: "tailordb" }
|
|
11384
|
+
},
|
|
11385
|
+
external: engine === "sql" ? ["tailordb"] : [],
|
|
11386
|
+
resolve: { conditionNames: ["node", "import"] },
|
|
11387
|
+
tsconfig,
|
|
11388
|
+
treeshake: {
|
|
11389
|
+
moduleSideEffects: false,
|
|
11390
|
+
annotations: true,
|
|
11391
|
+
unknownGlobalSideEffects: false
|
|
11392
|
+
},
|
|
11393
|
+
logLevel: "silent"
|
|
11394
|
+
}));
|
|
11395
|
+
return fs$1.readFileSync(outputPath, "utf-8");
|
|
11396
|
+
}
|
|
11397
|
+
|
|
11398
|
+
//#endregion
|
|
11399
|
+
//#region src/cli/query/errors.ts
|
|
11400
|
+
function toErrorMessage(error) {
|
|
11401
|
+
if (error instanceof Error) return error.message;
|
|
11402
|
+
return String(error);
|
|
11403
|
+
}
|
|
11404
|
+
/**
|
|
11405
|
+
* Maps errors from query execution to user-friendly CLI errors with suggestions when possible.
|
|
11406
|
+
* @param args - The error and context information for mapping
|
|
11407
|
+
* @returns A CLIError with a user-friendly message
|
|
11408
|
+
*/
|
|
11409
|
+
function mapQueryExecutionError(args) {
|
|
11410
|
+
const message = toErrorMessage(args.error);
|
|
11411
|
+
if (message.includes("machine user does not exist")) return createCLIError({
|
|
11412
|
+
code: "not_found",
|
|
11413
|
+
message: `Machine user '${args.machineUser ?? "unknown"}' was not found.`,
|
|
11414
|
+
suggestion: "Run `tailor-sdk machineuser list` and use an existing name."
|
|
11415
|
+
});
|
|
11416
|
+
if (args.engine === "sql" && message.includes("sqlaccess error: failed to fetch schema: query returned an unexpected number of rows")) return createCLIError({
|
|
11417
|
+
code: "invalid_namespace",
|
|
11418
|
+
message: `Failed to load TailorDB schema for namespace '${args.namespace}'.`,
|
|
11419
|
+
suggestion: "Ensure the query references TailorDB types from a single namespace and re-apply if needed."
|
|
11420
|
+
});
|
|
11421
|
+
if (args.engine === "sql" && message.includes("sqlaccess error: failed to parse:")) {
|
|
11422
|
+
const parserReason = message.split("sqlaccess error: failed to parse:").at(1)?.split("\n").at(0)?.trim();
|
|
11423
|
+
return createCLIError({
|
|
11424
|
+
code: "invalid_sql",
|
|
11425
|
+
message: "SQL parse error.",
|
|
11426
|
+
suggestion: parserReason ?? "The SQL query contains unsupported syntax."
|
|
11427
|
+
});
|
|
11428
|
+
}
|
|
11429
|
+
return args.error instanceof Error ? args.error : new Error(message);
|
|
11430
|
+
}
|
|
11431
|
+
|
|
11432
|
+
//#endregion
|
|
11433
|
+
//#region src/cli/query/graphql-repl.ts
|
|
11434
|
+
/**
|
|
11435
|
+
* Return true when the buffered GraphQL input parses as a complete document.
|
|
11436
|
+
* @param input - Buffered GraphQL input
|
|
11437
|
+
* @returns True when the GraphQL document is complete and ready to execute
|
|
11438
|
+
*/
|
|
11439
|
+
function isGraphQLInputComplete(input) {
|
|
11440
|
+
if (input.trim().length === 0) return false;
|
|
11441
|
+
try {
|
|
11442
|
+
parse$1(input);
|
|
11443
|
+
return true;
|
|
11444
|
+
} catch {
|
|
11445
|
+
return false;
|
|
11446
|
+
}
|
|
11447
|
+
}
|
|
11448
|
+
|
|
11449
|
+
//#endregion
|
|
11450
|
+
//#region src/cli/query/sql-repl.ts
|
|
11451
|
+
/**
|
|
11452
|
+
* Return true when the buffered SQL input ends with a real statement terminator.
|
|
11453
|
+
* @param input - Buffered SQL input
|
|
11454
|
+
* @returns True when the SQL statement is complete and ready to execute
|
|
11455
|
+
*/
|
|
11456
|
+
function isSqlInputComplete(input) {
|
|
11457
|
+
let inSingleQuote = false;
|
|
11458
|
+
let inDoubleQuote = false;
|
|
11459
|
+
let inLineComment = false;
|
|
11460
|
+
let blockCommentDepth = 0;
|
|
11461
|
+
let dollarQuoteTag = null;
|
|
11462
|
+
let lastSignificantTokenWasSemicolon = false;
|
|
11463
|
+
for (let i = 0; i < input.length; i += 1) {
|
|
11464
|
+
const char = input[i];
|
|
11465
|
+
const next = input[i + 1];
|
|
11466
|
+
if (inLineComment) {
|
|
11467
|
+
if (char === "\n") inLineComment = false;
|
|
11468
|
+
continue;
|
|
11469
|
+
}
|
|
11470
|
+
if (blockCommentDepth > 0) {
|
|
11471
|
+
if (char === "/" && next === "*") {
|
|
11472
|
+
blockCommentDepth += 1;
|
|
11473
|
+
i += 1;
|
|
11474
|
+
continue;
|
|
11475
|
+
}
|
|
11476
|
+
if (char === "*" && next === "/") {
|
|
11477
|
+
blockCommentDepth -= 1;
|
|
11478
|
+
i += 1;
|
|
11479
|
+
}
|
|
11480
|
+
continue;
|
|
11481
|
+
}
|
|
11482
|
+
if (dollarQuoteTag != null) {
|
|
11483
|
+
if (input.startsWith(dollarQuoteTag, i)) {
|
|
11484
|
+
i += dollarQuoteTag.length - 1;
|
|
11485
|
+
dollarQuoteTag = null;
|
|
11486
|
+
}
|
|
11487
|
+
continue;
|
|
11488
|
+
}
|
|
11489
|
+
if (inSingleQuote) {
|
|
11490
|
+
if (char === "'" && next === "'") {
|
|
11491
|
+
i += 1;
|
|
11492
|
+
continue;
|
|
11493
|
+
}
|
|
11494
|
+
if (char === "'") inSingleQuote = false;
|
|
11495
|
+
continue;
|
|
11496
|
+
}
|
|
11497
|
+
if (inDoubleQuote) {
|
|
11498
|
+
if (char === "\"" && next === "\"") {
|
|
11499
|
+
i += 1;
|
|
11500
|
+
continue;
|
|
11501
|
+
}
|
|
11502
|
+
if (char === "\"") inDoubleQuote = false;
|
|
11503
|
+
continue;
|
|
11504
|
+
}
|
|
11505
|
+
if (char === "-" && next === "-") {
|
|
11506
|
+
inLineComment = true;
|
|
11507
|
+
i += 1;
|
|
11508
|
+
continue;
|
|
11509
|
+
}
|
|
11510
|
+
if (char === "/" && next === "*") {
|
|
11511
|
+
blockCommentDepth = 1;
|
|
11512
|
+
i += 1;
|
|
11513
|
+
continue;
|
|
11514
|
+
}
|
|
11515
|
+
if (char === "'") {
|
|
11516
|
+
lastSignificantTokenWasSemicolon = false;
|
|
11517
|
+
inSingleQuote = true;
|
|
11518
|
+
continue;
|
|
11519
|
+
}
|
|
11520
|
+
if (char === "\"") {
|
|
11521
|
+
lastSignificantTokenWasSemicolon = false;
|
|
11522
|
+
inDoubleQuote = true;
|
|
11523
|
+
continue;
|
|
11524
|
+
}
|
|
11525
|
+
if (char === "$") {
|
|
11526
|
+
const rest = input.slice(i);
|
|
11527
|
+
const match = rest.match(/^\$[A-Za-z_][A-Za-z0-9_]*\$/) ?? rest.match(/^\$\$/);
|
|
11528
|
+
if (match != null) {
|
|
11529
|
+
lastSignificantTokenWasSemicolon = false;
|
|
11530
|
+
dollarQuoteTag = match[0];
|
|
11531
|
+
i += match[0].length - 1;
|
|
11532
|
+
continue;
|
|
11533
|
+
}
|
|
11534
|
+
}
|
|
11535
|
+
if (char === ";") {
|
|
11536
|
+
lastSignificantTokenWasSemicolon = true;
|
|
11537
|
+
continue;
|
|
11538
|
+
}
|
|
11539
|
+
if (!/\s/.test(char)) lastSignificantTokenWasSemicolon = false;
|
|
11540
|
+
}
|
|
11541
|
+
return lastSignificantTokenWasSemicolon && !inSingleQuote && !inDoubleQuote && blockCommentDepth === 0 && dollarQuoteTag == null;
|
|
11542
|
+
}
|
|
11543
|
+
|
|
11544
|
+
//#endregion
|
|
11545
|
+
//#region src/cli/query/sql-type-extractor.ts
|
|
11546
|
+
/**
|
|
11547
|
+
* Extract TailorDB type names from SQL query.
|
|
11548
|
+
* @param query - SQL query
|
|
11549
|
+
* @returns Type names referenced by query
|
|
11550
|
+
*/
|
|
11551
|
+
function extractTypeNamesFromSql(query$1) {
|
|
11552
|
+
let statements;
|
|
11553
|
+
try {
|
|
11554
|
+
statements = parse(query$1);
|
|
11555
|
+
} catch (error) {
|
|
11556
|
+
const message = error instanceof Error ? error.message : String(error);
|
|
11557
|
+
throw new Error(`SQL parse error: ${message}\nIf your table name is a reserved keyword (e.g. User), wrap it in double quotes: SELECT * FROM "User"`);
|
|
11558
|
+
}
|
|
11559
|
+
const typeNames = /* @__PURE__ */ new Set();
|
|
11560
|
+
const visitor = astVisitor((mapper) => ({ tableRef: (tableRef) => {
|
|
11561
|
+
typeNames.add(tableRef.name);
|
|
11562
|
+
mapper.super().tableRef(tableRef);
|
|
11563
|
+
return tableRef;
|
|
11564
|
+
} }));
|
|
11565
|
+
for (const statement of statements) visitor.statement(statement);
|
|
11566
|
+
return [...typeNames];
|
|
11567
|
+
}
|
|
11568
|
+
function collectAliasMap(fromClauses) {
|
|
11569
|
+
const aliasMap = /* @__PURE__ */ new Map();
|
|
11570
|
+
for (const from of fromClauses) if (from.type === "table") {
|
|
11571
|
+
const tableName = from.name.name;
|
|
11572
|
+
const alias = from.name.alias ?? tableName;
|
|
11573
|
+
aliasMap.set(alias, tableName);
|
|
11574
|
+
}
|
|
11575
|
+
return aliasMap;
|
|
11576
|
+
}
|
|
11577
|
+
/**
|
|
11578
|
+
* Extract the column template from a SQL query's SELECT clause.
|
|
11579
|
+
* Returns an ordered list of column slots representing explicit columns
|
|
11580
|
+
* and wildcard expansions with their resolved type names.
|
|
11581
|
+
*
|
|
11582
|
+
* Only inspects the top-level SELECT statement, not subqueries.
|
|
11583
|
+
* TailorDB's sqlaccess does not currently support subqueries in FROM clauses,
|
|
11584
|
+
* but we intentionally avoid recursing into nested SELECTs to prevent
|
|
11585
|
+
* false positives if the parser accepts such queries.
|
|
11586
|
+
* @param query - SQL query
|
|
11587
|
+
* @returns Column slots if wildcards are present, null otherwise
|
|
11588
|
+
*/
|
|
11589
|
+
function extractColumnTemplate(query$1) {
|
|
11590
|
+
try {
|
|
11591
|
+
const statements = parse(query$1);
|
|
11592
|
+
for (const statement of statements) {
|
|
11593
|
+
if (statement.type !== "select" || !statement.columns) continue;
|
|
11594
|
+
const aliasMap = collectAliasMap(statement.from ?? []);
|
|
11595
|
+
const slots = [];
|
|
11596
|
+
let hasWildcard = false;
|
|
11597
|
+
for (const column of statement.columns) if (column.expr.type === "ref" && column.expr.name === "*") {
|
|
11598
|
+
hasWildcard = true;
|
|
11599
|
+
if (column.expr.table) {
|
|
11600
|
+
const typeName = aliasMap.get(column.expr.table.name);
|
|
11601
|
+
slots.push({
|
|
11602
|
+
type: "wildcard",
|
|
11603
|
+
typeNames: typeName ? [typeName] : []
|
|
11604
|
+
});
|
|
11605
|
+
} else slots.push({
|
|
11606
|
+
type: "wildcard",
|
|
11607
|
+
typeNames: [...new Set(aliasMap.values())]
|
|
11608
|
+
});
|
|
11609
|
+
} else {
|
|
11610
|
+
const name = column.alias?.name ?? (column.expr.type === "ref" ? column.expr.name : null);
|
|
11611
|
+
if (name) slots.push({
|
|
11612
|
+
type: "explicit",
|
|
11613
|
+
name
|
|
11614
|
+
});
|
|
11615
|
+
}
|
|
11616
|
+
return hasWildcard ? slots : null;
|
|
11617
|
+
}
|
|
11618
|
+
return null;
|
|
11619
|
+
} catch {
|
|
11620
|
+
return null;
|
|
11621
|
+
}
|
|
11622
|
+
}
|
|
11623
|
+
|
|
11624
|
+
//#endregion
|
|
11625
|
+
//#region src/cli/query/type-field-order.ts
|
|
11626
|
+
/**
|
|
11627
|
+
* Load field definition order for all TailorDB types in a namespace.
|
|
11628
|
+
* @param config - Loaded application configuration
|
|
11629
|
+
* @param namespace - TailorDB namespace name
|
|
11630
|
+
* @returns Map of type name to field names in definition order
|
|
11631
|
+
*/
|
|
11632
|
+
async function loadTypeFieldOrder(config, namespace) {
|
|
11633
|
+
const fieldOrder = /* @__PURE__ */ new Map();
|
|
11634
|
+
const dbConfig = config.db?.[namespace];
|
|
11635
|
+
if (!dbConfig || !("files" in dbConfig) || dbConfig.files.length === 0) return fieldOrder;
|
|
11636
|
+
const typeFiles = loadFilesWithIgnores(dbConfig);
|
|
11637
|
+
await Promise.all(typeFiles.map(async (typeFile) => {
|
|
11638
|
+
try {
|
|
11639
|
+
const module = await import(pathToFileURL(typeFile).href);
|
|
11640
|
+
for (const exportedValue of Object.values(module)) {
|
|
11641
|
+
const result = TailorDBTypeSchema.safeParse(exportedValue);
|
|
11642
|
+
if (!result.success) continue;
|
|
11643
|
+
fieldOrder.set(result.data.name, Object.keys(result.data.fields));
|
|
11644
|
+
}
|
|
11645
|
+
} catch {}
|
|
11646
|
+
}));
|
|
11647
|
+
return fieldOrder;
|
|
11648
|
+
}
|
|
11649
|
+
|
|
11650
|
+
//#endregion
|
|
11651
|
+
//#region src/cli/query/index.ts
|
|
11652
|
+
const queryEngineSchema = z.enum(["sql", "gql"]);
|
|
11653
|
+
const queryBaseOptionsSchema = z.object({
|
|
11654
|
+
workspaceId: z.string().optional(),
|
|
11655
|
+
profile: z.string().optional(),
|
|
11656
|
+
configPath: z.string().optional(),
|
|
11657
|
+
engine: queryEngineSchema,
|
|
11658
|
+
machineUser: z.string()
|
|
11659
|
+
});
|
|
11660
|
+
const queryOptionsSchema = queryBaseOptionsSchema.extend({ query: z.string() });
|
|
11661
|
+
async function getNamespaceFromSqlQuery(workspaceId, query$1, client, namespaces) {
|
|
11662
|
+
if (namespaces.length === 0) throw new Error("No namespaces found in configuration.");
|
|
11663
|
+
if (namespaces.length === 1) return namespaces[0];
|
|
11664
|
+
const typeNames = extractTypeNamesFromSql(query$1);
|
|
11665
|
+
if (typeNames.length === 0) throw new Error(`Could not infer namespace from query. Detected namespaces: ${namespaces.join(", ")}.`);
|
|
11666
|
+
const typeNamespaceMap = await resolveTypeNamespaces({
|
|
11667
|
+
workspaceId,
|
|
11668
|
+
namespaces,
|
|
11669
|
+
typeNames,
|
|
11670
|
+
client
|
|
11671
|
+
});
|
|
11672
|
+
const notFoundTypes = typeNames.filter((typeName) => !typeNamespaceMap.has(typeName));
|
|
11673
|
+
if (notFoundTypes.length > 0) throw new Error(`Could not find namespace for types in query: ${notFoundTypes.join(", ")}.`);
|
|
11674
|
+
const namespacesFromTypes = new Set(typeNamespaceMap.values());
|
|
11675
|
+
if (namespacesFromTypes.size === 1) return [...namespacesFromTypes][0];
|
|
11676
|
+
throw new Error(`Query references types from multiple namespaces: ${[...namespacesFromTypes].join(", ")}.`);
|
|
11677
|
+
}
|
|
11678
|
+
async function loadOptions(options) {
|
|
11679
|
+
const result = queryBaseOptionsSchema.safeParse(options);
|
|
11680
|
+
if (!result.success) throw new Error(result.error.issues[0].message);
|
|
11681
|
+
const client = await initOperatorClient(await loadAccessToken({
|
|
11682
|
+
useProfile: true,
|
|
11683
|
+
profile: result.data.profile
|
|
11684
|
+
}));
|
|
11685
|
+
const workspaceId = loadWorkspaceId({
|
|
11686
|
+
workspaceId: result.data.workspaceId,
|
|
11687
|
+
profile: result.data.profile
|
|
11688
|
+
});
|
|
11689
|
+
const { config } = await loadConfig(options.configPath);
|
|
11690
|
+
const namespaces = extractAllNamespaces(config);
|
|
11691
|
+
const { application } = await client.getApplication({
|
|
11692
|
+
workspaceId,
|
|
11693
|
+
applicationName: config.name
|
|
11694
|
+
});
|
|
11695
|
+
if (!application?.authNamespace) throw new Error(`Application ${config.name} does not have an auth configuration.`);
|
|
11696
|
+
const { machineUser: machineUserResource } = await client.getAuthMachineUser({
|
|
11697
|
+
workspaceId,
|
|
11698
|
+
authNamespace: application.authNamespace,
|
|
11699
|
+
name: result.data.machineUser
|
|
11700
|
+
});
|
|
11701
|
+
if (!machineUserResource) throw new Error(`Machine user ${result.data.machineUser} not found.`);
|
|
11702
|
+
return {
|
|
11703
|
+
engine: result.data.engine,
|
|
11704
|
+
client,
|
|
11705
|
+
workspaceId,
|
|
11706
|
+
config,
|
|
11707
|
+
application,
|
|
11708
|
+
machineUserResource,
|
|
11709
|
+
namespaces
|
|
11710
|
+
};
|
|
11711
|
+
}
|
|
11712
|
+
async function sqlQuery(client, invoker, args) {
|
|
11713
|
+
const queries = splitSqlStatements(args.query);
|
|
11714
|
+
const executed = await executeScript({
|
|
11715
|
+
client,
|
|
11716
|
+
workspaceId: args.workspaceId,
|
|
11717
|
+
name: `query-sql-${args.namespace}.js`,
|
|
11718
|
+
code: args.bundledCode,
|
|
11719
|
+
arg: JSON.stringify({
|
|
11720
|
+
namespace: args.namespace,
|
|
11721
|
+
queries
|
|
11722
|
+
}),
|
|
11723
|
+
invoker
|
|
11724
|
+
});
|
|
11725
|
+
if (!executed.success) throw new Error(executed.error);
|
|
11726
|
+
return {
|
|
11727
|
+
engine: "sql",
|
|
11728
|
+
namespace: args.namespace,
|
|
11729
|
+
query: args.query,
|
|
11730
|
+
result: parseExecutionResult(executed.result)
|
|
11731
|
+
};
|
|
11732
|
+
}
|
|
11733
|
+
async function gqlQuery(client, invoker, application, machineUser, args) {
|
|
11734
|
+
const { access_token: accessToken } = await fetchMachineUserToken(application.url, machineUser.clientId, machineUser.clientSecret);
|
|
11735
|
+
const executed = await executeScript({
|
|
11736
|
+
client,
|
|
11737
|
+
workspaceId: args.workspaceId,
|
|
11738
|
+
name: `query-gql.js`,
|
|
11739
|
+
code: args.bundledCode,
|
|
11740
|
+
arg: JSON.stringify({
|
|
11741
|
+
endpoint: `${application.url}/query`,
|
|
11742
|
+
accessToken,
|
|
11743
|
+
query: args.query
|
|
11744
|
+
}),
|
|
11745
|
+
invoker
|
|
11746
|
+
});
|
|
11747
|
+
if (!executed.success) throw new Error(executed.error);
|
|
11748
|
+
return {
|
|
11749
|
+
engine: "gql",
|
|
11750
|
+
query: args.query,
|
|
11751
|
+
result: parseExecutionResult(executed.result)
|
|
11752
|
+
};
|
|
11753
|
+
}
|
|
11754
|
+
function parseExecutionResult(result) {
|
|
11755
|
+
if (!result) return null;
|
|
11756
|
+
try {
|
|
11757
|
+
return JSON.parse(result);
|
|
11758
|
+
} catch {
|
|
11759
|
+
return result;
|
|
11760
|
+
}
|
|
11761
|
+
}
|
|
11762
|
+
/**
|
|
11763
|
+
* Resolve query input mode from CLI args.
|
|
11764
|
+
* @param args - Query input flags
|
|
11765
|
+
* @param args.query - Direct query string
|
|
11766
|
+
* @returns Normalized input mode
|
|
11767
|
+
*/
|
|
11768
|
+
function resolveQueryCommandInput(args) {
|
|
11769
|
+
if (args.query != null) return { query: args.query };
|
|
11770
|
+
return { query: void 0 };
|
|
11771
|
+
}
|
|
11772
|
+
/**
|
|
11773
|
+
* Dispatch query execution.
|
|
11774
|
+
* @param options - Query command options
|
|
11775
|
+
* @returns Dispatch result
|
|
11776
|
+
*/
|
|
11777
|
+
async function query(options) {
|
|
11778
|
+
const result = queryOptionsSchema.safeParse(options);
|
|
11779
|
+
if (!result.success) throw new Error(result.error.issues[0].message);
|
|
11780
|
+
return await (await prepareQueryExecutor(result.data))(result.data.query);
|
|
11781
|
+
}
|
|
11782
|
+
async function prepareQueryExecutor(options) {
|
|
11783
|
+
const { client, workspaceId, config, application, machineUserResource, engine, namespaces } = await loadOptions(options);
|
|
11784
|
+
const bundledCode = await bundleQueryScript(engine);
|
|
11785
|
+
const invoker = create(AuthInvokerSchema, {
|
|
11786
|
+
namespace: application.authNamespace,
|
|
11787
|
+
machineUserName: machineUserResource.name
|
|
11788
|
+
});
|
|
11789
|
+
return async (queryString) => {
|
|
11790
|
+
let namespace;
|
|
11791
|
+
try {
|
|
11792
|
+
switch (engine) {
|
|
11793
|
+
case "sql":
|
|
11794
|
+
namespace = await getNamespaceFromSqlQuery(workspaceId, queryString, client, namespaces);
|
|
11795
|
+
return reorderSqlColumns(await sqlQuery(client, invoker, {
|
|
11796
|
+
workspaceId,
|
|
11797
|
+
namespace,
|
|
11798
|
+
bundledCode,
|
|
11799
|
+
query: queryString
|
|
11800
|
+
}), config, namespace, queryString);
|
|
11801
|
+
case "gql": return await gqlQuery(client, invoker, application, machineUserResource, {
|
|
11802
|
+
workspaceId,
|
|
11803
|
+
bundledCode,
|
|
11804
|
+
query: queryString
|
|
11805
|
+
});
|
|
11806
|
+
default: throw new Error(`Unsupported query engine: ${engine}`);
|
|
11807
|
+
}
|
|
11808
|
+
} catch (error) {
|
|
11809
|
+
throw mapQueryExecutionError({
|
|
11810
|
+
error,
|
|
11811
|
+
engine,
|
|
11812
|
+
namespace,
|
|
11813
|
+
machineUser: options.machineUser
|
|
11814
|
+
});
|
|
11815
|
+
}
|
|
11816
|
+
};
|
|
11817
|
+
}
|
|
11818
|
+
function isReadlineTerminationError(error) {
|
|
11819
|
+
if (!(error instanceof Error) || !("code" in error)) return false;
|
|
11820
|
+
return error.code === "ABORT_ERR" || error.code === "ERR_USE_AFTER_CLOSE";
|
|
11821
|
+
}
|
|
11822
|
+
/**
|
|
11823
|
+
* Resolve a backslash REPL command into its normalized action.
|
|
11824
|
+
* @param input - Raw user input
|
|
11825
|
+
* @returns Normalized REPL command, or null for non-command input
|
|
11826
|
+
*/
|
|
11827
|
+
function resolveReplCommand(input) {
|
|
11828
|
+
const trimmed = input.trim();
|
|
11829
|
+
if (!trimmed.startsWith("\\")) return null;
|
|
11830
|
+
if (trimmed === "\\q" || trimmed === "\\quit") return "quit";
|
|
11831
|
+
if (trimmed === "\\help" || trimmed === "\\h" || trimmed === "\\?") return "help";
|
|
11832
|
+
if (trimmed === "\\clear" || trimmed === "\\c") return "clear";
|
|
11833
|
+
return "unknown";
|
|
11834
|
+
}
|
|
11835
|
+
/**
|
|
11836
|
+
* Decide how REPL should react to Ctrl+C based on current buffered input.
|
|
11837
|
+
* @param bufferedLines - Previously accepted lines in the current statement buffer
|
|
11838
|
+
* @param currentLine - In-progress line currently being edited
|
|
11839
|
+
* @returns Whether to clear the buffer or exit the REPL
|
|
11840
|
+
*/
|
|
11841
|
+
function resolveReplInterruptAction(bufferedLines, currentLine) {
|
|
11842
|
+
if (bufferedLines.length === 0 && currentLine.length === 0) return "exit";
|
|
11843
|
+
return "clear";
|
|
11844
|
+
}
|
|
11845
|
+
/**
|
|
11846
|
+
* Clear the interactive terminal screen and move the cursor to the top-left.
|
|
11847
|
+
*/
|
|
11848
|
+
function clearReplScreen() {
|
|
11849
|
+
process.stdout.write("\x1Bc");
|
|
11850
|
+
}
|
|
11851
|
+
/**
 * Run the interactive SQL/GraphQL REPL loop until the user quits.
 *
 * Reads lines with readline's promise API, buffers them until the statement
 * is syntactically complete for the selected engine, then executes it and
 * prints the result. Ctrl+C clears pending input (or exits on an empty
 * prompt); Ctrl+D (readline close) exits.
 *
 * @param options - Shared query options plus `engine` ("sql" | "gql") and `json`
 * @throws Error when stdin/stdout is not a TTY (REPL requires interactivity)
 */
async function runRepl(options) {
	if (!process.stdin.isTTY || !process.stdout.isTTY) throw new Error("Non-interactive terminals are not supported. Pass -q/--query to run a query.");
	// Prepare the executor once up front; each loop iteration reuses it.
	const execute = await prepareQueryExecutor(options);
	const rl = createInterface({
		input: process.stdin,
		output: process.stdout
	});
	logger.info(`Entering ${options.engine.toUpperCase()} REPL mode.`);
	logger.info("Type \\help for usage, \\q to quit.");
	// Statement buffer: accepted lines of the current (possibly multi-line) input.
	const lines = [];
	try {
		while (true) {
			// Primary prompt on a fresh statement, continuation prompt otherwise.
			const prompt = lines.length === 0 ? `${options.engine}> ` : " ";
			let line;
			// Set by the SIGINT handler before it aborts the pending question.
			let interruptAction = null;
			const controller = new AbortController();
			const handleSigint = () => {
				// rl.line is the partially typed, not-yet-submitted input.
				interruptAction = resolveReplInterruptAction(lines, rl.line);
				if (interruptAction === "clear") {
					lines.length = 0;
					// Ctrl+U keystroke: erase the in-progress readline buffer.
					rl.write(null, {
						ctrl: true,
						name: "u"
					});
					process.stdout.write("\n");
				} else rl.close();
				// Abort the pending question() so the loop can react.
				controller.abort();
			};
			rl.once("SIGINT", handleSigint);
			try {
				line = await rl.question(prompt, { signal: controller.signal });
			} catch (error) {
				rl.off("SIGINT", handleSigint);
				if (controller.signal.aborted) {
					// Our own SIGINT handler aborted: either exit or re-prompt.
					if (interruptAction === "exit") return;
					continue;
				}
				// Ctrl+D / closed interface ends the REPL quietly.
				if (isReadlineTerminationError(error)) return;
				throw error;
			} finally {
				// Safe even after the catch's off(): removing twice is a no-op.
				rl.off("SIGINT", handleSigint);
			}
			const trimmed = line.trim();
			// Ignore blank lines only when no statement is being buffered.
			if (lines.length === 0 && trimmed === "") continue;
			// Backslash commands are only recognized at the start of a statement.
			if (lines.length === 0) {
				const command = resolveReplCommand(trimmed);
				if (command === "quit") return;
				if (command === "help") {
					printReplHelp(options.engine);
					continue;
				}
				if (command === "clear") {
					clearReplScreen();
					continue;
				}
				if (command === "unknown") {
					logger.warn(`Unknown command: ${trimmed}`);
					continue;
				}
			}
			lines.push(line);
			// Keep buffering until the engine considers the input complete.
			if (options.engine === "sql") {
				if (!isSqlInputComplete(lines.join("\n"))) continue;
			} else if (!isGraphQLInputComplete(lines.join("\n"))) continue;
			const statement = getReplStatement(lines, options.engine);
			// Reset the buffer before executing so errors don't replay input.
			lines.length = 0;
			if (statement.length === 0) continue;
			try {
				if (options.engine === "sql") {
					const result$1 = await execute(statement);
					// Guard against the executor returning a mismatched engine result.
					if (result$1.engine !== "sql") throw new Error(`Expected sql engine result but got: ${result$1.engine}`);
					printSqlResult(result$1, { json: options.json });
					continue;
				}
				const result = await execute(statement);
				if (result.engine !== "gql") throw new Error(`Expected gql engine result but got: ${result.engine}`);
				printGqlResult(result, { json: options.json });
			} catch (error) {
				// Execution errors are reported and the REPL keeps running.
				if (isCLIError(error)) {
					logger.log(error.format());
					continue;
				}
				if (error instanceof Error) {
					logger.error(error.message);
					continue;
				}
				logger.error(String(error));
			}
		}
	} finally {
		// Always release the terminal, whatever path exited the loop.
		rl.close();
	}
}
|
|
11944
|
+
/**
 * Join buffered REPL lines into a single executable statement.
 * For GraphQL, trailing blank lines (used to signal completion) are dropped
 * before joining; SQL keeps all lines as-is.
 * @param lines - Buffered input lines
 * @param engine - "sql" or "gql"
 * @returns Trimmed statement text (may be empty)
 */
function getReplStatement(lines, engine) {
	if (engine === "sql") {
		return lines.join("\n").trim();
	}
	const kept = [...lines];
	while (kept.length > 0 && kept[kept.length - 1].trim() === "") {
		kept.pop();
	}
	return kept.join("\n").trim();
}
|
|
11950
|
+
/**
 * Print the REPL command reference, plus an engine-specific execution note.
 * @param engine - "sql" or "gql"
 */
function printReplHelp(engine) {
	const helpLines = [
		"REPL commands:",
		" \\help, \\h, \\? Show this help",
		" Ctrl+C Clear current input",
		" \\q, \\quit, Ctrl+D Exit REPL",
		" \\clear, \\c Clear the screen"
	];
	for (const text of helpLines) {
		logger.log(text);
	}
	if (engine === "sql") {
		logger.log("SQL execution: statement ending with ';' runs immediately.");
	} else {
		logger.log("GraphQL execution: a complete GraphQL document runs immediately.");
	}
}
|
|
11962
|
+
/**
 * Execute SQL query directly.
 * @param options - Shared query options
 * @returns SQL query result
 * @throws Error if the backend reports a non-sql engine result
 */
async function querySql(options) {
	const result = await query({
		...options,
		engine: "sql"
	});
	if (result.engine === "sql") {
		return result;
	}
	throw new Error(`Expected sql engine result but got: ${result.engine}`);
}
|
|
11975
|
+
/**
 * Execute GraphQL query directly.
 * @param options - Shared query options
 * @returns GraphQL query result
 * @throws Error if the backend reports a non-gql engine result
 */
async function queryGql(options) {
	const result = await query({
		...options,
		engine: "gql"
	});
	if (result.engine === "gql") {
		return result;
	}
	throw new Error(`Expected gql engine result but got: ${result.engine}`);
}
|
|
11988
|
+
/**
 * Best-effort reordering of SQL result columns to match the declared type
 * field order. Returns the original result untouched when it is not a row
 * set, when no column template can be extracted from the query, or when any
 * step of the reordering fails.
 * @param result - Query result envelope
 * @param config - Workspace/config handle used to load type field order
 * @param namespace - Namespace whose type definitions drive the ordering
 * @param sqlQuery$1 - The SQL text the result came from
 * @returns A result with reordered row keys, or the input unchanged
 */
async function reorderSqlColumns(result, config, namespace, sqlQuery$1) {
	const execution = result.result;
	if (!isSQLExecutionResult(execution) || execution.rows.length === 0) {
		return result;
	}
	const template = extractColumnTemplate(sqlQuery$1);
	if (!template) {
		return result;
	}
	try {
		const fieldOrder = await loadTypeFieldOrder(config, namespace);
		const expectedOrder = buildExpectedColumnOrder(template, fieldOrder);
		if (expectedOrder.length === 0) {
			return result;
		}
		const rows = execution.rows.map((row) => reorderRowByTemplate(row, expectedOrder));
		return {
			...result,
			result: {
				...execution,
				rows
			}
		};
	} catch {
		// Reordering is purely cosmetic; on any failure fall back to the
		// original result rather than surfacing an error.
		return result;
	}
}
|
|
12007
|
+
// System columns that always precede user-defined fields for wildcard slots.
const SYSTEM_FIELD_ORDER = ["id"];
/**
 * Expand a column template into the flat list of expected column names.
 * Explicit slots contribute their own name; wildcard slots contribute the
 * system fields followed by each referenced type's declared field order.
 * @param template - Array of { type: "explicit", name } or { type: ..., typeNames } slots
 * @param fieldOrder - Map from type name to its ordered field names
 * @returns Expected column names in display order (may contain duplicates)
 */
function buildExpectedColumnOrder(template, fieldOrder) {
	return template.flatMap((slot) => {
		if (slot.type === "explicit") {
			return [slot.name];
		}
		return slot.typeNames.flatMap((typeName) => [...SYSTEM_FIELD_ORDER, ...fieldOrder.get(typeName) ?? []]);
	});
}
|
|
12017
|
+
/**
 * Rebuild a row object so its keys follow the expected column order.
 * Matching is case-insensitive; expected columns come first (in template
 * order), and any keys not covered by the template keep their original
 * relative order at the end. Values are copied as-is.
 * @param row - Original row object
 * @param expectedOrder - Desired column-name order
 * @returns A new object with reordered keys
 */
function reorderRowByTemplate(row, expectedOrder) {
	const ordered = {};
	const remaining = new Set(Object.keys(row));
	const byLowerCase = new Map();
	for (const key of remaining) {
		byLowerCase.set(key.toLowerCase(), key);
	}
	for (const wanted of expectedOrder) {
		const lower = wanted.toLowerCase();
		const actual = byLowerCase.get(lower);
		// Skip names absent from the row, or already consumed by an earlier
		// (duplicate) template entry.
		if (actual == null || !remaining.has(actual)) continue;
		ordered[actual] = row[actual];
		remaining.delete(actual);
		byLowerCase.delete(lower);
	}
	// Append whatever the template did not mention, in original order.
	for (const leftover of remaining) {
		ordered[leftover] = row[leftover];
	}
	return ordered;
}
|
|
12033
|
+
/**
 * `query` CLI command: run a SQL or GraphQL query against a workspace,
 * either directly (-q/--query) or via the interactive REPL when no query
 * string is given.
 */
const queryCommand = defineCommand({
	name: "query",
	description: "Run SQL/GraphQL query.",
	// Zod schema for CLI arguments; .strict() rejects unknown flags.
	args: z.object({
		...commonArgs,
		...jsonArgs,
		...deploymentArgs,
		engine: arg(queryEngineSchema, { description: "Query engine (sql or gql)" }),
		query: arg(z.string().optional(), {
			alias: "q",
			description: "Query string to execute directly; omit to start REPL mode"
		}),
		machineuser: arg(z.string(), {
			alias: "m",
			description: "Machine user name for query execution"
		})
	}).strict(),
	run: withCommonArgs(async (args) => {
		// Normalizes direct-vs-REPL mode from the optional query flag.
		const mode = resolveQueryCommandInput({ query: args.query });
		// Options shared by both REPL and direct execution paths.
		const sharedOptions = {
			workspaceId: args["workspace-id"],
			profile: args.profile,
			configPath: args.config,
			engine: args.engine,
			machineUser: args.machineuser
		};
		// No query string: enter interactive REPL mode.
		if (mode.query == null) {
			await runRepl({
				...sharedOptions,
				json: args.json
			});
			return;
		}
		const directQuery = mode.query;
		// Direct execution: dispatch to the engine-specific runner/printer.
		if (args.engine === "sql") {
			printSqlResult(await querySql({
				...sharedOptions,
				query: directQuery
			}), { json: args.json });
			return;
		}
		printGqlResult(await queryGql({
			...sharedOptions,
			query: directQuery
		}), { json: args.json });
	})
});
|
|
12080
|
+
/**
 * Type guard: does this value look like a single SQL execution result
 * ({ rows: unknown[], rowCount: number, ... })?
 * @param value - Candidate value of unknown shape
 * @returns True when the value has a rows array and numeric rowCount
 */
function isSQLExecutionResult(value) {
	if (typeof value !== "object" || value === null) {
		return false;
	}
	const { rows, rowCount } = value;
	return Array.isArray(rows) && typeof rowCount === "number";
}
|
|
12085
|
+
/**
 * Print one SQL execution result, honoring JSON vs. human-readable output.
 * JSON mode always emits a { results, rowCount } envelope (with rowCount 0
 * when there are no rows); table mode prints the rows and a row-count line,
 * or a "No rows" notice.
 * @param execResult - { rows, rowCount } execution result
 * @param options - { json?: boolean } output options
 */
function printSingleSqlResult(execResult, options = {}) {
	const hasRows = execResult.rows.length > 0;
	if (options.json) {
		const payload = hasRows ? {
			results: execResult.rows,
			rowCount: execResult.rowCount
		} : {
			results: [],
			rowCount: 0
		};
		logger.out(payload);
		return;
	}
	if (!hasRows) {
		logger.info("No rows returned.");
		return;
	}
	logger.out(execResult.rows, { showNull: true });
	logger.out(`rows: ${execResult.rowCount}`);
}
|
|
12107
|
+
/**
 * Split a SQL string into individual statements using the SQL parser,
 * rendering each back to text. If parsing fails, fall back to treating the
 * whole (trimmed) input as a single statement.
 * @param query$1 - Raw SQL text, possibly containing multiple statements
 * @returns Array of statement strings (empty for blank/unparseable-empty input)
 */
function splitSqlStatements(query$1) {
	try {
		return parse(query$1).map((statement) => toSql.statement(statement));
	} catch {
		const fallback = query$1.trim();
		return fallback.length === 0 ? [] : [fallback];
	}
}
|
|
12117
|
+
/**
 * Type guard: non-empty array whose elements are all SQL execution results
 * (the shape returned for multi-statement queries).
 * @param value - Candidate value of unknown shape
 * @returns True for a non-empty array of execution results
 */
function isSQLExecutionResultArray(value) {
	if (!Array.isArray(value) || value.length === 0) {
		return false;
	}
	return value.every(isSQLExecutionResult);
}
|
|
12120
|
+
/**
 * Print a SQL query result envelope. Handles three shapes:
 * - an array of execution results (multi-statement query), printed per
 *   statement with the originating SQL as a heading;
 * - a single execution result;
 * - anything else, dumped verbatim with engine and query for debugging.
 * @param result - { engine, query, result } envelope
 * @param options - { json?: boolean } output options
 */
function printSqlResult(result, options = {}) {
	const payload = result.result;
	if (isSQLExecutionResultArray(payload)) {
		if (options.json) {
			const summaries = payload.map((r) => ({
				results: r.rows,
				rowCount: r.rowCount
			}));
			logger.out(summaries);
			return;
		}
		// Re-split the original query so each result can be labeled with
		// the statement that produced it.
		const queries = splitSqlStatements(result.query);
		payload.forEach((execResult, i) => {
			if (i > 0) logger.log("");
			logger.info(queries[i] ?? `Statement ${i + 1}`);
			printSingleSqlResult(execResult, options);
		});
		return;
	}
	if (isSQLExecutionResult(payload)) {
		printSingleSqlResult(payload, options);
		return;
	}
	// Unknown result shape: show the full envelope.
	logger.out({
		engine: result.engine,
		query: result.query,
		result: payload
	});
}
|
|
12147
|
+
/**
 * Print a GraphQL query result: the raw result object in JSON mode, or a
 * pretty-printed JSON string for human-readable output.
 * @param result - { result } envelope from the GraphQL executor
 * @param options - { json?: boolean } output options
 */
function printGqlResult(result, options = {}) {
	if (options.json) {
		logger.out({ result: result.result });
	} else {
		logger.out(JSON.stringify(result.result, null, 2));
	}
}
|
|
12154
|
+
|
|
12155
|
+
//#endregion
|
|
12156
|
+
export { getExecutorJob as $, truncateCommand as A, getMigrationDirPath as At, getCommand$1 as B, getNamespacesWithMigrations as Bt, getAppHealth as C, MIGRATE_FILE_NAME as Ct, listCommand$3 as D, createSnapshotFromLocalTypes as Dt, resumeWorkflow as E, compareSnapshots as Et, showCommand as F, loadDiff as Ft, listMachineUsers as G, commonArgs as Gt, getMachineUserToken as H, generateUserTypes as Ht, remove as I, reconstructSnapshotFromMigrations as It, webhookCommand as J, jsonArgs as Jt, generate$1 as K, confirmationArgs as Kt, removeCommand$1 as L, formatDiffSummary as Lt, generateCommand as M, getMigrationFiles as Mt, logBetaWarning as N, getNextMigrationNumber as Nt, listWorkflows as O, formatMigrationNumber as Ot, show as P, isValidMigrationNumber as Pt, listExecutors as Q, listCommand$4 as R, formatMigrationDiff as Rt, listCommand$2 as S, INITIAL_SCHEMA_NUMBER as St, resumeCommand as T, compareLocalTypesWithSnapshot as Tt, tokenCommand as U, apiCall as Ut, getOAuth2Client as V, trnPrefix as Vt, listCommand$5 as W, apiCommand as Wt, triggerExecutor as X, workspaceArgs as Xt, triggerCommand as Y, withCommonArgs as Yt, listCommand$6 as Z, deleteCommand as _, MIGRATION_LABEL_KEY as _t, removeCommand as a, getCommand$2 as at, createWorkspace as b, DB_TYPES_FILE_NAME as bt, listUsers as c, getWorkflowExecution as ct, restoreCommand as d, formatKeyValueTable as dt, jobsCommand as et, restoreWorkspace as f, getCommand$3 as ft, getWorkspace as g, waitForExecution$1 as gt, getCommand as h, executeScript as ht, updateUser as i, startWorkflow as it, generate as j, getMigrationFilePath as jt, truncate as k, getLatestMigrationNumber as kt, inviteCommand as l, listWorkflowExecutions as lt, listWorkspaces as m, apply as mt, queryCommand as n, watchExecutorJob as nt, removeUser as o, getWorkflow as ot, listCommand$1 as p, getExecutor as pt, listWebhookExecutors as q, deploymentArgs as qt, updateCommand as r, startCommand as rt, listCommand as s, executionsCommand as st, 
query as t, listExecutorJobs as tt, inviteUser as u, functionExecutionStatusToString as ut, deleteWorkspace as v, parseMigrationLabelNumber as vt, healthCommand as w, SCHEMA_FILE_NAME as wt, listApps as x, DIFF_FILE_NAME as xt, createCommand as y, bundleMigrationScript as yt, listOAuth2Clients as z, hasChanges as zt };
|
|
12157
|
+
//# sourceMappingURL=query-BLQBOaAM.mjs.map
|