@tailor-platform/sdk 1.25.4 → 1.26.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +20 -0
- package/dist/{application-DegTCDd8.mjs → application-CxH6Yp54.mjs} +1 -1
- package/dist/{application-91Th6tm6.mjs → application-D9xahQRQ.mjs} +2066 -1968
- package/dist/application-D9xahQRQ.mjs.map +1 -0
- package/dist/cli/index.mjs +152 -3
- package/dist/cli/index.mjs.map +1 -1
- package/dist/cli/lib.d.mts +350 -8
- package/dist/cli/lib.mjs +2 -2
- package/dist/configure/index.d.mts +5 -5
- package/dist/configure/index.mjs.map +1 -1
- package/dist/{env-uBeVwE9B.d.mts → env-CSsVESbH.d.mts} +2 -2
- package/dist/{index-Bu12qy3m.d.mts → index-BJg0DTbR.d.mts} +4 -4
- package/dist/{index-CT53egux.d.mts → index-BKy-OC5C.d.mts} +2 -2
- package/dist/{index-cZilKprY.d.mts → index-BtYPY8ya.d.mts} +2 -2
- package/dist/{index-BD-K97-C.d.mts → index-DgRShBpu.d.mts} +2 -2
- package/dist/{index-D1J5SfyK.d.mts → index-DkJbItB-.d.mts} +2 -2
- package/dist/plugin/builtin/enum-constants/index.d.mts +1 -1
- package/dist/plugin/builtin/file-utils/index.d.mts +1 -1
- package/dist/plugin/builtin/kysely-type/index.d.mts +1 -1
- package/dist/plugin/builtin/seed/index.d.mts +1 -1
- package/dist/plugin/index.d.mts +2 -2
- package/dist/{plugin-zY5wvV82.d.mts → plugin-B1hNwcCC.d.mts} +15 -3
- package/dist/{query-kb_4EQp4.mjs → query-B8ml6ClT.mjs} +454 -358
- package/dist/query-B8ml6ClT.mjs.map +1 -0
- package/dist/utils/test/index.d.mts +2 -2
- package/dist/{workflow.generated-v1LXRuB6.d.mts → workflow.generated-Bm4b8hEk.d.mts} +2 -2
- package/docs/cli/setup.md +82 -0
- package/docs/cli-reference.md +8 -0
- package/docs/services/auth.md +33 -0
- package/package.json +4 -4
- package/dist/application-91Th6tm6.mjs.map +0 -1
- package/dist/query-kb_4EQp4.mjs.map +0 -1
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import { t as db } from "./schema-BePzTFBV.mjs";
|
|
2
|
-
import { $ as
|
|
2
|
+
import { $ as AuthSCIMAttribute_Type, A as userAgent, B as PipelineResolver_OperationType, C as fetchAll, D as initOperatorClient, F as TailorDBGQLPermission_Operator, G as ExecutorTargetType, H as FunctionExecution_Status, I as TailorDBGQLPermission_Permit, J as AuthIDPConfig_AuthType, K as ExecutorTriggerType, L as TailorDBType_Permission_Operator, M as WorkflowExecution_Status, N as WorkflowJobExecution_Status, O as platformBaseUrl, P as TailorDBGQLPermission_Action, Q as AuthSCIMAttribute_Mutability, R as TailorDBType_Permission_Permit, S as writePlatformConfig, V as IdPLang, W as ExecutorJobStatus, X as AuthOAuth2Client_ClientType, Y as AuthInvokerSchema, Z as AuthOAuth2Client_GrantType, _ as hashFile, a as loadConfig, at as GetApplicationSchemaHealthResponse_ApplicationSchemaHealthStatus, b as loadWorkspaceId, ct as FilterSchema, d as TailorDBTypeSchema, dt as Subgraph_ServiceType, et as AuthSCIMAttribute_Uniqueness, f as stringifyFunction, ft as CIPromptError, g as getDistDir, h as createBundleCache, ht as symbols, it as UserProfileProviderConfig_UserProfileProviderType, j as WorkspacePlatformUserRole, k as resolveStaticWebsiteUrls, l as OAuth2ClientSchema, lt as PageDirection, m as loadFilesWithIgnores, mt as styles, n as generatePluginFilesIfNeeded, ot as ConditionSchema, p as tailorUserMap, pt as logger, q as AuthHookPoint, r as loadApplication, rt as TenantProviderConfig_TenantProviderType, s as createExecutorService, st as Condition_Operator, t as defineApplication, tt as AuthSCIMConfig_AuthorizationType, ut as ApplicationSchemaUpdateAttemptStatus, w as fetchMachineUserToken, x as readPlatformConfig, y as loadAccessToken, z as TailorDBType_PermitAction } from "./application-D9xahQRQ.mjs";
|
|
3
3
|
import { t as readPackageJson } from "./package-json-CVUv8Y9T.mjs";
|
|
4
4
|
import { r as withSpan } from "./telemetry-0w8OupuQ.mjs";
|
|
5
5
|
import { arg, createDefineCommand, defineCommand, runCommand } from "politty";
|
|
@@ -19,11 +19,11 @@ import { findUpSync } from "find-up-simple";
|
|
|
19
19
|
import ml from "multiline-ts";
|
|
20
20
|
import * as crypto from "node:crypto";
|
|
21
21
|
import { createHash } from "node:crypto";
|
|
22
|
-
import { pathToFileURL } from "node:url";
|
|
23
|
-
import * as inflection from "inflection";
|
|
24
22
|
import * as rolldown from "rolldown";
|
|
25
23
|
import * as fs from "node:fs/promises";
|
|
26
24
|
import { glob } from "node:fs/promises";
|
|
25
|
+
import { pathToFileURL } from "node:url";
|
|
26
|
+
import * as inflection from "inflection";
|
|
27
27
|
import { create, fromJson, toJson } from "@bufbuild/protobuf";
|
|
28
28
|
import { ExitPromptError } from "@inquirer/core";
|
|
29
29
|
import { confirm, input } from "@inquirer/prompts";
|
|
@@ -1073,6 +1073,273 @@ function protoSubgraph(subgraph) {
|
|
|
1073
1073
|
};
|
|
1074
1074
|
}
|
|
1075
1075
|
|
|
1076
|
+
//#endregion
|
|
1077
|
+
//#region src/cli/commands/apply/function-registry.ts
|
|
1078
|
+
const CHUNK_SIZE = 64 * 1024;
|
|
1079
|
+
/**
|
|
1080
|
+
* Compute SHA-256 content hash for a script string.
|
|
1081
|
+
* @param content - Script content to hash
|
|
1082
|
+
* @returns Hex-encoded SHA-256 hash
|
|
1083
|
+
*/
|
|
1084
|
+
function computeContentHash(content) {
|
|
1085
|
+
return crypto.createHash("sha256").update(content, "utf-8").digest("hex");
|
|
1086
|
+
}
|
|
1087
|
+
function functionRegistryTrn(workspaceId, name) {
|
|
1088
|
+
return `trn:v1:workspace:${workspaceId}:function_registry:${name}`;
|
|
1089
|
+
}
|
|
1090
|
+
/**
|
|
1091
|
+
* Build a function registry name for a resolver.
|
|
1092
|
+
* @param namespace - Resolver namespace
|
|
1093
|
+
* @param resolverName - Resolver name
|
|
1094
|
+
* @returns Function registry name
|
|
1095
|
+
*/
|
|
1096
|
+
function resolverFunctionName(namespace, resolverName) {
|
|
1097
|
+
return `resolver--${namespace}--${resolverName}`;
|
|
1098
|
+
}
|
|
1099
|
+
/**
|
|
1100
|
+
* Build a function registry name for an executor.
|
|
1101
|
+
* @param executorName - Executor name
|
|
1102
|
+
* @returns Function registry name
|
|
1103
|
+
*/
|
|
1104
|
+
function executorFunctionName(executorName) {
|
|
1105
|
+
return `executor--${executorName}`;
|
|
1106
|
+
}
|
|
1107
|
+
/**
|
|
1108
|
+
* Build a function registry name for a workflow job.
|
|
1109
|
+
* @param jobName - Workflow job name
|
|
1110
|
+
* @returns Function registry name
|
|
1111
|
+
*/
|
|
1112
|
+
function workflowJobFunctionName(jobName) {
|
|
1113
|
+
return `workflow--${jobName}`;
|
|
1114
|
+
}
|
|
1115
|
+
/**
|
|
1116
|
+
* Build a function registry name for an auth hook.
|
|
1117
|
+
* @param authName - Auth namespace name
|
|
1118
|
+
* @param hookPoint - Hook point identifier (e.g. "before-login")
|
|
1119
|
+
* @returns Function registry name
|
|
1120
|
+
*/
|
|
1121
|
+
function authHookFunctionName(authName, hookPoint) {
|
|
1122
|
+
return `auth-hook--${authName}--${hookPoint}`;
|
|
1123
|
+
}
|
|
1124
|
+
/**
|
|
1125
|
+
* Collect all function entries from bundled scripts for all services.
|
|
1126
|
+
* @param application - Application definition
|
|
1127
|
+
* @param workflowJobs - Collected workflow jobs from config
|
|
1128
|
+
* @returns Array of function entries to register
|
|
1129
|
+
*/
|
|
1130
|
+
function collectFunctionEntries(application, workflowJobs) {
|
|
1131
|
+
const entries = [];
|
|
1132
|
+
const distDir = getDistDir();
|
|
1133
|
+
for (const app of application.applications) for (const pipeline of app.resolverServices) for (const resolver of Object.values(pipeline.resolvers)) {
|
|
1134
|
+
const scriptPath = path.join(distDir, "resolvers", `${resolver.name}.js`);
|
|
1135
|
+
try {
|
|
1136
|
+
const content = fs$1.readFileSync(scriptPath, "utf-8");
|
|
1137
|
+
entries.push({
|
|
1138
|
+
name: resolverFunctionName(pipeline.namespace, resolver.name),
|
|
1139
|
+
scriptContent: content,
|
|
1140
|
+
contentHash: computeContentHash(content),
|
|
1141
|
+
description: `Resolver: ${pipeline.namespace}/${resolver.name}`
|
|
1142
|
+
});
|
|
1143
|
+
} catch {
|
|
1144
|
+
logger.warn(`Function file not found: ${scriptPath}`);
|
|
1145
|
+
}
|
|
1146
|
+
}
|
|
1147
|
+
if (application.executorService) {
|
|
1148
|
+
const executors = application.executorService.executors;
|
|
1149
|
+
for (const executor of Object.values(executors)) if (executor.operation.kind === "function" || executor.operation.kind === "jobFunction") {
|
|
1150
|
+
const scriptPath = path.join(distDir, "executors", `${executor.name}.js`);
|
|
1151
|
+
try {
|
|
1152
|
+
const content = fs$1.readFileSync(scriptPath, "utf-8");
|
|
1153
|
+
entries.push({
|
|
1154
|
+
name: executorFunctionName(executor.name),
|
|
1155
|
+
scriptContent: content,
|
|
1156
|
+
contentHash: computeContentHash(content),
|
|
1157
|
+
description: `Executor: ${executor.name}`
|
|
1158
|
+
});
|
|
1159
|
+
} catch {
|
|
1160
|
+
logger.warn(`Function file not found: ${scriptPath}`);
|
|
1161
|
+
}
|
|
1162
|
+
}
|
|
1163
|
+
}
|
|
1164
|
+
for (const job of workflowJobs) {
|
|
1165
|
+
const scriptPath = path.join(distDir, "workflow-jobs", `${job.name}.js`);
|
|
1166
|
+
try {
|
|
1167
|
+
const content = fs$1.readFileSync(scriptPath, "utf-8");
|
|
1168
|
+
entries.push({
|
|
1169
|
+
name: workflowJobFunctionName(job.name),
|
|
1170
|
+
scriptContent: content,
|
|
1171
|
+
contentHash: computeContentHash(content),
|
|
1172
|
+
description: `Workflow job: ${job.name}`
|
|
1173
|
+
});
|
|
1174
|
+
} catch {
|
|
1175
|
+
logger.warn(`Function file not found: ${scriptPath}`);
|
|
1176
|
+
}
|
|
1177
|
+
}
|
|
1178
|
+
for (const app of application.applications) if (app.authService?.config.hooks?.beforeLogin) {
|
|
1179
|
+
const authName = app.authService.config.name;
|
|
1180
|
+
const funcName = authHookFunctionName(authName, "before-login");
|
|
1181
|
+
const scriptPath = path.join(distDir, "auth-hooks", `${funcName}.js`);
|
|
1182
|
+
try {
|
|
1183
|
+
const content = fs$1.readFileSync(scriptPath, "utf-8");
|
|
1184
|
+
entries.push({
|
|
1185
|
+
name: funcName,
|
|
1186
|
+
scriptContent: content,
|
|
1187
|
+
contentHash: computeContentHash(content),
|
|
1188
|
+
description: `Auth hook: ${authName}/before-login`
|
|
1189
|
+
});
|
|
1190
|
+
} catch {
|
|
1191
|
+
logger.warn(`Function file not found: ${scriptPath}`);
|
|
1192
|
+
}
|
|
1193
|
+
}
|
|
1194
|
+
return entries;
|
|
1195
|
+
}
|
|
1196
|
+
/**
|
|
1197
|
+
* Plan function registry changes based on current and desired state.
|
|
1198
|
+
* @param client - Operator client instance
|
|
1199
|
+
* @param workspaceId - Workspace ID
|
|
1200
|
+
* @param appName - Application name
|
|
1201
|
+
* @param entries - Desired function entries
|
|
1202
|
+
* @returns Planned changes
|
|
1203
|
+
*/
|
|
1204
|
+
async function planFunctionRegistry(client, workspaceId, appName, entries) {
|
|
1205
|
+
const changeSet = createChangeSet("Function registry");
|
|
1206
|
+
const conflicts = [];
|
|
1207
|
+
const unmanaged = [];
|
|
1208
|
+
const resourceOwners = /* @__PURE__ */ new Set();
|
|
1209
|
+
const existingFunctions = await fetchAll(async (pageToken, maxPageSize) => {
|
|
1210
|
+
try {
|
|
1211
|
+
const response = await client.listFunctionRegistries({
|
|
1212
|
+
workspaceId,
|
|
1213
|
+
pageToken,
|
|
1214
|
+
pageSize: maxPageSize
|
|
1215
|
+
});
|
|
1216
|
+
return [response.functions.map((f) => ({
|
|
1217
|
+
name: f.name,
|
|
1218
|
+
contentHash: f.contentHash
|
|
1219
|
+
})), response.nextPageToken];
|
|
1220
|
+
} catch (error) {
|
|
1221
|
+
if (error instanceof ConnectError && error.code === Code.NotFound) return [[], ""];
|
|
1222
|
+
throw error;
|
|
1223
|
+
}
|
|
1224
|
+
});
|
|
1225
|
+
const existingMap = {};
|
|
1226
|
+
await Promise.all(existingFunctions.map(async (func) => {
|
|
1227
|
+
const { metadata } = await client.getMetadata({ trn: functionRegistryTrn(workspaceId, func.name) });
|
|
1228
|
+
existingMap[func.name] = {
|
|
1229
|
+
resource: func,
|
|
1230
|
+
label: metadata?.labels[sdkNameLabelKey]
|
|
1231
|
+
};
|
|
1232
|
+
}));
|
|
1233
|
+
for (const entry of entries) {
|
|
1234
|
+
const existing = existingMap[entry.name];
|
|
1235
|
+
const metaRequest = await buildMetaRequest(functionRegistryTrn(workspaceId, entry.name), appName);
|
|
1236
|
+
if (existing) {
|
|
1237
|
+
if (!existing.label) unmanaged.push({
|
|
1238
|
+
resourceType: "Function registry",
|
|
1239
|
+
resourceName: entry.name
|
|
1240
|
+
});
|
|
1241
|
+
else if (existing.label !== appName) conflicts.push({
|
|
1242
|
+
resourceType: "Function registry",
|
|
1243
|
+
resourceName: entry.name,
|
|
1244
|
+
currentOwner: existing.label
|
|
1245
|
+
});
|
|
1246
|
+
changeSet.updates.push({
|
|
1247
|
+
name: entry.name,
|
|
1248
|
+
entry,
|
|
1249
|
+
metaRequest
|
|
1250
|
+
});
|
|
1251
|
+
delete existingMap[entry.name];
|
|
1252
|
+
} else changeSet.creates.push({
|
|
1253
|
+
name: entry.name,
|
|
1254
|
+
entry,
|
|
1255
|
+
metaRequest
|
|
1256
|
+
});
|
|
1257
|
+
}
|
|
1258
|
+
for (const [name, existing] of Object.entries(existingMap)) {
|
|
1259
|
+
if (!existing) continue;
|
|
1260
|
+
const label = existing.label;
|
|
1261
|
+
if (label && label !== appName) resourceOwners.add(label);
|
|
1262
|
+
if (label === appName) changeSet.deletes.push({
|
|
1263
|
+
name,
|
|
1264
|
+
workspaceId
|
|
1265
|
+
});
|
|
1266
|
+
}
|
|
1267
|
+
changeSet.print();
|
|
1268
|
+
return {
|
|
1269
|
+
changeSet,
|
|
1270
|
+
conflicts,
|
|
1271
|
+
unmanaged,
|
|
1272
|
+
resourceOwners
|
|
1273
|
+
};
|
|
1274
|
+
}
|
|
1275
|
+
/**
|
|
1276
|
+
* Upload a function script to the function registry using client streaming.
|
|
1277
|
+
* @param client - Operator client instance
|
|
1278
|
+
* @param workspaceId - Workspace ID
|
|
1279
|
+
* @param entry - Function entry to upload
|
|
1280
|
+
* @param isCreate - Whether this is a create (true) or update (false)
|
|
1281
|
+
*/
|
|
1282
|
+
async function uploadFunctionScript(client, workspaceId, entry, isCreate) {
|
|
1283
|
+
const buffer = Buffer.from(entry.scriptContent, "utf-8");
|
|
1284
|
+
const info = {
|
|
1285
|
+
workspaceId,
|
|
1286
|
+
name: entry.name,
|
|
1287
|
+
description: entry.description,
|
|
1288
|
+
sizeBytes: BigInt(buffer.length),
|
|
1289
|
+
contentHash: entry.contentHash
|
|
1290
|
+
};
|
|
1291
|
+
if (isCreate) {
|
|
1292
|
+
/** @yields {MessageInitShape<typeof CreateFunctionRegistryRequestSchema>} Create request messages (info header followed by content chunks) */
|
|
1293
|
+
async function* createStream() {
|
|
1294
|
+
yield { payload: {
|
|
1295
|
+
case: "info",
|
|
1296
|
+
value: info
|
|
1297
|
+
} };
|
|
1298
|
+
for (let i = 0; i < buffer.length; i += CHUNK_SIZE) yield { payload: {
|
|
1299
|
+
case: "chunk",
|
|
1300
|
+
value: buffer.subarray(i, Math.min(i + CHUNK_SIZE, buffer.length))
|
|
1301
|
+
} };
|
|
1302
|
+
}
|
|
1303
|
+
await client.createFunctionRegistry(createStream());
|
|
1304
|
+
} else {
|
|
1305
|
+
/** @yields {MessageInitShape<typeof UpdateFunctionRegistryRequestSchema>} Update request messages (info header followed by content chunks) */
|
|
1306
|
+
async function* updateStream() {
|
|
1307
|
+
yield { payload: {
|
|
1308
|
+
case: "info",
|
|
1309
|
+
value: info
|
|
1310
|
+
} };
|
|
1311
|
+
for (let i = 0; i < buffer.length; i += CHUNK_SIZE) yield { payload: {
|
|
1312
|
+
case: "chunk",
|
|
1313
|
+
value: buffer.subarray(i, Math.min(i + CHUNK_SIZE, buffer.length))
|
|
1314
|
+
} };
|
|
1315
|
+
}
|
|
1316
|
+
await client.updateFunctionRegistry(updateStream());
|
|
1317
|
+
}
|
|
1318
|
+
}
|
|
1319
|
+
/**
|
|
1320
|
+
* Apply function registry changes for the given phase.
|
|
1321
|
+
* @param client - Operator client instance
|
|
1322
|
+
* @param workspaceId - Workspace ID
|
|
1323
|
+
* @param result - Planned function registry changes
|
|
1324
|
+
* @param phase - Apply phase
|
|
1325
|
+
*/
|
|
1326
|
+
async function applyFunctionRegistry(client, workspaceId, result, phase = "create-update") {
|
|
1327
|
+
const { changeSet } = result;
|
|
1328
|
+
if (phase === "create-update") {
|
|
1329
|
+
for (const create of changeSet.creates) {
|
|
1330
|
+
await uploadFunctionScript(client, workspaceId, create.entry, true);
|
|
1331
|
+
await client.setMetadata(create.metaRequest);
|
|
1332
|
+
}
|
|
1333
|
+
for (const update of changeSet.updates) {
|
|
1334
|
+
await uploadFunctionScript(client, workspaceId, update.entry, false);
|
|
1335
|
+
await client.setMetadata(update.metaRequest);
|
|
1336
|
+
}
|
|
1337
|
+
} else if (phase === "delete") await Promise.all(changeSet.deletes.map((del) => client.deleteFunctionRegistry({
|
|
1338
|
+
workspaceId: del.workspaceId,
|
|
1339
|
+
name: del.name
|
|
1340
|
+
})));
|
|
1341
|
+
}
|
|
1342
|
+
|
|
1076
1343
|
//#endregion
|
|
1077
1344
|
//#region src/cli/commands/apply/idp.ts
|
|
1078
1345
|
/**
|
|
@@ -1400,6 +1667,7 @@ async function applyAuth(client, result, phase = "create-update") {
|
|
|
1400
1667
|
await Promise.all([...changeSet.userProfileConfig.creates.map((create) => client.createUserProfileConfig(create.request)), ...changeSet.userProfileConfig.updates.map((update) => client.updateUserProfileConfig(update.request))]);
|
|
1401
1668
|
await Promise.all([...changeSet.tenantConfig.creates.map((create) => client.createTenantConfig(create.request)), ...changeSet.tenantConfig.updates.map((update) => client.updateTenantConfig(update.request))]);
|
|
1402
1669
|
await Promise.all([...changeSet.machineUser.creates.map((create) => client.createAuthMachineUser(create.request)), ...changeSet.machineUser.updates.map((update) => client.updateAuthMachineUser(update.request))]);
|
|
1670
|
+
await Promise.all([...changeSet.authHook.creates.map((create) => client.createAuthHook(create.request)), ...changeSet.authHook.updates.map((update) => client.updateAuthHook(update.request))]);
|
|
1403
1671
|
await Promise.all([...changeSet.oauth2Client.creates.map(async (create) => {
|
|
1404
1672
|
create.request.oauth2Client.redirectUris = await resolveStaticWebsiteUrls(client, create.request.workspaceId, create.request.oauth2Client.redirectUris, "OAuth2 redirect URIs");
|
|
1405
1673
|
return client.createAuthOAuth2Client(create.request);
|
|
@@ -1418,6 +1686,7 @@ async function applyAuth(client, result, phase = "create-update") {
|
|
|
1418
1686
|
await Promise.all(changeSet.scimResource.deletes.map((del) => client.deleteAuthSCIMResource(del.request)));
|
|
1419
1687
|
await Promise.all(changeSet.scim.deletes.map((del) => client.deleteAuthSCIMConfig(del.request)));
|
|
1420
1688
|
await Promise.all(changeSet.oauth2Client.deletes.map((del) => client.deleteAuthOAuth2Client(del.request)));
|
|
1689
|
+
await Promise.all(changeSet.authHook.deletes.map((del) => client.deleteAuthHook(del.request)));
|
|
1421
1690
|
await Promise.all(changeSet.machineUser.deletes.map((del) => client.deleteAuthMachineUser(del.request)));
|
|
1422
1691
|
await Promise.all(changeSet.tenantConfig.deletes.map((del) => client.deleteTenantConfig(del.request)));
|
|
1423
1692
|
await Promise.all(changeSet.userProfileConfig.deletes.map((del) => client.deleteUserProfileConfig(del.request)));
|
|
@@ -1438,11 +1707,12 @@ async function planAuth(context) {
|
|
|
1438
1707
|
}
|
|
1439
1708
|
const { changeSet: serviceChangeSet, conflicts, unmanaged, resourceOwners } = await planServices$2(client, workspaceId, application.name, auths);
|
|
1440
1709
|
const deletedServices = serviceChangeSet.deletes.map((del) => del.name);
|
|
1441
|
-
const [idpConfigChangeSet, userProfileConfigChangeSet, tenantConfigChangeSet, machineUserChangeSet, oauth2ClientChangeSet, scimChangeSet, scimResourceChangeSet] = await Promise.all([
|
|
1710
|
+
const [idpConfigChangeSet, userProfileConfigChangeSet, tenantConfigChangeSet, machineUserChangeSet, authHookChangeSet, oauth2ClientChangeSet, scimChangeSet, scimResourceChangeSet] = await Promise.all([
|
|
1442
1711
|
planIdPConfigs(client, workspaceId, auths, deletedServices),
|
|
1443
1712
|
planUserProfileConfigs(client, workspaceId, auths, deletedServices),
|
|
1444
1713
|
planTenantConfigs(client, workspaceId, auths, deletedServices),
|
|
1445
1714
|
planMachineUsers(client, workspaceId, auths, deletedServices),
|
|
1715
|
+
planAuthHooks(client, workspaceId, auths, deletedServices),
|
|
1446
1716
|
planOAuth2Clients(client, workspaceId, auths, deletedServices),
|
|
1447
1717
|
planSCIMConfigs(client, workspaceId, auths, deletedServices),
|
|
1448
1718
|
planSCIMResources(client, workspaceId, auths, deletedServices)
|
|
@@ -1452,6 +1722,7 @@ async function planAuth(context) {
|
|
|
1452
1722
|
userProfileConfigChangeSet.print();
|
|
1453
1723
|
tenantConfigChangeSet.print();
|
|
1454
1724
|
machineUserChangeSet.print();
|
|
1725
|
+
authHookChangeSet.print();
|
|
1455
1726
|
oauth2ClientChangeSet.print();
|
|
1456
1727
|
scimChangeSet.print();
|
|
1457
1728
|
scimResourceChangeSet.print();
|
|
@@ -1462,6 +1733,7 @@ async function planAuth(context) {
|
|
|
1462
1733
|
userProfileConfig: userProfileConfigChangeSet,
|
|
1463
1734
|
tenantConfig: tenantConfigChangeSet,
|
|
1464
1735
|
machineUser: machineUserChangeSet,
|
|
1736
|
+
authHook: authHookChangeSet,
|
|
1465
1737
|
oauth2Client: oauth2ClientChangeSet,
|
|
1466
1738
|
scim: scimChangeSet,
|
|
1467
1739
|
scimResource: scimResourceChangeSet
|
|
@@ -2261,6 +2533,72 @@ function protoSCIMAttribute(attr) {
|
|
|
2261
2533
|
subAttributes: attr.subAttributes?.map((attr) => protoSCIMAttribute(attr))
|
|
2262
2534
|
};
|
|
2263
2535
|
}
|
|
2536
|
+
async function planAuthHooks(client, workspaceId, auths, deletedServices) {
|
|
2537
|
+
const changeSet = createChangeSet("Auth hooks");
|
|
2538
|
+
for (const auth of auths) {
|
|
2539
|
+
const { parsedConfig: config } = auth;
|
|
2540
|
+
const beforeLogin = config.hooks?.beforeLogin;
|
|
2541
|
+
let existingHook;
|
|
2542
|
+
try {
|
|
2543
|
+
await client.getAuthHook({
|
|
2544
|
+
workspaceId,
|
|
2545
|
+
namespaceName: config.name,
|
|
2546
|
+
hookPoint: AuthHookPoint.BEFORE_LOGIN
|
|
2547
|
+
});
|
|
2548
|
+
existingHook = true;
|
|
2549
|
+
} catch (error) {
|
|
2550
|
+
if (error instanceof ConnectError && error.code === Code.NotFound) existingHook = false;
|
|
2551
|
+
else throw error;
|
|
2552
|
+
}
|
|
2553
|
+
if (beforeLogin) {
|
|
2554
|
+
const hookRequest = {
|
|
2555
|
+
workspaceId,
|
|
2556
|
+
namespaceName: config.name,
|
|
2557
|
+
hook: {
|
|
2558
|
+
hookPoint: AuthHookPoint.BEFORE_LOGIN,
|
|
2559
|
+
scriptRef: authHookFunctionName(config.name, "before-login"),
|
|
2560
|
+
invoker: {
|
|
2561
|
+
namespace: config.name,
|
|
2562
|
+
machineUserName: beforeLogin.invoker
|
|
2563
|
+
}
|
|
2564
|
+
}
|
|
2565
|
+
};
|
|
2566
|
+
if (existingHook) changeSet.updates.push({
|
|
2567
|
+
name: `${config.name}/before-login`,
|
|
2568
|
+
request: hookRequest
|
|
2569
|
+
});
|
|
2570
|
+
else changeSet.creates.push({
|
|
2571
|
+
name: `${config.name}/before-login`,
|
|
2572
|
+
request: hookRequest
|
|
2573
|
+
});
|
|
2574
|
+
} else if (existingHook) changeSet.deletes.push({
|
|
2575
|
+
name: `${config.name}/before-login`,
|
|
2576
|
+
request: {
|
|
2577
|
+
workspaceId,
|
|
2578
|
+
namespaceName: config.name,
|
|
2579
|
+
hookPoint: AuthHookPoint.BEFORE_LOGIN
|
|
2580
|
+
}
|
|
2581
|
+
});
|
|
2582
|
+
}
|
|
2583
|
+
for (const namespaceName of deletedServices) try {
|
|
2584
|
+
await client.getAuthHook({
|
|
2585
|
+
workspaceId,
|
|
2586
|
+
namespaceName,
|
|
2587
|
+
hookPoint: AuthHookPoint.BEFORE_LOGIN
|
|
2588
|
+
});
|
|
2589
|
+
changeSet.deletes.push({
|
|
2590
|
+
name: `${namespaceName}/before-login`,
|
|
2591
|
+
request: {
|
|
2592
|
+
workspaceId,
|
|
2593
|
+
namespaceName,
|
|
2594
|
+
hookPoint: AuthHookPoint.BEFORE_LOGIN
|
|
2595
|
+
}
|
|
2596
|
+
});
|
|
2597
|
+
} catch (error) {
|
|
2598
|
+
if (error instanceof ConnectError && error.code === Code.NotFound) {} else throw error;
|
|
2599
|
+
}
|
|
2600
|
+
return changeSet;
|
|
2601
|
+
}
|
|
2264
2602
|
|
|
2265
2603
|
//#endregion
|
|
2266
2604
|
//#region src/cli/shared/prompt.ts
|
|
@@ -2325,358 +2663,116 @@ async function confirmOwnerConflict(conflicts, appName, yes) {
|
|
|
2325
2663
|
*/
|
|
2326
2664
|
async function confirmUnmanagedResources(resources, appName, yes) {
|
|
2327
2665
|
if (resources.length === 0) return;
|
|
2328
|
-
logger.warn("Existing resources not tracked by tailor-sdk were found:");
|
|
2329
|
-
logger.log(` ${styles.info("Resources")}:`);
|
|
2330
|
-
for (const r of resources) logger.log(` • ${styles.bold(r.resourceType)} ${styles.info(`"${r.resourceName}"`)}`);
|
|
2331
|
-
logger.newline();
|
|
2332
|
-
logger.log(" These resources may have been created by older SDK versions, Terraform, or CUE.");
|
|
2333
|
-
logger.log(" To continue, confirm that tailor-sdk should manage them.");
|
|
2334
|
-
logger.log(" If they are managed by another tool (e.g., Terraform), cancel and manage them there instead.");
|
|
2335
|
-
if (yes) {
|
|
2336
|
-
logger.success(`Adding to "${appName}" (--yes flag specified)...`, { mode: "plain" });
|
|
2337
|
-
return;
|
|
2338
|
-
}
|
|
2339
|
-
if (!await prompt.confirm({
|
|
2340
|
-
message: `Allow tailor-sdk to manage these resources for "${appName}"?`,
|
|
2341
|
-
default: false
|
|
2342
|
-
})) throw new Error(ml`
|
|
2343
|
-
Apply cancelled. Resources remain unmanaged.
|
|
2344
|
-
To override, run again and confirm, or use --yes flag.
|
|
2345
|
-
`);
|
|
2346
|
-
}
|
|
2347
|
-
/**
|
|
2348
|
-
* Confirm deletion of important resources.
|
|
2349
|
-
* @param resources - Resources scheduled for deletion
|
|
2350
|
-
* @param yes - Whether to auto-confirm without prompting
|
|
2351
|
-
* @returns Promise that resolves when confirmation completes
|
|
2352
|
-
*/
|
|
2353
|
-
async function confirmImportantResourceDeletion(resources, yes) {
|
|
2354
|
-
if (resources.length === 0) return;
|
|
2355
|
-
logger.warn("The following resources will be deleted:");
|
|
2356
|
-
logger.log(` ${styles.info("Resources")}:`);
|
|
2357
|
-
for (const r of resources) logger.log(` • ${styles.bold(r.resourceType)} ${styles.error(`"${r.resourceName}"`)}`);
|
|
2358
|
-
logger.newline();
|
|
2359
|
-
logger.log(styles.warning(" Deleting these resources will permanently remove all associated data."));
|
|
2360
|
-
if (yes) {
|
|
2361
|
-
logger.success("Deleting resources (--yes flag specified)...", { mode: "plain" });
|
|
2362
|
-
return;
|
|
2363
|
-
}
|
|
2364
|
-
if (!await prompt.confirm({
|
|
2365
|
-
message: "Are you sure you want to delete these resources?",
|
|
2366
|
-
default: false
|
|
2367
|
-
})) throw new Error(ml`
|
|
2368
|
-
Apply cancelled. Resources will not be deleted.
|
|
2369
|
-
To override, run again and confirm, or use --yes flag.
|
|
2370
|
-
`);
|
|
2371
|
-
}
|
|
2372
|
-
|
|
2373
|
-
//#endregion
|
|
2374
|
-
//#region src/cli/shared/runtime-args.ts
|
|
2375
|
-
/**
|
|
2376
|
-
* Runtime args transformation for all services.
|
|
2377
|
-
*
|
|
2378
|
-
* Each service transforms server-side args/context into SDK-friendly format:
|
|
2379
|
-
* - Executor: server-side expression evaluated by platform before calling function
|
|
2380
|
-
* - Resolver: operationHook expression evaluated by platform before calling function
|
|
2381
|
-
*
|
|
2382
|
-
* The user field mapping (server → SDK) shared across services is defined in
|
|
2383
|
-
* `@/parser/service/tailordb` as `tailorUserMap`.
|
|
2384
|
-
*/
|
|
2385
|
-
/**
|
|
2386
|
-
* Actor field transformation expression.
|
|
2387
|
-
*
|
|
2388
|
-
* Transforms the server's actor object to match the SDK's TailorActor type:
|
|
2389
|
-
* server `attributeMap` → SDK `attributes`
|
|
2390
|
-
* server `attributes` → SDK `attributeList`
|
|
2391
|
-
* other fields → passed through
|
|
2392
|
-
* null/undefined actor → null
|
|
2393
|
-
*/
|
|
2394
|
-
const ACTOR_TRANSFORM_EXPR = "actor: args.actor ? (({ attributeMap, attributes: attrList, ...rest }) => ({ ...rest, attributes: attributeMap, attributeList: attrList }))(args.actor) : null";
|
|
2395
|
-
/**
|
|
2396
|
-
* Build the JavaScript expression that transforms server-format executor event
|
|
2397
|
-
* args into SDK-format args at runtime.
|
|
2398
|
-
*
|
|
2399
|
-
* The Tailor Platform server delivers event args with server-side field names.
|
|
2400
|
-
* The SDK exposes different field names to user code. This function produces a
|
|
2401
|
-
* JavaScript expression string that performs the mapping when evaluated
|
|
2402
|
-
* server-side.
|
|
2403
|
-
* @param triggerKind - The trigger kind discriminant from the parsed executor
|
|
2404
|
-
* @param env - Application env record to embed in the expression
|
|
2405
|
-
* @returns A JavaScript expression string, e.g. `({ ...args, ... })`
|
|
2406
|
-
*/
|
|
2407
|
-
function buildExecutorArgsExpr(triggerKind, env) {
|
|
2408
|
-
const envExpr = `env: ${JSON.stringify(env)}`;
|
|
2409
|
-
switch (triggerKind) {
|
|
2410
|
-
case "schedule":
|
|
2411
|
-
case "recordCreated":
|
|
2412
|
-
case "recordUpdated":
|
|
2413
|
-
case "recordDeleted":
|
|
2414
|
-
case "idpUserCreated":
|
|
2415
|
-
case "idpUserUpdated":
|
|
2416
|
-
case "idpUserDeleted":
|
|
2417
|
-
case "authAccessTokenIssued":
|
|
2418
|
-
case "authAccessTokenRefreshed":
|
|
2419
|
-
case "authAccessTokenRevoked": return `({ ...args, appNamespace: args.namespaceName, ${ACTOR_TRANSFORM_EXPR}, ${envExpr} })`;
|
|
2420
|
-
case "resolverExecuted": return `({ ...args, appNamespace: args.namespaceName, ${ACTOR_TRANSFORM_EXPR}, success: !!args.succeeded, result: args.succeeded?.result.resolver, error: args.failed?.error, ${envExpr} })`;
|
|
2421
|
-
case "incomingWebhook": return `({ ...args, appNamespace: args.namespaceName, rawBody: args.raw_body, ${envExpr} })`;
|
|
2422
|
-
default: throw new Error(`Unknown trigger kind for args expression: ${triggerKind}`);
|
|
2423
|
-
}
|
|
2424
|
-
}
|
|
2425
|
-
/**
|
|
2426
|
-
* Build the operationHook expression for resolver pipelines.
|
|
2427
|
-
*
|
|
2428
|
-
* Transforms server context to SDK resolver context:
|
|
2429
|
-
* context.args → input
|
|
2430
|
-
* context.pipeline → spread into result
|
|
2431
|
-
* user (global var) → TailorUser (via tailorUserMap: workspace_id→workspaceId, attribute_map→attributes, attributes→attributeList)
|
|
2432
|
-
* env → injected as JSON
|
|
2433
|
-
* @param env - Application env record to embed in the expression
|
|
2434
|
-
* @returns A JavaScript expression string for the operationHook
|
|
2435
|
-
*/
|
|
2436
|
-
function buildResolverOperationHookExpr(env) {
|
|
2437
|
-
return `({ ...context.pipeline, input: context.args, user: ${tailorUserMap}, env: ${JSON.stringify(env)} });`;
|
|
2438
|
-
}
|
|
2439
|
-
|
|
2440
|
-
//#endregion
|
|
2441
|
-
//#region src/cli/commands/apply/function-registry.ts
|
|
2442
|
-
const CHUNK_SIZE = 64 * 1024;
|
|
2443
|
-
/**
|
|
2444
|
-
* Compute SHA-256 content hash for a script string.
|
|
2445
|
-
* @param content - Script content to hash
|
|
2446
|
-
* @returns Hex-encoded SHA-256 hash
|
|
2447
|
-
*/
|
|
2448
|
-
function computeContentHash(content) {
|
|
2449
|
-
return crypto.createHash("sha256").update(content, "utf-8").digest("hex");
|
|
2450
|
-
}
|
|
2451
|
-
function functionRegistryTrn(workspaceId, name) {
|
|
2452
|
-
return `trn:v1:workspace:${workspaceId}:function_registry:${name}`;
|
|
2453
|
-
}
|
|
2454
|
-
/**
|
|
2455
|
-
* Build a function registry name for a resolver.
|
|
2456
|
-
* @param namespace - Resolver namespace
|
|
2457
|
-
* @param resolverName - Resolver name
|
|
2458
|
-
* @returns Function registry name
|
|
2459
|
-
*/
|
|
2460
|
-
function resolverFunctionName(namespace, resolverName) {
|
|
2461
|
-
return `resolver--${namespace}--${resolverName}`;
|
|
2462
|
-
}
|
|
2463
|
-
/**
 * Build a function registry name for an executor.
 * @param executorName - Executor name
 * @returns Function registry name (`executor--<name>`)
 */
function executorFunctionName(executorName) {
	return ["executor", executorName].join("--");
}
|
|
2471
|
-
/**
 * Build a function registry name for a workflow job.
 * @param jobName - Workflow job name
 * @returns Function registry name (`workflow--<name>`)
 */
function workflowJobFunctionName(jobName) {
	return ["workflow", jobName].join("--");
}
|
|
2479
|
-
/**
|
|
2480
|
-
* Collect all function entries from bundled scripts for all services.
|
|
2481
|
-
* @param application - Application definition
|
|
2482
|
-
* @param workflowJobs - Collected workflow jobs from config
|
|
2483
|
-
* @returns Array of function entries to register
|
|
2484
|
-
*/
|
|
2485
|
-
function collectFunctionEntries(application, workflowJobs) {
|
|
2486
|
-
const entries = [];
|
|
2487
|
-
const distDir = getDistDir();
|
|
2488
|
-
for (const app of application.applications) for (const pipeline of app.resolverServices) for (const resolver of Object.values(pipeline.resolvers)) {
|
|
2489
|
-
const scriptPath = path.join(distDir, "resolvers", `${resolver.name}.js`);
|
|
2490
|
-
try {
|
|
2491
|
-
const content = fs$1.readFileSync(scriptPath, "utf-8");
|
|
2492
|
-
entries.push({
|
|
2493
|
-
name: resolverFunctionName(pipeline.namespace, resolver.name),
|
|
2494
|
-
scriptContent: content,
|
|
2495
|
-
contentHash: computeContentHash(content),
|
|
2496
|
-
description: `Resolver: ${pipeline.namespace}/${resolver.name}`
|
|
2497
|
-
});
|
|
2498
|
-
} catch {
|
|
2499
|
-
logger.warn(`Function file not found: ${scriptPath}`);
|
|
2500
|
-
}
|
|
2501
|
-
}
|
|
2502
|
-
if (application.executorService) {
|
|
2503
|
-
const executors = application.executorService.executors;
|
|
2504
|
-
for (const executor of Object.values(executors)) if (executor.operation.kind === "function" || executor.operation.kind === "jobFunction") {
|
|
2505
|
-
const scriptPath = path.join(distDir, "executors", `${executor.name}.js`);
|
|
2506
|
-
try {
|
|
2507
|
-
const content = fs$1.readFileSync(scriptPath, "utf-8");
|
|
2508
|
-
entries.push({
|
|
2509
|
-
name: executorFunctionName(executor.name),
|
|
2510
|
-
scriptContent: content,
|
|
2511
|
-
contentHash: computeContentHash(content),
|
|
2512
|
-
description: `Executor: ${executor.name}`
|
|
2513
|
-
});
|
|
2514
|
-
} catch {
|
|
2515
|
-
logger.warn(`Function file not found: ${scriptPath}`);
|
|
2516
|
-
}
|
|
2517
|
-
}
|
|
2518
|
-
}
|
|
2519
|
-
for (const job of workflowJobs) {
|
|
2520
|
-
const scriptPath = path.join(distDir, "workflow-jobs", `${job.name}.js`);
|
|
2521
|
-
try {
|
|
2522
|
-
const content = fs$1.readFileSync(scriptPath, "utf-8");
|
|
2523
|
-
entries.push({
|
|
2524
|
-
name: workflowJobFunctionName(job.name),
|
|
2525
|
-
scriptContent: content,
|
|
2526
|
-
contentHash: computeContentHash(content),
|
|
2527
|
-
description: `Workflow job: ${job.name}`
|
|
2528
|
-
});
|
|
2529
|
-
} catch {
|
|
2530
|
-
logger.warn(`Function file not found: ${scriptPath}`);
|
|
2531
|
-
}
|
|
2666
|
+
logger.warn("Existing resources not tracked by tailor-sdk were found:");
|
|
2667
|
+
logger.log(` ${styles.info("Resources")}:`);
|
|
2668
|
+
for (const r of resources) logger.log(` • ${styles.bold(r.resourceType)} ${styles.info(`"${r.resourceName}"`)}`);
|
|
2669
|
+
logger.newline();
|
|
2670
|
+
logger.log(" These resources may have been created by older SDK versions, Terraform, or CUE.");
|
|
2671
|
+
logger.log(" To continue, confirm that tailor-sdk should manage them.");
|
|
2672
|
+
logger.log(" If they are managed by another tool (e.g., Terraform), cancel and manage them there instead.");
|
|
2673
|
+
if (yes) {
|
|
2674
|
+
logger.success(`Adding to "${appName}" (--yes flag specified)...`, { mode: "plain" });
|
|
2675
|
+
return;
|
|
2532
2676
|
}
|
|
2533
|
-
|
|
2677
|
+
if (!await prompt.confirm({
|
|
2678
|
+
message: `Allow tailor-sdk to manage these resources for "${appName}"?`,
|
|
2679
|
+
default: false
|
|
2680
|
+
})) throw new Error(ml`
|
|
2681
|
+
Apply cancelled. Resources remain unmanaged.
|
|
2682
|
+
To override, run again and confirm, or use --yes flag.
|
|
2683
|
+
`);
|
|
2534
2684
|
}
|
|
2535
2685
|
/**
|
|
2536
|
-
*
|
|
2537
|
-
* @param
|
|
2538
|
-
* @param
|
|
2539
|
-
* @
|
|
2540
|
-
* @param entries - Desired function entries
|
|
2541
|
-
* @returns Planned changes
|
|
2686
|
+
* Confirm deletion of important resources.
|
|
2687
|
+
* @param resources - Resources scheduled for deletion
|
|
2688
|
+
* @param yes - Whether to auto-confirm without prompting
|
|
2689
|
+
* @returns Promise that resolves when confirmation completes
|
|
2542
2690
|
*/
|
|
2543
|
-
async function
|
|
2544
|
-
|
|
2545
|
-
|
|
2546
|
-
|
|
2547
|
-
const
|
|
2548
|
-
|
|
2549
|
-
|
|
2550
|
-
|
|
2551
|
-
|
|
2552
|
-
|
|
2553
|
-
pageSize: maxPageSize
|
|
2554
|
-
});
|
|
2555
|
-
return [response.functions.map((f) => ({
|
|
2556
|
-
name: f.name,
|
|
2557
|
-
contentHash: f.contentHash
|
|
2558
|
-
})), response.nextPageToken];
|
|
2559
|
-
} catch (error) {
|
|
2560
|
-
if (error instanceof ConnectError && error.code === Code.NotFound) return [[], ""];
|
|
2561
|
-
throw error;
|
|
2562
|
-
}
|
|
2563
|
-
});
|
|
2564
|
-
const existingMap = {};
|
|
2565
|
-
await Promise.all(existingFunctions.map(async (func) => {
|
|
2566
|
-
const { metadata } = await client.getMetadata({ trn: functionRegistryTrn(workspaceId, func.name) });
|
|
2567
|
-
existingMap[func.name] = {
|
|
2568
|
-
resource: func,
|
|
2569
|
-
label: metadata?.labels[sdkNameLabelKey]
|
|
2570
|
-
};
|
|
2571
|
-
}));
|
|
2572
|
-
for (const entry of entries) {
|
|
2573
|
-
const existing = existingMap[entry.name];
|
|
2574
|
-
const metaRequest = await buildMetaRequest(functionRegistryTrn(workspaceId, entry.name), appName);
|
|
2575
|
-
if (existing) {
|
|
2576
|
-
if (!existing.label) unmanaged.push({
|
|
2577
|
-
resourceType: "Function registry",
|
|
2578
|
-
resourceName: entry.name
|
|
2579
|
-
});
|
|
2580
|
-
else if (existing.label !== appName) conflicts.push({
|
|
2581
|
-
resourceType: "Function registry",
|
|
2582
|
-
resourceName: entry.name,
|
|
2583
|
-
currentOwner: existing.label
|
|
2584
|
-
});
|
|
2585
|
-
changeSet.updates.push({
|
|
2586
|
-
name: entry.name,
|
|
2587
|
-
entry,
|
|
2588
|
-
metaRequest
|
|
2589
|
-
});
|
|
2590
|
-
delete existingMap[entry.name];
|
|
2591
|
-
} else changeSet.creates.push({
|
|
2592
|
-
name: entry.name,
|
|
2593
|
-
entry,
|
|
2594
|
-
metaRequest
|
|
2595
|
-
});
|
|
2596
|
-
}
|
|
2597
|
-
for (const [name, existing] of Object.entries(existingMap)) {
|
|
2598
|
-
if (!existing) continue;
|
|
2599
|
-
const label = existing.label;
|
|
2600
|
-
if (label && label !== appName) resourceOwners.add(label);
|
|
2601
|
-
if (label === appName) changeSet.deletes.push({
|
|
2602
|
-
name,
|
|
2603
|
-
workspaceId
|
|
2604
|
-
});
|
|
2691
|
+
/**
 * Confirm deletion of important resources before an apply proceeds.
 * @param resources - Resources scheduled for deletion; each has `resourceType` and `resourceName`
 * @param yes - Whether to auto-confirm without prompting (--yes flag)
 * @returns Promise that resolves when confirmation completes
 * @throws Error when the user declines the interactive prompt (apply is cancelled)
 */
async function confirmImportantResourceDeletion(resources, yes) {
	// Nothing to confirm when no deletions are planned.
	if (resources.length === 0) return;
	logger.warn("The following resources will be deleted:");
	logger.log(` ${styles.info("Resources")}:`);
	for (const r of resources) logger.log(` • ${styles.bold(r.resourceType)} ${styles.error(`"${r.resourceName}"`)}`);
	logger.newline();
	logger.log(styles.warning(" Deleting these resources will permanently remove all associated data."));
	// --yes skips the interactive prompt entirely.
	if (yes) {
		logger.success("Deleting resources (--yes flag specified)...", { mode: "plain" });
		return;
	}
	// Default to "no": deletion is destructive, so a bare Enter must not confirm.
	if (!await prompt.confirm({
		message: "Are you sure you want to delete these resources?",
		default: false
	})) throw new Error(ml`
		Apply cancelled. Resources will not be deleted.
		To override, run again and confirm, or use --yes flag.
	`);
}
|
|
2710
|
+
|
|
2711
|
+
//#endregion
|
|
2712
|
+
//#region src/cli/shared/runtime-args.ts
|
|
2614
2713
|
/**
|
|
2615
|
-
*
|
|
2616
|
-
*
|
|
2617
|
-
*
|
|
2618
|
-
*
|
|
2619
|
-
*
|
|
2714
|
+
* Runtime args transformation for all services.
|
|
2715
|
+
*
|
|
2716
|
+
* Each service transforms server-side args/context into SDK-friendly format:
|
|
2717
|
+
* - Executor: server-side expression evaluated by platform before calling function
|
|
2718
|
+
* - Resolver: operationHook expression evaluated by platform before calling function
|
|
2719
|
+
*
|
|
2720
|
+
* The user field mapping (server → SDK) shared across services is defined in
|
|
2721
|
+
* `@/parser/service/tailordb` as `tailorUserMap`.
|
|
2620
2722
|
*/
|
|
2621
|
-
|
|
2622
|
-
|
|
2623
|
-
|
|
2624
|
-
|
|
2625
|
-
|
|
2626
|
-
|
|
2627
|
-
|
|
2628
|
-
|
|
2629
|
-
|
|
2630
|
-
|
|
2631
|
-
|
|
2632
|
-
|
|
2633
|
-
|
|
2634
|
-
|
|
2635
|
-
|
|
2636
|
-
|
|
2637
|
-
|
|
2638
|
-
|
|
2639
|
-
|
|
2640
|
-
|
|
2641
|
-
|
|
2642
|
-
|
|
2643
|
-
|
|
2644
|
-
|
|
2645
|
-
|
|
2646
|
-
|
|
2647
|
-
|
|
2648
|
-
|
|
2649
|
-
|
|
2650
|
-
|
|
2651
|
-
|
|
2652
|
-
|
|
2653
|
-
|
|
2654
|
-
|
|
2655
|
-
|
|
2723
|
+
/**
 * Actor field transformation expression (server → SDK).
 *
 * Evaluated server-side; maps the server's actor object to the SDK's
 * TailorActor shape:
 *   server `attributeMap` → SDK `attributes`
 *   server `attributes`   → SDK `attributeList`
 *   other fields          → passed through unchanged
 *   null/undefined actor  → null
 */
const ACTOR_TRANSFORM_EXPR = "actor: args.actor ? (({ attributeMap, attributes: attrList, ...rest }) => ({ ...rest, attributes: attributeMap, attributeList: attrList }))(args.actor) : null";
|
|
2733
|
+
/**
 * Build the JavaScript expression that transforms server-format executor event
 * args into SDK-format args at runtime.
 *
 * The Tailor Platform server delivers event args with server-side field names;
 * the SDK exposes different field names to user code. The returned expression
 * string performs the mapping when evaluated server-side.
 * @param triggerKind - The trigger kind discriminant from the parsed executor
 * @param env - Application env record to embed in the expression
 * @returns A JavaScript expression string, e.g. `({ ...args, ... })`
 * @throws Error for an unrecognized trigger kind
 */
function buildExecutorArgsExpr(triggerKind, env) {
	const envExpr = `env: ${JSON.stringify(env)}`;
	// Triggers whose args need only the shared namespace/actor/env mapping.
	const commonTriggers = [
		"schedule",
		"recordCreated",
		"recordUpdated",
		"recordDeleted",
		"idpUserCreated",
		"idpUserUpdated",
		"idpUserDeleted",
		"authAccessTokenIssued",
		"authAccessTokenRefreshed",
		"authAccessTokenRevoked"
	];
	if (commonTriggers.includes(triggerKind)) return `({ ...args, appNamespace: args.namespaceName, ${ACTOR_TRANSFORM_EXPR}, ${envExpr} })`;
	// Resolver executions additionally surface success/result/error fields.
	if (triggerKind === "resolverExecuted") return `({ ...args, appNamespace: args.namespaceName, ${ACTOR_TRANSFORM_EXPR}, success: !!args.succeeded, result: args.succeeded?.result.resolver, error: args.failed?.error, ${envExpr} })`;
	// Webhooks expose the raw request body under a camelCase name.
	if (triggerKind === "incomingWebhook") return `({ ...args, appNamespace: args.namespaceName, rawBody: args.raw_body, ${envExpr} })`;
	throw new Error(`Unknown trigger kind for args expression: ${triggerKind}`);
}
|
|
2658
2763
|
/**
|
|
2659
|
-
*
|
|
2660
|
-
*
|
|
2661
|
-
*
|
|
2662
|
-
*
|
|
2663
|
-
*
|
|
2764
|
+
* Build the operationHook expression for resolver pipelines.
|
|
2765
|
+
*
|
|
2766
|
+
* Transforms server context to SDK resolver context:
|
|
2767
|
+
* context.args → input
|
|
2768
|
+
* context.pipeline → spread into result
|
|
2769
|
+
* user (global var) → TailorUser (via tailorUserMap: workspace_id→workspaceId, attribute_map→attributes, attributes→attributeList)
|
|
2770
|
+
* env → injected as JSON
|
|
2771
|
+
* @param env - Application env record to embed in the expression
|
|
2772
|
+
* @returns A JavaScript expression string for the operationHook
|
|
2664
2773
|
*/
|
|
2665
|
-
|
|
2666
|
-
|
|
2667
|
-
if (phase === "create-update") {
|
|
2668
|
-
for (const create of changeSet.creates) {
|
|
2669
|
-
await uploadFunctionScript(client, workspaceId, create.entry, true);
|
|
2670
|
-
await client.setMetadata(create.metaRequest);
|
|
2671
|
-
}
|
|
2672
|
-
for (const update of changeSet.updates) {
|
|
2673
|
-
await uploadFunctionScript(client, workspaceId, update.entry, false);
|
|
2674
|
-
await client.setMetadata(update.metaRequest);
|
|
2675
|
-
}
|
|
2676
|
-
} else if (phase === "delete") await Promise.all(changeSet.deletes.map((del) => client.deleteFunctionRegistry({
|
|
2677
|
-
workspaceId: del.workspaceId,
|
|
2678
|
-
name: del.name
|
|
2679
|
-
})));
|
|
2774
|
+
/**
 * Build the operationHook expression for resolver pipelines.
 *
 * The returned string is evaluated server-side: it spreads `context.pipeline`
 * into the result, exposes `context.args` as `input`, maps the global `user`
 * through `tailorUserMap`, and embeds the application env as a JSON literal.
 * @param env - Application env record to embed in the expression
 * @returns A JavaScript expression string for the operationHook
 */
function buildResolverOperationHookExpr(env) {
	const envJson = JSON.stringify(env);
	return `({ ...context.pipeline, input: context.args, user: ${tailorUserMap}, env: ${envJson} });`;
}
|
|
2681
2777
|
|
|
2682
2778
|
//#endregion
|
|
@@ -9685,6 +9781,17 @@ const removeCommand$1 = defineAppCommand({
|
|
|
9685
9781
|
}
|
|
9686
9782
|
});
|
|
9687
9783
|
|
|
9784
|
+
//#endregion
|
|
9785
|
+
//#region src/cli/shared/beta.ts
|
|
9786
|
+
/**
 * Warn that a feature is in beta.
 * @param {string} featureName - Name of the beta feature (e.g., "tailordb erd", "tailordb migration")
 */
function logBetaWarning(featureName) {
	const notice = `The '${featureName}' command is a beta feature and may introduce breaking changes in future releases.`;
	logger.warn(notice);
	logger.newline();
}
|
|
9794
|
+
|
|
9688
9795
|
//#endregion
|
|
9689
9796
|
//#region src/cli/commands/show.ts
|
|
9690
9797
|
function applicationInfo(app) {
|
|
@@ -9742,17 +9849,6 @@ const showCommand = defineAppCommand({
|
|
|
9742
9849
|
}
|
|
9743
9850
|
});
|
|
9744
9851
|
|
|
9745
|
-
//#endregion
|
|
9746
|
-
//#region src/cli/shared/beta.ts
|
|
9747
|
-
/**
 * Warn that a feature is in beta.
 * @param {string} featureName - Name of the beta feature (e.g., "tailordb erd", "tailordb migration")
 */
function logBetaWarning(featureName) {
	const notice = `The '${featureName}' command is a beta feature and may introduce breaking changes in future releases.`;
	logger.warn(notice);
	logger.newline();
}
|
|
9755
|
-
|
|
9756
9852
|
//#endregion
|
|
9757
9853
|
//#region src/cli/shared/editor.ts
|
|
9758
9854
|
const DEFAULT_EDITOR = "editor";
|
|
@@ -10326,7 +10422,7 @@ async function generate(options) {
|
|
|
10326
10422
|
if (options.init) await handleInitOption(namespacesWithMigrations, options.yes);
|
|
10327
10423
|
let pluginManager;
|
|
10328
10424
|
if (plugins.length > 0) pluginManager = new PluginManager(plugins);
|
|
10329
|
-
const { defineApplication } = await import("./application-
|
|
10425
|
+
const { defineApplication } = await import("./application-CxH6Yp54.mjs");
|
|
10330
10426
|
const application = defineApplication({
|
|
10331
10427
|
config,
|
|
10332
10428
|
pluginManager
|
|
@@ -12504,5 +12600,5 @@ function printGqlResult(result, options = {}) {
|
|
|
12504
12600
|
}
|
|
12505
12601
|
|
|
12506
12602
|
//#endregion
|
|
12507
|
-
export { listExecutors as $, truncate as A, getLatestMigrationNumber as At, listOAuth2Clients as B, hasChanges as Bt, listCommand$2 as C, INITIAL_SCHEMA_NUMBER as Ct, resumeWorkflow as D, compareSnapshots as Dt, resumeCommand as E, compareLocalTypesWithSnapshot as Et,
|
|
12508
|
-
//# sourceMappingURL=query-
|
|
12603
|
+
export { listExecutors as $, truncate as A, getLatestMigrationNumber as At, listOAuth2Clients as B, hasChanges as Bt, listCommand$2 as C, INITIAL_SCHEMA_NUMBER as Ct, resumeWorkflow as D, compareSnapshots as Dt, resumeCommand as E, compareLocalTypesWithSnapshot as Et, showCommand as F, isValidMigrationNumber as Ft, listCommand$5 as G, apiCall as Gt, getOAuth2Client as H, prompt as Ht, logBetaWarning as I, loadDiff as It, listWebhookExecutors as J, commonArgs as Jt, listMachineUsers as K, apiCommand as Kt, remove as L, reconstructSnapshotFromMigrations as Lt, generate as M, getMigrationFilePath as Mt, generateCommand as N, getMigrationFiles as Nt, listCommand$3 as O, createSnapshotFromLocalTypes as Ot, show as P, getNextMigrationNumber as Pt, listCommand$6 as Q, workspaceArgs as Qt, removeCommand$1 as R, formatDiffSummary as Rt, listApps as S, DIFF_FILE_NAME as St, healthCommand as T, SCHEMA_FILE_NAME as Tt, getMachineUserToken as U, trnPrefix as Ut, getCommand$1 as V, getNamespacesWithMigrations as Vt, tokenCommand as W, generateUserTypes as Wt, triggerCommand as X, deploymentArgs as Xt, webhookCommand as Y, confirmationArgs as Yt, triggerExecutor as Z, isVerbose as Zt, getWorkspace as _, waitForExecution$1 as _t, updateUser as a, startWorkflow as at, createCommand as b, bundleMigrationScript as bt, listCommand as c, executionsCommand as ct, inviteUser as d, functionExecutionStatusToString as dt, getExecutorJob as et, restoreCommand as f, formatKeyValueTable as ft, getCommand as g, executeScript as gt, listWorkspaces as h, apply as ht, updateCommand as i, startCommand as it, truncateCommand as j, getMigrationDirPath as jt, listWorkflows as k, formatMigrationNumber as kt, listUsers as l, getWorkflowExecution as lt, listCommand$1 as m, getExecutor as mt, queryCommand as n, listExecutorJobs as nt, removeCommand as o, getCommand$2 as ot, restoreWorkspace as p, getCommand$3 as pt, generate$1 as q, defineAppCommand as qt, isCLIError as r, watchExecutorJob as rt, 
removeUser as s, getWorkflow as st, query as t, jobsCommand as tt, inviteCommand as u, listWorkflowExecutions as ut, deleteCommand as v, MIGRATION_LABEL_KEY as vt, getAppHealth as w, MIGRATE_FILE_NAME as wt, createWorkspace as x, DB_TYPES_FILE_NAME as xt, deleteWorkspace as y, parseMigrationLabelNumber as yt, listCommand$4 as z, formatMigrationDiff as zt };
|
|
12604
|
+
//# sourceMappingURL=query-B8ml6ClT.mjs.map
|