@tailor-platform/sdk 1.2.6 → 1.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +57 -0
- package/dist/cli/index.mjs +554 -13
- package/dist/cli/index.mjs.map +1 -1
- package/dist/cli/lib.d.mts +1 -1
- package/dist/cli/lib.mjs +1 -1
- package/dist/config-BmQRlW1j.mjs.map +1 -1
- package/dist/configure/index.d.mts +2 -2
- package/dist/{index-Cf3uhB_7.d.mts → index-ByDQH56l.d.mts} +33 -4
- package/dist/{list-WMFoshbx.mjs → list-BCl3ViG0.mjs} +53 -26
- package/dist/list-BCl3ViG0.mjs.map +1 -0
- package/dist/{src-BhwQdist.mjs → src-Bhwd-tei.mjs} +2 -2
- package/dist/{src-BhwQdist.mjs.map → src-Bhwd-tei.mjs.map} +1 -1
- package/dist/{types-D2rYkxav.d.mts → types-22f0o6xW.d.mts} +10 -9
- package/dist/utils/test/index.d.mts +2 -2
- package/docs/cli/application.md +129 -0
- package/docs/cli-reference.md +26 -9
- package/docs/services/auth.md +5 -0
- package/docs/services/executor.md +6 -0
- package/docs/services/idp.md +13 -1
- package/docs/services/resolver.md +6 -0
- package/docs/services/staticwebsite.md +12 -1
- package/docs/services/tailordb.md +126 -7
- package/docs/services/workflow.md +8 -0
- package/package.json +26 -13
- package/dist/list-WMFoshbx.mjs.map +0 -1
package/dist/cli/index.mjs
CHANGED
@@ -1,17 +1,19 @@
 #!/usr/bin/env node
 import "../chunk-CIV_ash9.mjs";
 import "../config-BmQRlW1j.mjs";
-import { $ as withCommonArgs, B as loadWorkspaceId, D as tokenCommand, F as loadConfig, G as initOAuth2Client, H as writePlatformConfig, J as PATScope, K as initOperatorClient, L as apiCommand, N as applyCommand, O as listCommand$5, Q as jsonArgs, R as fetchLatestToken, S as listCommand$6, U as fetchAll, V as readPlatformConfig, W as fetchUserInfo, X as confirmationArgs, Y as commonArgs, Z as deploymentArgs, a as createCommand$3, d as listCommand$7, et as workspaceArgs, h as executionsCommand, j as generateCommand, l as startCommand, p as getCommand$2, q as readPackageJson, r as deleteCommand$3, s as resumeCommand, t as listCommand$8, tt as logger, w as getCommand$1, x as removeCommand, y as showCommand, z as loadAccessToken } from "../list-
-import { register } from "node:module";
+import { $ as withCommonArgs, B as loadWorkspaceId, D as tokenCommand, F as loadConfig, G as initOAuth2Client, H as writePlatformConfig, J as PATScope, K as initOperatorClient, L as apiCommand, N as applyCommand, O as listCommand$5, Q as jsonArgs, R as fetchLatestToken, S as listCommand$6, U as fetchAll, V as readPlatformConfig, W as fetchUserInfo, X as confirmationArgs, Y as commonArgs, Z as deploymentArgs, a as createCommand$3, d as listCommand$7, et as workspaceArgs, h as executionsCommand, j as generateCommand, l as startCommand, p as getCommand$2, q as readPackageJson, r as deleteCommand$3, s as resumeCommand, t as listCommand$8, tt as logger, w as getCommand$1, x as removeCommand, y as showCommand, z as loadAccessToken } from "../list-BCl3ViG0.mjs";
+import { createRequire, register } from "node:module";
 import { defineCommand, runCommand, runMain } from "citty";
+import * as fs$1 from "node:fs";
+import * as path from "pathe";
 import { generateCodeVerifier } from "@badgateway/oauth2-client";
 import { timestampDate } from "@bufbuild/protobuf/wkt";
 import { Code, ConnectError } from "@connectrpc/connect";
+import { findUpSync } from "find-up-simple";
 import ml from "multiline-ts";
-import * as
-import * as fs from "fs";
+import * as fs$2 from "fs";
 import pLimit from "p-limit";
-import { spawnSync } from "node:child_process";
+import { spawn, spawnSync } from "node:child_process";
 import * as crypto from "node:crypto";
 import * as http from "node:http";
 import open from "open";
@@ -55,7 +57,7 @@ const initCommand = defineCommand({
 const version = packageJson$1.version && packageJson$1.version !== "0.0.0" ? packageJson$1.version : "latest";
 let packageManager = detectPackageManager();
 if (!packageManager) {
-logger.warn("
+logger.warn("Could not detect package manager, defaulting to npm");
 packageManager = "npm";
 }
 const initArgs = [
@@ -817,6 +819,15 @@ function shouldIgnoreFile(filePath) {
 const fileName = path.basename(filePath).toLowerCase();
 return IGNORED_FILES.has(fileName);
 }
+/**
+ * Deploy a static website by creating a deployment, uploading files, and publishing it.
+ * @param {OperatorClient} client - Operator client instance
+ * @param {string} workspaceId - Workspace ID
+ * @param {string} name - Static website name
+ * @param {string} distDir - Directory containing static site files
+ * @param {boolean} [showProgress=true] - Whether to show upload progress
+ * @returns {Promise<DeployResult>} Deployment result with URL and skipped files
+ */
 async function deployStaticWebsite(client, workspaceId, name, distDir, showProgress = true) {
 const { deploymentId } = await client.createDeployment({
 workspaceId,
@@ -851,9 +862,15 @@ async function uploadDirectory(client, workspaceId, deploymentId, rootDir, showP
 if (progress) progress.finish();
 return skippedFiles;
 }
+/**
+ * Recursively collect all deployable files under the given directory.
+ * @param {string} rootDir - Root directory to scan
+ * @param {string} [currentDir=""] - Current relative directory (for recursion)
+ * @returns {Promise<string[]>} List of file paths relative to rootDir
+ */
 async function collectFiles(rootDir, currentDir = "") {
 const dirPath = path.join(rootDir, currentDir);
-const entries = await fs.promises.readdir(dirPath, { withFileTypes: true });
+const entries = await fs$2.promises.readdir(dirPath, { withFileTypes: true });
 const files = [];
 for (const entry of entries) {
 const rel = path.join(currentDir, entry.name);
@@ -864,16 +881,15 @@ async function collectFiles(rootDir, currentDir = "") {
 }
 return files;
 }
-async function uploadSingleFile(client, workspaceId, deploymentId, rootDir,
-const absPath = path.join(rootDir,
-const filePath = relativePath.split(path.sep).join("/");
+async function uploadSingleFile(client, workspaceId, deploymentId, rootDir, filePath, skippedFiles) {
+const absPath = path.join(rootDir, filePath);
 const mime = lookup(filePath);
 if (!mime) {
 skippedFiles.push(`${filePath} (unsupported content type; no MIME mapping found)`);
 return;
 }
 const contentType = mime;
-const readStream = fs.createReadStream(absPath, { highWaterMark: CHUNK_SIZE });
+const readStream = fs$2.createReadStream(absPath, { highWaterMark: CHUNK_SIZE });
 async function* requestStream() {
 yield { payload: {
 case: "initialMetadata",
@@ -902,6 +918,11 @@ async function uploadSingleFile(client, workspaceId, deploymentId, rootDir, rela
 }
 await withTimeout(uploadWithLogging(), 2 * 6e4, `Upload timed out for "${filePath}"`);
 }
+/**
+ * Log skipped files after a deployment, including reasons for skipping.
+ * @param {string[]} skippedFiles - List of skipped file descriptions
+ * @returns {void}
+ */
 function logSkippedFiles(skippedFiles) {
 if (skippedFiles.length === 0) return;
 logger.warn("Deployment completed, but some files failed to upload. These files may have unsupported content types or other validation issues. Please review the list below:");
@@ -941,7 +962,7 @@ const deployCommand = defineCommand({
 workspaceId: args["workspace-id"],
 profile: args.profile
 });
-if (!fs.existsSync(dir) || !fs.statSync(dir).isDirectory()) throw new Error(`Directory not found or not a directory: ${dir}`);
+if (!fs$2.existsSync(dir) || !fs$2.statSync(dir).isDirectory()) throw new Error(`Directory not found or not a directory: ${dir}`);
 const { url, skippedFiles } = await withTimeout(deployStaticWebsite(client, workspaceId, name, dir, !args.json), 10 * 6e4, "Deployment timed out after 10 minutes.");
 if (args.json) logger.out({
 name,
@@ -1084,6 +1105,523 @@ const staticwebsiteCommand = defineCommand({
 }
 });
 
+//#endregion
+//#region src/cli/utils/resolve-cli-bin.ts
+/**
+ * Resolve a CLI binary path from the caller's project dependencies.
+ * @param {ResolveCliBinOptions} options - Resolution options for locating the CLI binary.
+ * @returns {string} Absolute path to the CLI binary entry.
+ */
+function resolveCliBinPath({ cwd, packageName, binName, installHint }) {
+const projectPackageJsonPath = findUpSync("package.json", { cwd });
+if (!projectPackageJsonPath) throw new Error(`Failed to locate package.json from ${cwd}.`);
+const requireFromProject = createRequire(projectPackageJsonPath);
+let pkgJsonPath;
+try {
+pkgJsonPath = requireFromProject.resolve(`${packageName}/package.json`);
+} catch {
+throw new Error(`Missing optional dependency \`${packageName}\`. Install it in your project (e.g. \`${installHint}\`).`);
+}
+const binRelativePath = JSON.parse(fs$1.readFileSync(pkgJsonPath, "utf8")).bin?.[binName];
+if (!binRelativePath) throw new Error(`\`${packageName}\` does not expose a \`${binName}\` binary entry.`);
+return path.resolve(path.dirname(pkgJsonPath), binRelativePath);
+}
+
+//#endregion
+//#region src/cli/tailordb/erd/schema.ts
+/**
+ * Convert TailorDB field config to tbls column definition.
+ * @param {string} fieldName - Field name
+ * @param {TailorDBType_FieldConfig} fieldConfig - TailorDB field configuration
+ * @returns {TblsColumn} tbls column definition
+ */
+function toTblsColumn(fieldName, fieldConfig) {
+const baseType = fieldConfig.type || "string";
+return {
+name: fieldName,
+type: fieldConfig.array ? `${baseType}[]` : baseType,
+nullable: !fieldConfig.required,
+comment: fieldConfig.description ?? ""
+};
+}
+/**
+ * Build tbls schema JSON from TailorDB types.
+ * @param {TailorDBProtoType[]} types - TailorDB types fetched from platform
+ * @param {string} namespace - TailorDB namespace
+ * @returns {TblsSchema} tbls-compatible schema representation
+ */
+function buildTblsSchema(types, namespace) {
+const tables = [];
+const relations = [];
+const referencedByTable = {};
+const constraintsByTable = {};
+const enumsMap = /* @__PURE__ */ new Map();
+for (const type of types) {
+const tableName = type.name;
+const schema = type.schema;
+const columns = [];
+const tableConstraints = [];
+columns.push({
+name: "id",
+type: "uuid",
+nullable: false,
+comment: ""
+});
+tableConstraints.push({
+name: `pk_${tableName}`,
+type: "PRIMARY KEY",
+def: "",
+table: tableName,
+columns: ["id"]
+});
+if (schema) for (const [fieldName, fieldConfig] of Object.entries(schema.fields ?? {})) {
+columns.push(toTblsColumn(fieldName, fieldConfig));
+if (fieldConfig.type === "enum" && fieldConfig.allowedValues.length > 0) {
+const enumName = `${tableName}_${fieldName}`;
+let values = enumsMap.get(enumName);
+if (!values) {
+values = /* @__PURE__ */ new Set();
+enumsMap.set(enumName, values);
+}
+for (const value of fieldConfig.allowedValues) values.add(value.value);
+}
+if (fieldConfig.foreignKey && fieldConfig.foreignKeyType) {
+const foreignTable = fieldConfig.foreignKeyType;
+const foreignColumn = fieldConfig.foreignKeyField || "id";
+const childCardinality = fieldConfig.required ? "exactly_one" : "zero_or_one";
+relations.push({
+table: tableName,
+columns: [fieldName],
+parent_table: foreignTable,
+parent_columns: [foreignColumn],
+cardinality: childCardinality,
+parent_cardinality: "zero_or_more",
+def: ""
+});
+tableConstraints.push({
+name: `fk_${tableName}_${fieldName}`,
+type: "FOREIGN KEY",
+def: "",
+table: tableName,
+columns: [fieldName],
+referenced_table: foreignTable,
+referenced_columns: [foreignColumn]
+});
+if (!referencedByTable[tableName]) referencedByTable[tableName] = /* @__PURE__ */ new Set();
+referencedByTable[tableName].add(foreignTable);
+}
+}
+constraintsByTable[tableName] = tableConstraints;
+tables.push({
+name: tableName,
+type: "table",
+comment: schema?.description ?? "",
+columns,
+indexes: [],
+constraints: constraintsByTable[tableName] ?? [],
+triggers: [],
+def: "",
+referenced_tables: []
+});
+}
+for (const table of tables) {
+const referenced = referencedByTable[table.name];
+table.referenced_tables = referenced ? Array.from(referenced) : [];
+}
+const enums = [];
+for (const [name, values] of enumsMap.entries()) enums.push({
+name,
+values: Array.from(values)
+});
+return {
+name: namespace,
+tables,
+relations,
+enums
+};
+}
+/**
+ * Export apply-applied TailorDB schema for a namespace as tbls-compatible JSON.
+ * @param {TailorDBSchemaOptions} options - Export options
+ * @returns {Promise<TblsSchema>} tbls schema representation
+ */
+async function exportTailorDBSchema(options) {
+const { client, workspaceId, namespace } = options;
+const types = await fetchAll(async (pageToken) => {
+try {
+const { tailordbTypes, nextPageToken } = await client.listTailorDBTypes({
+workspaceId,
+namespaceName: namespace,
+pageToken
+});
+return [tailordbTypes, nextPageToken];
+} catch (error) {
+if (error instanceof ConnectError && error.code === Code.NotFound) return [[], ""];
+throw error;
+}
+});
+if (types.length === 0) logger.warn(`No TailorDB types found in namespace "${namespace}". Returning empty schema.`);
+return buildTblsSchema(types, namespace);
+}
+/**
+ * Writes the TailorDB schema to a file in tbls-compatible JSON format.
+ * @param {WriteSchemaOptions} options - The options for writing the schema file.
+ */
+async function writeTblsSchemaToFile(options) {
+const schema = await exportTailorDBSchema(options);
+const json = JSON.stringify(schema, null, 2);
+fs$1.mkdirSync(path.dirname(options.outputPath), { recursive: true });
+fs$1.writeFileSync(options.outputPath, json, "utf8");
+const relativePath = path.relative(process.cwd(), options.outputPath);
+logger.success(`Wrote ERD schema to ${relativePath}`);
+}
+
+//#endregion
+//#region src/cli/utils/beta.ts
+/**
+ * Warn that the ERD CLI is a beta feature.
+ */
+function logErdBetaWarning() {
+logger.warn("The ERD command is a beta feature and may introduce breaking changes in future releases.");
+logger.newline();
+}
+
+//#endregion
+//#region src/cli/tailordb/erd/utils.ts
+/**
+ * Initialize shared ERD command context.
+ * @param {{ profile?: string; workspaceId?: string; config?: string }} args - CLI arguments.
+ * @param {string | undefined} args.profile - Workspace profile.
+ * @param {string | undefined} args.workspaceId - Workspace ID override.
+ * @param {string | undefined} args.config - Config path override.
+ * @returns {Promise<ErdCommandContext>} Initialized context.
+ */
+async function initErdContext(args) {
+logErdBetaWarning();
+const client = await initOperatorClient(await loadAccessToken({
+useProfile: true,
+profile: args.profile
+}));
+const workspaceId = loadWorkspaceId({
+workspaceId: args.workspaceId,
+profile: args.profile
+});
+const { config } = await loadConfig(args.config);
+return {
+client,
+workspaceId,
+config
+};
+}
+
+//#endregion
+//#region src/cli/tailordb/erd/export.ts
+const DEFAULT_ERD_BASE_DIR = ".tailor-sdk/erd";
+/**
+ * Resolve TailorDB config and namespace.
+ * @param {AppConfig} config - Loaded Tailor SDK config.
+ * @param {string | undefined} explicitNamespace - Namespace override.
+ * @returns {{ namespace: string; erdSite: string | undefined }} Resolved namespace and erdSite.
+ */
+function resolveDbConfig(config, explicitNamespace) {
+const namespace = explicitNamespace ?? Object.keys(config.db ?? {})[0];
+if (!namespace) throw new Error("No TailorDB namespaces found in config. Please define db services in tailor.config.ts or pass --namespace.");
+const dbConfig = config.db?.[namespace];
+if (!dbConfig || typeof dbConfig !== "object" || "external" in dbConfig) throw new Error(`TailorDB namespace "${namespace}" not found in config.db.`);
+return {
+namespace,
+erdSite: dbConfig.erdSite
+};
+}
+/**
+ * Get all namespaces with erdSite configured.
+ * @param {AppConfig} config - Loaded Tailor SDK config.
+ * @returns {Array<{ namespace: string; erdSite: string }>} Namespaces with erdSite.
+ */
+function resolveAllErdSites(config) {
+const results = [];
+for (const [namespace, dbConfig] of Object.entries(config.db ?? {})) if (dbConfig && typeof dbConfig === "object" && !("external" in dbConfig) && dbConfig.erdSite) results.push({
+namespace,
+erdSite: dbConfig.erdSite
+});
+return results;
+}
+/**
+ * Run the liam CLI to build an ERD static site from a schema file.
+ * @param {string} schemaPath - Path to the ERD schema JSON file
+ * @param {string} cwd - Working directory where liam will run (dist is created here)
+ * @returns {Promise<void>} Resolves when the build completes successfully
+ */
+async function runLiamBuild(schemaPath, cwd) {
+fs$1.mkdirSync(cwd, { recursive: true });
+return await new Promise((resolve, reject) => {
+let liamBinPath;
+try {
+liamBinPath = resolveCliBinPath({
+cwd,
+packageName: "@liam-hq/cli",
+binName: "liam",
+installHint: "npm i -D @liam-hq/cli"
+});
+} catch (error) {
+logger.error(String(error));
+reject(error);
+return;
+}
+const child = spawn(process.execPath, [
+liamBinPath,
+"erd",
+"build",
+"--format",
+"tbls",
+"--input",
+schemaPath
+], {
+stdio: "inherit",
+cwd
+});
+child.on("error", (error) => {
+logger.error("Failed to run `@liam-hq/cli`. Ensure it is installed in your project.");
+reject(error);
+});
+child.on("exit", (code) => {
+if (code === 0) resolve();
+else {
+logger.error("liam CLI exited with a non-zero code. Ensure `@liam-hq/cli erd build --format tbls --input schema.json` works in your project.");
+reject(/* @__PURE__ */ new Error(`liam CLI exited with code ${code ?? 1}`));
+}
+});
+});
+}
+/**
+ * Export TailorDB schema and build ERD artifacts via liam.
+ * @param {TailorDBSchemaOptions & { outputPath: string; erdDir: string }} options - Build options.
+ */
+async function prepareErdBuild(options) {
+await writeTblsSchemaToFile(options);
+await runLiamBuild(options.outputPath, options.erdDir);
+}
+/**
+ * Prepare ERD builds for one or more namespaces.
+ * @param {{ client: OperatorClient; workspaceId: string; config: AppConfig; namespace?: string; outputDir?: string }} options - Build options.
+ * @param {OperatorClient} options.client - Operator client.
+ * @param {string} options.workspaceId - Workspace ID.
+ * @param {AppConfig} options.config - Loaded Tailor config.
+ * @param {string | undefined} options.namespace - Namespace override.
+ * @param {string | undefined} options.outputDir - Output directory override.
+ * @returns {Promise<ErdBuildResult[]>} Build results by namespace.
+ */
+async function prepareErdBuilds(options) {
+const { client, workspaceId, config } = options;
+const baseDir = options.outputDir ?? path.resolve(process.cwd(), DEFAULT_ERD_BASE_DIR);
+let targets;
+if (options.namespace) {
+const { namespace, erdSite } = resolveDbConfig(config, options.namespace);
+const erdDir = path.join(baseDir, namespace);
+targets = [{
+namespace,
+erdSite,
+schemaOutputPath: path.join(erdDir, "schema.json"),
+distDir: path.join(erdDir, "dist"),
+erdDir
+}];
+} else {
+const erdSites = resolveAllErdSites(config);
+if (erdSites.length === 0) throw new Error("No namespaces with erdSite configured found. Add erdSite: \"<static-website-name>\" to db.<namespace> in tailor.config.ts.");
+logger.info(`Found ${erdSites.length} namespace(s) with erdSite configured.`);
+targets = erdSites.map(({ namespace, erdSite }) => {
+const erdDir = path.join(baseDir, namespace);
+return {
+namespace,
+erdSite,
+schemaOutputPath: path.join(erdDir, "schema.json"),
+distDir: path.join(erdDir, "dist"),
+erdDir
+};
+});
+}
+await Promise.all(targets.map((target) => prepareErdBuild({
+namespace: target.namespace,
+client,
+workspaceId,
+outputPath: target.schemaOutputPath,
+erdDir: target.erdDir
+})));
+return targets;
+}
+const erdExportCommand = defineCommand({
+meta: {
+name: "export",
+description: "Export Liam ERD dist from applied TailorDB schema (beta)"
+},
+args: {
+...commonArgs,
+...deploymentArgs,
+...jsonArgs,
+namespace: {
+type: "string",
+description: "TailorDB namespace name (optional if only one namespace is defined in config)",
+alias: "n"
+},
+output: {
+type: "string",
+description: "Output directory path for tbls-compatible ERD JSON (writes to <outputDir>/<namespace>/schema.json)",
+alias: "o",
+default: DEFAULT_ERD_BASE_DIR
+}
+},
+run: withCommonArgs(async (args) => {
+const { client, workspaceId, config } = await initErdContext(args);
+const outputDir = path.resolve(process.cwd(), String(args.output));
+const results = await prepareErdBuilds({
+client,
+workspaceId,
+config,
+namespace: args.namespace,
+outputDir
+});
+if (args.json) logger.out(results.map((result) => ({
+namespace: result.namespace,
+distDir: result.distDir,
+schemaOutputPath: result.schemaOutputPath
+})));
+else for (const result of results) {
+logger.out(`Exported ERD for namespace "${result.namespace}"`);
+logger.out(` - Liam ERD dist: ${result.distDir}`);
+logger.out(` - tbls schema.json: ${result.schemaOutputPath}`);
+}
+})
+});
+
+//#endregion
+//#region src/cli/tailordb/erd/deploy.ts
+const erdDeployCommand = defineCommand({
+meta: {
+name: "deploy",
+description: "Deploy ERD static website for TailorDB namespace(s) (beta)"
+},
+args: {
+...commonArgs,
+...deploymentArgs,
+...jsonArgs,
+namespace: {
+type: "string",
+description: "TailorDB namespace name (optional - deploys all namespaces with erdSite if omitted)",
+alias: "n"
+}
+},
+run: withCommonArgs(async (args) => {
+const { client, workspaceId, config } = await initErdContext(args);
+const buildResults = await prepareErdBuilds({
+client,
+workspaceId,
+config,
+namespace: args.namespace
+});
+const deployResults = await Promise.all(buildResults.map(async (result) => {
+if (!result.erdSite) throw new Error(`No erdSite configured for namespace "${result.namespace}". Add erdSite: "<static-website-name>" to db.${result.namespace} in tailor.config.ts.`);
+if (!args.json) logger.info(`Deploying ERD for namespace "${result.namespace}" to site "${result.erdSite}"...`);
+const { url, skippedFiles } = await deployStaticWebsite(client, workspaceId, result.erdSite, result.distDir, !args.json);
+return {
+namespace: result.namespace,
+erdSite: result.erdSite,
+url,
+skippedFiles
+};
+}));
+if (args.json) logger.out(deployResults);
+else for (const result of deployResults) {
+logger.success(`ERD site "${result.erdSite}" deployed successfully.`);
+logger.out(result.url);
+logSkippedFiles(result.skippedFiles);
+}
+})
+});
+
+//#endregion
+//#region src/cli/tailordb/erd/serve.ts
+function formatServeCommand(namespace) {
+return `tailor-sdk tailordb erd serve --namespace ${namespace}`;
+}
+async function runServeDist(results) {
+if (results.length === 0) throw new Error("No ERD build results found.");
+const [primary, ...rest] = results;
+logger.info(`Serving ERD for namespace "${primary.namespace}".`);
+if (rest.length > 0) {
+const commands = rest.map((result) => ` - ${formatServeCommand(result.namespace)}`).join("\n");
+logger.warn(`Multiple namespaces found. To serve another namespace, run:\n${commands}`);
+}
+fs$1.mkdirSync(primary.erdDir, { recursive: true });
+return await new Promise((resolve, reject) => {
+let serveBinPath;
+try {
+serveBinPath = resolveCliBinPath({
+cwd: primary.erdDir,
+packageName: "serve",
+binName: "serve",
+installHint: "npm i -D serve"
+});
+} catch (error) {
+logger.error(String(error));
+reject(error);
+return;
+}
+const child = spawn(process.execPath, [serveBinPath, "dist"], {
+stdio: "inherit",
+cwd: primary.erdDir
+});
+child.on("error", (error) => {
+logger.error("Failed to run `serve dist`. Ensure `serve` is installed in your project.");
+reject(error);
+});
+child.on("exit", (code) => {
+if (code === 0) resolve();
+else {
+logger.error("serve CLI exited with a non-zero code. Ensure `serve dist` works in your project.");
+reject(/* @__PURE__ */ new Error(`serve CLI exited with code ${code ?? 1}`));
+}
+});
+});
+}
+const erdServeCommand = defineCommand({
+meta: {
+name: "serve",
+description: "Generate and serve ERD (liam build + `serve dist`) (beta)"
+},
+args: {
+...commonArgs,
+...deploymentArgs,
+namespace: {
+type: "string",
+description: "TailorDB namespace name (uses first namespace in config if not specified)",
+alias: "n"
+}
+},
+run: withCommonArgs(async (args) => {
+const { client, workspaceId, config } = await initErdContext(args);
+await runServeDist(await prepareErdBuilds({
+client,
+workspaceId,
+config,
+namespace: args.namespace
+}));
+})
+});
+
+//#endregion
+//#region src/cli/tailordb/erd/index.ts
+const erdCommand = defineCommand({
+meta: {
+name: "erd",
+description: "ERD utilities for TailorDB (beta)"
+},
+subCommands: {
+export: erdExportCommand,
+serve: erdServeCommand,
+deploy: erdDeployCommand
+}
+});
+
 //#endregion
 //#region src/cli/tailordb/truncate.ts
 async function truncateSingleType(options, client) {
@@ -1262,7 +1800,10 @@ const tailordbCommand = defineCommand({
 name: "tailordb",
 description: "Manage TailorDB tables and data"
 },
-subCommands: {
+subCommands: {
+erd: erdCommand,
+truncate: truncateCommand
+}
 });
 
 //#endregion
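
The new `tailordb erd` subcommands added above share one pipeline: `exportTailorDBSchema` fetches the applied TailorDB types, `buildTblsSchema` converts them into a tbls-compatible schema document, `writeTblsSchemaToFile` saves it as `schema.json`, and `runLiamBuild` hands that file to `@liam-hq/cli erd build --format tbls`. As a rough illustration only (the `mydb`, `Order`, `User`, and `userId` names below are hypothetical and not taken from the package), a namespace containing a single type with one required foreign-key field would yield a schema.json shaped roughly like this:

    {
      "name": "mydb",
      "tables": [
        {
          "name": "Order",
          "type": "table",
          "comment": "",
          "columns": [
            { "name": "id", "type": "uuid", "nullable": false, "comment": "" },
            { "name": "userId", "type": "uuid", "nullable": false, "comment": "" }
          ],
          "indexes": [],
          "constraints": [
            { "name": "pk_Order", "type": "PRIMARY KEY", "def": "", "table": "Order", "columns": ["id"] },
            { "name": "fk_Order_userId", "type": "FOREIGN KEY", "def": "", "table": "Order", "columns": ["userId"], "referenced_table": "User", "referenced_columns": ["id"] }
          ],
          "triggers": [],
          "def": "",
          "referenced_tables": ["User"]
        }
      ],
      "relations": [
        { "table": "Order", "columns": ["userId"], "parent_table": "User", "parent_columns": ["id"], "cardinality": "exactly_one", "parent_cardinality": "zero_or_more", "def": "" }
      ],
      "enums": []
    }

`erd export` writes this file to `<outputDir>/<namespace>/schema.json` (default `.tailor-sdk/erd`), `erd serve` runs the `serve` package against the generated `dist`, and `erd deploy` uploads `dist` to the static website named by `erdSite` under `db.<namespace>` in `tailor.config.ts`.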