@tailor-platform/sdk 1.2.6 → 1.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +90 -0
- package/dist/cli/index.mjs +532 -173
- package/dist/cli/index.mjs.map +1 -1
- package/dist/cli/lib.d.mts +78 -60
- package/dist/cli/lib.mjs +3 -3
- package/dist/cli/lib.mjs.map +1 -1
- package/dist/{config-BmQRlW1j.mjs → config-CBpYlVa-.mjs} +23 -34
- package/dist/config-CBpYlVa-.mjs.map +1 -0
- package/dist/configure/index.d.mts +2 -2
- package/dist/configure/index.mjs +34 -31
- package/dist/configure/index.mjs.map +1 -1
- package/dist/{index-Cf3uhB_7.d.mts → index-CTExbeYE.d.mts} +73 -68
- package/dist/{list-WMFoshbx.mjs → list-1cs_CGF8.mjs} +928 -510
- package/dist/list-1cs_CGF8.mjs.map +1 -0
- package/dist/{src-BhwQdist.mjs → src-Bhwd-tei.mjs} +2 -2
- package/dist/{src-BhwQdist.mjs.map → src-Bhwd-tei.mjs.map} +1 -1
- package/dist/{types-D2rYkxav.d.mts → types-Yxg4lgU0.d.mts} +53 -55
- package/dist/utils/test/index.d.mts +7 -7
- package/dist/utils/test/index.mjs +5 -5
- package/dist/utils/test/index.mjs.map +1 -1
- package/docs/cli/application.md +129 -0
- package/docs/cli-reference.md +26 -9
- package/docs/services/auth.md +5 -0
- package/docs/services/executor.md +6 -0
- package/docs/services/idp.md +13 -1
- package/docs/services/resolver.md +6 -0
- package/docs/services/staticwebsite.md +12 -1
- package/docs/services/tailordb.md +126 -7
- package/docs/services/workflow.md +8 -0
- package/package.json +27 -14
- package/dist/config-BmQRlW1j.mjs.map +0 -1
- package/dist/list-WMFoshbx.mjs.map +0 -1
package/dist/cli/index.mjs
CHANGED

@@ -1,17 +1,19 @@
 #!/usr/bin/env node
 import "../chunk-CIV_ash9.mjs";
-import "../config-
-import { $ as
-import { register } from "node:module";
+import "../config-CBpYlVa-.mjs";
+import { $ as deploymentArgs, A as listCommand$5, B as fetchLatestToken, C as removeCommand, E as getCommand$1, F as applyCommand, G as fetchAll, H as loadWorkspaceId, J as initOperatorClient, K as fetchUserInfo, L as loadConfig, N as generateCommand, Q as confirmationArgs, U as readPlatformConfig, V as loadAccessToken, W as writePlatformConfig, X as PATScope, Y as readPackageJson, Z as commonArgs, a as createCommand$3, d as listCommand$7, et as jsonArgs, h as executionsCommand, k as tokenCommand, l as startCommand, nt as workspaceArgs, p as getCommand$2, q as initOAuth2Client, r as deleteCommand$3, rt as logger, s as resumeCommand, t as listCommand$8, tt as withCommonArgs, w as listCommand$6, x as showCommand, y as truncateCommand, z as apiCommand } from "../list-1cs_CGF8.mjs";
+import { createRequire, register } from "node:module";
 import { defineCommand, runCommand, runMain } from "citty";
+import * as fs$1 from "node:fs";
+import * as path from "pathe";
 import { generateCodeVerifier } from "@badgateway/oauth2-client";
 import { timestampDate } from "@bufbuild/protobuf/wkt";
 import { Code, ConnectError } from "@connectrpc/connect";
+import { findUpSync } from "find-up-simple";
 import ml from "multiline-ts";
-import * as
-import * as fs from "fs";
+import * as fs$2 from "fs";
 import pLimit from "p-limit";
-import { spawnSync } from "node:child_process";
+import { spawn, spawnSync } from "node:child_process";
 import * as crypto from "node:crypto";
 import * as http from "node:http";
 import open from "open";
@@ -55,7 +57,7 @@ const initCommand = defineCommand({
 const version = packageJson$1.version && packageJson$1.version !== "0.0.0" ? packageJson$1.version : "latest";
 let packageManager = detectPackageManager();
 if (!packageManager) {
-logger.warn("
+logger.warn("Could not detect package manager, defaulting to npm");
 packageManager = "npm";
 }
 const initArgs = [
@@ -516,8 +518,8 @@ function secretInfo(secret) {
 }
 /**
 * List secrets in a Secret Manager vault.
-* @param
-* @returns
+* @param options - Secret listing options
+* @returns List of secrets
 */
 async function secretList(options) {
 const client = await initOperatorClient(await loadAccessToken({
@@ -693,8 +695,8 @@ function vaultInfo(vault) {
 }
 /**
 * List Secret Manager vaults in the workspace.
-* @param
-* @returns
+* @param options - Vault listing options
+* @returns List of vaults
 */
 async function vaultList(options) {
 const client = await initOperatorClient(await loadAccessToken({
@@ -772,9 +774,9 @@ const secretCommand = defineCommand({
 //#region src/cli/utils/progress.ts
 /**
 * Create a simple progress reporter that writes updates to stderr.
-* @param
-* @param
-* @returns
+* @param label - Label to prefix progress output
+* @param total - Total number of steps
+* @returns Progress helpers
 */
 function createProgress(label, total) {
 let current = 0;
@@ -794,10 +796,10 @@ function createProgress(label, total) {
 /**
 * Wrap a promise with a timeout, rejecting if the timeout elapses first.
 * @template T
-* @param
-* @param
-* @param
-* @returns
+* @param p - Promise to await
+* @param ms - Timeout in milliseconds
+* @param message - Error message on timeout
+* @returns Result of the original promise if it completes in time
 */
 async function withTimeout(p, ms, message) {
 return await Promise.race([p, setTimeout$1(ms).then(() => {
@@ -817,6 +819,15 @@ function shouldIgnoreFile(filePath) {
 const fileName = path.basename(filePath).toLowerCase();
 return IGNORED_FILES.has(fileName);
 }
+/**
+* Deploy a static website by creating a deployment, uploading files, and publishing it.
+* @param client - Operator client instance
+* @param workspaceId - Workspace ID
+* @param name - Static website name
+* @param distDir - Directory containing static site files
+* @param showProgress - Whether to show upload progress
+* @returns Deployment result with URL and skipped files
+*/
 async function deployStaticWebsite(client, workspaceId, name, distDir, showProgress = true) {
 const { deploymentId } = await client.createDeployment({
 workspaceId,
@@ -851,9 +862,15 @@ async function uploadDirectory(client, workspaceId, deploymentId, rootDir, showP
 if (progress) progress.finish();
 return skippedFiles;
 }
+/**
+* Recursively collect all deployable files under the given directory.
+* @param rootDir - Root directory to scan
+* @param currentDir - Current relative directory (for recursion)
+* @returns List of file paths relative to rootDir
+*/
 async function collectFiles(rootDir, currentDir = "") {
 const dirPath = path.join(rootDir, currentDir);
-const entries = await fs.promises.readdir(dirPath, { withFileTypes: true });
+const entries = await fs$2.promises.readdir(dirPath, { withFileTypes: true });
 const files = [];
 for (const entry of entries) {
 const rel = path.join(currentDir, entry.name);
@@ -864,16 +881,15 @@ async function collectFiles(rootDir, currentDir = "") {
 }
 return files;
 }
-async function uploadSingleFile(client, workspaceId, deploymentId, rootDir,
-const absPath = path.join(rootDir,
-const filePath = relativePath.split(path.sep).join("/");
+async function uploadSingleFile(client, workspaceId, deploymentId, rootDir, filePath, skippedFiles) {
+const absPath = path.join(rootDir, filePath);
 const mime = lookup(filePath);
 if (!mime) {
 skippedFiles.push(`${filePath} (unsupported content type; no MIME mapping found)`);
 return;
 }
 const contentType = mime;
-const readStream = fs.createReadStream(absPath, { highWaterMark: CHUNK_SIZE });
+const readStream = fs$2.createReadStream(absPath, { highWaterMark: CHUNK_SIZE });
 async function* requestStream() {
 yield { payload: {
 case: "initialMetadata",
@@ -902,6 +918,10 @@ async function uploadSingleFile(client, workspaceId, deploymentId, rootDir, rela
 }
 await withTimeout(uploadWithLogging(), 2 * 6e4, `Upload timed out for "${filePath}"`);
 }
+/**
+* Log skipped files after a deployment, including reasons for skipping.
+* @param skippedFiles - List of skipped file descriptions
+*/
 function logSkippedFiles(skippedFiles) {
 if (skippedFiles.length === 0) return;
 logger.warn("Deployment completed, but some files failed to upload. These files may have unsupported content types or other validation issues. Please review the list below:");
@@ -941,7 +961,7 @@ const deployCommand = defineCommand({
 workspaceId: args["workspace-id"],
 profile: args.profile
 });
-if (!fs.existsSync(dir) || !fs.statSync(dir).isDirectory()) throw new Error(`Directory not found or not a directory: ${dir}`);
+if (!fs$2.existsSync(dir) || !fs$2.statSync(dir).isDirectory()) throw new Error(`Directory not found or not a directory: ${dir}`);
 const { url, skippedFiles } = await withTimeout(deployStaticWebsite(client, workspaceId, name, dir, !args.json), 10 * 6e4, "Deployment timed out after 10 minutes.");
 if (args.json) logger.out({
 name,
@@ -1008,10 +1028,8 @@ const getCommand = defineCommand({
 //#region src/cli/staticwebsite/list.ts
 /**
 * List static websites in the workspace.
-* @param
-* @
-* @param {string} [options.profile] - Workspace profile
-* @returns {Promise<StaticWebsiteInfo[]>} List of static websites
+* @param options - Static website listing options
+* @returns List of static websites
 */
 async function listStaticWebsites(options) {
 const client = await initOperatorClient(await loadAccessToken({
@@ -1085,176 +1103,515 @@ const staticwebsiteCommand = defineCommand({
 });

 //#endregion
-//#region src/cli/
-
-
-
-
-
+//#region src/cli/utils/resolve-cli-bin.ts
+/**
+* Resolve a CLI binary path from the caller's project dependencies.
+* @param options - Resolution options for locating the CLI binary.
+* @returns Absolute path to the CLI binary entry.
+*/
+function resolveCliBinPath(options) {
+const { cwd, packageName, binName, installHint } = options;
+const projectPackageJsonPath = findUpSync("package.json", { cwd });
+if (!projectPackageJsonPath) throw new Error(`Failed to locate package.json from ${cwd}.`);
+const requireFromProject = createRequire(projectPackageJsonPath);
+let pkgJsonPath;
+try {
+pkgJsonPath = requireFromProject.resolve(`${packageName}/package.json`);
+} catch {
+throw new Error(`Missing optional dependency \`${packageName}\`. Install it in your project (e.g. \`${installHint}\`).`);
+}
+const binRelativePath = JSON.parse(fs$1.readFileSync(pkgJsonPath, "utf8")).bin?.[binName];
+if (!binRelativePath) throw new Error(`\`${packageName}\` does not expose a \`${binName}\` binary entry.`);
+return path.resolve(path.dirname(pkgJsonPath), binRelativePath);
+}
+
+//#endregion
+//#region src/cli/tailordb/erd/schema.ts
+/**
+* Convert TailorDB field config to tbls column definition.
+* @param fieldName - Field name
+* @param fieldConfig - TailorDB field configuration
+* @returns tbls column definition
+*/
+function toTblsColumn(fieldName, fieldConfig) {
+const baseType = fieldConfig.type || "string";
+return {
+name: fieldName,
+type: fieldConfig.array ? `${baseType}[]` : baseType,
+nullable: !fieldConfig.required,
+comment: fieldConfig.description ?? ""
+};
+}
+/**
+* Build tbls schema JSON from TailorDB types.
+* @param types - TailorDB types fetched from platform
+* @param namespace - TailorDB namespace
+* @returns tbls-compatible schema representation
+*/
+function buildTblsSchema(types, namespace) {
+const tables = [];
+const relations = [];
+const referencedByTable = {};
+const constraintsByTable = {};
+const enumsMap = /* @__PURE__ */ new Map();
+for (const type of types) {
+const tableName = type.name;
+const schema = type.schema;
+const columns = [];
+const tableConstraints = [];
+columns.push({
+name: "id",
+type: "uuid",
+nullable: false,
+comment: ""
+});
+tableConstraints.push({
+name: `pk_${tableName}`,
+type: "PRIMARY KEY",
+def: "",
+table: tableName,
+columns: ["id"]
+});
+if (schema) for (const [fieldName, fieldConfig] of Object.entries(schema.fields ?? {})) {
+columns.push(toTblsColumn(fieldName, fieldConfig));
+if (fieldConfig.type === "enum" && fieldConfig.allowedValues.length > 0) {
+const enumName = `${tableName}_${fieldName}`;
+let values = enumsMap.get(enumName);
+if (!values) {
+values = /* @__PURE__ */ new Set();
+enumsMap.set(enumName, values);
+}
+for (const value of fieldConfig.allowedValues) values.add(value.value);
+}
+if (fieldConfig.foreignKey && fieldConfig.foreignKeyType) {
+const foreignTable = fieldConfig.foreignKeyType;
+const foreignColumn = fieldConfig.foreignKeyField || "id";
+const childCardinality = fieldConfig.required ? "exactly_one" : "zero_or_one";
+relations.push({
+table: tableName,
+columns: [fieldName],
+parent_table: foreignTable,
+parent_columns: [foreignColumn],
+cardinality: childCardinality,
+parent_cardinality: "zero_or_more",
+def: ""
+});
+tableConstraints.push({
+name: `fk_${tableName}_${fieldName}`,
+type: "FOREIGN KEY",
+def: "",
+table: tableName,
+columns: [fieldName],
+referenced_table: foreignTable,
+referenced_columns: [foreignColumn]
+});
+if (!referencedByTable[tableName]) referencedByTable[tableName] = /* @__PURE__ */ new Set();
+referencedByTable[tableName].add(foreignTable);
+}
+}
+constraintsByTable[tableName] = tableConstraints;
+tables.push({
+name: tableName,
+type: "table",
+comment: schema?.description ?? "",
+columns,
+indexes: [],
+constraints: constraintsByTable[tableName] ?? [],
+triggers: [],
+def: "",
+referenced_tables: []
+});
+}
+for (const table of tables) {
+const referenced = referencedByTable[table.name];
+table.referenced_tables = referenced ? Array.from(referenced) : [];
+}
+const enums = [];
+for (const [name, values] of enumsMap.entries()) enums.push({
+name,
+values: Array.from(values)
 });
-
+return {
+name: namespace,
+tables,
+relations,
+enums
+};
 }
-
-
-
-
+/**
+* Export apply-applied TailorDB schema for a namespace as tbls-compatible JSON.
+* @param options - Export options
+* @returns tbls schema representation
+*/
+async function exportTailorDBSchema(options) {
+const { client, workspaceId, namespace } = options;
+const types = await fetchAll(async (pageToken) => {
+try {
+const { tailordbTypes, nextPageToken } = await client.listTailorDBTypes({
+workspaceId,
+namespaceName: namespace,
+pageToken
+});
+return [tailordbTypes, nextPageToken];
+} catch (error) {
+if (error instanceof ConnectError && error.code === Code.NotFound) return [[], ""];
+throw error;
+}
 });
-logger.
+if (types.length === 0) logger.warn(`No TailorDB types found in namespace "${namespace}". Returning empty schema.`);
+return buildTblsSchema(types, namespace);
 }
-
-
-
-
-
+/**
+* Writes the TailorDB schema to a file in tbls-compatible JSON format.
+* @param options - The options for writing the schema file.
+*/
+async function writeTblsSchemaToFile(options) {
+const schema = await exportTailorDBSchema(options);
+const json = JSON.stringify(schema, null, 2);
+fs$1.mkdirSync(path.dirname(options.outputPath), { recursive: true });
+fs$1.writeFileSync(options.outputPath, json, "utf8");
+const relativePath = path.relative(process.cwd(), options.outputPath);
+logger.success(`Wrote ERD schema to ${relativePath}`);
 }
-
-
-
-
-
-
-
-
-
-continue;
-}
-return null;
+
+//#endregion
+//#region src/cli/utils/beta.ts
+/**
+* Warn that the ERD CLI is a beta feature.
+*/
+function logErdBetaWarning() {
+logger.warn("The ERD command is a beta feature and may introduce breaking changes in future releases.");
+logger.newline();
 }
+
+//#endregion
+//#region src/cli/tailordb/erd/utils.ts
 /**
-*
-* @param
-* @returns
+* Initialize shared ERD command context.
+* @param args - CLI arguments.
+* @returns Initialized context.
 */
-async function
+async function initErdContext(args) {
+logErdBetaWarning();
 const client = await initOperatorClient(await loadAccessToken({
 useProfile: true,
-profile:
+profile: args.profile
 }));
 const workspaceId = loadWorkspaceId({
-workspaceId:
-profile:
+workspaceId: args.workspaceId,
+profile: args.profile
+});
+const { config } = await loadConfig(args.config);
+return {
+client,
+workspaceId,
+config
+};
+}
+
+//#endregion
+//#region src/cli/tailordb/erd/export.ts
+const DEFAULT_ERD_BASE_DIR = ".tailor-sdk/erd";
+/**
+* Resolve TailorDB config and namespace.
+* @param config - Loaded Tailor SDK config.
+* @param explicitNamespace - Namespace override.
+* @returns Resolved namespace and erdSite.
+*/
+function resolveDbConfig(config, explicitNamespace) {
+const namespace = explicitNamespace ?? Object.keys(config.db ?? {})[0];
+if (!namespace) throw new Error("No TailorDB namespaces found in config. Please define db services in tailor.config.ts or pass --namespace.");
+const dbConfig = config.db?.[namespace];
+if (!dbConfig || typeof dbConfig !== "object" || "external" in dbConfig) throw new Error(`TailorDB namespace "${namespace}" not found in config.db.`);
+return {
+namespace,
+erdSite: dbConfig.erdSite
+};
+}
+/**
+* Get all namespaces with erdSite configured.
+* @param config - Loaded Tailor SDK config.
+* @returns Namespaces with erdSite.
+*/
+function resolveAllErdSites(config) {
+const results = [];
+for (const [namespace, dbConfig] of Object.entries(config.db ?? {})) if (dbConfig && typeof dbConfig === "object" && !("external" in dbConfig) && dbConfig.erdSite) results.push({
+namespace,
+erdSite: dbConfig.erdSite
 });
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+return results;
+}
+/**
+* Run the liam CLI to build an ERD static site from a schema file.
+* @param schemaPath - Path to the ERD schema JSON file
+* @param cwd - Working directory where liam will run (dist is created here)
+* @returns Resolves when the build completes successfully
+*/
+async function runLiamBuild(schemaPath, cwd) {
+fs$1.mkdirSync(cwd, { recursive: true });
+return await new Promise((resolve, reject) => {
+let liamBinPath;
+try {
+liamBinPath = resolveCliBinPath({
+cwd,
+packageName: "@liam-hq/cli",
+binName: "liam",
+installHint: "npm i -D @liam-hq/cli"
+});
+} catch (error) {
+logger.error(String(error));
+reject(error);
 return;
 }
-
-
-
-
-
-
-
-
+const child = spawn(process.execPath, [
+liamBinPath,
+"erd",
+"build",
+"--format",
+"tbls",
+"--input",
+schemaPath
+], {
+stdio: "inherit",
+cwd
+});
+child.on("error", (error) => {
+logger.error("Failed to run `@liam-hq/cli`. Ensure it is installed in your project.");
+reject(error);
+});
+child.on("exit", (code) => {
+if (code === 0) resolve();
+else {
+logger.error("liam CLI exited with a non-zero code. Ensure `@liam-hq/cli erd build --format tbls --input schema.json` works in your project.");
+reject(/* @__PURE__ */ new Error(`liam CLI exited with code ${code ?? 1}`));
 }
-}
-
-
-
+});
+});
+}
+/**
+* Export TailorDB schema and build ERD artifacts via liam.
+* @param options - Build options.
+*/
+async function prepareErdBuild(options) {
+await writeTblsSchemaToFile(options);
+await runLiamBuild(options.outputPath, options.erdDir);
+}
+/**
+* Prepare ERD builds for one or more namespaces.
+* @param options - Build options.
+* @returns Build results by namespace.
+*/
+async function prepareErdBuilds(options) {
+const { client, workspaceId, config } = options;
+const baseDir = options.outputDir ?? path.resolve(process.cwd(), DEFAULT_ERD_BASE_DIR);
+let targets;
+if (options.namespace) {
+const { namespace, erdSite } = resolveDbConfig(config, options.namespace);
+const erdDir = path.join(baseDir, namespace);
+targets = [{
+namespace,
+erdSite,
+schemaOutputPath: path.join(erdDir, "schema.json"),
+distDir: path.join(erdDir, "dist"),
+erdDir
+}];
+} else {
+const erdSites = resolveAllErdSites(config);
+if (erdSites.length === 0) throw new Error("No namespaces with erdSite configured found. Add erdSite: \"<static-website-name>\" to db.<namespace> in tailor.config.ts.");
+logger.info(`Found ${erdSites.length} namespace(s) with erdSite configured.`);
+targets = erdSites.map(({ namespace, erdSite }) => {
+const erdDir = path.join(baseDir, namespace);
+return {
+namespace,
+erdSite,
+schemaOutputPath: path.join(erdDir, "schema.json"),
+distDir: path.join(erdDir, "dist"),
+erdDir
+};
+});
 }
-
-
-
-
-
-
-
-
-
-
-
+await Promise.all(targets.map((target) => prepareErdBuild({
+namespace: target.namespace,
+client,
+workspaceId,
+outputPath: target.schemaOutputPath,
+erdDir: target.erdDir
+})));
+return targets;
+}
+const erdExportCommand = defineCommand({
+meta: {
+name: "export",
+description: "Export Liam ERD dist from applied TailorDB schema (beta)"
+},
+args: {
+...commonArgs,
+...deploymentArgs,
+...jsonArgs,
+namespace: {
+type: "string",
+description: "TailorDB namespace name (optional if only one namespace is defined in config)",
+alias: "n"
+},
+output: {
+type: "string",
+description: "Output directory path for tbls-compatible ERD JSON (writes to <outputDir>/<namespace>/schema.json)",
+alias: "o",
+default: DEFAULT_ERD_BASE_DIR
 }
-
-
-
-
-const
-
-
-
-
-
-
+},
+run: withCommonArgs(async (args) => {
+const { client, workspaceId, config } = await initErdContext(args);
+const outputDir = path.resolve(process.cwd(), String(args.output));
+const results = await prepareErdBuilds({
+client,
+workspaceId,
+config,
+namespace: args.namespace,
+outputDir
+});
+if (args.json) logger.out(results.map((result) => ({
+namespace: result.namespace,
+distDir: result.distDir,
+schemaOutputPath: result.schemaOutputPath
+})));
+else for (const result of results) {
+logger.out(`Exported ERD for namespace "${result.namespace}"`);
+logger.out(` - Liam ERD dist: ${result.distDir}`);
+logger.out(` - tbls schema.json: ${result.schemaOutputPath}`);
 }
-
-
-
-
-
-
-
-
-
-
+})
+});
+
+//#endregion
+//#region src/cli/tailordb/erd/deploy.ts
+const erdDeployCommand = defineCommand({
+meta: {
+name: "deploy",
+description: "Deploy ERD static website for TailorDB namespace(s) (beta)"
+},
+args: {
+...commonArgs,
+...deploymentArgs,
+...jsonArgs,
+namespace: {
+type: "string",
+description: "TailorDB namespace name (optional - deploys all namespaces with erdSite if omitted)",
+alias: "n"
 }
-
-
-
-
-
-
-
-
+},
+run: withCommonArgs(async (args) => {
+const { client, workspaceId, config } = await initErdContext(args);
+const buildResults = await prepareErdBuilds({
+client,
+workspaceId,
+config,
+namespace: args.namespace
+});
+const deployResults = await Promise.all(buildResults.map(async (result) => {
+if (!result.erdSite) throw new Error(`No erdSite configured for namespace "${result.namespace}". Add erdSite: "<static-website-name>" to db.${result.namespace} in tailor.config.ts.`);
+if (!args.json) logger.info(`Deploying ERD for namespace "${result.namespace}" to site "${result.erdSite}"...`);
+const { url, skippedFiles } = await deployStaticWebsite(client, workspaceId, result.erdSite, result.distDir, !args.json);
+return {
+namespace: result.namespace,
+erdSite: result.erdSite,
+url,
+skippedFiles
+};
+}));
+if (args.json) logger.out(deployResults);
+else for (const result of deployResults) {
+logger.success(`ERD site "${result.erdSite}" deployed successfully.`);
+logger.out(result.url);
+logSkippedFiles(result.skippedFiles);
 }
+})
+});
+
+//#endregion
+//#region src/cli/tailordb/erd/serve.ts
+function formatServeCommand(namespace) {
+return `tailor-sdk tailordb erd serve --namespace ${namespace}`;
+}
+async function runServeDist(results) {
+if (results.length === 0) throw new Error("No ERD build results found.");
+const [primary, ...rest] = results;
+logger.info(`Serving ERD for namespace "${primary.namespace}".`);
+if (rest.length > 0) {
+const commands = rest.map((result) => ` - ${formatServeCommand(result.namespace)}`).join("\n");
+logger.warn(`Multiple namespaces found. To serve another namespace, run:\n${commands}`);
 }
+fs$1.mkdirSync(primary.erdDir, { recursive: true });
+return await new Promise((resolve, reject) => {
+let serveBinPath;
+try {
+serveBinPath = resolveCliBinPath({
+cwd: primary.erdDir,
+packageName: "serve",
+binName: "serve",
+installHint: "npm i -D serve"
+});
+} catch (error) {
+logger.error(String(error));
+reject(error);
+return;
+}
+const child = spawn(process.execPath, [serveBinPath, "dist"], {
+stdio: "inherit",
+cwd: primary.erdDir
+});
+child.on("error", (error) => {
+logger.error("Failed to run `serve dist`. Ensure `serve` is installed in your project.");
+reject(error);
+});
+child.on("exit", (code) => {
+if (code === 0) resolve();
+else {
+logger.error("serve CLI exited with a non-zero code. Ensure `serve dist` works in your project.");
+reject(/* @__PURE__ */ new Error(`serve CLI exited with code ${code ?? 1}`));
+}
+});
+});
 }
-const
+const erdServeCommand = defineCommand({
 meta: {
-name: "
-description: "
+name: "serve",
+description: "Generate and serve ERD (liam build + `serve dist`) (beta)"
 },
 args: {
 ...commonArgs,
-
-type: "positional",
-description: "Type names to truncate",
-required: false
-},
-all: {
-type: "boolean",
-description: "Truncate all tables in all namespaces",
-default: false,
-alias: "a"
-},
+...deploymentArgs,
 namespace: {
 type: "string",
-description: "
+description: "TailorDB namespace name (uses first namespace in config if not specified)",
 alias: "n"
-}
-yes: {
-type: "boolean",
-description: "Skip confirmation prompt",
-alias: "y",
-default: false
-},
-...deploymentArgs
+}
 },
 run: withCommonArgs(async (args) => {
-const
-await
-
-
-
-
-
-types,
-yes: args.yes
-});
+const { client, workspaceId, config } = await initErdContext(args);
+await runServeDist(await prepareErdBuilds({
+client,
+workspaceId,
+config,
+namespace: args.namespace
+}));
 })
 });

+//#endregion
+//#region src/cli/tailordb/erd/index.ts
+const erdCommand = defineCommand({
+meta: {
+name: "erd",
+description: "ERD utilities for TailorDB (beta)"
+},
+subCommands: {
+export: erdExportCommand,
+serve: erdServeCommand,
+deploy: erdDeployCommand
+}
+});
+
 //#endregion
 //#region src/cli/tailordb/index.ts
 const tailordbCommand = defineCommand({
@@ -1262,7 +1619,10 @@ const tailordbCommand = defineCommand({
 name: "tailordb",
 description: "Manage TailorDB tables and data"
 },
-subCommands: {
+subCommands: {
+erd: erdCommand,
+truncate: truncateCommand
+}
 });

 //#endregion
@@ -1330,8 +1690,8 @@ function patScopeToString(scope) {
 }
 /**
 * Transform a PersonalAccessToken into CLI-friendly info.
-* @param
-* @returns
+* @param pat - Personal access token resource
+* @returns Flattened token info
 */
 function transformPersonalAccessToken(pat) {
 return {
@@ -1341,8 +1701,8 @@ function transformPersonalAccessToken(pat) {
 }
 /**
 * Get PAT scopes from a write flag.
-* @param
-* @returns
+* @param write - Whether write access is required
+* @returns Scopes to apply to the token
 */
 function getScopesFromWriteFlag(write) {
 return write ? [PATScope.PAT_SCOPE_READ, PATScope.PAT_SCOPE_WRITE] : [PATScope.PAT_SCOPE_READ];
@@ -1352,11 +1712,10 @@ function getScopeStringsFromWriteFlag(write) {
 }
 /**
 * Print the created or updated personal access token to the logger.
-* @param
-* @param
-* @param
-* @param
-* @returns {void}
+* @param name - Token name
+* @param token - Token value
+* @param write - Whether the token has write scope
+* @param action - Action performed
 */
 function printCreatedToken(name, token, write, action) {
 const scopes = getScopeStringsFromWriteFlag(write);
|