@tailor-platform/sdk 0.22.3 → 0.23.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,5 +1,5 @@
  /// <reference path="./user-defined.d.ts" />
- import { A as TailorDBType, Ct as InferFieldsOutput, H as AuthInvoker, M as AllowedValues, N as AllowedValuesOutput, St as FieldOutput, Tt as output$1, a as IncomingWebhookTrigger$1, bt as FieldMetadata, c as ScheduleTriggerInput, ht as TailorField, i as GqlOperation$1, l as WebhookOperation$1, n as ExecutorInput, o as RecordTrigger$1, pt as TailorUser, r as FunctionOperation$1, s as ResolverExecutedTrigger$1, u as WorkflowOperation$1, vt as ResolverInput, wt as JsonCompatible, xt as FieldOptions, yt as ArrayFieldOutput } from "./types-BaiXm10C.mjs";
+ import { A as TailorDBType, Ct as InferFieldsOutput, H as AuthInvoker, M as AllowedValues, N as AllowedValuesOutput, St as FieldOutput, Tt as output$1, a as IncomingWebhookTrigger$1, bt as FieldMetadata, c as ScheduleTriggerInput, ht as TailorField, i as GqlOperation$1, l as WebhookOperation$1, n as ExecutorInput, o as RecordTrigger$1, pt as TailorUser, r as FunctionOperation$1, s as ResolverExecutedTrigger$1, u as WorkflowOperation$1, vt as ResolverInput, wt as JsonCompatible, xt as FieldOptions, yt as ArrayFieldOutput } from "./types-ClG0gN3S.mjs";
  import { EmptyObject, JsonPrimitive, Jsonifiable, Jsonify } from "type-fest";
  import { Client } from "@urql/core";
  import { StandardCRON } from "ts-cron-validator";
@@ -355,4 +355,4 @@ declare namespace t {
  }
  //#endregion
  export { WORKFLOW_JOB_BRAND as A, GqlOperation as C, Workflow as D, WorkflowOperation as E, createWorkflowJob as F, createResolver as I, Env as L, WorkflowJobContext as M, WorkflowJobInput as N, WorkflowConfig as O, WorkflowJobOutput as P, FunctionOperation as S, WebhookOperation as T, ResolverExecutedTrigger as _, Trigger as a, recordUpdatedTrigger as b, IncomingWebhookTrigger as c, scheduleTrigger as d, RecordCreatedArgs as f, ResolverExecutedArgs as g, RecordUpdatedArgs as h, createExecutor as i, WorkflowJob as j, createWorkflow as k, incomingWebhookTrigger as l, RecordTrigger as m, output as n, IncomingWebhookArgs as o, RecordDeletedArgs as p, t as r, IncomingWebhookRequest as s, infer as t, ScheduleTrigger as u, recordCreatedTrigger as v, Operation as w, resolverExecutedTrigger as x, recordDeletedTrigger as y };
- //# sourceMappingURL=index-Bz_i9lgm.d.mts.map
+ //# sourceMappingURL=index-QGMXFOXH.d.mts.map
@@ -4,7 +4,7 @@ import Module, { createRequire } from "node:module";
  import { defineCommand } from "citty";
  import * as path$20 from "node:path";
  import path from "node:path";
- import process$1, { loadEnvFile } from "node:process";
+ import { loadEnvFile } from "node:process";
  import { z } from "zod";
  import chalk from "chalk";
  import { createConsola } from "consola";
@@ -19,10 +19,11 @@ import fs from "node:fs";
  import * as os$3 from "node:os";
  import os from "node:os";
  import { parseTOML, parseYAML, stringifyYAML } from "confbox";
- import fsPromises, { glob } from "node:fs/promises";
- import { fileURLToPath, pathToFileURL } from "node:url";
+ import { findUpSync } from "find-up-simple";
  import ml from "multiline-ts";
  import { xdgConfig } from "xdg-basedir";
+ import { pathToFileURL } from "node:url";
+ import { glob } from "node:fs/promises";
  import util from "node:util";
  import assert from "node:assert";
  import * as inflection from "inflection";
@@ -1211,25 +1212,6 @@ async function fetchMachineUserToken(url, clientId, clientSecret) {
  }).parse(rawJson);
  }

- //#endregion
- //#region ../../node_modules/find-up-simple/index.js
- const toPath = (urlOrPath) => urlOrPath instanceof URL ? fileURLToPath(urlOrPath) : urlOrPath;
- function findUpSync(name$1, { cwd = process$1.cwd(), type = "file", stopAt } = {}) {
- let directory = path.resolve(toPath(cwd) ?? "");
- const { root } = path.parse(directory);
- stopAt = path.resolve(directory, toPath(stopAt) ?? root);
- const isAbsoluteName = path.isAbsolute(name$1);
- while (directory) {
- const filePath = isAbsoluteName ? name$1 : path.join(directory, name$1);
- try {
- const stats = fs.statSync(filePath, { throwIfNoEntry: false });
- if (type === "file" && stats?.isFile() || type === "directory" && stats?.isDirectory()) return filePath;
- } catch {}
- if (directory === stopAt || directory === root) break;
- directory = path.dirname(directory);
- }
- }
-
  //#endregion
  //#region src/cli/context.ts
  const pfConfigSchema = z.object({
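
The hunk above drops the vendored copy of find-up-simple in favor of importing findUpSync directly from the package (added to the import list earlier in this file). A minimal sketch of equivalent usage of the published find-up-simple API; the config file name is illustrative only, not necessarily what this CLI looks up:

import { findUpSync } from "find-up-simple";

// Walk up from cwd until the named file is found; returns an absolute path or undefined.
// "tailor.config.ts" is a hypothetical example name.
const configPath = findUpSync("tailor.config.ts", { cwd: process.cwd(), type: "file" });
if (configPath) console.log(`config found at ${configPath}`);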
@@ -87997,7 +87979,7 @@ var require_eslint_helpers = /* @__PURE__ */ __commonJSMin(((exports, module) =>
  */
  async function globMatch({ basePath, pattern }) {
  let found = false;
- const { hfs } = await import("./src-DqwtAff-.mjs");
+ const { hfs } = await import("./src-BU1BDRRs.mjs");
  const matcher = new Minimatch(normalizeToPosix(path$9.relative(basePath, pattern)), MINIMATCH_OPTIONS);
  const walkSettings = {
  directoryFilter(entry) {
@@ -88044,7 +88026,7 @@ var require_eslint_helpers = /* @__PURE__ */ __commonJSMin(((exports, module) =>
  return new Minimatch(patternToUse, MINIMATCH_OPTIONS);
  });
  const unmatchedPatterns = new Set([...relativeToPatterns.keys()]);
- const { hfs } = await import("./src-DqwtAff-.mjs");
+ const { hfs } = await import("./src-BU1BDRRs.mjs");
  const walk = hfs.walk(basePath, {
  async directoryFilter(entry) {
  if (!matchers.some((matcher) => matcher.match(entry.path, true))) return false;
@@ -98208,8 +98190,18 @@ function normalizeActionPermission(permission) {
  //#endregion
  //#region src/parser/service/tailordb/type-parser.ts
  /**
+ * Parse multiple TailorDB types, build relationships, and validate uniqueness.
+ * This is the main entry point for parsing TailorDB types.
+ */
+ function parseTypes(rawTypes, namespace, typeSourceInfo) {
+ const types$2 = {};
+ for (const [typeName, type] of Object.entries(rawTypes)) types$2[typeName] = parseTailorDBType(type);
+ buildBackwardRelationships(types$2);
+ validatePluralFormUniqueness(types$2, namespace, typeSourceInfo);
+ return types$2;
+ }
+ /**
  * Parse a TailorDBType into a ParsedTailorDBType.
- * This is the main entry point for parsing TailorDB types in the parser layer.
  */
  function parseTailorDBType(type) {
  const metadata = type.metadata;
@@ -98282,6 +98274,50 @@ function buildBackwardRelationships(types$2) {
  };
  }
  }
+ /**
+ * Validate GraphQL query field name uniqueness.
+ * Checks for:
+ * 1. Each type's singular query name != plural query name
+ * 2. No duplicate query names across all types
+ */
+ function validatePluralFormUniqueness(types$2, namespace, typeSourceInfo) {
+ const errors = [];
+ for (const [, parsedType] of Object.entries(types$2)) {
+ const singularQuery = inflection.camelize(parsedType.name, true);
+ if (singularQuery === inflection.camelize(parsedType.pluralForm, true)) {
+ const sourceInfo = typeSourceInfo?.[parsedType.name];
+ const location = sourceInfo ? ` (${sourceInfo.filePath})` : "";
+ errors.push(`Type "${parsedType.name}"${location} has identical singular and plural query names "${singularQuery}". Use db.type(["${parsedType.name}", "UniquePluralForm"], {...}) to set a unique pluralForm.`);
+ }
+ }
+ const queryNameToSource = {};
+ for (const parsedType of Object.values(types$2)) {
+ const singularQuery = inflection.camelize(parsedType.name, true);
+ const pluralQuery = inflection.camelize(parsedType.pluralForm, true);
+ if (!queryNameToSource[singularQuery]) queryNameToSource[singularQuery] = [];
+ queryNameToSource[singularQuery].push({
+ typeName: parsedType.name,
+ kind: "singular"
+ });
+ if (singularQuery !== pluralQuery) {
+ if (!queryNameToSource[pluralQuery]) queryNameToSource[pluralQuery] = [];
+ queryNameToSource[pluralQuery].push({
+ typeName: parsedType.name,
+ kind: "plural"
+ });
+ }
+ }
+ const duplicates = Object.entries(queryNameToSource).filter(([, sources]) => sources.length > 1);
+ for (const [queryName, sources] of duplicates) {
+ const sourceList = sources.map((s) => {
+ const sourceInfo = typeSourceInfo?.[s.typeName];
+ const location = sourceInfo ? ` (${sourceInfo.filePath})` : "";
+ return `"${s.typeName}"${location} (${s.kind})`;
+ }).join(", ");
+ errors.push(`GraphQL query field "${queryName}" conflicts between: ${sourceList}`);
+ }
+ if (errors.length > 0) throw new Error(`GraphQL field name conflicts detected in TailorDB service "${namespace}".\n${errors.map((e) => ` - ${e}`).join("\n")}`);
+ }

  //#endregion
  //#region src/cli/application/tailordb/service.ts
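
The new validatePluralFormUniqueness rejects a type whose camelized singular and plural query names coincide, as well as query-name collisions across types. A short sketch of the first check, assuming pluralForm falls back to inflection.pluralize of the type name (that default is an assumption; the type name "Equipment" is only an example):

import * as inflection from "inflection";

// "Equipment" is uncountable, so the assumed default pluralForm is also "Equipment".
const name = "Equipment";
const pluralForm = inflection.pluralize(name); // "Equipment"

// Both query names camelize to "equipment", which the validator reports as a conflict.
const collides = inflection.camelize(name, true) === inflection.camelize(pluralForm, true);
console.log(collides); // true

// Per the error message above, an explicit plural form avoids the clash, e.g.
// db.type(["Equipment", "EquipmentItems"], { ... }).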
@@ -98343,9 +98379,7 @@ var TailorDBService = class {
  parseTypes() {
  const allTypes = {};
  for (const fileTypes of Object.values(this.rawTypes)) for (const [typeName, type] of Object.entries(fileTypes)) allTypes[typeName] = type;
- this.types = {};
- for (const [typeName, type] of Object.entries(allTypes)) this.types[typeName] = parseTailorDBType(type);
- buildBackwardRelationships(this.types);
+ this.types = parseTypes(allTypes, this.namespace, this.typeSourceInfo);
  }
  };

@@ -103494,7 +103528,7 @@ const applyCommand = defineCommand({
  "dry-run": {
  type: "boolean",
  description: "Run the command without making any changes",
- alias: "n"
+ alias: "d"
  },
  yes: {
  type: "boolean",
@@ -103875,7 +103909,7 @@ var DependencyWatcher = class {
  this.dependencyCache.clear();
  const impactResult = this.calculateImpact(absolutePath);
  if (impactResult.affectedGroups.length > 0) {
- logger.warn("File change detected, restarting watch process...", { mode: "stream" });
+ logger.info("File change detected, restarting watch process...", { mode: "stream" });
  logger.info(`Changed file: ${absolutePath}`, { mode: "stream" });
  logger.info(`Affected groups: ${impactResult.affectedGroups.join(", ")}`, { mode: "stream" });
  if (this.restartCallback) this.restartCallback();
@@ -104175,7 +104209,7 @@ var GenerationManager = class {
  }
  async restartWatchProcess() {
  logger.newline();
- logger.warn("Restarting watch process to clear module cache...", { mode: "stream" });
+ logger.info("Restarting watch process to clear module cache...", { mode: "stream" });
  logger.newline();
  if (this.watcher) await this.watcher.stop();
  const args = process.argv.slice(2);
@@ -105414,7 +105448,7 @@ const createCommand = defineCommand({
  type: "string",
  description: "Workspace name",
  required: true,
- alias: "N"
+ alias: "n"
  },
  region: {
  type: "string",
@@ -105425,7 +105459,7 @@ const createCommand = defineCommand({
  "delete-protection": {
  type: "boolean",
  description: "Enable delete protection",
- alias: "D",
+ alias: "d",
  default: false
  },
  "organization-id": {
@@ -105562,4 +105596,4 @@ const listCommand = defineCommand({

  //#endregion
  export { jsonArgs as $, printData as A, loadAccessToken as B, listOAuth2Clients as C, tokenCommand as D, getMachineUserToken as E, generateUserTypes as F, fetchUserInfo as G, readPlatformConfig as H, loadConfig as I, readPackageJson as J, initOAuth2Client as K, apiCall as L, generateCommand as M, apply as N, listCommand$3 as O, applyCommand as P, deploymentArgs as Q, apiCommand as R, listCommand$2 as S, getOAuth2Client as T, writePlatformConfig as U, loadWorkspaceId as V, fetchAll as W, commonArgs as X, PATScope as Y, confirmationArgs as Z, listWorkflowExecutions as _, createCommand as a, remove as b, resumeWorkflow as c, listCommand$1 as d, withCommonArgs as et, listWorkflows as f, getWorkflowExecution as g, executionsCommand as h, deleteWorkspace as i, generate as j, listMachineUsers as k, startCommand as l, getWorkflow as m, listWorkspaces as n, logger as nt, createWorkspace as o, getCommand as p, initOperatorClient as q, deleteCommand as r, resumeCommand as s, listCommand as t, workspaceArgs as tt, startWorkflow as u, show as v, getCommand$1 as w, removeCommand as x, showCommand as y, fetchLatestToken as z };
- //# sourceMappingURL=list-BHj1dQPk.mjs.map
+ //# sourceMappingURL=list-DLqfJ2jD.mjs.map