@tailor-platform/sdk 0.22.2 → 0.22.3

This diff compares the published contents of the two package versions as they appear in their public registry. It is provided for informational purposes only.
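The substantive change in 0.22.3: the permission-normalization helpers from src/configure/services/tailordb/permission.ts are no longer bundled into the generated config module, and TailorDBType.permission() / gqlPermission() now store the permission objects they are given as-is instead of normalizing them first (see the hunks below). As a rough sketch of what the removed code did — reconstructed from the deleted lines, with simplified types, and an illustrative condition (the "ownerId" operand is hypothetical), not an official API description:

```ts
// Sketch of the normalization the 0.22.2 bundle applied before storing
// record/GraphQL permissions; 0.22.3 stores the permission input unchanged.
type Operand = { user: string } | { record: string } | string | number | boolean;
type Operator = "=" | "!=" | "in" | "not in";
type Condition = readonly [Operand, Operator, Operand];

const operatorMap: Record<Operator, string> = {
  "=": "eq",
  "!=": "ne",
  in: "in",
  "not in": "nin",
};

// { user: "id" } was rewritten to the internal "_id" attribute; everything else passed through.
function normalizeOperand(operand: Operand): Operand {
  if (typeof operand === "object" && operand !== null && "user" in operand) {
    return { user: ({ id: "_id" } as Record<string, string>)[operand.user] ?? operand.user };
  }
  return operand;
}

// [left, "=", right] became [left, "eq", right], and so on for !=, in, not in.
function normalizeConditions(conditions: readonly Condition[]) {
  return conditions.map(([left, operator, right]) => [
    normalizeOperand(left),
    operatorMap[operator],
    normalizeOperand(right),
  ]);
}

// Example: [{ user: "id" }, "=", { record: "ownerId" }]
//       -> [{ user: "_id" }, "eq", { record: "ownerId" }]
console.log(normalizeConditions([[{ user: "id" }, "=", { record: "ownerId" }]]));
```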
@@ -275,81 +275,6 @@ const unauthenticatedTailorUser = {
 
  //#endregion
  //#region src/configure/services/tailordb/permission.ts
- const operatorMap = {
- "=": "eq",
- "!=": "ne",
- in: "in",
- "not in": "nin"
- };
- function normalizeOperand(operand) {
- if (typeof operand === "object" && "user" in operand) return { user: { id: "_id" }[operand.user] ?? operand.user };
- return operand;
- }
- function normalizeConditions(conditions) {
- return conditions.map((cond) => {
- const [left, operator, right] = cond;
- return [
- normalizeOperand(left),
- operatorMap[operator],
- normalizeOperand(right)
- ];
- });
- }
- function isObjectFormat(p) {
- return typeof p === "object" && p !== null && "conditions" in p;
- }
- function isSingleArrayConditionFormat(cond) {
- return cond.length >= 2 && typeof cond[1] === "string";
- }
- function normalizePermission(permission) {
- return Object.keys(permission).reduce((acc, action) => {
- acc[action] = permission[action].map((p) => normalizeActionPermission(p));
- return acc;
- }, {});
- }
- function normalizeGqlPermission(permission) {
- return permission.map((policy) => normalizeGqlPolicy(policy));
- }
- function normalizeGqlPolicy(policy) {
- return {
- conditions: policy.conditions ? normalizeConditions(policy.conditions) : [],
- actions: policy.actions === "all" ? ["all"] : policy.actions,
- permit: policy.permit ? "allow" : "deny",
- description: policy.description
- };
- }
- function normalizeActionPermission(permission) {
- if (isObjectFormat(permission)) return {
- conditions: normalizeConditions(isSingleArrayConditionFormat(permission.conditions) ? [permission.conditions] : permission.conditions),
- permit: permission.permit ? "allow" : "deny",
- description: permission.description
- };
- if (isSingleArrayConditionFormat(permission)) {
- const [op1, operator, op2, permit] = [...permission, true];
- return {
- conditions: normalizeConditions([[
- op1,
- operator,
- op2
- ]]),
- permit: permit ? "allow" : "deny"
- };
- }
- const conditions = [];
- const conditionArray = permission;
- let conditionArrayPermit = true;
- for (const item of conditionArray) {
- if (typeof item === "boolean") {
- conditionArrayPermit = item;
- continue;
- }
- conditions.push(item);
- }
- return {
- conditions: normalizeConditions(conditions),
- permit: conditionArrayPermit ? "allow" : "deny"
- };
- }
  /**
  * Grants full record-level access without any conditions.
  *
@@ -603,12 +528,12 @@ var TailorDBType = class {
  }
  permission(permission) {
  const ret = this;
- ret._permissions.record = normalizePermission(permission);
+ ret._permissions.record = permission;
  return ret;
  }
  gqlPermission(permission) {
  const ret = this;
- ret._permissions.gql = normalizeGqlPermission(permission);
+ ret._permissions.gql = permission;
  return ret;
  }
  description(description) {
@@ -728,4 +653,4 @@ function defineGenerators(...configs) {
 
  //#endregion
  export { createWorkflowJob as a, unsafeAllowAllGqlPermission as c, TailorField as d, t as f, WORKFLOW_JOB_BRAND as i, unsafeAllowAllTypePermission as l, defineGenerators as n, defineAuth as o, getDistDir as r, db as s, defineConfig as t, unauthenticatedTailorUser as u };
- //# sourceMappingURL=config-BYrX78K0.mjs.map
+ //# sourceMappingURL=config-CtRi0Lgg.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"config-CtRi0Lgg.mjs","names":["type: TailorFieldType","fields: Record<string, TailorField<any>>","issues: StandardSchemaV1.Issue[]","createField","uuid","string","bool","int","float","date","datetime","time","_enum","object","unauthenticatedTailorUser: TailorUser","unsafeAllowAllTypePermission: TailorTypePermission","unsafeAllowAllGqlPermission: TailorTypeGqlPermission","name: string","fields: Fields","indexes: Record<string, { fields: string[]; unique?: boolean }>","validators","description: string | undefined","fieldDef: F","distPath: string | null"],"sources":["../src/configure/types/field.ts","../src/configure/types/type.ts","../src/configure/types/user.ts","../src/configure/services/tailordb/permission.ts","../src/configure/services/tailordb/schema.ts","../src/configure/services/auth/index.ts","../src/configure/services/workflow/job.ts","../src/configure/config.ts"],"sourcesContent":["import { type EnumValue } from \"@/parser/service/tailordb/types\";\n\nexport type AllowedValue = EnumValue;\n\nexport type AllowedValues = [string | EnumValue, ...(string | EnumValue)[]];\n\nexport function mapAllowedValues(values: AllowedValues): AllowedValue[] {\n return values.map((value) => {\n if (typeof value === \"string\") {\n return { value, description: \"\" };\n }\n return { ...value, description: value.description ?? \"\" };\n });\n}\n\nexport type AllowedValuesOutput<V extends AllowedValues> =\n V[number] extends infer T\n ? T extends string\n ? T\n : T extends { value: infer K }\n ? K\n : never\n : never;\n","import {\n type AllowedValues,\n type AllowedValuesOutput,\n mapAllowedValues,\n} from \"./field\";\nimport {\n type TailorFieldType,\n type TailorToTs,\n type FieldMetadata,\n type DefinedFieldMetadata,\n type FieldOptions,\n type FieldOutput,\n} from \"./types\";\nimport type { Prettify, InferFieldsOutput } from \"./helpers\";\nimport type { FieldValidateInput } from \"./validation\";\nimport type { TailorUser } from \"@/configure/types\";\nimport type { TailorFieldInput } from \"@/parser/service/resolver/types\";\nimport type { StandardSchemaV1 } from \"@standard-schema/spec\";\n\nconst regex = {\n uuid: /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i,\n date: /^(?<year>\\d{4})-(?<month>\\d{2})-(?<day>\\d{2})$/,\n time: /^(?<hour>\\d{2}):(?<minute>\\d{2})$/,\n datetime:\n /^(?<year>\\d{4})-(?<month>\\d{2})-(?<day>\\d{2})T(?<hour>\\d{2}):(?<minute>\\d{2}):(?<second>\\d{2})(.(?<millisec>\\d{3}))?Z$/,\n} as const;\n\nexport class TailorField<\n const Defined extends DefinedFieldMetadata = DefinedFieldMetadata,\n const Output = any,\n M extends FieldMetadata = FieldMetadata,\n> implements TailorFieldInput {\n protected _metadata: M;\n public readonly _defined: Defined = undefined as unknown as Defined;\n public readonly _output = undefined as Output;\n\n get metadata() {\n return { ...this._metadata };\n }\n\n protected constructor(\n public readonly type: TailorFieldType,\n options?: FieldOptions,\n public readonly fields: Record<string, TailorField<any>> = {},\n values?: AllowedValues,\n ) {\n this._metadata = { required: true } as M;\n if (options) {\n if (options.optional === true) {\n this._metadata.required = false;\n }\n if (options.array === true) {\n this._metadata.array = true;\n }\n }\n if (values) {\n this._metadata.allowedValues = mapAllowedValues(values);\n }\n }\n\n static create<\n const TType extends TailorFieldType,\n const TOptions extends FieldOptions,\n const OutputBase = TailorToTs[TType],\n >(\n type: TType,\n options?: 
TOptions,\n fields?: Record<string, TailorField<any>>,\n values?: AllowedValues,\n ) {\n return new TailorField<\n { type: TType; array: TOptions extends { array: true } ? true : false },\n FieldOutput<OutputBase, TOptions>\n >(type, options, fields, values);\n }\n\n description<CurrentDefined extends Defined>(\n this: CurrentDefined extends { description: unknown }\n ? never\n : TailorField<CurrentDefined, Output>,\n description: string,\n ) {\n this._metadata.description = description;\n return this as TailorField<\n Prettify<CurrentDefined & { description: true }>,\n Output\n >;\n }\n\n typeName<CurrentDefined extends Defined>(\n this: CurrentDefined extends { typeName: unknown }\n ? never\n : CurrentDefined extends { type: \"enum\" | \"nested\" }\n ? TailorField<CurrentDefined, Output>\n : never,\n typeName: string,\n ) {\n this._metadata.typeName = typeName;\n return this as TailorField<\n Prettify<CurrentDefined & { typeName: true }>,\n Output\n >;\n }\n\n validate<CurrentDefined extends Defined>(\n this: CurrentDefined extends { validate: unknown }\n ? never\n : TailorField<CurrentDefined, Output>,\n ...validate: FieldValidateInput<Output>[]\n ) {\n this._metadata.validate = validate;\n return this as TailorField<\n Prettify<CurrentDefined & { validate: true }>,\n Output\n >;\n }\n\n /**\n * Parse and validate a value against this field's validation rules\n * Returns StandardSchema Result type with success or failure\n */\n parse(args: {\n value: any;\n data: any;\n user: TailorUser;\n }): StandardSchemaV1.Result<Output> {\n return this._parseInternal({\n value: args.value,\n data: args.data,\n user: args.user,\n pathArray: [],\n });\n }\n\n /**\n * Validate a single value (not an array element)\n * Used internally for array element validation\n * @private\n */\n private _validateValue(args: {\n value: any;\n data: any;\n user: TailorUser;\n pathArray: string[];\n }): StandardSchemaV1.Issue[] {\n const { value, data, user, pathArray } = args;\n const issues: StandardSchemaV1.Issue[] = [];\n\n // Type-specific validation\n switch (this.type) {\n case \"string\":\n if (typeof value !== \"string\") {\n issues.push({\n message: `Expected a string: received ${String(value)}`,\n path: pathArray.length > 0 ? pathArray : undefined,\n });\n }\n break;\n\n case \"integer\":\n if (typeof value !== \"number\" || !Number.isInteger(value)) {\n issues.push({\n message: `Expected an integer: received ${String(value)}`,\n path: pathArray.length > 0 ? pathArray : undefined,\n });\n }\n break;\n\n case \"float\":\n if (typeof value !== \"number\" || !Number.isFinite(value)) {\n issues.push({\n message: `Expected a number: received ${String(value)}`,\n path: pathArray.length > 0 ? pathArray : undefined,\n });\n }\n break;\n\n case \"boolean\":\n if (typeof value !== \"boolean\") {\n issues.push({\n message: `Expected a boolean: received ${String(value)}`,\n path: pathArray.length > 0 ? pathArray : undefined,\n });\n }\n break;\n\n case \"uuid\":\n if (typeof value !== \"string\" || !regex.uuid.test(value)) {\n issues.push({\n message: `Expected a valid UUID: received ${String(value)}`,\n path: pathArray.length > 0 ? pathArray : undefined,\n });\n }\n break;\n case \"date\":\n if (typeof value !== \"string\" || !regex.date.test(value)) {\n issues.push({\n message: `Expected to match \"yyyy-MM-dd\" format: received ${String(value)}`,\n path: pathArray.length > 0 ? 
pathArray : undefined,\n });\n }\n break;\n case \"datetime\":\n if (typeof value !== \"string\" || !regex.datetime.test(value)) {\n issues.push({\n message: `Expected to match ISO format: received ${String(value)}`,\n path: pathArray.length > 0 ? pathArray : undefined,\n });\n }\n break;\n case \"time\":\n if (typeof value !== \"string\" || !regex.time.test(value)) {\n issues.push({\n message: `Expected to match \"HH:mm:ss\" format`,\n path: pathArray.length > 0 ? pathArray : undefined,\n });\n }\n break;\n case \"enum\":\n if (this.metadata.allowedValues) {\n const allowedValues = this.metadata.allowedValues.map((v) => v.value);\n if (!allowedValues.includes(value)) {\n issues.push({\n message: `Must be one of [${allowedValues.join(\", \")}]: received ${String(value)}`,\n path: pathArray.length > 0 ? pathArray : undefined,\n });\n }\n }\n break;\n\n case \"nested\":\n // Validate nested object fields\n if (\n typeof value !== \"object\" ||\n value === null ||\n Array.isArray(value)\n ) {\n issues.push({\n message: `Expected an object: received ${String(value)}`,\n path: pathArray.length > 0 ? pathArray : undefined,\n });\n } else if (this.fields && Object.keys(this.fields).length > 0) {\n for (const [fieldName, field] of Object.entries(this.fields)) {\n const fieldValue = value?.[fieldName];\n const result = field._parseInternal({\n value: fieldValue,\n data,\n user,\n pathArray: pathArray.concat(fieldName),\n });\n if (result.issues) {\n issues.push(...result.issues);\n }\n }\n }\n break;\n }\n\n // Custom validation functions\n const validateFns = this.metadata.validate;\n if (validateFns && validateFns.length > 0) {\n for (const validateInput of validateFns) {\n const { fn, message } =\n typeof validateInput === \"function\"\n ? { fn: validateInput, message: \"Validation failed\" }\n : { fn: validateInput[0], message: validateInput[1] };\n\n if (!fn({ value, data, user })) {\n issues.push({\n message,\n path: pathArray.length > 0 ? pathArray : undefined,\n });\n }\n }\n }\n\n return issues;\n }\n\n /**\n * Internal parse method that tracks field path for nested validation\n * @private\n */\n private _parseInternal(args: {\n value: any;\n data: any;\n user: TailorUser;\n pathArray: string[];\n }): StandardSchemaV1.Result<Output> {\n const { value, data, user, pathArray } = args;\n const issues: StandardSchemaV1.Issue[] = [];\n\n // 1. Check required/optional\n const isNullOrUndefined = value === null || value === undefined;\n if (this.metadata.required && isNullOrUndefined) {\n issues.push({\n message: \"Required field is missing\",\n path: pathArray.length > 0 ? pathArray : undefined,\n });\n return { issues };\n }\n\n // If optional and null/undefined, skip further validation\n if (!this.metadata.required && isNullOrUndefined) {\n return { value };\n }\n\n // 2. Check array type\n if (this.metadata.array) {\n if (!Array.isArray(value)) {\n issues.push({\n message: \"Expected an array\",\n path: pathArray.length > 0 ? 
pathArray : undefined,\n });\n return { issues };\n }\n\n // Validate each array element (without array flag)\n for (let i = 0; i < value.length; i++) {\n const elementValue = value[i];\n const elementPath = pathArray.concat(`[${i}]`);\n\n // Validate element with same type but without array flag\n const elementIssues = this._validateValue({\n value: elementValue,\n data,\n user,\n pathArray: elementPath,\n });\n if (elementIssues.length > 0) {\n issues.push(...elementIssues);\n }\n }\n\n if (issues.length > 0) {\n return { issues };\n }\n return { value: value as Output };\n }\n\n // 3. Type-specific validation and custom validation\n const valueIssues = this._validateValue({ value, data, user, pathArray });\n issues.push(...valueIssues);\n\n if (issues.length > 0) {\n return { issues };\n }\n\n return { value };\n }\n}\n\nconst createField = TailorField.create;\nfunction uuid<const Opt extends FieldOptions>(options?: Opt) {\n return createField(\"uuid\", options);\n}\n\nfunction string<const Opt extends FieldOptions>(options?: Opt) {\n return createField(\"string\", options);\n}\n\nfunction bool<const Opt extends FieldOptions>(options?: Opt) {\n return createField(\"boolean\", options);\n}\n\nfunction int<const Opt extends FieldOptions>(options?: Opt) {\n return createField(\"integer\", options);\n}\n\nfunction float<const Opt extends FieldOptions>(options?: Opt) {\n return createField(\"float\", options);\n}\n\nfunction date<const Opt extends FieldOptions>(options?: Opt) {\n return createField(\"date\", options);\n}\n\nfunction datetime<const Opt extends FieldOptions>(options?: Opt) {\n return createField(\"datetime\", options);\n}\n\nfunction time<const Opt extends FieldOptions>(options?: Opt) {\n return createField(\"time\", options);\n}\n\nfunction _enum<const V extends AllowedValues, const Opt extends FieldOptions>(\n values: V,\n options?: Opt,\n): TailorField<\n { type: \"enum\"; array: Opt extends { array: true } ? true : false },\n FieldOutput<AllowedValuesOutput<V>, Opt>\n> {\n return createField<\"enum\", Opt, AllowedValuesOutput<V>>(\n \"enum\",\n options,\n undefined,\n values,\n );\n}\n\nfunction object<\n const F extends Record<string, TailorField<any>>,\n const Opt extends FieldOptions,\n>(fields: F, options?: Opt) {\n const objectField = createField(\"nested\", options, fields) as TailorField<\n { type: \"nested\"; array: Opt extends { array: true } ? true : false },\n FieldOutput<InferFieldsOutput<F>, Opt>\n >;\n return objectField;\n}\n\nexport const t = {\n uuid,\n string,\n bool,\n int,\n float,\n date,\n datetime,\n time,\n enum: _enum,\n object,\n};\n","// Interfaces for module augmentation\n// Users can extend these via: declare module \"@tailor-platform/sdk\" { interface AttributeMap { ... } }\n// eslint-disable-next-line @typescript-eslint/no-empty-object-type\nexport interface AttributeMap {}\nexport interface AttributeList {\n __tuple?: []; // Marker for tuple type\n}\n\nexport type InferredAttributeMap = keyof AttributeMap extends never\n ? Record<string, string | string[] | boolean | boolean[] | undefined>\n : AttributeMap;\n\nexport type InferredAttributeList = AttributeList[\"__tuple\"] extends []\n ? string[]\n : AttributeList[\"__tuple\"];\n\n/** Represents a user in the Tailor platform. 
*/\nexport type TailorUser = {\n /**\n * The ID of the user.\n * For unauthenticated users, this will be a nil UUID.\n */\n id: string;\n /**\n * The type of the user.\n * For unauthenticated users, this will be an empty string.\n */\n type: \"user\" | \"machine_user\" | \"\";\n /** The ID of the workspace the user belongs to. */\n workspaceId: string;\n /**\n * A map of the user's attributes.\n * For unauthenticated users, this will be null.\n */\n attributes: InferredAttributeMap | null;\n /**\n * A list of the user's attributes.\n * For unauthenticated users, this will be an empty array.\n */\n attributeList: InferredAttributeList;\n};\n\n/** Represents an unauthenticated user in the Tailor platform. */\nexport const unauthenticatedTailorUser: TailorUser = {\n id: \"00000000-0000-0000-0000-000000000000\",\n type: \"\",\n workspaceId: \"00000000-0000-0000-0000-000000000000\",\n attributes: null,\n attributeList: [],\n};\n","import type { InferredAttributeMap } from \"../../types\";\nimport type { ValueOperand } from \"../auth\";\n\nexport type TailorTypePermission<\n User extends object = InferredAttributeMap,\n Type extends object = object,\n> = {\n create: readonly ActionPermission<\"record\", User, Type, false>[];\n read: readonly ActionPermission<\"record\", User, Type, false>[];\n update: readonly ActionPermission<\"record\", User, Type, true>[];\n delete: readonly ActionPermission<\"record\", User, Type, false>[];\n};\n\ntype ActionPermission<\n Level extends \"record\" | \"gql\" = \"record\" | \"gql\",\n User extends object = InferredAttributeMap,\n Type extends object = object,\n Update extends boolean = boolean,\n> =\n | {\n conditions:\n | PermissionCondition<Level, User, Update, Type>\n | readonly PermissionCondition<Level, User, Update, Type>[];\n description?: string | undefined;\n permit?: boolean;\n }\n | readonly [\n ...PermissionCondition<Level, User, Update, Type>,\n ...([] | [boolean]),\n ] // single array condition\n | readonly [\n ...PermissionCondition<Level, User, Update, Type>[],\n ...([] | [boolean]),\n ]; // multiple array condition\n\nexport type TailorTypeGqlPermission<\n User extends object = InferredAttributeMap,\n Type extends object = object,\n> = readonly GqlPermissionPolicy<User, Type>[];\n\ntype GqlPermissionPolicy<\n User extends object = InferredAttributeMap,\n Type extends object = object,\n> = {\n conditions: readonly PermissionCondition<\"gql\", User, boolean, Type>[];\n actions: \"all\" | readonly GqlPermissionAction[];\n permit?: boolean;\n description?: string;\n};\n\ntype GqlPermissionAction =\n | \"read\"\n | \"create\"\n | \"update\"\n | \"delete\"\n | \"aggregate\"\n | \"bulkUpsert\";\n\nexport type PermissionCondition<\n Level extends \"record\" | \"gql\" = \"record\" | \"gql\",\n User extends object = InferredAttributeMap,\n Update extends boolean = boolean,\n Type extends object = object,\n> = readonly [\n PermissionOperand<Level, User, Type, Update>,\n PermissionOperator,\n PermissionOperand<Level, User, Type, Update>,\n];\n\ntype UserOperand<User extends object = InferredAttributeMap> = {\n user:\n | {\n [K in keyof User]: User[K] extends\n | string\n | string[]\n | boolean\n | boolean[]\n ? 
K\n : never;\n }[keyof User]\n | \"id\"\n | \"_loggedIn\";\n};\n\ntype RecordOperand<\n Type extends object,\n Update extends boolean = false,\n> = Update extends true\n ?\n | { oldRecord: (keyof Type & string) | \"id\" }\n | { newRecord: (keyof Type & string) | \"id\" }\n : { record: (keyof Type & string) | \"id\" };\n\nexport type PermissionOperand<\n Level extends \"record\" | \"gql\" = \"record\" | \"gql\",\n User extends object = InferredAttributeMap,\n Type extends object = object,\n Update extends boolean = boolean,\n> =\n | UserOperand<User>\n | ValueOperand\n | (Level extends \"record\" ? RecordOperand<Type, Update> : never);\n\ntype PermissionOperator = \"=\" | \"!=\" | \"in\" | \"not in\";\n\n/**\n * Grants full record-level access without any conditions.\n *\n * Unsafe and intended only for local development, prototyping, or tests.\n * Do not use this in production environments, as it effectively disables\n * authorization checks.\n */\nexport const unsafeAllowAllTypePermission: TailorTypePermission = {\n create: [{ conditions: [], permit: true }],\n read: [{ conditions: [], permit: true }],\n update: [{ conditions: [], permit: true }],\n delete: [{ conditions: [], permit: true }],\n};\n\n/**\n * Grants full GraphQL access (all actions) without any conditions.\n *\n * Unsafe and intended only for local development, prototyping, or tests.\n * Do not use this in production environments, as it effectively disables\n * authorization checks.\n */\nexport const unsafeAllowAllGqlPermission: TailorTypeGqlPermission = [\n { conditions: [], actions: \"all\", permit: true },\n];\n","import { clone } from \"es-toolkit\";\nimport {\n type AllowedValues,\n type AllowedValuesOutput,\n} from \"@/configure/types/field\";\nimport { TailorField } from \"@/configure/types/type\";\nimport {\n type FieldOptions,\n type FieldOutput,\n type TailorFieldType,\n type TailorToTs,\n} from \"@/configure/types/types\";\nimport {\n type TailorDBTypeMetadata,\n type RawPermissions,\n} from \"@/parser/service/tailordb/types\";\nimport {\n type TailorTypeGqlPermission,\n type TailorTypePermission,\n} from \"./permission\";\nimport {\n type DBFieldMetadata,\n type DefinedDBFieldMetadata,\n type Hooks,\n type Hook,\n type SerialConfig,\n type IndexDef,\n type TypeFeatures,\n type ExcludeNestedDBFields,\n} from \"./types\";\nimport type { InferredAttributeMap } from \"@/configure/types\";\nimport type {\n Prettify,\n output,\n InferFieldsOutput,\n} from \"@/configure/types/helpers\";\nimport type {\n FieldValidateInput,\n ValidateConfig,\n Validators,\n} from \"@/configure/types/validation\";\n\ntype RelationType =\n | \"oneToOne\"\n | \"1-1\"\n | \"manyToOne\"\n | \"n-1\"\n | \"N-1\"\n | \"keyOnly\";\n\ninterface RelationConfig<S extends RelationType, T extends TailorDBType> {\n type: S;\n toward: {\n type: T;\n as?: string;\n key?: keyof T[\"fields\"] & string;\n };\n backward?: string;\n}\n\n// Special config variant for self-referencing relations\ntype RelationSelfConfig = {\n type: RelationType;\n toward: {\n type: \"self\";\n as?: string;\n key?: string;\n };\n backward?: string;\n};\n\ninterface PendingSelfRelation {\n type: RelationType;\n as?: string;\n key: string;\n backward: string;\n}\n\nfunction isRelationSelfConfig(\n config: RelationConfig<RelationType, TailorDBType> | RelationSelfConfig,\n): config is RelationSelfConfig {\n return config.toward.type === \"self\";\n}\n\ninterface ReferenceConfig<T extends TailorDBType<any, any>> {\n type: TailorDBType<any, any>;\n key: keyof T[\"fields\"] & 
string;\n nameMap: [string | undefined, string];\n}\n\nexport class TailorDBField<\n const Defined extends DefinedDBFieldMetadata,\n const Output,\n> extends TailorField<Defined, Output, DBFieldMetadata> {\n private _ref: ReferenceConfig<TailorDBType> | undefined = undefined;\n private _pendingSelfRelation: PendingSelfRelation | undefined = undefined;\n\n get reference(): Readonly<ReferenceConfig<TailorDBType>> | undefined {\n return clone(this._ref);\n }\n\n get metadata() {\n return { ...this._metadata };\n }\n\n private constructor(\n type: TailorFieldType,\n options?: FieldOptions,\n fields?: Record<string, TailorDBField<any, any>>,\n values?: AllowedValues,\n ) {\n super(type, options, fields, values);\n }\n\n static create<\n const T extends TailorFieldType,\n const TOptions extends FieldOptions,\n const OutputBase = TailorToTs[T],\n >(\n type: T,\n options?: TOptions,\n fields?: Record<string, TailorDBField<any, any>>,\n values?: AllowedValues,\n ) {\n return new TailorDBField<\n { type: T; array: TOptions extends { array: true } ? true : false },\n FieldOutput<OutputBase, TOptions>\n >(type, options, fields, values);\n }\n\n description<CurrentDefined extends Defined>(\n this: CurrentDefined extends { description: unknown }\n ? never\n : TailorField<CurrentDefined, Output>,\n description: string,\n ): TailorDBField<Prettify<CurrentDefined & { description: true }>, Output> {\n return super.description(description) as any;\n }\n\n relation<\n S extends RelationType,\n T extends TailorDBType<any, any>,\n CurrentDefined extends Defined,\n >(\n this: CurrentDefined extends { relation: unknown }\n ? never\n : TailorDBField<CurrentDefined, Output>,\n config: RelationConfig<S, T>,\n ): TailorDBField<\n S extends \"oneToOne\" | \"1-1\"\n ? Prettify<CurrentDefined & { unique: true; index: true; relation: true }>\n : Prettify<CurrentDefined & { index: true; relation: true }>,\n Output\n >;\n\n // Overload: self-referencing variant\n relation<S extends RelationSelfConfig, CurrentDefined extends Defined>(\n this: CurrentDefined extends { relation: unknown }\n ? never\n : TailorDBField<CurrentDefined, Output>,\n config: S,\n ): TailorDBField<\n S[\"type\"] extends \"oneToOne\" | \"1-1\"\n ? Prettify<CurrentDefined & { unique: true; index: true; relation: true }>\n : Prettify<CurrentDefined & { index: true; relation: true }>,\n Output\n >;\n\n // Implementation\n relation<CurrentDefined extends Defined>(\n this: CurrentDefined extends { relation: unknown }\n ? never\n : TailorDBField<CurrentDefined, Output>,\n config: RelationConfig<RelationType, TailorDBType> | RelationSelfConfig,\n ): TailorDBField<DefinedDBFieldMetadata, Output> {\n this._metadata.index = true;\n this._metadata.foreignKey = true;\n this._metadata.unique = [\"oneToOne\", \"1-1\"].includes(config.type);\n\n const key = config.toward.key ?? \"id\";\n const backward = config.backward ?? 
\"\";\n\n if (isRelationSelfConfig(config)) {\n // Defer resolving the self reference until the type is constructed\n this._pendingSelfRelation = {\n type: config.type,\n as: config.toward.as,\n key,\n backward,\n };\n return this;\n }\n\n this._metadata.foreignKeyType = config.toward.type.name;\n this._metadata.foreignKeyField = key;\n if (config.type === \"keyOnly\") {\n return this;\n }\n\n const forward = config.toward.as;\n this._ref = {\n type: config.toward.type,\n nameMap: [forward, backward],\n key,\n };\n this._metadata.relation = true;\n return this;\n }\n\n index<CurrentDefined extends Defined>(\n this: CurrentDefined extends { index: unknown }\n ? never\n : TailorDBField<CurrentDefined, Output>,\n ) {\n this._metadata.index = true;\n return this as TailorDBField<\n Prettify<CurrentDefined & { index: true }>,\n Output\n >;\n }\n\n unique<CurrentDefined extends Defined>(\n this: CurrentDefined extends { unique: unknown }\n ? never\n : TailorDBField<CurrentDefined, Output>,\n ) {\n this._metadata.unique = true;\n this._metadata.index = true;\n return this as TailorDBField<\n Prettify<CurrentDefined & { unique: true; index: true }>,\n Output\n >;\n }\n\n vector<CurrentDefined extends Defined>(\n this: CurrentDefined extends { vector: unknown }\n ? never\n : CurrentDefined extends { type: \"string\"; array: false }\n ? TailorDBField<CurrentDefined, Output>\n : never,\n ) {\n this._metadata.vector = true;\n return this as TailorDBField<\n Prettify<CurrentDefined & { vector: true }>,\n Output\n >;\n }\n\n hooks<CurrentDefined extends Defined, const H extends Hook<unknown, Output>>(\n this: CurrentDefined extends { hooks: unknown }\n ? never\n : CurrentDefined extends { type: \"nested\" }\n ? never\n : TailorDBField<CurrentDefined, Output>,\n hooks: H,\n ) {\n this._metadata.hooks = hooks;\n return this as TailorDBField<\n Prettify<\n CurrentDefined & {\n hooks?: {\n create: H extends { create: unknown } ? true : false;\n update: H extends { update: unknown } ? true : false;\n };\n serial: false;\n }\n >,\n Output\n >;\n }\n\n validate<CurrentDefined extends Defined>(\n this: CurrentDefined extends { validate: unknown }\n ? never\n : TailorDBField<CurrentDefined, Output>,\n ...validate: FieldValidateInput<Output>[]\n ) {\n this._metadata.validate = validate;\n return this as TailorDBField<\n Prettify<CurrentDefined & { validate: true }>,\n Output\n >;\n }\n\n serial<CurrentDefined extends Defined>(\n this: CurrentDefined extends { serial: unknown }\n ? never\n : Output extends null\n ? never\n : CurrentDefined extends { type: \"integer\" | \"string\"; array: false }\n ? TailorDBField<CurrentDefined, Output>\n : never,\n config: SerialConfig<CurrentDefined[\"type\"] & (\"integer\" | \"string\")>,\n ) {\n (this as TailorDBField<CurrentDefined, Output>)._metadata.serial = config;\n return this as TailorDBField<\n Prettify<\n CurrentDefined & {\n serial: true;\n hooks: { create: false; update: false };\n }\n >,\n Output\n >;\n }\n\n /**\n * Clone the field with optional overrides for field options\n * @param options - Optional field options to override\n * @returns A new TailorDBField instance with the same configuration\n */\n clone<const NewOpt extends FieldOptions>(\n options?: NewOpt,\n ): TailorDBField<\n Prettify<\n Omit<Defined, \"array\"> & {\n array: NewOpt extends { array: true } ? 
true : Defined[\"array\"];\n }\n >,\n FieldOutput<TailorToTs[Defined[\"type\"]], NewOpt>\n > {\n // Create a clone using Object.create to preserve prototype chain\n const clonedField = Object.create(\n Object.getPrototypeOf(this),\n ) as TailorDBField<Defined, Output>;\n\n // Copy all properties\n Object.assign(clonedField, {\n type: this.type,\n fields: this.fields,\n _defined: this._defined,\n _output: this._output,\n });\n\n // Clone and merge metadata with new options\n clonedField._metadata = { ...this._metadata };\n if (options) {\n if (options.optional !== undefined) {\n clonedField._metadata.required = !options.optional;\n }\n if (options.array !== undefined) {\n clonedField._metadata.array = options.array;\n }\n }\n\n // Copy internal state\n if (this._ref) {\n clonedField._ref = clone(this._ref);\n }\n if (this._pendingSelfRelation) {\n clonedField._pendingSelfRelation = { ...this._pendingSelfRelation };\n }\n\n return clonedField as TailorDBField<any, any>;\n }\n}\n\nconst createField = TailorDBField.create;\nfunction uuid<const Opt extends FieldOptions>(options?: Opt) {\n return createField(\"uuid\", options);\n}\n\nfunction string<const Opt extends FieldOptions>(options?: Opt) {\n return createField(\"string\", options);\n}\n\nfunction bool<const Opt extends FieldOptions>(options?: Opt) {\n return createField(\"boolean\", options);\n}\n\nfunction int<const Opt extends FieldOptions>(options?: Opt) {\n return createField(\"integer\", options);\n}\n\nfunction float<const Opt extends FieldOptions>(options?: Opt) {\n return createField(\"float\", options);\n}\n\nfunction date<const Opt extends FieldOptions>(options?: Opt) {\n return createField(\"date\", options);\n}\n\nfunction datetime<const Opt extends FieldOptions>(options?: Opt) {\n return createField(\"datetime\", options);\n}\n\nfunction time<const Opt extends FieldOptions>(options?: Opt) {\n return createField(\"time\", options);\n}\n\nfunction _enum<const V extends AllowedValues, const Opt extends FieldOptions>(\n values: V,\n options?: Opt,\n): TailorDBField<\n { type: \"enum\"; array: Opt extends { array: true } ? true : false },\n FieldOutput<AllowedValuesOutput<V>, Opt>\n> {\n return createField<\"enum\", Opt, AllowedValuesOutput<V>>(\n \"enum\",\n options,\n undefined,\n values,\n );\n}\n\nfunction object<\n const F extends Record<string, TailorDBField<any, any>> &\n ExcludeNestedDBFields<F>,\n const Opt extends FieldOptions,\n>(fields: F, options?: Opt) {\n return createField(\"nested\", options, fields) as unknown as TailorDBField<\n { type: \"nested\"; array: Opt extends { array: true } ? true : false },\n FieldOutput<InferFieldsOutput<F>, Opt>\n >;\n}\n\nexport class TailorDBType<\n const Fields extends Record<string, TailorDBField<any, any>> = any,\n User extends object = InferredAttributeMap,\n> {\n public readonly _output = null as unknown as InferFieldsOutput<Fields>;\n public _description?: string;\n private _settings: TypeFeatures = {};\n private _indexes: IndexDef<this>[] = [];\n private _permissions: RawPermissions = {};\n private _files: Record<string, string> = {};\n\n constructor(\n public readonly name: string,\n public readonly fields: Fields,\n options: { pluralForm?: string; description?: string },\n ) {\n this._description = options.description;\n\n if (options.pluralForm) {\n if (name === options.pluralForm) {\n throw new Error(\n `The name and the plural form must be different. 
name=${name}`,\n );\n }\n this._settings.pluralForm = options.pluralForm;\n }\n\n // Resolve any pending self-references now that the type is constructed\n Object.entries(this.fields).forEach(([fieldName, field]) => {\n const f = field as unknown as {\n _pendingSelfRelation: PendingSelfRelation | undefined;\n _metadata: DBFieldMetadata;\n _ref: ReferenceConfig<TailorDBType<any, any>>;\n };\n const pending = f._pendingSelfRelation;\n if (pending) {\n f._metadata.foreignKeyType = this.name;\n f._metadata.foreignKeyField = pending.key;\n if (pending.type === \"keyOnly\") {\n return this;\n }\n\n const forward = pending.as ?? fieldName.replace(/(ID|Id|id)$/u, \"\");\n // Type conversion for manipulating private _ref.\n f._ref = {\n type: this,\n nameMap: [forward, pending.backward],\n key: pending.key,\n };\n }\n });\n }\n\n get metadata(): TailorDBTypeMetadata {\n // Convert indexes to the format expected by the manifest\n const indexes: Record<string, { fields: string[]; unique?: boolean }> = {};\n if (this._indexes && this._indexes.length > 0) {\n this._indexes.forEach((index) => {\n const fieldNames = index.fields.map((field) => String(field));\n const key = index.name || `idx_${fieldNames.join(\"_\")}`;\n indexes[key] = {\n fields: fieldNames,\n unique: index.unique,\n };\n });\n }\n\n return {\n name: this.name,\n description: this._description,\n settings: this._settings,\n permissions: this._permissions,\n files: this._files,\n ...(Object.keys(indexes).length > 0 && { indexes }),\n };\n }\n\n hooks(hooks: Hooks<Fields>) {\n Object.entries(hooks).forEach(([fieldName, fieldHooks]: [string, any]) => {\n this.fields[fieldName].hooks(fieldHooks);\n });\n return this;\n }\n\n validate(validators: Validators<Fields>) {\n Object.entries(validators).forEach(([fieldName, fieldValidators]) => {\n const field = this.fields[fieldName] as TailorDBField<any, any>;\n\n const validators = fieldValidators as\n | FieldValidateInput<unknown>\n | FieldValidateInput<unknown>[];\n\n const isValidateConfig = (v: unknown): v is ValidateConfig<unknown> => {\n return Array.isArray(v) && v.length === 2 && typeof v[1] === \"string\";\n };\n\n if (Array.isArray(validators)) {\n if (isValidateConfig(validators)) {\n field.validate(validators);\n } else {\n field.validate(...validators);\n }\n } else {\n field.validate(validators);\n }\n });\n return this;\n }\n\n features(features: Omit<TypeFeatures, \"pluralForm\">) {\n this._settings = { ...this._settings, ...features };\n return this;\n }\n\n indexes(...indexes: IndexDef<this>[]) {\n this._indexes = indexes;\n return this;\n }\n\n files<const F extends string>(\n files: Record<F, string> & Partial<Record<keyof output<this>, never>>,\n ) {\n this._files = files;\n return this;\n }\n\n permission<\n U extends object = User,\n P extends TailorTypePermission<U, output<this>> = TailorTypePermission<\n U,\n output<this>\n >,\n >(permission: P) {\n const ret = this as TailorDBType<Fields, U>;\n ret._permissions.record = permission;\n return ret;\n }\n\n gqlPermission<\n U extends object = User,\n P extends TailorTypeGqlPermission<U> = TailorTypeGqlPermission<U>,\n >(permission: P) {\n const ret = this as TailorDBType<Fields, U>;\n ret._permissions.gql = permission;\n return ret;\n }\n\n description(description: string) {\n this._description = description;\n return this;\n }\n\n /**\n * Pick specific fields from the type\n * @param keys - Array of field keys to pick\n * @param options - Optional field options to apply to picked fields\n * @returns An object containing 
only the specified fields\n */\n pickFields<K extends keyof Fields, const Opt extends FieldOptions>(\n keys: K[],\n options: Opt,\n ) {\n const result = {} as Record<K, TailorDBField<any, any>>;\n for (const key of keys) {\n if (options) {\n result[key] = this.fields[key].clone(options);\n } else {\n result[key] = this.fields[key];\n }\n }\n return result as {\n [P in K]: Fields[P] extends TailorDBField<infer D, infer _O>\n ? TailorDBField<\n Omit<D, \"array\"> & {\n array: Opt extends { array: true } ? true : D[\"array\"];\n },\n FieldOutput<TailorToTs[D[\"type\"]], Opt>\n >\n : never;\n };\n }\n\n /**\n * Omit specific fields from the type\n * @param keys - Array of field keys to omit\n * @returns An object containing all fields except the specified ones\n */\n omitFields<K extends keyof Fields>(keys: K[]): Omit<Fields, K> {\n const keysSet = new Set(keys);\n const result = {} as Record<string, TailorDBField<any, any>>;\n for (const key in this.fields) {\n if (\n Object.hasOwn(this.fields, key) &&\n !keysSet.has(key as unknown as K)\n ) {\n result[key] = this.fields[key];\n }\n }\n return result as Omit<Fields, K>;\n }\n}\n\nexport type TailorDBInstance<\n Fields extends Record<string, TailorDBField<any, any>> = any,\n User extends object = InferredAttributeMap,\n> = InstanceType<typeof TailorDBType<Fields, User>>;\n\nconst idField = uuid();\ntype idField = typeof idField;\ntype DBType<\n F extends { id?: never } & Record<string, TailorDBField<any, any>>,\n> = TailorDBInstance<{ id: idField } & F>;\n\n/**\n * Creates a new database type with the specified fields\n * @param name - The name of the type, or a tuple of [name, pluralForm]\n * @param fields - The field definitions for the type\n * @returns A new TailorDBType instance\n */\nfunction dbType<\n const F extends { id?: never } & Record<string, TailorDBField<any, any>>,\n>(name: string | [string, string], fields: F): DBType<F>;\n/**\n * Creates a new database type with the specified fields and description\n * @param name - The name of the type, or a tuple of [name, pluralForm]\n * @param description - A description of the type\n * @param fields - The field definitions for the type\n * @returns A new TailorDBType instance\n */\nfunction dbType<\n const F extends { id?: never } & Record<string, TailorDBField<any, any>>,\n>(name: string | [string, string], description: string, fields: F): DBType<F>;\nfunction dbType<\n const F extends { id?: never } & Record<string, TailorDBField<any, any>>,\n>(\n name: string | [string, string],\n fieldsOrDescription: string | F,\n fields?: F,\n): DBType<F> {\n const typeName = Array.isArray(name) ? name[0] : name;\n const pluralForm = Array.isArray(name) ? 
name[1] : undefined;\n\n let description: string | undefined;\n let fieldDef: F;\n if (typeof fieldsOrDescription === \"string\") {\n description = fieldsOrDescription;\n fieldDef = fields as F;\n } else {\n fieldDef = fieldsOrDescription;\n }\n return new TailorDBType<{ id: idField } & F>(\n typeName,\n {\n id: idField,\n ...fieldDef,\n },\n { pluralForm, description },\n ) as DBType<F>;\n}\n\nexport const db = {\n type: dbType,\n uuid,\n string,\n bool,\n int,\n float,\n date,\n datetime,\n time,\n enum: _enum,\n object,\n fields: {\n timestamps: () => ({\n createdAt: datetime()\n .hooks({ create: () => new Date() })\n .description(\"Record creation timestamp\"),\n updatedAt: datetime({ optional: true })\n .hooks({ update: () => new Date() })\n .description(\"Record last update timestamp\"),\n }),\n },\n};\n","import { type TailorDBInstance } from \"../tailordb/schema\";\nimport type {\n AuthInvoker as ParserAuthInvoker,\n AuthServiceInput,\n UserAttributeListKey,\n UserAttributeMap,\n} from \"@/parser/service/auth/types\";\n\ndeclare const authDefinitionBrand: unique symbol;\ntype AuthDefinitionBrand = { readonly [authDefinitionBrand]: true };\n\nexport type {\n OIDC,\n SAML,\n IDToken,\n BuiltinIdP,\n IdProviderConfig,\n OAuth2ClientGrantType,\n OAuth2ClientInput as OAuth2Client,\n SCIMAuthorization,\n SCIMAttributeType,\n SCIMAttribute,\n SCIMAttributeMapping,\n SCIMResource,\n SCIMConfig,\n TenantProviderConfig,\n ValueOperand,\n UsernameFieldKey,\n UserAttributeKey,\n UserAttributeListKey,\n UserAttributeMap,\n AuthServiceInput,\n} from \"@/parser/service/auth/types\";\n\n/**\n * Invoker type compatible with tailor.v1.AuthInvoker\n * - namespace: auth service name\n * - machineUserName: machine user name\n */\nexport type AuthInvoker<M extends string> = Omit<\n ParserAuthInvoker,\n \"machineUserName\"\n> & {\n machineUserName: M;\n};\n\nexport function defineAuth<\n const Name extends string,\n const User extends TailorDBInstance,\n const AttributeMap extends UserAttributeMap<User>,\n const AttributeList extends UserAttributeListKey<User>[],\n const MachineUserNames extends string,\n>(\n name: Name,\n config: AuthServiceInput<User, AttributeMap, AttributeList, MachineUserNames>,\n) {\n const result = {\n ...config,\n name,\n invoker<M extends MachineUserNames>(machineUser: M) {\n return { namespace: name, machineUserName: machineUser } as const;\n },\n } as const satisfies AuthServiceInput<\n User,\n AttributeMap,\n AttributeList,\n MachineUserNames\n > & {\n name: string;\n invoker<M extends MachineUserNames>(machineUser: M): AuthInvoker<M>;\n };\n\n return result as typeof result & AuthDefinitionBrand;\n}\n\nexport type AuthExternalConfig = { name: string; external: true };\n\nexport type AuthOwnConfig = ReturnType<\n typeof defineAuth<string, any, any, any, string>\n>;\n\nexport type AuthConfig = AuthOwnConfig | AuthExternalConfig;\n","import type { TailorEnv } from \"@/configure/types/env\";\nimport type { JsonCompatible } from \"@/configure/types/helpers\";\nimport type { Jsonifiable, Jsonify, JsonPrimitive } from \"type-fest\";\n\n/**\n * Symbol used to brand WorkflowJob objects created by createWorkflowJob.\n * This enables reliable runtime detection of workflow jobs regardless of\n * how they were imported or assigned (variable reassignment, destructuring, etc.)\n */\nexport const WORKFLOW_JOB_BRAND = Symbol.for(\"tailor:workflow-job\");\n\n/**\n * Context object passed as the second argument to workflow job body functions.\n */\nexport type WorkflowJobContext = {\n env: 
TailorEnv;\n};\n\n/**\n * Allowed output types for workflow job body functions.\n * Includes Jsonifiable (JSON-serializable values including objects with toJSON like Date),\n * undefined, and void.\n */\nexport type WorkflowJobOutput = Jsonifiable | undefined | void;\n\n/**\n * Convert output type to what trigger returns after JSON serialization.\n * - Jsonifiable values are converted via Jsonify (Date -> string, etc.)\n * - undefined remains undefined\n * - void becomes void\n */\ntype JsonifyOutput<T> = T extends Jsonifiable ? Jsonify<T> : T;\n\n/**\n * Input type constraint for workflow jobs.\n * Accepts any type that is JSON-compatible (primitives, arrays, objects with JSON-compatible values).\n * Excludes objects with toJSON method (like Date) since they won't be serialized in input.\n */\nexport type WorkflowJobInput = undefined | JsonCompatible<unknown>;\n\n/**\n * WorkflowJob represents a job that can be triggered in a workflow.\n *\n * Type constraints:\n * - Input: Must be JSON-compatible (no Date/toJSON objects) or undefined. Interfaces are allowed.\n * - Output: Must be Jsonifiable, undefined, or void\n * - Trigger returns Jsonify<Output> (Date becomes string after JSON.stringify)\n */\nexport interface WorkflowJob<\n Name extends string = string,\n Input = undefined,\n Output = undefined,\n> {\n readonly [WORKFLOW_JOB_BRAND]?: true;\n name: Name;\n /**\n * Trigger this job with the given input.\n * At runtime, this is a placeholder that calls the body function.\n * During bundling, calls to .trigger() are transformed to\n * tailor.workflow.triggerJobFunction(\"<job-name>\", args).\n *\n * Returns Jsonify<Output> because the value passes through JSON.stringify.\n */\n trigger: [Input] extends [undefined]\n ? () => Promise<JsonifyOutput<Awaited<Output>>>\n : (input: Input) => Promise<JsonifyOutput<Awaited<Output>>>;\n body: (input: Input, context: WorkflowJobContext) => Output | Promise<Output>;\n}\n\n/**\n * Check if a type contains any non-JSON-compatible values.\n * Returns `true` if the type is valid for input, `false` otherwise.\n */\ntype IsValidInput<T> = T extends undefined\n ? true\n : T extends JsonPrimitive\n ? true\n : T extends readonly (infer U)[]\n ? IsValidInput<U>\n : T extends object\n ? T extends { toJSON: () => unknown }\n ? false\n : { [K in keyof T]: IsValidInput<T[K]> }[keyof T] extends true\n ? true\n : false\n : false;\n\n/**\n * Check if a type is valid for output.\n * Returns `true` if the type is valid, `false` otherwise.\n */\ntype IsValidOutput<T> = T extends undefined | void\n ? true\n : T extends JsonPrimitive\n ? true\n : T extends readonly (infer U)[]\n ? IsValidOutput<U>\n : T extends object\n ? { [K in keyof T]: IsValidOutput<T[K]> }[keyof T] extends true\n ? true\n : false\n : false;\n\n/**\n * Body function type with conditional constraint.\n * If input contains invalid types (like Date), the body type becomes `never` to cause an error.\n */\ntype WorkflowJobBody<I, O> =\n IsValidInput<I> extends true\n ? IsValidOutput<O> extends true\n ? 
(input: I, context: WorkflowJobContext) => O | Promise<O>\n : never\n : never;\n\nexport const createWorkflowJob = <\n const Name extends string,\n I = undefined,\n O = undefined,\n>(config: {\n readonly name: Name;\n readonly body: WorkflowJobBody<I, O>;\n}): WorkflowJob<Name, I, Awaited<O>> => {\n return {\n [WORKFLOW_JOB_BRAND]: true,\n name: config.name,\n // JSON.parse(JSON.stringify(...)) ensures the return value matches Jsonify<Output> type.\n // This converts Date objects to strings, matching actual runtime behavior.\n trigger: async (args?: unknown) => {\n const ret = await tailor.workflow.triggerJobFunction(config.name, args);\n return ret ? JSON.parse(JSON.stringify(ret)) : ret;\n },\n body: config.body,\n } as WorkflowJob<Name, I, Awaited<O>>;\n};\n","import type { AuthConfig } from \"@/configure/services/auth\";\nimport type { ExecutorServiceInput } from \"@/configure/services/executor/types\";\nimport type { IdPConfig } from \"@/configure/services/idp\";\nimport type { ResolverServiceInput } from \"@/configure/services/resolver/types\";\nimport type { StaticWebsiteConfig } from \"@/configure/services/staticwebsite\";\nimport type { TailorDBServiceInput } from \"@/configure/services/tailordb/types\";\nimport type { WorkflowServiceInput } from \"@/configure/services/workflow/types\";\nimport type { GeneratorConfig } from \"@/parser/generator-config/types\";\n\nexport interface AppConfig<\n Auth extends AuthConfig = AuthConfig,\n Idp extends IdPConfig[] = IdPConfig[],\n StaticWebsites extends StaticWebsiteConfig[] = StaticWebsiteConfig[],\n Env extends Record<string, string | number | boolean> = Record<\n string,\n string | number | boolean\n >,\n> {\n name: string;\n env?: Env;\n cors?: string[];\n allowedIPAddresses?: string[];\n disableIntrospection?: boolean;\n db?: TailorDBServiceInput;\n resolver?: ResolverServiceInput;\n idp?: Idp;\n auth?: Auth;\n executor?: ExecutorServiceInput;\n workflow?: WorkflowServiceInput;\n staticWebsites?: StaticWebsites;\n}\n\nlet distPath: string | null = null;\nexport const getDistDir = (): string => {\n const configured = process.env.TAILOR_SDK_OUTPUT_DIR;\n if (configured && configured !== distPath) {\n distPath = configured;\n } else if (distPath === null) {\n distPath = configured || \".tailor-sdk\";\n }\n return distPath;\n};\n\nexport function defineConfig<\n const Config extends AppConfig &\n // type-fest's Exact works recursively and causes type errors, so we use a shallow version here.\n Record<Exclude<keyof Config, keyof AppConfig>, never>,\n>(config: Config) {\n return config;\n}\n\nexport function defineGenerators(...configs: GeneratorConfig[]) {\n return 
configs;\n}\n"],"mappings":";;;AAMA,SAAgB,iBAAiB,QAAuC;AACtE,QAAO,OAAO,KAAK,UAAU;AAC3B,MAAI,OAAO,UAAU,SACnB,QAAO;GAAE;GAAO,aAAa;GAAI;AAEnC,SAAO;GAAE,GAAG;GAAO,aAAa,MAAM,eAAe;GAAI;GACzD;;;;;ACOJ,MAAM,QAAQ;CACZ,MAAM;CACN,MAAM;CACN,MAAM;CACN,UACE;CACH;AAED,IAAa,cAAb,MAAa,YAIiB;CAC5B,AAAU;CACV,AAAgB,WAAoB;CACpC,AAAgB,UAAU;CAE1B,IAAI,WAAW;AACb,SAAO,EAAE,GAAG,KAAK,WAAW;;CAG9B,AAAU,YACR,AAAgBA,MAChB,SACA,AAAgBC,SAA2C,EAAE,EAC7D,QACA;EAJgB;EAEA;AAGhB,OAAK,YAAY,EAAE,UAAU,MAAM;AACnC,MAAI,SAAS;AACX,OAAI,QAAQ,aAAa,KACvB,MAAK,UAAU,WAAW;AAE5B,OAAI,QAAQ,UAAU,KACpB,MAAK,UAAU,QAAQ;;AAG3B,MAAI,OACF,MAAK,UAAU,gBAAgB,iBAAiB,OAAO;;CAI3D,OAAO,OAKL,MACA,SACA,QACA,QACA;AACA,SAAO,IAAI,YAGT,MAAM,SAAS,QAAQ,OAAO;;CAGlC,YAIE,aACA;AACA,OAAK,UAAU,cAAc;AAC7B,SAAO;;CAMT,SAME,UACA;AACA,OAAK,UAAU,WAAW;AAC1B,SAAO;;CAMT,SAIE,GAAG,UACH;AACA,OAAK,UAAU,WAAW;AAC1B,SAAO;;;;;;CAUT,MAAM,MAI8B;AAClC,SAAO,KAAK,eAAe;GACzB,OAAO,KAAK;GACZ,MAAM,KAAK;GACX,MAAM,KAAK;GACX,WAAW,EAAE;GACd,CAAC;;;;;;;CAQJ,AAAQ,eAAe,MAKM;EAC3B,MAAM,EAAE,OAAO,MAAM,MAAM,cAAc;EACzC,MAAMC,SAAmC,EAAE;AAG3C,UAAQ,KAAK,MAAb;GACE,KAAK;AACH,QAAI,OAAO,UAAU,SACnB,QAAO,KAAK;KACV,SAAS,+BAA+B,OAAO,MAAM;KACrD,MAAM,UAAU,SAAS,IAAI,YAAY;KAC1C,CAAC;AAEJ;GAEF,KAAK;AACH,QAAI,OAAO,UAAU,YAAY,CAAC,OAAO,UAAU,MAAM,CACvD,QAAO,KAAK;KACV,SAAS,iCAAiC,OAAO,MAAM;KACvD,MAAM,UAAU,SAAS,IAAI,YAAY;KAC1C,CAAC;AAEJ;GAEF,KAAK;AACH,QAAI,OAAO,UAAU,YAAY,CAAC,OAAO,SAAS,MAAM,CACtD,QAAO,KAAK;KACV,SAAS,+BAA+B,OAAO,MAAM;KACrD,MAAM,UAAU,SAAS,IAAI,YAAY;KAC1C,CAAC;AAEJ;GAEF,KAAK;AACH,QAAI,OAAO,UAAU,UACnB,QAAO,KAAK;KACV,SAAS,gCAAgC,OAAO,MAAM;KACtD,MAAM,UAAU,SAAS,IAAI,YAAY;KAC1C,CAAC;AAEJ;GAEF,KAAK;AACH,QAAI,OAAO,UAAU,YAAY,CAAC,MAAM,KAAK,KAAK,MAAM,CACtD,QAAO,KAAK;KACV,SAAS,mCAAmC,OAAO,MAAM;KACzD,MAAM,UAAU,SAAS,IAAI,YAAY;KAC1C,CAAC;AAEJ;GACF,KAAK;AACH,QAAI,OAAO,UAAU,YAAY,CAAC,MAAM,KAAK,KAAK,MAAM,CACtD,QAAO,KAAK;KACV,SAAS,mDAAmD,OAAO,MAAM;KACzE,MAAM,UAAU,SAAS,IAAI,YAAY;KAC1C,CAAC;AAEJ;GACF,KAAK;AACH,QAAI,OAAO,UAAU,YAAY,CAAC,MAAM,SAAS,KAAK,MAAM,CAC1D,QAAO,KAAK;KACV,SAAS,0CAA0C,OAAO,MAAM;KAChE,MAAM,UAAU,SAAS,IAAI,YAAY;KAC1C,CAAC;AAEJ;GACF,KAAK;AACH,QAAI,OAAO,UAAU,YAAY,CAAC,MAAM,KAAK,KAAK,MAAM,CACtD,QAAO,KAAK;KACV,SAAS;KACT,MAAM,UAAU,SAAS,IAAI,YAAY;KAC1C,CAAC;AAEJ;GACF,KAAK;AACH,QAAI,KAAK,SAAS,eAAe;KAC/B,MAAM,gBAAgB,KAAK,SAAS,cAAc,KAAK,MAAM,EAAE,MAAM;AACrE,SAAI,CAAC,cAAc,SAAS,MAAM,CAChC,QAAO,KAAK;MACV,SAAS,mBAAmB,cAAc,KAAK,KAAK,CAAC,cAAc,OAAO,MAAM;MAChF,MAAM,UAAU,SAAS,IAAI,YAAY;MAC1C,CAAC;;AAGN;GAEF,KAAK;AAEH,QACE,OAAO,UAAU,YACjB,UAAU,QACV,MAAM,QAAQ,MAAM,CAEpB,QAAO,KAAK;KACV,SAAS,gCAAgC,OAAO,MAAM;KACtD,MAAM,UAAU,SAAS,IAAI,YAAY;KAC1C,CAAC;aACO,KAAK,UAAU,OAAO,KAAK,KAAK,OAAO,CAAC,SAAS,EAC1D,MAAK,MAAM,CAAC,WAAW,UAAU,OAAO,QAAQ,KAAK,OAAO,EAAE;KAC5D,MAAM,aAAa,QAAQ;KAC3B,MAAM,SAAS,MAAM,eAAe;MAClC,OAAO;MACP;MACA;MACA,WAAW,UAAU,OAAO,UAAU;MACvC,CAAC;AACF,SAAI,OAAO,OACT,QAAO,KAAK,GAAG,OAAO,OAAO;;AAInC;;EAIJ,MAAM,cAAc,KAAK,SAAS;AAClC,MAAI,eAAe,YAAY,SAAS,EACtC,MAAK,MAAM,iBAAiB,aAAa;GACvC,MAAM,EAAE,IAAI,YACV,OAAO,kBAAkB,aACrB;IAAE,IAAI;IAAe,SAAS;IAAqB,GACnD;IAAE,IAAI,cAAc;IAAI,SAAS,cAAc;IAAI;AAEzD,OAAI,CAAC,GAAG;IAAE;IAAO;IAAM;IAAM,CAAC,CAC5B,QAAO,KAAK;IACV;IACA,MAAM,UAAU,SAAS,IAAI,YAAY;IAC1C,CAAC;;AAKR,SAAO;;;;;;CAOT,AAAQ,eAAe,MAKa;EAClC,MAAM,EAAE,OAAO,MAAM,MAAM,cAAc;EACzC,MAAMA,SAAmC,EAAE;EAG3C,MAAM,oBAAoB,UAAU,QAAQ,UAAU;AACtD,MAAI,KAAK,SAAS,YAAY,mBAAmB;AAC/C,UAAO,KAAK;IACV,SAAS;IACT,MAAM,UAAU,SAAS,IAAI,YAAY;IAC1C,CAAC;AACF,UAAO,EAAE,QAAQ;;AAInB,MAAI,CAAC,KAAK,SAAS,YAAY,kBAC7B,QAAO,EAAE,OAAO;AAIlB,MAAI,KAAK,SAAS,OAAO;AACvB,OAAI,CAAC,MAAM,QAAQ,MAAM,EAAE;AACzB,WAAO,KAAK;KACV,SAAS;KACT,MAAM,UAAU,SAAS,IAAI,YAAY;KAC1C,CAAC;AACF,WAAO,EAAE,
QAAQ;;AAInB,QAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;IACrC,MAAM,eAAe,MAAM;IAC3B,MAAM,cAAc,UAAU,OAAO,IAAI,EAAE,GAAG;IAG9C,MAAM,gBAAgB,KAAK,eAAe;KACxC,OAAO;KACP;KACA;KACA,WAAW;KACZ,CAAC;AACF,QAAI,cAAc,SAAS,EACzB,QAAO,KAAK,GAAG,cAAc;;AAIjC,OAAI,OAAO,SAAS,EAClB,QAAO,EAAE,QAAQ;AAEnB,UAAO,EAAS,OAAiB;;EAInC,MAAM,cAAc,KAAK,eAAe;GAAE;GAAO;GAAM;GAAM;GAAW,CAAC;AACzE,SAAO,KAAK,GAAG,YAAY;AAE3B,MAAI,OAAO,SAAS,EAClB,QAAO,EAAE,QAAQ;AAGnB,SAAO,EAAE,OAAO;;;AAIpB,MAAMC,gBAAc,YAAY;AAChC,SAASC,OAAqC,SAAe;AAC3D,QAAOD,cAAY,QAAQ,QAAQ;;AAGrC,SAASE,SAAuC,SAAe;AAC7D,QAAOF,cAAY,UAAU,QAAQ;;AAGvC,SAASG,OAAqC,SAAe;AAC3D,QAAOH,cAAY,WAAW,QAAQ;;AAGxC,SAASI,MAAoC,SAAe;AAC1D,QAAOJ,cAAY,WAAW,QAAQ;;AAGxC,SAASK,QAAsC,SAAe;AAC5D,QAAOL,cAAY,SAAS,QAAQ;;AAGtC,SAASM,OAAqC,SAAe;AAC3D,QAAON,cAAY,QAAQ,QAAQ;;AAGrC,SAASO,WAAyC,SAAe;AAC/D,QAAOP,cAAY,YAAY,QAAQ;;AAGzC,SAASQ,OAAqC,SAAe;AAC3D,QAAOR,cAAY,QAAQ,QAAQ;;AAGrC,SAASS,QACP,QACA,SAIA;AACA,QAAOT,cACL,QACA,SACA,QACA,OACD;;AAGH,SAASU,SAGP,QAAW,SAAe;AAK1B,QAJoBV,cAAY,UAAU,SAAS,OAAO;;AAO5D,MAAa,IAAI;CACf;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA,MAAMS;CACN;CACD;;;;;AC3XD,MAAaE,4BAAwC;CACnD,IAAI;CACJ,MAAM;CACN,aAAa;CACb,YAAY;CACZ,eAAe,EAAE;CAClB;;;;;;;;;;;AC+DD,MAAaC,+BAAqD;CAChE,QAAQ,CAAC;EAAE,YAAY,EAAE;EAAE,QAAQ;EAAM,CAAC;CAC1C,MAAM,CAAC;EAAE,YAAY,EAAE;EAAE,QAAQ;EAAM,CAAC;CACxC,QAAQ,CAAC;EAAE,YAAY,EAAE;EAAE,QAAQ;EAAM,CAAC;CAC1C,QAAQ,CAAC;EAAE,YAAY,EAAE;EAAE,QAAQ;EAAM,CAAC;CAC3C;;;;;;;;AASD,MAAaC,8BAAuD,CAClE;CAAE,YAAY,EAAE;CAAE,SAAS;CAAO,QAAQ;CAAM,CACjD;;;;AClDD,SAAS,qBACP,QAC8B;AAC9B,QAAO,OAAO,OAAO,SAAS;;AAShC,IAAa,gBAAb,MAAa,sBAGH,YAA8C;CACtD,AAAQ,OAAkD;CAC1D,AAAQ,uBAAwD;CAEhE,IAAI,YAAiE;AACnE,SAAO,MAAM,KAAK,KAAK;;CAGzB,IAAI,WAAW;AACb,SAAO,EAAE,GAAG,KAAK,WAAW;;CAG9B,AAAQ,YACN,MACA,SACA,QACA,QACA;AACA,QAAM,MAAM,SAAS,QAAQ,OAAO;;CAGtC,OAAO,OAKL,MACA,SACA,QACA,QACA;AACA,SAAO,IAAI,cAGT,MAAM,SAAS,QAAQ,OAAO;;CAGlC,YAIE,aACyE;AACzE,SAAO,MAAM,YAAY,YAAY;;CAiCvC,SAIE,QAC+C;AAC/C,OAAK,UAAU,QAAQ;AACvB,OAAK,UAAU,aAAa;AAC5B,OAAK,UAAU,SAAS,CAAC,YAAY,MAAM,CAAC,SAAS,OAAO,KAAK;EAEjE,MAAM,MAAM,OAAO,OAAO,OAAO;EACjC,MAAM,WAAW,OAAO,YAAY;AAEpC,MAAI,qBAAqB,OAAO,EAAE;AAEhC,QAAK,uBAAuB;IAC1B,MAAM,OAAO;IACb,IAAI,OAAO,OAAO;IAClB;IACA;IACD;AACD,UAAO;;AAGT,OAAK,UAAU,iBAAiB,OAAO,OAAO,KAAK;AACnD,OAAK,UAAU,kBAAkB;AACjC,MAAI,OAAO,SAAS,UAClB,QAAO;EAGT,MAAM,UAAU,OAAO,OAAO;AAC9B,OAAK,OAAO;GACV,MAAM,OAAO,OAAO;GACpB,SAAS,CAAC,SAAS,SAAS;GAC5B;GACD;AACD,OAAK,UAAU,WAAW;AAC1B,SAAO;;CAGT,QAIE;AACA,OAAK,UAAU,QAAQ;AACvB,SAAO;;CAMT,SAIE;AACA,OAAK,UAAU,SAAS;AACxB,OAAK,UAAU,QAAQ;AACvB,SAAO;;CAMT,SAME;AACA,OAAK,UAAU,SAAS;AACxB,SAAO;;CAMT,MAME,OACA;AACA,OAAK,UAAU,QAAQ;AACvB,SAAO;;CAcT,SAIE,GAAG,UACH;AACA,OAAK,UAAU,WAAW;AAC1B,SAAO;;CAMT,OAQE,QACA;AACA,EAAC,KAA+C,UAAU,SAAS;AACnE,SAAO;;;;;;;CAgBT,MACE,SAQA;EAEA,MAAM,cAAc,OAAO,OACzB,OAAO,eAAe,KAAK,CAC5B;AAGD,SAAO,OAAO,aAAa;GACzB,MAAM,KAAK;GACX,QAAQ,KAAK;GACb,UAAU,KAAK;GACf,SAAS,KAAK;GACf,CAAC;AAGF,cAAY,YAAY,EAAE,GAAG,KAAK,WAAW;AAC7C,MAAI,SAAS;AACX,OAAI,QAAQ,aAAa,OACvB,aAAY,UAAU,WAAW,CAAC,QAAQ;AAE5C,OAAI,QAAQ,UAAU,OACpB,aAAY,UAAU,QAAQ,QAAQ;;AAK1C,MAAI,KAAK,KACP,aAAY,OAAO,MAAM,KAAK,KAAK;AAErC,MAAI,KAAK,qBACP,aAAY,uBAAuB,EAAE,GAAG,KAAK,sBAAsB;AAGrE,SAAO;;;AAIX,MAAM,cAAc,cAAc;AAClC,SAAS,KAAqC,SAAe;AAC3D,QAAO,YAAY,QAAQ,QAAQ;;AAGrC,SAAS,OAAuC,SAAe;AAC7D,QAAO,YAAY,UAAU,QAAQ;;AAGvC,SAAS,KAAqC,SAAe;AAC3D,QAAO,YAAY,WAAW,QAAQ;;AAGxC,SAAS,IAAoC,SAAe;AAC1D,QAAO,YAAY,WAAW,QAAQ;;AAGxC,SAAS,MAAsC,SAAe;AAC5D,QAAO,YAAY,SAAS,QAAQ;;AAGtC,SAAS,KAAqC,SAAe;AAC3D,QAAO,YAAY,QAAQ,QAAQ;;AAGrC,SAAS,SAAyC,SAAe;AAC/D,QAAO,YAAY,YAAY,QAAQ;;AAGzC,SAAS,KAAqC,SAAe;AAC3D,QAAO,YAAY,QAAQ,QAAQ;;AAGrC,SAAS,MACP,QACA,SAIA;AACA,QAAO,YACL
,QACA,SACA,QACA,OACD;;AAGH,SAAS,OAIP,QAAW,SAAe;AAC1B,QAAO,YAAY,UAAU,SAAS,OAAO;;AAM/C,IAAa,eAAb,MAGE;CACA,AAAgB,UAAU;CAC1B,AAAO;CACP,AAAQ,YAA0B,EAAE;CACpC,AAAQ,WAA6B,EAAE;CACvC,AAAQ,eAA+B,EAAE;CACzC,AAAQ,SAAiC,EAAE;CAE3C,YACE,AAAgBC,MAChB,AAAgBC,QAChB,SACA;EAHgB;EACA;AAGhB,OAAK,eAAe,QAAQ;AAE5B,MAAI,QAAQ,YAAY;AACtB,OAAI,SAAS,QAAQ,WACnB,OAAM,IAAI,MACR,wDAAwD,OACzD;AAEH,QAAK,UAAU,aAAa,QAAQ;;AAItC,SAAO,QAAQ,KAAK,OAAO,CAAC,SAAS,CAAC,WAAW,WAAW;GAC1D,MAAM,IAAI;GAKV,MAAM,UAAU,EAAE;AAClB,OAAI,SAAS;AACX,MAAE,UAAU,iBAAiB,KAAK;AAClC,MAAE,UAAU,kBAAkB,QAAQ;AACtC,QAAI,QAAQ,SAAS,UACnB,QAAO;IAGT,MAAM,UAAU,QAAQ,MAAM,UAAU,QAAQ,gBAAgB,GAAG;AAEnE,MAAE,OAAO;KACP,MAAM;KACN,SAAS,CAAC,SAAS,QAAQ,SAAS;KACpC,KAAK,QAAQ;KACd;;IAEH;;CAGJ,IAAI,WAAiC;EAEnC,MAAMC,UAAkE,EAAE;AAC1E,MAAI,KAAK,YAAY,KAAK,SAAS,SAAS,EAC1C,MAAK,SAAS,SAAS,UAAU;GAC/B,MAAM,aAAa,MAAM,OAAO,KAAK,UAAU,OAAO,MAAM,CAAC;GAC7D,MAAM,MAAM,MAAM,QAAQ,OAAO,WAAW,KAAK,IAAI;AACrD,WAAQ,OAAO;IACb,QAAQ;IACR,QAAQ,MAAM;IACf;IACD;AAGJ,SAAO;GACL,MAAM,KAAK;GACX,aAAa,KAAK;GAClB,UAAU,KAAK;GACf,aAAa,KAAK;GAClB,OAAO,KAAK;GACZ,GAAI,OAAO,KAAK,QAAQ,CAAC,SAAS,KAAK,EAAE,SAAS;GACnD;;CAGH,MAAM,OAAsB;AAC1B,SAAO,QAAQ,MAAM,CAAC,SAAS,CAAC,WAAW,gBAA+B;AACxE,QAAK,OAAO,WAAW,MAAM,WAAW;IACxC;AACF,SAAO;;CAGT,SAAS,YAAgC;AACvC,SAAO,QAAQ,WAAW,CAAC,SAAS,CAAC,WAAW,qBAAqB;GACnE,MAAM,QAAQ,KAAK,OAAO;GAE1B,MAAMC,eAAa;GAInB,MAAM,oBAAoB,MAA6C;AACrE,WAAO,MAAM,QAAQ,EAAE,IAAI,EAAE,WAAW,KAAK,OAAO,EAAE,OAAO;;AAG/D,OAAI,MAAM,QAAQA,aAAW,CAC3B,KAAI,iBAAiBA,aAAW,CAC9B,OAAM,SAASA,aAAW;OAE1B,OAAM,SAAS,GAAGA,aAAW;OAG/B,OAAM,SAASA,aAAW;IAE5B;AACF,SAAO;;CAGT,SAAS,UAA4C;AACnD,OAAK,YAAY;GAAE,GAAG,KAAK;GAAW,GAAG;GAAU;AACnD,SAAO;;CAGT,QAAQ,GAAG,SAA2B;AACpC,OAAK,WAAW;AAChB,SAAO;;CAGT,MACE,OACA;AACA,OAAK,SAAS;AACd,SAAO;;CAGT,WAME,YAAe;EACf,MAAM,MAAM;AACZ,MAAI,aAAa,SAAS;AAC1B,SAAO;;CAGT,cAGE,YAAe;EACf,MAAM,MAAM;AACZ,MAAI,aAAa,MAAM;AACvB,SAAO;;CAGT,YAAY,aAAqB;AAC/B,OAAK,eAAe;AACpB,SAAO;;;;;;;;CAST,WACE,MACA,SACA;EACA,MAAM,SAAS,EAAE;AACjB,OAAK,MAAM,OAAO,KAChB,KAAI,QACF,QAAO,OAAO,KAAK,OAAO,KAAK,MAAM,QAAQ;MAE7C,QAAO,OAAO,KAAK,OAAO;AAG9B,SAAO;;;;;;;CAiBT,WAAmC,MAA4B;EAC7D,MAAM,UAAU,IAAI,IAAI,KAAK;EAC7B,MAAM,SAAS,EAAE;AACjB,OAAK,MAAM,OAAO,KAAK,OACrB,KACE,OAAO,OAAO,KAAK,QAAQ,IAAI,IAC/B,CAAC,QAAQ,IAAI,IAAoB,CAEjC,QAAO,OAAO,KAAK,OAAO;AAG9B,SAAO;;;AASX,MAAM,UAAU,MAAM;AAyBtB,SAAS,OAGP,MACA,qBACA,QACW;CACX,MAAM,WAAW,MAAM,QAAQ,KAAK,GAAG,KAAK,KAAK;CACjD,MAAM,aAAa,MAAM,QAAQ,KAAK,GAAG,KAAK,KAAK;CAEnD,IAAIC;CACJ,IAAIC;AACJ,KAAI,OAAO,wBAAwB,UAAU;AAC3C,gBAAc;AACd,aAAW;OAEX,YAAW;AAEb,QAAO,IAAI,aACT,UACA;EACE,IAAI;EACJ,GAAG;EACJ,EACD;EAAE;EAAY;EAAa,CAC5B;;AAGH,MAAa,KAAK;CAChB,MAAM;CACN;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA,MAAM;CACN;CACA,QAAQ,EACN,mBAAmB;EACjB,WAAW,UAAU,CAClB,MAAM,EAAE,8BAAc,IAAI,MAAM,EAAE,CAAC,CACnC,YAAY,4BAA4B;EAC3C,WAAW,SAAS,EAAE,UAAU,MAAM,CAAC,CACpC,MAAM,EAAE,8BAAc,IAAI,MAAM,EAAE,CAAC,CACnC,YAAY,+BAA+B;EAC/C,GACF;CACF;;;;AC5oBD,SAAgB,WAOd,MACA,QACA;AAiBA,QAhBe;EACb,GAAG;EACH;EACA,QAAoC,aAAgB;AAClD,UAAO;IAAE,WAAW;IAAM,iBAAiB;IAAa;;EAE3D;;;;;;;;;;ACrDH,MAAa,qBAAqB,OAAO,IAAI,sBAAsB;AAyGnE,MAAa,qBAIX,WAGsC;AACtC,QAAO;GACJ,qBAAqB;EACtB,MAAM,OAAO;EAGb,SAAS,OAAO,SAAmB;GACjC,MAAM,MAAM,MAAM,OAAO,SAAS,mBAAmB,OAAO,MAAM,KAAK;AACvE,UAAO,MAAM,KAAK,MAAM,KAAK,UAAU,IAAI,CAAC,GAAG;;EAEjD,MAAM,OAAO;EACd;;;;;ACpGH,IAAIC,WAA0B;AAC9B,MAAa,mBAA2B;CACtC,MAAM,aAAa,QAAQ,IAAI;AAC/B,KAAI,cAAc,eAAe,SAC/B,YAAW;UACF,aAAa,KACtB,YAAW,cAAc;AAE3B,QAAO;;AAGT,SAAgB,aAId,QAAgB;AAChB,QAAO;;AAGT,SAAgB,iBAAiB,GAAG,SAA4B;AAC9D,QAAO"}
@@ -1,4 +1,4 @@
1
1
  /// <reference path="./../user-defined.d.ts" />
2
- import { $ as Resolver, A as TailorDBType, B as AuthConfig, C as ResolverExternalConfig, Ct as UsernameFieldKey, D as ExecutorServiceInput, E as ExecutorServiceConfig, F as PermissionCondition, G as Env, H as AuthInvoker, I as TailorTypeGqlPermission, J as AttributeMap, L as TailorTypePermission, O as TailorDBField, Q as QueryType, R as unsafeAllowAllGqlPermission, S as defineIdp, St as UserAttributeMap, T as ResolverServiceInput, U as AuthOwnConfig, V as AuthExternalConfig, W as defineAuth, X as unauthenticatedTailorUser, Y as TailorUser, Z as TailorField, _ as WorkflowServiceInput, _t as SCIMConfig, at as AuthServiceInput, b as IdPConfig, bt as UserAttributeKey, ct as IdProviderConfig, dt as OIDC, f as defineConfig, ft as SAML, g as WorkflowServiceConfig, gt as SCIMAuthorization, ht as SCIMAttributeType, j as db, k as TailorDBInstance, lt as OAuth2ClientGrantType, mt as SCIMAttributeMapping, ot as BuiltinIdP, p as defineGenerators, pt as SCIMAttribute, q as AttributeList, st as IDToken, ut as OAuth2ClientInput, v as StaticWebsiteConfig, vt as SCIMResource, w as ResolverServiceConfig, wt as ValueOperand, x as IdPExternalConfig, xt as UserAttributeListKey, y as defineStaticWebSite, yt as TenantProviderConfig, z as unsafeAllowAllTypePermission } from "../types-DkG_CWKo.mjs";
3
- import { A as WORKFLOW_JOB_BRAND, C as GqlOperation, D as Workflow, E as WorkflowOperation, F as createWorkflowJob, I as createResolver, M as WorkflowJobContext, N as WorkflowJobInput, O as WorkflowConfig, P as WorkflowJobOutput, S as FunctionOperation, T as WebhookOperation, _ as ResolverExecutedTrigger, a as Trigger, b as recordUpdatedTrigger, c as IncomingWebhookTrigger, d as scheduleTrigger, f as RecordCreatedArgs, g as ResolverExecutedArgs, h as RecordUpdatedArgs, i as createExecutor, j as WorkflowJob, k as createWorkflow, l as incomingWebhookTrigger, m as RecordTrigger, n as output, o as IncomingWebhookArgs, p as RecordDeletedArgs, r as t, s as IncomingWebhookRequest, t as infer, u as ScheduleTrigger, v as recordCreatedTrigger, w as Operation, x as resolverExecutedTrigger, y as recordDeletedTrigger } from "../index-43O2EJml.mjs";
2
+ import { $ as SCIMAttribute, A as TailorDBType, B as AuthConfig, C as ResolverExternalConfig, D as ExecutorServiceInput, E as ExecutorServiceConfig, F as PermissionCondition, G as AuthServiceInput, H as AuthInvoker, I as TailorTypeGqlPermission, J as IdProviderConfig, K as BuiltinIdP, L as TailorTypePermission, O as TailorDBField, Q as SAML, R as unsafeAllowAllGqlPermission, S as defineIdp, T as ResolverServiceInput, U as AuthOwnConfig, V as AuthExternalConfig, W as defineAuth, X as OAuth2ClientInput, Y as OAuth2ClientGrantType, Z as OIDC, _ as WorkflowServiceInput, _t as Resolver, at as TenantProviderConfig, b as IdPConfig, ct as UserAttributeMap, dt as AttributeList, et as SCIMAttributeMapping, f as defineConfig, ft as AttributeMap, g as WorkflowServiceConfig, gt as QueryType, ht as TailorField, it as SCIMResource, j as db, k as TailorDBInstance, lt as UsernameFieldKey, mt as unauthenticatedTailorUser, nt as SCIMAuthorization, ot as UserAttributeKey, p as defineGenerators, pt as TailorUser, q as IDToken, rt as SCIMConfig, st as UserAttributeListKey, tt as SCIMAttributeType, ut as ValueOperand, v as StaticWebsiteConfig, w as ResolverServiceConfig, x as IdPExternalConfig, y as defineStaticWebSite, z as unsafeAllowAllTypePermission } from "../types-BaiXm10C.mjs";
3
+ import { A as WORKFLOW_JOB_BRAND, C as GqlOperation, D as Workflow, E as WorkflowOperation, F as createWorkflowJob, I as createResolver, L as Env, M as WorkflowJobContext, N as WorkflowJobInput, O as WorkflowConfig, P as WorkflowJobOutput, S as FunctionOperation, T as WebhookOperation, _ as ResolverExecutedTrigger, a as Trigger, b as recordUpdatedTrigger, c as IncomingWebhookTrigger, d as scheduleTrigger, f as RecordCreatedArgs, g as ResolverExecutedArgs, h as RecordUpdatedArgs, i as createExecutor, j as WorkflowJob, k as createWorkflow, l as incomingWebhookTrigger, m as RecordTrigger, n as output, o as IncomingWebhookArgs, p as RecordDeletedArgs, r as t, s as IncomingWebhookRequest, t as infer, u as ScheduleTrigger, v as recordCreatedTrigger, w as Operation, x as resolverExecutedTrigger, y as recordDeletedTrigger } from "../index-Bz_i9lgm.mjs";
4
4
  export { AttributeList, AttributeMap, AuthConfig, AuthExternalConfig, AuthInvoker, AuthOwnConfig, AuthServiceInput, BuiltinIdP, Env, ExecutorServiceConfig, ExecutorServiceInput, FunctionOperation, GqlOperation, IDToken, IdPConfig, IdPExternalConfig, IdProviderConfig, IncomingWebhookArgs, IncomingWebhookRequest, IncomingWebhookTrigger, OAuth2ClientInput as OAuth2Client, OAuth2ClientGrantType, OIDC, Operation, PermissionCondition, QueryType, RecordCreatedArgs, RecordDeletedArgs, RecordTrigger, RecordUpdatedArgs, Resolver, ResolverExecutedArgs, ResolverExecutedTrigger, ResolverExternalConfig, ResolverServiceConfig, ResolverServiceInput, SAML, SCIMAttribute, SCIMAttributeMapping, SCIMAttributeType, SCIMAuthorization, SCIMConfig, SCIMResource, ScheduleTrigger, StaticWebsiteConfig, TailorDBField, TailorDBInstance, TailorDBType, TailorField, TailorTypeGqlPermission, TailorTypePermission, TailorUser, TenantProviderConfig, Trigger, UserAttributeKey, UserAttributeListKey, UserAttributeMap, UsernameFieldKey, ValueOperand, WORKFLOW_JOB_BRAND, WebhookOperation, Workflow, WorkflowConfig, WorkflowJob, WorkflowJobContext, WorkflowJobInput, WorkflowJobOutput, WorkflowOperation, WorkflowServiceConfig, WorkflowServiceInput, createExecutor, createResolver, createWorkflow, createWorkflowJob, db, defineAuth, defineConfig, defineGenerators, defineIdp, defineStaticWebSite, incomingWebhookTrigger, infer, output, recordCreatedTrigger, recordDeletedTrigger, recordUpdatedTrigger, resolverExecutedTrigger, scheduleTrigger, t, unauthenticatedTailorUser, unsafeAllowAllGqlPermission, unsafeAllowAllTypePermission };
@@ -1,4 +1,4 @@
1
- import { a as createWorkflowJob, c as unsafeAllowAllGqlPermission, d as TailorField, f as t$1, i as WORKFLOW_JOB_BRAND, l as unsafeAllowAllTypePermission, n as defineGenerators, o as defineAuth, s as db, t as defineConfig, u as unauthenticatedTailorUser } from "../config-BYrX78K0.mjs";
1
+ import { a as createWorkflowJob, c as unsafeAllowAllGqlPermission, d as TailorField, f as t$1, i as WORKFLOW_JOB_BRAND, l as unsafeAllowAllTypePermission, n as defineGenerators, o as defineAuth, s as db, t as defineConfig, u as unauthenticatedTailorUser } from "../config-CtRi0Lgg.mjs";
2
2
 
3
3
  //#region src/configure/services/resolver/resolver.ts
4
4
  function createResolver(config) {
@@ -1,9 +1,14 @@
1
1
  /// <reference path="./user-defined.d.ts" />
2
- import { A as TailorDBType, Dt as output$1, Et as JsonCompatible, H as AuthInvoker, K as TailorEnv, M as AllowedValues, N as AllowedValuesOutput, Tt as InferFieldsOutput, Y as TailorUser, Z as TailorField, a as IncomingWebhookTrigger$1, c as ScheduleTriggerInput, et as ResolverInput, i as GqlOperation$1, it as FieldOutput, l as WebhookOperation$1, n as ExecutorInput, nt as FieldMetadata, o as RecordTrigger$1, r as FunctionOperation$1, rt as FieldOptions, s as ResolverExecutedTrigger$1, tt as ArrayFieldOutput, u as WorkflowOperation$1 } from "./types-DkG_CWKo.mjs";
2
+ import { A as TailorDBType, Ct as InferFieldsOutput, H as AuthInvoker, M as AllowedValues, N as AllowedValuesOutput, St as FieldOutput, Tt as output$1, a as IncomingWebhookTrigger$1, bt as FieldMetadata, c as ScheduleTriggerInput, ht as TailorField, i as GqlOperation$1, l as WebhookOperation$1, n as ExecutorInput, o as RecordTrigger$1, pt as TailorUser, r as FunctionOperation$1, s as ResolverExecutedTrigger$1, u as WorkflowOperation$1, vt as ResolverInput, wt as JsonCompatible, xt as FieldOptions, yt as ArrayFieldOutput } from "./types-BaiXm10C.mjs";
3
3
  import { EmptyObject, JsonPrimitive, Jsonifiable, Jsonify } from "type-fest";
4
4
  import { Client } from "@urql/core";
5
5
  import { StandardCRON } from "ts-cron-validator";
6
6
 
7
+ //#region src/configure/types/env.d.ts
8
+ interface Env {}
9
+ /** Represents environment variables in the Tailor platform. */
10
+ type TailorEnv = keyof Env extends never ? Record<string, string> : Env;
11
+ //#endregion
7
12
  //#region src/configure/services/resolver/resolver.d.ts
8
13
  type Context<Input extends Record<string, TailorField<any>> | undefined> = {
9
14
  input: Input extends Record<string, TailorField<any>> ? InferFieldsOutput<Input> : never;
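Note on the Env/TailorEnv declarations added in the hunk above: TailorEnv falls back to Record<string, string> while the Env interface stays empty, and narrows once a project merges keys into Env (for example from the referenced user-defined.d.ts). A minimal, self-contained TypeScript sketch of that conditional-type behavior follows; the sample keys are assumptions for illustration, not part of the SDK.

    interface Env {}
    // Mirrors the declaration above: keyof {} is never, so the fallback branch applies.
    type TailorEnv = keyof Env extends never ? Record<string, string> : Env;

    // With an empty Env, any string key is accepted:
    const loose: TailorEnv = { ANY_VAR: "value" };

    // If a project merges keys into Env, e.g.
    //   interface Env { DATABASE_URL: string }
    // then TailorEnv becomes exactly { DATABASE_URL: string } and the
    // loosely-typed value above would no longer type-check.
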
@@ -349,5 +354,5 @@ declare namespace t {
349
354
  type infer<T> = TailorOutput<T>;
350
355
  }
351
356
  //#endregion
352
- export { WORKFLOW_JOB_BRAND as A, GqlOperation as C, Workflow as D, WorkflowOperation as E, createWorkflowJob as F, createResolver as I, WorkflowJobContext as M, WorkflowJobInput as N, WorkflowConfig as O, WorkflowJobOutput as P, FunctionOperation as S, WebhookOperation as T, ResolverExecutedTrigger as _, Trigger as a, recordUpdatedTrigger as b, IncomingWebhookTrigger as c, scheduleTrigger as d, RecordCreatedArgs as f, ResolverExecutedArgs as g, RecordUpdatedArgs as h, createExecutor as i, WorkflowJob as j, createWorkflow as k, incomingWebhookTrigger as l, RecordTrigger as m, output as n, IncomingWebhookArgs as o, RecordDeletedArgs as p, t as r, IncomingWebhookRequest as s, infer as t, ScheduleTrigger as u, recordCreatedTrigger as v, Operation as w, resolverExecutedTrigger as x, recordDeletedTrigger as y };
353
- //# sourceMappingURL=index-43O2EJml.d.mts.map
357
+ export { WORKFLOW_JOB_BRAND as A, GqlOperation as C, Workflow as D, WorkflowOperation as E, createWorkflowJob as F, createResolver as I, Env as L, WorkflowJobContext as M, WorkflowJobInput as N, WorkflowConfig as O, WorkflowJobOutput as P, FunctionOperation as S, WebhookOperation as T, ResolverExecutedTrigger as _, Trigger as a, recordUpdatedTrigger as b, IncomingWebhookTrigger as c, scheduleTrigger as d, RecordCreatedArgs as f, ResolverExecutedArgs as g, RecordUpdatedArgs as h, createExecutor as i, WorkflowJob as j, createWorkflow as k, incomingWebhookTrigger as l, RecordTrigger as m, output as n, IncomingWebhookArgs as o, RecordDeletedArgs as p, t as r, IncomingWebhookRequest as s, infer as t, ScheduleTrigger as u, recordCreatedTrigger as v, Operation as w, resolverExecutedTrigger as x, recordDeletedTrigger as y };
358
+ //# sourceMappingURL=index-Bz_i9lgm.d.mts.map
@@ -1,5 +1,5 @@
1
1
  import { a as __toCommonJS, i as __require, n as __esmMin, o as __toESM, r as __export, t as __commonJSMin } from "./chunk-DhYkiPYI.mjs";
2
- import { i as WORKFLOW_JOB_BRAND, r as getDistDir } from "./config-BYrX78K0.mjs";
2
+ import { i as WORKFLOW_JOB_BRAND, r as getDistDir } from "./config-CtRi0Lgg.mjs";
3
3
  import Module, { createRequire } from "node:module";
4
4
  import { defineCommand } from "citty";
5
5
  import * as path$20 from "node:path";
@@ -23,9 +23,9 @@ import fsPromises, { glob } from "node:fs/promises";
23
23
  import { fileURLToPath, pathToFileURL } from "node:url";
24
24
  import ml from "multiline-ts";
25
25
  import { xdgConfig } from "xdg-basedir";
26
- import * as inflection from "inflection";
27
26
  import util from "node:util";
28
27
  import assert from "node:assert";
28
+ import * as inflection from "inflection";
29
29
  import * as rolldown from "rolldown";
30
30
  import { parseSync } from "oxc-parser";
31
31
  import { create, fromJson } from "@bufbuild/protobuf";
@@ -98113,6 +98113,176 @@ function ensureNoExternalVariablesInFieldScripts(typeName, fieldName, fieldConfi
98113
98113
  });
98114
98114
  }
98115
98115
 
98116
+ //#endregion
98117
+ //#region src/parser/service/tailordb/permission.ts
98118
+ const operatorMap = {
98119
+ "=": "eq",
98120
+ "!=": "ne",
98121
+ in: "in",
98122
+ "not in": "nin"
98123
+ };
98124
+ function normalizeOperand(operand) {
98125
+ if (typeof operand === "object" && "user" in operand) return { user: operand.user === "id" ? "_id" : operand.user };
98126
+ return operand;
98127
+ }
98128
+ function normalizeConditions(conditions) {
98129
+ return conditions.map((cond) => {
98130
+ const [left, operator, right] = cond;
98131
+ return [
98132
+ normalizeOperand(left),
98133
+ operatorMap[operator],
98134
+ normalizeOperand(right)
98135
+ ];
98136
+ });
98137
+ }
98138
+ function isObjectFormat(p$1) {
98139
+ return typeof p$1 === "object" && p$1 !== null && "conditions" in p$1;
98140
+ }
98141
+ function isSingleArrayConditionFormat(cond) {
98142
+ return cond.length >= 2 && typeof cond[1] === "string";
98143
+ }
98144
+ function normalizePermission(permission) {
98145
+ return Object.keys(permission).reduce((acc, action) => {
98146
+ acc[action] = permission[action].map((p$1) => normalizeActionPermission(p$1));
98147
+ return acc;
98148
+ }, {});
98149
+ }
98150
+ function normalizeGqlPermission(permission) {
98151
+ return permission.map((policy) => normalizeGqlPolicy(policy));
98152
+ }
98153
+ function normalizeGqlPolicy(policy) {
98154
+ return {
98155
+ conditions: policy.conditions ? normalizeConditions(policy.conditions) : [],
98156
+ actions: policy.actions === "all" ? ["all"] : policy.actions,
98157
+ permit: policy.permit ? "allow" : "deny",
98158
+ description: policy.description
98159
+ };
98160
+ }
98161
+ /**
98162
+ * Parse raw permissions into normalized permissions.
98163
+ * This is the main entry point for permission parsing in the parser layer.
98164
+ */
98165
+ function parsePermissions(rawPermissions) {
98166
+ return {
98167
+ ...rawPermissions.record && { record: normalizePermission(rawPermissions.record) },
98168
+ ...rawPermissions.gql && { gql: normalizeGqlPermission(rawPermissions.gql) }
98169
+ };
98170
+ }
98171
+ function normalizeActionPermission(permission) {
98172
+ if (isObjectFormat(permission)) {
98173
+ const conditions$1 = permission.conditions;
98174
+ return {
98175
+ conditions: normalizeConditions(isSingleArrayConditionFormat(conditions$1) ? [conditions$1] : conditions$1),
98176
+ permit: permission.permit ? "allow" : "deny",
98177
+ description: permission.description
98178
+ };
98179
+ }
98180
+ if (!Array.isArray(permission)) throw new Error("Invalid permission format");
98181
+ if (isSingleArrayConditionFormat(permission)) {
98182
+ const [op1, operator, op2, permit] = [...permission, true];
98183
+ return {
98184
+ conditions: normalizeConditions([[
98185
+ op1,
98186
+ operator,
98187
+ op2
98188
+ ]]),
98189
+ permit: permit ? "allow" : "deny"
98190
+ };
98191
+ }
98192
+ const conditions = [];
98193
+ const conditionArray = permission;
98194
+ let conditionArrayPermit = true;
98195
+ for (const item of conditionArray) {
98196
+ if (typeof item === "boolean") {
98197
+ conditionArrayPermit = item;
98198
+ continue;
98199
+ }
98200
+ conditions.push(item);
98201
+ }
98202
+ return {
98203
+ conditions: normalizeConditions(conditions),
98204
+ permit: conditionArrayPermit ? "allow" : "deny"
98205
+ };
98206
+ }
98207
+
98208
+ //#endregion
98209
+ //#region src/parser/service/tailordb/type-parser.ts
98210
+ /**
98211
+ * Parse a TailorDBType into a ParsedTailorDBType.
98212
+ * This is the main entry point for parsing TailorDB types in the parser layer.
98213
+ */
98214
+ function parseTailorDBType(type) {
98215
+ const metadata = type.metadata;
98216
+ const pluralForm = metadata.settings?.pluralForm || inflection.pluralize(type.name);
98217
+ const fields = {};
98218
+ const forwardRelationships = {};
98219
+ for (const [fieldName, fieldDef] of Object.entries(type.fields)) {
98220
+ const fieldConfig = parseFieldConfig(fieldDef);
98221
+ ensureNoExternalVariablesInFieldScripts(type.name, fieldName, fieldConfig);
98222
+ const parsedField = {
98223
+ name: fieldName,
98224
+ config: fieldConfig
98225
+ };
98226
+ const ref$1 = fieldDef.reference;
98227
+ if (ref$1) {
98228
+ const targetType = ref$1.type?.name;
98229
+ if (targetType) {
98230
+ const forwardName = ref$1.nameMap?.[0] || inflection.camelize(targetType, true);
98231
+ const backwardName = ref$1.nameMap?.[1] || "";
98232
+ const key = ref$1.key || "id";
98233
+ parsedField.relation = {
98234
+ targetType,
98235
+ forwardName,
98236
+ backwardName,
98237
+ key,
98238
+ unique: fieldDef.metadata?.unique ?? false
98239
+ };
98240
+ forwardRelationships[forwardName] = {
98241
+ name: forwardName,
98242
+ targetType,
98243
+ targetField: fieldName,
98244
+ sourceField: key,
98245
+ isArray: false,
98246
+ description: ref$1.type?.metadata?.description || ""
98247
+ };
98248
+ }
98249
+ }
98250
+ fields[fieldName] = parsedField;
98251
+ }
98252
+ return {
98253
+ name: type.name,
98254
+ pluralForm,
98255
+ description: metadata.description,
98256
+ fields,
98257
+ forwardRelationships,
98258
+ backwardRelationships: {},
98259
+ settings: metadata.settings || {},
98260
+ permissions: parsePermissions(metadata.permissions || {}),
98261
+ indexes: metadata.indexes,
98262
+ files: metadata.files
98263
+ };
98264
+ }
98265
+ /**
98266
+ * Build backward relationships between parsed types.
98267
+ */
98268
+ function buildBackwardRelationships(types$2) {
98269
+ for (const [typeName, type] of Object.entries(types$2)) for (const [otherTypeName, otherType] of Object.entries(types$2)) for (const [fieldName, field] of Object.entries(otherType.fields)) if (field.relation && field.relation.targetType === typeName) {
98270
+ let backwardName = field.relation.backwardName;
98271
+ if (!backwardName) {
98272
+ const lowerName = inflection.camelize(otherTypeName, true);
98273
+ backwardName = field.relation.unique ? inflection.singularize(lowerName) : inflection.pluralize(lowerName);
98274
+ }
98275
+ type.backwardRelationships[backwardName] = {
98276
+ name: backwardName,
98277
+ targetType: otherTypeName,
98278
+ targetField: fieldName,
98279
+ sourceField: field.relation.key,
98280
+ isArray: !field.relation.unique,
98281
+ description: otherType.description || ""
98282
+ };
98283
+ }
98284
+ }
98285
+
98116
98286
  //#endregion
98117
98287
  //#region src/cli/application/tailordb/service.ts
98118
98288
  var TailorDBService = class {
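The permission.ts block relocated into the parser layer in the hunk above accepts several shorthand shapes and normalizes them to a single { conditions, permit } form, mapping "=", "!=", "in", and "not in" to eq/ne/in/nin and rewriting the { user: "id" } operand to { user: "_id" }. A rough before/after sketch of the expected shapes, using illustrative data only (parsePermissions itself is internal to the bundle, not a public export):

    // Raw record permission as an author might write it: a bare condition tuple
    // for "read", and the object form with an explicit permit flag for "create".
    const rawRecordPermission = {
      read: [[{ user: "id" }, "=", "ownerId"]],
      create: [{ conditions: [{ user: "id" }, "=", "ownerId"], permit: true, description: "owner only" }]
    };

    // Shape the normalizeActionPermission branches shown above are expected to produce:
    const normalizedRecordPermission = {
      read: [{ conditions: [[{ user: "_id" }, "eq", "ownerId"]], permit: "allow" }],
      create: [{ conditions: [[{ user: "_id" }, "eq", "ownerId"]], permit: "allow", description: "owner only" }]
    };

A trailing boolean in the tuple form (for example [left, "=", right, false]) flips permit to "deny", since the destructuring in the diff defaults the fourth element to true.
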
@@ -98174,76 +98344,8 @@ var TailorDBService = class {
98174
98344
  const allTypes = {};
98175
98345
  for (const fileTypes of Object.values(this.rawTypes)) for (const [typeName, type] of Object.entries(fileTypes)) allTypes[typeName] = type;
98176
98346
  this.types = {};
98177
- for (const [typeName, type] of Object.entries(allTypes)) this.types[typeName] = this.parseTailorDBType(type);
98178
- this.buildBackwardRelationships(this.types);
98179
- }
98180
- parseTailorDBType(type) {
98181
- const metadata = type.metadata;
98182
- const pluralForm = metadata.settings?.pluralForm || inflection.pluralize(type.name);
98183
- const fields = {};
98184
- const forwardRelationships = {};
98185
- for (const [fieldName, fieldDef] of Object.entries(type.fields)) {
98186
- const fieldConfig = parseFieldConfig(fieldDef);
98187
- ensureNoExternalVariablesInFieldScripts(type.name, fieldName, fieldConfig);
98188
- const parsedField = {
98189
- name: fieldName,
98190
- config: fieldConfig
98191
- };
98192
- const ref$1 = fieldDef.reference;
98193
- if (ref$1) {
98194
- const targetType = ref$1.type?.name;
98195
- if (targetType) {
98196
- const forwardName = ref$1.nameMap?.[0] || inflection.camelize(targetType, true);
98197
- const backwardName = ref$1.nameMap?.[1] || "";
98198
- const key = ref$1.key || "id";
98199
- parsedField.relation = {
98200
- targetType,
98201
- forwardName,
98202
- backwardName,
98203
- key,
98204
- unique: fieldDef.metadata?.unique ?? false
98205
- };
98206
- forwardRelationships[forwardName] = {
98207
- name: forwardName,
98208
- targetType,
98209
- targetField: fieldName,
98210
- sourceField: key,
98211
- isArray: false,
98212
- description: ref$1.type?.metadata?.description || ""
98213
- };
98214
- }
98215
- }
98216
- fields[fieldName] = parsedField;
98217
- }
98218
- return {
98219
- name: type.name,
98220
- pluralForm,
98221
- description: metadata.description,
98222
- fields,
98223
- forwardRelationships,
98224
- backwardRelationships: {},
98225
- settings: metadata.settings || {},
98226
- permissions: metadata.permissions || {},
98227
- indexes: metadata.indexes,
98228
- files: metadata.files
98229
- };
98230
- }
98231
- buildBackwardRelationships(types$2) {
98232
- for (const [typeName, type] of Object.entries(types$2)) for (const [otherTypeName, otherType] of Object.entries(types$2)) for (const [fieldName, field] of Object.entries(otherType.fields)) if (field.relation && field.relation.targetType === typeName) {
98233
- let backwardName = field.relation.backwardName;
98234
- if (!backwardName) {
98235
- const lowerName = inflection.camelize(otherTypeName, true);
98236
- backwardName = field.relation.unique ? inflection.singularize(lowerName) : inflection.pluralize(lowerName);
98237
- }
98238
- type.backwardRelationships[backwardName] = {
98239
- name: backwardName,
98240
- targetType: otherTypeName,
98241
- targetField: fieldName,
98242
- sourceField: field.relation.key,
98243
- isArray: !field.relation.unique,
98244
- description: otherType.description || ""
98245
- };
98246
- }
98347
+ for (const [typeName, type] of Object.entries(allTypes)) this.types[typeName] = parseTailorDBType(type);
98348
+ buildBackwardRelationships(this.types);
98247
98349
  }
98248
98350
  };
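parseTailorDBType and buildBackwardRelationships, added earlier in this diff, replace the former TailorDBService methods removed in the hunk above; a backward relationship is named by camelizing the referencing type and pluralizing it unless the reference is unique. An illustrative sketch of the data they would attach, assuming a hypothetical Order type whose userId field references User (these type names are not from the diff):

    // What User.backwardRelationships would roughly receive for a non-unique
    // Order.userId -> User reference, per the helper shown in this diff:
    const userBackwardRelationships = {
      orders: {
        name: "orders",        // camelize("Order") then pluralize, since unique is false
        targetType: "Order",
        targetField: "userId",
        sourceField: "id",     // the relation key, which defaults to "id"
        isArray: true,         // inverse of the unique flag
        description: ""        // the Order type's description, empty here
      }
    };
    // A unique reference would instead be singularized ("order") with isArray: false.
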
98249
98351
 
@@ -101875,16 +101977,18 @@ async function confirmOwnerConflict(conflicts, appName, yes) {
101875
101977
  }
101876
101978
  async function confirmUnmanagedResources(resources, appName, yes) {
101877
101979
  if (resources.length === 0) return;
101878
- logger.warn("Unmanaged resources detected:");
101980
+ logger.warn("Existing resources not tracked by tailor-sdk were found:");
101879
101981
  logger.log(` ${styles.info("Resources")}:`);
101880
101982
  for (const r$1 of resources) logger.log(` • ${styles.bold(r$1.resourceType)} ${styles.info(`"${r$1.resourceName}"`)}`);
101881
101983
  logger.newline();
101882
- logger.log(" These resources are not managed by any application.");
101984
+ logger.log(" These resources may have been created by older SDK versions, Terraform, or CUE.");
101985
+ logger.log(" To continue, confirm that tailor-sdk should manage them.");
101986
+ logger.log(" If they are managed by another tool (e.g., Terraform), cancel and manage them there instead.");
101883
101987
  if (yes) {
101884
101988
  logger.success(`Adding to "${appName}" (--yes flag specified)...`, { mode: "plain" });
101885
101989
  return;
101886
101990
  }
101887
- if (!await logger.prompt(`Add these resources to "${appName}"?`, {
101991
+ if (!await logger.prompt(`Allow tailor-sdk to manage these resources for "${appName}"?`, {
101888
101992
  type: "confirm",
101889
101993
  initial: false
101890
101994
  })) throw new Error(ml`
@@ -105458,4 +105562,4 @@ const listCommand = defineCommand({
105458
105562
 
105459
105563
  //#endregion
105460
105564
  export { jsonArgs as $, printData as A, loadAccessToken as B, listOAuth2Clients as C, tokenCommand as D, getMachineUserToken as E, generateUserTypes as F, fetchUserInfo as G, readPlatformConfig as H, loadConfig as I, readPackageJson as J, initOAuth2Client as K, apiCall as L, generateCommand as M, apply as N, listCommand$3 as O, applyCommand as P, deploymentArgs as Q, apiCommand as R, listCommand$2 as S, getOAuth2Client as T, writePlatformConfig as U, loadWorkspaceId as V, fetchAll as W, commonArgs as X, PATScope as Y, confirmationArgs as Z, listWorkflowExecutions as _, createCommand as a, remove as b, resumeWorkflow as c, listCommand$1 as d, withCommonArgs as et, listWorkflows as f, getWorkflowExecution as g, executionsCommand as h, deleteWorkspace as i, generate as j, listMachineUsers as k, startCommand as l, getWorkflow as m, listWorkspaces as n, logger as nt, createWorkspace as o, getCommand as p, initOperatorClient as q, deleteCommand as r, resumeCommand as s, listCommand as t, workspaceArgs as tt, startWorkflow as u, show as v, getCommand$1 as w, removeCommand as x, showCommand as y, fetchLatestToken as z };
105461
- //# sourceMappingURL=list-CqNMJdug.mjs.map
105565
+ //# sourceMappingURL=list-BHj1dQPk.mjs.map